launchpad-reviewers team mailing list archive
Message #31465
[Merge] ~jugmac00/launchpad:delete-rock-recipe-builds-and-jobs-when-deleting-recipes into launchpad:master
Jürgen Gmach has proposed merging ~jugmac00/launchpad:delete-rock-recipe-builds-and-jobs-when-deleting-recipes into launchpad:master.
Commit message:
Delete rock recipe builds and jobs when deleting recipes
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~jugmac00/launchpad/+git/launchpad/+merge/472970
A dependent merge proposal uses the parser, which has already been implemented; see https://code.launchpad.net/~jugmac00/launchpad/+git/launchpad/+merge/473245
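
For reviewers skimming the thread, the core of the change is extending RockRecipe.destroySelf() so that it also removes the recipe's builds, their build farm jobs and queue entries, and any pending recipe jobs. The sketch below only illustrates that shape, using the model classes added in this diff; the exact Storm queries are an assumption, not a copy of the patch.

# Sketch only: class names follow the rock models added in this merge
# proposal; the precise queries are assumptions, not the actual hunk.
def destroySelf(self):
    """See `IRockRecipe`."""
    store = IStore(RockRecipe)
    # Remove queued build jobs first, then the builds themselves.
    build_farm_job_ids = list(
        store.find(
            RockRecipeBuild.build_farm_job_id,
            RockRecipeBuild.recipe == self,
        )
    )
    store.find(
        BuildQueue, BuildQueue._build_farm_job_id.is_in(build_farm_job_ids)
    ).remove()
    # (RockFile rows attached to these builds would be removed here too.)
    store.find(RockRecipeBuild, RockRecipeBuild.recipe == self).remove()
    # Remove recipe jobs (e.g. pending build requests) and their Job rows.
    job_ids = list(
        store.find(RockRecipeJob.job_id, RockRecipeJob.recipe == self)
    )
    store.find(RockRecipeJob, RockRecipeJob.recipe == self).remove()
    store.find(Job, Job.id.is_in(job_ids)).remove()
    # Finally remove the recipe and the now-orphaned BuildFarmJob rows.
    store.remove(self)
    store.find(
        BuildFarmJob, BuildFarmJob.id.is_in(build_farm_job_ids)
    ).remove()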
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~jugmac00/launchpad:delete-rock-recipe-builds-and-jobs-when-deleting-recipes into launchpad:master.
diff --git a/database/schema/security.cfg b/database/schema/security.cfg
index 2c23fd6..a906e9a 100644
--- a/database/schema/security.cfg
+++ b/database/schema/security.cfg
@@ -2917,3 +2917,34 @@ public.teammembership = SELECT
public.teamparticipation = SELECT
public.webhook = SELECT
public.webhookjob = SELECT, INSERT
+
+[rock-build-job]
+type=user
+groups=script
+public.account = SELECT
+public.builder = SELECT
+public.buildfarmjob = SELECT, INSERT
+public.buildqueue = SELECT, INSERT, UPDATE
+public.rockfile = SELECT
+public.rockrecipe = SELECT, UPDATE
+public.rockrecipebuild = SELECT, INSERT, UPDATE
+public.rockrecipebuildjob = SELECT, UPDATE
+public.rockrecipejob = SELECT, UPDATE
+public.distribution = SELECT
+public.distroarchseries = SELECT
+public.distroseries = SELECT
+public.emailaddress = SELECT
+public.gitref = SELECT
+public.gitrepository = SELECT
+public.job = SELECT, INSERT, UPDATE
+public.libraryfilealias = SELECT
+public.libraryfilecontent = SELECT
+public.person = SELECT
+public.personsettings = SELECT
+public.pocketchroot = SELECT
+public.processor = SELECT
+public.product = SELECT
+public.teammembership = SELECT
+public.teamparticipation = SELECT
+public.webhook = SELECT
+public.webhookjob = SELECT, INSERT
diff --git a/lib/lp/archiveuploader/rockrecipeupload.py b/lib/lp/archiveuploader/rockrecipeupload.py
new file mode 100644
index 0000000..d7b9152
--- /dev/null
+++ b/lib/lp/archiveuploader/rockrecipeupload.py
@@ -0,0 +1,66 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Process a rock recipe upload."""
+
+__all__ = [
+ "RockRecipeUpload",
+]
+
+import os
+
+from zope.component import getUtility
+
+from lp.archiveuploader.utils import UploadError
+from lp.buildmaster.enums import BuildStatus
+from lp.services.helpers import filenameToContentType
+from lp.services.librarian.interfaces import ILibraryFileAliasSet
+
+
+class RockRecipeUpload:
+ """A rock recipe upload."""
+
+ def __init__(self, upload_path, logger):
+ """Create a `RockRecipeUpload`.
+
+ :param upload_path: A directory containing files to upload.
+ :param logger: The logger to be used.
+ """
+ self.upload_path = upload_path
+ self.logger = logger
+
+ self.librarian = getUtility(ILibraryFileAliasSet)
+
+ def process(self, build):
+ """Process this upload, loading it into the database."""
+ self.logger.debug("Beginning processing.")
+
+ found_rock = False
+ rock_paths = []
+ for dirpath, _, filenames in os.walk(self.upload_path):
+ if dirpath == self.upload_path:
+ # All relevant files will be in a subdirectory.
+ continue
+ for rock_file in sorted(filenames):
+ if rock_file.endswith(".rock"):
+ found_rock = True
+ rock_paths.append(os.path.join(dirpath, rock_file))
+
+ if not found_rock:
+ raise UploadError("Build did not produce any rocks.")
+
+ for rock_path in rock_paths:
+ libraryfile = self.librarian.create(
+ os.path.basename(rock_path),
+ os.stat(rock_path).st_size,
+ open(rock_path, "rb"),
+ filenameToContentType(rock_path),
+ restricted=build.is_private,
+ )
+ build.addFile(libraryfile)
+
+ # The master verifies the status to confirm successful upload.
+ self.logger.debug("Updating %s" % build.title)
+ build.updateStatus(BuildStatus.FULLYBUILT)
+
+ self.logger.debug("Finished upload.")
diff --git a/lib/lp/archiveuploader/tests/test_rockrecipeupload.py b/lib/lp/archiveuploader/tests/test_rockrecipeupload.py
new file mode 100644
index 0000000..28a12cb
--- /dev/null
+++ b/lib/lp/archiveuploader/tests/test_rockrecipeupload.py
@@ -0,0 +1,78 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Tests for `RockRecipeUpload`."""
+
+import os
+
+from storm.store import Store
+
+from lp.archiveuploader.tests.test_uploadprocessor import (
+ TestUploadProcessorBase,
+)
+from lp.archiveuploader.uploadprocessor import UploadHandler, UploadStatusEnum
+from lp.buildmaster.enums import BuildStatus
+from lp.rocks.interfaces.rockrecipe import ROCK_RECIPE_ALLOW_CREATE
+from lp.services.features.testing import FeatureFixture
+from lp.services.osutils import write_file
+
+
+class TestRockRecipeUploads(TestUploadProcessorBase):
+ """End-to-end tests of rock recipe uploads."""
+
+ def setUp(self):
+ super().setUp()
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+
+ self.setupBreezy()
+
+ self.switchToAdmin()
+ self.build = self.factory.makeRockRecipeBuild(
+ distro_arch_series=self.breezy["i386"]
+ )
+ self.build.updateStatus(BuildStatus.UPLOADING)
+ Store.of(self.build).flush()
+ self.switchToUploader()
+ self.options.context = "buildd"
+
+ self.uploadprocessor = self.getUploadProcessor(
+ self.layer.txn, builds=True
+ )
+
+ def test_sets_build_and_state(self):
+ # The upload processor uploads files and sets the correct status.
+ self.assertFalse(self.build.verifySuccessfulUpload())
+ upload_dir = os.path.join(
+ self.incoming_folder, "test", str(self.build.id), "ubuntu"
+ )
+ write_file(os.path.join(upload_dir, "foo_0_all.rock"), b"rock")
+ write_file(os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
+ handler = UploadHandler.forProcessor(
+ self.uploadprocessor, self.incoming_folder, "test", self.build
+ )
+ result = handler.processRockRecipe(self.log)
+ self.assertEqual(
+ UploadStatusEnum.ACCEPTED,
+ result,
+ "Rock upload failed\nGot: %s" % self.log.getLogBuffer(),
+ )
+ self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
+ self.assertTrue(self.build.verifySuccessfulUpload())
+
+ def test_requires_rock(self):
+ # The upload processor fails if the upload does not contain any
+ # .rock files.
+ self.assertFalse(self.build.verifySuccessfulUpload())
+ upload_dir = os.path.join(
+ self.incoming_folder, "test", str(self.build.id), "ubuntu"
+ )
+ write_file(os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
+ handler = UploadHandler.forProcessor(
+ self.uploadprocessor, self.incoming_folder, "test", self.build
+ )
+ result = handler.processRockRecipe(self.log)
+ self.assertEqual(UploadStatusEnum.REJECTED, result)
+ self.assertIn(
+ "ERROR Build did not produce any rocks.", self.log.getLogBuffer()
+ )
+ self.assertFalse(self.build.verifySuccessfulUpload())
diff --git a/lib/lp/archiveuploader/uploadprocessor.py b/lib/lp/archiveuploader/uploadprocessor.py
index baa903b..1289241 100644
--- a/lib/lp/archiveuploader/uploadprocessor.py
+++ b/lib/lp/archiveuploader/uploadprocessor.py
@@ -44,7 +44,6 @@ worst of the results from the various changes files found (in the order
above, failed being worst).
"""
-
import os
import shutil
import sys
@@ -61,6 +60,7 @@ from lp.archiveuploader.nascentupload import (
NascentUpload,
)
from lp.archiveuploader.ocirecipeupload import OCIRecipeUpload
+from lp.archiveuploader.rockrecipeupload import RockRecipeUpload
from lp.archiveuploader.snapupload import SnapUpload
from lp.archiveuploader.uploadpolicy import (
BuildDaemonUploadPolicy,
@@ -77,6 +77,7 @@ from lp.code.interfaces.sourcepackagerecipebuild import (
from lp.oci.interfaces.ocirecipebuild import IOCIRecipeBuild
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.interfaces.person import IPersonSet
+from lp.rocks.interfaces.rockrecipebuild import IRockRecipeBuild
from lp.services.log.logger import BufferLogger
from lp.services.statsd.interfaces.statsd_client import IStatsdClient
from lp.services.webapp.adapter import (
@@ -775,6 +776,32 @@ class BuildUploadHandler(UploadHandler):
self.processor.ztm.abort()
raise
+ def processRockRecipe(self, logger=None):
+ """Process a rock recipe upload."""
+ assert IRockRecipeBuild.providedBy(self.build)
+ if logger is None:
+ logger = self.processor.log
+ try:
+ logger.info("Processing rock upload %s" % self.upload_path)
+ RockRecipeUpload(self.upload_path, logger).process(self.build)
+
+ if self.processor.dry_run:
+ logger.info("Dry run, aborting transaction.")
+ self.processor.ztm.abort()
+ else:
+ logger.info(
+ "Committing the transaction and any mails associated "
+ "with this upload."
+ )
+ self.processor.ztm.commit()
+ return UploadStatusEnum.ACCEPTED
+ except UploadError as e:
+ logger.error(str(e))
+ return UploadStatusEnum.REJECTED
+ except BaseException:
+ self.processor.ztm.abort()
+ raise
+
def process(self):
"""Process an upload that is the result of a build.
@@ -830,6 +857,8 @@ class BuildUploadHandler(UploadHandler):
result = self.processOCIRecipe(logger)
elif ICharmRecipeBuild.providedBy(self.build):
result = self.processCharmRecipe(logger)
+ elif IRockRecipeBuild.providedBy(self.build):
+ result = self.processRockRecipe(logger)
elif ICIBuild.providedBy(self.build):
result = self.processCIResult(logger)
else:
diff --git a/lib/lp/buildmaster/enums.py b/lib/lp/buildmaster/enums.py
index 672bc80..be0a30b 100644
--- a/lib/lp/buildmaster/enums.py
+++ b/lib/lp/buildmaster/enums.py
@@ -249,6 +249,15 @@ class BuildFarmJobType(DBEnumeratedType):
""",
)
+ ROCKRECIPEBUILD = DBItem(
+ 10,
+ """
+ Rock recipe build
+
+ Build a rock from a recipe.
+ """,
+ )
+
class BuildQueueStatus(DBEnumeratedType):
"""Build queue status.
diff --git a/lib/lp/registry/personmerge.py b/lib/lp/registry/personmerge.py
index f653c34..b16cea2 100644
--- a/lib/lp/registry/personmerge.py
+++ b/lib/lp/registry/personmerge.py
@@ -24,6 +24,7 @@ from lp.registry.interfaces.teammembership import (
ITeamMembershipSet,
TeamMembershipStatus,
)
+from lp.rocks.interfaces.rockrecipe import IRockRecipeSet
from lp.services.database import postgresql
from lp.services.database.interfaces import IStore
from lp.services.database.sqlbase import cursor, sqlvalues
@@ -936,6 +937,25 @@ def _mergeCharmRecipe(cur, from_person, to_person):
IStore(recipes[0]).flush()
+def _mergeRockRecipe(cur, from_person, to_person):
+ # This shouldn't use removeSecurityProxy.
+ recipes = getUtility(IRockRecipeSet).findByOwner(from_person)
+ existing_names = [
+ r.name for r in getUtility(IRockRecipeSet).findByOwner(to_person)
+ ]
+ for recipe in recipes:
+ naked_recipe = removeSecurityProxy(recipe)
+ new_name = naked_recipe.name
+ count = 1
+ while new_name in existing_names:
+ new_name = "%s-%s" % (recipe.name, count)
+ count += 1
+ naked_recipe.owner = to_person
+ naked_recipe.name = new_name
+ if not recipes.is_empty():
+ IStore(recipes[0]).flush()
+
+
def _purgeUnmergableTeamArtifacts(from_team, to_team, reviewer):
"""Purge team artifacts that cannot be merged, but can be removed."""
# A team cannot have more than one mailing list.
@@ -1192,6 +1212,9 @@ def merge_people(from_person, to_person, reviewer, delete=False):
_mergeCharmRecipe(cur, from_id, to_id)
skip.append(("charmrecipe", "owner"))
+ _mergeRockRecipe(cur, from_id, to_id)
+ skip.append(("rockrecipe", "owner"))
+
_mergeVulnerabilitySubscription(cur, from_id, to_id)
skip.append(("vulnerabilitysubscription", "person"))
diff --git a/lib/lp/registry/tests/test_personmerge.py b/lib/lp/registry/tests/test_personmerge.py
index fcc5245..dd6a8dd 100644
--- a/lib/lp/registry/tests/test_personmerge.py
+++ b/lib/lp/registry/tests/test_personmerge.py
@@ -41,6 +41,10 @@ from lp.registry.personmerge import (
merge_people,
)
from lp.registry.tests.test_person import KarmaTestMixin
+from lp.rocks.interfaces.rockrecipe import (
+ ROCK_RECIPE_ALLOW_CREATE,
+ IRockRecipeSet,
+)
from lp.services.config import config
from lp.services.database.sqlbase import cursor
from lp.services.features.testing import FeatureFixture
@@ -914,6 +918,60 @@ class TestMergePeople(TestCaseWithFactory, KarmaTestMixin):
),
)
+ def test_merge_moves_rock_recipes(self):
+ # When person/teams are merged, rock recipes owned by the from
+ # person are moved.
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+ duplicate = self.factory.makePerson()
+ mergee = self.factory.makePerson()
+ self.factory.makeRockRecipe(registrant=duplicate, owner=duplicate)
+ self._do_premerge(duplicate, mergee)
+ login_person(mergee)
+ duplicate, mergee = self._do_merge(duplicate, mergee)
+ self.assertEqual(
+ 1, getUtility(IRockRecipeSet).findByOwner(mergee).count()
+ )
+
+ def test_merge_with_duplicated_rock_recipes(self):
+ # If both the from and to people have rock recipes with the same
+ # name, merging renames the duplicate from the from person's side.
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+ duplicate = self.factory.makePerson()
+ mergee = self.factory.makePerson()
+ [ref] = self.factory.makeGitRefs()
+ [ref2] = self.factory.makeGitRefs()
+ self.factory.makeRockRecipe(
+ registrant=duplicate, owner=duplicate, name="foo", git_ref=ref
+ )
+ self.factory.makeRockRecipe(
+ registrant=mergee, owner=mergee, name="foo", git_ref=ref2
+ )
+ self._do_premerge(duplicate, mergee)
+ login_person(mergee)
+ duplicate, mergee = self._do_merge(duplicate, mergee)
+ recipes = sorted(
+ getUtility(IRockRecipeSet).findByOwner(mergee),
+ key=attrgetter("name"),
+ )
+ self.assertEqual(2, len(recipes))
+ self.assertThat(
+ recipes,
+ MatchesListwise(
+ [
+ MatchesStructure.byEquality(
+ git_repository=ref2.repository,
+ git_path=ref2.path,
+ name="foo",
+ ),
+ MatchesStructure.byEquality(
+ git_repository=ref.repository,
+ git_path=ref.path,
+ name="foo-1",
+ ),
+ ]
+ ),
+ )
+
class TestMergeMailingListSubscriptions(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
diff --git a/lib/lp/rocks/adapters/__init__.py b/lib/lp/rocks/adapters/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/lp/rocks/adapters/__init__.py
diff --git a/lib/lp/rocks/adapters/buildarch.py b/lib/lp/rocks/adapters/buildarch.py
new file mode 100644
index 0000000..d2a1569
--- /dev/null
+++ b/lib/lp/rocks/adapters/buildarch.py
@@ -0,0 +1,164 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__all__ = [
+ "determine_instances_to_build",
+]
+
+import json
+from collections import Counter, OrderedDict
+
+from lp.services.helpers import english_list
+
+
+class RockBasesParserError(Exception):
+ """Base class for all exceptions in this module."""
+
+
+class MissingPropertyError(RockBasesParserError):
+ """Error for when an expected property is not present in the YAML."""
+
+ def __init__(self, prop):
+ super().__init__(
+ f"Base specification is missing the {prop!r} property"
+ )
+ self.property = prop
+
+
+class BadPropertyError(RockBasesParserError):
+ """Error for when a YAML property is malformed in some way."""
+
+
+class DuplicateRunOnError(RockBasesParserError):
+ """Error for when multiple `run-on`s include the same architecture."""
+
+ def __init__(self, duplicates):
+ super().__init__(
+ "{} {} present in the 'run-on' of multiple items".format(
+ english_list([str(d) for d in duplicates]),
+ "is" if len(duplicates) == 1 else "are",
+ )
+ )
+
+
+class RockBase:
+ """A single base in rockcraft.yaml."""
+
+ def __init__(self, name, channel, architectures=None):
+ self.name = name
+ if not isinstance(channel, str):
+ raise BadPropertyError(
+ "Channel {!r} is not a string (missing quotes?)".format(
+ channel
+ )
+ )
+ self.channel = channel
+ self.architectures = architectures
+
+ @classmethod
+ def from_dict(cls, properties):
+ """Create a new base from a dict."""
+ try:
+ name = properties["name"]
+ except KeyError:
+ raise MissingPropertyError("name")
+ try:
+ channel = properties["channel"]
+ except KeyError:
+ raise MissingPropertyError("channel")
+ return cls(
+ name=name,
+ channel=channel,
+ architectures=properties.get("architectures"),
+ )
+
+ def __eq__(self, other):
+ return (
+ self.name == other.name
+ and self.channel == other.channel
+ and self.architectures == other.architectures
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.name, self.channel, tuple(self.architectures)))
+
+ def __str__(self):
+ return "{} {} {}".format(
+ self.name, self.channel, json.dumps(self.architectures)
+ )
+
+
+class RockBaseConfiguration:
+ """A base configuration entry in rockcraft.yaml."""
+
+ def __init__(self, build_on, run_on=None):
+ self.build_on = build_on
+ self.run_on = list(build_on) if run_on is None else run_on
+
+ @classmethod
+ def from_dict(cls, properties):
+ """Create a new base configuration from a dict."""
+ # Expand short-form configuration into long-form. Account for
+ # common typos in case the user intends to use long-form but did so
+ # incorrectly (for better error message handling).
+ if not any(
+ item in properties
+ for item in ("run-on", "run_on", "build-on", "build_on")
+ ):
+ base = RockBase.from_dict(properties)
+ return cls([base], run_on=[base])
+
+ try:
+ build_on = properties["build-on"]
+ except KeyError:
+ raise MissingPropertyError("build-on")
+ build_on = [RockBase.from_dict(item) for item in build_on]
+ run_on = properties.get("run-on")
+ if run_on is not None:
+ run_on = [RockBase.from_dict(item) for item in run_on]
+ return cls(build_on, run_on=run_on)
+
+
+def determine_instances_to_build(rockcraft_data, supported_arches):
+ """Return a list of instances to build based on rockcraft.yaml.
+
+ :param rockcraft_data: A parsed rockcraft.yaml.
+ :param supported_arches: An ordered list of all `DistroArchSeries` that
+ we can create builds for. Note that these may span multiple
+ `DistroSeries`.
+ :return: A list of `DistroArchSeries`.
+ """
+ bases_list = rockcraft_data.get("bases")
+ configs = [RockBaseConfiguration.from_dict(item) for item in bases_list]
+ # Ensure that multiple `run-on` items don't overlap; this is ambiguous
+ # and forbidden by rockcraft.
+ run_ons = Counter()
+ for config in configs:
+ run_ons.update(config.run_on)
+ duplicates = {config for config, count in run_ons.items() if count > 1}
+ if duplicates:
+ raise DuplicateRunOnError(duplicates)
+
+ instances = OrderedDict()
+ for config in configs:
+ # Rocks are allowed to declare that they build on architectures
+ # that Launchpad doesn't currently support (perhaps they're
+ # upcoming, or perhaps they used to be supported). We just ignore
+ # those.
+ for build_on in config.build_on:
+ for das in supported_arches:
+ if (
+ das.distroseries.distribution.name == build_on.name
+ and build_on.channel
+ in (das.distroseries.name, das.distroseries.version)
+ and das.architecturetag in build_on.architectures
+ ):
+ instances[das] = None
+ break
+ else:
+ continue
+ break
+ return list(instances)
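
As a quick illustration of the short-form handling above (the values are made up, not taken from this diff): a bare name/channel mapping and the equivalent long-form build-on entry expand to the same configuration, with run_on defaulting to a copy of build_on.

from lp.rocks.adapters.buildarch import RockBase, RockBaseConfiguration

# Hypothetical input; both forms yield build_on == run_on == [ubuntu 22.04].
short = RockBaseConfiguration.from_dict({"name": "ubuntu", "channel": "22.04"})
long_form = RockBaseConfiguration.from_dict(
    {"build-on": [{"name": "ubuntu", "channel": "22.04"}]}
)
assert short.build_on == long_form.build_on == [RockBase("ubuntu", "22.04")]
assert short.run_on == long_form.run_on == [RockBase("ubuntu", "22.04")]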
diff --git a/lib/lp/rocks/adapters/tests/__init__.py b/lib/lp/rocks/adapters/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/lp/rocks/adapters/tests/__init__.py
diff --git a/lib/lp/rocks/adapters/tests/test_buildarch.py b/lib/lp/rocks/adapters/tests/test_buildarch.py
new file mode 100644
index 0000000..0868f9c
--- /dev/null
+++ b/lib/lp/rocks/adapters/tests/test_buildarch.py
@@ -0,0 +1,413 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+from functools import partial
+
+from testscenarios import WithScenarios, load_tests_apply_scenarios
+from testtools.matchers import (
+ Equals,
+ MatchesException,
+ MatchesListwise,
+ MatchesStructure,
+ Raises,
+)
+from zope.component import getUtility
+
+from lp.app.interfaces.launchpad import ILaunchpadCelebrities
+from lp.buildmaster.interfaces.processor import (
+ IProcessorSet,
+ ProcessorNotFound,
+)
+from lp.rocks.adapters.buildarch import (
+ DuplicateRunOnError,
+ RockBase,
+ RockBaseConfiguration,
+ determine_instances_to_build,
+)
+from lp.testing import TestCase, TestCaseWithFactory
+from lp.testing.layers import LaunchpadZopelessLayer
+
+
+class TestRockBaseConfiguration(WithScenarios, TestCase):
+
+ scenarios = [
+ (
+ "expanded",
+ {
+ "base": {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64", "arm64"],
+ },
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ },
+ ],
+ },
+ "expected_build_on": [
+ RockBase(
+ name="ubuntu", channel="18.04", architectures=["amd64"]
+ ),
+ ],
+ "expected_run_on": [
+ RockBase(
+ name="ubuntu",
+ channel="20.04",
+ architectures=["amd64", "arm64"],
+ ),
+ RockBase(
+ name="ubuntu", channel="18.04", architectures=["amd64"]
+ ),
+ ],
+ },
+ ),
+ (
+ "short form",
+ {
+ "base": {
+ "name": "ubuntu",
+ "channel": "20.04",
+ },
+ "expected_build_on": [
+ RockBase(name="ubuntu", channel="20.04")
+ ],
+ "expected_run_on": [RockBase(name="ubuntu", channel="20.04")],
+ },
+ ),
+ (
+ "no run-on",
+ {
+ "base": {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ },
+ "expected_build_on": [
+ RockBase(
+ name="ubuntu", channel="20.04", architectures=["amd64"]
+ ),
+ ],
+ "expected_run_on": [
+ RockBase(
+ name="ubuntu", channel="20.04", architectures=["amd64"]
+ ),
+ ],
+ },
+ ),
+ ]
+
+ def test_base(self):
+ config = RockBaseConfiguration.from_dict(self.base)
+ self.assertEqual(self.expected_build_on, config.build_on)
+ self.assertEqual(self.expected_run_on, config.run_on)
+
+
+class TestDetermineInstancesToBuild(WithScenarios, TestCaseWithFactory):
+
+ layer = LaunchpadZopelessLayer
+
+ scenarios = [
+ (
+ "single entry, single arch",
+ {
+ "bases": [
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ }
+ ],
+ "expected": [("18.04", "amd64")],
+ },
+ ),
+ (
+ "multiple entries, single arch",
+ {
+ "bases": [
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ },
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ },
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["riscv64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["riscv64"],
+ }
+ ],
+ },
+ ],
+ "expected": [
+ ("18.04", "amd64"),
+ ("20.04", "amd64"),
+ ("20.04", "riscv64"),
+ ],
+ },
+ ),
+ (
+ "single entry, multiple arches",
+ {
+ "bases": [
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64", "riscv64"],
+ }
+ ],
+ }
+ ],
+ "expected": [("20.04", "amd64")],
+ },
+ ),
+ (
+ "multiple entries, with cross-arch",
+ {
+ "bases": [
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["riscv64"],
+ }
+ ],
+ },
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ },
+ ],
+ "expected": [("20.04", "amd64")],
+ },
+ ),
+ (
+ "multiple run-on entries",
+ {
+ "bases": [
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ },
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64", "riscv64"],
+ },
+ ],
+ }
+ ],
+ "expected": [("20.04", "amd64")],
+ },
+ ),
+ (
+ "redundant outputs",
+ {
+ "bases": [
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "18.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ },
+ {
+ "build-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ "run-on": [
+ {
+ "name": "ubuntu",
+ "channel": "20.04",
+ "architectures": ["amd64"],
+ }
+ ],
+ },
+ ],
+ "expected_exception": MatchesException(
+ DuplicateRunOnError,
+ r"ubuntu 20\.04 \[\"amd64\"\] is present in the 'run-on' "
+ r"of multiple items",
+ ),
+ },
+ ),
+ (
+ "no bases specified",
+ {
+ "bases": None,
+ "expected": [
+ ("20.04", "amd64"),
+ ("20.04", "arm64"),
+ ("20.04", "riscv64"),
+ ],
+ },
+ ),
+ ]
+
+ def test_parser(self):
+ distro_serieses = [
+ self.factory.makeDistroSeries(
+ distribution=getUtility(ILaunchpadCelebrities).ubuntu,
+ version=version,
+ )
+ for version in ("20.04", "18.04")
+ ]
+ dases = []
+ for arch_tag in ("amd64", "arm64", "riscv64"):
+ try:
+ processor = getUtility(IProcessorSet).getByName(arch_tag)
+ except ProcessorNotFound:
+ processor = self.factory.makeProcessor(
+ name=arch_tag, supports_virtualized=True
+ )
+ for distro_series in distro_serieses:
+ dases.append(
+ self.factory.makeDistroArchSeries(
+ distroseries=distro_series,
+ architecturetag=arch_tag,
+ processor=processor,
+ )
+ )
+ rockcraft_data = {}
+ if self.bases is not None:
+ rockcraft_data["bases"] = self.bases
+ build_instances_factory = partial(
+ determine_instances_to_build,
+ rockcraft_data,
+ dases,
+ distro_serieses[0],
+ )
+ if hasattr(self, "expected_exception"):
+ self.assertThat(
+ build_instances_factory, Raises(self.expected_exception)
+ )
+ else:
+ self.assertThat(
+ build_instances_factory(),
+ MatchesListwise(
+ [
+ MatchesStructure(
+ distroseries=MatchesStructure.byEquality(
+ version=version
+ ),
+ architecturetag=Equals(arch_tag),
+ )
+ for version, arch_tag in self.expected
+ ]
+ ),
+ )
+
+
+load_tests = load_tests_apply_scenarios
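
Note that these scenarios pass a default distro series as a third argument and include a bases: None case. A minimal sketch of how the fallback for that case might look is below, assuming a default_distro_series parameter; it is an illustration, not part of this merge proposal's buildarch.py.

def determine_instances_to_build(
    rockcraft_data, supported_arches, default_distro_series
):
    bases_list = rockcraft_data.get("bases")
    if bases_list:
        configs = [
            RockBaseConfiguration.from_dict(item) for item in bases_list
        ]
    else:
        # With no bases given, fall back to one build per supported
        # architecture of the default series, as the "no bases specified"
        # scenario above expects.
        configs = [
            RockBaseConfiguration(
                [
                    RockBase(
                        name=default_distro_series.distribution.name,
                        channel=default_distro_series.version,
                        architectures=[das.architecturetag],
                    )
                ]
            )
            for das in supported_arches
            if das.distroseries == default_distro_series
        ]
    # The duplicate run-on check and architecture matching then proceed as
    # in the module above.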
diff --git a/lib/lp/rocks/browser/configure.zcml b/lib/lp/rocks/browser/configure.zcml
index 20a2c89..e63b839 100644
--- a/lib/lp/rocks/browser/configure.zcml
+++ b/lib/lp/rocks/browser/configure.zcml
@@ -12,5 +12,19 @@
<lp:url
for="lp.rocks.interfaces.rockrecipe.IRockRecipe"
urldata="lp.rocks.browser.rockrecipe.RockRecipeURL" />
+<<<<<<< lib/lp/rocks/browser/configure.zcml
+=======
+ <lp:navigation
+ module="lp.rocks.browser.rockrecipe"
+ classes="RockRecipeNavigation" />
+ <lp:url
+ for="lp.rocks.interfaces.rockrecipe.IRockRecipeBuildRequest"
+ path_expression="string:+build-request/${id}"
+ attribute_to_parent="recipe" />
+ <lp:url
+ for="lp.rocks.interfaces.rockrecipebuild.IRockRecipeBuild"
+ path_expression="string:+build/${id}"
+ attribute_to_parent="recipe" />
+>>>>>>> lib/lp/rocks/browser/configure.zcml
</lp:facet>
</configure>
diff --git a/lib/lp/rocks/browser/rockrecipe.py b/lib/lp/rocks/browser/rockrecipe.py
index 67164d6..6a665fe 100644
--- a/lib/lp/rocks/browser/rockrecipe.py
+++ b/lib/lp/rocks/browser/rockrecipe.py
@@ -4,6 +4,10 @@
"""Rock recipe views."""
__all__ = [
+<<<<<<< lib/lp/rocks/browser/rockrecipe.py
+=======
+ "RockRecipeNavigation",
+>>>>>>> lib/lp/rocks/browser/rockrecipe.py
"RockRecipeURL",
]
@@ -11,7 +15,15 @@ from zope.component import getUtility
from zope.interface import implementer
from lp.registry.interfaces.personproduct import IPersonProductFactory
+<<<<<<< lib/lp/rocks/browser/rockrecipe.py
from lp.services.webapp.interfaces import ICanonicalUrlData
+=======
+from lp.rocks.interfaces.rockrecipe import IRockRecipe
+from lp.rocks.interfaces.rockrecipebuild import IRockRecipeBuildSet
+from lp.services.webapp import Navigation, stepthrough
+from lp.services.webapp.interfaces import ICanonicalUrlData
+from lp.soyuz.browser.build import get_build_by_id_str
+>>>>>>> lib/lp/rocks/browser/rockrecipe.py
@implementer(ICanonicalUrlData)
@@ -32,3 +44,25 @@ class RockRecipeURL:
@property
def path(self):
return "+rock/%s" % self.recipe.name
+<<<<<<< lib/lp/rocks/browser/rockrecipe.py
+=======
+
+
+class RockRecipeNavigation(Navigation):
+ usedfor = IRockRecipe
+
+ @stepthrough("+build-request")
+ def traverse_build_request(self, name):
+ try:
+ job_id = int(name)
+ except ValueError:
+ return None
+ return self.context.getBuildRequest(job_id)
+
+ @stepthrough("+build")
+ def traverse_build(self, name):
+ build = get_build_by_id_str(IRockRecipeBuildSet, name)
+ if build is None or build.recipe != self.context:
+ return None
+ return build
+>>>>>>> lib/lp/rocks/browser/rockrecipe.py
diff --git a/lib/lp/rocks/configure.zcml b/lib/lp/rocks/configure.zcml
index aa2cdca..a99688e 100644
--- a/lib/lp/rocks/configure.zcml
+++ b/lib/lp/rocks/configure.zcml
@@ -40,4 +40,66 @@
<allow interface="lp.rocks.interfaces.rockrecipe.IRockRecipeSet" />
</lp:securedutility>
+<<<<<<< lib/lp/rocks/configure.zcml
+=======
+ <!-- RockRecipeBuildRequest -->
+ <class class="lp.rocks.model.rockrecipe.RockRecipeBuildRequest">
+ <require
+ permission="launchpad.View"
+ interface="lp.rocks.interfaces.rockrecipe.IRockRecipeBuildRequest" />
+ </class>
+
+ <!-- RockRecipeBuild -->
+ <class class="lp.rocks.model.rockrecipebuild.RockRecipeBuild">
+ <require
+ permission="launchpad.View"
+ interface="lp.rocks.interfaces.rockrecipebuild.IRockRecipeBuildView" />
+ <require
+ permission="launchpad.Edit"
+ interface="lp.rocks.interfaces.rockrecipebuild.IRockRecipeBuildEdit" />
+ <require
+ permission="launchpad.Admin"
+ interface="lp.rocks.interfaces.rockrecipebuild.IRockRecipeBuildAdmin" />
+ </class>
+
+ <!-- RockRecipeBuildSet -->
+ <lp:securedutility
+ class="lp.rocks.model.rockrecipebuild.RockRecipeBuildSet"
+ provides="lp.rocks.interfaces.rockrecipebuild.IRockRecipeBuildSet">
+ <allow interface="lp.rocks.interfaces.rockrecipebuild.IRockRecipeBuildSet" />
+ </lp:securedutility>
+ <lp:securedutility
+ class="lp.rocks.model.rockrecipebuild.RockRecipeBuildSet"
+ provides="lp.buildmaster.interfaces.buildfarmjob.ISpecificBuildFarmJobSource"
+ name="ROCKRECIPEBUILD">
+ <allow interface="lp.buildmaster.interfaces.buildfarmjob.ISpecificBuildFarmJobSource" />
+ </lp:securedutility>
+
+ <!-- RockFile -->
+ <class class="lp.rocks.model.rockrecipebuild.RockFile">
+ <allow interface="lp.rocks.interfaces.rockrecipebuild.IRockFile" />
+ </class>
+
+ <!-- RockRecipeBuildBehaviour -->
+ <adapter
+ for="lp.rocks.interfaces.rockrecipebuild.IRockRecipeBuild"
+ provides="lp.buildmaster.interfaces.buildfarmjobbehaviour.IBuildFarmJobBehaviour"
+ factory="lp.rocks.model.rockrecipebuildbehaviour.RockRecipeBuildBehaviour"
+ permission="zope.Public" />
+
+ <!-- rock-related jobs -->
+ <class class="lp.rocks.model.rockrecipejob.RockRecipeJob">
+ <allow interface="lp.rocks.interfaces.rockrecipejob.IRockRecipeJob" />
+ </class>
+ <lp:securedutility
+ component="lp.rocks.model.rockrecipejob.RockRecipeRequestBuildsJob"
+ provides="lp.rocks.interfaces.rockrecipejob.IRockRecipeRequestBuildsJobSource">
+ <allow interface="lp.rocks.interfaces.rockrecipejob.IRockRecipeRequestBuildsJobSource" />
+ </lp:securedutility>
+ <class class="lp.rocks.model.rockrecipejob.RockRecipeRequestBuildsJob">
+ <allow interface="lp.rocks.interfaces.rockrecipejob.IRockRecipeJob" />
+ <allow interface="lp.rocks.interfaces.rockrecipejob.IRockRecipeRequestBuildsJob" />
+ </class>
+
+>>>>>>> lib/lp/rocks/configure.zcml
</configure>
diff --git a/lib/lp/rocks/emailtemplates/rockrecipebuild-notification.txt b/lib/lp/rocks/emailtemplates/rockrecipebuild-notification.txt
new file mode 100644
index 0000000..af69ceb
--- /dev/null
+++ b/lib/lp/rocks/emailtemplates/rockrecipebuild-notification.txt
@@ -0,0 +1,9 @@
+ * Rock Recipe: %(recipe_name)s
+ * Project: %(project_name)s
+ * Distroseries: %(distroseries)s
+ * Architecture: %(architecturetag)s
+ * State: %(build_state)s
+ * Duration: %(build_duration)s
+ * Build Log: %(log_url)s
+ * Upload Log: %(upload_log_url)s
+ * Builder: %(builder_url)s
diff --git a/lib/lp/rocks/interfaces/rockrecipe.py b/lib/lp/rocks/interfaces/rockrecipe.py
index a00c1f7..07db72a 100644
--- a/lib/lp/rocks/interfaces/rockrecipe.py
+++ b/lib/lp/rocks/interfaces/rockrecipe.py
@@ -6,25 +6,59 @@
__all__ = [
"BadRockRecipeSource",
"BadRockRecipeSearchContext",
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
"ROCK_RECIPE_ALLOW_CREATE",
"ROCK_RECIPE_PRIVATE_FEATURE_FLAG",
+=======
+ "CannotFetchRockcraftYaml",
+ "CannotParseRockcraftYaml",
+ "ROCK_RECIPE_ALLOW_CREATE",
+ "ROCK_RECIPE_PRIVATE_FEATURE_FLAG",
+ "RockRecipeBuildAlreadyPending",
+ "RockRecipeBuildDisallowedArchitecture",
+ "RockRecipeBuildRequestStatus",
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
"RockRecipeFeatureDisabled",
"RockRecipeNotOwner",
"RockRecipePrivacyMismatch",
"RockRecipePrivateFeatureDisabled",
"DuplicateRockRecipeName",
"IRockRecipe",
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
"IRockRecipeSet",
+=======
+ "IRockRecipeBuildRequest",
+ "IRockRecipeSet",
+ "MissingRockcraftYaml",
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
"NoSourceForRockRecipe",
"NoSuchRockRecipe",
]
import http.client
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
from lazr.restful.declarations import error_status, exported
from lazr.restful.fields import Reference, ReferenceChoice
from zope.interface import Interface
from zope.schema import Bool, Choice, Datetime, Dict, Int, List, Text, TextLine
+=======
+from lazr.enum import EnumeratedType, Item
+from lazr.restful.declarations import error_status, exported
+from lazr.restful.fields import CollectionField, Reference, ReferenceChoice
+from zope.interface import Interface
+from zope.schema import (
+ Bool,
+ Choice,
+ Datetime,
+ Dict,
+ Int,
+ List,
+ Set,
+ Text,
+ TextLine,
+)
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
from zope.security.interfaces import Unauthorized
from lp import _
@@ -35,6 +69,10 @@ from lp.app.validators.name import name_validator
from lp.app.validators.path import path_does_not_escape
from lp.code.interfaces.gitref import IGitRef
from lp.code.interfaces.gitrepository import IGitRepository
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
+=======
+from lp.registry.interfaces.person import IPerson
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
from lp.registry.interfaces.product import IProduct
from lp.services.fields import PersonChoice, PublicPersonChoice
from lp.snappy.validators.channels import channels_validator
@@ -113,6 +151,139 @@ class BadRockRecipeSearchContext(Exception):
"""The context is not valid for a rock recipe search."""
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
+=======
+class MissingRockcraftYaml(Exception):
+ """The repository for this rock recipe does not have a rockcraft.yaml."""
+
+ def __init__(self, branch_name):
+ super().__init__("Cannot find rockcraft.yaml in %s" % branch_name)
+
+
+class CannotFetchRockcraftYaml(Exception):
+ """Launchpad cannot fetch this rock recipe's rockcraft.yaml."""
+
+
+class CannotParseRockcraftYaml(Exception):
+ """Launchpad cannot parse this rock recipe's rockcraft.yaml."""
+
+
+@error_status(http.client.BAD_REQUEST)
+class RockRecipeBuildAlreadyPending(Exception):
+ """A build was requested when an identical build was already pending."""
+
+ def __init__(self):
+ super().__init__(
+ "An identical build of this rock recipe is already pending."
+ )
+
+
+@error_status(http.client.BAD_REQUEST)
+class RockRecipeBuildDisallowedArchitecture(Exception):
+ """A build was requested for a disallowed architecture."""
+
+ def __init__(self, das):
+ super().__init__(
+ "This rock recipe is not allowed to build for %s/%s."
+ % (das.distroseries.name, das.architecturetag)
+ )
+
+
+class RockRecipeBuildRequestStatus(EnumeratedType):
+ """The status of a request to build a rock recipe."""
+
+ PENDING = Item(
+ """
+ Pending
+
+ This rock recipe build request is pending.
+ """
+ )
+
+ FAILED = Item(
+ """
+ Failed
+
+ This rock recipe build request failed.
+ """
+ )
+
+ COMPLETED = Item(
+ """
+ Completed
+
+ This rock recipe build request completed successfully.
+ """
+ )
+
+
+class IRockRecipeBuildRequest(Interface):
+ """A request to build a rock recipe."""
+
+ id = Int(title=_("ID"), required=True, readonly=True)
+
+ date_requested = Datetime(
+ title=_("The time when this request was made"),
+ required=True,
+ readonly=True,
+ )
+
+ date_finished = Datetime(
+ title=_("The time when this request finished"),
+ required=False,
+ readonly=True,
+ )
+
+ recipe = Reference(
+ # Really IRockRecipe.
+ Interface,
+ title=_("Rock recipe"),
+ required=True,
+ readonly=True,
+ )
+
+ status = Choice(
+ title=_("Status"),
+ vocabulary=RockRecipeBuildRequestStatus,
+ required=True,
+ readonly=True,
+ )
+
+ error_message = TextLine(
+ title=_("Error message"), required=True, readonly=True
+ )
+
+ builds = CollectionField(
+ title=_("Builds produced by this request"),
+ # Really IRockRecipeBuild.
+ value_type=Reference(schema=Interface),
+ required=True,
+ readonly=True,
+ )
+
+ requester = Reference(
+ title=_("The person requesting the builds."),
+ schema=IPerson,
+ required=True,
+ readonly=True,
+ )
+
+ channels = Dict(
+ title=_("Source snap channels for builds produced by this request"),
+ key_type=TextLine(),
+ required=False,
+ readonly=True,
+ )
+
+ architectures = Set(
+ title=_("If set, this request is limited to these architecture tags"),
+ value_type=TextLine(),
+ required=False,
+ readonly=True,
+ )
+
+
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
class IRockRecipeView(Interface):
"""`IRockRecipe` attributes that require launchpad.View permission."""
@@ -149,6 +320,71 @@ class IRockRecipeView(Interface):
def visibleByUser(user):
"""Can the specified user see this rock recipe?"""
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
+=======
+ def requestBuild(build_request, distro_arch_series, channels=None):
+ """Request a single build of this rock recipe.
+
+ This method is for internal use; external callers should use
+ `requestBuilds` instead.
+
+ :param build_request: The `IRockRecipeBuildRequest` job being
+ processed.
+ :param distro_arch_series: The architecture to build for.
+ :param channels: A dictionary mapping snap names to channels to use
+ for this build.
+ :return: `IRockRecipeBuild`.
+ """
+
+ def requestBuilds(requester, channels=None, architectures=None):
+ """Request that the rock recipe be built.
+
+ This is an asynchronous operation; once the operation has finished,
+ the resulting build request's C{status} will be "Completed" and its
+ C{builds} collection will return the resulting builds.
+
+ :param requester: The person requesting the builds.
+ :param channels: A dictionary mapping snap names to channels to use
+ for these builds.
+ :param architectures: If not None, limit builds to architectures
+ with these architecture tags (in addition to any other
+ applicable constraints).
+ :return: An `IRockRecipeBuildRequest`.
+ """
+
+ def requestBuildsFromJob(
+ build_request,
+ channels=None,
+ architectures=None,
+ allow_failures=False,
+ logger=None,
+ ):
+ """Synchronous part of `RockRecipe.requestBuilds`.
+
+ Request that the rock recipe be built for relevant architectures.
+
+ :param build_request: The `IRockRecipeBuildRequest` job being
+ processed.
+ :param channels: A dictionary mapping snap names to channels to use
+ for these builds.
+ :param architectures: If not None, limit builds to architectures
+ with these architecture tags (in addition to any other
+ applicable constraints).
+ :param allow_failures: If True, log exceptions other than "already
+ pending" from individual build requests; if False, raise them to
+ the caller.
+ :param logger: An optional logger.
+ :return: A sequence of `IRockRecipeBuild` instances.
+ """
+
+ def getBuildRequest(job_id):
+ """Get an asynchronous build request by ID.
+
+ :param job_id: The ID of the build request.
+ :return: `IRockRecipeBuildRequest`.
+ """
+
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
class IRockRecipeEdit(Interface):
"""`IRockRecipe` methods that require launchpad.Edit permission."""
@@ -366,9 +602,38 @@ class IRockRecipeSet(Interface):
def getByName(owner, project, name):
"""Returns the appropriate `IRockRecipe` for the given objects."""
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
+ def isValidInformationType(information_type, owner, git_ref=None):
+ """Whether the information type context is valid."""
+
+=======
+ def exists(owner, project, name):
+ """Check to see if a matching rock recipe exists."""
+
def isValidInformationType(information_type, owner, git_ref=None):
"""Whether the information type context is valid."""
+ def preloadDataForRecipes(recipes, user):
+ """Load the data related to a list of rock recipes."""
+
+ def getRockcraftYaml(context, logger=None):
+ """Fetch a recipe's rockcraft.yaml from code hosting, if possible.
+
+ :param context: Either an `IRockRecipe` or the source branch for a
+ rock recipe.
+ :param logger: An optional logger.
+
+ :return: The recipe's parsed rockcraft.yaml.
+ :raises MissingRockcraftYaml: if this recipe has no
+ rockcraft.yaml.
+ :raises CannotFetchRockcraftYaml: if it was not possible to fetch
+ rockcraft.yaml from the code hosting backend for some other
+ reason.
+ :raises CannotParseRockcraftYaml: if the fetched rockcraft.yaml
+ cannot be parsed.
+ """
+
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
def findByGitRepository(repository, paths=None):
"""Return all rock recipes for the given Git repository.
@@ -377,6 +642,12 @@ class IRockRecipeSet(Interface):
these Git reference paths.
"""
+<<<<<<< lib/lp/rocks/interfaces/rockrecipe.py
+=======
+ def findByOwner(owner):
+ """Return all rock recipes with the given `owner`."""
+
+>>>>>>> lib/lp/rocks/interfaces/rockrecipe.py
def detachFromGitRepository(repository):
"""Detach all rock recipes from the given Git repository.
diff --git a/lib/lp/rocks/interfaces/rockrecipebuild.py b/lib/lp/rocks/interfaces/rockrecipebuild.py
new file mode 100644
index 0000000..f2f5eb7
--- /dev/null
+++ b/lib/lp/rocks/interfaces/rockrecipebuild.py
@@ -0,0 +1,196 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Rock recipe build interfaces."""
+
+__all__ = [
+ "IRockFile",
+ "IRockRecipeBuild",
+ "IRockRecipeBuildSet",
+]
+
+from lazr.restful.fields import Reference
+from zope.interface import Attribute, Interface
+from zope.schema import Bool, Datetime, Dict, Int, TextLine
+
+from lp import _
+from lp.buildmaster.interfaces.buildfarmjob import (
+ IBuildFarmJobEdit,
+ ISpecificBuildFarmJobSource,
+)
+from lp.buildmaster.interfaces.packagebuild import (
+ IPackageBuild,
+ IPackageBuildView,
+)
+from lp.registry.interfaces.person import IPerson
+from lp.rocks.interfaces.rockrecipe import IRockRecipe, IRockRecipeBuildRequest
+from lp.services.database.constants import DEFAULT
+from lp.services.librarian.interfaces import ILibraryFileAlias
+from lp.soyuz.interfaces.distroarchseries import IDistroArchSeries
+
+
+class IRockRecipeBuildView(IPackageBuildView):
+ """`IRockRecipeBuild` attributes that require launchpad.View."""
+
+ build_request = Reference(
+ IRockRecipeBuildRequest,
+ title=_("The build request that caused this build to be created."),
+ required=True,
+ readonly=True,
+ )
+
+ requester = Reference(
+ IPerson,
+ title=_("The person who requested this build."),
+ required=True,
+ readonly=True,
+ )
+
+ recipe = Reference(
+ IRockRecipe,
+ title=_("The rock recipe to build."),
+ required=True,
+ readonly=True,
+ )
+
+ distro_arch_series = Reference(
+ IDistroArchSeries,
+ title=_("The series and architecture for which to build."),
+ required=True,
+ readonly=True,
+ )
+
+ channels = Dict(
+ title=_("Source snap channels to use for this build."),
+ description=_(
+ "A dictionary mapping snap names to channels to use for this "
+ "build. Currently only 'core', 'core18', 'core20', "
+ "and 'rockcraft' keys are supported."
+ ),
+ key_type=TextLine(),
+ )
+
+ virtualized = Bool(
+ title=_("If True, this build is virtualized."), readonly=True
+ )
+
+ score = Int(
+ title=_("Score of the related build farm job (if any)."),
+ required=False,
+ readonly=True,
+ )
+
+ eta = Datetime(
+ title=_("The datetime when the build job is estimated to complete."),
+ readonly=True,
+ )
+
+ estimate = Bool(
+ title=_("If true, the date value is an estimate."), readonly=True
+ )
+
+ date = Datetime(
+ title=_(
+ "The date when the build completed or is estimated to complete."
+ ),
+ readonly=True,
+ )
+
+ revision_id = TextLine(
+ title=_("Revision ID"),
+ required=False,
+ readonly=True,
+ description=_(
+ "The revision ID of the branch used for this build, if "
+ "available."
+ ),
+ )
+
+ store_upload_metadata = Attribute(
+ _("A dict of data about store upload progress.")
+ )
+
+ def getFiles():
+ """Retrieve the build's `IRockFile` records.
+
+ :return: A result set of (`IRockFile`, `ILibraryFileAlias`,
+ `ILibraryFileContent`).
+ """
+
+ def getFileByName(filename):
+ """Return the corresponding `ILibraryFileAlias` in this context.
+
+ The following file types (and extension) can be looked up:
+
+ * Build log: '.txt.gz'
+ * Upload log: '_log.txt'
+
+ Any filename not matching one of these extensions is looked up as a
+ rock recipe output file.
+
+ :param filename: The filename to look up.
+ :raises NotFoundError: if no file exists with the given name.
+ :return: The corresponding `ILibraryFileAlias`.
+ """
+
+
+class IRockRecipeBuildEdit(IBuildFarmJobEdit):
+ """`IRockRecipeBuild` methods that require launchpad.Edit."""
+
+ def addFile(lfa):
+ """Add a file to this build.
+
+ :param lfa: An `ILibraryFileAlias`.
+ :return: An `IRockFile`.
+ """
+
+
+class IRockRecipeBuildAdmin(Interface):
+ """`IRockRecipeBuild` methods that require launchpad.Admin."""
+
+ def rescore(score):
+ """Change the build's score."""
+
+
+class IRockRecipeBuild(
+ IRockRecipeBuildView,
+ IRockRecipeBuildEdit,
+ IRockRecipeBuildAdmin,
+ IPackageBuild,
+):
+ """A build record for a rock recipe."""
+
+
+class IRockRecipeBuildSet(ISpecificBuildFarmJobSource):
+ """Utility to create and access `IRockRecipeBuild`s."""
+
+ def new(
+ build_request,
+ recipe,
+ distro_arch_series,
+ channels=None,
+ store_upload_metadata=None,
+ date_created=DEFAULT,
+ ):
+ """Create an `IRockRecipeBuild`."""
+
+ def preloadBuildsData(builds):
+ """Load the data related to a list of rock recipe builds."""
+
+
+class IRockFile(Interface):
+ """A file produced by a rock recipe build."""
+
+ build = Reference(
+ IRockRecipeBuild,
+ title=_("The rock recipe build producing this file."),
+ required=True,
+ readonly=True,
+ )
+
+ library_file = Reference(
+ ILibraryFileAlias,
+ title=_("The library file alias for this file."),
+ required=True,
+ readonly=True,
+ )
diff --git a/lib/lp/rocks/interfaces/rockrecipejob.py b/lib/lp/rocks/interfaces/rockrecipejob.py
new file mode 100644
index 0000000..f3f79a8
--- /dev/null
+++ b/lib/lp/rocks/interfaces/rockrecipejob.py
@@ -0,0 +1,135 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Rock recipe job interfaces."""
+
+__all__ = [
+ "IRockRecipeJob",
+ "IRockRecipeRequestBuildsJob",
+ "IRockRecipeRequestBuildsJobSource",
+]
+
+from lazr.restful.fields import Reference
+from zope.interface import Attribute, Interface
+from zope.schema import Datetime, Dict, List, Set, TextLine
+
+from lp import _
+from lp.registry.interfaces.person import IPerson
+from lp.rocks.interfaces.rockrecipe import IRockRecipe, IRockRecipeBuildRequest
+from lp.rocks.interfaces.rockrecipebuild import IRockRecipeBuild
+from lp.services.job.interfaces.job import IJob, IJobSource, IRunnableJob
+
+
+class IRockRecipeJob(Interface):
+ """A job related to a rock recipe."""
+
+ job = Reference(
+ title=_("The common Job attributes."),
+ schema=IJob,
+ required=True,
+ readonly=True,
+ )
+
+ recipe = Reference(
+ title=_("The rock recipe to use for this job."),
+ schema=IRockRecipe,
+ required=True,
+ readonly=True,
+ )
+
+ metadata = Attribute(_("A dict of data about the job."))
+
+
+class IRockRecipeRequestBuildsJob(IRunnableJob):
+ """A Job that processes a request for builds of a rock recipe."""
+
+ requester = Reference(
+ title=_("The person requesting the builds."),
+ schema=IPerson,
+ required=True,
+ readonly=True,
+ )
+
+ channels = Dict(
+ title=_("Source snap channels to use for these builds."),
+ description=_(
+ "A dictionary mapping snap names to channels to use for these "
+ "builds. Currently only 'core', 'core18', 'core20', and "
+ "'rockcraft' keys are supported."
+ ),
+ key_type=TextLine(),
+ required=False,
+ readonly=True,
+ )
+
+ architectures = Set(
+ title=_("If set, limit builds to these architecture tags."),
+ value_type=TextLine(),
+ required=False,
+ readonly=True,
+ )
+
+ date_created = Datetime(
+ title=_("Time when this job was created."),
+ required=True,
+ readonly=True,
+ )
+
+ date_finished = Datetime(
+ title=_("Time when this job finished."), required=True, readonly=True
+ )
+
+ error_message = TextLine(
+ title=_("Error message resulting from running this job."),
+ required=False,
+ readonly=True,
+ )
+
+ build_request = Reference(
+ title=_("The build request corresponding to this job."),
+ schema=IRockRecipeBuildRequest,
+ required=True,
+ readonly=True,
+ )
+
+ builds = List(
+ title=_("The builds created by this request."),
+ value_type=Reference(schema=IRockRecipeBuild),
+ required=True,
+ readonly=True,
+ )
+
+
+class IRockRecipeRequestBuildsJobSource(IJobSource):
+
+ def create(recipe, requester, channels=None, architectures=None):
+ """Request builds of a rock recipe.
+
+ :param recipe: The rock recipe to build.
+ :param requester: The person requesting the builds.
+ :param channels: A dictionary mapping snap names to channels to use
+ for these builds.
+ :param architectures: If not None, limit builds to architectures
+ with these architecture tags (in addition to any other
+ applicable constraints).
+ """
+
+ def findByRecipe(recipe, statuses=None, job_ids=None):
+ """Find jobs for a rock recipe.
+
+ :param recipe: A rock recipe to search for.
+ :param statuses: An optional iterable of `JobStatus`es to search for.
+ :param job_ids: An optional iterable of job IDs to search for.
+ :return: A sequence of `RockRecipeRequestBuildsJob`s with the
+ specified recipe.
+ """
+
+ def getByRecipeAndID(recipe, job_id):
+ """Get a job by rock recipe and job ID.
+
+ :return: The `RockRecipeRequestBuildsJob` with the specified recipe
+ and ID.
+ :raises: `NotFoundError` if there is no job with the specified
+ recipe and ID, or its `job_type` is not
+ `RockRecipeJobType.REQUEST_BUILDS`.
+ """
diff --git a/lib/lp/rocks/mail/__init__.py b/lib/lp/rocks/mail/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/lp/rocks/mail/__init__.py
diff --git a/lib/lp/rocks/mail/rockrecipebuild.py b/lib/lp/rocks/mail/rockrecipebuild.py
new file mode 100644
index 0000000..5146f98
--- /dev/null
+++ b/lib/lp/rocks/mail/rockrecipebuild.py
@@ -0,0 +1,92 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+__all__ = [
+ "RockRecipeBuildMailer",
+]
+
+from lp.app.browser.tales import DurationFormatterAPI
+from lp.services.config import config
+from lp.services.mail.basemailer import BaseMailer, RecipientReason
+from lp.services.webapp import canonical_url
+
+
+class RockRecipeBuildMailer(BaseMailer):
+
+ app = "rocks"
+
+ @classmethod
+ def forStatus(cls, build):
+ """Create a mailer for notifying about rock recipe build status.
+
+ :param build: The relevant build.
+ """
+ requester = build.requester
+ recipients = {requester: RecipientReason.forBuildRequester(requester)}
+ return cls(
+ "[Rock recipe build #%(build_id)d] %(build_title)s",
+ "rockrecipebuild-notification.txt",
+ recipients,
+ config.canonical.noreply_from_address,
+ "rock-recipe-build-status",
+ build,
+ )
+
+ def __init__(
+ self,
+ subject,
+ template_name,
+ recipients,
+ from_address,
+ notification_type,
+ build,
+ ):
+ super().__init__(
+ subject,
+ template_name,
+ recipients,
+ from_address,
+ notification_type=notification_type,
+ )
+ self.build = build
+
+ def _getHeaders(self, email, recipient):
+ """See `BaseMailer`."""
+ headers = super()._getHeaders(email, recipient)
+ headers["X-Launchpad-Build-State"] = self.build.status.name
+ return headers
+
+ def _getTemplateParams(self, email, recipient):
+ """See `BaseMailer`."""
+ build = self.build
+ params = super()._getTemplateParams(email, recipient)
+ params.update(
+ {
+ "architecturetag": build.distro_arch_series.architecturetag,
+ "build_duration": "",
+ "build_id": build.id,
+ "build_state": build.status.title,
+ "build_title": build.title,
+ "build_url": canonical_url(build),
+ "builder_url": "",
+ "distroseries": build.distro_series,
+ "log_url": "",
+ "project_name": build.recipe.project.name,
+ "recipe_name": build.recipe.name,
+ "upload_log_url": "",
+ }
+ )
+ if build.duration is not None:
+ duration_formatter = DurationFormatterAPI(build.duration)
+ params["build_duration"] = duration_formatter.approximateduration()
+ if build.log is not None:
+ params["log_url"] = build.log_url
+ if build.upload_log is not None:
+ params["upload_log_url"] = build.upload_log_url
+ if build.builder is not None:
+ params["builder_url"] = canonical_url(build.builder)
+ return params
+
+ def _getFooter(self, email, recipient, params):
+ """See `BaseMailer`."""
+ return "%(build_url)s\n" "%(reason)s\n" % params
diff --git a/lib/lp/rocks/model/rockrecipe.py b/lib/lp/rocks/model/rockrecipe.py
index 0a9b564..228e985 100644
--- a/lib/lp/rocks/model/rockrecipe.py
+++ b/lib/lp/rocks/model/rockrecipe.py
@@ -6,12 +6,35 @@
__all__ = [
"RockRecipe",
]
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
from datetime import timezone
from storm.databases.postgres import JSON
from storm.locals import Bool, DateTime, Int, Reference, Unicode
from zope.component import getUtility
+=======
+from datetime import timezone
+from operator import attrgetter, itemgetter
+
+import yaml
+from lazr.lifecycle.event import ObjectCreatedEvent
+from storm.databases.postgres import JSON
+from storm.locals import (
+ And,
+ Bool,
+ DateTime,
+ Int,
+ Join,
+ Or,
+ Reference,
+ Select,
+ Store,
+ Unicode,
+)
+from zope.component import getUtility
+from zope.event import notify
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
from zope.interface import implementer
from zope.security.proxy import removeSecurityProxy
@@ -20,6 +43,7 @@ from lp.app.enums import (
PUBLIC_INFORMATION_TYPES,
InformationType,
)
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
from lp.code.model.gitrepository import GitRepository
from lp.registry.errors import PrivatePersonLinkageError
from lp.registry.interfaces.person import validate_public_person
@@ -30,16 +54,63 @@ from lp.rocks.interfaces.rockrecipe import (
IRockRecipe,
IRockRecipeSet,
NoSourceForRockRecipe,
+=======
+from lp.buildmaster.enums import BuildStatus
+from lp.code.errors import GitRepositoryBlobNotFound, GitRepositoryScanFault
+from lp.buildmaster.model.buildfarmjob import BuildFarmJob
+from lp.buildmaster.model.buildqueue import BuildQueue
+from lp.code.model.gitcollection import GenericGitCollection
+from lp.code.model.gitrepository import GitRepository
+from lp.registry.errors import PrivatePersonLinkageError
+from lp.registry.interfaces.person import IPersonSet, validate_public_person
+from lp.registry.model.distribution import Distribution
+from lp.registry.model.distroseries import DistroSeries
+from lp.registry.model.series import ACTIVE_STATUSES
+from lp.rocks.adapters.buildarch import determine_instances_to_build
+from lp.rocks.interfaces.rockrecipe import (
+ ROCK_RECIPE_ALLOW_CREATE,
+ ROCK_RECIPE_PRIVATE_FEATURE_FLAG,
+ CannotFetchRockcraftYaml,
+ CannotParseRockcraftYaml,
+ DuplicateRockRecipeName,
+ IRockRecipe,
+ IRockRecipeBuildRequest,
+ IRockRecipeSet,
+ MissingRockcraftYaml,
+ NoSourceForRockRecipe,
+ NoSuchRockRecipe,
+ RockRecipeBuildAlreadyPending,
+ RockRecipeBuildDisallowedArchitecture,
+ RockRecipeBuildRequestStatus,
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
RockRecipeFeatureDisabled,
RockRecipeNotOwner,
RockRecipePrivacyMismatch,
RockRecipePrivateFeatureDisabled,
)
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
from lp.services.database.constants import DEFAULT, UTC_NOW
+=======
+from lp.rocks.interfaces.rockrecipebuild import IRockRecipeBuildSet
+from lp.rocks.interfaces.rockrecipejob import IRockRecipeRequestBuildsJobSource
+from lp.rocks.model.rockrecipebuild import RockRecipeBuild
+from lp.rocks.model.rockrecipejob import RockRecipeJob
+from lp.services.database.bulk import load_related
+from lp.services.database.constants import DEFAULT, UTC_NOW
+from lp.services.database.decoratedresultset import DecoratedResultSet
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
from lp.services.database.enumcol import DBEnum
from lp.services.database.interfaces import IPrimaryStore, IStore
from lp.services.database.stormbase import StormBase
from lp.services.features import getFeatureFlag
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
+=======
+from lp.services.job.interfaces.job import JobStatus
+from lp.services.job.model.job import Job
+from lp.services.librarian.model import LibraryFileAlias
+from lp.services.propertycache import cachedproperty, get_property_cache
+from lp.soyuz.model.distroarchseries import DistroArchSeries, PocketChroot
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
def rock_recipe_modified(recipe, event):
@@ -51,6 +122,81 @@ def rock_recipe_modified(recipe, event):
removeSecurityProxy(recipe).date_last_modified = UTC_NOW
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
+=======
+@implementer(IRockRecipeBuildRequest)
+class RockRecipeBuildRequest:
+ """See `IRockRecipeBuildRequest`.
+
+ This is not directly backed by a database table; instead, it is a
+ webservice-friendly view of an asynchronous build request.
+ """
+
+ def __init__(self, recipe, id):
+ self.recipe = recipe
+ self.id = id
+
+ @classmethod
+ def fromJob(cls, job):
+ """See `IRockRecipeBuildRequest`."""
+ request = cls(job.recipe, job.job_id)
+ get_property_cache(request)._job = job
+ return request
+
+ @cachedproperty
+ def _job(self):
+ job_source = getUtility(IRockRecipeRequestBuildsJobSource)
+ return job_source.getByRecipeAndID(self.recipe, self.id)
+
+ @property
+ def date_requested(self):
+ """See `IRockRecipeBuildRequest`."""
+ return self._job.date_created
+
+ @property
+ def date_finished(self):
+ """See `IRockRecipeBuildRequest`."""
+ return self._job.date_finished
+
+ @property
+ def status(self):
+ """See `IRockRecipeBuildRequest`."""
+ status_map = {
+ JobStatus.WAITING: RockRecipeBuildRequestStatus.PENDING,
+ JobStatus.RUNNING: RockRecipeBuildRequestStatus.PENDING,
+ JobStatus.COMPLETED: RockRecipeBuildRequestStatus.COMPLETED,
+ JobStatus.FAILED: RockRecipeBuildRequestStatus.FAILED,
+ JobStatus.SUSPENDED: RockRecipeBuildRequestStatus.PENDING,
+ }
+ return status_map[self._job.job.status]
+
+ @property
+ def error_message(self):
+ """See `IRockRecipeBuildRequest`."""
+ return self._job.error_message
+
+ @property
+ def builds(self):
+ """See `IRockRecipeBuildRequest`."""
+ return self._job.builds
+
+ @property
+ def requester(self):
+ """See `IRockRecipeBuildRequest`."""
+ return self._job.requester
+
+ @property
+ def channels(self):
+ """See `IRockRecipeBuildRequest`."""
+ return self._job.channels
+
+ @property
+ def architectures(self):
+ """See `IRockRecipeBuildRequest`."""
+ return self._job.architectures
+
+
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
@implementer(IRockRecipe)
class RockRecipe(StormBase):
"""See `IRockRecipe`."""
@@ -237,9 +383,242 @@ class RockRecipe(StormBase):
# more privacy infrastructure.
return False
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
def destroySelf(self):
"""See `IRockRecipe`."""
IStore(RockRecipe).remove(self)
+=======
+ def _isBuildableArchitectureAllowed(self, das):
+ """Check whether we may build for a buildable `DistroArchSeries`.
+
+ The caller is assumed to have already checked that a suitable chroot
+ is available (either directly or via
+ `DistroSeries.buildable_architectures`).
+ """
+ return das.enabled and (
+ das.processor.supports_virtualized or not self.require_virtualized
+ )
+
+ def _isArchitectureAllowed(self, das):
+ """Check whether we may build for a `DistroArchSeries`."""
+ return (
+ das.getChroot() is not None
+ and self._isBuildableArchitectureAllowed(das)
+ )
+
+ def getAllowedArchitectures(self):
+        """See `IRockRecipe`."""
+ store = Store.of(self)
+ origin = [
+ DistroArchSeries,
+ Join(
+ DistroSeries, DistroArchSeries.distroseries == DistroSeries.id
+ ),
+ Join(Distribution, DistroSeries.distribution == Distribution.id),
+ Join(
+ PocketChroot,
+ PocketChroot.distroarchseries == DistroArchSeries.id,
+ ),
+ Join(LibraryFileAlias, PocketChroot.chroot == LibraryFileAlias.id),
+ ]
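+        # Joining through PocketChroot and LibraryFileAlias restricts this
+        # to architectures that actually have a chroot available.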
+        # Preload DistroSeries and Distribution, since we'll need those in
+        # determine_instances_to_build.
+ results = store.using(*origin).find(
+ (DistroArchSeries, DistroSeries, Distribution),
+ DistroSeries.status.is_in(ACTIVE_STATUSES),
+ )
+ all_buildable_dases = DecoratedResultSet(results, itemgetter(0))
+ return [
+ das
+ for das in all_buildable_dases
+ if self._isBuildableArchitectureAllowed(das)
+ ]
+
+ def _checkRequestBuild(self, requester):
+ """May `requester` request builds of this rock recipe?"""
+ if not requester.inTeam(self.owner):
+ raise RockRecipeNotOwner(
+ "%s cannot create rock recipe builds owned by %s."
+ % (requester.display_name, self.owner.display_name)
+ )
+
+ def requestBuild(self, build_request, distro_arch_series, channels=None):
+ """Request a single build of this rock recipe.
+
+ This method is for internal use; external callers should use
+ `requestBuilds` instead.
+
+ :param build_request: The `IRockRecipeBuildRequest` job being
+ processed.
+ :param distro_arch_series: The architecture to build for.
+ :param channels: A dictionary mapping snap names to channels to use
+ for this build.
+ :return: `IRockRecipeBuild`.
+ """
+ self._checkRequestBuild(build_request.requester)
+ if not self._isArchitectureAllowed(distro_arch_series):
+ raise RockRecipeBuildDisallowedArchitecture(distro_arch_series)
+
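+        # channels=None and channels={} are stored differently but mean the
+        # same thing, so treat them as equivalent when looking for pending
+        # builds.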
+ if not channels:
+ channels_clause = Or(
+ RockRecipeBuild.channels == None,
+ RockRecipeBuild.channels == {},
+ )
+ else:
+ channels_clause = RockRecipeBuild.channels == channels
+ pending = IStore(self).find(
+ RockRecipeBuild,
+ RockRecipeBuild.recipe == self,
+ RockRecipeBuild.processor == distro_arch_series.processor,
+ channels_clause,
+ RockRecipeBuild.status == BuildStatus.NEEDSBUILD,
+ )
+ if pending.any() is not None:
+ raise RockRecipeBuildAlreadyPending
+
+ build = getUtility(IRockRecipeBuildSet).new(
+ build_request, self, distro_arch_series, channels=channels
+ )
+ build.queueBuild()
+ notify(ObjectCreatedEvent(build, user=build_request.requester))
+ return build
+
+ def requestBuilds(self, requester, channels=None, architectures=None):
+ """See `IRockRecipe`."""
+ self._checkRequestBuild(requester)
+ job = getUtility(IRockRecipeRequestBuildsJobSource).create(
+ self, requester, channels=channels, architectures=architectures
+ )
+ return self.getBuildRequest(job.job_id)
+
+ def requestBuildsFromJob(
+ self,
+ build_request,
+ channels=None,
+ architectures=None,
+ allow_failures=False,
+ logger=None,
+ ):
+ """See `IRockRecipe`."""
+ try:
+ rockcraft_data = removeSecurityProxy(
+ getUtility(IRockRecipeSet).getRockcraftYaml(self)
+ )
+
+ # Sort by (Distribution.id, DistroSeries.id, Processor.id) for
+ # determinism. This is chosen to be a similar order as in
+ # BinaryPackageBuildSet.createForSource, to minimize confusion.
+ supported_arches = [
+ das
+ for das in sorted(
+ self.getAllowedArchitectures(),
+ key=attrgetter(
+ "distroseries.distribution.id",
+ "distroseries.id",
+ "processor.id",
+ ),
+ )
+ if (
+ architectures is None
+ or das.architecturetag in architectures
+ )
+ ]
+ instances_to_build = determine_instances_to_build(
+ rockcraft_data, supported_arches
+ )
+ except Exception as e:
+ if not allow_failures:
+ raise
+ elif logger is not None:
+ logger.exception(
+ " - %s/%s/%s: %s",
+ self.owner.name,
+ self.project.name,
+ self.name,
+ e,
+ )
+
+ builds = []
+ for das in instances_to_build:
+ try:
+ build = self.requestBuild(
+ build_request, das, channels=channels
+ )
+ if logger is not None:
+ logger.debug(
+ " - %s/%s/%s %s/%s/%s: Build requested.",
+ self.owner.name,
+ self.project.name,
+ self.name,
+ das.distroseries.distribution.name,
+ das.distroseries.name,
+ das.architecturetag,
+ )
+ builds.append(build)
+ except RockRecipeBuildAlreadyPending:
+ pass
+ except Exception as e:
+ if not allow_failures:
+ raise
+ elif logger is not None:
+ logger.exception(
+ " - %s/%s/%s %s/%s/%s: %s",
+ self.owner.name,
+ self.project.name,
+ self.name,
+ das.distroseries.distribution.name,
+ das.distroseries.name,
+ das.architecturetag,
+ e,
+ )
+ return builds
+
+ def getBuildRequest(self, job_id):
+ """See `IRockRecipe`."""
+ return RockRecipeBuildRequest(self, job_id)
+
+ def destroySelf(self):
+ """See `IRockRecipe`."""
+ store = IStore(self)
+ # Remove build jobs. There won't be many queued builds, so we can
+ # afford to do this the safe but slow way via BuildQueue.destroySelf
+ # rather than in bulk.
+ buildqueue_records = store.find(
+ BuildQueue,
+ BuildQueue._build_farm_job_id == RockRecipeBuild.build_farm_job_id,
+ RockRecipeBuild.recipe == self,
+ )
+ for buildqueue_record in buildqueue_records:
+ buildqueue_record.destroySelf()
+ build_farm_job_ids = list(
+ store.find(
+ RockRecipeBuild.build_farm_job_id,
+ RockRecipeBuild.recipe == self,
+ )
+ )
+ store.execute(
+ """
+ DELETE FROM RockFile
+ USING RockRecipeBuild
+ WHERE
+ RockFile.build = RockRecipeBuild.id AND
+ RockRecipeBuild.recipe = ?
+ """,
+ (self.id,),
+ )
+ store.find(RockRecipeBuild, RockRecipeBuild.recipe == self).remove()
+ affected_jobs = Select(
+ [RockRecipeJob.job_id],
+ And(RockRecipeJob.job == Job.id, RockRecipeJob.recipe == self),
+ )
+ store.find(Job, Job.id.is_in(affected_jobs)).remove()
+ # XXX jugmac00 2024-09-10: we need to remove webhooks once implemented
+ # getUtility(IWebhookSet).delete(self.webhooks)
+ store.remove(self)
+ store.find(
+ BuildFarmJob, BuildFarmJob.id.is_in(build_farm_job_ids)
+ ).remove()
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
@implementer(IRockRecipeSet)
@@ -280,7 +659,11 @@ class RockRecipeSet:
if git_ref is None:
raise NoSourceForRockRecipe
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
if self.getByName(owner, project, name) is not None:
+=======
+ if self.exists(owner, project, name):
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
raise DuplicateRockRecipeName
# The relevant validators will do their own checks as well, but we
@@ -313,14 +696,32 @@ class RockRecipeSet:
return recipe
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
def getByName(self, owner, project, name):
"""See `IRockRecipeSet`."""
+=======
+ def _getByName(self, owner, project, name):
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
return (
IStore(RockRecipe)
.find(RockRecipe, owner=owner, project=project, name=name)
.one()
)
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
+=======
+ def exists(self, owner, project, name):
+        """See `IRockRecipeSet`."""
+ return self._getByName(owner, project, name) is not None
+
+ def getByName(self, owner, project, name):
+ """See `IRockRecipeSet`."""
+ recipe = self._getByName(owner, project, name)
+ if recipe is None:
+ raise NoSuchRockRecipe(name)
+ return recipe
+
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
def isValidInformationType(self, information_type, owner, git_ref=None):
"""See `IRockRecipeSet`."""
private = information_type not in PUBLIC_INFORMATION_TYPES
@@ -340,6 +741,82 @@ class RockRecipeSet:
return True
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
+=======
+ def preloadDataForRecipes(self, recipes, user=None):
+ """See `IRockRecipeSet`."""
+ recipes = [removeSecurityProxy(recipe) for recipe in recipes]
+
+ person_ids = set()
+ for recipe in recipes:
+ person_ids.add(recipe.registrant_id)
+ person_ids.add(recipe.owner_id)
+
+ repositories = load_related(
+ GitRepository, recipes, ["git_repository_id"]
+ )
+ if repositories:
+ GenericGitCollection.preloadDataForRepositories(repositories)
+
+ # Add repository owners to the list of pre-loaded persons. We need
+ # the target repository owner as well, since repository unique names
+ # aren't trigger-maintained.
+ person_ids.update(repository.owner_id for repository in repositories)
+
+ list(
+ getUtility(IPersonSet).getPrecachedPersonsFromIDs(
+ person_ids, need_validity=True
+ )
+ )
+
+ def getRockcraftYaml(self, context, logger=None):
+ """See `IRockRecipeSet`."""
+ if IRockRecipe.providedBy(context):
+ recipe = context
+ source = context.git_ref
+ else:
+ recipe = None
+ source = context
+ if source is None:
+ raise CannotFetchRockcraftYaml("Rock source is not defined")
+ try:
+ path = "rockcraft.yaml"
+ if recipe is not None and recipe.build_path is not None:
+ path = "/".join((recipe.build_path, path))
+ try:
+ blob = source.getBlob(path)
+ except GitRepositoryBlobNotFound:
+ if logger is not None:
+ logger.exception(
+ "Cannot find rockcraft.yaml in %s", source.unique_name
+ )
+ raise MissingRockcraftYaml(source.unique_name)
+ except GitRepositoryScanFault as e:
+ msg = "Failed to get rockcraft.yaml from %s"
+ if logger is not None:
+ logger.exception(msg, source.unique_name)
+ raise CannotFetchRockcraftYaml(
+ "%s: %s" % (msg % source.unique_name, e)
+ )
+
+ try:
+ rockcraft_data = yaml.safe_load(blob)
+ except Exception as e:
+ # Don't bother logging parsing errors from user-supplied YAML.
+ raise CannotParseRockcraftYaml(
+ "Cannot parse rockcraft.yaml from %s: %s"
+ % (source.unique_name, e)
+ )
+
+ if not isinstance(rockcraft_data, dict):
+ raise CannotParseRockcraftYaml(
+ "The top level of rockcraft.yaml from %s is not a mapping"
+ % source.unique_name
+ )
+
+ return rockcraft_data
+
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
def findByGitRepository(self, repository, paths=None):
"""See `IRockRecipeSet`."""
clauses = [RockRecipe.git_repository == repository]
@@ -349,8 +826,17 @@ class RockRecipeSet:
# privacy infrastructure.
return IStore(RockRecipe).find(RockRecipe, *clauses)
+<<<<<<< lib/lp/rocks/model/rockrecipe.py
def detachFromGitRepository(self, repository):
"""See `IRockRecipeSet`."""
+=======
+ def findByOwner(self, owner):
+        """See `IRockRecipeSet`."""
+ return IStore(RockRecipe).find(RockRecipe, owner=owner)
+
+ def detachFromGitRepository(self, repository):
+        """See `IRockRecipeSet`."""
+>>>>>>> lib/lp/rocks/model/rockrecipe.py
self.findByGitRepository(repository).set(
git_repository_id=None, git_path=None, date_last_modified=UTC_NOW
)
diff --git a/lib/lp/rocks/model/rockrecipebuild.py b/lib/lp/rocks/model/rockrecipebuild.py
new file mode 100644
index 0000000..e6025e5
--- /dev/null
+++ b/lib/lp/rocks/model/rockrecipebuild.py
@@ -0,0 +1,445 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Rock recipe builds."""
+
+__all__ = [
+ "RockFile",
+ "RockRecipeBuild",
+]
+
+from datetime import timedelta, timezone
+
+import six
+from storm.databases.postgres import JSON
+from storm.locals import Bool, DateTime, Desc, Int, Reference, Store, Unicode
+from storm.store import EmptyResultSet
+from zope.component import getUtility
+from zope.interface import implementer
+
+from lp.app.errors import NotFoundError
+from lp.buildmaster.enums import (
+ BuildFarmJobType,
+ BuildQueueStatus,
+ BuildStatus,
+)
+from lp.buildmaster.interfaces.buildfarmjob import IBuildFarmJobSource
+from lp.buildmaster.model.buildfarmjob import SpecificBuildFarmJobSourceMixin
+from lp.buildmaster.model.packagebuild import PackageBuildMixin
+from lp.registry.interfaces.pocket import PackagePublishingPocket
+from lp.registry.interfaces.series import SeriesStatus
+from lp.registry.model.person import Person
+from lp.rocks.interfaces.rockrecipe import IRockRecipeSet
+from lp.rocks.interfaces.rockrecipebuild import (
+ IRockFile,
+ IRockRecipeBuild,
+ IRockRecipeBuildSet,
+)
+from lp.rocks.mail.rockrecipebuild import RockRecipeBuildMailer
+from lp.services.config import config
+from lp.services.database.bulk import load_related
+from lp.services.database.constants import DEFAULT
+from lp.services.database.decoratedresultset import DecoratedResultSet
+from lp.services.database.enumcol import DBEnum
+from lp.services.database.interfaces import IPrimaryStore, IStore
+from lp.services.database.stormbase import StormBase
+from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent
+from lp.services.propertycache import cachedproperty, get_property_cache
+from lp.services.webapp.snapshot import notify_modified
+
+
+@implementer(IRockRecipeBuild)
+class RockRecipeBuild(PackageBuildMixin, StormBase):
+ """See `IRockRecipeBuild`."""
+
+ __storm_table__ = "RockRecipeBuild"
+
+ job_type = BuildFarmJobType.ROCKRECIPEBUILD
+
+ id = Int(name="id", primary=True)
+
+ build_request_id = Int(name="build_request", allow_none=False)
+
+ requester_id = Int(name="requester", allow_none=False)
+ requester = Reference(requester_id, "Person.id")
+
+ recipe_id = Int(name="recipe", allow_none=False)
+ recipe = Reference(recipe_id, "RockRecipe.id")
+
+ distro_arch_series_id = Int(name="distro_arch_series", allow_none=False)
+ distro_arch_series = Reference(
+ distro_arch_series_id, "DistroArchSeries.id"
+ )
+
+ channels = JSON("channels", allow_none=True)
+
+ processor_id = Int(name="processor", allow_none=False)
+ processor = Reference(processor_id, "Processor.id")
+
+ virtualized = Bool(name="virtualized", allow_none=False)
+
+ date_created = DateTime(
+ name="date_created", tzinfo=timezone.utc, allow_none=False
+ )
+ date_started = DateTime(
+ name="date_started", tzinfo=timezone.utc, allow_none=True
+ )
+ date_finished = DateTime(
+ name="date_finished", tzinfo=timezone.utc, allow_none=True
+ )
+ date_first_dispatched = DateTime(
+ name="date_first_dispatched", tzinfo=timezone.utc, allow_none=True
+ )
+
+ builder_id = Int(name="builder", allow_none=True)
+ builder = Reference(builder_id, "Builder.id")
+
+ status = DBEnum(name="status", enum=BuildStatus, allow_none=False)
+
+ log_id = Int(name="log", allow_none=True)
+ log = Reference(log_id, "LibraryFileAlias.id")
+
+ upload_log_id = Int(name="upload_log", allow_none=True)
+ upload_log = Reference(upload_log_id, "LibraryFileAlias.id")
+
+ dependencies = Unicode(name="dependencies", allow_none=True)
+
+ failure_count = Int(name="failure_count", allow_none=False)
+
+ build_farm_job_id = Int(name="build_farm_job", allow_none=False)
+ build_farm_job = Reference(build_farm_job_id, "BuildFarmJob.id")
+
+ revision_id = Unicode(name="revision_id", allow_none=True)
+
+ store_upload_metadata = JSON("store_upload_json_data", allow_none=True)
+
+ def __init__(
+ self,
+ build_farm_job,
+ build_request,
+ recipe,
+ distro_arch_series,
+ processor,
+ virtualized,
+ channels=None,
+ store_upload_metadata=None,
+ date_created=DEFAULT,
+ ):
+ """Construct a `RockRecipeBuild`."""
+ requester = build_request.requester
+ super().__init__()
+ self.build_farm_job = build_farm_job
+ self.build_request_id = build_request.id
+ self.requester = requester
+ self.recipe = recipe
+ self.distro_arch_series = distro_arch_series
+ self.processor = processor
+ self.virtualized = virtualized
+ self.channels = channels
+ self.store_upload_metadata = store_upload_metadata
+ self.date_created = date_created
+ self.status = BuildStatus.NEEDSBUILD
+
+ @property
+ def build_request(self):
+ return self.recipe.getBuildRequest(self.build_request_id)
+
+ @property
+ def is_private(self):
+ """See `IBuildFarmJob`."""
+ return self.recipe.private or self.recipe.owner.private
+
+ def __repr__(self):
+ return "<RockRecipeBuild ~%s/%s/+rock/%s/+build/%d>" % (
+ self.recipe.owner.name,
+ self.recipe.project.name,
+ self.recipe.name,
+ self.id,
+ )
+
+ @property
+ def title(self):
+ return "%s build of /~%s/%s/+rock/%s" % (
+ self.distro_arch_series.architecturetag,
+ self.recipe.owner.name,
+ self.recipe.project.name,
+ self.recipe.name,
+ )
+
+ @property
+ def distribution(self):
+ """See `IPackageBuild`."""
+ return self.distro_arch_series.distroseries.distribution
+
+ @property
+ def distro_series(self):
+ """See `IPackageBuild`."""
+ return self.distro_arch_series.distroseries
+
+ @property
+ def archive(self):
+ """See `IPackageBuild`."""
+ return self.distribution.main_archive
+
+ @property
+ def pocket(self):
+ """See `IPackageBuild`."""
+ return PackagePublishingPocket.UPDATES
+
+ @property
+ def score(self):
+ """See `IRockRecipeBuild`."""
+ if self.buildqueue_record is None:
+ return None
+ else:
+ return self.buildqueue_record.lastscore
+
+ @property
+ def can_be_retried(self):
+ """See `IBuildFarmJob`."""
+ # First check that the behaviour would accept the build if it
+ # succeeded.
+ if self.distro_series.status == SeriesStatus.OBSOLETE:
+ return False
+ return super().can_be_retried
+
+ def calculateScore(self):
+ """See `IBuildFarmJob`."""
+ # XXX jugmac00 2024-09-08: We'll probably need something like
+ # RockRecipe.relative_build_score at some point.
+ return 2510
+
+ def getMedianBuildDuration(self):
+ """Return the median duration of our successful builds."""
+ store = IStore(self)
+ result = store.find(
+ (RockRecipeBuild.date_started, RockRecipeBuild.date_finished),
+ RockRecipeBuild.recipe == self.recipe,
+ RockRecipeBuild.processor == self.processor,
+ RockRecipeBuild.status == BuildStatus.FULLYBUILT,
+ )
+ result.order_by(Desc(RockRecipeBuild.date_finished))
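+        # Take the median of the (up to nine) most recent successful builds.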
+ durations = [row[1] - row[0] for row in result[:9]]
+ if len(durations) == 0:
+ return None
+ durations.sort()
+ return durations[len(durations) // 2]
+
+ def estimateDuration(self):
+ """See `IBuildFarmJob`."""
+ median = self.getMedianBuildDuration()
+ if median is not None:
+ return median
+ return timedelta(minutes=10)
+
+ @cachedproperty
+ def eta(self):
+ """The datetime when the build job is estimated to complete.
+
+ This is the BuildQueue.estimated_duration plus the
+ Job.date_started or BuildQueue.getEstimatedJobStartTime.
+ """
+ if self.buildqueue_record is None:
+ return None
+ queue_record = self.buildqueue_record
+ if queue_record.status == BuildQueueStatus.WAITING:
+ start_time = queue_record.getEstimatedJobStartTime()
+ else:
+ start_time = queue_record.date_started
+ if start_time is None:
+ return None
+ duration = queue_record.estimated_duration
+ return start_time + duration
+
+ @property
+ def estimate(self):
+ """If true, the date value is an estimate."""
+ if self.date_finished is not None:
+ return False
+ return self.eta is not None
+
+ @property
+ def date(self):
+ """The date when the build completed or is estimated to complete."""
+ if self.estimate:
+ return self.eta
+ return self.date_finished
+
+ def getFiles(self):
+ """See `IRockRecipeBuild`."""
+ result = Store.of(self).find(
+ (RockFile, LibraryFileAlias, LibraryFileContent),
+ RockFile.build == self.id,
+ LibraryFileAlias.id == RockFile.library_file_id,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
+ )
+ return result.order_by([LibraryFileAlias.filename, RockFile.id])
+
+ def getFileByName(self, filename):
+ """See `IRockRecipeBuild`."""
+ if filename.endswith(".txt.gz"):
+ file_object = self.log
+ elif filename.endswith("_log.txt"):
+ file_object = self.upload_log
+ else:
+ file_object = (
+ Store.of(self)
+ .find(
+ LibraryFileAlias,
+ RockFile.build == self.id,
+ LibraryFileAlias.id == RockFile.library_file_id,
+ LibraryFileAlias.filename == filename,
+ )
+ .one()
+ )
+
+ if file_object is not None and file_object.filename == filename:
+ return file_object
+
+ raise NotFoundError(filename)
+
+ def addFile(self, lfa):
+ """See `IRockRecipeBuild`."""
+ rock_file = RockFile(build=self, library_file=lfa)
+ IPrimaryStore(RockFile).add(rock_file)
+ return rock_file
+
+ def verifySuccessfulUpload(self):
+ """See `IPackageBuild`."""
+ return not self.getFiles().is_empty()
+
+ def updateStatus(
+ self,
+ status,
+ builder=None,
+ worker_status=None,
+ date_started=None,
+ date_finished=None,
+ force_invalid_transition=False,
+ ):
+ """See `IBuildFarmJob`."""
+ edited_fields = set()
+ with notify_modified(
+ self, edited_fields, snapshot_names=("status", "revision_id")
+ ) as previous_obj:
+ super().updateStatus(
+ status,
+ builder=builder,
+ worker_status=worker_status,
+ date_started=date_started,
+ date_finished=date_finished,
+ force_invalid_transition=force_invalid_transition,
+ )
+ if self.status != previous_obj.status:
+ edited_fields.add("status")
+ if worker_status is not None:
+ revision_id = worker_status.get("revision_id")
+ if revision_id is not None:
+ self.revision_id = six.ensure_text(revision_id)
+ if revision_id != previous_obj.revision_id:
+ edited_fields.add("revision_id")
+ # notify_modified evaluates all attributes mentioned in the
+ # interface, but we may then make changes that affect self.eta.
+ del get_property_cache(self).eta
+
+ def notify(self, extra_info=None):
+ """See `IPackageBuild`."""
+ if not config.builddmaster.send_build_notification:
+ return
+ if self.status == BuildStatus.FULLYBUILT:
+ return
+ mailer = RockRecipeBuildMailer.forStatus(self)
+ mailer.sendAll()
+
+
+@implementer(IRockRecipeBuildSet)
+class RockRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
+ """See `IRockRecipeBuildSet`."""
+
+ def new(
+ self,
+ build_request,
+ recipe,
+ distro_arch_series,
+ channels=None,
+ store_upload_metadata=None,
+ date_created=DEFAULT,
+ ):
+ """See `IRockRecipeBuildSet`."""
+ store = IPrimaryStore(RockRecipeBuild)
+ build_farm_job = getUtility(IBuildFarmJobSource).new(
+ RockRecipeBuild.job_type, BuildStatus.NEEDSBUILD, date_created
+ )
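+        # Builds are virtualized unless the processor supports
+        # non-virtualized builds and the recipe does not require
+        # virtualization.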
+ virtualized = (
+ not distro_arch_series.processor.supports_nonvirtualized
+ or recipe.require_virtualized
+ )
+ build = RockRecipeBuild(
+ build_farm_job,
+ build_request,
+ recipe,
+ distro_arch_series,
+ distro_arch_series.processor,
+ virtualized,
+ channels=channels,
+ store_upload_metadata=store_upload_metadata,
+ date_created=date_created,
+ )
+ store.add(build)
+ return build
+
+ def getByID(self, build_id):
+ """See `ISpecificBuildFarmJobSource`."""
+ store = IPrimaryStore(RockRecipeBuild)
+ return store.get(RockRecipeBuild, build_id)
+
+ def getByBuildFarmJob(self, build_farm_job):
+ """See `ISpecificBuildFarmJobSource`."""
+ return (
+ Store.of(build_farm_job)
+ .find(RockRecipeBuild, build_farm_job_id=build_farm_job.id)
+ .one()
+ )
+
+ def preloadBuildsData(self, builds):
+ # Circular import.
+ from lp.rocks.model.rockrecipe import RockRecipe
+
+ load_related(Person, builds, ["requester_id"])
+ lfas = load_related(LibraryFileAlias, builds, ["log_id"])
+ load_related(LibraryFileContent, lfas, ["contentID"])
+ recipes = load_related(RockRecipe, builds, ["recipe_id"])
+ getUtility(IRockRecipeSet).preloadDataForRecipes(recipes)
+
+ def getByBuildFarmJobs(self, build_farm_jobs):
+ """See `ISpecificBuildFarmJobSource`."""
+ if len(build_farm_jobs) == 0:
+ return EmptyResultSet()
+ rows = Store.of(build_farm_jobs[0]).find(
+ RockRecipeBuild,
+ RockRecipeBuild.build_farm_job_id.is_in(
+ bfj.id for bfj in build_farm_jobs
+ ),
+ )
+ return DecoratedResultSet(rows, pre_iter_hook=self.preloadBuildsData)
+
+
+@implementer(IRockFile)
+class RockFile(StormBase):
+ """See `IRockFile`."""
+
+ __storm_table__ = "RockFile"
+
+ id = Int(name="id", primary=True)
+
+ build_id = Int(name="build", allow_none=False)
+ build = Reference(build_id, "RockRecipeBuild.id")
+
+ library_file_id = Int(name="library_file", allow_none=False)
+ library_file = Reference(library_file_id, "LibraryFileAlias.id")
+
+ def __init__(self, build, library_file):
+ """Construct a `RockFile`."""
+ super().__init__()
+ self.build = build
+ self.library_file = library_file
diff --git a/lib/lp/rocks/model/rockrecipebuildbehaviour.py b/lib/lp/rocks/model/rockrecipebuildbehaviour.py
new file mode 100644
index 0000000..f1f9d04
--- /dev/null
+++ b/lib/lp/rocks/model/rockrecipebuildbehaviour.py
@@ -0,0 +1,124 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""An `IBuildFarmJobBehaviour` for `RockRecipeBuild`.
+
+Dispatches rock recipe build jobs to build-farm workers.
+"""
+
+__all__ = [
+ "RockRecipeBuildBehaviour",
+]
+
+from typing import Any, Generator
+
+from twisted.internet import defer
+from zope.component import adapter
+from zope.interface import implementer
+from zope.security.proxy import removeSecurityProxy
+
+from lp.buildmaster.builderproxy import BuilderProxyMixin
+from lp.buildmaster.enums import BuildBaseImageType
+from lp.buildmaster.interfaces.builder import CannotBuild
+from lp.buildmaster.interfaces.buildfarmjobbehaviour import (
+ BuildArgs,
+ IBuildFarmJobBehaviour,
+)
+from lp.buildmaster.model.buildfarmjobbehaviour import (
+ BuildFarmJobBehaviourBase,
+)
+from lp.registry.interfaces.series import SeriesStatus
+from lp.rocks.interfaces.rockrecipebuild import IRockRecipeBuild
+from lp.soyuz.adapters.archivedependencies import get_sources_list_for_building
+
+
+@adapter(IRockRecipeBuild)
+@implementer(IBuildFarmJobBehaviour)
+class RockRecipeBuildBehaviour(BuilderProxyMixin, BuildFarmJobBehaviourBase):
+ """Dispatches `RockRecipeBuild` jobs to workers."""
+
+ builder_type = "rock"
+ image_types = [BuildBaseImageType.LXD, BuildBaseImageType.CHROOT]
+
+ def getLogFileName(self):
+ das = self.build.distro_arch_series
+
+ # Examples:
+ # buildlog_rock_ubuntu_wily_amd64_name_FULLYBUILT.txt
+ return "buildlog_rock_%s_%s_%s_%s_%s.txt" % (
+ das.distroseries.distribution.name,
+ das.distroseries.name,
+ das.architecturetag,
+ self.build.recipe.name,
+ self.build.status.name,
+ )
+
+ def verifyBuildRequest(self, logger):
+ """Assert some pre-build checks.
+
+ The build request is checked:
+ * Virtualized builds can't build on a non-virtual builder
+ * Ensure that we have a chroot
+ """
+ build = self.build
+ if build.virtualized and not self._builder.virtualized:
+ raise AssertionError(
+ "Attempt to build virtual item on a non-virtual builder."
+ )
+
+ chroot = build.distro_arch_series.getChroot()
+ if chroot is None:
+ raise CannotBuild(
+ "Missing chroot for %s" % build.distro_arch_series.displayname
+ )
+
+ @defer.inlineCallbacks
+ def extraBuildArgs(self, logger=None) -> Generator[Any, Any, BuildArgs]:
+ """
+ Return the extra arguments required by the worker for the given build.
+ """
+ build = self.build
+ args: BuildArgs = yield super().extraBuildArgs(logger=logger)
+ yield self.startProxySession(args)
+ args["name"] = build.recipe.store_name or build.recipe.name
+ channels = build.channels or {}
+ # We have to remove the security proxy that Zope applies to this
+ # dict, since otherwise we'll be unable to serialise it to XML-RPC.
+ args["channels"] = removeSecurityProxy(channels)
+ (
+ args["archives"],
+ args["trusted_keys"],
+ ) = yield get_sources_list_for_building(
+ self, build.distro_arch_series, None, logger=logger
+ )
+        if build.recipe.build_path is not None:
+            args["build_path"] = build.recipe.build_path
+ if build.recipe.git_ref is not None:
+ args["git_repository"] = build.recipe.git_repository.git_https_url
+ # "git clone -b" doesn't accept full ref names. If this becomes
+ # a problem then we could change launchpad-buildd to do "git
+ # clone" followed by "git checkout" instead.
+ if build.recipe.git_path != "HEAD":
+ args["git_path"] = build.recipe.git_ref.name
+ else:
+ raise CannotBuild(
+ "Source repository for ~%s/%s/+rock/%s has been deleted."
+ % (
+ build.recipe.owner.name,
+ build.recipe.project.name,
+ build.recipe.name,
+ )
+ )
+ args["private"] = build.is_private
+ return args
+
+ def verifySuccessfulBuild(self):
+ """See `IBuildFarmJobBehaviour`."""
+ # The implementation in BuildFarmJobBehaviourBase checks whether the
+ # target suite is modifiable in the target archive. However, a
+ # `RockRecipeBuild`'s archive is a source rather than a target, so
+ # that check does not make sense. We do, however, refuse to build
+ # for obsolete series.
+ assert self.build.distro_series.status != SeriesStatus.OBSOLETE
diff --git a/lib/lp/rocks/model/rockrecipejob.py b/lib/lp/rocks/model/rockrecipejob.py
new file mode 100644
index 0000000..f21df96
--- /dev/null
+++ b/lib/lp/rocks/model/rockrecipejob.py
@@ -0,0 +1,331 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Rock recipe jobs."""
+
+__all__ = [
+ "RockRecipeJob",
+ "RockRecipeJobType",
+ "RockRecipeRequestBuildsJob",
+]
+
+import transaction
+from lazr.delegates import delegate_to
+from lazr.enum import DBEnumeratedType, DBItem
+from storm.databases.postgres import JSON
+from storm.locals import Desc, Int, Reference
+from storm.store import EmptyResultSet
+from zope.component import getUtility
+from zope.interface import implementer, provider
+
+from lp.app.errors import NotFoundError
+from lp.registry.interfaces.person import IPersonSet
+from lp.rocks.interfaces.rockrecipe import (
+ CannotFetchRockcraftYaml,
+ CannotParseRockcraftYaml,
+ MissingRockcraftYaml,
+)
+from lp.rocks.interfaces.rockrecipejob import (
+ IRockRecipeJob,
+ IRockRecipeRequestBuildsJob,
+ IRockRecipeRequestBuildsJobSource,
+)
+from lp.rocks.model.rockrecipebuild import RockRecipeBuild
+from lp.services.config import config
+from lp.services.database.bulk import load_related
+from lp.services.database.decoratedresultset import DecoratedResultSet
+from lp.services.database.enumcol import DBEnum
+from lp.services.database.interfaces import IPrimaryStore, IStore
+from lp.services.database.stormbase import StormBase
+from lp.services.job.model.job import EnumeratedSubclass, Job
+from lp.services.job.runner import BaseRunnableJob
+from lp.services.mail.sendmail import format_address_for_person
+from lp.services.propertycache import cachedproperty
+from lp.services.scripts import log
+
+
+class RockRecipeJobType(DBEnumeratedType):
+ """Values that `IRockRecipeJob.job_type` can take."""
+
+ REQUEST_BUILDS = DBItem(
+ 0,
+ """
+ Request builds
+
+ This job requests builds of a rock recipe.
+ """,
+ )
+
+
+@implementer(IRockRecipeJob)
+class RockRecipeJob(StormBase):
+ """See `IRockRecipeJob`."""
+
+ __storm_table__ = "RockRecipeJob"
+
+ job_id = Int(name="job", primary=True, allow_none=False)
+ job = Reference(job_id, "Job.id")
+
+ recipe_id = Int(name="recipe", allow_none=False)
+ recipe = Reference(recipe_id, "RockRecipe.id")
+
+ job_type = DBEnum(
+ name="job_type", enum=RockRecipeJobType, allow_none=False
+ )
+
+ metadata = JSON("json_data", allow_none=False)
+
+ def __init__(self, recipe, job_type, metadata, **job_args):
+ """Constructor.
+
+ Extra keyword arguments are used to construct the underlying Job
+ object.
+
+ :param recipe: The `IRockRecipe` this job relates to.
+ :param job_type: The `RockRecipeJobType` of this job.
+ :param metadata: The type-specific variables, as a JSON-compatible
+ dict.
+ """
+ super().__init__()
+ self.job = Job(**job_args)
+ self.recipe = recipe
+ self.job_type = job_type
+ self.metadata = metadata
+
+ def makeDerived(self):
+ return RockRecipeJobDerived.makeSubclass(self)
+
+
+@delegate_to(IRockRecipeJob)
+class RockRecipeJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
+
+ def __init__(self, recipe_job):
+ self.context = recipe_job
+
+ def __repr__(self):
+ """An informative representation of the job."""
+ return "<%s for ~%s/%s/+rock/%s>" % (
+ self.__class__.__name__,
+ self.recipe.owner.name,
+ self.recipe.project.name,
+ self.recipe.name,
+ )
+
+ @classmethod
+ def get(cls, job_id):
+ """Get a job by id.
+
+ :return: The `RockRecipeJob` with the specified id, as the current
+ `RockRecipeJobDerived` subclass.
+ :raises: `NotFoundError` if there is no job with the specified id,
+ or its `job_type` does not match the desired subclass.
+ """
+ recipe_job = IStore(RockRecipeJob).get(RockRecipeJob, job_id)
+ if recipe_job.job_type != cls.class_job_type:
+ raise NotFoundError(
+ "No object found with id %d and type %s"
+ % (job_id, cls.class_job_type.title)
+ )
+ return cls(recipe_job)
+
+ @classmethod
+ def iterReady(cls):
+ """See `IJobSource`."""
+ jobs = IPrimaryStore(RockRecipeJob).find(
+ RockRecipeJob,
+ RockRecipeJob.job_type == cls.class_job_type,
+ RockRecipeJob.job == Job.id,
+ Job.id.is_in(Job.ready_jobs),
+ )
+ return (cls(job) for job in jobs)
+
+ def getOopsVars(self):
+ """See `IRunnableJob`."""
+ oops_vars = super().getOopsVars()
+ oops_vars.extend(
+ [
+ ("job_id", self.context.job.id),
+ ("job_type", self.context.job_type.title),
+ ("recipe_owner_name", self.context.recipe.owner.name),
+ ("recipe_project_name", self.context.recipe.project.name),
+ ("recipe_name", self.context.recipe.name),
+ ]
+ )
+ return oops_vars
+
+
+@implementer(IRockRecipeRequestBuildsJob)
+@provider(IRockRecipeRequestBuildsJobSource)
+class RockRecipeRequestBuildsJob(RockRecipeJobDerived):
+ """A Job that processes a request for builds of a rock recipe."""
+
+ class_job_type = RockRecipeJobType.REQUEST_BUILDS
+
+ user_error_types = (
+ CannotParseRockcraftYaml,
+ MissingRockcraftYaml,
+ )
+ retry_error_types = (CannotFetchRockcraftYaml,)
+
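+    # Failures to fetch rockcraft.yaml are treated as transient and retried
+    # a few times before the job gives up.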
+ max_retries = 5
+
+ config = config.IRockRecipeRequestBuildsJobSource
+
+ @classmethod
+ def create(cls, recipe, requester, channels=None, architectures=None):
+ """See `IRockRecipeRequestBuildsJobSource`."""
+ metadata = {
+ "requester": requester.id,
+ "channels": channels,
+ # Really a set or None, but sets aren't directly
+ # JSON-serialisable.
+ "architectures": (
+ list(architectures) if architectures is not None else None
+ ),
+ }
+ recipe_job = RockRecipeJob(recipe, cls.class_job_type, metadata)
+ job = cls(recipe_job)
+ job.celeryRunOnCommit()
+ IStore(RockRecipeJob).flush()
+ return job
+
+ @classmethod
+ def findByRecipe(cls, recipe, statuses=None, job_ids=None):
+ """See `IRockRecipeRequestBuildsJobSource`."""
+ clauses = [
+ RockRecipeJob.recipe == recipe,
+ RockRecipeJob.job_type == cls.class_job_type,
+ ]
+ if statuses is not None:
+ clauses.extend(
+ [
+ RockRecipeJob.job == Job.id,
+ Job._status.is_in(statuses),
+ ]
+ )
+ if job_ids is not None:
+ clauses.append(RockRecipeJob.job_id.is_in(job_ids))
+ recipe_jobs = (
+ IStore(RockRecipeJob)
+ .find(RockRecipeJob, *clauses)
+ .order_by(Desc(RockRecipeJob.job_id))
+ )
+
+ def preload_jobs(rows):
+ load_related(Job, rows, ["job_id"])
+
+ return DecoratedResultSet(
+ recipe_jobs,
+ lambda recipe_job: cls(recipe_job),
+ pre_iter_hook=preload_jobs,
+ )
+
+ @classmethod
+ def getByRecipeAndID(cls, recipe, job_id):
+ """See `IRockRecipeRequestBuildsJobSource`."""
+ recipe_job = (
+ IStore(RockRecipeJob)
+ .find(
+ RockRecipeJob,
+ RockRecipeJob.job_id == job_id,
+ RockRecipeJob.recipe == recipe,
+ RockRecipeJob.job_type == cls.class_job_type,
+ )
+ .one()
+ )
+ if recipe_job is None:
+ raise NotFoundError(
+ "No REQUEST_BUILDS job with ID %d found for %r"
+ % (job_id, recipe)
+ )
+ return cls(recipe_job)
+
+ def getOperationDescription(self):
+ return "requesting builds of %s" % self.recipe.name
+
+ def getErrorRecipients(self):
+ if self.requester is None or self.requester.preferredemail is None:
+ return []
+ return [format_address_for_person(self.requester)]
+
+ @cachedproperty
+ def requester(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ requester_id = self.metadata["requester"]
+ return getUtility(IPersonSet).get(requester_id)
+
+ @property
+ def channels(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ return self.metadata["channels"]
+
+ @property
+ def architectures(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ architectures = self.metadata["architectures"]
+ return set(architectures) if architectures is not None else None
+
+ @property
+ def date_created(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ return self.context.job.date_created
+
+ @property
+ def date_finished(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ return self.context.job.date_finished
+
+ @property
+ def error_message(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ return self.metadata.get("error_message")
+
+ @error_message.setter
+ def error_message(self, message):
+ """See `IRockRecipeRequestBuildsJob`."""
+ self.metadata["error_message"] = message
+
+ @property
+ def build_request(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ return self.recipe.getBuildRequest(self.job.id)
+
+ @property
+ def builds(self):
+ """See `IRockRecipeRequestBuildsJob`."""
+ build_ids = self.metadata.get("builds")
+ if build_ids:
+ return IStore(RockRecipeBuild).find(
+ RockRecipeBuild, RockRecipeBuild.id.is_in(build_ids)
+ )
+ else:
+ return EmptyResultSet()
+
+ @builds.setter
+ def builds(self, builds):
+ """See `IRockRecipeRequestBuildsJob`."""
+ self.metadata["builds"] = [build.id for build in builds]
+
+ def run(self):
+ """See `IRunnableJob`."""
+ requester = self.requester
+ if requester is None:
+ log.info(
+ "Skipping %r because the requester has been deleted." % self
+ )
+ return
+ try:
+ self.builds = self.recipe.requestBuildsFromJob(
+ self.build_request,
+ channels=self.channels,
+ architectures=self.architectures,
+ logger=log,
+ )
+ self.error_message = None
+ except Exception as e:
+ self.error_message = str(e)
+ # The normal job infrastructure will abort the transaction, but
+ # we want to commit instead: the only database changes we make
+ # are to this job's metadata and should be preserved.
+ transaction.commit()
+ raise
diff --git a/lib/lp/rocks/security.py b/lib/lp/rocks/security.py
index c907f00..b005952 100644
--- a/lib/lp/rocks/security.py
+++ b/lib/lp/rocks/security.py
@@ -5,8 +5,15 @@
__all__ = []
+<<<<<<< lib/lp/rocks/security.py
from lp.app.security import AuthorizationBase
from lp.rocks.interfaces.rockrecipe import IRockRecipe
+=======
+from lp.app.security import AuthorizationBase, DelegatedAuthorization
+from lp.rocks.interfaces.rockrecipe import IRockRecipe, IRockRecipeBuildRequest
+from lp.rocks.interfaces.rockrecipebuild import IRockRecipeBuild
+from lp.security import AdminByBuilddAdmin
+>>>>>>> lib/lp/rocks/security.py
class ViewRockRecipe(AuthorizationBase):
@@ -49,3 +56,43 @@ class AdminRockRecipe(AuthorizationBase):
return user.in_ppa_self_admins and EditRockRecipe(
self.obj
).checkAuthenticated(user)
+<<<<<<< lib/lp/rocks/security.py
+=======
+
+
+class ViewRockRecipeBuildRequest(DelegatedAuthorization):
+ permission = "launchpad.View"
+ usedfor = IRockRecipeBuildRequest
+
+ def __init__(self, obj):
+ super().__init__(obj, obj.recipe, "launchpad.View")
+
+
+class ViewRockRecipeBuild(DelegatedAuthorization):
+ permission = "launchpad.View"
+ usedfor = IRockRecipeBuild
+
+ def iter_objects(self):
+ yield self.obj.recipe
+
+
+class EditRockRecipeBuild(AdminByBuilddAdmin):
+ permission = "launchpad.Edit"
+ usedfor = IRockRecipeBuild
+
+ def checkAuthenticated(self, user):
+ """Check edit access for rock recipe builds.
+
+ Allow admins, buildd admins, and the owner of the rock recipe.
+ (Note that the requester of the build is required to be in the team
+ that owns the rock recipe.)
+ """
+ auth_recipe = EditRockRecipe(self.obj.recipe)
+ if auth_recipe.checkAuthenticated(user):
+ return True
+ return super().checkAuthenticated(user)
+
+
+class AdminRockRecipeBuild(AdminByBuilddAdmin):
+ usedfor = IRockRecipeBuild
+>>>>>>> lib/lp/rocks/security.py
diff --git a/lib/lp/rocks/tests/test_rockrecipe.py b/lib/lp/rocks/tests/test_rockrecipe.py
index 8069c3a..83cf199 100644
--- a/lib/lp/rocks/tests/test_rockrecipe.py
+++ b/lib/lp/rocks/tests/test_rockrecipe.py
@@ -2,16 +2,46 @@
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test rock recipes."""
+<<<<<<< lib/lp/rocks/tests/test_rockrecipe.py
+=======
+from textwrap import dedent
+
+import transaction
+from storm.locals import Store
+from testtools.matchers import (
+ Equals,
+ Is,
+ MatchesDict,
+ MatchesSetwise,
+ MatchesStructure,
+)
+>>>>>>> lib/lp/rocks/tests/test_rockrecipe.py
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
from lp.app.enums import InformationType
+<<<<<<< lib/lp/rocks/tests/test_rockrecipe.py
+=======
+from lp.app.interfaces.launchpad import ILaunchpadCelebrities
+from lp.buildmaster.enums import BuildQueueStatus, BuildStatus
+from lp.buildmaster.interfaces.buildqueue import IBuildQueue
+from lp.buildmaster.interfaces.processor import (
+ IProcessorSet,
+ ProcessorNotFound,
+)
+from lp.buildmaster.model.buildfarmjob import BuildFarmJob
+from lp.buildmaster.model.buildqueue import BuildQueue
+from lp.code.tests.helpers import GitHostingFixture
+>>>>>>> lib/lp/rocks/tests/test_rockrecipe.py
from lp.rocks.interfaces.rockrecipe import (
ROCK_RECIPE_ALLOW_CREATE,
IRockRecipe,
IRockRecipeSet,
NoSourceForRockRecipe,
+<<<<<<< lib/lp/rocks/tests/test_rockrecipe.py
RockRecipeFeatureDisabled,
RockRecipePrivateFeatureDisabled,
)
@@ -20,6 +50,39 @@ from lp.services.features.testing import FeatureFixture
from lp.services.webapp.snapshot import notify_modified
from lp.testing import TestCaseWithFactory, admin_logged_in, person_logged_in
from lp.testing.layers import DatabaseFunctionalLayer, LaunchpadZopelessLayer
+=======
+ RockRecipeBuildAlreadyPending,
+ RockRecipeBuildDisallowedArchitecture,
+ RockRecipeBuildRequestStatus,
+ RockRecipeFeatureDisabled,
+ RockRecipePrivateFeatureDisabled,
+)
+from lp.rocks.interfaces.rockrecipebuild import (
+ IRockRecipeBuild,
+ IRockRecipeBuildSet,
+)
+from lp.rocks.interfaces.rockrecipejob import IRockRecipeRequestBuildsJobSource
+from lp.rocks.model.rockrecipebuild import RockFile
+from lp.rocks.model.rockrecipejob import RockRecipeJob
+from lp.services.config import config
+from lp.services.database.constants import ONE_DAY_AGO, UTC_NOW
+from lp.services.database.interfaces import IStore
+from lp.services.database.sqlbase import (
+ flush_database_caches,
+ get_transaction_timestamp,
+)
+from lp.services.features.testing import FeatureFixture
+from lp.services.job.interfaces.job import JobStatus
+from lp.services.job.runner import JobRunner
+from lp.services.webapp.snapshot import notify_modified
+from lp.testing import TestCaseWithFactory, admin_logged_in, person_logged_in
+from lp.testing.dbuser import dbuser
+from lp.testing.layers import (
+ DatabaseFunctionalLayer,
+ LaunchpadFunctionalLayer,
+ LaunchpadZopelessLayer,
+)
+>>>>>>> lib/lp/rocks/tests/test_rockrecipe.py
class TestRockRecipeFeatureFlags(TestCaseWithFactory):
@@ -81,6 +144,232 @@ class TestRockRecipe(TestCaseWithFactory):
recipe, "date_last_modified", UTC_NOW
)
+<<<<<<< lib/lp/rocks/tests/test_rockrecipe.py
+=======
+ def test_requestBuilds(self):
+ # requestBuilds schedules a job and returns a corresponding
+ # RockRecipeBuildRequest.
+ recipe = self.factory.makeRockRecipe()
+ now = get_transaction_timestamp(IStore(recipe))
+ with person_logged_in(recipe.owner.teamowner):
+ request = recipe.requestBuilds(recipe.owner.teamowner)
+ self.assertThat(
+ request,
+ MatchesStructure(
+ date_requested=Equals(now),
+ date_finished=Is(None),
+ recipe=Equals(recipe),
+ status=Equals(RockRecipeBuildRequestStatus.PENDING),
+ error_message=Is(None),
+ channels=Is(None),
+ architectures=Is(None),
+ ),
+ )
+ [job] = getUtility(IRockRecipeRequestBuildsJobSource).iterReady()
+ self.assertThat(
+ job,
+ MatchesStructure(
+ job_id=Equals(request.id),
+ job=MatchesStructure.byEquality(status=JobStatus.WAITING),
+ recipe=Equals(recipe),
+ requester=Equals(recipe.owner.teamowner),
+ channels=Is(None),
+ architectures=Is(None),
+ ),
+ )
+
+ def test_requestBuilds_with_channels(self):
+ # If asked to build using particular snap channels, requestBuilds
+ # passes those through to the job.
+ recipe = self.factory.makeRockRecipe()
+ now = get_transaction_timestamp(IStore(recipe))
+ with person_logged_in(recipe.owner.teamowner):
+ request = recipe.requestBuilds(
+ recipe.owner.teamowner, channels={"rockcraft": "edge"}
+ )
+ self.assertThat(
+ request,
+ MatchesStructure(
+ date_requested=Equals(now),
+ date_finished=Is(None),
+ recipe=Equals(recipe),
+ status=Equals(RockRecipeBuildRequestStatus.PENDING),
+ error_message=Is(None),
+ channels=MatchesDict({"rockcraft": Equals("edge")}),
+ architectures=Is(None),
+ ),
+ )
+ [job] = getUtility(IRockRecipeRequestBuildsJobSource).iterReady()
+ self.assertThat(
+ job,
+ MatchesStructure(
+ job_id=Equals(request.id),
+ job=MatchesStructure.byEquality(status=JobStatus.WAITING),
+ recipe=Equals(recipe),
+ requester=Equals(recipe.owner.teamowner),
+ channels=Equals({"rockcraft": "edge"}),
+ architectures=Is(None),
+ ),
+ )
+
+ def test_requestBuilds_with_architectures(self):
+ # If asked to build for particular architectures, requestBuilds
+ # passes those through to the job.
+ recipe = self.factory.makeRockRecipe()
+ now = get_transaction_timestamp(IStore(recipe))
+ with person_logged_in(recipe.owner.teamowner):
+ request = recipe.requestBuilds(
+ recipe.owner.teamowner, architectures={"amd64", "i386"}
+ )
+ self.assertThat(
+ request,
+ MatchesStructure(
+ date_requested=Equals(now),
+ date_finished=Is(None),
+ recipe=Equals(recipe),
+ status=Equals(RockRecipeBuildRequestStatus.PENDING),
+ error_message=Is(None),
+ channels=Is(None),
+ architectures=MatchesSetwise(Equals("amd64"), Equals("i386")),
+ ),
+ )
+ [job] = getUtility(IRockRecipeRequestBuildsJobSource).iterReady()
+ self.assertThat(
+ job,
+ MatchesStructure(
+ job_id=Equals(request.id),
+ job=MatchesStructure.byEquality(status=JobStatus.WAITING),
+ recipe=Equals(recipe),
+ requester=Equals(recipe.owner.teamowner),
+ channels=Is(None),
+ architectures=MatchesSetwise(Equals("amd64"), Equals("i386")),
+ ),
+ )
+
+ def makeRequestBuildsJob(
+ self, distro_series_version, arch_tags, git_ref=None
+ ):
+ recipe = self.factory.makeRockRecipe(git_ref=git_ref)
+ distro_series = self.factory.makeDistroSeries(
+ distribution=getUtility(ILaunchpadCelebrities).ubuntu,
+ version=distro_series_version,
+ )
+ for arch_tag in arch_tags:
+ self.makeBuildableDistroArchSeries(
+ distroseries=distro_series, architecturetag=arch_tag
+ )
+ return getUtility(IRockRecipeRequestBuildsJobSource).create(
+ recipe, recipe.owner.teamowner, {"rockcraft": "edge"}
+ )
+
+ def assertRequestedBuildsMatch(
+ self, builds, job, distro_series_version, arch_tags, channels
+ ):
+ self.assertThat(
+ builds,
+ MatchesSetwise(
+ *(
+ MatchesStructure(
+ requester=Equals(job.requester),
+ recipe=Equals(job.recipe),
+ distro_arch_series=MatchesStructure(
+ distroseries=MatchesStructure.byEquality(
+ version=distro_series_version
+ ),
+ architecturetag=Equals(arch_tag),
+ ),
+ channels=Equals(channels),
+ )
+ for arch_tag in arch_tags
+ )
+ ),
+ )
+
+ def test_requestBuildsFromJob_restricts_explicit_list(self):
+ # requestBuildsFromJob limits builds targeted at an explicit list of
+ # architectures to those allowed for the recipe.
+ self.useFixture(
+ GitHostingFixture(
+ blob=dedent(
+ """\
+ bases:
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [sparc]
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [i386]
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [avr]
+ """
+ )
+ )
+ )
+ job = self.makeRequestBuildsJob("20.04", ["sparc", "avr", "mips64el"])
+ self.assertEqual(
+ get_transaction_timestamp(IStore(job.recipe)), job.date_created
+ )
+ transaction.commit()
+ with person_logged_in(job.requester):
+ builds = job.recipe.requestBuildsFromJob(
+ job.build_request, channels=removeSecurityProxy(job.channels)
+ )
+ self.assertRequestedBuildsMatch(
+ builds, job, "20.04", ["sparc", "avr"], job.channels
+ )
+
+ def test_requestBuildsFromJob_architectures_parameter(self):
+ # If an explicit set of architectures was given as a parameter,
+ # requestBuildsFromJob intersects those with any other constraints
+ # when requesting builds.
+ self.useFixture(
+ GitHostingFixture(
+ blob=dedent(
+ """\
+ bases:
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [sparc]
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [i386]
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [avr]
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [riscv64]
+ """
+ )
+ )
+ )
+ job = self.makeRequestBuildsJob(
+ "20.04", ["avr", "mips64el", "riscv64"]
+ )
+ self.assertEqual(
+ get_transaction_timestamp(IStore(job.recipe)), job.date_created
+ )
+ transaction.commit()
+ with person_logged_in(job.requester):
+ builds = job.recipe.requestBuildsFromJob(
+ job.build_request,
+ channels=removeSecurityProxy(job.channels),
+ architectures={"avr", "riscv64"},
+ )
+ self.assertRequestedBuildsMatch(
+ builds, job, "20.04", ["avr", "riscv64"], job.channels
+ )
+
+>>>>>>> lib/lp/rocks/tests/test_rockrecipe.py
def test_delete_without_builds(self):
# A rock recipe with no builds can be deleted.
owner = self.factory.makePerson()
@@ -88,6 +377,7 @@ class TestRockRecipe(TestCaseWithFactory):
recipe = self.factory.makeRockRecipe(
registrant=owner, owner=owner, project=project, name="condemned"
)
+<<<<<<< lib/lp/rocks/tests/test_rockrecipe.py
self.assertIsNotNone(
getUtility(IRockRecipeSet).getByName(owner, project, "condemned")
)
@@ -95,6 +385,290 @@ class TestRockRecipe(TestCaseWithFactory):
recipe.destroySelf()
self.assertIsNone(
getUtility(IRockRecipeSet).getByName(owner, project, "condemned")
+=======
+ self.assertTrue(
+ getUtility(IRockRecipeSet).exists(owner, project, "condemned")
+ )
+ with person_logged_in(recipe.owner):
+ recipe.destroySelf()
+ self.assertFalse(
+ getUtility(IRockRecipeSet).exists(owner, project, "condemned")
+ )
+
+ def makeBuildableDistroArchSeries(
+ self,
+ architecturetag=None,
+ processor=None,
+ supports_virtualized=True,
+ supports_nonvirtualized=True,
+ **kwargs,
+ ):
+ if architecturetag is None:
+ architecturetag = self.factory.getUniqueUnicode("arch")
+ if processor is None:
+ try:
+ processor = getUtility(IProcessorSet).getByName(
+ architecturetag
+ )
+ except ProcessorNotFound:
+ processor = self.factory.makeProcessor(
+ name=architecturetag,
+ supports_virtualized=supports_virtualized,
+ supports_nonvirtualized=supports_nonvirtualized,
+ )
+ das = self.factory.makeDistroArchSeries(
+ architecturetag=architecturetag, processor=processor, **kwargs
+ )
+ fake_chroot = self.factory.makeLibraryFileAlias(
+ filename="fake_chroot.tar.gz", db_only=True
+ )
+ das.addOrUpdateChroot(fake_chroot)
+ return das
+
+ def test_requestBuild(self):
+ # requestBuild creates a new RockRecipeBuild.
+ recipe = self.factory.makeRockRecipe()
+ das = self.makeBuildableDistroArchSeries()
+ build_request = self.factory.makeRockRecipeBuildRequest(recipe=recipe)
+ build = recipe.requestBuild(build_request, das)
+ self.assertTrue(IRockRecipeBuild.providedBy(build))
+ self.assertThat(
+ build,
+ MatchesStructure(
+ requester=Equals(recipe.owner.teamowner),
+ distro_arch_series=Equals(das),
+ channels=Is(None),
+ status=Equals(BuildStatus.NEEDSBUILD),
+ ),
+ )
+ store = Store.of(build)
+ store.flush()
+ build_queue = store.find(
+ BuildQueue,
+ BuildQueue._build_farm_job_id
+ == removeSecurityProxy(build).build_farm_job_id,
+ ).one()
+ self.assertProvides(build_queue, IBuildQueue)
+ self.assertEqual(recipe.require_virtualized, build_queue.virtualized)
+ self.assertEqual(BuildQueueStatus.WAITING, build_queue.status)
+
+ def test_requestBuild_score(self):
+ # Build requests have a relatively low queue score (2510).
+ recipe = self.factory.makeRockRecipe()
+ das = self.makeBuildableDistroArchSeries()
+ build_request = self.factory.makeRockRecipeBuildRequest(recipe=recipe)
+ build = recipe.requestBuild(build_request, das)
+ queue_record = build.buildqueue_record
+ queue_record.score()
+ self.assertEqual(2510, queue_record.lastscore)
+
+ def test_requestBuild_channels(self):
+ # requestBuild can select non-default channels.
+ recipe = self.factory.makeRockRecipe()
+ das = self.makeBuildableDistroArchSeries()
+ build_request = self.factory.makeRockRecipeBuildRequest(recipe=recipe)
+ build = recipe.requestBuild(
+ build_request, das, channels={"rockcraft": "edge"}
+ )
+ self.assertEqual({"rockcraft": "edge"}, build.channels)
+
+ def test_requestBuild_rejects_repeats(self):
+ # requestBuild refuses if there is already a pending build.
+ recipe = self.factory.makeRockRecipe()
+ distro_series = self.factory.makeDistroSeries()
+ arches = [
+ self.makeBuildableDistroArchSeries(distroseries=distro_series)
+ for _ in range(2)
+ ]
+ build_request = self.factory.makeRockRecipeBuildRequest(recipe=recipe)
+ old_build = recipe.requestBuild(build_request, arches[0])
+ self.assertRaises(
+ RockRecipeBuildAlreadyPending,
+ recipe.requestBuild,
+ build_request,
+ arches[0],
+ )
+ # We can build for a different distroarchseries.
+ recipe.requestBuild(build_request, arches[1])
+ # channels=None and channels={} are treated as equivalent, but
+ # anything else allows a new build.
+ self.assertRaises(
+ RockRecipeBuildAlreadyPending,
+ recipe.requestBuild,
+ build_request,
+ arches[0],
+ channels={},
+ )
+ recipe.requestBuild(
+ build_request, arches[0], channels={"core": "edge"}
+ )
+ self.assertRaises(
+ RockRecipeBuildAlreadyPending,
+ recipe.requestBuild,
+ build_request,
+ arches[0],
+ channels={"core": "edge"},
+ )
+ # Changing the status of the old build allows a new build.
+ old_build.updateStatus(BuildStatus.BUILDING)
+ old_build.updateStatus(BuildStatus.FULLYBUILT)
+ recipe.requestBuild(build_request, arches[0])
+
+ def test_requestBuild_virtualization(self):
+ # New builds are virtualized if either the processor or the
+ # recipe requires it.
+ recipe = self.factory.makeRockRecipe()
+ distro_series = self.factory.makeDistroSeries()
+ dases = {}
+ for proc_nonvirt in True, False:
+ das = self.makeBuildableDistroArchSeries(
+ distroseries=distro_series,
+ supports_virtualized=True,
+ supports_nonvirtualized=proc_nonvirt,
+ )
+ dases[proc_nonvirt] = das
+ for proc_nonvirt, recipe_virt, build_virt in (
+ (True, False, False),
+ (True, True, True),
+ (False, False, True),
+ (False, True, True),
+ ):
+ das = dases[proc_nonvirt]
+ recipe = self.factory.makeRockRecipe(
+ require_virtualized=recipe_virt
+ )
+ build_request = self.factory.makeRockRecipeBuildRequest(
+ recipe=recipe
+ )
+ build = recipe.requestBuild(build_request, das)
+ self.assertEqual(build_virt, build.virtualized)
+
+ def test_requestBuild_nonvirtualized(self):
+ # A non-virtualized processor can build a rock recipe iff the
+ # recipe has require_virtualized set to False.
+ recipe = self.factory.makeRockRecipe()
+ distro_series = self.factory.makeDistroSeries()
+ das = self.makeBuildableDistroArchSeries(
+ distroseries=distro_series,
+ supports_virtualized=False,
+ supports_nonvirtualized=True,
+ )
+ build_request = self.factory.makeRockRecipeBuildRequest(recipe=recipe)
+ self.assertRaises(
+ RockRecipeBuildDisallowedArchitecture,
+ recipe.requestBuild,
+ build_request,
+ das,
+ )
+ with admin_logged_in():
+ recipe.require_virtualized = False
+ recipe.requestBuild(build_request, das)
+
+
+class TestRockRecipeDeleteWithBuilds(TestCaseWithFactory):
+
+ layer = LaunchpadFunctionalLayer
+
+ def setUp(self):
+ super().setUp()
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+
+ def test_delete_with_builds(self):
+ # A rock recipe with build requests and builds can be deleted.
+ # Doing so deletes all its build requests, their builds, and their
+ # files.
+ owner = self.factory.makePerson()
+ project = self.factory.makeProduct()
+ distroseries = self.factory.makeDistroSeries()
+ processor = self.factory.makeProcessor(supports_virtualized=True)
+ das = self.factory.makeDistroArchSeries(
+ distroseries=distroseries,
+ architecturetag=processor.name,
+ processor=processor,
+ )
+ das.addOrUpdateChroot(
+ self.factory.makeLibraryFileAlias(
+ filename="fake_chroot.tar.gz", db_only=True
+ )
+ )
+ self.useFixture(
+ GitHostingFixture(
+ blob=dedent(
+ """\
+ bases:
+ - build-on:
+ - name: "%s"
+ channel: "%s"
+ architectures: [%s]
+ """
+ % (
+ distroseries.distribution.name,
+ distroseries.name,
+ processor.name,
+ )
+ )
+ )
+ )
+ [git_ref] = self.factory.makeGitRefs()
+ condemned_recipe = self.factory.makeRockRecipe(
+ registrant=owner,
+ owner=owner,
+ project=project,
+ name="condemned",
+ git_ref=git_ref,
+ )
+ other_recipe = self.factory.makeRockRecipe(
+ registrant=owner, owner=owner, project=project, git_ref=git_ref
+ )
+ self.assertTrue(
+ getUtility(IRockRecipeSet).exists(owner, project, "condemned")
+ )
+ with person_logged_in(owner):
+ requests = []
+ jobs = []
+ for recipe in (condemned_recipe, other_recipe):
+ requests.append(recipe.requestBuilds(owner))
+ jobs.append(removeSecurityProxy(requests[-1])._job)
+ with dbuser(config.IRockRecipeRequestBuildsJobSource.dbuser):
+ JobRunner(jobs).runAll()
+ for job in jobs:
+ self.assertEqual(JobStatus.COMPLETED, job.job.status)
+ [build] = requests[0].builds
+ [other_build] = requests[1].builds
+ rock_file = self.factory.makeRockFile(build=build)
+ other_rock_file = self.factory.makeRockFile(build=other_build)
+ store = Store.of(condemned_recipe)
+ store.flush()
+ job_ids = [job.job_id for job in jobs]
+ build_id = build.id
+ build_queue_id = build.buildqueue_record.id
+ build_farm_job_id = removeSecurityProxy(build).build_farm_job_id
+ rock_file_id = removeSecurityProxy(rock_file).id
+ with person_logged_in(condemned_recipe.owner):
+ condemned_recipe.destroySelf()
+ flush_database_caches()
+ # The deleted recipe, its build requests, and its builds are gone.
+ self.assertFalse(
+ getUtility(IRockRecipeSet).exists(owner, project, "condemned")
+ )
+ self.assertIsNone(store.get(RockRecipeJob, job_ids[0]))
+ self.assertIsNone(getUtility(IRockRecipeBuildSet).getByID(build_id))
+ self.assertIsNone(store.get(BuildQueue, build_queue_id))
+ self.assertIsNone(store.get(BuildFarmJob, build_farm_job_id))
+ self.assertIsNone(store.get(RockFile, rock_file_id))
+ # Unrelated build requests, build jobs and builds are still present.
+ self.assertEqual(
+ removeSecurityProxy(jobs[1]).context,
+ store.get(RockRecipeJob, job_ids[1]),
+ )
+ self.assertEqual(
+ other_build,
+ getUtility(IRockRecipeBuildSet).getByID(other_build.id),
+ )
+ self.assertIsNotNone(other_build.buildqueue_record)
+ self.assertIsNotNone(
+ store.get(RockFile, removeSecurityProxy(other_rock_file).id)
+>>>>>>> lib/lp/rocks/tests/test_rockrecipe.py
)
@@ -224,6 +798,23 @@ class TestRockRecipeSet(TestCaseWithFactory):
),
)
+<<<<<<< lib/lp/rocks/tests/test_rockrecipe.py
+=======
+ def test_findByOwner(self):
+ # IRockRecipeSet.findByOwner returns all rock recipes with the
+ # given owner.
+ owners = [self.factory.makePerson() for i in range(2)]
+ recipes = []
+ for owner in owners:
+ for _ in range(2):
+ recipes.append(
+ self.factory.makeRockRecipe(registrant=owner, owner=owner)
+ )
+ recipe_set = getUtility(IRockRecipeSet)
+ self.assertContentEqual(recipes[:2], recipe_set.findByOwner(owners[0]))
+ self.assertContentEqual(recipes[2:], recipe_set.findByOwner(owners[1]))
+
+>>>>>>> lib/lp/rocks/tests/test_rockrecipe.py
def test_detachFromGitRepository(self):
# IRockRecipeSet.detachFromGitRepository clears the given Git
# repository from all rock recipes.
diff --git a/lib/lp/rocks/tests/test_rockrecipebuild.py b/lib/lp/rocks/tests/test_rockrecipebuild.py
new file mode 100644
index 0000000..a9d6ffe
--- /dev/null
+++ b/lib/lp/rocks/tests/test_rockrecipebuild.py
@@ -0,0 +1,449 @@
+# Copyright 2015-2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Test rock package build features."""
+from datetime import datetime, timedelta, timezone
+
+import six
+from testtools.matchers import Equals
+from zope.component import getUtility
+from zope.security.proxy import removeSecurityProxy
+
+from lp.app.enums import InformationType
+from lp.app.errors import NotFoundError
+from lp.buildmaster.enums import BuildStatus
+from lp.buildmaster.interfaces.buildqueue import IBuildQueue
+from lp.buildmaster.interfaces.packagebuild import IPackageBuild
+from lp.buildmaster.interfaces.processor import IProcessorSet
+from lp.registry.enums import PersonVisibility, TeamMembershipPolicy
+from lp.registry.interfaces.series import SeriesStatus
+from lp.rocks.interfaces.rockrecipe import (
+ ROCK_RECIPE_ALLOW_CREATE,
+ ROCK_RECIPE_PRIVATE_FEATURE_FLAG,
+)
+from lp.rocks.interfaces.rockrecipebuild import (
+ IRockRecipeBuild,
+ IRockRecipeBuildSet,
+)
+from lp.services.config import config
+from lp.services.features.testing import FeatureFixture
+from lp.services.propertycache import clear_property_cache
+from lp.testing import (
+ StormStatementRecorder,
+ TestCaseWithFactory,
+ person_logged_in,
+)
+from lp.testing.layers import LaunchpadZopelessLayer
+from lp.testing.mail_helpers import pop_notifications
+from lp.testing.matchers import HasQueryCount
+
+expected_body = """\
+ * Rock Recipe: rock-1
+ * Project: rock-project
+ * Distroseries: distro unstable
+ * Architecture: i386
+ * State: Failed to build
+ * Duration: 10 minutes
+ * Build Log: %s
+ * Upload Log: %s
+ * Builder: http://launchpad.test/builders/bob
+"""
+
+
+class TestRockRecipeBuild(TestCaseWithFactory):
+
+ layer = LaunchpadZopelessLayer
+
+ def setUp(self):
+ super().setUp()
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+ self.build = self.factory.makeRockRecipeBuild()
+
+ def test_implements_interfaces(self):
+ # RockRecipeBuild implements IPackageBuild and IRockRecipeBuild.
+ self.assertProvides(self.build, IPackageBuild)
+ self.assertProvides(self.build, IRockRecipeBuild)
+
+ def test___repr__(self):
+ # RockRecipeBuild has an informative __repr__.
+ self.assertEqual(
+ "<RockRecipeBuild ~%s/%s/+rock/%s/+build/%s>"
+ % (
+ self.build.recipe.owner.name,
+ self.build.recipe.project.name,
+ self.build.recipe.name,
+ self.build.id,
+ ),
+ repr(self.build),
+ )
+
+ def test_title(self):
+ # RockRecipeBuild has an informative title.
+ das = self.build.distro_arch_series
+ self.assertEqual(
+ "%s build of /~%s/%s/+rock/%s"
+ % (
+ das.architecturetag,
+ self.build.recipe.owner.name,
+ self.build.recipe.project.name,
+ self.build.recipe.name,
+ ),
+ self.build.title,
+ )
+
+ def test_queueBuild(self):
+ # RockRecipeBuild can create the queue entry for itself.
+ bq = self.build.queueBuild()
+ self.assertProvides(bq, IBuildQueue)
+ self.assertEqual(
+ self.build.build_farm_job, removeSecurityProxy(bq)._build_farm_job
+ )
+ self.assertEqual(self.build, bq.specific_build)
+ self.assertEqual(self.build.virtualized, bq.virtualized)
+ self.assertIsNotNone(bq.processor)
+ self.assertEqual(bq, self.build.buildqueue_record)
+
+ def test_is_private(self):
+ # A RockRecipeBuild is private iff its recipe or owner are.
+ self.assertFalse(self.build.is_private)
+ self.useFixture(
+ FeatureFixture(
+ {
+ ROCK_RECIPE_ALLOW_CREATE: "on",
+ ROCK_RECIPE_PRIVATE_FEATURE_FLAG: "on",
+ }
+ )
+ )
+ private_team = self.factory.makeTeam(
+ membership_policy=TeamMembershipPolicy.MODERATED,
+ visibility=PersonVisibility.PRIVATE,
+ )
+ with person_logged_in(private_team.teamowner):
+ build = self.factory.makeRockRecipeBuild(
+ requester=private_team.teamowner,
+ owner=private_team,
+ information_type=InformationType.PROPRIETARY,
+ )
+ self.assertTrue(build.is_private)
+
+ def test_can_be_retried(self):
+ ok_cases = [
+ BuildStatus.FAILEDTOBUILD,
+ BuildStatus.MANUALDEPWAIT,
+ BuildStatus.CHROOTWAIT,
+ BuildStatus.FAILEDTOUPLOAD,
+ BuildStatus.CANCELLED,
+ BuildStatus.SUPERSEDED,
+ ]
+ for status in BuildStatus.items:
+ build = self.factory.makeRockRecipeBuild(status=status)
+ if status in ok_cases:
+ self.assertTrue(build.can_be_retried)
+ else:
+ self.assertFalse(build.can_be_retried)
+
+ def test_can_be_retried_obsolete_series(self):
+ # Builds for obsolete series cannot be retried.
+ distroseries = self.factory.makeDistroSeries(
+ status=SeriesStatus.OBSOLETE
+ )
+ das = self.factory.makeDistroArchSeries(distroseries=distroseries)
+ build = self.factory.makeRockRecipeBuild(distro_arch_series=das)
+ self.assertFalse(build.can_be_retried)
+
+ def test_can_be_cancelled(self):
+ # For all states that can be cancelled, can_be_cancelled returns True.
+ ok_cases = [
+ BuildStatus.BUILDING,
+ BuildStatus.NEEDSBUILD,
+ ]
+ for status in BuildStatus.items:
+ build = self.factory.makeRockRecipeBuild()
+ build.queueBuild()
+ build.updateStatus(status)
+ if status in ok_cases:
+ self.assertTrue(build.can_be_cancelled)
+ else:
+ self.assertFalse(build.can_be_cancelled)
+
+ def test_retry_resets_state(self):
+ # Retrying a build resets most of the state attributes, but does
+ # not modify the first dispatch time.
+ now = datetime.now(timezone.utc)
+ build = self.factory.makeRockRecipeBuild()
+ build.updateStatus(BuildStatus.BUILDING, date_started=now)
+ build.updateStatus(BuildStatus.FAILEDTOBUILD)
+ build.gotFailure()
+ with person_logged_in(build.recipe.owner):
+ build.retry()
+ self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
+ self.assertEqual(now, build.date_first_dispatched)
+ self.assertIsNone(build.log)
+ self.assertIsNone(build.upload_log)
+ self.assertEqual(0, build.failure_count)
+
+ def test_cancel_not_in_progress(self):
+ # The cancel() method for a pending build leaves it in the CANCELLED
+ # state.
+ self.build.queueBuild()
+ self.build.cancel()
+ self.assertEqual(BuildStatus.CANCELLED, self.build.status)
+ self.assertIsNone(self.build.buildqueue_record)
+
+ def test_cancel_in_progress(self):
+ # The cancel() method for a building build leaves it in the
+ # CANCELLING state.
+ bq = self.build.queueBuild()
+ bq.markAsBuilding(self.factory.makeBuilder())
+ self.build.cancel()
+ self.assertEqual(BuildStatus.CANCELLING, self.build.status)
+ self.assertEqual(bq, self.build.buildqueue_record)
+
+ def test_estimateDuration(self):
+ # Without previous builds, the default time estimate is 10m.
+ self.assertEqual(600, self.build.estimateDuration().seconds)
+
+ def test_estimateDuration_with_history(self):
+ # Previous successful builds of the same recipe are used for
+ # estimates.
+ self.factory.makeRockRecipeBuild(
+ requester=self.build.requester,
+ recipe=self.build.recipe,
+ distro_arch_series=self.build.distro_arch_series,
+ status=BuildStatus.FULLYBUILT,
+ duration=timedelta(seconds=335),
+ )
+ for _ in range(3):
+ self.factory.makeRockRecipeBuild(
+ requester=self.build.requester,
+ recipe=self.build.recipe,
+ distro_arch_series=self.build.distro_arch_series,
+ status=BuildStatus.FAILEDTOBUILD,
+ duration=timedelta(seconds=20),
+ )
+ self.assertEqual(335, self.build.estimateDuration().seconds)
+
+ def test_build_cookie(self):
+ build = self.factory.makeRockRecipeBuild()
+ self.assertEqual("ROCKRECIPEBUILD-%d" % build.id, build.build_cookie)
+
+ def test_getFileByName_logs(self):
+ # getFileByName returns the logs when requested by name.
+ self.build.setLog(
+ self.factory.makeLibraryFileAlias(filename="buildlog.txt.gz")
+ )
+ self.assertEqual(
+ self.build.log, self.build.getFileByName("buildlog.txt.gz")
+ )
+ self.assertRaises(NotFoundError, self.build.getFileByName, "foo")
+ self.build.storeUploadLog("uploaded")
+ self.assertEqual(
+ self.build.upload_log,
+ self.build.getFileByName(self.build.upload_log.filename),
+ )
+
+ def test_getFileByName_uploaded_files(self):
+ # getFileByName returns uploaded files when requested by name.
+ filenames = ("ubuntu.squashfs", "ubuntu.manifest")
+ lfas = []
+ for filename in filenames:
+ lfa = self.factory.makeLibraryFileAlias(filename=filename)
+ lfas.append(lfa)
+ self.build.addFile(lfa)
+ self.assertContentEqual(
+ lfas, [row[1] for row in self.build.getFiles()]
+ )
+ for filename, lfa in zip(filenames, lfas):
+ self.assertEqual(lfa, self.build.getFileByName(filename))
+ self.assertRaises(NotFoundError, self.build.getFileByName, "missing")
+
+ def test_verifySuccessfulUpload(self):
+ self.assertFalse(self.build.verifySuccessfulUpload())
+ self.factory.makeRockFile(build=self.build)
+ self.assertTrue(self.build.verifySuccessfulUpload())
+
+ def test_updateStatus_stores_revision_id(self):
+ # If the builder reports a revision_id, updateStatus saves it.
+ self.assertIsNone(self.build.revision_id)
+ self.build.updateStatus(BuildStatus.BUILDING, worker_status={})
+ self.assertIsNone(self.build.revision_id)
+ self.build.updateStatus(
+ BuildStatus.BUILDING, worker_status={"revision_id": "dummy"}
+ )
+ self.assertEqual("dummy", self.build.revision_id)
+
+ def test_notify_fullybuilt(self):
+ # notify does not send mail when a recipe build completes normally.
+ build = self.factory.makeRockRecipeBuild(status=BuildStatus.FULLYBUILT)
+ build.notify()
+ self.assertEqual(0, len(pop_notifications()))
+
+ def test_notify_packagefail(self):
+ # notify sends mail when a recipe build fails.
+ person = self.factory.makePerson(name="person")
+ project = self.factory.makeProduct(name="rock-project")
+ distribution = self.factory.makeDistribution(name="distro")
+ distroseries = self.factory.makeDistroSeries(
+ distribution=distribution, name="unstable"
+ )
+ processor = getUtility(IProcessorSet).getByName("386")
+ das = self.factory.makeDistroArchSeries(
+ distroseries=distroseries,
+ architecturetag="i386",
+ processor=processor,
+ )
+ build = self.factory.makeRockRecipeBuild(
+ name="rock-1",
+ requester=person,
+ owner=person,
+ project=project,
+ distro_arch_series=das,
+ status=BuildStatus.FAILEDTOBUILD,
+ builder=self.factory.makeBuilder(name="bob"),
+ duration=timedelta(minutes=10),
+ )
+ build.setLog(self.factory.makeLibraryFileAlias())
+ build.notify()
+ [notification] = pop_notifications()
+ self.assertEqual(
+ config.canonical.noreply_from_address, notification["From"]
+ )
+ self.assertEqual(
+ "Person <%s>" % person.preferredemail.email, notification["To"]
+ )
+ subject = notification["Subject"].replace("\n ", " ")
+ self.assertEqual(
+ "[Rock recipe build #%d] i386 build of "
+ "/~person/rock-project/+rock/rock-1" % build.id,
+ subject,
+ )
+ self.assertEqual(
+ "Requester", notification["X-Launchpad-Message-Rationale"]
+ )
+ self.assertEqual(person.name, notification["X-Launchpad-Message-For"])
+ self.assertEqual(
+ "rock-recipe-build-status",
+ notification["X-Launchpad-Notification-Type"],
+ )
+ self.assertEqual(
+ "FAILEDTOBUILD", notification["X-Launchpad-Build-State"]
+ )
+ body, footer = six.ensure_text(
+ notification.get_payload(decode=True)
+ ).split("\n-- \n")
+ self.assertEqual(expected_body % (build.log_url, ""), body)
+ self.assertEqual(
+ "http://launchpad.test/~person/rock-project/+rock/rock-1/"
+ "+build/%d\n"
+ "You are the requester of the build.\n" % build.id,
+ footer,
+ )
+
+ def addFakeBuildLog(self, build):
+ build.setLog(self.factory.makeLibraryFileAlias("mybuildlog.txt"))
+
+ def test_log_url_123(self):
+ # The log URL for a rock recipe build will use the recipe context.
+ self.addFakeBuildLog(self.build)
+ self.build.log_url
+ self.assertEqual(
+ "http://launchpad.test/~%s/%s/+rock/%s/+build/%d/+files/"
+ "mybuildlog.txt"
+ % (
+ self.build.recipe.owner.name,
+ self.build.recipe.project.name,
+ self.build.recipe.name,
+ self.build.id,
+ ),
+ self.build.log_url,
+ )
+
+ def test_eta(self):
+ # RockRecipeBuild.eta returns a non-None value when it should, or
+ # None when there's no start time.
+ self.build.queueBuild()
+ self.assertIsNone(self.build.eta)
+ self.factory.makeBuilder(processors=[self.build.processor])
+ clear_property_cache(self.build)
+ self.assertIsNotNone(self.build.eta)
+
+ def test_eta_cached(self):
+ # The expensive completion time estimate is cached.
+ self.build.queueBuild()
+ self.build.eta
+ with StormStatementRecorder() as recorder:
+ self.build.eta
+ self.assertThat(recorder, HasQueryCount(Equals(0)))
+
+ def test_estimate(self):
+ # RockRecipeBuild.estimate returns True until the job is completed.
+ self.build.queueBuild()
+ self.factory.makeBuilder(processors=[self.build.processor])
+ self.build.updateStatus(BuildStatus.BUILDING)
+ self.assertTrue(self.build.estimate)
+ self.build.updateStatus(BuildStatus.FULLYBUILT)
+ clear_property_cache(self.build)
+ self.assertFalse(self.build.estimate)
+
+
+class TestRockRecipeBuildSet(TestCaseWithFactory):
+
+ layer = LaunchpadZopelessLayer
+
+ def setUp(self):
+ super().setUp()
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+
+ def test_getByBuildFarmJob_works(self):
+ build = self.factory.makeRockRecipeBuild()
+ self.assertEqual(
+ build,
+ getUtility(IRockRecipeBuildSet).getByBuildFarmJob(
+ build.build_farm_job
+ ),
+ )
+
+ def test_getByBuildFarmJob_returns_None_when_missing(self):
+ bpb = self.factory.makeBinaryPackageBuild()
+ self.assertIsNone(
+ getUtility(IRockRecipeBuildSet).getByBuildFarmJob(
+ bpb.build_farm_job
+ )
+ )
+
+ def test_getByBuildFarmJobs_works(self):
+ builds = [self.factory.makeRockRecipeBuild() for i in range(10)]
+ self.assertContentEqual(
+ builds,
+ getUtility(IRockRecipeBuildSet).getByBuildFarmJobs(
+ [build.build_farm_job for build in builds]
+ ),
+ )
+
+ def test_getByBuildFarmJobs_works_empty(self):
+ self.assertContentEqual(
+ [], getUtility(IRockRecipeBuildSet).getByBuildFarmJobs([])
+ )
+
+ def test_virtualized_recipe_requires(self):
+ recipe = self.factory.makeRockRecipe(require_virtualized=True)
+ target = self.factory.makeRockRecipeBuild(recipe=recipe)
+ self.assertTrue(target.virtualized)
+
+ def test_virtualized_processor_requires(self):
+ recipe = self.factory.makeRockRecipe(require_virtualized=False)
+ distro_arch_series = self.factory.makeDistroArchSeries()
+ distro_arch_series.processor.supports_nonvirtualized = False
+ target = self.factory.makeRockRecipeBuild(
+ distro_arch_series=distro_arch_series, recipe=recipe
+ )
+ self.assertTrue(target.virtualized)
+
+ def test_virtualized_no_support(self):
+ recipe = self.factory.makeRockRecipe(require_virtualized=False)
+ distro_arch_series = self.factory.makeDistroArchSeries()
+ distro_arch_series.processor.supports_nonvirtualized = True
+ target = self.factory.makeRockRecipeBuild(
+ recipe=recipe, distro_arch_series=distro_arch_series
+ )
+ self.assertFalse(target.virtualized)
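For reference, the tests in this file lean on the makeRockRecipeBuild and makeRockFile helpers added to lib/lp/testing/factory.py further down in this diff; a minimal usage sketch (not part of the proposed change, assuming a LaunchpadObjectFactory as in TestCaseWithFactory) that models a finished build with one uploaded file could look like:

    # Sketch only: construct a finished rock recipe build with an
    # uploaded file, using the factory helpers introduced in this diff.
    from datetime import timedelta

    from lp.buildmaster.enums import BuildStatus

    def make_finished_build_with_file(factory):
        build = factory.makeRockRecipeBuild(
            status=BuildStatus.FULLYBUILT,
            duration=timedelta(minutes=10),
        )
        # verifySuccessfulUpload() stays False until a RockFile is
        # attached to the build.
        factory.makeRockFile(build=build)
        return build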
diff --git a/lib/lp/rocks/tests/test_rockrecipebuildbehaviour.py b/lib/lp/rocks/tests/test_rockrecipebuildbehaviour.py
new file mode 100644
index 0000000..ede523b
--- /dev/null
+++ b/lib/lp/rocks/tests/test_rockrecipebuildbehaviour.py
@@ -0,0 +1,588 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Test rock recipe build behaviour."""
+
+import base64
+import os.path
+import time
+import uuid
+from datetime import datetime
+from urllib.parse import urlsplit
+
+from fixtures import MockPatch
+from pymacaroons import Macaroon
+from testtools import ExpectedException
+from testtools.matchers import (
+ ContainsDict,
+ Equals,
+ Is,
+ IsInstance,
+ MatchesDict,
+ MatchesListwise,
+ StartsWith,
+)
+from testtools.twistedsupport import (
+ AsynchronousDeferredRunTestForBrokenTwisted,
+)
+from twisted.internet import defer
+from zope.component import getUtility
+from zope.proxy import isProxy
+from zope.security.proxy import removeSecurityProxy
+
+from lp.app.enums import InformationType
+from lp.archivepublisher.interfaces.archivegpgsigningkey import (
+ IArchiveGPGSigningKey,
+)
+from lp.buildmaster.enums import BuildBaseImageType, BuildStatus
+from lp.buildmaster.interactor import shut_down_default_process_pool
+from lp.buildmaster.interfaces.builder import CannotBuild
+from lp.buildmaster.interfaces.buildfarmjobbehaviour import (
+ IBuildFarmJobBehaviour,
+)
+from lp.buildmaster.interfaces.processor import IProcessorSet
+from lp.buildmaster.tests.builderproxy import (
+ InProcessProxyAuthAPIFixture,
+ ProxyURLMatcher,
+ RevocationEndpointMatcher,
+)
+from lp.buildmaster.tests.mock_workers import (
+ MockBuilder,
+ OkWorker,
+ WorkerTestHelpers,
+)
+from lp.buildmaster.tests.test_buildfarmjobbehaviour import (
+ TestGetUploadMethodsMixin,
+ TestHandleStatusMixin,
+ TestVerifySuccessfulBuildMixin,
+)
+from lp.registry.interfaces.series import SeriesStatus
+from lp.rocks.interfaces.rockrecipe import (
+ ROCK_RECIPE_ALLOW_CREATE,
+ ROCK_RECIPE_PRIVATE_FEATURE_FLAG,
+)
+from lp.rocks.model.rockrecipebuildbehaviour import RockRecipeBuildBehaviour
+from lp.services.config import config
+from lp.services.features.testing import FeatureFixture
+from lp.services.log.logger import BufferLogger, DevNullLogger
+from lp.services.statsd.tests import StatsMixin
+from lp.services.webapp import canonical_url
+from lp.soyuz.adapters.archivedependencies import get_sources_list_for_building
+from lp.soyuz.enums import PackagePublishingStatus
+from lp.soyuz.tests.soyuz import Base64KeyMatches
+from lp.testing import TestCaseWithFactory
+from lp.testing.dbuser import dbuser
+from lp.testing.gpgkeys import gpgkeysdir
+from lp.testing.keyserver import InProcessKeyServerFixture
+from lp.testing.layers import LaunchpadZopelessLayer
+
+
+class TestRockRecipeBuildBehaviourBase(TestCaseWithFactory):
+ layer = LaunchpadZopelessLayer
+
+ def setUp(self):
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+ super().setUp()
+
+ def makeJob(self, distribution=None, with_builder=False, **kwargs):
+ """Create a sample `IRockRecipeBuildBehaviour`."""
+ if distribution is None:
+ distribution = self.factory.makeDistribution(name="distro")
+ distroseries = self.factory.makeDistroSeries(
+ distribution=distribution, name="unstable"
+ )
+ processor = getUtility(IProcessorSet).getByName("386")
+ distroarchseries = self.factory.makeDistroArchSeries(
+ distroseries=distroseries,
+ architecturetag="i386",
+ processor=processor,
+ )
+
+ # Taken from test_archivedependencies.py
+ for component_name in ("main", "universe"):
+ self.factory.makeComponentSelection(distroseries, component_name)
+
+ build = self.factory.makeRockRecipeBuild(
+ distro_arch_series=distroarchseries, name="test-rock", **kwargs
+ )
+ return IBuildFarmJobBehaviour(build)
+
+
+class TestRockRecipeBuildBehaviour(TestRockRecipeBuildBehaviourBase):
+ layer = LaunchpadZopelessLayer
+
+ def test_provides_interface(self):
+ # RockRecipeBuildBehaviour provides IBuildFarmJobBehaviour.
+ job = RockRecipeBuildBehaviour(None)
+ self.assertProvides(job, IBuildFarmJobBehaviour)
+
+ def test_adapts_IRockRecipeBuild(self):
+ # IBuildFarmJobBehaviour adapts an IRockRecipeBuild.
+ build = self.factory.makeRockRecipeBuild()
+ job = IBuildFarmJobBehaviour(build)
+ self.assertProvides(job, IBuildFarmJobBehaviour)
+
+ def test_verifyBuildRequest_valid(self):
+ # verifyBuildRequest doesn't raise any exceptions when called with a
+ # valid builder set.
+ job = self.makeJob()
+ lfa = self.factory.makeLibraryFileAlias()
+ job.build.distro_arch_series.addOrUpdateChroot(lfa)
+ builder = MockBuilder()
+ job.setBuilder(builder, OkWorker())
+ logger = BufferLogger()
+ job.verifyBuildRequest(logger)
+ self.assertEqual("", logger.getLogBuffer())
+
+ def test_verifyBuildRequest_virtual_mismatch(self):
+ # verifyBuildRequest raises on an attempt to build a virtualized
+ # build on a non-virtual builder.
+ job = self.makeJob()
+ lfa = self.factory.makeLibraryFileAlias()
+ job.build.distro_arch_series.addOrUpdateChroot(lfa)
+ builder = MockBuilder(virtualized=False)
+ job.setBuilder(builder, OkWorker())
+ logger = BufferLogger()
+ e = self.assertRaises(AssertionError, job.verifyBuildRequest, logger)
+ self.assertEqual(
+ "Attempt to build virtual item on a non-virtual builder.", str(e)
+ )
+
+ def test_verifyBuildRequest_no_chroot(self):
+ # verifyBuildRequest raises when the DAS has no chroot.
+ job = self.makeJob()
+ builder = MockBuilder()
+ job.setBuilder(builder, OkWorker())
+ logger = BufferLogger()
+ e = self.assertRaises(CannotBuild, job.verifyBuildRequest, logger)
+ self.assertIn("Missing chroot", str(e))
+
+
+class TestAsyncRockRecipeBuildBehaviour(
+ StatsMixin, TestRockRecipeBuildBehaviourBase
+):
+
+ run_tests_with = AsynchronousDeferredRunTestForBrokenTwisted.make_factory(
+ timeout=30
+ )
+
+ @defer.inlineCallbacks
+ def setUp(self):
+ super().setUp()
+ build_username = "ROCKRECIPEBUILD-1"
+ self.token = {
+ "secret": uuid.uuid4().hex,
+ "username": build_username,
+ "timestamp": datetime.utcnow().isoformat(),
+ }
+ self.proxy_url = (
+ "http://{username}:{password}"
+ "@{host}:{port}".format(
+ username=self.token["username"],
+ password=self.token["secret"],
+ host=config.builddmaster.builder_proxy_host,
+ port=config.builddmaster.builder_proxy_port,
+ )
+ )
+ self.proxy_api = self.useFixture(InProcessProxyAuthAPIFixture())
+ yield self.proxy_api.start()
+ self.now = time.time()
+ self.useFixture(MockPatch("time.time", return_value=self.now))
+ self.addCleanup(shut_down_default_process_pool)
+ self.setUpStats()
+
+ def makeJob(self, **kwargs):
+ # We need a builder in these tests, in order that requesting a proxy
+ # token can piggyback on its reactor and pool.
+ job = super().makeJob(**kwargs)
+ builder = MockBuilder()
+ builder.processor = job.build.processor
+ worker = self.useFixture(WorkerTestHelpers()).getClientWorker()
+ job.setBuilder(builder, worker)
+ self.addCleanup(worker.pool.closeCachedConnections)
+ return job
+
+ @defer.inlineCallbacks
+ def test_composeBuildRequest(self):
+ job = self.makeJob()
+ lfa = self.factory.makeLibraryFileAlias(db_only=True)
+ job.build.distro_arch_series.addOrUpdateChroot(lfa)
+ build_request = yield job.composeBuildRequest(None)
+ self.assertThat(
+ build_request,
+ MatchesListwise(
+ [
+ Equals("rock"),
+ Equals(job.build.distro_arch_series),
+ Equals(job.build.pocket),
+ Equals({}),
+ IsInstance(dict),
+ ]
+ ),
+ )
+
+ @defer.inlineCallbacks
+ def test_requestProxyToken_unconfigured(self):
+ self.pushConfig("builddmaster", builder_proxy_host=None)
+ job = self.makeJob()
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertEqual([], self.proxy_api.tokens.requests)
+ self.assertNotIn("proxy_url", args)
+ self.assertNotIn("revocation_endpoint", args)
+
+ @defer.inlineCallbacks
+ def test_requestProxyToken_no_secret(self):
+ self.pushConfig(
+ "builddmaster", builder_proxy_auth_api_admin_secret=None
+ )
+ job = self.makeJob()
+ expected_exception_msg = (
+ "builder_proxy_auth_api_admin_secret is not configured."
+ )
+ with ExpectedException(CannotBuild, expected_exception_msg):
+ yield job.extraBuildArgs()
+
+ @defer.inlineCallbacks
+ def test_requestProxyToken(self):
+ job = self.makeJob()
+ yield job.extraBuildArgs()
+ expected_uri = urlsplit(
+ config.builddmaster.builder_proxy_auth_api_endpoint
+ ).path.encode("UTF-8")
+ request_matcher = MatchesDict(
+ {
+ "method": Equals(b"POST"),
+ "uri": Equals(expected_uri),
+ "headers": ContainsDict(
+ {
+ b"Authorization": MatchesListwise(
+ [
+ Equals(
+ b"Basic "
+ + base64.b64encode(
+ b"admin-launchpad.test:admin-secret"
+ )
+ )
+ ]
+ ),
+ b"Content-Type": MatchesListwise(
+ [Equals(b"application/json")]
+ ),
+ }
+ ),
+ "json": MatchesDict(
+ {"username": StartsWith(job.build.build_cookie + "-")}
+ ),
+ }
+ )
+ self.assertThat(
+ self.proxy_api.tokens.requests,
+ MatchesListwise([request_matcher]),
+ )
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_git(self):
+ # extraBuildArgs returns appropriate arguments if asked to build a
+ # job for a Git branch.
+ [ref] = self.factory.makeGitRefs()
+ job = self.makeJob(git_ref=ref, with_builder=True)
+ expected_archives, expected_trusted_keys = (
+ yield get_sources_list_for_building(
+ job, job.build.distro_arch_series, None
+ )
+ )
+ for archive_line in expected_archives:
+ self.assertIn("universe", archive_line)
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertThat(
+ args,
+ MatchesDict(
+ {
+ "archive_private": Is(False),
+ "archives": Equals(expected_archives),
+ "arch_tag": Equals("i386"),
+ "build_url": Equals(canonical_url(job.build)),
+ "builder_constraints": Equals([]),
+ "channels": Equals({}),
+ "fast_cleanup": Is(True),
+ "git_repository": Equals(ref.repository.git_https_url),
+ "git_path": Equals(ref.name),
+ "name": Equals("test-rock"),
+ "private": Is(False),
+ "proxy_url": ProxyURLMatcher(job, self.now),
+ "revocation_endpoint": RevocationEndpointMatcher(
+ job, self.now
+ ),
+ "series": Equals("unstable"),
+ "trusted_keys": Equals(expected_trusted_keys),
+ "use_fetch_service": Is(False),
+ }
+ ),
+ )
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_git_HEAD(self):
+ # extraBuildArgs returns appropriate arguments if asked to build a
+ # job for the default branch in a Launchpad-hosted Git repository.
+ [ref] = self.factory.makeGitRefs()
+ removeSecurityProxy(ref.repository)._default_branch = ref.path
+ job = self.makeJob(
+ git_ref=ref.repository.getRefByPath("HEAD"), with_builder=True
+ )
+ expected_archives, expected_trusted_keys = (
+ yield get_sources_list_for_building(
+ job, job.build.distro_arch_series, None
+ )
+ )
+ for archive_line in expected_archives:
+ self.assertIn("universe", archive_line)
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertThat(
+ args,
+ MatchesDict(
+ {
+ "archive_private": Is(False),
+ "archives": Equals(expected_archives),
+ "arch_tag": Equals("i386"),
+ "build_url": Equals(canonical_url(job.build)),
+ "builder_constraints": Equals([]),
+ "channels": Equals({}),
+ "fast_cleanup": Is(True),
+ "git_repository": Equals(ref.repository.git_https_url),
+ "name": Equals("test-rock"),
+ "private": Is(False),
+ "proxy_url": ProxyURLMatcher(job, self.now),
+ "revocation_endpoint": RevocationEndpointMatcher(
+ job, self.now
+ ),
+ "series": Equals("unstable"),
+ "trusted_keys": Equals(expected_trusted_keys),
+ "use_fetch_service": Is(False),
+ }
+ ),
+ )
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_prefers_store_name(self):
+ # For the "name" argument, extraBuildArgs prefers
+ # RockRecipe.store_name over RockRecipe.name if the former is set.
+ job = self.makeJob(store_name="something-else")
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertEqual("something-else", args["name"])
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_archive_trusted_keys(self):
+ # If the archive has a signing key, extraBuildArgs sends it.
+ yield self.useFixture(InProcessKeyServerFixture()).start()
+ distribution = self.factory.makeDistribution()
+ key_path = os.path.join(gpgkeysdir, "ppa-sample@xxxxxxxxxxxxxxxxx")
+ yield IArchiveGPGSigningKey(distribution.main_archive).setSigningKey(
+ key_path, async_keyserver=True
+ )
+ job = self.makeJob(distribution=distribution)
+ self.factory.makeBinaryPackagePublishingHistory(
+ distroarchseries=job.build.distro_arch_series,
+ pocket=job.build.pocket,
+ archive=distribution.main_archive,
+ status=PackagePublishingStatus.PUBLISHED,
+ )
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertThat(
+ args["trusted_keys"],
+ MatchesListwise(
+ [
+ Base64KeyMatches(
+ "0D57E99656BEFB0897606EE9A022DD1F5001B46D"
+ ),
+ ]
+ ),
+ )
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_build_path(self):
+ # If the recipe specifies a build path, extraBuildArgs sends it.
+ job = self.makeJob(build_path="src", with_builder=True)
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertEqual("src", args["build_path"])
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_channels(self):
+ # If the build needs particular channels, extraBuildArgs sends them.
+ job = self.makeJob(channels={"rockcraft": "edge"}, with_builder=True)
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertFalse(isProxy(args["channels"]))
+ self.assertEqual({"rockcraft": "edge"}, args["channels"])
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_archives_primary(self):
+ # The build uses the release, security, and updates pockets from the
+ # primary archive.
+ job = self.makeJob(with_builder=True)
+ expected_archives = [
+ "deb %s %s main universe"
+ % (job.archive.archive_url, job.build.distro_series.name),
+ "deb %s %s-security main universe"
+ % (job.archive.archive_url, job.build.distro_series.name),
+ "deb %s %s-updates main universe"
+ % (job.archive.archive_url, job.build.distro_series.name),
+ ]
+ with dbuser(config.builddmaster.dbuser):
+ extra_args = yield job.extraBuildArgs()
+ self.assertEqual(expected_archives, extra_args["archives"])
+
+ @defer.inlineCallbacks
+ def test_extraBuildArgs_private(self):
+ # If the recipe is private, extraBuildArgs sends the appropriate
+ # arguments.
+ self.useFixture(
+ FeatureFixture(
+ {
+ ROCK_RECIPE_ALLOW_CREATE: "on",
+ ROCK_RECIPE_PRIVATE_FEATURE_FLAG: "on",
+ }
+ )
+ )
+ job = self.makeJob(information_type=InformationType.PROPRIETARY)
+ with dbuser(config.builddmaster.dbuser):
+ args = yield job.extraBuildArgs()
+ self.assertTrue(args["private"])
+
+ @defer.inlineCallbacks
+ def test_composeBuildRequest_proxy_url_set(self):
+ job = self.makeJob()
+ build_request = yield job.composeBuildRequest(None)
+ self.assertThat(
+ build_request[4]["proxy_url"], ProxyURLMatcher(job, self.now)
+ )
+ self.assertFalse(build_request[4]["use_fetch_service"])
+
+ @defer.inlineCallbacks
+ def test_composeBuildRequest_git_ref_deleted(self):
+ # If the source Git reference has been deleted, composeBuildRequest
+ # raises CannotBuild.
+ repository = self.factory.makeGitRepository()
+ [ref] = self.factory.makeGitRefs(repository=repository)
+ owner = self.factory.makePerson(name="rock-owner")
+ project = self.factory.makeProduct(name="rock-project")
+ job = self.makeJob(
+ registrant=owner,
+ owner=owner,
+ project=project,
+ git_ref=ref,
+ )
+ repository.removeRefs([ref.path])
+ self.assertIsNone(job.build.recipe.git_ref)
+ expected_exception_msg = (
+ r"Source repository for "
+ r"~rock-owner/rock-project/\+rock/test-rock has been deleted."
+ )
+ with ExpectedException(CannotBuild, expected_exception_msg):
+ yield job.composeBuildRequest(None)
+
+ @defer.inlineCallbacks
+ def test_dispatchBuildToWorker_prefers_lxd(self):
+ self.pushConfig("builddmaster", builder_proxy_host=None)
+ job = self.makeJob()
+ builder = MockBuilder()
+ builder.processor = job.build.processor
+ worker = OkWorker()
+ job.setBuilder(builder, worker)
+ chroot_lfa = self.factory.makeLibraryFileAlias(db_only=True)
+ job.build.distro_arch_series.addOrUpdateChroot(
+ chroot_lfa, image_type=BuildBaseImageType.CHROOT
+ )
+ lxd_lfa = self.factory.makeLibraryFileAlias(db_only=True)
+ job.build.distro_arch_series.addOrUpdateChroot(
+ lxd_lfa, image_type=BuildBaseImageType.LXD
+ )
+ yield job.dispatchBuildToWorker(DevNullLogger())
+ self.assertEqual(
+ ("ensurepresent", lxd_lfa.http_url, "", ""), worker.call_log[0]
+ )
+ self.assertEqual(1, self.stats_client.incr.call_count)
+ self.assertEqual(
+ self.stats_client.incr.call_args_list[0][0],
+ (
+ "build.count,builder_name={},env=test,"
+ "job_type=ROCKRECIPEBUILD,region={}".format(
+ builder.name, builder.region
+ ),
+ ),
+ )
+
+ @defer.inlineCallbacks
+ def test_dispatchBuildToWorker_falls_back_to_chroot(self):
+ self.pushConfig("builddmaster", builder_proxy_host=None)
+ job = self.makeJob()
+ builder = MockBuilder()
+ builder.processor = job.build.processor
+ worker = OkWorker()
+ job.setBuilder(builder, worker)
+ chroot_lfa = self.factory.makeLibraryFileAlias(db_only=True)
+ job.build.distro_arch_series.addOrUpdateChroot(
+ chroot_lfa, image_type=BuildBaseImageType.CHROOT
+ )
+ yield job.dispatchBuildToWorker(DevNullLogger())
+ self.assertEqual(
+ ("ensurepresent", chroot_lfa.http_url, "", ""), worker.call_log[0]
+ )
+
+
+class MakeRockRecipeBuildMixin:
+ """Provide the common makeBuild method returning a queued build."""
+
+ def makeRockRecipe(self):
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+ return self.factory.makeRockRecipe(
+ store_upload=True,
+ store_name=self.factory.getUniqueUnicode(),
+ store_secrets={"root": Macaroon().serialize()},
+ )
+
+ def makeBuild(self):
+ recipe = self.makeRockRecipe()
+ build = self.factory.makeRockRecipeBuild(
+ requester=recipe.registrant,
+ recipe=recipe,
+ status=BuildStatus.BUILDING,
+ )
+ build.queueBuild()
+ return build
+
+ def makeUnmodifiableBuild(self):
+ recipe = self.makeRockRecipe()
+ build = self.factory.makeRockRecipeBuild(
+ requester=recipe.registrant,
+ recipe=recipe,
+ status=BuildStatus.BUILDING,
+ )
+ build.distro_series.status = SeriesStatus.OBSOLETE
+ build.queueBuild()
+ return build
+
+
+class TestGetUploadMethodsForRockRecipeBuild(
+ MakeRockRecipeBuildMixin, TestGetUploadMethodsMixin, TestCaseWithFactory
+):
+ """IPackageBuild.getUpload* methods work with rock recipe builds."""
+
+
+class TestVerifySuccessfulBuildForRockRecipeBuild(
+ MakeRockRecipeBuildMixin,
+ TestVerifySuccessfulBuildMixin,
+ TestCaseWithFactory,
+):
+ """IBuildFarmJobBehaviour.verifySuccessfulBuild works."""
+
+
+class TestHandleStatusForRockRecipeBuild(
+ MakeRockRecipeBuildMixin, TestHandleStatusMixin, TestCaseWithFactory
+):
+ """IPackageBuild.handleStatus works with rock recipe builds."""
diff --git a/lib/lp/rocks/tests/test_rockrecipejob.py b/lib/lp/rocks/tests/test_rockrecipejob.py
new file mode 100644
index 0000000..d3aee31
--- /dev/null
+++ b/lib/lp/rocks/tests/test_rockrecipejob.py
@@ -0,0 +1,278 @@
+# Copyright 2024 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Tests for rock recipe jobs."""
+
+from textwrap import dedent
+
+import six
+from testtools.matchers import (
+ AfterPreprocessing,
+ ContainsDict,
+ Equals,
+ GreaterThan,
+ Is,
+ LessThan,
+ MatchesAll,
+ MatchesSetwise,
+ MatchesStructure,
+)
+from zope.component import getUtility
+
+from lp.app.interfaces.launchpad import ILaunchpadCelebrities
+from lp.code.tests.helpers import GitHostingFixture
+from lp.rocks.interfaces.rockrecipe import (
+ ROCK_RECIPE_ALLOW_CREATE,
+ CannotParseRockcraftYaml,
+)
+from lp.rocks.interfaces.rockrecipejob import (
+ IRockRecipeJob,
+ IRockRecipeRequestBuildsJob,
+)
+from lp.rocks.model.rockrecipejob import (
+ RockRecipeJob,
+ RockRecipeJobType,
+ RockRecipeRequestBuildsJob,
+)
+from lp.services.config import config
+from lp.services.database.interfaces import IStore
+from lp.services.database.sqlbase import get_transaction_timestamp
+from lp.services.features.testing import FeatureFixture
+from lp.services.job.interfaces.job import JobStatus
+from lp.services.job.runner import JobRunner
+from lp.services.mail.sendmail import format_address_for_person
+from lp.testing import TestCaseWithFactory
+from lp.testing.dbuser import dbuser
+from lp.testing.layers import ZopelessDatabaseLayer
+
+
+class TestRockRecipeJob(TestCaseWithFactory):
+
+ layer = ZopelessDatabaseLayer
+
+ def setUp(self):
+ super().setUp()
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+
+ def test_provides_interface(self):
+ # `RockRecipeJob` objects provide `IRockRecipeJob`.
+ recipe = self.factory.makeRockRecipe()
+ self.assertProvides(
+ RockRecipeJob(recipe, RockRecipeJobType.REQUEST_BUILDS, {}),
+ IRockRecipeJob,
+ )
+
+
+class TestRockRecipeRequestBuildsJob(TestCaseWithFactory):
+
+ layer = ZopelessDatabaseLayer
+
+ def setUp(self):
+ super().setUp()
+ self.useFixture(FeatureFixture({ROCK_RECIPE_ALLOW_CREATE: "on"}))
+
+ def test_provides_interface(self):
+ # `RockRecipeRequestBuildsJob` objects provide
+ # `IRockRecipeRequestBuildsJob`.
+ recipe = self.factory.makeRockRecipe()
+ job = RockRecipeRequestBuildsJob.create(recipe, recipe.registrant)
+ self.assertProvides(job, IRockRecipeRequestBuildsJob)
+
+ def test___repr__(self):
+ # `RockRecipeRequestBuildsJob` objects have an informative __repr__.
+ recipe = self.factory.makeRockRecipe()
+ job = RockRecipeRequestBuildsJob.create(recipe, recipe.registrant)
+ self.assertEqual(
+ "<RockRecipeRequestBuildsJob for ~%s/%s/+rock/%s>"
+ % (recipe.owner.name, recipe.project.name, recipe.name),
+ repr(job),
+ )
+
+ def makeSeriesAndProcessors(self, distro_series_version, arch_tags):
+ distroseries = self.factory.makeDistroSeries(
+ distribution=getUtility(ILaunchpadCelebrities).ubuntu,
+ version=distro_series_version,
+ )
+ processors = [
+ self.factory.makeProcessor(
+ name=arch_tag, supports_virtualized=True
+ )
+ for arch_tag in arch_tags
+ ]
+ for processor in processors:
+ das = self.factory.makeDistroArchSeries(
+ distroseries=distroseries,
+ architecturetag=processor.name,
+ processor=processor,
+ )
+ das.addOrUpdateChroot(
+ self.factory.makeLibraryFileAlias(
+ filename="fake_chroot.tar.gz", db_only=True
+ )
+ )
+ return distroseries, processors
+
+ def test_run(self):
+ # The job requests builds and records the result.
+ distroseries, _ = self.makeSeriesAndProcessors(
+ "20.04", ["avr2001", "sparc64", "x32"]
+ )
+ [git_ref] = self.factory.makeGitRefs()
+ recipe = self.factory.makeRockRecipe(git_ref=git_ref)
+ expected_date_created = get_transaction_timestamp(IStore(recipe))
+ job = RockRecipeRequestBuildsJob.create(
+ recipe, recipe.registrant, channels={"core": "stable"}
+ )
+ rockcraft_yaml = dedent(
+ """\
+ bases:
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [avr2001]
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [x32]
+ """
+ )
+ self.useFixture(GitHostingFixture(blob=rockcraft_yaml))
+ with dbuser(config.IRockRecipeRequestBuildsJobSource.dbuser):
+ JobRunner([job]).runAll()
+ now = get_transaction_timestamp(IStore(recipe))
+ self.assertEmailQueueLength(0)
+ self.assertThat(
+ job,
+ MatchesStructure(
+ job=MatchesStructure.byEquality(status=JobStatus.COMPLETED),
+ date_created=Equals(expected_date_created),
+ date_finished=MatchesAll(
+ GreaterThan(expected_date_created), LessThan(now)
+ ),
+ error_message=Is(None),
+ builds=AfterPreprocessing(
+ set,
+ MatchesSetwise(
+ *[
+ MatchesStructure(
+ build_request=MatchesStructure.byEquality(
+ id=job.job.id
+ ),
+ requester=Equals(recipe.registrant),
+ recipe=Equals(recipe),
+ distro_arch_series=Equals(distroseries[arch]),
+ channels=Equals({"core": "stable"}),
+ )
+ for arch in ("avr2001", "x32")
+ ]
+ ),
+ ),
+ ),
+ )
+
+ def test_run_with_architectures(self):
+ # If the user explicitly requested architectures, the job passes
+ # those through when requesting builds, intersecting them with other
+ # constraints.
+ distroseries, _ = self.makeSeriesAndProcessors(
+ "20.04", ["avr2001", "sparc64", "x32"]
+ )
+ [git_ref] = self.factory.makeGitRefs()
+ recipe = self.factory.makeRockRecipe(git_ref=git_ref)
+ expected_date_created = get_transaction_timestamp(IStore(recipe))
+ job = RockRecipeRequestBuildsJob.create(
+ recipe,
+ recipe.registrant,
+ channels={"core": "stable"},
+ architectures=["sparc64", "x32"],
+ )
+ rockcraft_yaml = dedent(
+ """\
+ bases:
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [avr2001]
+ - build-on:
+ - name: ubuntu
+ channel: "20.04"
+ architectures: [x32]
+ """
+ )
+ self.useFixture(GitHostingFixture(blob=rockcraft_yaml))
+ with dbuser(config.IRockRecipeRequestBuildsJobSource.dbuser):
+ JobRunner([job]).runAll()
+ now = get_transaction_timestamp(IStore(recipe))
+ self.assertEmailQueueLength(0)
+ self.assertThat(
+ job,
+ MatchesStructure(
+ job=MatchesStructure.byEquality(status=JobStatus.COMPLETED),
+ date_created=Equals(expected_date_created),
+ date_finished=MatchesAll(
+ GreaterThan(expected_date_created), LessThan(now)
+ ),
+ error_message=Is(None),
+ builds=AfterPreprocessing(
+ set,
+ MatchesSetwise(
+ MatchesStructure(
+ build_request=MatchesStructure.byEquality(
+ id=job.job.id
+ ),
+ requester=Equals(recipe.registrant),
+ recipe=Equals(recipe),
+ distro_arch_series=Equals(distroseries["x32"]),
+ channels=Equals({"core": "stable"}),
+ )
+ ),
+ ),
+ ),
+ )
+
+ def test_run_failed(self):
+ # A failed run sets the job status to FAILED and records the error
+ # message.
+ [git_ref] = self.factory.makeGitRefs()
+ recipe = self.factory.makeRockRecipe(git_ref=git_ref)
+ expected_date_created = get_transaction_timestamp(IStore(recipe))
+ job = RockRecipeRequestBuildsJob.create(
+ recipe, recipe.registrant, channels={"core": "stable"}
+ )
+ self.useFixture(GitHostingFixture()).getBlob.failure = (
+ CannotParseRockcraftYaml("Nonsense on stilts")
+ )
+ with dbuser(config.IRockRecipeRequestBuildsJobSource.dbuser):
+ JobRunner([job]).runAll()
+ now = get_transaction_timestamp(IStore(recipe))
+ [notification] = self.assertEmailQueueLength(1)
+ self.assertThat(
+ dict(notification),
+ ContainsDict(
+ {
+ "From": Equals(config.canonical.noreply_from_address),
+ "To": Equals(format_address_for_person(recipe.registrant)),
+ "Subject": Equals(
+ "Launchpad error while requesting builds of %s"
+ % recipe.name
+ ),
+ }
+ ),
+ )
+ self.assertEqual(
+ "Launchpad encountered an error during the following operation: "
+ "requesting builds of %s. Nonsense on stilts" % recipe.name,
+ six.ensure_text(notification.get_payload(decode=True)),
+ )
+ self.assertThat(
+ job,
+ MatchesStructure(
+ job=MatchesStructure.byEquality(status=JobStatus.FAILED),
+ date_created=Equals(expected_date_created),
+ date_finished=MatchesAll(
+ GreaterThan(expected_date_created), LessThan(now)
+ ),
+ error_message=Equals("Nonsense on stilts"),
+ builds=AfterPreprocessing(set, MatchesSetwise()),
+ ),
+ )
diff --git a/lib/lp/services/config/schema-lazr.conf b/lib/lp/services/config/schema-lazr.conf
index 07f04c6..c285996 100644
--- a/lib/lp/services/config/schema-lazr.conf
+++ b/lib/lp/services/config/schema-lazr.conf
@@ -1995,6 +1995,11 @@ module: lp.charms.interfaces.charmrecipejob
dbuser: charm-build-job
crontab_group: MAIN
+[IRockRecipeRequestBuildsJobSource]
+module: lp.rocks.interfaces.rockrecipejob
+dbuser: rock-build-job
+crontab_group: MAIN
+
[ICIBuildUploadJobSource]
module: lp.soyuz.interfaces.archivejob
dbuser: uploader
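The new [IRockRecipeRequestBuildsJobSource] section ties the job source to the rock-build-job database role defined in security.cfg above; the job tests earlier in this diff switch to that role before running the job, roughly as in this sketch (names taken from this diff, not a definitive recipe):

    # Sketch: run a rock recipe request-builds job under the database
    # user configured for IRockRecipeRequestBuildsJobSource.
    from lp.services.config import config
    from lp.services.job.runner import JobRunner
    from lp.testing.dbuser import dbuser

    def run_request_builds_job(job):
        # The dbuser here resolves to "rock-build-job" via the
        # schema-lazr.conf section added in this diff.
        with dbuser(config.IRockRecipeRequestBuildsJobSource.dbuser):
            JobRunner([job]).runAll()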
diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py
index 9be2aad..fc8a055 100644
--- a/lib/lp/testing/factory.py
+++ b/lib/lp/testing/factory.py
@@ -209,6 +209,11 @@ from lp.registry.model.milestone import Milestone
from lp.registry.model.packaging import Packaging
from lp.registry.model.suitesourcepackage import SuiteSourcePackage
from lp.rocks.interfaces.rockrecipe import IRockRecipeSet
+<<<<<<< lib/lp/testing/factory.py
+=======
+from lp.rocks.interfaces.rockrecipebuild import IRockRecipeBuildSet
+from lp.rocks.model.rockrecipebuild import RockFile
+>>>>>>> lib/lp/testing/factory.py
from lp.services.auth.interfaces import IAccessTokenSet
from lp.services.auth.utils import create_access_token_secret
from lp.services.compat import message_as_bytes
@@ -6961,6 +6966,84 @@ class LaunchpadObjectFactory(ObjectFactory):
IStore(recipe).flush()
return recipe
+<<<<<<< lib/lp/testing/factory.py
+=======
+ def makeRockRecipeBuildRequest(
+ self, recipe=None, requester=None, channels=None, architectures=None
+ ):
+ """Make a new RockRecipeBuildRequest."""
+ if recipe is None:
+ recipe = self.makeRockRecipe()
+ if requester is None:
+ if recipe.owner.is_team:
+ requester = recipe.owner.teamowner
+ else:
+ requester = recipe.owner
+ return recipe.requestBuilds(
+ requester, channels=channels, architectures=architectures
+ )
+
+ def makeRockRecipeBuild(
+ self,
+ registrant=None,
+ recipe=None,
+ build_request=None,
+ requester=None,
+ distro_arch_series=None,
+ channels=None,
+ store_upload_metadata=None,
+ date_created=DEFAULT,
+ status=BuildStatus.NEEDSBUILD,
+ builder=None,
+ duration=None,
+ **kwargs,
+ ):
+ if recipe is None:
+ if registrant is None:
+ if build_request is not None:
+ registrant = build_request.requester
+ else:
+ registrant = requester
+ recipe = self.makeRockRecipe(registrant=registrant, **kwargs)
+ if distro_arch_series is None:
+ distro_arch_series = self.makeDistroArchSeries()
+ if build_request is None:
+ build_request = self.makeRockRecipeBuildRequest(
+ recipe=recipe, requester=requester, channels=channels
+ )
+ build = getUtility(IRockRecipeBuildSet).new(
+ build_request,
+ recipe,
+ distro_arch_series,
+ channels=channels,
+ store_upload_metadata=store_upload_metadata,
+ date_created=date_created,
+ )
+ if duration is not None:
+ removeSecurityProxy(build).updateStatus(
+ BuildStatus.BUILDING,
+ builder=builder,
+ date_started=build.date_created,
+ )
+ removeSecurityProxy(build).updateStatus(
+ status,
+ builder=builder,
+ date_finished=build.date_started + duration,
+ )
+ else:
+ removeSecurityProxy(build).updateStatus(status, builder=builder)
+ IStore(build).flush()
+ return build
+
+ def makeRockFile(self, build=None, library_file=None):
+ if build is None:
+ build = self.makeRockRecipeBuild()
+ if library_file is None:
+ library_file = self.makeLibraryFileAlias()
+ return ProxyFactory(RockFile(build=build, library_file=library_file))
+
+>>>>>>> lib/lp/testing/factory.py
def makeCIBuild(
self,
git_repository=None,