launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/lpcraft:run-one into lpcraft:main


Colin Watson has proposed merging ~cjwatson/lpcraft:run-one into lpcraft:main.

Commit message:
Add a "run-one" command

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/lpcraft/+git/lpcraft/+merge/413081

We need a way to run only a single job from a pipeline, both because we want an easy way to record each job's log output separately, and because we want to be able to retry individual jobs.

Matrix expansion makes this somewhat complicated, because it can produce multiple expanded job definitions with the same name that differ only in (for example) series or environment.  The simplest approach I can think of is to take an index as well as the job name; this is safe since matrix expansion has a stable order, and in Launchpad the dispatcher end is going to need to do its own matrix expansion anyway.
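
For illustration (this mirrors the new test_expands_matrix test below), given a job whose matrix expands to two definitions:

    jobs:
        test:
            matrix:
                - series: bionic
                  architectures: amd64
                - series: focal
                  architectures: [amd64, s390x]
            run: tox

"lpcraft run-one test 0" selects the bionic variant and "lpcraft run-one test 1" selects the focal one, because the expanded definitions keep the order of the matrix entries.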

I went for a separate "run-one" command rather than options to "run" since the command-line syntax is sufficiently different that it makes sense to validate it separately.
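
As a rough usage sketch (the exact arguments are wired up in the main.py changes below), retrying only the second expanded "test" job and collecting its output would look something like:

    lpcraft run-one --output out test 1

Here "out" is just an example output directory, and the trailing arguments are the job name and the zero-based index into its expanded definitions.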
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/lpcraft:run-one into lpcraft:main.
diff --git a/lpcraft/commands/run.py b/lpcraft/commands/run.py
index 685ab70..6f2e8cc 100644
--- a/lpcraft/commands/run.py
+++ b/lpcraft/commands/run.py
@@ -7,7 +7,7 @@ import json
 import os
 from argparse import Namespace
 from pathlib import Path, PurePath
-from typing import List, Set
+from typing import List, Optional, Set
 
 from craft_cli import emit
 from craft_providers import Executor
@@ -15,9 +15,9 @@ from craft_providers.actions.snap_installer import install_from_store
 from dotenv import dotenv_values
 
 from lpcraft import env
-from lpcraft.config import Config, Output
+from lpcraft.config import Config, Job, Output
 from lpcraft.errors import CommandError
-from lpcraft.providers import get_provider
+from lpcraft.providers import Provider, get_provider
 from lpcraft.utils import get_host_architecture
 
 
@@ -159,11 +159,81 @@ def _copy_output_properties(
         json.dump(properties, f)
 
 
+def _run_job(
+    job_name: str, job: Job, provider: Provider, output: Optional[Path]
+) -> None:
+    """Run a single job."""
+    host_architecture = get_host_architecture()
+    if host_architecture not in job.architectures:
+        return
+    if job.run is None:
+        raise CommandError(
+            f"Job {job_name!r} for {job.series}/{host_architecture} "
+            f"does not set 'run'"
+        )
+
+    cwd = Path.cwd()
+    remote_cwd = env.get_managed_environment_project_path()
+
+    emit.progress(
+        f"Launching environment for {job.series}/{host_architecture}"
+    )
+    with provider.launched_environment(
+        project_name=cwd.name,
+        project_path=cwd,
+        series=job.series,
+        architecture=host_architecture,
+    ) as instance:
+        if job.snaps:
+            for snap in job.snaps:
+                emit.progress(f"Running `snap install {snap}`")
+                install_from_store(
+                    executor=instance,
+                    snap_name=snap,
+                    channel="stable",
+                    classic=True,
+                )
+        if job.packages:
+            packages_cmd = ["apt", "install", "-y"] + job.packages
+            emit.progress("Installing system packages")
+            with emit.open_stream(f"Running {packages_cmd}") as stream:
+                proc = instance.execute_run(
+                    packages_cmd,
+                    cwd=remote_cwd,
+                    env=job.environment,
+                    stdout=stream,
+                    stderr=stream,
+                )
+        run_cmd = ["bash", "--noprofile", "--norc", "-ec", job.run]
+        emit.progress("Running the job")
+        with emit.open_stream(f"Running {run_cmd}") as stream:
+            proc = instance.execute_run(
+                run_cmd,
+                cwd=remote_cwd,
+                env=job.environment,
+                stdout=stream,
+                stderr=stream,
+            )
+        if proc.returncode != 0:
+            raise CommandError(
+                f"Job {job_name!r} for "
+                f"{job.series}/{host_architecture} failed with "
+                f"exit status {proc.returncode}.",
+                retcode=proc.returncode,
+            )
+
+        if job.output is not None and output is not None:
+            target_path = output / job_name / job.series / host_architecture
+            target_path.mkdir(parents=True, exist_ok=True)
+            _copy_output_paths(job.output, remote_cwd, instance, target_path)
+            _copy_output_properties(
+                job.output, remote_cwd, instance, target_path
+            )
+
+
 def run(args: Namespace) -> int:
     """Run a pipeline, launching managed environments as needed."""
     config = Config.load(Path(".launchpad.yaml"))
-    host_architecture = get_host_architecture()
-    cwd = Path.cwd()
 
     provider = get_provider()
     provider.ensure_provider_is_available()
@@ -173,76 +243,27 @@ def run(args: Namespace) -> int:
         if not jobs:
             raise CommandError(f"No job definition for {job_name!r}")
         for job in jobs:
-            if host_architecture not in job.architectures:
-                continue
-            if job.run is None:
-                raise CommandError(
-                    f"Job {job_name!r} for {job.series}/{host_architecture} "
-                    f"does not set 'run'"
-                )
+            _run_job(job_name, job, provider, getattr(args, "output", None))
 
-            remote_cwd = env.get_managed_environment_project_path()
+    return 0
 
-            emit.progress(
-                f"Launching environment for {job.series}/{host_architecture}"
-            )
-            with provider.launched_environment(
-                project_name=cwd.name,
-                project_path=cwd,
-                series=job.series,
-                architecture=host_architecture,
-            ) as instance:
-                if job.snaps:
-                    for snap in job.snaps:
-                        emit.progress(f"Running `snap install {snap}`")
-                        install_from_store(
-                            executor=instance,
-                            snap_name=snap,
-                            channel="stable",
-                            classic=True,
-                        )
-                if job.packages:
-                    packages_cmd = ["apt", "install", "-y"] + job.packages
-                    emit.progress("Installing system packages")
-                    with emit.open_stream(f"Running {packages_cmd}") as stream:
-                        proc = instance.execute_run(
-                            packages_cmd,
-                            cwd=remote_cwd,
-                            env=job.environment,
-                            stdout=stream,
-                            stderr=stream,
-                        )
-                run_cmd = ["bash", "--noprofile", "--norc", "-ec", job.run]
-                emit.progress("Running the job")
-                with emit.open_stream(f"Running {run_cmd}") as stream:
-                    proc = instance.execute_run(
-                        run_cmd,
-                        cwd=remote_cwd,
-                        env=job.environment,
-                        stdout=stream,
-                        stderr=stream,
-                    )
-                if proc.returncode != 0:
-                    raise CommandError(
-                        f"Job {job_name!r} for "
-                        f"{job.series}/{host_architecture} failed with "
-                        f"exit status {proc.returncode}.",
-                        retcode=proc.returncode,
-                    )
-
-                if (
-                    job.output is not None
-                    and getattr(args, "output", None) is not None
-                ):
-                    target_path = (
-                        args.output / job_name / job.series / host_architecture
-                    )
-                    target_path.mkdir(parents=True, exist_ok=True)
-                    _copy_output_paths(
-                        job.output, remote_cwd, instance, target_path
-                    )
-                    _copy_output_properties(
-                        job.output, remote_cwd, instance, target_path
-                    )
+
+def run_one(args: Namespace) -> int:
+    """Select and run a single job from a pipeline."""
+    config = Config.load(Path(".launchpad.yaml"))
+
+    jobs = config.jobs.get(args.job, [])
+    if not jobs:
+        raise CommandError(f"No job definition for {args.job!r}")
+    if args.index >= len(jobs):
+        raise CommandError(
+            f"No job definition with index {args.index} for {args.job!r}"
+        )
+    job = jobs[args.index]
+
+    provider = get_provider()
+    provider.ensure_provider_is_available()
+
+    _run_job(args.job, job, provider, getattr(args, "output", None))
 
     return 0
diff --git a/lpcraft/commands/tests/test_run.py b/lpcraft/commands/tests/test_run.py
index a5e5000..5e4639e 100644
--- a/lpcraft/commands/tests/test_run.py
+++ b/lpcraft/commands/tests/test_run.py
@@ -55,7 +55,9 @@ class LocalExecuteRun:
         return subprocess.run(command, **run_kwargs)
 
 
-class TestRun(CommandBaseTestCase):
+class RunBaseTestCase(CommandBaseTestCase):
+    """Common code for run and run-one tests."""
+
     def setUp(self):
         super().setUp()
         self.tmp_project_path = Path(
@@ -82,6 +84,8 @@ class TestRun(CommandBaseTestCase):
             lxd_launcher=lxd_launcher,
         )
 
+
+class TestRun(RunBaseTestCase):
     def test_missing_config_file(self):
         result = self.run_command("run")
 
@@ -1013,3 +1017,173 @@ class TestRun(CommandBaseTestCase):
             ],
             execute_run.call_args_list,
         )
+
+
+class TestRunOne(RunBaseTestCase):
+    def test_missing_config_file(self):
+        result = self.run_command("run-one", "test", "0")
+
+        self.assertThat(
+            result,
+            MatchesStructure.byEquality(
+                exit_code=1,
+                errors=[
+                    YAMLError("Couldn't find config file '.launchpad.yaml'")
+                ],
+            ),
+        )
+
+    @patch("lpcraft.commands.run.get_provider")
+    @patch("lpcraft.commands.run.get_host_architecture", return_value="amd64")
+    def test_job_not_defined(
+        self, mock_get_host_architecture, mock_get_provider
+    ):
+        mock_get_provider.return_value = self.makeLXDProvider()
+        config = dedent(
+            """
+            pipeline:
+                - test
+
+            jobs: {}
+            """
+        )
+        Path(".launchpad.yaml").write_text(config)
+
+        result = self.run_command("run-one", "test", "0")
+
+        self.assertThat(
+            result,
+            MatchesStructure.byEquality(
+                exit_code=1,
+                errors=[CommandError("No job definition for 'test'")],
+            ),
+        )
+
+    @patch("lpcraft.commands.run.get_provider")
+    @patch("lpcraft.commands.run.get_host_architecture", return_value="amd64")
+    def test_job_index_not_defined(
+        self, mock_get_host_architecture, mock_get_provider
+    ):
+        mock_get_provider.return_value = self.makeLXDProvider()
+        config = dedent(
+            """
+            pipeline:
+                - test
+
+            jobs:
+                test:
+                    series: focal
+                    architectures: amd64
+                    run: tox
+            """
+        )
+        Path(".launchpad.yaml").write_text(config)
+
+        result = self.run_command("run-one", "test", "1")
+
+        self.assertThat(
+            result,
+            MatchesStructure.byEquality(
+                exit_code=1,
+                errors=[
+                    CommandError("No job definition with index 1 for 'test'")
+                ],
+            ),
+        )
+
+    @patch("lpcraft.commands.run.get_provider")
+    @patch("lpcraft.commands.run.get_host_architecture", return_value="amd64")
+    def test_job_fails(self, mock_get_host_architecture, mock_get_provider):
+        launcher = Mock(spec=launch)
+        provider = self.makeLXDProvider(lxd_launcher=launcher)
+        mock_get_provider.return_value = provider
+        execute_run = launcher.return_value.execute_run
+        execute_run.return_value = subprocess.CompletedProcess([], 2)
+        config = dedent(
+            """
+            pipeline:
+                - test
+                - build-wheel
+
+            jobs:
+                test:
+                    series: focal
+                    architectures: amd64
+                    run: tox
+                build-wheel:
+                    series: focal
+                    architectures: amd64
+                    run: pyproject-build
+            """
+        )
+        Path(".launchpad.yaml").write_text(config)
+
+        result = self.run_command("run-one", "build-wheel", "0")
+
+        self.assertThat(
+            result,
+            MatchesStructure.byEquality(
+                exit_code=2,
+                errors=[
+                    CommandError(
+                        "Job 'build-wheel' for focal/amd64 failed with exit "
+                        "status 2.",
+                        retcode=2,
+                    )
+                ],
+            ),
+        )
+        execute_run.assert_called_once_with(
+            ["bash", "--noprofile", "--norc", "-ec", "pyproject-build"],
+            cwd=Path("/root/project"),
+            env=None,
+            stdout=ANY,
+            stderr=ANY,
+        )
+
+    @patch("lpcraft.commands.run.get_provider")
+    @patch("lpcraft.commands.run.get_host_architecture", return_value="amd64")
+    def test_expands_matrix(
+        self, mock_get_host_architecture, mock_get_provider
+    ):
+        launcher = Mock(spec=launch)
+        provider = self.makeLXDProvider(lxd_launcher=launcher)
+        mock_get_provider.return_value = provider
+        execute_run = launcher.return_value.execute_run
+        execute_run.return_value = subprocess.CompletedProcess([], 0)
+        config = dedent(
+            """
+            pipeline:
+                - test
+                - build-wheel
+
+            jobs:
+                test:
+                    matrix:
+                        - series: bionic
+                          architectures: amd64
+                        - series: focal
+                          architectures: [amd64, s390x]
+                    run: tox
+                build-wheel:
+                    series: bionic
+                    architectures: amd64
+                    run: pyproject-build
+            """
+        )
+        Path(".launchpad.yaml").write_text(config)
+
+        result = self.run_command("run-one", "test", "1")
+
+        self.assertEqual(0, result.exit_code)
+        self.assertEqual(
+            ["focal"],
+            [c.kwargs["image_name"] for c in launcher.call_args_list],
+        )
+        execute_run.assert_called_once_with(
+            ["bash", "--noprofile", "--norc", "-ec", "tox"],
+            cwd=Path("/root/project"),
+            env=None,
+            stdout=ANY,
+            stderr=ANY,
+        )
diff --git a/lpcraft/main.py b/lpcraft/main.py
index ee4bb7d..94ea82e 100644
--- a/lpcraft/main.py
+++ b/lpcraft/main.py
@@ -11,7 +11,7 @@ from typing import List, Optional
 from craft_cli import CraftError, EmitterMode, emit
 
 from lpcraft._version import version_description as lpcraft_version
-from lpcraft.commands.run import run
+from lpcraft.commands.run import run, run_one
 from lpcraft.commands.version import version
 from lpcraft.errors import CommandError
 
@@ -66,9 +66,21 @@ def main(argv: Optional[List[str]] = None) -> int:
     parser_run.add_argument(
         "--output", type=Path, help="Write output files to this directory."
     )
-
     parser_run.set_defaults(func=run)
 
+    parser_run_one = subparsers.add_parser("run-one", help=run_one.__doc__)
+    parser_run_one.add_argument(
+        "--output", type=Path, help="Write output files to this directory."
+    )
+    parser_run_one.add_argument("job", help="Run only this job name.")
+    parser_run_one.add_argument(
+        "index",
+        type=int,
+        metavar="N",
+        help="Run only the Nth job with the given name (indexing from 0).",
+    )
+    parser_run_one.set_defaults(func=run_one)
+
     parser_version = subparsers.add_parser("version", help=version.__doc__)
     parser_version.set_defaults(func=version)
 
diff --git a/lpcraft/providers/__init__.py b/lpcraft/providers/__init__.py
index cdc1577..df1419b 100644
--- a/lpcraft/providers/__init__.py
+++ b/lpcraft/providers/__init__.py
@@ -2,6 +2,7 @@
 # GNU General Public License version 3 (see the file LICENSE).
 
 __all__ = [
+    "Provider",
     "get_provider",
 ]
 
