← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/lpcraft:multi-job-stages into lpcraft:main

 

Colin Watson has proposed merging ~cjwatson/lpcraft:multi-job-stages into lpcraft:main.

Commit message:
Support pipeline stages with multiple jobs

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/lpcraft/+git/lpcraft/+merge/414113

The specification calls for either of these kinds of pipeline stage to be valid:

    pipeline:
      # this stage is a list, which means jobs are executed in parallel
      - [test, lint]
      # this stage will only execute if previous steps in the pipeline
      # passed
      - build-wheel

lpcraft's documentation agrees, but the implementation currently rejects a pipeline stage that is a list of job names.  Implement a minimal version of this.  The jobs aren't yet executed in parallel, but for error handling purposes we act as if they were: all jobs in a stage are run even if some of them fail, and we only proceed to the next stage if they all succeed.
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/lpcraft:multi-job-stages into lpcraft:main.
diff --git a/lpcraft/commands/run.py b/lpcraft/commands/run.py
index 2bbf75f..c83b668 100644
--- a/lpcraft/commands/run.py
+++ b/lpcraft/commands/run.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Canonical Ltd.  This software is licensed under the
+# Copyright 2021-2022 Canonical Ltd.  This software is licensed under the
 # GNU General Public License version 3 (see the file LICENSE).
 
 import fnmatch
@@ -270,12 +270,29 @@ def run(args: Namespace) -> int:
     provider = get_provider()
     provider.ensure_provider_is_available()
 
-    for job_name in config.pipeline:
-        jobs = config.jobs.get(job_name, [])
-        if not jobs:
-            raise CommandError(f"No job definition for {job_name!r}")
-        for job in jobs:
-            _run_job(job_name, job, provider, getattr(args, "output", None))
+    for stage in config.pipeline:
+        stage_failed = False
+        for job_name in stage:
+            try:
+                jobs = config.jobs.get(job_name, [])
+                if not jobs:
+                    raise CommandError(f"No job definition for {job_name!r}")
+                for job in jobs:
+                    _run_job(
+                        job_name, job, provider, getattr(args, "output", None)
+                    )
+            except CommandError as e:
+                if len(stage) == 1:
+                    # Single-job stage, so just reraise this in order to get
+                    # simpler error messages.
+                    raise
+                else:
+                    emit.error(e)
+                    stage_failed = True
+        if stage_failed:
+            raise CommandError(
+                f"Some jobs in {stage} failed; stopping.", retcode=1
+            )
 
     return 0
 
diff --git a/lpcraft/commands/tests/test_run.py b/lpcraft/commands/tests/test_run.py
index 07cc226..b74099e 100644
--- a/lpcraft/commands/tests/test_run.py
+++ b/lpcraft/commands/tests/test_run.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Canonical Ltd.  This software is licensed under the
+# Copyright 2021-2022 Canonical Ltd.  This software is licensed under the
 # GNU General Public License version 3 (see the file LICENSE).
 
 import io
@@ -354,6 +354,140 @@ class TestRun(RunBaseTestCase):
 
     @patch("lpcraft.commands.run.get_provider")
     @patch("lpcraft.commands.run.get_host_architecture", return_value="amd64")
+    def test_parallel_jobs_some_fail(
+        self, mock_get_host_architecture, mock_get_provider
+    ):
+        # Right now "parallel" jobs are not in fact executed in parallel,
+        # but we act as if they are for the purpose of error handling: even
+        # one job in a stage fails, we run all the jobs in that stage before
+        # stopping.
+        launcher = Mock(spec=launch)
+        provider = self.makeLXDProvider(lxd_launcher=launcher)
+        mock_get_provider.return_value = provider
+        execute_run = launcher.return_value.execute_run
+        execute_run.side_effect = iter(
+            [subprocess.CompletedProcess([], ret) for ret in (2, 0, 0)]
+        )
+        config = dedent(
+            """
+            pipeline:
+                - [lint, test]
+                - build-wheel
+
+            jobs:
+                lint:
+                    series: focal
+                    architectures: amd64
+                    run: flake8
+                test:
+                    series: focal
+                    architectures: amd64
+                    run: tox
+                build-wheel:
+                    series: bionic
+                    architectures: amd64
+                    run: pyproject-build
+            """
+        )
+        Path(".launchpad.yaml").write_text(config)
+
+        result = self.run_command("run")
+
+        self.assertThat(
+            result,
+            MatchesStructure.byEquality(
+                exit_code=1,
+                errors=[
+                    CommandError(
+                        "Job 'lint' for focal/amd64 failed with exit status "
+                        "2.",
+                        retcode=2,
+                    ),
+                    CommandError(
+                        "Some jobs in ['lint', 'test'] failed; stopping."
+                    ),
+                ],
+            ),
+        )
+        self.assertEqual(
+            ["focal", "focal"],
+            [c.kwargs["image_name"] for c in launcher.call_args_list],
+        )
+        self.assertEqual(
+            [
+                call(
+                    ["bash", "--noprofile", "--norc", "-ec", command],
+                    cwd=Path("/root/project"),
+                    env={},
+                    stdout=ANY,
+                    stderr=ANY,
+                )
+                for command in ("flake8", "tox")
+            ],
+            execute_run.call_args_list,
+        )
+
+    @patch("lpcraft.commands.run.get_provider")
+    @patch("lpcraft.commands.run.get_host_architecture", return_value="amd64")
+    def test_parallel_jobs_all_succeed(
+        self, mock_get_host_architecture, mock_get_provider
+    ):
+        # Right now "parallel" jobs are not in fact executed in parallel,
+        # but we do at least wait for all of them to succeed before
+        # proceeding to the next stage in the pipeline.
+        launcher = Mock(spec=launch)
+        provider = self.makeLXDProvider(lxd_launcher=launcher)
+        mock_get_provider.return_value = provider
+        execute_run = launcher.return_value.execute_run
+        execute_run.side_effect = iter(
+            [subprocess.CompletedProcess([], ret) for ret in (0, 0, 0)]
+        )
+        config = dedent(
+            """
+            pipeline:
+                - [lint, test]
+                - build-wheel
+
+            jobs:
+                lint:
+                    series: focal
+                    architectures: amd64
+                    run: flake8
+                test:
+                    series: focal
+                    architectures: amd64
+                    run: tox
+                build-wheel:
+                    series: bionic
+                    architectures: amd64
+                    run: pyproject-build
+            """
+        )
+        Path(".launchpad.yaml").write_text(config)
+
+        result = self.run_command("run")
+
+        self.assertEqual(0, result.exit_code)
+        self.assertEqual(
+            ["focal", "focal", "bionic"],
+            [c.kwargs["image_name"] for c in launcher.call_args_list],
+        )
+        self.assertEqual(
+            [
+                call(
+                    ["bash", "--noprofile", "--norc", "-ec", command],
+                    cwd=Path("/root/project"),
+                    env={},
+                    stdout=ANY,
+                    stderr=ANY,
+                )
+                for command in ("flake8", "tox", "pyproject-build")
+            ],
+            execute_run.call_args_list,
+        )
+
+    @patch("lpcraft.commands.run.get_provider")
+    @patch("lpcraft.commands.run.get_host_architecture", return_value="amd64")
     def test_expands_matrix(
         self, mock_get_host_architecture, mock_get_provider
     ):
diff --git a/lpcraft/config.py b/lpcraft/config.py
index 98931f6..b20322e 100644
--- a/lpcraft/config.py
+++ b/lpcraft/config.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Canonical Ltd.  This software is licensed under the
+# Copyright 2021-2022 Canonical Ltd.  This software is licensed under the
 # GNU General Public License version 3 (see the file LICENSE).
 
 import re
@@ -101,9 +101,15 @@ def _expand_job_values(
 class Config(ModelConfigDefaults):
     """A .launchpad.yaml configuration file."""
 
-    pipeline: List[_Identifier]
+    pipeline: List[List[_Identifier]]
     jobs: Dict[StrictStr, List[Job]]
 
+    @pydantic.validator("pipeline", pre=True)
+    def validate_pipeline(
+        cls, v: List[Union[_Identifier, List[_Identifier]]]
+    ) -> List[List[_Identifier]]:
+        return [[stage] if isinstance(stage, str) else stage for stage in v]
+
     # XXX cjwatson 2021-11-17: This expansion strategy works, but it may
     # produce suboptimal error messages, and doesn't have a good way to do
     # things like limiting the keys that can be set in a matrix.
diff --git a/lpcraft/tests/test_config.py b/lpcraft/tests/test_config.py
index f07136d..96025b6 100644
--- a/lpcraft/tests/test_config.py
+++ b/lpcraft/tests/test_config.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Canonical Ltd.  This software is licensed under the
+# Copyright 2021-2022 Canonical Ltd.  This software is licensed under the
 # GNU General Public License version 3 (see the file LICENSE).
 
 from datetime import timedelta
@@ -33,7 +33,7 @@ class TestConfig(TestCase):
             dedent(
                 """
                 pipeline:
-                    - test
+                    - [test]
 
                 jobs:
                     test:
@@ -48,7 +48,7 @@ class TestConfig(TestCase):
         self.assertThat(
             config,
             MatchesStructure(
-                pipeline=Equals(["test"]),
+                pipeline=Equals([["test"]]),
                 jobs=MatchesDict(
                     {
                         "test": MatchesListwise(
@@ -65,6 +65,25 @@ class TestConfig(TestCase):
             ),
         )
 
+    def test_load_single_pipeline(self):
+        # A single pipeline element can be written as a string, and is
+        # automatically wrapped in a list.
+        path = self.create_config(
+            dedent(
+                """
+                pipeline:
+                    - test
+
+                jobs:
+                    test:
+                        series: focal
+                        architectures: [amd64]
+                """
+            )
+        )
+        config = Config.load(path)
+        self.assertEqual([["test"]], config.pipeline)
+
     def test_load_single_architecture(self):
         # A single architecture can be written as a string, and is
         # automatically wrapped in a list.
@@ -165,7 +184,7 @@ class TestConfig(TestCase):
         self.assertThat(
             config,
             MatchesStructure(
-                pipeline=Equals(["test"]),
+                pipeline=Equals([["test"]]),
                 jobs=MatchesDict(
                     {
                         "test": MatchesListwise(