← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~andersson123/lpci:devel-release-support into lpci:main

 

Tim Andersson has proposed merging ~andersson123/lpci:devel-release-support into lpci:main.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~andersson123/lpci/+git/lpci/+merge/461564
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~andersson123/lpci:devel-release-support into lpci:main.
diff --git a/lpci/commands/run.py b/lpci/commands/run.py
index e42a8a9..c52a884 100644
--- a/lpci/commands/run.py
+++ b/lpci/commands/run.py
@@ -11,6 +11,7 @@ import subprocess
 import tempfile
 from argparse import ArgumentParser, Namespace
 from pathlib import Path, PurePath
+from distro_info import UbuntuDistroInfo
 from tempfile import NamedTemporaryFile
 from typing import Dict, List, Optional, Set
 
@@ -425,6 +426,35 @@ def _install_apt_packages(
         )
 
 
+def _run_instance_command_silent(
+    command: str,
+    job_name: str,
+    job: Job,
+    instance: lxd.LXDInstance,
+    host_architecture: str,
+    remote_cwd: Path,
+    environment: Optional[Dict[str, Optional[str]]],
+    root: bool = True,
+) -> None:
+    full_run_cmd = ["bash", "--noprofile", "--norc", "-ec", command]
+    emit.message(f"Running command for the job: {full_run_cmd}")
+    with open("/dev/null", "w") as stream:
+        proc = instance.execute_run(
+            full_run_cmd,
+            cwd=remote_cwd,
+            env=environment,
+            stdout=stream,
+            stderr=stream,
+        )
+    if proc.returncode != 0:
+        raise CommandError(
+            f"Job {job_name!r} for "
+            f"{job.series}/{host_architecture} failed with "
+            f"exit status {proc.returncode}.",
+            retcode=proc.returncode,
+        )
+
+
 def _run_instance_command(
     command: str,
     job_name: str,
@@ -539,6 +569,43 @@ def _run_job(
 
     cwd = Path.cwd()
     remote_cwd = env.get_managed_environment_project_path()
+    # (An earlier draft performed the devel-series upgrade here, before the
+    # environment is launched; it now happens inside the launched
+    # environment below.)
 
     emit.progress(
         f"Launching environment for {job.series}/{host_architecture}"
@@ -551,6 +618,44 @@ def _run_job(
         gpu_nvidia=gpu_nvidia,
         root=root,
     ) as instance:
+        # NOTE(review): this branch launches a *second* environment and
+        # rebinds the outer ``instance``; when the inner ``with`` exits, the
+        # remainder of the job runs against a torn-down instance. The upgrade
+        # commands should run in the already-launched instance instead.
+        if job.series == "devel":
+            udi = UbuntuDistroInfo()
+            supported = udi.supported()
+            # only caveat is that distro-info-data must be up to date for this to work on
+            # the machine hosting the container.
+            devel = udi.devel()
+            if devel in supported:
+                supported.remove(devel)
+            job.series = supported[-1]
+            emit.message("Job series is 'devel', upgrading from latest release to development release.")
+            with provider.launched_environment(
+                project_name=cwd.name,
+                project_path=cwd,
+                series=job.series,
+                architecture=host_architecture,
+                gpu_nvidia=gpu_nvidia,
+                root=root,
+            ) as instance:
+                upgrade_commands = [
+                    f"sed -i 's/{job.series}/{devel}/g' /etc/apt/sources.list",
+                    "sudo DEBIAN_FRONTEND=noninteractive apt update -y",
+                    "sudo DEBIAN_FRONTEND=noninteractive apt upgrade -y",
+                    "sudo DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y",
+                    "sudo DEBIAN_FRONTEND=noninteractive apt-get autoclean -y",
+                    "sudo DEBIAN_FRONTEND=noninteractive apt-get autoremove -y",
+                ]
+                for cmd in upgrade_commands:
+                    _run_instance_command_silent(
+                        command=cmd,
+                        job_name=job_name,
+                        job=job,
+                        instance=instance,
+                        host_architecture=host_architecture,
+                        remote_cwd=remote_cwd,
+                        environment=environment,
+                        root=root,
+                    )
         snaps = list(itertools.chain(*pm.hook.lpci_install_snaps()))
         for snap in snaps:
             emit.progress(
@@ -720,6 +825,9 @@ class RunCommand(BaseCommand):
 
         provider = get_provider()
         provider.ensure_provider_is_available()
+        # TODO: evaluate performing the devel-series upgrade here, immediately
+        # after the provider has been created and verified available.
 
         secrets = {}
         if args.secrets_file:
diff --git a/lpci/providers/_lxd.py b/lpci/providers/_lxd.py
index c673502..83c97bc 100644
--- a/lpci/providers/_lxd.py
+++ b/lpci/providers/_lxd.py
@@ -285,6 +285,7 @@ class LXDProvider(Provider):
         :param series: Distribution series name.
         :param architecture: Targeted architecture name.
         """
+        # NOTE: SERIES_TO_BUILDD_IMAGE_ALIAS would need a mapping (or
+        # special-casing) for a "devel" series to be supported here.
         alias = SERIES_TO_BUILDD_IMAGE_ALIAS[series]
         instance_name = self.get_instance_name(
             project_name=project_name,
@@ -347,6 +348,8 @@ class LXDProvider(Provider):
             )
         except (bases.BaseConfigurationError, lxd.LXDError) as error:
             raise CommandError(str(error)) from error
+
+        # TODO: the devel-series upgrade could alternatively be performed
+        # here, after the base configuration has completed.
 
         managed_project_path = get_managed_environment_project_path()
         try:
diff --git a/requirements.in b/requirements.in
index 7cdae3e..fdc1bd7 100644
--- a/requirements.in
+++ b/requirements.in
@@ -2,6 +2,7 @@ craft-cli
 craft-providers>=1.19.0  # 1.19.0 added support of bases.BuilddBaseAlias.MANTIC
 launchpadlib[keyring]
 pydantic
+distro-info
 PyYAML>=6.0.1  # 6.0.0 is not compatible with a current cython version
 python-dotenv
 pluggy
diff --git a/setup.cfg b/setup.cfg
index edde67e..35001e2 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -30,6 +30,7 @@ install_requires =
     launchpadlib[keyring]
     lazr.restfulclient
     pluggy
+    distro-info
     pydantic
     python-dotenv
 python_requires = >=3.8

Follow ups