canonical-ubuntu-qa team mailing list archive - Message #04320
[Merge] ~hyask/autopkgtest-cloud:skia/lxd_instance_cleanup into autopkgtest-cloud:master
Skia has proposed merging ~hyask/autopkgtest-cloud:skia/lxd_instance_cleanup into autopkgtest-cloud:master.
Requested reviews:
Canonical's Ubuntu QA (canonical-ubuntu-qa)
For more details, see:
https://code.launchpad.net/~hyask/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/467265
Two independent improvements to the armhf LXD worker:
- set a password on the remotes to allow SPICE console login.
- correctly clean up all remotes, even if one of them throws errors (see the sketch below).
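For the second point, the pattern is simply to run each remote's cleanup inside its own try/except, so that one unreachable or misbehaving remote only logs an error instead of aborting the whole run. A minimal sketch of that pattern, with placeholder remote names and a stubbed-out cleanup body (not the actual script):

#!/usr/bin/python3
# Sketch only: remote names and the cleanup body are placeholders.
import subprocess
import sys


def cleanup_remote(remote):
    # Stand-in for the real work: list (and eventually delete) the
    # autopkgtest-lxd-* containers on this remote.
    subprocess.check_output(["lxc", "list", "-fjson", remote + ":"])


def main():
    remotes = ["lxd-armhf-example1", "lxd-armhf-example2"]  # hypothetical names
    for remote in remotes:
        try:
            cleanup_remote(remote)
        except Exception as e:
            # A failing remote is reported but no longer stops the loop.
            print(f"Error while checking {remote}: {e}", file=sys.stderr)


if __name__ == "__main__":
    main()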
--
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~hyask/autopkgtest-cloud:skia/lxd_instance_cleanup into autopkgtest-cloud:master.
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/armhf-lxd.userdata b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/armhf-lxd.userdata
index 6f5973b..9141db5 100644
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/armhf-lxd.userdata
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/armhf-lxd.userdata
@@ -7,6 +7,10 @@ manage_etc_hosts: true
 apt_update: true
 apt_upgrade: true
+password: ubuntu
+chpasswd: { expire: False }
+ssh_pwauth: False
+
 packages:
   - distro-info
   - libdpkg-perl
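For reference, in cloud-init terms the new keys mean: "password: ubuntu" sets the default user's password, "chpasswd: { expire: False }" keeps that password from being expired on first login, and "ssh_pwauth: False" leaves password authentication over SSH disabled, so the password is only usable on the (SPICE) console.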
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd
index 5e9724b..23aac28 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd
@@ -14,39 +14,50 @@ def parse_lxd_time(s):
 
 
 def check_remote(remote):
-    now = datetime.datetime.now(datetime.timezone.utc)
-    containers = json.loads(
-        subprocess.check_output(["lxc", "list", "-fjson", remote + ":"])
-    )
-    containers = [
-        c for c in containers if c["name"].startswith("autopkgtest-lxd-")
-    ]
-    containers = sorted(
-        containers, key=lambda c: parse_lxd_time(c["created_at"]), reverse=True
-    )
-
-    # Keep as many containers as we have services
-    to_keep = len(
-        glob.glob(f"/etc/systemd/system/autopkgtest@{remote}-*.service")
-    )
+    try:
+        now = datetime.datetime.now(datetime.timezone.utc)
+        containers = json.loads(
+            subprocess.check_output(["lxc", "list", "-fjson", remote + ":"])
+        )
+        containers = [
+            c for c in containers if c["name"].startswith("autopkgtest-lxd-")
+        ]
+        containers = sorted(
+            containers,
+            key=lambda c: parse_lxd_time(c["created_at"]),
+            reverse=True,
+        )
 
-    if to_keep < len(containers) and os.getenv("DEBUG"):
-        print(
-            f"{remote}: Keeping {to_keep} containers, deleting {len(containers) - to_keep} oldest",
-            file=sys.stderr,
+        # Keep as many containers as we have services
+        to_keep = len(
+            glob.glob(f"/etc/systemd/system/autopkgtest@{remote}-*.service")
         )
 
-    for container in containers[to_keep:]:
-        if now - parse_lxd_time(container["created_at"]) >= datetime.timedelta(
-            minutes=MINIMUM_AGE_MINS
-        ):
+        if to_keep < len(containers) and os.getenv("DEBUG"):
             print(
-                f"{remote}:{container['name']} is old - deleting",
+                f"{remote}: Keeping {to_keep} containers, "
+                f"deleting {len(containers) - to_keep} oldest",
                 file=sys.stderr,
             )
-            subprocess.check_call(
-                ["lxc", "delete", "--force", f"{remote}:{container['name']}"]
-            )
+
+        for container in containers[to_keep:]:
+            if now - parse_lxd_time(
+                container["created_at"]
+            ) >= datetime.timedelta(minutes=MINIMUM_AGE_MINS):
+                print(
+                    f"{remote}:{container['name']} is old - deleting",
+                    file=sys.stderr,
+                )
+                subprocess.check_call(
+                    [
+                        "lxc",
+                        "delete",
+                        "--force",
+                        f"{remote}:{container['name']}",
+                    ]
+                )
+    except Exception as e:
+        print(f"Error while checking {remote}: {e}")
 
 
 def main():
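Note that the "Keeping N containers, deleting M oldest" summary is only printed when the DEBUG environment variable is set; the per-container deletion messages and the new per-remote error message are printed unconditionally.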