canonical-ubuntu-qa team mailing list archive
-
canonical-ubuntu-qa team
-
Mailing list archive
-
Message #02501
[Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
Tim Andersson has proposed merging ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master.
Requested reviews:
Canonical's Ubuntu QA (canonical-ubuntu-qa)
For more details, see:
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/457239
--
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master.
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/check-config-files b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/check-config-files
new file mode 100755
index 0000000..dbb27a1
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/check-config-files
@@ -0,0 +1,246 @@
+#!/usr/bin/python3
+import configparser
+import json
+import os
+import socket
+import subprocess
+import sys
+import urllib.parse
+import urllib.request
+
+import swiftclient
+from influxdb import InfluxDBClient
+
+INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
+
+bos_arches = ["arm64", "ppc64el", "s390x"]
+
+centres = {
+ "bos01": bos_arches,
+ "bos02": bos_arches,
+ "lcy02": None,
+}
+
+openstack_cmd = "openstack project list"
+
+openstack_commands = [
+ "openstack project list",
+ "openstack server list",
+ "openstack network list",
+ "openstack image list",
+]
+
+openstack_vars = [
+ "OS_USERNAME",
+ "OS_TENANT_NAME",
+ "OS_PASSWORD",
+ "OS_AUTH_URL",
+ "OS_REGION_NAME",
+]
+
+influx_vars = [
+ "INFLUXDB_HOSTNAME",
+ "INFLUXDB_PORT",
+ "INFLUXDB_USERNAME",
+ "INFLUXDB_PASSWORD",
+ "INFLUXDB_DATABASE",
+ "INFLUXDB_CONTEXT",
+]
+
+rabbit_vars = [
+ "RABBIT_HOST",
+ "RABBIT_USER",
+ "RABBIT_PASSWORD",
+]
+
+swift_vars = [
+ "SWIFT_AUTH_URL",
+ "SWIFT_AUTH_VERSION",
+ "SWIFT_PASSWORD",
+ "SWIFT_PROJECT_DOMAIN_NAME",
+ "SWIFT_PROJECT_NAME",
+ "SWIFT_REGION",
+ "SWIFT_TENANT",
+ "SWIFT_USER_DOMAIN_NAME",
+ "SWIFT_USERNAME",
+]
+
+worker_args = [
+ "[autopkgtest]",
+ "[virt]",
+ "checkout_dir",
+ "releases",
+ "setup_command",
+ "setup_command2",
+ "per_package_config_dir",
+ "architectures",
+ "package_size_default",
+ "package_size_big",
+ "args",
+]
+
+
def check_cloudrcs(args):
    """Validate the per-cloud rc files and exercise the openstack CLI.

    args is a (centres, openstack_commands, openstack_vars) tuple.  For
    each cloud region (and each architecture, when the region lists any)
    the matching ~/cloudrcs/<centre>[-<arch>].rc file must exist and
    define every variable in openstack_vars.  The exported variables are
    then exercised by running the given openstack commands.

    Returns False when any rc file is missing or incomplete, True
    otherwise.  Raises subprocess.CalledProcessError if an openstack
    command fails.
    """
    centres, openstack_commands, openstack_vars = args
    for centre, arches in centres.items():
        # regions with no arch split (arches is None) have one rc file
        for arch in arches or [None]:
            if arch is None:
                rc_path = "~/cloudrcs/" + centre + ".rc"
            else:
                rc_path = "~/cloudrcs/" + centre + "-" + arch + ".rc"
            # os.path.isfile does not expand "~" by itself
            rc_path = os.path.expanduser(rc_path)
            if not os.path.isfile(rc_path):
                return False
            with open(rc_path, "r") as f:
                rc_file = f.read()
            for line in rc_file.splitlines():
                # lines look like: export OS_USERNAME=foo
                if line.startswith("export "):
                    var, _, value = line[len("export "):].partition("=")
                    os.environ[var] = value
            # check this file (not just the last one read) for all vars
            for var in openstack_vars:
                if var not in rc_file:
                    return False
    for command in openstack_commands:
        subprocess.run(command.split(" "), check=True)
    return True
+
+
def check_influx_creds(influx_file, influx_keys):
    """Validate InfluxDB credentials: keys present and server reachable.

    Exports the parsed KEY=value pairs into the environment, then pings
    the InfluxDB server with them.  Returns True on success; raises on
    missing file/keys or an unreachable server.
    """
    creds = check_env_file_and_keys(influx_file, influx_keys)
    for cred in creds.splitlines():
        if "=" in cred:
            # split only on the first '=' so values may themselves
            # contain '=' (e.g. passwords)
            var, value = cred.split("=", 1)
            os.environ[var] = value
    influx_client = InfluxDBClient(
        os.environ["INFLUXDB_HOSTNAME"],
        os.environ["INFLUXDB_PORT"],
        os.environ["INFLUXDB_USERNAME"],
        os.environ["INFLUXDB_PASSWORD"],
        os.environ["INFLUXDB_DATABASE"],
    )
    influx_client.ping()
    return True
+
+
def check_env_file_and_keys(file, file_keys):
    """Read ~/<file> and verify every expected key appears in it.

    Returns the file's full contents.  Raises FileNotFoundError when the
    file is missing and KeyError for the first absent key.
    """
    path = os.path.expanduser("~/" + file)
    if not os.path.isfile(path):
        raise FileNotFoundError("file %s doesn't exist" % file)
    with open(path, "r") as fh:
        contents = fh.read()
    missing = [key for key in file_keys if key not in contents]
    if missing:
        raise KeyError("key %s not found in %s" % (missing[0], file))
    return contents
+
+
def check_mirror_rc(mirror_file, mirror_vars):
    """Ensure the mirror rc file exists and mentions every expected key."""
    check_env_file_and_keys(mirror_file, mirror_vars)
+
+
def check_rabbitmq_creds(rabbit_file, rabbit_vars):
    """Ensure the RabbitMQ credentials file exists with all expected keys."""
    check_env_file_and_keys(rabbit_file, rabbit_vars)
+
+
def check_net_name(net_file, net_vars):
    """Ensure the net-name rc file exists with all expected keys."""
    check_env_file_and_keys(net_file, net_vars)
+
+
def check_swift_creds(swift_file, swift_vars):
    """Validate swift credentials by listing the account.

    Parses KEY="value" lines into the environment and opens a swift
    connection with them.  Returns True on success; raises on missing
    file/keys or a failed swift connection.
    """
    creds = check_env_file_and_keys(swift_file, swift_vars)
    for line in creds.splitlines():
        if "=" not in line:
            # tolerate blank (or comment) lines instead of crashing
            continue
        # split only on the first '=': auth URLs etc. may contain '='
        var, value = line.split("=", 1)
        os.environ[var] = value.replace('"', "")
    swift_creds = {
        "authurl": os.environ["SWIFT_AUTH_URL"],
        "user": os.environ["SWIFT_USERNAME"],
        "key": os.environ["SWIFT_PASSWORD"],
        "os_options": {
            "region_name": os.environ["SWIFT_REGION"],
            "project_domain_name": os.environ["SWIFT_PROJECT_DOMAIN_NAME"],
            "project_name": os.environ["SWIFT_PROJECT_NAME"],
            "user_domain_name": os.environ["SWIFT_USER_DOMAIN_NAME"],
        },
        "auth_version": 3,
    }
    swift_conn = swiftclient.Connection(**swift_creds)
    # get_account raises if the credentials do not work
    swift_conn.get_account()
    swift_conn.close()
    return True
+
+
def check_worker_conf_files(args):
    """Validate worker-<centre>-<arch>.conf for every centre listing arches.

    args is a (worker_args, centres) tuple; centres whose arch list is
    None are skipped.
    """
    expected_keys, cloud_centres = args
    for centre, arches in cloud_centres.items():
        if arches is None:
            continue
        for arch in arches:
            conf_name = "worker-%s-%s.conf" % (centre, arch)
            check_env_file_and_keys(conf_name, expected_keys)
+
+
files_and_keys = {
    # maps each credentials/config file (or pseudo-entry) to the keys it
    # must define and the function that validates it.  Entries containing
    # a "." are real file names passed to the checker; the other entries
    # ("worker-configs", "cloudrcs") take a tuple of prepared arguments.
    "influx.cred": {
        "vars": influx_vars,
        "func": check_influx_creds,
    },
    "rabbitmq.cred": {
        "vars": rabbit_vars,
        "func": check_rabbitmq_creds,
    },
    "swift-password.cred": {
        "vars": swift_vars,
        "func": check_swift_creds,
    },
    "mirror.rc": {
        "vars": ["MIRROR"],
        "func": check_mirror_rc,
    },
    "net-name.rc": {
        "vars": ["NET_NAME"],
        "func": check_net_name,
    },
    "worker-configs": {
        "vars": (worker_args, centres),
        "func": check_worker_conf_files,
    },
    "cloudrcs": {
        "vars": (centres, openstack_commands, openstack_vars),
        "func": check_cloudrcs,
    },
}

# file name -> True/False; written locally and posted to the web unit
RESULTS = {}

if __name__ == "__main__":
    for file, item in files_and_keys.items():
        try:
            if "." in file:
                item["func"](file, item["vars"])
            else:
                item["func"](item["vars"])
            RESULTS[file] = True
        except Exception as exc:
            # best-effort: record the failure but keep checking the rest;
            # log the reason so failures are debuggable from the journal
            print("check for %s failed: %s" % (file, exc), file=sys.stderr)
            RESULTS[file] = False
    with open("/home/ubuntu/check-config-files-results.json", "w") as f:
        f.write(json.dumps(RESULTS, indent=2))
    # post the results to the autopkgtest web front-end, when configured
    if os.path.isfile("/home/ubuntu/autopkgtest-url"):
        with open("/home/ubuntu/autopkgtest-url", "r") as f:
            webpage = f.read().rstrip()
        with open(INTEGRATION_KEY_FP, "r") as f:
            keypass = f.read().rstrip()
        post_me = {
            "type": "cloud",
            "source": socket.gethostname(),
            "pass": keypass,
            "test": __file__,
            "results": RESULTS,
        }
        results_url = webpage + "/post-integration-results"
        req = urllib.request.Request(results_url)
        req.add_header("Content-Type", "application/json; charset=utf-8")
        jsondata = json.dumps(post_me).encode("utf-8")
        req.add_header("Content-Length", len(jsondata))
        urllib.request.urlopen(req, jsondata)
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest
new file mode 100755
index 0000000..03c645a
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest
@@ -0,0 +1,327 @@
+#!/usr/bin/python3
+
+
+import configparser
+import datetime
+import json
+import logging
+import os
+import socket
+import subprocess
+import sys
+import tarfile
+import time
+import urllib.parse
+import urllib.request
+
+import requests
+import swiftclient
+from distro_info import UbuntuDistroInfo
+
+WORKING_DIR = "/home/ubuntu/autopkgtest-cloud/tools/"
+SCRIPT = "run-autopkgtest"
+
+UDI = UbuntuDistroInfo()
+LATEST = UDI.supported()[-1]
+# ARCHES = ["amd64", "arm64", "ppc64el", "s390x", "armhf", "ppa"]
+ARCHES = ["ppa"]
+PACKAGE = "gzip"
+AUTOPKGTEST_SITE = "https://autopkgtest.ubuntu.com"
+QUEUED_JSON = "%s/queues.json" % AUTOPKGTEST_SITE
+RUNNING_PAGE = "%s/static/running.json" % AUTOPKGTEST_SITE
+JOURNAL_CMD = [
+ "journalctl",
+ "--since",
+ "5 minutes ago",
+ "--no-pager",
+ "-u",
+ "autopkgtest@*",
+]
+PPA_NAME = "andersson123/hello"
+
+ARGS = ""
+
+TIMEOUT = 60 * 60
+
+URL = "https://launchpad.net/ubuntu/%s/+source/%s" % (LATEST, PACKAGE)
+SWIFT_CREDS_FP = "/home/ubuntu/swift-password.cred"
+
+INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
+
+
def get_swift_creds():
    """Parse SWIFT_CREDS_FP (KEY="value" lines) into swiftclient kwargs.

    Returns the dict to pass to swiftclient.Connection(**...).
    """
    file_vals = {}
    with open(SWIFT_CREDS_FP, "r") as f:
        for line in f.read().splitlines():
            if "=" not in line:
                # tolerate blank/comment lines instead of crashing
                continue
            # split on the first '=' only: URLs etc. may contain '='
            key, val = line.split("=", 1)
            file_vals[key] = val.replace('"', "")
    swift_creds = {
        "authurl": file_vals["SWIFT_AUTH_URL"],
        "user": file_vals["SWIFT_USERNAME"],
        "key": file_vals["SWIFT_PASSWORD"],
        "os_options": {
            "region_name": file_vals["SWIFT_REGION"],
            "project_domain_name": file_vals["SWIFT_PROJECT_DOMAIN_NAME"],
            "project_name": file_vals["SWIFT_PROJECT_NAME"],
            "user_domain_name": file_vals["SWIFT_USER_DOMAIN_NAME"],
        },
        "auth_version": file_vals["SWIFT_AUTH_VERSION"],
    }
    return swift_creds
+
+
def find_result_in_swift(swift_con, arch):
    """Poll swift until a freshly-uploaded result object for this run appears.

    Returns the swift object dict (with "name" and "last_modified" keys).
    NOTE(review): loops forever if no matching object ever appears —
    callers rely on an external timeout; confirm that is acceptable.
    """
    # add a sleep here so we can be sure the result is in swift
    time.sleep(15)
    # PPA results live in their own container, e.g.
    # https://autopkgtest.ubuntu.com/results/autopkgtest-RELEASE-LPUSER-PPA/?format=plain
    container_name = (
        ("autopkgtest-" + LATEST)
        if arch != "ppa"
        else (
            "autopkgtest-%s-%s-%s"
            % (
                LATEST,
                PPA_NAME.split("/", maxsplit=1)[0],
                PPA_NAME.split("/")[1],
            )
        )
    )
    # PPA tests actually run on amd64, so match that arch in object names
    arch_key = arch if arch != "ppa" else "amd64"
    logging.info("Container name:\n%s" % container_name)

    time_now = datetime.datetime.now()
    while True:
        _, objects = swift_con.get_container(container_name, full_listing=True)
        for object in objects:
            logging.info("Object:\n%s" % object["name"])
            logging.info("Latest: %s" % LATEST)
            logging.info("PPA Name: %s" % PPA_NAME)
            logging.info("Package: %s" % PACKAGE)
            logging.info("arch: %s" % arch_key)
            # check the object name for release, package and arch first
            logging.info(
                "Latest in object?: %s" % str(LATEST in object["name"])
            )
            logging.info(
                "Package in object?: %s" % str(PACKAGE in object["name"])
            )
            logging.info(
                "arch in object?: %s" % str(arch_key in object["name"])
            )
            if (
                LATEST in object["name"]
                and PACKAGE in object["name"]
                and arch_key in object["name"]
            ):
                # drop the fractional seconds before parsing the timestamp
                obj_time = object["last_modified"].split(".")[0]
                datetime_obj_time = datetime.datetime.strptime(
                    obj_time, "%Y-%m-%dT%H:%M:%S"
                )
                time_diff = abs(
                    time_now.timestamp() - datetime_obj_time.timestamp()
                )
                logging.info("Are we getting here?")
                logging.info("Time diff: %s" % str(time_diff))
                # only accept objects uploaded recently (within 10 min),
                # i.e. by the run this script just submitted
                if time_diff < 600:
                    return object
+
+
def get_trigger():
    """Scrape the Launchpad page for PACKAGE and build a test trigger.

    Returns "PACKAGE/current-version".  Raises ValueError if the page
    does not contain a "Current version" entry (the original silently
    returned the first line of the page in that case).
    """
    r = requests.get(URL)
    lines = r.content.decode("utf-8").splitlines()
    idx = None
    for i, line in enumerate(lines):
        if "Current version" in line:
            # the version is on the line after the (last) label
            idx = i + 1
    if idx is None or idx >= len(lines):
        raise ValueError(
            "could not find current version for %s at %s" % (PACKAGE, URL)
        )
    curr_ver = (
        lines[idx].replace("<dd>", "").replace("</dd>", "").replace(" ", "")
    )
    return "%s/%s" % (PACKAGE, curr_ver)
+
+
def check_logfile_is_accessible(url):
    """Return True if the test's log.gz can be fetched over HTTP."""
    url = url.replace("artifacts.tar.gz", "log.gz")
    try:
        r = requests.get(url)
        # requests does not raise on HTTP error statuses by itself, so
        # the original except clause could never fire
        r.raise_for_status()
    except requests.exceptions.RequestException as err:
        logging.info("Acquiring logfile failed with:\n%s" % err)
        return False
    logging.info("Acquiring logfile succeeded!")
    logging.debug("Full logfile:\n%s" % r.content)
    return True
+
+
def check_result(swift_con, arch):
    """Download the finished test's artifacts and evaluate its exit code.

    Returns {"logfile-accessible": bool, "test-passed": bool}.
    """
    logging.info("Getting container and object...")
    this_test_results = {}
    result = find_result_in_swift(swift_con=swift_con, arch=arch)
    logging.info("Found object in swift:\n%s" % str(result))
    # strip the trailing file name to get the result directory
    object = "/".join(result["name"].split("/")[:-1])
    # use the same container naming as find_result_in_swift: PPA results
    # live in their own container (the original always used the plain one)
    if arch == "ppa":
        lp_user, ppa = PPA_NAME.split("/", maxsplit=1)
        container = "autopkgtest-%s-%s-%s" % (LATEST, lp_user, ppa)
    else:
        container = "autopkgtest-" + LATEST
    logging.info("container: %s\nobject: %s" % (container, object))
    url = "%s/results/%s/%s/%s" % (
        AUTOPKGTEST_SITE,
        container,
        object,
        "artifacts.tar.gz",
    )
    logging.info("Results url: %s" % url)
    r = requests.get(url)
    code = None
    if r.status_code == 200:
        with open("/tmp/artifacts.tar.gz", "wb") as f:
            f.write(r.content)
        logging.info("Acquired results!")
        with tarfile.open("/tmp/artifacts.tar.gz") as tar:
            tar.extractall("/tmp/")
        # only read the exit code we just extracted; a stale or missing
        # /tmp/exitcode previously crashed this function on download failure
        with open("/tmp/exitcode", "r") as f:
            code = f.read()
        logging.info("code: %s" % str(code))
    this_test_results["logfile-accessible"] = check_logfile_is_accessible(url)
    this_test_results["test-passed"] = False
    try:
        # int(None) raises TypeError, int("garbage") raises ValueError —
        # the original only caught TypeError
        if int(code) == 0:
            this_test_results["test-passed"] = True
    except (TypeError, ValueError):
        pass
    return this_test_results
+
+
if __name__ == "__main__":
    # End-to-end smoke test: submit one autopkgtest per entry in ARCHES,
    # wait for it to appear in and then leave running.json, fetch its
    # result from swift, and finally post all results to the web unit.
    logging.getLogger().setLevel(logging.INFO)
    logging.info("getting trigger...")
    trigger = get_trigger()
    swift_creds = get_swift_creds()
    swift_con = swiftclient.Connection(**swift_creds)
    logging.info("got trigger: %s" % trigger)
    results = {}
    # I should also queue a test from a ppa
    for arch in ARCHES:
        results[arch] = {}
        args = "%s%s -s %s -a %s --trigger=%s %s" % (
            WORKING_DIR,
            SCRIPT,
            LATEST,
            arch,
            trigger,
            PACKAGE,
        )
        if arch == "ppa":
            # "ppa" is not a real arch: run on amd64 with --ppa instead
            args = args.replace("ppa", "amd64")
            args += " --ppa %s" % PPA_NAME
        logging.info(
            "run-autopkgtest args:\n%s\nRunning autopkgtest..." % args
        )
        # submit the test
        p = subprocess.run(args.split(" "), check=True)
        test_info = {
            PACKAGE: {
                "triggers": [trigger],
            }
        }
        in_queue = False
        saved_item = ""

        logging.info("Checking running.json for test...")
        # wait for the test to appear in running.json
        # This needs a timeout I believe
        start_time = datetime.datetime.now()
        failed = False
        is_running = False
        saved_skey = ""
        while not is_running and not failed:
            loop_time = datetime.datetime.now()
            duration = loop_time - start_time
            if duration.total_seconds() > TIMEOUT:
                failed = True
                break
            running = requests.get(RUNNING_PAGE)
            running_json = json.loads(running.content)
            for package, values in running_json.items():
                if package == PACKAGE:
                    for skey, details in values.items():
                        # a run matches when every trigger we submitted
                        # appears in its submission key
                        num_triggers = len(test_info[PACKAGE]["triggers"])
                        ctr = 0
                        for trigger in test_info[PACKAGE]["triggers"]:
                            if trigger in skey:
                                ctr += 1
                        if ctr == num_triggers:
                            try:
                                this_arch = arch if arch != "ppa" else "amd64"
                                test_info[PACKAGE][
                                    "submit-time"
                                ] = running_json[package][skey][LATEST][
                                    this_arch
                                ][
                                    0
                                ][
                                    "submit-time"
                                ]
                            except KeyError as _:
                                # entry not fully populated yet; keep polling
                                continue
                            saved_skey = skey
                            is_running = True
        logging.info("Test found in running.json!")
        logging.info("Waiting for test to leave running.json...")
        # wait for the test to leave running.json
        while is_running and not failed:
            loop_time = datetime.datetime.now()
            duration = loop_time - start_time
            if duration.total_seconds() > TIMEOUT:
                failed = True
                break
            running = requests.get(RUNNING_PAGE)
            if saved_skey not in running.content.decode("utf-8"):
                is_running = False
        logging.info("Test has left running.json!")
        logging.info("Getting results for test!")
        if not failed:
            results[arch] = check_result(swift_con, arch)
        else:
            # NOTE(review): this stores a bool where other arches store a
            # dict; the .items() loop at the bottom will then raise
            # AttributeError — confirm intended
            results[arch] = False
            pass
    logging.info("Results:\n%s" % json.dumps(results, indent=2))
    # this needs changing
    cp = configparser.ConfigParser()
    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
    try:
        webpage = cp["web"]["ExternalURL"].replace("/results", "")
    except KeyError:
        # change to logging maybe ?
        print("No external url found!")
        sys.exit(1)
    keypass = ""
    with open(INTEGRATION_KEY_FP, "r") as f:
        keypass = f.read().rstrip()
    post_me = {
        "type": "cloud",
        "source": socket.gethostname(),
        "pass": keypass,
        "test": __file__,
        "results": results,
    }
    results_url = webpage + "/post-integration-results"
    req = urllib.request.Request(results_url)
    req.add_header("Content-Type", "application/json; charset=utf-8")
    jsondata = json.dumps(post_me).encode("utf-8")
    req.add_header("Content-Length", len(jsondata))
    response = urllib.request.urlopen(req, jsondata)

    with open("/home/ubuntu/test-run-autopkgtest-results.json", "w") as f:
        f.write(json.dumps(results, indent=2))
    # exit non-zero if any individual check failed
    for arch, result in results.items():
        for key, t_f in result.items():
            if not t_f:
                sys.exit(1)
diff --git a/charms/focal/autopkgtest-cloud-worker/config.yaml b/charms/focal/autopkgtest-cloud-worker/config.yaml
index 7f0ef60..257d075 100644
--- a/charms/focal/autopkgtest-cloud-worker/config.yaml
+++ b/charms/focal/autopkgtest-cloud-worker/config.yaml
@@ -110,3 +110,7 @@ options:
description: Submit all metrics with this as the "context" tag,
to differentiate staging vs. production submissions
type: string
+ autopkgtest-hostname:
+ default: ~
+ description: URL for autopkgtest (prod or staging)
+ type: string
diff --git a/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py b/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
index 2e8f376..51101a6 100644
--- a/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
+++ b/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
@@ -285,6 +285,13 @@ def clear_old_rcs():
log("...done", "INFO")
@when_any(
    "config.set.autopkgtest-hostname",
    "config.changed.autopkgtest-hostname",
)
def write_hostname_file():
    """Persist the configured autopkgtest hostname for the integration tests."""
    hostname = config().get("autopkgtest-hostname")
    if not hostname:
        # the charm default is "~" (None); concatenating None would
        # raise TypeError, so skip writing until the option is set
        return
    with open(os.path.expanduser("~ubuntu/autopkgtest-url"), "w") as f:
        f.write("https://" + hostname)
@when_all(
"autopkgtest.autopkgtest_cloud_symlinked",
diff --git a/charms/focal/autopkgtest-cloud-worker/units/check-config-files.service b/charms/focal/autopkgtest-cloud-worker/units/check-config-files.service
new file mode 100644
index 0000000..85da744
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/units/check-config-files.service
@@ -0,0 +1,8 @@
+[Unit]
+Description=Check all necessary config files for autopkgtest-cloud-worker charm
+
+[Service]
+Type=oneshot
+User=ubuntu
+Group=ubuntu
+ExecStart=/home/ubuntu/autopkgtest-cloud/tools/check-config-files
diff --git a/charms/focal/autopkgtest-cloud-worker/units/check-config-files.timer b/charms/focal/autopkgtest-cloud-worker/units/check-config-files.timer
new file mode 100644
index 0000000..fbe8b79
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/units/check-config-files.timer
@@ -0,0 +1,9 @@
+[Unit]
+Description=Check all necessary config files for autopkgtest-cloud-worker charm
+
+[Timer]
+OnBootSec=2min
+OnCalendar=*-*-* 00:00:00
+
+[Install]
+WantedBy=autopkgtest.target
diff --git a/charms/focal/autopkgtest-cloud-worker/units/test-run-autopkgtest.service b/charms/focal/autopkgtest-cloud-worker/units/test-run-autopkgtest.service
new file mode 100644
index 0000000..3df205d
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/units/test-run-autopkgtest.service
@@ -0,0 +1,8 @@
+[Unit]
+Description=Run script which checks tests pass on all architectures and with a ppa
+
+[Service]
+Type=oneshot
+User=ubuntu
+Group=ubuntu
+ExecStart=/home/ubuntu/autopkgtest-cloud/tools/test-run-autopkgtest
\ No newline at end of file
diff --git a/charms/focal/autopkgtest-cloud-worker/units/test-run-autopkgtest.timer b/charms/focal/autopkgtest-cloud-worker/units/test-run-autopkgtest.timer
new file mode 100644
index 0000000..c4b64ca
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/units/test-run-autopkgtest.timer
@@ -0,0 +1,9 @@
+[Unit]
+Description=Run script which checks tests pass on all architectures and with a ppa
+
+[Timer]
+OnBootSec=2min
+OnCalendar=*-*-* 00:00:00
+
+[Install]
+WantedBy=autopkgtest.target
diff --git a/charms/focal/autopkgtest-web/units/check-config-files.service b/charms/focal/autopkgtest-web/units/check-config-files.service
new file mode 100644
index 0000000..45837d5
--- /dev/null
+++ b/charms/focal/autopkgtest-web/units/check-config-files.service
@@ -0,0 +1,8 @@
+[Unit]
+Description=Check all necessary config files for autopkgtest-web charm
+
+[Service]
+Type=oneshot
+User=ubuntu
+Group=ubuntu
+ExecStart=/home/ubuntu/webcontrol/check-config-files
diff --git a/charms/focal/autopkgtest-web/units/check-config-files.timer b/charms/focal/autopkgtest-web/units/check-config-files.timer
new file mode 100644
index 0000000..b89aa60
--- /dev/null
+++ b/charms/focal/autopkgtest-web/units/check-config-files.timer
@@ -0,0 +1,9 @@
+[Unit]
+Description=Check all necessary config files for autopkgtest-web charm
+
+[Timer]
+OnBootSec=2min
+OnCalendar=*-*-* 00:00:00
+
+[Install]
+WantedBy=autopkgtest-web.target
diff --git a/charms/focal/autopkgtest-web/units/endpoint-checker.service b/charms/focal/autopkgtest-web/units/endpoint-checker.service
new file mode 100644
index 0000000..b3ce9a3
--- /dev/null
+++ b/charms/focal/autopkgtest-web/units/endpoint-checker.service
@@ -0,0 +1,8 @@
+[Unit]
+Description=Check all endpoints for autopkgtest-web
+
+[Service]
+Type=oneshot
+User=ubuntu
+Group=ubuntu
+ExecStart=/home/ubuntu/webcontrol/endpoint-checker
\ No newline at end of file
diff --git a/charms/focal/autopkgtest-web/units/endpoint-checker.timer b/charms/focal/autopkgtest-web/units/endpoint-checker.timer
new file mode 100644
index 0000000..568d80f
--- /dev/null
+++ b/charms/focal/autopkgtest-web/units/endpoint-checker.timer
@@ -0,0 +1,9 @@
+[Unit]
+Description=Check all endpoints for autopkgtest-web
+
+[Timer]
+OnBootSec=2min
+OnCalendar=*-*-* 00:00:00
+
+[Install]
+WantedBy=autopkgtest-web.target
diff --git a/charms/focal/autopkgtest-web/webcontrol/browse.cgi b/charms/focal/autopkgtest-web/webcontrol/browse.cgi
index 7885e91..6faaafd 100755
--- a/charms/focal/autopkgtest-web/webcontrol/browse.cgi
+++ b/charms/focal/autopkgtest-web/webcontrol/browse.cgi
@@ -12,6 +12,7 @@ from wsgiref.handlers import CGIHandler
import distro_info
import flask
+from flask import request
from werkzeug.middleware.proxy_fix import ProxyFix
app = flask.Flask("browse")
@@ -25,7 +26,11 @@ SUPPORTED_UBUNTU_RELEASES = sorted(
set(UDI.supported() + UDI.supported_esm()), key=ALL_UBUNTU_RELEASES.index
)
INDEXED_PACKAGES_FP = ""
+INTEGRATION_TEST_RES_FP = (
+    "/run/autopkgtest_webcontrol/integration-test-results.json"
+)
+INTEGRATION_TESTS_PASS_FP = "/home/ubuntu/integration-key"
def init_config():
@@ -433,6 +441,100 @@ def testlist():
return render("browse-testlist.html", indexed_pkgs=indexed_pkgs)
@app.route("/post-integration-results", methods=["POST"])
def handle_results():
    """Receive integration-test results posted by worker/web units.

    Expects a JSON body of the form::

        {"type": "cloud"|"web", "source": "<machine>", "pass": "<key>",
         "test": "<test name>", "results": {...}}

    and merges it into INTEGRATION_TEST_RES_FP as
    results[type][source][test] = results.  Authenticated by comparing
    "pass" with the contents of INTEGRATION_TESTS_PASS_FP.
    """
    results = {}
    if os.path.isfile(INTEGRATION_TEST_RES_FP):
        with open(INTEGRATION_TEST_RES_FP, "r") as f:
            results = json.load(f)
    data = request.json

    # key check: a bare "return" would make flask raise a 500, so send
    # an explicit 400 for malformed payloads instead
    keys = ["type", "source", "pass", "test", "results"]
    if not isinstance(data, dict) or any(k not in data for k in keys):
        return (
            json.dumps({"success": False, "reason": "missing keys"}),
            400,
            {"ContentType": "application/json"},
        )
    if data["type"] not in ("cloud", "web"):
        return (
            json.dumps({"success": False, "reason": "invalid type"}),
            400,
            {"ContentType": "application/json"},
        )
    # authenticate against the shared integration key
    keypass = ""
    if os.path.isfile(INTEGRATION_TESTS_PASS_FP):
        with open(INTEGRATION_TESTS_PASS_FP, "r") as f:
            keypass = f.read()
    if data["pass"].rstrip() != keypass.rstrip():
        return (
            json.dumps({"success": False, "reason": "incorrect pass"}),
            403,
            {"ContentType": "application/json"},
        )

    results.setdefault("cloud", {})
    results.setdefault("web", {})
    results[data["type"]].setdefault(data["source"], {})
    results[data["type"]][data["source"]][data["test"]] = data["results"]
    with open(INTEGRATION_TEST_RES_FP, "w") as f:
        json.dump(results, f, indent=2)
    return (
        json.dumps({"success": True}),
        200,
        {"ContentType": "application/json"},
    )
+
+
@app.route("/integration-test-results.json", methods=["GET"])
def get_integration_test_results():
    """Serve the accumulated integration-test results as JSON.

    Returns {} when no results file has been written yet.  (The original
    had a second, unreachable return after this one; a single return
    with the {} default covers both cases.)
    """
    results = {}
    if os.path.isfile(INTEGRATION_TEST_RES_FP):
        with open(INTEGRATION_TEST_RES_FP, "r") as f:
            results = json.load(f)
    return flask.Response(
        json.dumps(results, indent=2), mimetype="application/json"
    )
+
+
@app.route("/statistics")
def statistics():
release_arches = get_release_arches()
diff --git a/charms/focal/autopkgtest-web/webcontrol/check-config-files b/charms/focal/autopkgtest-web/webcontrol/check-config-files
new file mode 100755
index 0000000..b002d40
--- /dev/null
+++ b/charms/focal/autopkgtest-web/webcontrol/check-config-files
@@ -0,0 +1,180 @@
+#!/usr/bin/python3
+import configparser
+import os
+import socket
+import sys
+import urllib.parse
+import urllib.request
+import json
+
+import requests
+from distro_info import UbuntuDistroInfo
+
+# openstack-creds <- todo: part of the update-github-jobs-swiftclient-refactor mp
+UDI = UbuntuDistroInfo()
+
+INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
+
+
def check_env_file_and_keys(file, file_keys):
    """Read ~/<file> and verify every expected key appears in it.

    Returns the file's full contents.  Raises FileNotFoundError when the
    file is missing and KeyError for the first absent key.
    """
    path = os.path.expanduser("~/" + file)
    if not os.path.isfile(path):
        raise FileNotFoundError("file %s doesn't exist" % file)
    with open(path, "r") as fh:
        contents = fh.read()
    missing = [key for key in file_keys if key not in contents]
    if missing:
        raise KeyError("key %s not found in %s" % (missing[0], file))
    return contents
+
+
def check_autopkgtest_cloud_conf(conf_file, conf_keys):
    """Validate autopkgtest-cloud.conf: keys present, SwiftURL reachable.

    Every supported release's autopkgtest-<release> container must
    answer with HTTP 200 under the configured SwiftURL; raises
    requests.ConnectionError otherwise.
    """
    myf = check_env_file_and_keys(conf_file, conf_keys)
    sw_url = ""
    for line in myf.splitlines():
        if "SwiftURL" in line:
            # split on the first '=' only (URLs can contain '=') and
            # tolerate spaces around the '='
            sw_url = line.split("=", 1)[1].strip()
    for supported in UDI.supported():
        req = requests.get(sw_url + "/autopkgtest-" + supported)
        if req.status_code != 200:
            raise requests.ConnectionError(
                "Container autopkgtest-%s is unreachable - something wrong with the SwiftURL: %s"
                % (supported, sw_url)
            )
+
+
def check_github_secrets(secrets_file, vars):
    """Ensure the GitHub secrets file exists with all expected keys."""
    check_env_file_and_keys(secrets_file, vars)
+
+
def check_github_status_creds(creds_file, vars):
    """Ensure the GitHub status credentials file exists with all expected keys."""
    check_env_file_and_keys(creds_file, vars)
+
+
def check_swift_web_creds(creds_file, vars):
    """Validate the swift web credentials file: keys present and INI-parseable."""
    check_env_file_and_keys(creds_file, vars)
    parser = configparser.ConfigParser()
    parser.read(os.path.expanduser("~/" + creds_file))
    # vars[0] is the "[swift]" section header; the rest are option names
    for option in vars[1:]:
        parser.get("swift", option)
+
+
def check_openstack_creds(creds_file, vars):
    """Ensure the openstack credentials file exists with all expected variables."""
    check_env_file_and_keys(creds_file, vars)
+
+
+autopkgtest_cloud_vars = [
+ "[web]",
+ "database",
+ "database_ro",
+ "SwiftURL",
+ "ExternalURL",
+ "cookies",
+ "[amqp]",
+ "uri",
+]
+
+github_secrets_keys = [
+ "systemd-upstream",
+ "ovs-upstream",
+ "ubuntu-image-autopkgtest",
+ "snapcraft",
+ "snapd",
+]
+
+github_status_credentials = [
+ "systemd-upstream",
+ "ubuntu-image-autopkgtest",
+ "snapcraft",
+ "snapd",
+]
+
+swift_web_creds = [
+ "[swift]",
+ "auth_url",
+ "username",
+ "password",
+ "tenant",
+ "region_name",
+ "project_name",
+ "project_domain_name",
+ "user_domain_name",
+ "obj_storage_url",
+]
+
+openstack_creds = [
+ "OS_REGION_NAME",
+ "OS_INTERFACE",
+ "OS_AUTH_URL",
+ "OS_PROJECT_DOMAIN_NAME",
+ "OS_USERNAME",
+ "OS_USER_DOMAIN_NAME",
+ "OS_PROJECT_NAME",
+ "OS_PASSWORD",
+ "OS_IDENTITY_API_VERSION",
+]
+
+
+files_and_keys = {
+ "autopkgtest-cloud.conf": {
+ "vars": autopkgtest_cloud_vars,
+ "func": check_autopkgtest_cloud_conf,
+ },
+ "github-secrets.json": {
+ "vars": github_secrets_keys,
+ "func": check_github_secrets,
+ },
+ "github-status-credentials.txt": {
+ "vars": github_status_credentials,
+ "func": check_github_status_creds,
+ },
+ "swift-web-credentials.conf": {
+ "vars": swift_web_creds,
+ "func": check_swift_web_creds,
+ },
+ "openstack-creds": {
+ "vars": openstack_creds,
+ "func": check_openstack_creds,
+ },
+}
+
# file name -> True/False; written locally and posted to the web unit
RESULTS = {}

if __name__ == "__main__":
    for file, item in files_and_keys.items():
        try:
            item["func"](file, item["vars"])
            RESULTS[file] = True
        except Exception as exc:
            # best-effort: record the failure but keep checking the rest;
            # log the reason so failures are debuggable from the journal
            print("check for %s failed: %s" % (file, exc), file=sys.stderr)
            RESULTS[file] = False

    # save results locally *before* the network post so they survive a
    # failed upload (matches the worker charm's check-config-files)
    with open("/home/ubuntu/check-config-files-results.json", "w") as f:
        f.write(json.dumps(RESULTS, indent=2))

    cp = configparser.ConfigParser()
    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
    try:
        webpage = cp["web"]["ExternalURL"].replace("/results", "")
    except KeyError:
        print("No external url found!")
        sys.exit(1)
    with open(INTEGRATION_KEY_FP, "r") as f:
        keypass = f.read().rstrip()
    post_me = {
        "type": "web",
        "source": socket.gethostname(),
        "pass": keypass,
        "test": __file__,
        "results": RESULTS,
    }
    results_url = webpage + "/post-integration-results"
    req = urllib.request.Request(results_url)
    req.add_header("Content-Type", "application/json; charset=utf-8")
    jsondata = json.dumps(post_me).encode("utf-8")
    req.add_header("Content-Length", len(jsondata))
    urllib.request.urlopen(req, jsondata)
+
diff --git a/charms/focal/autopkgtest-web/webcontrol/endpoint-checker b/charms/focal/autopkgtest-web/webcontrol/endpoint-checker
new file mode 100755
index 0000000..e4f3dc5
--- /dev/null
+++ b/charms/focal/autopkgtest-web/webcontrol/endpoint-checker
@@ -0,0 +1,92 @@
+#!/usr/bin/python3
+
+import argparse
+import configparser
+import socket
+import json
+import logging
+import os
+import sys
+import urllib.parse
+import urllib.request
+
+ENDPOINTS = [
+ "/",
+ "/queues.json",
+ "/queued.json",
+ "/packages/gzip",
+ "/packages/gzip/noble/amd64",
+ "/running",
+ "/static/running.json",
+ "/queue_size.json",
+ "/testlist",
+ "/statistics",
+]
+RESULTS = {}
+INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
+
+
if __name__ == "__main__":
    # Hit every public endpoint once per SRVNAME cookie (i.e. per backend
    # web unit), record pass/fail, then post the results to the web app.
    logging.getLogger().setLevel(logging.INFO)
    cp = configparser.ConfigParser()
    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
    try:
        webpage = cp["web"]["ExternalURL"].replace("/results", "")
    except KeyError:
        print("No external url found!")
        sys.exit(1)
    try:
        cookies = cp["web"]["cookies"]
    except KeyError:
        print("No cookies in config!")
        sys.exit(1)
    logging.info("Webpage: %s", webpage)
    logging.info("Cookies: %s", cookies)

    for srvname_cookie in cookies.split(" "):
        RESULTS[srvname_cookie] = {}
        for endpoint in ENDPOINTS:
            logging.info("Trying endpoint: %s", endpoint)
            try:
                req = urllib.request.Request(webpage + endpoint)
                req.add_header("Cookie", "SRVNAME=" + srvname_cookie)
                output = urllib.request.urlopen(req).read()
                if ".json" in endpoint:
                    # JSON endpoints must also return parseable JSON
                    try:
                        json.loads(output)
                        RESULTS[srvname_cookie][endpoint] = True
                        logging.info("Endpoint %s succeeded!", endpoint)
                    except json.JSONDecodeError:
                        RESULTS[srvname_cookie][endpoint] = False
                        logging.info("Endpoint %s failed!", endpoint)
                else:
                    logging.info("Endpoint %s succeeded!", endpoint)
                    RESULTS[srvname_cookie][endpoint] = True
            # urllib.error is reachable via the urllib.request import;
            # NOTE(review): an explicit "import urllib.error" at the top
            # of the file would be more robust
            except urllib.error.HTTPError:
                logging.info("Endpoint %s failed!", endpoint)
                RESULTS[srvname_cookie][endpoint] = False
    with open("/home/ubuntu/endpoint-checker-results.json", "w") as f:
        f.write(json.dumps(RESULTS, indent=2))
    with open(INTEGRATION_KEY_FP, "r") as f:
        keypass = f.read().rstrip()
    post_me = {
        "type": "web",
        "source": socket.gethostname(),
        "pass": keypass,
        "test": __file__,
        "results": RESULTS,
    }
    results_url = webpage + "/post-integration-results"
    req = urllib.request.Request(results_url)
    req.add_header("Content-Type", "application/json; charset=utf-8")
    jsondata = json.dumps(post_me).encode("utf-8")
    req.add_header("Content-Length", len(jsondata))
    urllib.request.urlopen(req, jsondata)
    logging.info("Results:\n%s", json.dumps(RESULTS, indent=2))
diff --git a/charms/focal/autopkgtest-web/webcontrol/new-test-request b/charms/focal/autopkgtest-web/webcontrol/new-test-request
new file mode 100755
index 0000000..4098846
--- /dev/null
+++ b/charms/focal/autopkgtest-web/webcontrol/new-test-request
@@ -0,0 +1,124 @@
+#!/usr/bin/python3
+
+
+import json
+
+from distro_info import UbuntuDistroInfo
+from selenium import webdriver
+from selenium.webdriver.common.by import By
+
# Ubuntu release metadata; LATEST is the newest supported release.
# NOTE(review): LATEST is currently unused below — the scenarios
# hard-code "noble". Consider templating the release the way
# test-request-cgi does; confirm the pinned trigger versions first.
UDI = UbuntuDistroInfo()
LATEST = UDI.supported()[-1]
+ARGS = {
+ "migration-reference-all-proposed-check": {
+ "args": [
+ "release=noble",
+ "arch=amd64",
+ "package=gzip",
+ "trigger=migration-reference/0",
+ "all-proposed=1",
+ ],
+ "expected-response": "migration-reference/0 and all-proposed=1 are not compatible arguments.",
+ },
+ "invalid-release": {
+ "args": [
+ "release=bocal",
+ "arch=amd64",
+ "package=gzip",
+ "trigger=gzip/1.12-1ubuntu1",
+ ],
+ "expected-response": "release bocal not found",
+ },
+ "invalid-arch": {
+ "args": [
+ "release=noble",
+ "arch=amz64",
+ "package=gzip",
+ "trigger=gzip/1.12-1ubuntu1",
+ ],
+ "expected-response": "arch amz64 not found",
+ },
+ "invalid-ppa": {
+ "args": [
+ "release=noble",
+ "arch=amd64",
+ "package=gzip",
+ "trigger=gzip/1.12-1ubuntu1",
+ "ppa=andersson123/hell",
+ ],
+ "expected-response": "ppa andersson123/hell not found",
+ },
+ "invalid-package": {
+ "args": [
+ "release=noble",
+ "arch=amd64",
+ "package=gsip",
+ "trigger=gzip/1.12-1ubuntu1",
+ ],
+ "expected-response": "package gsip does not have any test results",
+ },
+ "ppa-migration-reference-0": {
+ "args": [
+ "release=noble",
+ "arch=amd64",
+ "package=gzip",
+ "trigger=migration-reference/0",
+ "ppa=andersson123/autopkgtest",
+ ],
+ "expected-response": "Cannot use PPAs with migration-reference/0",
+ },
+ "migration-reference-0-additional-triggers": {
+ "args": [
+ "release=noble",
+ "arch=amd64",
+ "package=gzip",
+ "trigger=gzip/1.12-1ubuntu1",
+ "trigger=migration-reference/0",
+ ],
+ "expected-response": "Cannot use additional triggers with migration-reference/0",
+ },
+ "malformed-trigger": {
+ "args": [
+ "release=noble",
+ "arch=amd64",
+ "package=gzip",
+ "trigger=gzip-1.12-1ubuntu1",
+ ],
+ "expected-response": "Malformed trigger, must be srcpackage/version",
+ },
+ "trigger-not-published-in-release": {
+ "args": [
+ "release=noble",
+ "arch=amd64",
+ "package=gzip",
+ "trigger=gzip/1.10-4ubuntu4.1",
+ ],
+ "expected-response": "gzip/1.10-4ubuntu4.1 is not published in noble",
+ },
+}
+
+
+webpage = "https://autopkgtest.ubuntu.com/request.cgi?"
+
+
if __name__ == "__main__":
    # Authentication is manual: open the login page in a real browser
    # and wait for the operator to finish SSO before running scenarios.
    # NOTE(review): the resulting URL is "request.cgi?/login" — this
    # mirrors the site's own login link; confirm it stays valid.
    browser = webdriver.Chrome()
    login_url = webpage + "/login"
    browser.get(login_url)
    input("Press enter once you are authenticated.")

    results = {}
    for name, scenario in ARGS.items():
        print("Running " + name)
        request_url = webpage + "&".join(scenario["args"])
        browser.get(request_url)
        # A scenario passes when the expected error text appears in the
        # rendered page.
        results[name] = scenario["expected-response"] in browser.page_source
    print(json.dumps(results, indent=2))
diff --git a/charms/focal/autopkgtest-web/webcontrol/test-request-cgi b/charms/focal/autopkgtest-web/webcontrol/test-request-cgi
new file mode 100755
index 0000000..659fd5b
--- /dev/null
+++ b/charms/focal/autopkgtest-web/webcontrol/test-request-cgi
@@ -0,0 +1,109 @@
+#!/usr/bin/python3
+
+import configparser
+import logging
+import os
+import sys
+import urllib.parse
+import urllib.request
+import webbrowser
+
+import requests
+from distro_info import UbuntuDistroInfo
+
+# what do i need to do here?
+# Request a test via webpage (gzip on amd64), have it run and pass
+# maybe check the is_request_queued_or_running call
+# use nick=andersson123 for now
+
+# Then request tests with disallowed arguments and check for the correct responses
+# - migration reference all proposed check (migration-reference=0 and all-proposed=0)
+# - invalid release?
+# - invalid architecture
+# - invalid ppa
+# - invalid package (no results)
+# - ppas with migration-reference=0
+# - migration reference=0 with additional triggers
+# - malformed trigger (invalid format, not srcpackage/version)
+# - trigger not published in PPA (not sure of this one)
+# - trigger not published in release (use an old trigger or something)
+# - requester not allowed - try setting nick to nothing
+
+# https://autopkgtest.staging.ubuntu.com/request.cgi?release=mantic&arch=amd64&package=gzip&trigger=gzip/1.12-1ubuntu1
# Newest supported Ubuntu release, so the scenarios automatically track
# the current development series.
UDI = UbuntuDistroInfo()
LATEST = UDI.supported()[-1]

# Scenario table: request.cgi query arguments plus the error message the
# service is expected to respond with.
ARGS = {
    "migration-reference-all-proposed-check": {
        "args": [
            "release=%s" % LATEST,
            "arch=amd64",
            "package=gzip",
            "trigger=migration-reference/0",
            "all-proposed=1",
        ],
        "expected-response": "migration-reference/0 and all-proposed=1 are not compatible arguments.",
    },
    # TODO: add the remaining scenarios once this one is proven to work.
}
+
+
if __name__ == "__main__":
    logging.getLogger().setLevel(logging.INFO)

    # BUGFIX: cp was previously constructed inside a
    # "try: ... except Exception: pass" block, so a failure there left
    # cp undefined and the lookup below raised NameError instead of the
    # handled KeyError. Construct it unconditionally (ConfigParser()
    # itself cannot fail) and keep only the read() best-effort.
    cp = configparser.ConfigParser()
    try:
        cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
    except (OSError, configparser.Error):
        # Missing or malformed config just means we fall back to the
        # production defaults below.
        pass

    try:
        webpage = cp["web"]["ExternalURL"].replace("/results", "/request.cgi?")
    except KeyError:
        logging.info("No external url found!")
        webpage = "https://autopkgtest.ubuntu.com/request.cgi?"
    try:
        cookies = cp["web"]["cookies"]
    except KeyError:
        logging.info("No cookies in config!")
        cookies = "S0 S1"

    logging.info("Webpage: %s" % webpage)
    logging.info("Cookies: %s" % cookies)

    for key, val in ARGS.items():
        logging.info("Running integration test for %s" % key)
        this_req = webpage + "&".join(val["args"])
        logging.info("Running the following request:\n%s" % this_req)
        # request.cgi requires an authenticated SSO session, so hand the
        # URL to the operator's browser rather than fetching it here.
        webbrowser.open(this_req)
diff --git a/mojo/service-bundle b/mojo/service-bundle
index 534bebe..a5d22bb 100644
--- a/mojo/service-bundle
+++ b/mojo/service-bundle
@@ -40,6 +40,7 @@ applications:
influxdb-hostname: include-file://{{ local_dir }}/influx-hostname.txt
influxdb-password: include-file://{{ local_dir }}/influx-password.txt
influxdb-database: metrics
+ autopkgtest-hostname: {{ hostname }}
{%- if stage_name == "production" %}
influxdb-username: prod_proposed_migration
influxdb-context: production
Follow ups
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-26
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-26
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-12
-
[Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-12
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-12
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-12
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-12
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-12
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-12
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-03-08
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Skia, 2024-03-01
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-01-16
-
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
From: Tim Andersson, 2024-01-16