canonical-ubuntu-qa team mailing list archive
Message #03283
Re: [Merge] ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master
Please squash all the autopkgtest-hostname commits.
Diff comments:
> diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest
> new file mode 100755
> index 0000000..fd1f1f3
> --- /dev/null
> +++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest
> @@ -0,0 +1,326 @@
> +#!/usr/bin/python3
> +
> +
> +import configparser
> +import datetime
> +import json
> +import logging
> +import os
> +import socket
> +import subprocess
> +import sys
> +import tarfile
> +import time
> +import urllib.parse
> +import urllib.request
> +
> +import requests
> +import swiftclient
> +from distro_info import UbuntuDistroInfo
> +
> +WORKING_DIR = "/home/ubuntu/autopkgtest-cloud/tools/"
> +SCRIPT = "run-autopkgtest"
> +
> +UDI = UbuntuDistroInfo()
> +LATEST = UDI.supported()[-1]
> +# ARCHES = ["amd64", "arm64", "ppc64el", "s390x", "armhf", "ppa"]
> +ARCHES = ["ppa"]
> +PACKAGE = "gzip"
> +AUTOPKGTEST_SITE = "https://autopkgtest.ubuntu.com"
> +QUEUED_JSON = "%s/queues.json" % AUTOPKGTEST_SITE
> +RUNNING_PAGE = "%s/static/running.json" % AUTOPKGTEST_SITE
> +JOURNAL_CMD = [
> + "journalctl",
> + "--since",
> + "5 minutes ago",
> + "--no-pager",
> + "-u",
> + "autopkgtest@*",
> +]
> +PPA_NAME = "andersson123/hello"
> +
> +ARGS = ""
> +
> +TIMEOUT = 60 * 60
> +
> +URL = "https://launchpad.net/ubuntu/%s/+source/%s" % (LATEST, PACKAGE)
> +SWIFT_CREDS_FP = "/home/ubuntu/swift-password.cred"
> +
> +INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
> +
> +
> +def get_swift_creds():
> + swift_file = ""
> + file_vals = {}
> + with open(SWIFT_CREDS_FP, "r") as f:
> + swift_file = f.read()
> + for line in swift_file.splitlines():
> + key, val = line.split("=")
> + val = val.replace('"', "")
> + file_vals[key] = val
> + swift_creds = {
> + "authurl": file_vals["SWIFT_AUTH_URL"],
> + "user": file_vals["SWIFT_USERNAME"],
> + "key": file_vals["SWIFT_PASSWORD"],
> + "os_options": {
> + "region_name": file_vals["SWIFT_REGION"],
> + "project_domain_name": file_vals["SWIFT_PROJECT_DOMAIN_NAME"],
> + "project_name": file_vals["SWIFT_PROJECT_NAME"],
> + "user_domain_name": file_vals["SWIFT_USER_DOMAIN_NAME"],
> + },
> + "auth_version": file_vals["SWIFT_AUTH_VERSION"],
> + }
> + return swift_creds
> +
> +
> +def find_result_in_swift(swift_con, arch):
> + time.sleep(15)
> + # presuming test_info will be useful
> + # add a sleep here so we can be sure the result is in swift
> + # Need to handle PPA case differently
> + # https://autopkgtest.ubuntu.com/results/autopkgtest-RELEASE-LPUSER-PPA/?format=plain
> + # results available like this
> + container_name = (
> + ("autopkgtest-" + LATEST)
> + if arch != "ppa"
> + else (
> + "autopkgtest-%s-%s-%s"
> + % (
> + LATEST,
> + PPA_NAME.split("/", maxsplit=1)[0],
> + PPA_NAME.split("/")[1],
> + )
> + )
> + )
> + arch_key = arch if arch != "ppa" else "amd64"
> + logging.info("Container name:\n%s" % container_name)
> +
> + time_now = datetime.datetime.now()
> + while True:
> + _, objects = swift_con.get_container(container_name, full_listing=True)
> + for object in objects:
> + logging.info("Object:\n%s" % object["name"])
> + logging.info("Latest: %s" % LATEST)
> + logging.info("PPA Name: %s" % PPA_NAME)
> + logging.info("Package: %s" % PACKAGE)
> + logging.info("arch: %s" % arch_key)
> + ## check object name first
> + # ah yes i need to modify arch for this!
> + logging.info(
> + "Latest in object?: %s" % str(LATEST in object["name"])
> + )
> + logging.info(
> + "Package in object?: %s" % str(PACKAGE in object["name"])
> + )
> + logging.info(
> + "arch in object?: %s" % str(arch_key in object["name"])
> + )
> + if (
> + LATEST in object["name"]
> + and PACKAGE in object["name"]
> + and arch_key in object["name"]
> + ):
> + obj_time = object["last_modified"].split(".")[0]
> + datetime_obj_time = datetime.datetime.strptime(
> + obj_time, "%Y-%m-%dT%H:%M:%S"
> + )
> + time_diff = abs(
> + time_now.timestamp() - datetime_obj_time.timestamp()
> + )
> + logging.info("Are we getting here?")
> + logging.info("Time diff: %s" % str(time_diff))
> + if time_diff < 600:
> + return object
> +
> +
> +def get_trigger():
> + r = requests.get(URL)
> + ctr = 0
> + resp = r.content.decode("utf-8")
> + idx = 0
> + for line in resp.splitlines():
> + if "Current version" in line:
> + idx = ctr + 1
> + ctr += 1
> + curr_ver = resp.splitlines()[idx]
> + curr_ver = (
> + curr_ver.replace("<dd>", "").replace("</dd>", "").replace(" ", "")
> + )
> + return "%s/%s" % (PACKAGE, curr_ver)
> +
> +
> +def check_logfile_is_accessible(url):
> + url = url.replace("artifacts.tar.gz", "log.gz")
> + try:
> + r = requests.get(url)
> + except requests.exceptions.HTTPError as err:
> + logging.info("Acquiring logfile failed with:\n%s" % err)
> + return False
> + logging.info("Acquiring logfile succeeded!")
> + logging.debug("Full logfile:\n%s" % r.content)
> + return True
> +
> +
> +def check_result(swift_con, arch):
> + logging.info("Getting container and object...")
> + this_test_results = {}
> + result = find_result_in_swift(swift_con=swift_con, arch=arch)
> + logging.info("Found object in swift:\n%s" % str(result))
> + object_path_lst = result["name"].split("/")
> + object_path_lst = object_path_lst[:-1]
> + object = "/".join(object_path_lst)
> + container = "autopkgtest-" + LATEST
> + logging.info("container: %s\nobject: %s" % (container, object))
> + url = "%s/results/%s/%s/%s" % (
> + AUTOPKGTEST_SITE,
> + container,
> + object,
> + "artifacts.tar.gz",
> + )
> + logging.info("Results url: %s" % url)
> + r = requests.get(url)
> + if r.status_code == 200:
> + with open("/tmp/artifacts.tar.gz", "wb") as f:
> + f.write(r.content)
> + logging.info("Acquired results!")
> + file = tarfile.open("/tmp/artifacts.tar.gz")
> + file.extractall("/tmp/")
> + file.close()
> + with open("/tmp/exitcode", "r") as f:
> + code = f.read()
> + logging.info("code: %s" % str(code))
> + this_test_results["logfile-accessible"] = check_logfile_is_accessible(url)
> + this_test_results["test-passed"] = False
> + try:
> + if int(code) == 0:
> + this_test_results["test-passed"] = True
> + except (ValueError, TypeError):
> + pass
> + return this_test_results
> +
> +
> +if __name__ == "__main__":
> + logging.getLogger().setLevel(logging.INFO)
> + logging.info("getting trigger...")
> + trigger = get_trigger()
> + swift_creds = get_swift_creds()
> + swift_con = swiftclient.Connection(**swift_creds)
> + logging.info("got trigger: %s" % trigger)
> + results = {}
> + # I should also queue a test from a ppa
> + for arch in ARCHES:
> + results[arch] = {}
> + args = "%s%s -s %s -a %s --trigger=%s %s" % (
> + WORKING_DIR,
> + SCRIPT,
> + LATEST,
> + arch,
> + trigger,
> + PACKAGE,
> + )
> + if arch == "ppa":
> + args = args.replace("ppa", "amd64")
> + args += " --ppa %s" % PPA_NAME
> + logging.info(
> + "run-autopkgtest args:\n%s\nRunning autopkgtest..." % args
> + )
> + # submit the test
> + p = subprocess.run(args.split(" "), check=True)
> + test_info = {
> + PACKAGE: {
> + "triggers": [trigger],
> + }
> + }
> + in_queue = False
> + saved_item = ""
> +
> + logging.info("Checking running.json for test...")
> + # wait for the test to appear in running.json
> + # This needs a timeout I believe
> + start_time = datetime.datetime.now()
> + failed = False
> + is_running = False
> + saved_skey = ""
> + while not is_running and not failed:
> + loop_time = datetime.datetime.now()
> + duration = loop_time - start_time
> + if duration.total_seconds() > TIMEOUT:
> + failed = True
> + break
> + running = requests.get(RUNNING_PAGE)
> + running_json = json.loads(running.content)
> + for package, values in running_json.items():
> + if package == PACKAGE:
> + for skey, details in values.items():
> + num_triggers = len(test_info[PACKAGE]["triggers"])
> + ctr = 0
> + for trigger in test_info[PACKAGE]["triggers"]:
> + if trigger in skey:
> + ctr += 1
> + if ctr == num_triggers:
> + try:
> + this_arch = arch if arch != "ppa" else "amd64"
> + test_info[PACKAGE][
> + "submit-time"
> + ] = running_json[package][skey][LATEST][
> + this_arch
> + ][
> + 0
> + ][
> + "submit-time"
> + ]
> + except KeyError as _:
> + continue
> + saved_skey = skey
> + is_running = True
> + logging.info("Test found in running.json!")
> + logging.info("Waiting for test to leave running.json...")
> + # wait for the test to leave running.json
> + while is_running and not failed:
> + loop_time = datetime.datetime.now()
> + duration = loop_time - start_time
> + if duration.total_seconds() > TIMEOUT:
> + failed = True
> + break
> + running = requests.get(RUNNING_PAGE)
> + if saved_skey not in running.content.decode("utf-8"):
> + is_running = False
> + logging.info("Test has left running.json!")
> + logging.info("Getting results for test!")
> + if not failed:
> + results[arch] = check_result(swift_con, arch)
> + else:
> + results[arch] = False
> + logging.info("Results:\n%s" % json.dumps(results, indent=2))
> + # this needs changing
> + cp = configparser.ConfigParser()
> + cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
> + try:
> + webpage = cp["web"]["ExternalURL"].replace("/results", "")
> + except KeyError:
> + # change to logging maybe ?
> + print("No external url found!")
> + sys.exit(1)
> + keypass = ""
> + with open(INTEGRATION_KEY_FP, "r") as f:
> + keypass = f.read().rstrip()
> + post_me = {
> + "type": "cloud",
> + "source": socket.gethostname(),
> + "pass": keypass,
> + "test": __file__,
> + "results": results,
> + }
> + results_url = webpage + "/post-integration-results"
> + req = urllib.request.Request(results_url)
> + req.add_header("Content-Type", "application/json; charset=utf-8")
> + jsondata = json.dumps(post_me).encode("utf-8")
> + req.add_header("Content-Length", len(jsondata))
> + response = urllib.request.urlopen(req, jsondata)
> +
> + with open("/home/ubuntu/test-run-autopkgtest-results.json", "w") as f:
We shouldn't write to this filepath - we have too many config files at this location already. If we write anywhere at all, write to /tmp or /run (see the sketch after the quoted block); the results are preserved by the post-integration-results endpoint anyway.
> + f.write(json.dumps(results, indent=2))
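A minimal sketch of what I mean, assuming we keep a local copy at all (the filename here is my own invention):

    import json
    import tempfile

    # throwaway copy under /tmp instead of yet another file in /home/ubuntu
    with tempfile.NamedTemporaryFile(
        mode="w",
        prefix="test-run-autopkgtest-results-",
        suffix=".json",
        dir="/tmp",
        delete=False,
    ) as f:
        json.dump(results, f, indent=2)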
> + for arch, result in results.items():
> + for key, t_f in result.items():
> + if not t_f:
> + sys.exit(1)
> diff --git a/charms/focal/autopkgtest-web/webcontrol/browse.cgi b/charms/focal/autopkgtest-web/webcontrol/browse.cgi
> index 7355d6b..4437be8 100755
> --- a/charms/focal/autopkgtest-web/webcontrol/browse.cgi
> +++ b/charms/focal/autopkgtest-web/webcontrol/browse.cgi
> @@ -452,6 +457,100 @@ def testlist():
> return render("browse-testlist.html", indexed_pkgs=indexed_pkgs)
>
>
> +@app.route("/post-integration-results", methods=["POST"])
> +def handle_results():
> + # need to check authentication using password or something
> + global INTEGRATION_TEST_RES_FP
> + results = {}
> + if os.path.isfile(INTEGRATION_TEST_RES_FP):
> + with open(INTEGRATION_TEST_RES_FP, "r") as f:
> + results = json.load(f)
> + data = request.json
> +
> + # key check
> + keys = ["type", "source", "pass", "test", "results"]
> + # make this check more extensive ?
Turn this into a smart one-liner with all(); see the sketch after the quoted loop.
> + for k in keys:
> + if k not in data.keys():
> + return
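Roughly this (a sketch; rejecting with flask.abort(400) is my assumption, the current code just returns None):

    required = ("type", "source", "pass", "test", "results")
    if not all(key in data for key in required):
        flask.abort(400)  # bail out if any required key is missing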
> + if data["type"] not in ["cloud", "web"]:
> + return
> + # authenticate here
> + keypass = ""
> + if os.path.isfile(INTEGRATION_TESTS_PASS_FP):
> + with open(INTEGRATION_TESTS_PASS_FP, "r") as f:
> + keypass = f.read()
> + if data["pass"].rstrip() != keypass.rstrip():
> + return (
See if Flask has an inbuilt function for returning a 403; see the sketch after the quoted return. (Also, the header key here should be Content-Type, not ContentType.)
> + json.dumps({"success": False, "reason": "incorrect pass"}),
> + 403,
> + {"ContentType": "application/json"},
> + )
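It does: flask.abort(403) raises an HTTPException that Flask renders as a proper 403 response, and it accepts a description. A minimal sketch:

    if data["pass"].rstrip() != keypass.rstrip():
        flask.abort(403, description="incorrect pass")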
> +
> + if "cloud" not in results.keys():
> + results["cloud"] = {}
> + if "web" not in results.keys():
> + results["web"] = {}
> + if data["source"] not in results[data["type"]].keys():
> + results[data["type"]][data["source"]] = {}
> + results[data["type"]][data["source"]][data["test"]] = data["results"]
> + with open(INTEGRATION_TEST_RES_FP, "w") as f:
> + json.dump(results, f, indent=2)
> + return (
Same here as above - flask.jsonify would handle it; see the sketch after the quoted return.
> + json.dumps({"success": True}),
> + 200,
> + {"ContentType": "application/json"},
> + )
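A sketch with flask.jsonify, which sets the Content-Type header and the 200 status for us:

    return flask.jsonify({"success": True})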
> +
> + # results being posted
Remove these comments and put them into a docstring on handle_results; see the sketch after the quoted block.
> + # results = {
> + # "type": "cloud/web",
> + # "source": "machine-name",
> + # "pass": "pass",
> + # "test": "config-files/endpoints/test-run-autopkgtest",
> + # "results": {}, # <- json of actual test results
> + # }
> +
> + # results going out:
> + # results = {
> + # "cloud": {
> + # "machine1": {
> + # "test-name-1": {}, # <- results
> + # "test-name-2": {}, # <- results
> + # },
> + # "machine2": {
> + # "test-name-1": {}, # <- results
> + # "test-name-2": {}, # <- results
> + # }
> + # },
> + # "web" : {
> + # "machine1": {
> + # "test-name-1": {}, # <- results
> + # "test-name-2": {}, # <- results
> + # },
> + # "machine1": {
> + # "test-name-1": {}, # <- results
> + # "test-name-2": {}, # <- results
> + # },
> + # }
> + # }
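Sketch of the docstring form (same content, just relocated):

    def handle_results():
        """Receive and store integration-test results.

        Incoming POST body:
            {
                "type": "cloud" or "web",
                "source": "machine-name",
                "pass": "<integration key>",
                "test": "config-files/endpoints/test-run-autopkgtest",
                "results": {},  # json of actual test results
            }

        Results are stored on disk per type, source and test:
            {"cloud": {"machine-name": {"test-name": {}, ...}, ...},
             "web": {...}}
        """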
> +
> +
> +@app.route("/integration-test-results.json", methods=["GET"])
> +def get_integration_test_results():
> + global INTEGRATION_TEST_RES_FP
> + results = {}
> + if os.path.isfile(INTEGRATION_TEST_RES_FP):
> + with open(INTEGRATION_TEST_RES_FP, "r") as f:
> + results = json.load(f)
> + return flask.Response(
> + json.dumps(results, indent=2), mimetype="application/json"
> + )
> + return flask.Response(
> + json.dumps({}, indent=2), mimetype="application/json"
> + )
> +
> +
> @app.route("/statistics")
> def statistics():
> release_arches = get_release_arches()
> diff --git a/charms/focal/autopkgtest-web/webcontrol/endpoint-checker b/charms/focal/autopkgtest-web/webcontrol/endpoint-checker
> new file mode 100755
> index 0000000..e8994ed
> --- /dev/null
> +++ b/charms/focal/autopkgtest-web/webcontrol/endpoint-checker
Consider moving this to something like Nagios - but this script is also fine. We could also look to improve our integration with Landscape, and maybe use requests instead of urllib; see the sketch below.
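With requests, the per-endpoint check collapses to something like this (a sketch, assuming requests is installed on the unit):

    import requests

    try:
        resp = requests.get(
            webpage + endpoint,
            cookies={"SRVNAME": srvname_cookie},
            timeout=30,
        )
        resp.raise_for_status()
        if endpoint.endswith(".json"):
            resp.json()  # raises ValueError on invalid JSON
        RESULTS[srvname_cookie][endpoint] = True
        logging.info("Endpoint %s succeeded!" % endpoint)
    except (requests.RequestException, ValueError):
        RESULTS[srvname_cookie][endpoint] = False
        logging.info("Endpoint %s failed!" % endpoint)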
> @@ -0,0 +1,87 @@
> +#!/usr/bin/python3
> +
> +import configparser
> +import json
> +import logging
> +import os
> +import socket
> +import sys
> +import urllib.parse
> +import urllib.request
> +
> +ENDPOINTS = [
> + "/",
> + "/queues.json",
> + "/queued.json",
> + "/packages/gzip",
> + "/packages/gzip/noble/amd64",
> + "/running",
> + "/static/running.json",
> + "/queue_size.json",
> + "/testlist",
> + "/statistics",
> +]
> +RESULTS = {}
> +INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
> +
> +
> +if __name__ == "__main__":
> + logging.getLogger().setLevel(logging.INFO)
> + cp = configparser.ConfigParser()
> + cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
> + try:
> + webpage = cp["web"]["ExternalURL"].replace("/results", "")
> + except KeyError:
> + # change to logging maybe ?
> + print("No external url found!")
> + sys.exit(1)
> + try:
> + cookies = cp["web"]["cookies"]
> + except KeyError:
> + print("No cookies in config!")
> + sys.exit(1)
> + logging.info("Webpage: %s" % webpage)
> + logging.info("Cookies: %s" % cookies)
> +
> + for srvname_cookie in cookies.split(" "):
> + RESULTS[srvname_cookie] = {}
> + for endpoint in ENDPOINTS:
> + logging.info("Trying endpoint: %s" % endpoint)
> + try:
> + req = urllib.request.Request(webpage + endpoint)
> + req.add_header("Cookie", "SRVNAME=" + srvname_cookie)
> + output = urllib.request.urlopen(req).read()
> + if ".json" in endpoint:
> + try:
> + my_json = json.loads(output)
> + RESULTS[srvname_cookie][endpoint] = True
> + logging.info("Endpoint %s succeeded!")
> + except json.JSONDecodeError as e:
> + RESULTS[srvname_cookie][endpoint] = False
> + logging.info("Endpoint %s failed!")
> + else:
> + logging.info("Endpoint %s succeeded!")
> + RESULTS[srvname_cookie][endpoint] = True
> + except urllib.error.HTTPError as _:
> + logging.info("Endpoint %s failed!")
> + # log here
> + RESULTS[srvname_cookie][endpoint] = False
> + with open("/home/ubuntu/endpoint-checker-results.json", "w") as f:
> + f.write(json.dumps(RESULTS, indent=2))
> + keypass = ""
> + with open(INTEGRATION_KEY_FP, "r") as f:
> + keypass = f.read().rstrip()
> + post_me = {
> + "type": "web",
> + "source": socket.gethostname(),
> + "pass": keypass,
> + "test": __file__,
> + "results": RESULTS,
> + }
> + results_url = webpage + "/post-integration-results"
> + req = urllib.request.Request(results_url)
> + req.add_header("Content-Type", "application/json; charset=utf-8")
> + jsondata = json.dumps(post_me).encode("utf-8")
> + req.add_header("Content-Length", len(jsondata))
> + response = urllib.request.urlopen(req, jsondata)
> + logging.info("Results:\n%s" % json.dumps(RESULTS, indent=2))
> diff --git a/charms/focal/autopkgtest-web/webcontrol/test-request-cgi b/charms/focal/autopkgtest-web/webcontrol/test-request-cgi
> new file mode 100755
> index 0000000..e840168
> --- /dev/null
> +++ b/charms/focal/autopkgtest-web/webcontrol/test-request-cgi
This can *actually* be an integration test - it could be part of CI too, for testing static changes, and we could increase the frequency in the service file. The endpoint checker could also cover this, so maybe it should only go into CI.
> @@ -0,0 +1,193 @@
> +#!/usr/bin/python3
> +
> +import argparse
> +import configparser
> +import datetime
> +import json
> +import os
> +import socket
> +import sys
> +import urllib.parse
> +import urllib.request
> +
> +from distro_info import UbuntuDistroInfo
> +from selenium import webdriver
> +from selenium.webdriver.chrome.options import Options
> +
> +INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
> +UDI = UbuntuDistroInfo()
> +LATEST = UDI.supported()[-1]
> +# can maybe move this into a json file?
> +ARGS = {
> + "migration-reference-all-proposed-check": {
> + "args": [
> + "release=noble",
> + "arch=amd64",
> + "package=gzip",
> + "trigger=migration-reference/0",
> + "all-proposed=1",
> + ],
> + "expected-response": (
> + "migration-reference/0 and all-proposed=1 "
> + "are not compatible arguments."
> + ),
> + },
> + "invalid-release": {
> + "args": [
> + "release=bocal",
> + "arch=amd64",
> + "package=gzip",
> + "trigger=gzip/1.12-1ubuntu1",
> + ],
> + "expected-response": "release bocal not found",
> + },
> + "invalid-arch": {
> + "args": [
> + "release=noble",
> + "arch=amz64",
> + "package=gzip",
> + "trigger=gzip/1.12-1ubuntu1",
> + ],
> + "expected-response": "arch amz64 not found",
> + },
> + "invalid-ppa": {
> + "args": [
> + "release=noble",
> + "arch=amd64",
> + "package=gzip",
> + "trigger=gzip/1.12-1ubuntu1",
> + "ppa=andersson123/hell",
> + ],
> + "expected-response": "ppa andersson123/hell not found",
> + },
> + "invalid-package": {
> + "args": [
> + "release=noble",
> + "arch=amd64",
> + "package=gsip",
> + "trigger=gzip/1.12-1ubuntu1",
> + ],
> + "expected-response": "package gsip does not have any test results",
> + },
> + "ppa-migration-reference-0": {
> + "args": [
> + "release=noble",
> + "arch=amd64",
> + "package=gzip",
> + "trigger=migration-reference/0",
> + "ppa=andersson123/autopkgtest",
> + ],
> + "expected-response": "Cannot use PPAs with migration-reference/0",
> + },
> + "migration-reference-0-additional-triggers": {
> + "args": [
> + "release=noble",
> + "arch=amd64",
> + "package=gzip",
> + "trigger=gzip/1.12-1ubuntu1",
> + "trigger=migration-reference/0",
> + ],
> + "expected-response": "Cannot use additional triggers with migration-reference/0",
> + },
> + "malformed-trigger": {
> + "args": [
> + "release=noble",
> + "arch=amd64",
> + "package=gzip",
> + "trigger=gzip-1.12-1ubuntu1",
> + ],
> + "expected-response": "Malformed trigger, must be srcpackage/version",
> + },
> + "trigger-not-published-in-release": {
> + "args": [
> + "release=noble",
> + "arch=amd64",
> + "package=gzip",
> + "trigger=gzip/1.10-4ubuntu4.1",
> + ],
> + "expected-response": "gzip/1.10-4ubuntu4.1 is not published in noble",
> + },
> +}
> +
> +
> +def get_cookies(fp):
> + if os.path.isfile(fp):
> + with open(fp, "r") as cookie_file:
> + cookies = json.load(cookie_file)
> + return cookies
> +
> +
> +def check_cookies(cookies):
> + now = datetime.datetime.now()
> + for cookie in cookies:
> + cookie_eol = cookie["expires"]
> + if now.timestamp() > cookie_eol:
> + return False
> + return True
> +
> +
> +if __name__ == "__main__":
> + parser = argparse.ArgumentParser(
> + description="Test autopkgtest.ubuntu.com request.cgi"
> + )
> + parser.add_argument(
> + "--cookie-fp", "-c", default="/home/ubuntu/auth-cookies"
> + )
> + cp = configparser.ConfigParser()
> + cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
> +
> + try:
> + webpage = cp["web"]["ExternalURL"].replace("/results", "/request.cgi?")
> + except KeyError:
> + # change to logging maybe ?
> + print("No external url found!")
> + sys.exit(1)
> +
> + keypass = ""
> + with open(INTEGRATION_KEY_FP, "r") as f:
> + keypass = f.read().rstrip()
> +
> + args = parser.parse_args()
> + # Maybe we should check the cookies aren't outdated before running?
> + cookies = get_cookies(args.cookie_fp)
> + cookies_okay = check_cookies(cookies)
> + results = {}
> +
> + if cookies_okay:
> + chrome_options = Options()
> + chrome_options.add_argument("--no-sandbox")
> + chrome_options.add_argument("--headless")
> + chrome_options.add_argument("--disable-dev-shm-usage")
> +
> + browser = webdriver.Chrome(options=chrome_options)
> + browser.get(webpage)
> +
> + for cookie in cookies:
> + browser.add_cookie(cookie)
> + for key, val in ARGS.items():
> + print("Running " + key)
> + this_req = webpage + "&".join(val["args"])
> + browser.get(this_req)
> + html = browser.page_source
> + if val["expected-response"] in html:
> + results[key] = True
> + else:
> + results[key] = False
> + else:
> + results["null"] = "Cookies are out of date!"
> +
> + post_me = {
> + "type": "web",
> + "source": socket.gethostname(),
> + "pass": keypass,
> + "test": __file__,
> + "results": results,
> + }
> + results_url = webpage.replace("/request.cgi?", "/post-integration-results")
> + req = urllib.request.Request(results_url)
> + req.add_header("Content-Type", "application/json; charset=utf-8")
> + jsondata = json.dumps(post_me).encode("utf-8")
> + req.add_header("Content-Length", len(jsondata))
> + response = urllib.request.urlopen(req, jsondata)
> +
> + print(json.dumps(results, indent=2))
--
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/457239
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master.