
canonical-ubuntu-qa team mailing list archive

Re: [Merge] ~andersson123/autopkgtest-cloud:d-a-r-make-me-faster into autopkgtest-cloud:master

 


Diff comments:

> diff --git a/charms/focal/autopkgtest-web/webcontrol/download-all-results b/charms/focal/autopkgtest-web/webcontrol/download-all-results
> index 1af7918..485f6bd 100755
> --- a/charms/focal/autopkgtest-web/webcontrol/download-all-results
> +++ b/charms/focal/autopkgtest-web/webcontrol/download-all-results
> @@ -11,73 +11,58 @@
>  # script can be used to find any results which were missed and insert them.
>  
>  import configparser
> -import http
>  import io
> +import itertools
>  import json
>  import logging
>  import os
> -import random
>  import sqlite3
> -import sys
>  import tarfile
> -import time
>  import urllib.parse
> -from urllib.request import urlopen
>  
> +import amqplib.client_0_8 as amqp
> +import swiftclient
>  from distro_info import UbuntuDistroInfo
>  from helpers.utils import get_test_id, init_db
>  
>  LOGGER = logging.getLogger(__name__)
> +WRITER_EXCHANGE_NAME = "sqlite-write-me.fanout"
> +SWIFT_CREDS_FILE = "/home/ubuntu/public-swift-creds"
>  
>  config = None
>  db_con = None
> +amqp_con = None
>  
>  
> -def list_remote_container(container_url):
> -    LOGGER.debug("Listing container %s", container_url)
> -    out = []
> +def amqp_connect():
> +    """Connect to AMQP server"""
>  
> -    def get_batch(start=None):
> -        url = f"{container_url}/?format=json"
> -        if start is not None:
> -            url += f"&marker={urllib.parse.quote(start)}"
> -
> -        LOGGER.debug('Retrieving "%s"', url)
> -        for _ in range(5):
> -            try:
> -                resp = urlopen(url)
> -            except http.client.RemoteDisconnected:
> -                LOGGER.debug("Got disconnected, sleeping")
> -                time.sleep(5)
> -                continue
> -            else:
> -                break
> -        json_string = resp.read()
> -        json_data = json.loads(json_string)
> -
> -        if not json_data:
> -            return None
> -
> -        out.extend([e["name"] for e in json_data])
> -        name = out[-1]
> -
> -        return name
> +    cp = configparser.ConfigParser()
> +    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
> +    amqp_uri = cp["amqp"]["uri"]
> +    parts = urllib.parse.urlsplit(amqp_uri, allow_fragments=False)
> +    amqp_con = amqp.Connection(
> +        parts.hostname, userid=parts.username, password=parts.password
> +    )
> +    logging.info(
> +        "Connected to AMQP server at %s@%s" % (parts.username, parts.hostname)
> +    )
>  
> -    marker = get_batch()
> +    return amqp_con
>  
> -    while True:
> -        new_marker = get_batch(marker)
> -        if not new_marker or new_marker == marker:
> -            break
> -        marker = new_marker
>  
> -    out = [name for name in out if name.endswith("result.tar")]
> -    LOGGER.debug("Found %d items in %s", len(out), container_url)
> -    ret = {}
> -    for r in out:
> -        (_, _, _, _, run_id, _) = r.split("/")
> -        ret[run_id] = r
> -    return ret
> +# def list_remote_container(container_url):
> +def list_remote_container(container_name, swift_conn):
> +    LOGGER.debug("Listing container %s", container_name)
> +    _, object_list = swift_conn.get_container(
> +        container_name, full_listing=True
> +    )
> +    ret_me = {}
> +    for obj in object_list:

Designate these objects as test results instead.

> +        if "result.tar" in obj["name"]:

Change this to endswith().

> +            obj_splitname = obj["name"].split("/")

Don't use obj_splitname here; just take the [4] value directly, rename it to run_id, and maybe add a comment for clarity (see the sketch below).

> +            ret_me[obj_splitname[4]] = obj["name"]
> +    return ret_me
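
Roughly what I have in mind for this helper, as a sketch only (the test_result / run_id names are just suggestions):

    def list_remote_container(container_name, swift_conn):
        """Map run_id -> object name for every result tarball in the container."""
        LOGGER.debug("Listing container %s", container_name)
        _, object_list = swift_conn.get_container(
            container_name, full_listing=True
        )
        ret_me = {}
        for test_result in object_list:
            # only the result tarballs are of interest
            if not test_result["name"].endswith("result.tar"):
                continue
            # object names look like release/arch/.../src/run_id/result.tar,
            # so field [4] is the run_id
            run_id = test_result["name"].split("/")[4]
            ret_me[run_id] = test_result["name"]
        return ret_me
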
>  
>  
>  def list_our_results(release):
> @@ -91,29 +76,16 @@ def list_our_results(release):
>      return {run_id for (run_id,) in c.fetchall()}
>  
>  
> -def fetch_one_result(url):
> +def fetch_one_result(container_name, object_name, swift_conn):
>      """Download one result URL from swift and add it to the DB"""
> -    (release, arch, _, src, run_id, _) = url.split("/")[-6:]
> +    # modify this to use swiftclient too.

Remove these comments.

> +    # use public-swift-creds
> +    (release, arch, _, src, run_id, _) = object_name.split("/")
>      test_id = get_test_id(db_con, release, arch, src)
> -
> -    try:
> -        f = urlopen(url, timeout=30)
> -        if f.getcode() == 200:
> -            tar_bytes = io.BytesIO(f.read())
> -            f.close()
> -        else:
> -            raise NotImplementedError(
> -                "fetch_one_result(%s): cannot handle HTTP code %i"
> -                % (url, f.getcode())
> -            )
> -    except IOError as e:
> -        LOGGER.error("Failure to fetch %s: %s", url, str(e))
> -        # we tolerate "not found" (something went wrong on uploading the
> -        # result), but other things indicate infrastructure problems
> -        if hasattr(e, "code") and e.code == 404:  # pylint: disable=no-member
> -            return
> -        sys.exit(1)
> -
> +    # modify this to use swiftclient instead of urllib

Remove these comments.

> +    # look at update-github-jobs for help
> +    _, contents = swift_conn.get_object(container_name, object_name)
> +    tar_bytes = io.BytesIO(contents)
>      try:
>          with tarfile.open(None, "r", tar_bytes) as tar:
>              exitcode = int(tar.extractfile("exitcode").read().strip())
> @@ -128,14 +100,12 @@ def fetch_one_result(url):
>                      srcver = "%s unknown" % (src)
>                  else:
>                      raise
> -            (ressrc, ver) = srcver.split()
> +            (_, ver) = srcver.split()
>              testinfo = json.loads(
>                  tar.extractfile("testinfo.json").read().decode()
>              )
>              test_uuid = testinfo.get("uuid", "")
>              duration = int(tar.extractfile("duration").read().strip())
> -            # KeyError means the file is not there, i.e. there isn't a human

Restore this comment; it explains why the KeyError is tolerated.

> -            # requester
>              try:
>                  requester = (
>                      tar.extractfile("requester").read().decode().strip()
> @@ -182,63 +145,56 @@ def fetch_one_result(url):
>          if env in testinfo.keys():
>              env_vars.append(spec)
>  
> -    while True:
> -        try:
> -            with (
> -                db_con
> -            ):  # this starts a transaction, making sure we release the lock at the end
> -                c = db_con.cursor()
> -                c.execute(
> -                    "INSERT INTO result VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
> -                    (
> -                        test_id,
> -                        run_id,
> -                        ver,
> -                        test_triggers,
> -                        duration,
> -                        exitcode,
> -                        requester,
> -                        ",".join(env_vars),
> -                        test_uuid,
> -                    ),
> -                )
> -                db_con.commit()
> -            break
> -        except sqlite3.OperationalError as e:
> -            if "database is locked" in str(e):
> -                sleep_time = random.uniform(0.1, 2)
> -                LOGGER.info(
> -                    "database is currently locked, waiting %f seconds and trying again..."
> -                    % sleep_time
> -                )
> -                time.sleep(sleep_time)
> -            else:
> -                logging.info("insert operation failed with: %s" % str(e))
> -                break
> -        except sqlite3.IntegrityError:
> -            LOGGER.info("%s was already recorded - skipping", run_id)
> -            break
> +    # Insert the write request into the queue
> +    complete_amqp = amqp_con.channel()
> +    complete_amqp.access_request(
> +        "/complete", active=True, read=False, write=True
> +    )
> +    complete_amqp.exchange_declare(
> +        WRITER_EXCHANGE_NAME, "fanout", durable=True, auto_delete=False
> +    )
> +    write_me_msg = {
> +        "test_id": test_id,
> +        "run_id": run_id,
> +        "version": ver,
> +        "triggers": test_triggers,
> +        "duration": duration,
> +        "exitcode": exitcode,
> +        "requester": requester,
> +        "env": ",".join(env_vars),
> +        "uuid": test_uuid,
> +    }
> +    complete_amqp.basic_publish(
> +        amqp.Message(json.dumps(write_me_msg), delivery_mode=2),
> +        WRITER_EXCHANGE_NAME,
> +        "",
> +    )
>  
>  
> -def fetch_container(release, container_url):
> +def fetch_container(release, swift_conn):
>      """Download new results from a swift container"""
> +    container_name = "autopkgtest-" + release
>  
>      try:
>          our_results = list_our_results(release)
> -        known_results = list_remote_container(container_url)
> +        known_results = list_remote_container(container_name, swift_conn)
>  
> +        # the keys WERE the run_id, so need to fix dis

Remove this comment.

>          need_to_fetch = set(known_results.keys()) - our_results
>  
>          LOGGER.debug("Need to download %d items", len(need_to_fetch))
>  
>          for run_id in need_to_fetch:
>              fetch_one_result(
> -                os.path.join(container_url, known_results[run_id])
> +                container_name=container_name,
> +                object_name=known_results[run_id],
> +                swift_conn=swift_conn,
>              )
> -    except urllib.error.HTTPError as e:
> -        if e.code == 401 or e.code == 404:
> -            LOGGER.warning(f"Couldn't access {container_url} - doesn't exist?")
> -            return
> +    except swiftclient.ClientException as e:
> +        LOGGER.warning(

This should be LOGGER.error, not LOGGER.warning (see the sketch below).

> +            "Something went wrong accessing container %s\nTraceback: %s"
> +            % (container_name, str(e))
> +        )
>          raise
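
I.e. something along these lines (passing the arguments to the logger instead of %-formatting the string up front is also just a suggestion):

    except swiftclient.ClientException as e:
        LOGGER.error(
            "Something went wrong accessing container %s\nTraceback: %s",
            container_name,
            str(e),
        )
        raise
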
>  
>  
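
One general note while we're here: the hunks above take swift_conn as a parameter but don't show how it gets built from SWIFT_CREDS_FILE. For reference, this is roughly what I'd expect; the creds file format (shell-style KEY=VALUE lines with the usual OS_* variables), the helper name and the v3 auth settings are all assumptions on my part, not taken from the MP:

    import swiftclient

    def swift_connect(creds_file="/home/ubuntu/public-swift-creds"):
        """Build a swiftclient Connection from a KEY=VALUE credentials file."""
        creds = {}
        with open(creds_file) as f:
            for line in f:
                line = line.strip()
                if line.startswith("export "):
                    line = line[len("export "):]
                if not line or line.startswith("#") or "=" not in line:
                    continue
                key, _, value = line.partition("=")
                creds[key.strip()] = value.strip().strip('"')
        return swiftclient.Connection(
            authurl=creds["OS_AUTH_URL"],
            user=creds["OS_USERNAME"],
            key=creds["OS_PASSWORD"],
            os_options={
                "project_name": creds.get("OS_PROJECT_NAME"),
                "region_name": creds.get("OS_REGION_NAME"),
                "user_domain_name": creds.get("OS_USER_DOMAIN_NAME", "Default"),
                "project_domain_name": creds.get("OS_PROJECT_DOMAIN_NAME", "Default"),
            },
            auth_version="3",
        )
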


-- 
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/461146
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~andersson123/autopkgtest-cloud:d-a-r-make-me-faster into autopkgtest-cloud:master.


