
Re: [Merge] ~andersson123/autopkgtest-cloud:modify_seed_new_release into autopkgtest-cloud:master

 

Review: Needs Fixing

I think the sys.exit(1) will break this retry approach, as Python will just quit.
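
For reference, sys.exit() raises SystemExit, which derives from BaseException rather than Exception, so the new "except Exception" clause won't catch it either. A minimal standalone repro of the structure (simplified stand-ins for the real functions, not the actual script):

  import sys

  def copy_result():
      # stand-in for the real copy_result(): repeated swift failures
      # end in sys.exit(1)
      sys.exit(1)

  def attempt_seed_new_release():
      try:
          copy_result()
          return True
      except Exception:
          return False

  while True:
      if attempt_seed_new_release():
          break
      # never reached: SystemExit escapes the except clause above and
      # the interpreter quits with status 1 instead of retrying
      print('retrying...')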

More generally, I was hoping to find something better than just retrying everything. This approach is basically equivalent to doing

  retry ./seed-new-release ...

where retry(1) is from the 'retry' package; or

  until ./seed-new-release ...; do echo retrying; done

In other words, it's a big retry hammer, while I was hoping to better handle the actual failure we're hitting.

Maybe we should start by adding a dry-run option to the script, and then try to reproduce the failure in staging?
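
Roughly what I have in mind for the dry-run option (just a sketch; the --dry-run flag and its wiring are hypothetical and open to discussion):

  import argparse
  import logging

  logging.basicConfig(level=logging.INFO)

  ap = argparse.ArgumentParser()
  ap.add_argument('old_release')
  ap.add_argument('new_release')
  ap.add_argument('results_db', help='path to autopkgtest.db')
  # hypothetical flag, only logs what would be copied
  ap.add_argument('--dry-run', action='store_true',
                  help='log what would be copied without writing to swift')
  args = ap.parse_args()

  # and then at each copy site, something along the lines of:
  #   if args.dry_run:
  #       logging.info('dry-run: would copy %s', path)
  #   else:
  #       copy_result(path, args.old_release, args.new_release)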

Diff comments:

> diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/seed-new-release b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/seed-new-release
> index 8fdd1a3..58823d6 100755
> --- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/seed-new-release
> +++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/seed-new-release
> @@ -59,65 +64,81 @@ def copy_result(rel_path, old_release, new_release):
>                                     headers=headers_to_copy)
>              break
>          except (IOError, AttributeError, swiftclient.exceptions.ClientException) as e:
> -            print('Error connecting to swift, re-connecting in %is: %s' % (5 * retry, e))
> +            logging.info('Error connecting to swift, re-connecting in %is: %s' % (5 * retry, e))
>              time.sleep(5 * retry)
>              swift_con = connect_swift()
>      else:
> -        print('Repeated failure to connect to swift')
> +        logging.info('Repeated failure to connect to swift')
>          sys.exit(1)

If we end up here, I think the Python interpreter will just exit, and no retry will happen.

>  
> -
> -ap = argparse.ArgumentParser()
> -ap.add_argument('old_release')
> -ap.add_argument('new_release')
> -ap.add_argument('results_db', help='path to autopkgtest.db')
> -args = ap.parse_args()
> -
> -# connect to Swift
> -swift_con = connect_swift()
> -
> -# create new container
> -swift_con.put_container('autopkgtest-' + args.new_release,
> -                        headers={'X-Container-Read': '.rlistings,.r:*'})
> -
> -# read existing names (needs multiple batches)
> -existing = set()
> -last = ''
> -while True:
> -   print('Getting existing results starting with "%s"' % last)
> -   batch = [i['name'] for i in swift_con.get_container('autopkgtest-' + args.new_release, marker=last)[1]]
> -   if not batch:
> -      break
> -   last = batch[-1]
> -   existing.update(batch)
> -
> -# get passing result per package/arch from database
> -db_con = sqlite3.connect(args.results_db)
> -for (package, arch, run_id) in db_con.execute(
> -        "SELECT package, arch, MAX(run_id) "
> -        "FROM test, result "
> -        "WHERE test.id = result.test_id AND release = '%s' "
> -        "   AND (exitcode = 0 OR exitcode = 2 "
> -        "        OR triggers = 'migration-reference/0') "
> -        "GROUP BY package, arch" % args.old_release):
> -
> -    for file in 'artifacts.tar.gz', 'result.tar', 'log.gz':
> -        path = '/%s/%s/%s/%s/%s' % (arch, srchash(package), package, run_id, file)
> -        if args.new_release + path in existing:
> -            print('%s%s already exists, skipping' % (args.old_release, path))
> -            continue
> -        copy_result(path, args.old_release, args.new_release)
> -
> -for (package, arch, run_id) in db_con.execute(
> -        "SELECT package, arch, MAX(run_id) "
> -        "FROM test, result "
> -        "WHERE test.id = result.test_id AND release = '%s' "
> -        "   AND triggers = 'migration-reference/0' "
> -        "GROUP BY package, arch" % args.old_release):
> -
> -    for file in 'artifacts.tar.gz', 'result.tar', 'log.gz':
> -        path = '/%s/%s/%s/%s/%s' % (arch, srchash(package), package, run_id, file)
> -        if args.new_release + path in existing:
> -            print('%s%s already exists, skipping' % (args.old_release, path))
> -            continue
> -        copy_result(path, args.old_release, args.new_release)
> +def attempt_seed_new_release(args):
> +    try:
> +        # connect to Swift
> +        swift_con = connect_swift()
> +
> +        # create new container
> +        swift_con.put_container('autopkgtest-' + args.new_release,
> +                                headers={'X-Container-Read': '.rlistings,.r:*'})
> +
> +        # read existing names (needs multiple batches)
> +        existing = set()
> +        last = ''
> +        while True:
> +            logging.info('Getting existing results starting with "%s"' % last)
> +            batch = [i['name'] for i in swift_con.get_container('autopkgtest-' + \
> +                                                                args.new_release, marker=last)[1]]
> +            if not batch:
> +                break
> +            last = batch[-1]
> +            existing.update(batch)
> +
> +        # get passing result per package/arch from database
> +        db_con = sqlite3.connect(args.results_db)
> +        for (package, arch, run_id) in db_con.execute(
> +                "SELECT package, arch, MAX(run_id) "
> +                "FROM test, result "
> +                "WHERE test.id = result.test_id AND release = '%s' "
> +                "   AND (exitcode = 0 OR exitcode = 2 "
> +                "        OR triggers = 'migration-reference/0') "
> +                "GROUP BY package, arch" % args.old_release):
> +
> +            for file in 'artifacts.tar.gz', 'result.tar', 'log.gz':
> +                path = '/%s/%s/%s/%s/%s' % (arch, srchash(package), package, run_id, file)
> +                if args.new_release + path in existing:
> +                    logging.info('%s%s already exists, skipping' % (args.old_release, path))
> +                    continue
> +                copy_result(path, args.old_release, args.new_release)
> +
> +        for (package, arch, run_id) in db_con.execute(
> +                "SELECT package, arch, MAX(run_id) "
> +                "FROM test, result "
> +                "WHERE test.id = result.test_id AND release = '%s' "
> +                "   AND triggers = 'migration-reference/0' "
> +                "GROUP BY package, arch" % args.old_release):
> +
> +            for file in 'artifacts.tar.gz', 'result.tar', 'log.gz':
> +                path = '/%s/%s/%s/%s/%s' % (arch, srchash(package), package, run_id, file)
> +                if args.new_release + path in existing:
> +                    logging.info('%s%s already exists, skipping' % (args.old_release, path))
> +                    continue
> +                copy_result(path, args.old_release, args.new_release)
> +        return True
> +    except Exception as _:
> +        return False
> +
> +
> +def main(args):
> +    while True:
> +        if attempt_seed_new_release(args):
> +            logging.info("seed-new-release has succeeded, exiting...")
> +            return
> +        logging.info("seed-new-release failed, retrying...")
> +
> +
> +if __name__ == "__main__":
> +    ap = argparse.ArgumentParser()
> +    ap.add_argument('old_release')
> +    ap.add_argument('new_release')
> +    ap.add_argument('results_db', help='path to autopkgtest.db')
> +    args = ap.parse_args()
> +    main(args)


-- 
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/443597
Your team Canonical's Ubuntu QA is subscribed to branch autopkgtest-cloud:master.