canonical-ubuntu-qa team mailing list archive
Message #00382
[Merge] ~andersson123/autopkgtest-cloud:add_mirror_tester into autopkgtest-cloud:master
Tim Andersson has proposed merging ~andersson123/autopkgtest-cloud:add_mirror_tester into autopkgtest-cloud:master.
Commit message:
add script for testing connection quality to various archives
Requested reviews:
Brian Murray (brian-murray)
For more details, see:
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/443431
add script for testing connection quality to various archives
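
The new tools/test-mirrors script launches an lxc container for the target release, pings each candidate archive, and times an "apt-get source" download of a test package against each one, logging the collected statistics as JSON. Roughly, a run looks like this (the class does all of its work from its constructor, so invoking the script is enough; the values below are placeholders, only the output shape comes from the script):

    $ ./test-mirrors
    INFO:root:Archive statistics:
    {
        "gb.archive.ubuntu.com": {
            "ping_stats": "<summary line of 'ping -c 1'>",
            "download_speed_stats": "<seconds taken by 'apt-get source autopkgtest'>"
        },
        ...
    }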
--
Your team Canonical's Ubuntu QA is subscribed to branch autopkgtest-cloud:master.
diff --git a/.launchpad.yaml b/.launchpad.yaml
index f812f7e..cff0ccb 100755
--- a/.launchpad.yaml
+++ b/.launchpad.yaml
@@ -1,20 +1,9 @@
pipeline:
- - readthedocs_build
- lint_test
jobs:
lint_test:
series: focal
architectures: amd64
- packages: [pylint, python3, shellcheck]
- run: ./ci/lint_test
- readthedocs_build:
- series: focal
- packages: [python3-pip,
- python3,
- git,
- python3-requests,
- python3-git,
- yamllint]
- architectures: amd64
- run: python3 ./ci/trigger_readthedocs_build
+ packages: [pylint, python3, shellcheck, yamllint]
+ run: ./ci/lint_test -v
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/create-test-instances b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/create-test-instances
index f0b7125..c057ce8 100644
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/create-test-instances
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/create-test-instances
@@ -13,5 +13,5 @@ IMAGE=$(openstack image list | grep "adt/ubuntu-$DEVEL-$ARCH" | cut -d' ' -f2)
NET_ID=$(openstack network list | grep 'net_prod-proposed-migration' | cut -d' ' -f2)
for i in $(seq 1 10); do
- openstack server create --image $IMAGE --flavor cpu4-ram8-disk50 --nic net-id=$NET_ID -- "creation-test-$i"
+ openstack server create --image "${IMAGE}" --flavor cpu4-ram8-disk50 --nic net-id="${NET_ID}" -- "creation-test-${i}"
done
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/build-adt-image b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/build-adt-image
index be6b4d3..ca9097a 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/build-adt-image
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/build-adt-image
@@ -1,9 +1,11 @@
#!/bin/bash
# Build adt cloud images with create-nova-image-new-release for the given
# cloud, release and arch
+# shellcheck disable=SC1090
set -eu
+# shellcheck disable=SC2034
IFS="[- ]" read -r RELEASE REGION ARCH bootstrap <<< "$@"
if [ -z "${RELEASE}" ] || [ -z "${REGION}" ] || [ -z "${ARCH}" ]; then
@@ -12,8 +14,8 @@ if [ -z "${RELEASE}" ] || [ -z "${REGION}" ] || [ -z "${ARCH}" ]; then
fi
if [ -z "${MIRROR:-}" ]; then
- if [ -e ~/mirror-${REGION}.rc ]; then
- . ~/mirror-${REGION}.rc
+ if [ -e ~/mirror-"${REGION}".rc ]; then
+ . ~/mirror-"${REGION}".rc
else
. ~/mirror.rc
fi
@@ -24,10 +26,10 @@ export MIRROR
export NET_NAME
if [ -z "${USE_CLOUD_CONFIG_FROM_ENV:-}" ]; then
- if [ -e ~/cloudrcs/${REGION}-${ARCH}.rc ]; then
- . ~/cloudrcs/${REGION}-${ARCH}.rc
+ if [ -e ~/cloudrcs/"${REGION}"-"${ARCH}".rc ]; then
+ . ~/cloudrcs/"${REGION}"-"${ARCH}".rc
else
- . ~/cloudrcs/${REGION}.rc
+ . ~/cloudrcs/"${REGION}".rc
fi
fi
@@ -73,11 +75,12 @@ fi
echo "$REGION-$ARCH: using image $IMG"
KEYNAME=${KEYNAME:-testbed-$(hostname)}
-$(dirname $0)/create-nova-image-new-release $RELEASE $ARCH $IMG "${KEYNAME}" "$IMAGE_NAME"
+directory=$(dirname "${0}")
+"${directory}"/create-nova-image-new-release "${RELEASE}" "${ARCH}" "${IMG}" "${KEYNAME}" "${IMAGE_NAME}"
# clean old images
-openstack image list --private -f value | grep --color=none -v "$IMAGE_NAME" | while read id img state; do
- if $(echo ${img} | grep -qs "adt/ubuntu-${RELEASE}-${ARCH}") && [ ${state} = active ]; then
+openstack image list --private -f value | grep --color=none -v "$IMAGE_NAME" | while read -r id img state; do
+ if echo "${img}" | grep -qs "adt/ubuntu-${RELEASE}-${ARCH}" && [ "${state}" = active ]; then
echo "Cleaning up old image $img ($id)"
- openstack image delete $id
+ openstack image delete "${id}"
fi
done
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-instances b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-instances
index 14caca8..ba5a64a 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-instances
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-instances
@@ -1,14 +1,17 @@
#!/usr/bin/python3
-# clean up broken/orphaned instances
+'''
+clean up broken/orphaned instances
+'''
+# pylint: disable=import-error, invalid-name
import logging
import os
import re
import socket
import subprocess
import time
-from urllib.error import HTTPError
-import novaclient.client, novaclient.exceptions
+import novaclient.client
+import novaclient.exceptions
from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError
from keystoneauth1 import session
@@ -78,8 +81,8 @@ for instance in nova.servers.list():
time.strptime(instance.created, "%Y-%m-%dT%H:%M:%SZ")
)
logging.debug(
- "%s: status %s, age %is, networks %s"
- % (instance.name, instance.status, age, instance.networks)
+ "%s: status %s, age %is, networks %s",
+ instance.name, instance.status, age, instance.networks
)
# check state
@@ -88,10 +91,8 @@ for instance in nova.servers.list():
message = str(instance.fault)
except AttributeError:
message = "fault message not available"
- msg = "instance {} ({}) is in error state (message: {})".format(
- instance.name, instance.id, message
- )
- logging.warning("{}, deleting".format(msg))
+ msg = f"instance {instance.name} ({instance.id}) is in error state (message: {message})"
+ logging.warning("%s, deleting", msg)
measurements.append(
{
"measurement": "autopkgtest_delete_event",
@@ -108,7 +109,6 @@ for instance in nova.servers.list():
instance.delete()
except novaclient.exceptions.NotFound:
logging.warning("Couldn't delete instance: not found")
- pass
continue
if not instance.name.startswith("adt-"):
@@ -122,10 +122,9 @@ for instance in nova.servers.list():
# check age
if age > MAX_AGE:
- message = "instance {} ({}) is {:.1f} hours old, deleting".format(
- instance.name, instance.id, (float(age) / 3600)
- )
- logging.warning("{}, deleting".format(message))
+ message = f"instance {instance.name} ({instance.id}) is " + \
+ "{float(age) / 3600} hours old, deleting"
+ logging.warning("%s, deleting", message)
try:
instance.delete()
measurements.append(
@@ -142,26 +141,21 @@ for instance in nova.servers.list():
)
except novaclient.exceptions.NotFound:
logging.warning("Couldn't delete instance: not found")
- pass
-
# check matching adt-run process for instance name
try:
if (
- subprocess.call(
- [
- "pgrep",
- "-f",
- "python.*autopkgtest.* --name %s"
- % re.escape(instance.name),
- ],
- stdout=subprocess.PIPE,
- )
- != 0
+ subprocess.call(
+ [
+ "pgrep",
+ "-f",
+ f"python.*autopkgtest.* --name {re.escape(instance.name)}",
+ ],
+ stdout=subprocess.PIPE,
+ )
+ != 0
):
- message = "instance {} ({}) has no associated autopkgtest".format(
- instance.name, instance.id
- )
- logging.warning("{}, deleting".format(message))
+ message = f"instance {instance.name} ({instance.id}) has no associated autopkgtest"
+ logging.warning("%s, deleting", message)
try:
instance.delete()
measurements.append(
@@ -178,13 +172,11 @@ for instance in nova.servers.list():
)
except novaclient.exceptions.NotFound:
logging.warning("Couldn't delete instance: not found")
- pass
except IndexError:
- logging.warning("instance %s has invalid name" % instance.name)
+ logging.warning("instance %s has invalid name", instance.name)
if measurements and influx_client:
try:
influx_client.write_points(measurements)
except InfluxDBClientError as err:
- logging.warning("Write to InfluxDB failed: %s" % err)
- pass
+ logging.warning("Write to InfluxDB failed: %s", err)
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd
index 9b3d376..99a47f7 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/cleanup-lxd
@@ -1,4 +1,8 @@
#!/usr/bin/python3
+'''
+Cleans up old lxd containers in autopkgtest-cloud
+'''
+#pylint: disable=invalid-name
import glob
import json
import os
@@ -10,10 +14,12 @@ MINIMUM_AGE_MINS = 60
def parse_lxd_time(s):
+ '''Get the age of the lxd container'''
return datetime.datetime.fromisoformat(s.split(".")[0] + "+00:00")
def check_remote(remote):
+ '''Deletes containers that are too old'''
now = datetime.datetime.now(datetime.timezone.utc)
containers = json.loads(
subprocess.check_output(["lxc", "list", "-fjson", remote + ":"])
@@ -34,7 +40,7 @@ def check_remote(remote):
for container in containers[to_keep:]:
if now - parse_lxd_time(container["created_at"]) >= datetime.timedelta(
- minutes=MINIMUM_AGE_MINS
+ minutes=MINIMUM_AGE_MINS
):
print(f"{remote}:{container['name']} is old - deleting", file=sys.stderr)
subprocess.check_call(
@@ -43,6 +49,7 @@ def check_remote(remote):
def main():
+ '''Main wrapper function for the node'''
if not os.path.exists("/usr/bin/lxc"):
return 0
@@ -53,6 +60,7 @@ def main():
continue
check_remote(remote)
+ return 0
if __name__ == "__main__":
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/copy-security-group b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/copy-security-group
index 779a556..cb2fa7a 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/copy-security-group
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/copy-security-group
@@ -6,6 +6,7 @@ usage: copy-security-group [--source SOURCE] [--delete-only] NAME
Copies SOURCE to NAME, after deleting any existing groups called NAME.
If --delete-only is given, it only deletes existing groups called NAME.
"""
+# pylint: disable=invalid-name, import-error, broad-except
import os
import argparse
@@ -22,7 +23,7 @@ RULE_MEMBERS_IGNORE = ["id", "tags", "updated_at",
"project_id", "tenant_id", ]
-def main():
+if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Copy security groups')
parser.add_argument('name', metavar='NAME', help='name of security group')
parser.add_argument('--source', default='default',
@@ -33,71 +34,63 @@ def main():
# we get called from ExecStartPre of lxd units too (where
# copy-security-group isn't required), just bail out if that's the case
- if 'lxd' in args.name:
- return
-
- if os.environ.get('OS_IDENTITY_API_VERSION') == '3':
- auth = v3.Password(auth_url=os.environ['OS_AUTH_URL'],
- username=os.environ['OS_USERNAME'],
- password=os.environ['OS_PASSWORD'],
- project_name=os.environ['OS_PROJECT_NAME'],
- user_domain_name=os.environ['OS_USER_DOMAIN_NAME'],
- project_domain_name=os.environ['OS_PROJECT_DOMAIN_NAME'])
- else:
- auth = v2.Password(
- auth_url=os.environ['OS_AUTH_URL'],
- username=os.environ['OS_USERNAME'],
- password=os.environ['OS_PASSWORD'],
- tenant_name=os.environ['OS_TENANT_NAME'])
-
- sess = session.Session(auth=auth)
- neutron = client.Client(session=sess,
- tenant_name=os.environ.get("OS_TENANT_NAME"),
- region_name=os.environ["OS_REGION_NAME"])
-
- # Find the source group - crashes if it does not exists
- source = [g for g in neutron.list_security_groups()
- ['security_groups'] if g['name'] == args.source][0]
-
- description = "copy {} of {} ({})".format(args.name, args.source,
- source['description'])
-
- # Delete any existing group with the same name
- existing_groups = [g for g in
- neutron.list_security_groups()['security_groups']
- if g['name'] == args.name]
- existing_ports = neutron.list_ports()['ports']
- for target in existing_groups:
- print("Deleting existing group", target)
- for port in existing_ports:
- if target['id'] in port['security_groups']:
- print("Deleting port in group:", target['id'])
- try:
- neutron.delete_port(port['id'])
- except Exception as e:
- print("Could not delete port:", e)
- neutron.delete_security_group(target['id'])
-
- if not args.delete_only:
- print("Creating", description)
- target = neutron.create_security_group(
- {'security_group': {'name': args.name,
- 'description': description}}
- )["security_group"]
-
- for rule in target["security_group_rules"]:
- neutron.delete_security_group_rule(rule["id"])
-
- for rule in source["security_group_rules"]:
- rule = {k: v for k, v in rule.items()
- if v is not None and
- k not in RULE_MEMBERS_IGNORE}
-
- rule["security_group_id"] = target["id"]
-
- print("Copying rule", rule)
- neutron.create_security_group_rule({'security_group_rule': rule})
-
-
-if __name__ == '__main__':
- main()
+ if 'lxd' not in args.name:
+ if os.environ.get('OS_IDENTITY_API_VERSION') == '3':
+ auth = v3.Password(auth_url=os.environ['OS_AUTH_URL'],
+ username=os.environ['OS_USERNAME'],
+ password=os.environ['OS_PASSWORD'],
+ project_name=os.environ['OS_PROJECT_NAME'],
+ user_domain_name=os.environ['OS_USER_DOMAIN_NAME'],
+ project_domain_name=os.environ['OS_PROJECT_DOMAIN_NAME'])
+ else:
+ auth = v2.Password(
+ auth_url=os.environ['OS_AUTH_URL'],
+ username=os.environ['OS_USERNAME'],
+ password=os.environ['OS_PASSWORD'],
+ tenant_name=os.environ['OS_TENANT_NAME'])
+
+ sess = session.Session(auth=auth)
+ neutron = client.Client(session=sess,
+ tenant_name=os.environ.get("OS_TENANT_NAME"),
+ region_name=os.environ["OS_REGION_NAME"])
+
+ # Find the source group - crashes if it does not exists
+ source = [g for g in neutron.list_security_groups()
+ ['security_groups'] if g['name'] == args.source][0]
+
+ description = f'copy {args.name} of {args.source} ({source["description"]})'
+ # Delete any existing group with the same name
+ existing_groups = [g for g in
+ neutron.list_security_groups()['security_groups']
+ if g['name'] == args.name]
+ existing_ports = neutron.list_ports()['ports']
+ for target in existing_groups:
+ print("Deleting existing group", target)
+ for port in existing_ports:
+ if target['id'] in port['security_groups']:
+ print("Deleting port in group:", target['id'])
+ try:
+ neutron.delete_port(port['id'])
+ except Exception as e:
+ print("Could not delete port:", e)
+ neutron.delete_security_group(target['id'])
+
+ if not args.delete_only:
+ print("Creating", description)
+ target = neutron.create_security_group(
+ {'security_group': {'name': args.name,
+ 'description': description}}
+ )["security_group"]
+
+ for rule in target["security_group_rules"]:
+ neutron.delete_security_group_rule(rule["id"])
+
+ for rule in source["security_group_rules"]:
+ rule = {k: v for k, v in rule.items()
+ if v is not None and
+ k not in RULE_MEMBERS_IGNORE}
+
+ rule["security_group_id"] = target["id"]
+
+ print("Copying rule", rule)
+ neutron.create_security_group_rule({'security_group_rule': rule})
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-new-release b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-new-release
index 444d919..d010e93 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-new-release
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-new-release
@@ -1,6 +1,8 @@
#!/bin/bash
# create an autopkgtest nova image for a new release, based on a generic image
# Author: Martin Pitt <martin.pitt@xxxxxxxxxx>
+# shellcheck disable=SC2154
+# shellcheck disable=SC2034
set -eu
RELEASE="${1:-}"
ARCH="${2:-}"
@@ -48,9 +50,9 @@ else
fi
# unbreak my server option :-(
-userdata=`mktemp`
-trap "rm $userdata" EXIT TERM INT QUIT PIPE
-cat <<EOF > $userdata
+userdata=$(mktemp)
+trap 'rm ${userdata}' EXIT TERM INT QUIT PIPE
+cat <<EOF > "${userdata}"
#cloud-config
manage_etc_hosts: true
@@ -67,13 +69,13 @@ EOF
# create new instance
INSTNAME="${BASEIMG}-adt-prepare"
-eval "$(openstack network show -f shell ${NET_NAME})"
+eval "$(openstack network show -f shell "${NET_NAME}")"
-NET_ID=${id}
+NET_ID="${id}"
retries=20
while true; do
- eval "$(openstack server create -f shell --flavor m1.small --image $BASEIMG --user-data $userdata --key-name $KEYNAME --wait $INSTNAME --nic net-id=${NET_ID})"
+ eval "$(openstack server create -f shell --flavor m1.small --image "${BASEIMG}" --user-data "${userdata}" --key-name "${KEYNAME}" --wait "${INSTNAME}" --nic net-id="${NET_ID}")"
if openstack server show "${id}" >/dev/null 2>/dev/null; then
break
fi
@@ -90,27 +92,27 @@ done
SRVID="${id}"
-trap "openstack server delete ${SRVID}" EXIT TERM INT QUIT PIPE
+trap 'openstack server delete ${SRVID}' EXIT TERM INT QUIT PIPE
# determine IP address
-eval "$(openstack server show -f shell ${SRVID})"
-ipaddr=$(echo ${addresses} | awk 'BEGIN { FS="=" } { print $2 }')
+eval "$(openstack server show -f shell "${SRVID}")"
+ipaddr=$(echo "${addresses}" | awk 'BEGIN { FS="=" } { print $2 }')
SSH_CMD="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no ubuntu@$ipaddr"
echo "Waiting for ssh (may cause some error messages)..."
timeout 300 sh -c "while ! $SSH_CMD true; do sleep 5; done"
echo "Waiting until cloud-init is done..."
timeout 25m $SSH_CMD 'while [ ! -e /var/lib/cloud/instance/boot-finished ]; do sleep 1; done'
echo "Running setup script..."
-cat "${SETUP_TESTBED}" | $SSH_CMD "sudo env MIRROR='${MIRROR:-}' RELEASE='$RELEASE' sh -"
+"${SSH_CMD}" "sudo env MIRROR='${MIRROR:-}' RELEASE='$RELEASE' sh -" < "${SETUP_TESTBED}"
echo "Running Canonical setup script..."
-CANONICAL_SCRIPT=$(dirname $(dirname $(readlink -f $0)))/worker-config-production/setup-canonical.sh
-cat "$CANONICAL_SCRIPT" | $SSH_CMD "sudo env MIRROR='${MIRROR:-}' RELEASE='$RELEASE' sh -"
+CANONICAL_SCRIPT="$(dirname "$(dirname "$(readlink -f "${0}")")")"/worker-config-production/setup-canonical.sh
+"${SSH_CMD}" "sudo env MIRROR='${MIRROR:-}' RELEASE='$RELEASE' sh -" < "${CANONICAL_SCRIPT}"
arch=$($SSH_CMD dpkg --print-architecture)
echo "Check that the upgraded image boots..."
while true; do
@@ -138,10 +140,10 @@ $SSH_CMD sudo journalctl --rotate --vacuum-time=12h || true
echo "Powering off to get a clean file system..."
$SSH_CMD sudo poweroff || true
-eval "$(openstack server show -f shell ${SRVID})"
-while [ ${os_ext_sts_vm_state} != "stopped" ]; do
+eval "$(openstack server show -f shell "${SRVID}")"
+while [ "${os_ext_sts_vm_state}" != "stopped" ]; do
sleep 1
- eval "$(openstack server show -f shell ${SRVID})"
+ eval "$(openstack server show -f shell "${SRVID}")"
done
echo "Creating image $IMAGE_NAME ..."
@@ -155,8 +157,8 @@ while true; do
while [ $inner_retries -gt 0 ]; do
# server image create often loses its connection but it's actually
# working - if the image is uploading, wait a bit for it to finish
- eval $(openstack image show -f shell --prefix=image_ "${IMAGE_NAME}")
- eval $(openstack server show -f shell --prefix=server_ "${SRVID}")
+ eval "$(openstack image show -f shell --prefix=image_ "${IMAGE_NAME}")"
+ eval "$(openstack server show -f shell --prefix=server_ "${SRVID}")"
if [ "${server_os_ext_sts_task_state}" = "image_uploading" ] ||
[ "${image_status}" = "saving" ]; then
echo "image ${IMAGE_NAME} is uploading, waiting..." >&2
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-with-proposed-package b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-with-proposed-package
index d20bbc2..60d1bdd 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-with-proposed-package
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-nova-image-with-proposed-package
@@ -1,28 +1,31 @@
#!/usr/bin/python3
-# Create a nova image from an existing one with all installed binaries from a
-# given source package upgraded to -proposed.
-# Author: Martin Pitt <martin.pitt@xxxxxxxxxx>
-#
-# Usage: create-nova-image-with-proposed-package <image RE> <proposed source package name>
-# <image RE> is a regexp that matches a *substring* of the image name; of all
-# available active matching ones the latest one (by creation date) is taken
-# as a base image.
-#
-# <source> is the source package name to test. All of its binaries which have
-# a newer version available (usually from enabling -proposed) will be
-# updated.
-#
-# This creates a new image proposed-<source>/<original image base name>.
+'''
+Create a nova image from an existing one with all installed binaries from a
+given source package upgraded to -proposed.
+Author: Martin Pitt <martin.pitt@xxxxxxxxxx>
+
+Usage: create-nova-image-with-proposed-package <image RE> <proposed source package name>
+ <image RE> is a regexp that matches a *substring* of the image name; of all
+ available active matching ones the latest one (by creation date) is taken
+ as a base image.
+
+ <source> is the source package name to test. All of its binaries which have
+ a newer version available (usually from enabling -proposed) will be
+ updated.
+
+This creates a new image proposed-<source>/<original image base name>.
+'''
+# pylint: disable=anomalous-backslash-in-string, invalid-name, import-error, consider-using-f-string, consider-using-with, bad-option-value
import sys
import os
-import keystoneauth1.loading
-import glanceclient
-from glanceclient.common import utils
import re
import tempfile
import subprocess
+import keystoneauth1.loading
+import glanceclient
+from glanceclient.common import utils
def get_glance():
'''Return glance client object'''
@@ -39,20 +42,26 @@ def get_glance():
def find_latest_image(img_re):
'''find latest image that matches given RE'''
- latest = None
- for img in glance.images.list():
- if img.status == 'active' and image_re.search(img.name):
- if latest is None or img.created_at > latest.created_at:
- latest = img
- if not latest:
- sys.stderr.write('No image matched "%s"\n' % sys.argv[1])
+ latest_image = None
+ for image in glance.images.list():
+        if image.status == 'active' and img_re.search(image.name):
+ if latest_image is None or image.created_at > latest_image.created_at:
+ latest_image = image
+ if not latest_image:
+ sys.stderr.write(f'No image matched "{sys.argv[1]}"\n')
sys.exit(1)
- return latest
-
-
-def setup_image(image_path, source):
- # get a chroot shell into the image
- img_shell = subprocess.Popen(['sudo', '-n', 'mount-image-callback', '--system-mounts', '--system-resolvconf',
+ return latest_image
+
+
+def setup_image(image_path, binary_source):
+ '''
+ get a chroot shell into the image
+ '''
+ img_shell = subprocess.Popen(['sudo',
+ '-n',
+ 'mount-image-callback',
+ '--system-mounts',
+ '--system-resolvconf',
image_path, 'chroot', '_MOUNTPOINT_', '/bin/sh'],
stdin=subprocess.PIPE)
@@ -79,34 +88,33 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y $SRCS
echo '* Cleaning up'
apt-get clean
rm -f /etc/machine-id /usr/sbin/policy-rc.d
- ''' % {'src': source}).encode())
+ ''' % {'src': binary_source}).encode())
img_shell.stdin.close()
img_shell.wait()
-#
-# main
-#
-
-if len(sys.argv) != 3:
- sys.stderr.write('Usage: %s <image RE> <proposed source package name>\n' % sys.argv[0])
- sys.exit(1)
-
-image_re = re.compile(sys.argv[1])
-source = sys.argv[2]
-glance = get_glance()
-latest = find_latest_image(image_re)
-
-print('* Downloading image %s (UUID: %s)...' % (latest.name, latest.id))
-workdir = tempfile.TemporaryDirectory(prefix='make-image-with-proposed-package.')
-img = os.path.join(workdir.name, 'image')
-utils.save_image(glance.images.data(latest.id), img)
-
-setup_image(img, source)
+if __name__ == "__main__":
+ if len(sys.argv) != 3:
+ sys.stderr.write('Usage: %s <image RE> <proposed source package name>\n' % sys.argv[0])
+ sys.exit(1)
-newimg_name = 'proposed-%s/%s' % (source, os.path.basename(latest.name))
-newimg = glance.images.create(name=newimg_name, disk_format=latest.disk_format, container_format=latest.container_format)
-print('* Uploading new image %s (UUID: %s)...' % (newimg.name, newimg.id))
-with open(img, 'rb') as f:
- glance.images.upload(newimg.id, f)
+ image_re = re.compile(sys.argv[1])
+ source = sys.argv[2]
+ glance = get_glance()
+ latest = find_latest_image(image_re)
+
+ print('* Downloading image %s (UUID: %s)...' % (latest.name, latest.id))
+ workdir = tempfile.TemporaryDirectory(prefix='make-image-with-proposed-package.')
+ img = os.path.join(workdir.name, 'image')
+ utils.save_image(glance.images.data(latest.id), img)
+
+ setup_image(img, source)
+
+ newimg_name = 'proposed-%s/%s' % (source, os.path.basename(latest.name))
+ newimg = glance.images.create(name=newimg_name,
+ disk_format=latest.disk_format,
+ container_format=latest.container_format)
+ print('* Uploading new image %s (UUID: %s)...' % (newimg.name, newimg.id))
+ with open(img, 'rb') as f:
+ glance.images.upload(newimg.id, f)
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/ensure-keypair b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/ensure-keypair
index be664d6..31b7d32 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/ensure-keypair
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/ensure-keypair
@@ -2,12 +2,14 @@
set -eu
+# shellcheck disable=SC2034
IFS="[- ]" read -r RELEASE REGION ARCH bootstrap <<< "$@"
-if [ -e ~/cloudrcs/${REGION}-${ARCH}.rc ]; then
- . ~/cloudrcs/${REGION}-${ARCH}.rc
+# shellcheck disable=SC1090
+if [ -e ~/cloudrcs/"${REGION}"-"${ARCH}".rc ]; then
+ . ~/cloudrcs/"${REGION}"-"${ARCH}".rc
else
- . ~/cloudrcs/${REGION}.rc
+ . ~/cloudrcs/"${REGION}".rc
fi
if ! [ -e "${HOME}/.ssh/id_rsa" ]; then
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/exec-in-region b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/exec-in-region
index 0261108..2e78e83 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/exec-in-region
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/exec-in-region
@@ -1,5 +1,6 @@
#!/bin/sh
# usage: exec-in-region <region name> <command> <argument>...
+# shellcheck disable=SC1090
set -e
@@ -25,7 +26,7 @@ export REGION
if [ "${REGION#lxd-}" != "$REGION" ]; then
LXD_ARCH=${REGION#*-}; LXD_ARCH=${LXD_ARCH%%-*}
else
- . ${HOME}/cloudrcs/${REGION}.rc
+ . "${HOME}"/cloudrcs/"${REGION}".rc
fi
exec "$@"
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp
index f6dda5a..cc7255f 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp
@@ -1,15 +1,20 @@
#!/usr/bin/python3
-# Filter out AMQP requests that match a given regex
+'''
+Filter out AMQP requests that match a given regex
+'''
+# pylint: disable=invalid-name, import-error, deprecated-module
import logging
import optparse
-import sys
import re
import urllib.parse
import amqplib.client_0_8 as amqp
def filter_amqp(options, host, queue_name, regex):
+ '''
+ Checks amqp queue for strings with a given regex
+ '''
url_parts = urllib.parse.urlsplit(host, allow_fragments=False)
filter_re = re.compile(regex.encode('UTF-8'))
amqp_con = amqp.Connection(url_parts.hostname, userid=url_parts.username,
@@ -33,7 +38,7 @@ def filter_amqp(options, host, queue_name, regex):
ch.basic_ack(r.delivery_tag)
-def main():
+if __name__ == "__main__":
parser = optparse.OptionParser(
usage="usage: %prog [options] amqp://user:pass@host queue_name regex")
parser.add_option(
@@ -52,6 +57,3 @@ def main():
parser.error("Need to specify host, queue and regex")
filter_amqp(opts, args[0], args[1], args[2])
-
-if __name__ == '__main__':
- main()
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp-dupes-upstream b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp-dupes-upstream
index 965fa10..f90293d 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp-dupes-upstream
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/filter-amqp-dupes-upstream
@@ -1,18 +1,19 @@
#!/usr/bin/python3
-# Filter out all but the latest request for a given upstream PR
+'''
+Filter out all but the latest request for a given upstream PR
+'''
+# pylint: disable=invalid-name, deprecated-module, import-error, no-member, too-many-locals, logging-not-lazy
-import dateutil.parser
-import distro_info
import json
import logging
import optparse
import os
-import sys
-import re
import urllib.parse
+from collections import defaultdict
+import distro_info
+import dateutil.parser
import amqplib.client_0_8 as amqp
-from collections import defaultdict
UDI = distro_info.UbuntuDistroInfo()
ALL_UBUNTU_RELEASES = UDI.all
@@ -21,12 +22,14 @@ SUPPORTED_UBUNTU_RELEASES = sorted(
)
def filter_amqp(options, host):
+ '''Filters the contents of the amqp queue'''
url_parts = urllib.parse.urlsplit(host, allow_fragments=False)
amqp_con = amqp.Connection(url_parts.hostname, userid=url_parts.username,
password=url_parts.password)
dry_run = '[dry-run] ' if options.dry_run else ''
- queues = (f'debci-upstream-{release}-{arch}' for release in SUPPORTED_UBUNTU_RELEASES for arch in ('amd64', 'arm64', 'armhf', 'i386', 'ppc64el', 's390x'))
+ queues = (f'debci-upstream-{release}-{arch}' for release in SUPPORTED_UBUNTU_RELEASES \
+ for arch in ('amd64', 'arm64', 'armhf', 'i386', 'ppc64el', 's390x'))
for queue_name in queues:
ch = amqp_con.channel()
logging.debug('Looking at %s', queue_name)
@@ -38,7 +41,7 @@ def filter_amqp(options, host):
(code, _, _, _) = e.args
if code != 404:
raise
- logging.debug(f'No such queue {queue_name}')
+ logging.debug('No such queue %s', queue_name)
break
if r is None:
break
@@ -49,22 +52,25 @@ def filter_amqp(options, host):
(pkg, params) = body.split(' ', 1)
params_j = json.loads(params)
submit_time = dateutil.parser.parse(params_j['submit-time'])
- pr = [val.split('=', 1)[1] for val in params_j['env'] if val.startswith('UPSTREAM_PULL_REQUEST')][0]
+ pr = [val.split('=', 1)[1] for val in params_j['env'] \
+ if val.startswith('UPSTREAM_PULL_REQUEST')][0]
try:
(delivery_tag, old_submit_time) = seen[pkg][pr]
if old_submit_time <= submit_time:
- logging.info(f'{dry_run}We have seen PR {pr} in {queue_name} before: acking the previous request')
+ logging.info('%sWe have seen PR %s ' + \
+ 'in %s before: acking the previous request',
+ dry_run, pr, queue_name)
if not options.dry_run:
ch.basic_ack(delivery_tag) # delivery tag, the old one NOT r.delivery_tag!
del seen[pkg][pr]
except KeyError:
pass
finally:
- logging.debug(f'Recording {pkg}/{pr} for {queue_name}')
+ logging.debug('Recording %s/%s for %s', pkg, pr, queue_name)
seen[pkg][pr] = (r.delivery_tag, submit_time)
-def main():
+if __name__ == "__main__":
parser = optparse.OptionParser(
usage="usage: %prog [options] amqp://user:pass@host queue_name regex")
parser.add_option(
@@ -81,9 +87,6 @@ def main():
user = os.environ['RABBIT_USER']
password = os.environ['RABBIT_PASSWORD']
- host = os.environ['RABBIT_HOST']
- uri = f'amqp://{user}:{password}@{host}'
+ hostname = os.environ['RABBIT_HOST']
+ uri = f'amqp://{user}:{password}@{hostname}'
filter_amqp(opts, uri)
-
-if __name__ == '__main__':
- main()
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/metrics b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/metrics
index e7c552f..403b3fe 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/metrics
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/metrics
@@ -1,12 +1,16 @@
#!/usr/bin/python3
-
-from gi.repository import GLib, Gio
-from influxdb import InfluxDBClient
+'''
+Writes metrics to influxdb database
+'''
+# pylint: disable=fixme
import json
import os
import subprocess
+from gi.repository import GLib, Gio
+from influxdb import InfluxDBClient
+
SYSTEM_BUS = Gio.bus_get_sync(Gio.BusType.SYSTEM)
INFLUXDB_CONTEXT = os.environ["INFLUXDB_CONTEXT"]
@@ -18,10 +22,13 @@ INFLUXDB_USERNAME = os.environ["INFLUXDB_USERNAME"]
def make_submission(counts, measurement):
+ '''
+ makes submission request based on containers and units
+ '''
out = []
for arch in counts:
(active, error) = counts[arch]
- m = {
+ measure = {
"measurement": measurement,
"fields": {"count": active},
"tags": {
@@ -30,8 +37,8 @@ def make_submission(counts, measurement):
"instance": INFLUXDB_CONTEXT,
},
}
- out.append(m)
- m = {
+ out.append(measure)
+ measure = {
"measurement": measurement,
"fields": {"count": error},
"tags": {
@@ -40,11 +47,14 @@ def make_submission(counts, measurement):
"instance": INFLUXDB_CONTEXT,
},
}
- out.append(m)
+ out.append(measure)
return out
def get_units():
+ '''
+ gets units in autopkgtest-cloud env - for unit definition, see juju docs
+ '''
counts = {}
(units,) = SYSTEM_BUS.call_sync(
@@ -84,10 +94,10 @@ def get_units():
continue
try:
- (region, arch, n) = name_cloud.split("-", -1)
+ (_, arch, _) = name_cloud.split("-", -1)
except ValueError:
# autopkgtest@lcy01-1.service
- (region, n) = name_cloud.split("-", -1)
+ (_, _) = name_cloud.split("-", -1)
arch = "amd64"
(active, error) = counts.setdefault(arch, (0, 0))
@@ -101,17 +111,20 @@ def get_units():
def get_remotes():
+ '''
+ Gets list of remote containers in autopkgtest-cloud
+ '''
cluster_counts = {}
noncluster_counts = {}
out = subprocess.check_output(
["lxc", "remote", "list", "--format=json"], universal_newlines=True
)
- for r in json.loads(out):
- if not r.startswith("lxd"):
+ for req in json.loads(out):
+ if not req.startswith("lxd"):
continue
- (_, arch, ip) = r.split("-", 3)
+ (_, arch, _) = req.split("-", 3)
(cluster_active, cluster_error) = cluster_counts.setdefault(
arch, (0, 0)
)
@@ -120,12 +133,12 @@ def get_remotes():
)
try:
- cl = subprocess.check_output(
- ["lxc", "cluster", "list", f"{r}:", "--format=json"],
+ cluster_list = subprocess.check_output(
+ ["lxc", "cluster", "list", f"{req}:", "--format=json"],
stderr=subprocess.DEVNULL,
universal_newlines=True,
)
- for node in json.loads(cl):
+ for node in json.loads(cluster_list):
if node["status"] == "Online":
cluster_active += 1
else:
@@ -134,7 +147,7 @@ def get_remotes():
except subprocess.CalledProcessError: # it's not a cluster node
try:
subprocess.check_call(
- ["lxc", "list", f"{r}:"],
+ ["lxc", "list", f"{req}:"],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
timeout=30,
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/retry-github-test b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/retry-github-test
index e47ecc2..4afe672 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/retry-github-test
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/retry-github-test
@@ -1,5 +1,6 @@
#!/usr/bin/python3
'''Retry a GitHub PR test request to autopkgtest.ubuntu.com'''
+# pylint: disable=invalid-name
import os
import sys
@@ -13,7 +14,8 @@ p = argparse.ArgumentParser(description='Retry a GitHub PR test request to autop
p.add_argument('pr_api_url',
help='GitHub PR API URL (e. g. https://api.github.com/repos/JoeDev/coolproj/pulls/1')
p.add_argument('test_url',
- help='autopkgtest URL (https://autopkgtest.ubuntu.com/request.cgi?release=xenial&arch=i386&...)')
+ help='autopkgtest URL (https://autopkgtest.ubuntu.com/' + \
+ 'request.cgi?release=xenial&arch=i386&...)')
p.add_argument('secret_file', type=argparse.FileType('rb'),
help='Path to the GitHub secret for this test web hook')
args = p.parse_args()
@@ -35,6 +37,6 @@ try:
with urllib.request.urlopen(req) as f:
print(f.read().decode())
except urllib.error.HTTPError as e:
- sys.stderr.write('Request failed with code %i: %s' % (e.code, e.msg))
+ sys.stderr.write(f'Request failed with code {e.code}: {e.msg}')
sys.stderr.write(e.fp.read().decode('UTF-8', 'replace'))
sys.exit(1)
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/run-autopkgtest b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/run-autopkgtest
index a09ad42..ca19be2 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/run-autopkgtest
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/run-autopkgtest
@@ -1,7 +1,10 @@
#!/usr/bin/python3
-# Request runs of autopkgtests for packages
-# Imported from lp:ubuntu-archive-scripts, lightly modified to not rely on a
-# britney config file, to be used for administration or testing.
+'''
+Request runs of autopkgtests for packages
+Imported from lp:ubuntu-archive-scripts, lightly modified to not rely on a
+britney config file, to be used for administration or testing.
+'''
+# pylint: disable=invalid-name, import-error
from datetime import datetime
import os
@@ -122,52 +125,52 @@ def parse_args():
# verify syntax of triggers
for t in args.trigger:
try:
- (src, ver) = t.split("/")
+ (_, _) = t.split("/")
except ValueError:
parser.error(
- 'Invalid trigger format "%s", must be "sourcepkg/version"' % t
+ f'Invalid trigger format "{t}", must be "sourcepkg/version"'
)
# verify syntax of PPAs
for t in args.ppa:
try:
- (user, name) = t.split("/")
+ (_, _) = t.split("/")
except ValueError:
parser.error(
- 'Invalid ppa format "%s", must be "lpuser/ppaname"' % t
+ f'Invalid ppa format "{t}", must be "lpuser/ppaname"'
)
return args
if __name__ == "__main__":
- args = parse_args()
+ arguments = parse_args()
context = ""
params = {}
- if args.bulk:
+ if arguments.bulk:
context = "huge-"
- if args.trigger:
- params["triggers"] = args.trigger
- if args.ppa:
- params["ppas"] = args.ppa
+ if arguments.trigger:
+ params["triggers"] = arguments.trigger
+ if arguments.ppa:
+ params["ppas"] = arguments.ppa
context = "ppa-"
- if args.env:
- params["env"] = args.env
- if args.test_git:
- params["test-git"] = args.test_git
+ if arguments.env:
+ params["env"] = arguments.env
+ if arguments.test_git:
+ params["test-git"] = arguments.test_git
context = "upstream-"
- elif args.build_git:
- params["build-git"] = args.build_git
+ elif arguments.build_git:
+ params["build-git"] = arguments.build_git
context = "upstream-"
- if args.test_bzr:
- params["test-bzr"] = args.test_bzr
+ if arguments.test_bzr:
+ params["test-bzr"] = arguments.test_bzr
context = "upstream-"
- if args.swiftuser:
- params["swiftuser"] = args.swiftuser
- if args.readable_by:
- params["readable-by"] = args.readable_by
- if args.all_proposed:
+ if arguments.swiftuser:
+ params["swiftuser"] = arguments.swiftuser
+ if arguments.readable_by:
+ params["readable-by"] = arguments.readable_by
+ if arguments.all_proposed:
params["all-proposed"] = True
try:
params["requester"] = os.environ["SUDO_USER"]
@@ -180,37 +183,34 @@ if __name__ == "__main__":
try:
creds = urllib.parse.urlsplit(
- "amqp://{user}:{password}@{host}".format(
- user=os.environ["RABBIT_USER"],
- password=os.environ["RABBIT_PASSWORD"],
- host=os.environ["RABBIT_HOST"],
- ),
+ f'amqp://{os.environ["RABBIT_USER"]}:' + \
+ f'{os.environ["RABBIT_PASSWORD"]}@' + \
+ f'{os.environ["RABBIT_HOST"]}',
allow_fragments=False,
)
except KeyError:
- with open(os.path.expanduser("~/rabbitmq.cred"), "r") as f:
+ with open(os.path.expanduser("~/rabbitmq.cred"), "r", encoding='utf-8') as f:
env_dict = dict(
tuple(line.replace("\n", "").replace('"', "").split("="))
for line in f.readlines()
if not line.startswith("#")
)
creds = urllib.parse.urlsplit(
- "amqp://{user}:{password}@{host}".format(
- user=env_dict["RABBIT_USER"],
- password=env_dict["RABBIT_PASSWORD"],
- host=env_dict["RABBIT_HOST"],
- ),
+ f'amqp://{env_dict["RABBIT_USER"]}:' + \
+ f'{env_dict["RABBIT_PASSWORD"]}@' + \
+ f'{env_dict["RABBIT_HOST"]}',
allow_fragments=False,
)
assert creds.scheme == "amqp"
with amqp.Connection(
- creds.hostname, userid=creds.username, password=creds.password
+ creds.hostname, userid=creds.username, password=creds.password
) as amqp_con:
with amqp_con.channel() as ch:
- for arch in args.architecture:
- queue = "debci-%s%s-%s" % (context, args.series, arch)
- for pkg in args.package:
+ for arch in arguments.architecture:
+                queue = f'debci-{context}{arguments.series}-{arch}'
+ for pkg in arguments.package:
ch.basic_publish(
amqp.Message(
pkg + params, delivery_mode=2
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-mirrors b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-mirrors
new file mode 100755
index 0000000..1d1ee73
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-mirrors
@@ -0,0 +1,134 @@
+#!/usr/bin/python3
+import subprocess
+import logging
+import time
+import json
+
+
+logging.basicConfig(level=logging.DEBUG)
+
+
+class archiveTester():
+ def __init__(self,
+ container_name="test-mirror-container",
+ release="lunar",
+ # package_to_test="nvidia-graphics-drivers-525",
+ package_to_test="autopkgtest",
+ archives=None):
+        self.container_name = container_name + "-" + release
+ self.pkg_to_test = package_to_test
+ self.archives = []
+ self.release = release
+
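+        # sources.list template; download_package() substitutes $ARCHIVE and $RELEASE for each archive under test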
+ self.sources_file = '''
+deb http://$ARCHIVE/ubuntu $RELEASE main restricted
+deb http://$ARCHIVE/ubuntu $RELEASE-updates main restricted
+deb http://$ARCHIVE/ubuntu $RELEASE universe
+deb http://$ARCHIVE/ubuntu $RELEASE-updates universe
+deb http://$ARCHIVE/ubuntu $RELEASE multiverse
+deb http://$ARCHIVE/ubuntu $RELEASE-updates multiverse
+deb http://$ARCHIVE/ubuntu $RELEASE-backports main restricted universe multiverse
+deb http://security.ubuntu.com/ubuntu $RELEASE-security main restricted
+deb http://security.ubuntu.com/ubuntu $RELEASE-security universe
+deb http://security.ubuntu.com/ubuntu $RELEASE-security multiverse
+deb-src http://$ARCHIVE/ubuntu $RELEASE main restricted
+deb-src http://$ARCHIVE/ubuntu $RELEASE-updates main restricted
+deb-src http://$ARCHIVE/ubuntu $RELEASE universe
+deb-src http://$ARCHIVE/ubuntu $RELEASE-updates universe
+deb-src http://$ARCHIVE/ubuntu $RELEASE multiverse
+deb-src http://$ARCHIVE/ubuntu $RELEASE-updates multiverse
+deb-src http://$ARCHIVE/ubuntu $RELEASE-backports main restricted universe multiverse
+deb-src http://security.ubuntu.com/ubuntu $RELEASE-security main restricted
+deb-src http://security.ubuntu.com/ubuntu $RELEASE-security universe
+deb-src http://security.ubuntu.com/ubuntu $RELEASE-security multiverse
+'''
+ if archives is None:
+ self.archives = [
+ # add more archives
+ "gb.archive.ubuntu.com",
+ "us.archive.ubuntu.com",
+ "cz.archive.ubuntu.com",
+ # doesn't work for some reason
+ # "ftpmaster.internal.com"
+ ]
+ else:
+ # FIXME: do some cfg file stuff here
+ pass
+ self.output_statistics = {}
+ for archive in self.archives:
+ self.output_statistics[archive] = {
+ "ping_stats": "",
+ "download_speed_stats": ""
+ }
+ self.start_lxc_container()
+ self.ping_archive()
+ self.download_package()
+ self.kill_lxc_container()
+ logging.info("Archive statistics:\n%s", json.dumps(self.output_statistics, sort_keys=False, indent=4))
+
+
+ def ping_archive(self):
+ lxc_cmd = ['lxc', 'exec', '-n', self.container_name, '--']
+ ping_cmd = ['/usr/bin/ping', '-c', '1', 'archive']
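+        # 'archive' at index 3 is a placeholder, overwritten with each archive hostname below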
+ stats = {}
+ for archive in self.archives:
+ ping_cmd[3] = archive
+ ping_this_archive = subprocess.check_output((lxc_cmd + ping_cmd))
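+            # keep ping's closing rtt summary line (the second-to-last line of output)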
+ self.output_statistics[archive]["ping_stats"] = ping_this_archive.decode("utf-8").split("\n")[-2]
+
+
+ def start_lxc_container(self):
+ commands = ['lxc', 'launch', 'ubuntu:' + self.release, self.container_name]
+ start_container = subprocess.run(commands, check=True)
+ if start_container.returncode != 0:
+ logging.error("Container wasn't started correctly, exiting...")
+ return False
+ logging.info("Waiting for connectivity...")
+ check_cmd = ['lxc', 'list', '--format=csv']
+ # FIXME: need a timeout here
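+        # the container counts as ready once 'lxc list --format=csv' reports no empty fields (an address has been assigned)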
+ while True:
+ check_me = subprocess.Popen(check_cmd, stdout=subprocess.PIPE)
+ check_me_piped = subprocess.check_output(['grep', self.container_name], stdin=check_me.stdout)
+ check_me_list = check_me_piped.decode('utf-8').split(",")
+ if '' not in check_me_list:
+ logging.info(f'Container {self.container_name} instantiated successfully')
+ return True
+
+
+ def kill_lxc_container(self):
+ kill_cmd = ['lxc', 'delete', '--force', self.container_name]
+ killed = subprocess.run(kill_cmd, check=True)
+ if killed.returncode == 0:
+ logging.info(f'{self.container_name} has been successfully deleted.')
+ return True
+ logging.error(f"Couldn't delete container '{self.container_name}'")
+ return False
+
+
+ def download_package(self):
+ logging.info(f"Testing download of {self.pkg_to_test}")
+ lxc_pre = ['lxc', 'exec', '-n', '-t', self.container_name, '--']
+ apt_opt = '-o "Dir::Etc::sourcelist=/etc/apt/sources.list.d/sources.list"'
+ apt_update = ['apt'] + apt_opt.split(' ') + ['update']
+        install_pkg = ['apt-get'] + apt_opt.split(' ') + ['source', self.pkg_to_test]
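+        # the elapsed time of the 'apt-get source' run below is the per-archive download measurement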
+
+ subprocess.run(lxc_pre + ['apt', 'install', '-y', 'dpkg-dev'])
+ for archive in self.archives:
+ with open("/tmp/sources.list", 'w', encoding='utf-8') as f:
+ f.write(self.sources_file.replace("$ARCHIVE", archive).replace("$RELEASE", self.release))
+ copy_command = ['lxc', 'file', 'push', '/tmp/sources.list', self.container_name + '/etc/apt/sources.list.d/']
+ subprocess.run(copy_command, check=True)
+ subprocess.run(lxc_pre + apt_update, check=True)
+ print(" ".join(lxc_pre + install_pkg))
+ download_start_time = time.time()
+ subprocess.run(lxc_pre + install_pkg, check=True)
+ download_end_time = time.time()
+ self.output_statistics[archive]["download_speed_stats"] = str(download_end_time - download_start_time)
+
+
+if __name__ == "__main__":
+ pinger = archiveTester()
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/with-distributed-lock b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/with-distributed-lock
index dd4b1c8..60915e1 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/with-distributed-lock
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/with-distributed-lock
@@ -1,7 +1,7 @@
#!/usr/bin/python3
-# Run a command while holding a distributed amqp lock
-#
+# pylint: disable=import-error, invalid-name, unused-argument
"""
+Run a command while holding a distributed amqp lock
Usage: with-distributed-lock <lock name> <command> <argument>...
Generates a RabbitMQ single active consumer queue named by the lock,
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker-config-production/setup-canonical.sh b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker-config-production/setup-canonical.sh
index 93d48d8..2e4cd32 100644
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker-config-production/setup-canonical.sh
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker-config-production/setup-canonical.sh
@@ -1,4 +1,6 @@
+#!/bin/sh
# Canonical/Ubuntu specific testbed setup
+# shellcheck disable=SC2230
# Remove trailing dot from the machine fqdn.
# Workaround for LP: #2019472.
@@ -70,6 +72,7 @@ if type iptables >/dev/null 2>&1; then
iptables -w -t mangle -A FORWARD -p tcp --tcp-flags SYN,RST SYN -j TCPMSS --clamp-mss-to-pmtu || true
EOF
chmod 755 /etc/rc.local
+ # shellcheck disable=SC1091
. /etc/rc.local
fi
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker
index 55b8ef6..762f0c3 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker
@@ -1,9 +1,11 @@
#!/usr/bin/python3
-# autopkgtest cloud worker
-# Author: Martin Pitt <martin.pitt@xxxxxxxxxx>
-#
-# Requirements: python3-amqplib python3-swiftclient python3-influxdb
-# Requirements for running autopkgtest from git: python3-debian libdpkg-perl
+#pylint: disable=invalid-name, fixme, consider-using-f-string, too-many-lines, import-error, too-many-arguments, consider-using-with, protected-access, logging-not-lazy, format-string-without-interpolation, line-too-long, anomalous-backslash-in-string, missing-function-docstring, global-statement, unused-argument, redefined-builtin, used-before-assignment, too-many-locals, redefined-outer-name, unused-variable, bad-except-order, too-many-nested-blocks, too-many-return-statements, too-many-branches, too-many-statements, bad-option-value
+'''
+autopkgtest cloud worker
+Author: Martin Pitt <martin.pitt@xxxxxxxxxx>
+Requirements: python3-amqplib python3-swiftclient python3-influxdb
+Requirements for running autopkgtest from git: python3-debian libdpkg-perl
+'''
import os
import sys
@@ -22,6 +24,7 @@ import hashlib
import random
import fnmatch
import socket
+from urllib.error import HTTPError
import amqplib.client_0_8 as amqp
import distro_info
@@ -30,7 +33,6 @@ import systemd.journal
from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError
-from urllib.error import HTTPError
ALL_RELEASES = distro_info.UbuntuDistroInfo().get_all(result='object')
@@ -89,7 +91,8 @@ TEMPORARY_TEST_FAIL_STRINGS = ['Could not connect to ftpmaster.internal:80',
'Cannot initiate the connection to ppa.launchpad.net:80',
'Failed to fetch http://ftpmaster.internal/',
'" failed with stderr "error: Get https://0.0.0.0/1.0/operations/',
- 'RecursionError: maximum recursion depth exceeded in comparison', # #1908506
+ 'RecursionError: maximum recursion ' + \
+ 'depth exceeded in comparison', # #1908506
'Temporary failure resolving \'archive.ubuntu.com\'',
'Temporary failure resolving \'ports.ubuntu.com\'',
'Temporary failure resolving \'ftpmaster.internal\'',
@@ -112,8 +115,10 @@ FAIL_PKG_STRINGS = {'systemd*': ['timed out waiting for testbed to reboot',
'Timed out on waiting for ssh connection',
'Temporary failure resolving',
'VirtSubproc.Timeout',
- 'ERROR: testbed failure: testbed auxverb failed with exit code 255',
- 'ERROR: testbed failure: rules extract failed with exit code 100 (apt failure)'],
+ 'ERROR: testbed failure: testbed auxverb ' + \
+ 'failed with exit code 255',
+ 'ERROR: testbed failure: rules extract failed ' + \
+ 'with exit code 100 (apt failure)'],
'linux-*': ['timed out waiting for testbed to reboot',
'Timed out on waiting for ssh connection',
'ERROR: testbed failure: testbed auxverb failed',
@@ -133,7 +138,8 @@ FAIL_PKG_STRINGS = {'systemd*': ['timed out waiting for testbed to reboot',
'Timed out on waiting for ssh connection'],
'kdump-tools': ['This does not look like a tar archive',
'Timed out on waiting for ssh connection'],
- 'llvm-toolchain-*': ['clang: error: unable to execute command: Segmentation fault (core dumped)']}
+ 'llvm-toolchain-*': ['clang: error: unable to execute command: ' + \
+ 'Segmentation fault (core dumped)']}
# Exemptions from TEMPORARY_TEST_FAIL_STRINGS / FAIL_{PKG_,}STRINGS
# Adding dbconfig-common here is a hack of sorts LP: #2001714
@@ -220,10 +226,10 @@ def parse_args():
return parser.parse_args()
-def read_per_package_configs(cfg):
+def read_per_package_configs(pkg_cfg):
def read_per_package_file(filename):
out = set()
- with open(filename, 'r') as f:
+ with open(filename, 'r', encoding='utf-8') as f:
entries = {
line.strip()
for line in f.readlines()
@@ -245,7 +251,7 @@ def read_per_package_configs(cfg):
return out
global big_packages, long_tests, never_run
- dir = cfg.get('autopkgtest', 'per_package_config_dir').strip()
+ dir = pkg_cfg.get('autopkgtest', 'per_package_config_dir').strip()
big_packages = read_per_package_file(os.path.join(dir, "big_packages"))
long_tests = read_per_package_file(os.path.join(dir, "long_tests"))
@@ -269,8 +275,10 @@ def process_output_dir(dir, pkgname, code, triggers):
# the version, so that frontends (e.g. autopkgtest-web, or britney) can
# display the result.
if code in FAIL_CODES and 'testpkg-version' not in files:
- logging.warning('Code %d returned and no testpkg-version - returning "unknown" for %s' % (code, pkgname))
- with open(os.path.join(dir, 'testpkg-version'), 'w') as testpkg_version:
+ logging.warning('Code %d returned and no testpkg-version - returning "unknown" for %s',
+ code,
+ pkgname)
+ with open(os.path.join(dir, 'testpkg-version'), 'w', encoding='utf-8') as testpkg_version:
testpkg_version.write('%s unknown' % pkgname)
files.add('testpkg-version')
# we might need to fake testinfo.json up too, depending on how
@@ -279,23 +287,23 @@ def process_output_dir(dir, pkgname, code, triggers):
if 'testinfo.json' not in files and triggers:
logging.warning('...testinfo.json is missing too, faking one up')
triggers = ' '.join(triggers)
- with open(os.path.join(dir, 'testinfo.json'), 'w') as testinfo:
+ with open(os.path.join(dir, 'testinfo.json'), 'w', encoding='utf-8') as testinfo:
d = {'custom_environment':
['ADT_TEST_TRIGGERS=%s' % triggers]}
json.dump(d, testinfo, indent=True)
files.add('testinfo.json')
- with open(os.path.join(dir, 'testpkg-version'), 'r') as tpv:
+ with open(os.path.join(dir, 'testpkg-version'), 'r', encoding='utf-8') as tpv:
testpkg_version = tpv.read().split()[1]
try:
- with open(os.path.join(dir, 'duration'), 'r') as dur:
+ with open(os.path.join(dir, 'duration'), 'r', encoding='utf-8') as dur:
duration = dur.read()
except FileNotFoundError:
duration = None
try:
- with open(os.path.join(dir, 'requester'), 'r') as req:
+ with open(os.path.join(dir, 'requester'), 'r', encoding='utf-8') as req:
requester = req.read()
except FileNotFoundError:
requester = None
@@ -303,7 +311,9 @@ def process_output_dir(dir, pkgname, code, triggers):
# these are small and we need only these for gating and indexing
resultfiles = ['exitcode']
# these might not be present in infrastructure failure cases
- for f in ['testbed-packages', 'testpkg-version', 'duration', 'testinfo.json', 'requester', 'summary']:
+ for f in ['testbed-packages',
+ 'testpkg-version',
+ 'duration', 'testinfo.json', 'requester', 'summary']:
if f in files:
resultfiles.append(f)
subprocess.check_call(['tar', 'cf', 'result.tar'] + resultfiles, cwd=dir)
@@ -352,7 +362,7 @@ def host_arch(release, architecture):
def subst(s, big_package, release, architecture, hostarch, pkgname):
- subst = {
+ substitute = {
'RELEASE': release,
'ARCHITECTURE': architecture,
'HOSTARCH': hostarch,
@@ -364,14 +374,22 @@ def subst(s, big_package, release, architecture, hostarch, pkgname):
}
for i in args.variable:
k, v = i.split('=', 1)
- subst[k] = v
+ substitute[k] = v
- for k, v in subst.items():
+ for k, v in substitute.items():
s = s.replace('$' + k, v)
return s
-def send_status_info(queue, release, architecture, pkgname, params, out_dir, running, duration, private=False):
+def send_status_info(queue,
+ release,
+ architecture,
+ pkgname,
+ params,
+ out_dir,
+ running,
+ duration,
+ private=False):
'''Send status and logtail to status queue'''
if not queue:
@@ -406,18 +424,18 @@ def send_status_info(queue, release, architecture, pkgname, params, out_dir, run
'logtail': logtail})
queue.basic_publish(amqp.Message(msg, delivery_mode=2), status_exchange_name, '')
-def call_autopkgtest(argv, release, architecture, pkgname, params, out_dir, start_time, private=False):
+def call_autopkgtest(argv, release, architecture, pkgname,
+ params, out_dir, start_time, private=False):
'''Call autopkgtest and regularly send status/logtail to status_exchange_name
Return exit code.
'''
# set up status AMQP exchange
- global amqp_con
status_amqp = amqp_con.channel()
status_amqp.access_request('/data', active=True, read=False, write=True)
status_amqp.exchange_declare(status_exchange_name, 'fanout', durable=False, auto_delete=True)
- null_fd = open('/dev/null', 'w')
+ null_fd = open('/dev/null', 'w', encoding='utf-8')
autopkgtest = subprocess.Popen(argv, stdout=null_fd, stderr=subprocess.STDOUT)
# FIXME: Use autopkgtest.wait(timeout=10) once moving to Python 3
# only send status update every 10s, but check if program has finished every 1s
@@ -535,14 +553,16 @@ def request(msg):
os.makedirs(out_dir)
# now let's fake up a log file
- with open(os.path.join(out_dir, 'log'), 'w') as log:
- log.write('This package is marked to never run. To get the entry removed, contact a member of the release team.')
+ with open(os.path.join(out_dir, 'log'), 'w', encoding='utf-8') as log:
+ log.write('This package is marked to never run. To get the entry ' + \
+ 'removed, contact a member of the release team.')
triggers = None
# a json file containing the env
if 'triggers' in params:
triggers = ' '.join(params['triggers'])
- with open(os.path.join(out_dir, 'testinfo.json'), 'w') as testinfo:
+ with open(os.path.join(out_dir, 'testinfo.json'), 'w',
+ encoding='utf-8') as testinfo:
d = {'custom_environment':
['ADT_TEST_TRIGGERS=%s' % triggers]}
json.dump(d, testinfo, indent=True)
@@ -550,7 +570,8 @@ def request(msg):
# and the testpackage version (pkgname blacklisted)
# XXX: replace "blacklisted" here, but needs changes in
# proposed-migration and hints
- with open(os.path.join(out_dir, 'testpkg-version'), 'w') as testpkg_version:
+ with open(os.path.join(out_dir, 'testpkg-version'), 'w',
+ encoding='utf-8') as testpkg_version:
testpkg_version.write('%s blacklisted' % pkgname)
container = 'autopkgtest-' + release
@@ -584,7 +605,9 @@ def request(msg):
if 'triggers' in params and 'qemu-efi-noacpi/0' in params['triggers']:
if architecture == 'arm64':
- argv += ['--setup-commands', '/home/ubuntu/autopkgtest-cloud/worker-config-production/qemu-efi-noacpi.sh']
+ argv += ['--setup-commands',
+ '/home/ubuntu/autopkgtest-cloud/'
+ 'worker-config-production/qemu-efi-noacpi.sh']
else:
# these will be written later on
code = 99
@@ -592,12 +615,13 @@ def request(msg):
os.makedirs(out_dir)
# fake a log file
- with open(os.path.join(out_dir, 'log'), 'w') as log:
+ with open(os.path.join(out_dir, 'log'), 'w', encoding='utf-8') as log:
log.write('Not running due to invalid trigger: qemu-efi-noacpi/0 is arm64 only')
dont_run = True
# and the testpackage version (invalid trigger with a reason)
- with open(os.path.join(out_dir, 'testpkg-version'), 'w') as testpkg_version:
+ with open(os.path.join(out_dir, 'testpkg-version'),
+ 'w', encoding='utf-8') as testpkg_version:
testpkg_version.write('invalid trigger: qemu-efi-noacpi/0 is arm64 only')
if 'ppas' in params and params['ppas']:
@@ -605,30 +629,39 @@ def request(msg):
try:
(ppacreds, _, ppaurl) = ppa.rpartition('@')
(ppaurl, _, fingerprint) = ppaurl.partition(':')
- (ppacreds_user, ppacreds_pass) = ppacreds.split(':') if ppacreds else (None, None)
+ (ppacreds_user, ppacreds_pass) = \
+ ppacreds.split(':') if ppacreds else (None, None)
(ppauser, ppaname) = ppaurl.split('/')
except ValueError:
- logging.error('Invalid PPA specification, must be [user:token@]lpuser/ppa_name[:fingerprint]')
+ logging.error('Invalid PPA specification, must be '
+ '[user:token@]lpuser/ppa_name[:fingerprint]')
msg.channel.basic_ack(msg.delivery_tag)
return
if fingerprint:
- logging.debug('Request states that PPA user %s, name %s has GPG fingerprint %s' % (ppauser, ppaname, fingerprint))
+ logging.debug('Request states that PPA user %s, name %s has GPG fingerprint %s',
+ ppauser, ppaname, fingerprint)
else:
# Private PPAs require the fingerprint passed through the
# request as we can't use the LP API to fetch it.
if ppacreds_user:
- logging.error('Invalid PPA specification, GPG fingerprint required for private PPAs')
+ logging.error('Invalid PPA specification, GPG fingerprint '
+ 'required for private PPAs')
msg.channel.basic_ack(msg.delivery_tag)
return
for retry in range(5):
try:
- f = urllib.request.urlopen('https://api.launchpad.net/1.0/~%s/+archive/ubuntu/%s' % (ppauser, ppaname))
+ f = urllib.request.urlopen('https://api.launchpad.net/1.0/'
+ '~%s/+archive/ubuntu/%s' % (ppauser, ppaname))
contents = f.read().decode('UTF-8')
f.close()
fingerprint = json.loads(contents)['signing_key_fingerprint']
- logging.debug('PPA user %s, name %s has GPG fingerprint %s' % (ppauser, ppaname, fingerprint))
+ logging.debug('PPA user %s, name %s has GPG fingerprint %s' % \
+ (ppauser, ppaname, fingerprint))
except (IOError, ValueError, KeyError) as e:
- logging.error('Cannot get PPA information: "%s". Consuming the request - it will be left dangling; retry once the problem is resolved.' % e)
+ logging.error('Cannot get PPA information: "%s". Consuming the '
+ 'request - it will be left dangling; retry once '
+ 'the problem is resolved.', e)
msg.channel.basic_ack(msg.delivery_tag)
return
except HTTPError as e:
@@ -641,7 +674,9 @@ def request(msg):
else:
break
else:
- logging.error('Cannot contact Launchpad to get PPA information. Consuming the request - it will be left dangling; retry once the problem is resolved.')
+ logging.error('Cannot contact Launchpad to get PPA information. '
+ 'Consuming the request - it will be left dangling; '
+ 'retry once the problem is resolved.')
msg.channel.basic_ack(msg.delivery_tag)
return
if ppacreds_user:
@@ -651,7 +686,8 @@ def request(msg):
else:
ppaprefix = 'http://'
# add GPG key
- argv += ['--setup-commands', 'apt-key adv --keyserver keyserver.ubuntu.com --recv-key ' + fingerprint]
+ argv += ['--setup-commands',
+ 'apt-key adv --keyserver keyserver.ubuntu.com --recv-key ' + fingerprint]
# add apt source
argv += ['--setup-commands', 'REL=$(sed -rn "/^(deb|deb-src) .*(ubuntu.com|ftpmaster)/ { s/^[^ ]+ +(\[.*\] *)?[^ ]* +([^ -]+) +.*$/\\2/p; q }" /etc/apt/sources.list); '
'echo "deb %(prefix)sppa.launchpad.net/%(u)s/%(p)s/ubuntu $REL main" > /etc/apt/sources.list.d/autopkgtest-%(u)s-%(p)s.list; '
@@ -754,14 +790,14 @@ def request(msg):
argv += ['--setup-commands', 'apt-get install -y linux-image-omap linux-headers-omap']
else:
argv += ['--setup-commands',
- ('apt-get install -y ^kernel-testing--%(t)s--full--preferred$ || ' +
- 'apt-get install -y ^linux-image%(f)s$ ^linux-headers%(f)s$ || ' +
- 'apt-get install -y ^linux-image-generic%(f)s$ ^linux-headers-generic%(f)s$') %
- {'f': flavor, 't': totest}]
+ ('apt-get install -y ^kernel-testing--%(t)s--full--preferred$ || ' +
+ 'apt-get install -y ^linux-image%(f)s$ ^linux-headers%(f)s$ || ' +
+ 'apt-get install -y ^linux-image-generic%(f)s$ ^linux-headers-generic%(f)s$') %
+ {'f': flavor, 't': totest}]
argv += ['--setup-commands',
- ('apt-get install -y ^kernel-testing--%(t)s--modules-extra--preferred$ || ' +
- 'apt-get install -y ^linux-modules-extra%(f)s$ || :') %
- {'f': flavor, 't': totest}]
+ ('apt-get install -y ^kernel-testing--%(t)s--modules-extra--preferred$ || ' +
+ 'apt-get install -y ^linux-modules-extra%(f)s$ || :') %
+ {'f': flavor, 't': totest}]
break
if 'testname' in params:
@@ -820,7 +856,7 @@ def request(msg):
if s in contents]
if temp_fails:
logging.warning('Saw %s in log, which is a sign of a temporary failure.',
- ' and '.join(temp_fails))
+ ' and '.join(temp_fails))
logging.warning('%sLog follows:', retrying)
logging.error(contents)
if retry < 2:
@@ -852,7 +888,7 @@ def request(msg):
if fails:
num_failures += 1
logging.warning('Saw %s in log, which is a sign of a real (not tmp) failure - seen %d so far',
- ' and '.join(fails), num_failures)
+ ' and '.join(fails), num_failures)
logging.warning('Testbed failure. %sLog follows:', retrying)
logging.error(contents)
if retry < 2:
@@ -878,17 +914,17 @@ def request(msg):
if code == 1:
logging.error('autopkgtest exited with unexpected error code 1')
sys.exit(1)
- with open(os.path.join(out_dir, 'exitcode'), 'w') as f:
+ with open(os.path.join(out_dir, 'exitcode'), 'w', encoding='utf-8') as f:
f.write('%i\n' % code)
- with open(os.path.join(out_dir, 'duration'), 'w') as f:
+ with open(os.path.join(out_dir, 'duration'), 'w', encoding='utf-8') as f:
f.write('%u\n' % duration)
if 'requester' in params:
- with open(os.path.join(out_dir, 'requester'), 'w') as f:
+ with open(os.path.join(out_dir, 'requester'), 'w', encoding='utf-8') as f:
f.write('%s\n' % params['requester'])
if 'readable-by' in params:
- with open(os.path.join(out_dir, 'readable-by'), 'w') as f:
+ with open(os.path.join(out_dir, 'readable-by'), 'w', encoding='utf-8') as f:
if isinstance(params['readable-by'], list):
f.write('\n'.join(params['readable-by']))
else:
@@ -978,20 +1014,19 @@ def request(msg):
finally:
shutil.rmtree(work_dir)
- global amqp_con
complete_amqp = amqp_con.channel()
complete_amqp.access_request('/complete', active=True, read=False, write=True)
complete_amqp.exchange_declare(complete_exchange_name, 'fanout', durable=True, auto_delete=False)
- complete_msg = json.dumps ({'architecture': architecture,
- 'container': container,
- 'duration': duration,
- 'exitcode': code,
- 'package': pkgname,
- 'testpkg_version': testpkg_version,
- 'release': release,
- 'requester': requester,
- 'swift_dir': swift_dir,
- 'triggers': triggers})
+ complete_msg = json.dumps({'architecture': architecture,
+ 'container': container,
+ 'duration': duration,
+ 'exitcode': code,
+ 'package': pkgname,
+ 'testpkg_version': testpkg_version,
+ 'release': release,
+ 'requester': requester,
+ 'swift_dir': swift_dir,
+ 'triggers': triggers})
complete_amqp.basic_publish(amqp.Message(complete_msg, delivery_mode=2),
complete_exchange_name, '')
diff --git a/charms/focal/autopkgtest-cloud-worker/lib/systemd.py b/charms/focal/autopkgtest-cloud-worker/lib/systemd.py
index b83828f..bbb0ca3 100644
--- a/charms/focal/autopkgtest-cloud-worker/lib/systemd.py
+++ b/charms/focal/autopkgtest-cloud-worker/lib/systemd.py
@@ -1,4 +1,5 @@
-#pylint: disable=missing-function-docstring
+"""Systemd handler for autopkgtest-cloud workers"""
+#pylint: disable=missing-function-docstring, fixme, import-error, consider-using-f-string, invalid-name, too-many-locals, too-many-branches, bad-option-value
import os
import shutil
from textwrap import dedent
@@ -36,7 +37,7 @@ def reload():
) # cancellable
-def enabledisable(unit_names, enabledisable, enabledisablearg, startstop):
+def enabledisable(unit_names, enabledisableflag, enabledisablearg, startstop):
print(
"calling {enabledisable} then {startstop} on {unit_names}".format(
**locals()
@@ -46,7 +47,7 @@ def enabledisable(unit_names, enabledisable, enabledisablearg, startstop):
"org.freedesktop.systemd1",
"/org/freedesktop/systemd1",
"org.freedesktop.systemd1.Manager",
- enabledisable,
+ enabledisableflag,
enabledisablearg,
GLib.VariantType(
"*"
@@ -131,7 +132,7 @@ def get_units():
lxd_object_paths = defaultdict(lambda: defaultdict(dict))
for unit in units:
- (name, _, _, active, _, _, object_path, _, _, _) = unit
+ (name, _, _, _, _, _, object_path, _, _, _) = unit
if name.startswith("build-adt-image@") and name.endswith(".timer"):
name_release_region_arch = name[16:][:-6]
(release, region, arch) = name_release_region_arch.split("-", -1)
@@ -171,8 +172,7 @@ def update_cloud_dropins(region, arch, n, releases):
def get_arches(release):
if arch == "amd64" and UbuntuRelease(release) < UbuntuRelease("focal"):
return ["amd64", "i386"]
- else:
- return [arch]
+ return [arch]
ensure_adt_units = " ".join(
[
@@ -192,7 +192,7 @@ def update_cloud_dropins(region, arch, n, releases):
shutil.rmtree(dropindir)
os.makedirs(dropindir)
- with open(os.path.join(dropindir, "ensure-adt-image.conf"), "w") as f:
+ with open(os.path.join(dropindir, "ensure-adt-image.conf"), "w", encoding='utf-8') as f:
f.write(
dedent(
"""\
@@ -222,7 +222,7 @@ def update_lxd_dropins(arch, ip, n):
pass
with open(
- os.path.join(dropindir, "autopkgtest-lxd-remote.conf"), "w"
+ os.path.join(dropindir, "autopkgtest-lxd-remote.conf"), "w", encoding='utf-8'
) as f:
remote_unit = "autopkgtest-lxd-remote@lxd-{}-{}.service".format(
arch, ip
@@ -334,7 +334,8 @@ def set_up_systemd_units(target_cloud_config, target_lxd_config, releases):
if releases_to_disable:
print(
- "Disabling build-adt-image timers for {region}/{arch}/{releases_to_disable}".format(
+ "Disabling build-adt-image timers for " + \
+ "{region}/{arch}/{releases_to_disable}".format(
**locals()
)
)
@@ -354,9 +355,7 @@ def set_up_systemd_units(target_cloud_config, target_lxd_config, releases):
target_n_units = target_lxd_config.get(arch, {}).get(ip, 0)
if target_n_units > 0:
update_lxd_dropins(arch, ip, target_n_units)
- if n_units == target_n_units:
- continue
- elif n_units < target_n_units:
+ if n_units < target_n_units:
# need to enable some units
delta = target_n_units - n_units
unit_names = [
@@ -364,7 +363,7 @@ def set_up_systemd_units(target_cloud_config, target_lxd_config, releases):
for n in range(n_units + 1, n_units + delta + 1)
]
enable(unit_names)
- else:
+ elif n_units > target_n_units:
# need to disable some units
delta = n_units - target_n_units
unit_names = [
diff --git a/charms/focal/autopkgtest-cloud-worker/lib/utils.py b/charms/focal/autopkgtest-cloud-worker/lib/utils.py
index c7fce49..eebe6a1 100644
--- a/charms/focal/autopkgtest-cloud-worker/lib/utils.py
+++ b/charms/focal/autopkgtest-cloud-worker/lib/utils.py
@@ -1,4 +1,4 @@
-#pylint: disable=missing-module-docstring, missing-class-docstring, missing-function-docstring
+#pylint: disable=missing-module-docstring, missing-class-docstring, missing-function-docstring, import-error, consider-using-f-string, bad-option-value
import os
import pwd
import subprocess
diff --git a/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py b/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
index 3dc4625..4921fd7 100644
--- a/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
+++ b/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
@@ -1,4 +1,4 @@
-#pylint: disable=missing-module-docstring,missing-function-docstring
+#pylint: disable=missing-module-docstring,missing-function-docstring, consider-using-dict-items, import-error, wrong-import-order, invalid-name, fixme, consider-using-f-string, possibly-unused-variable, import-outside-toplevel, consider-using-with, redefined-outer-name, bad-option-value
from charms.layer import status
from charms.reactive import (
when,
@@ -11,7 +11,6 @@ from charms.reactive import (
hook,
not_unless,
)
-from charms.reactive.relations import endpoint_from_flag
from charmhelpers.core.hookenv import (
charm_dir,
config,
@@ -20,9 +19,7 @@ from charmhelpers.core.hookenv import (
storage_list,
)
from utils import install_autodep8, UnixUser
-
from textwrap import dedent
-
import glob
import os
import pygit2
@@ -126,21 +123,20 @@ def clone_autopkgtest():
"autopkgtest.influx-creds-written",
)
def set_up_systemd_units():
+ def link_and_enable(unit, dest, base):
+ os.symlink(unit, dest)
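+ # template units (names containing "@") cannot be enabled without an
+ # instance, so they are only symlinked into place here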
+ if "@" not in base:
+ subprocess.check_call(["systemctl", "enable", base])
for unit in glob.glob(os.path.join(charm_dir(), "units", "*")):
base = os.path.basename(unit)
dest = os.path.join(os.path.sep, "etc", "systemd", "system", base)
- def link_and_enable():
- os.symlink(unit, dest)
- if "@" not in base:
- subprocess.check_call(["systemctl", "enable", base])
-
try:
- link_and_enable()
+ link_and_enable(unit, dest, base)
except FileExistsError:
if not os.path.islink(dest):
os.unlink(dest)
- link_and_enable()
+ link_and_enable(unit, dest, base)
set_flag("autopkgtest.systemd_units_linked_and_enabled")
@@ -212,7 +208,7 @@ def set_up_rabbitmq(rabbitmq):
host = rabbitmq.private_address()
status.maintenance("Configuring rabbitmq")
log("Setting up rabbitmq connection to: {}@{}".format(username, host))
- with open(RABBITMQ_CRED_PATH, "w") as cred_file:
+ with open(RABBITMQ_CRED_PATH, "w", encoding='utf-8') as cred_file:
cred_file.write(
dedent(
"""\
@@ -398,7 +394,7 @@ def write_v2_config():
def write_swift_config():
with open(
- os.path.expanduser("~ubuntu/swift-password.cred"), "w"
+ os.path.expanduser("~ubuntu/swift-password.cred"), "w", encoding='utf-8'
) as swift_password_file:
for key in config():
if key.startswith("swift") and config()[key] is not None:
@@ -471,17 +467,18 @@ def write_worker_config():
}
def write(conf_file):
- with open(conf_file, "w") as cf:
+ with open(conf_file, "w", encoding='utf-8') as cf:
cp = configparser.ConfigParser()
cp.read_dict(conf)
cp.write(cf)
# FIXME: Hotfix for bos01
if "bos01" in conf_file:
- with open(conf_file, "r") as cf:
+ with open(conf_file, "r", encoding='utf-8') as cf:
conf_data = cf.read()
- with open(conf_file, "w") as cf:
- cf.write(conf_data.replace(config().get("mirror"), "http://us.ports.ubuntu.com/ubuntu-ports/"))
+ with open(conf_file, "w", encoding='utf-8') as cf:
+ cf.write(conf_data.replace(config().get("mirror"),
+ "http://us.ports.ubuntu.com/ubuntu-ports/"))
for region in nworkers_yaml:
@@ -494,13 +491,12 @@ def write_worker_config():
conf["autopkgtest"]["architectures"] = "amd64 i386"
write(conf_file)
break
- else:
- conf_file = os.path.join(
- os.path.expanduser("~ubuntu"),
- "worker-{}-{}.conf".format(region, arch),
- )
- conf["autopkgtest"]["architectures"] = arch
- write(conf_file)
+ conf_file = os.path.join(
+ os.path.expanduser("~ubuntu"),
+ "worker-{}-{}.conf".format(region, arch),
+ )
+ conf["autopkgtest"]["architectures"] = arch
+ write(conf_file)
for arch in lxdremotes_yaml:
conf_file = os.path.join(
@@ -516,7 +512,7 @@ def write_worker_config():
@when("config.changed.net-name")
def write_net_name():
clear_flag("autopkgtest.net-name-written")
- with open(os.path.expanduser("~ubuntu/net-name.rc"), "w") as f:
+ with open(os.path.expanduser("~ubuntu/net-name.rc"), "w", encoding='utf-8') as f:
f.write('NET_NAME="{}"\n'.format(config().get("net-name")))
set_flag("autopkgtest.net-name-written")
set_flag("autopkgtest.reload-needed")
@@ -524,7 +520,7 @@ def write_net_name():
@when("config.changed.mirror")
def write_mirror():
- with open(os.path.expanduser("~ubuntu/mirror.rc"), "w") as f:
+ with open(os.path.expanduser("~ubuntu/mirror.rc"), "w", encoding='utf-8') as f:
f.write('MIRROR="{}"\n'.format(config().get("mirror")))
set_flag("autopkgtest.reload-needed")
@@ -575,7 +571,7 @@ def write_influx_creds():
influxdb_database = config().get("influxdb-database")
influxdb_context = config().get("influxdb-context")
- with open(os.path.expanduser("~ubuntu/influx.cred"), "w") as cf:
+ with open(os.path.expanduser("~ubuntu/influx.cred"), "w", encoding='utf-8') as cf:
cf.write(
dedent(
f"""\
diff --git a/charms/focal/autopkgtest-cloud-worker/tests/10-deploy b/charms/focal/autopkgtest-cloud-worker/tests/10-deploy
index 2cd32f6..3400ff3 100755
--- a/charms/focal/autopkgtest-cloud-worker/tests/10-deploy
+++ b/charms/focal/autopkgtest-cloud-worker/tests/10-deploy
@@ -1,25 +1,38 @@
#!/usr/bin/python3
+# pylint: disable=import-error, invalid-name
+'''
+Deployment test for the autopkgtest-cloud-worker juju charm
+'''
-import amulet
-import requests
import unittest
+import requests
+import amulet
class TestCharm(unittest.TestCase):
+ '''
+ Tests juju charm
+ '''
def setUp(self):
- self.d = amulet.Deployment()
+ '''
+ Sets up service for juju charm
+ '''
+ self.deployment = amulet.Deployment()
- self.d.add('autopkgtest-cloud-worker')
- self.d.expose('autopkgtest-cloud-worker')
+ self.deployment.add('autopkgtest-cloud-worker')
+ self.deployment.expose('autopkgtest-cloud-worker')
- self.d.setup(timeout=900)
- self.d.sentry.wait()
+ self.deployment.setup(timeout=900)
+ self.deployment.sentry.wait()
- self.unit = self.d.sentry['autopkgtest-cloud-worker'][0]
+ self.unit = self.deployment.sentry['autopkgtest-cloud-worker'][0]
def test_service(self):
+ '''
+ Tests connectivity to juju charm via http
+ '''
# test we can access over http
- page = requests.get('http://{}'.format(self.unit.info['public-address']))
+ page = requests.get(f'http://{self.unit.info["public-address"]}')
self.assertEqual(page.status_code, 200)
# Now you can use self.d.sentry[SERVICE][UNIT] to address each of the units and perform
# more in-depth steps. Each self.d.sentry[SERVICE][UNIT] has the following methods:
@@ -30,6 +43,6 @@ class TestCharm(unittest.TestCase):
# - .directory_contents(PATH) - List files and folders in PATH on that unit
# - .relation(relation, service:rel) - Get relation data from return service
-
+
if __name__ == '__main__':
unittest.main()
diff --git a/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py b/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py
index fc87317..a6efd32 100644
--- a/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py
+++ b/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py
@@ -1,3 +1,14 @@
+"""
+Web app for autopkgtest-cloud
+"""
+#pylint: disable=import-error, possibly-unused-variable, consider-using-f-string, invalid-name, bad-option-value
+from textwrap import dedent
+import glob
+import os
+import shutil
+import subprocess
+
+from charmhelpers.core.hookenv import charm_dir, config
from charms.layer import status
from charms.reactive import (
when,
@@ -6,16 +17,8 @@ from charms.reactive import (
when_not,
set_flag,
clear_flag,
- hook,
)
-from charmhelpers.core.hookenv import charm_dir, config
-from textwrap import dedent
-
-import glob
-import os
-import shutil
-import subprocess
AUTOPKGTEST_CLOUD_CONF = os.path.expanduser("~ubuntu/autopkgtest-cloud.conf")
GITHUB_SECRETS_PATH = os.path.expanduser("~ubuntu/github-secrets.json")
@@ -29,6 +32,7 @@ SWIFT_WEB_CREDENTIALS_PATH = os.path.expanduser(
@when_not("autopkgtest-web.autopkgtest_web_symlinked")
def symlink_autopkgtest_cloud():
+ """Creates a symbolic link to webcontrol dir in autopkgtest-cloud repository"""
try:
autopkgtest_cloud = os.path.join(charm_dir(), "webcontrol")
os.symlink(autopkgtest_cloud, os.path.expanduser("~ubuntu/webcontrol"))
@@ -40,6 +44,7 @@ def symlink_autopkgtest_cloud():
@when("amqp.connected")
@when_not("amqp.available")
def setup_rabbitmq(rabbitmq):
+ """Setup access to rabbitmq queueing server"""
rabbitmq.request_access("webcontrol", "/")
status.waiting("Waiting on RabbitMQ to configure vhost")
@@ -50,14 +55,15 @@ def setup_rabbitmq(rabbitmq):
"config.set.hostname",
)
def write_autopkgtest_cloud_conf(rabbitmq):
+ """Sets up config for local and remote databases"""
swiftinternal = config().get("storage-url-internal")
hostname = config().get("hostname")
rabbituser = rabbitmq.username()
rabbitpassword = rabbitmq.password()
rabbithost = rabbitmq.private_address()
clear_flag("autopkgtest-web.config-written")
- with open(f"{AUTOPKGTEST_CLOUD_CONF}.new", "w") as f:
- f.write(
+ with open(f"{AUTOPKGTEST_CLOUD_CONF}.new", "w", encoding='utf-8') as config_file:
+ config_file.write(
dedent(
"""\
[web]
@@ -68,7 +74,7 @@ def write_autopkgtest_cloud_conf(rabbitmq):
[amqp]
uri=amqp://{rabbituser}:{rabbitpassword}@{rabbithost}""".format(
- **locals()
+ **locals()
)
)
)
@@ -81,6 +87,7 @@ def write_autopkgtest_cloud_conf(rabbitmq):
"autopkgtest-web.config-written",
)
def set_up_systemd_units():
+ """Sets up systemd units for autopkgtest-web services"""
any_changed = False
for unit in glob.glob(os.path.join(charm_dir(), "units", "*")):
base = os.path.basename(unit)
@@ -111,6 +118,7 @@ def set_up_systemd_units():
)
@when_not("autopkgtest-web.website-initially-configured")
def initially_configure_website(website):
+ """Sets initial variables for autopkgtest-web website"""
set_up_web_config(website)
@@ -127,8 +135,9 @@ def initially_configure_website(website):
"autopkgtest-web.website-initially-configured"
)
def set_up_web_config(apache):
+ """Sets up proxies and filepaths for website"""
webcontrol_dir = os.path.join(charm_dir(), "webcontrol")
- sn = config().get("hostname")
+ ser_name = config().get("hostname")
https_proxy = config().get("https-proxy")
no_proxy = config().get("no-proxy")
@@ -154,10 +163,10 @@ def set_up_web_config(apache):
pass
if https_proxy:
- with open(https_proxy_env, "w") as f:
- f.write("https_proxy={}".format(https_proxy))
- with open(https_proxy_apache, "w") as f:
- f.write("SetEnv https_proxy {}".format(https_proxy))
+ with open(https_proxy_env, "w", encoding='utf-8') as https_proxy_file:
+ https_proxy_file.write("https_proxy={}".format(https_proxy))
+ with open(https_proxy_apache, "w", encoding='utf-8') as https_proxy_apache_file:
+ https_proxy_apache_file.write("SetEnv https_proxy {}".format(https_proxy))
no_proxy_env = os.path.join(environment_d, "no_proxy.conf")
no_proxy_apache = os.path.join(conf_enabled, "no_proxy.conf")
@@ -172,12 +181,12 @@ def set_up_web_config(apache):
pass
if no_proxy:
- with open(no_proxy_env, "w") as f:
+ with open(no_proxy_env, "w", encoding='utf-8') as f:
f.write("no_proxy={}".format(no_proxy))
- with open(no_proxy_apache, "w") as f:
+ with open(no_proxy_apache, "w", encoding='utf-8') as f:
f.write("SetEnv no_proxy {}".format(no_proxy))
- server_name = "ServerName {}".format(sn) if sn else ""
+ server_name = "ServerName {}".format(ser_name) if ser_name else ""
apache.send_site_config(
dedent(
"""\
@@ -205,7 +214,7 @@ def set_up_web_config(apache):
{server_name}
</VirtualHost>
""".format(
- **locals()
+ **locals()
)
)
)
@@ -216,9 +225,10 @@ def set_up_web_config(apache):
@when_all("config.changed.github-secrets", "config.set.github-secrets")
def write_github_secrets():
+ """Writes github secrets to file"""
github_secrets = config().get("github-secrets")
- with open(GITHUB_SECRETS_PATH, "w") as f:
+ with open(GITHUB_SECRETS_PATH, "w", encoding='utf-8') as f:
f.write(github_secrets)
try:
@@ -232,6 +242,7 @@ def write_github_secrets():
@when_not("config.set.github-secrets")
def clear_github_secrets():
+ """Removes symlink to github secrets file"""
try:
os.unlink(GITHUB_SECRETS_PATH)
except FileNotFoundError:
@@ -246,9 +257,10 @@ def clear_github_secrets():
@when_all("config.changed.swift-web-credentials",
"config.set.swift-web-credentials")
def write_swift_web_credentials():
+ """Writes swift credentials to file"""
swift_credentials = config().get("swift-web-credentials")
- with open(SWIFT_WEB_CREDENTIALS_PATH, "w") as f:
+ with open(SWIFT_WEB_CREDENTIALS_PATH, "w", encoding='utf-8') as f:
f.write(swift_credentials)
try:
@@ -262,6 +274,7 @@ def write_swift_web_credentials():
@when_not("config.set.swift-web-credentials")
def clear_swift_web_credentials():
+ """Removes symlink to swift web creds file"""
try:
os.unlink(SWIFT_WEB_CREDENTIALS_PATH)
except FileNotFoundError:
@@ -278,9 +291,10 @@ def clear_swift_web_credentials():
"config.set.github-status-credentials",
)
def write_github_status_credentials():
+ """Writes github status creds to file and symlinks them"""
github_status_credentials = config().get("github-status-credentials")
- with open(GITHUB_STATUS_CREDENTIALS_PATH, "w") as f:
+ with open(GITHUB_STATUS_CREDENTIALS_PATH, "w", encoding='utf-8') as f:
f.write(github_status_credentials)
try:
@@ -294,6 +308,7 @@ def write_github_status_credentials():
@when_not("config.set.github-status-credentials")
def clear_github_status_credentials():
+ """Removes symlink to github status credentials"""
try:
os.unlink(GITHUB_STATUS_CREDENTIALS_PATH)
except FileNotFoundError:
@@ -309,6 +324,7 @@ def clear_github_status_credentials():
@when_not("autopkgtest-web.bootstrap-symlinked")
def symlink_bootstrap():
+ """Symlinks to bootstrap file"""
try:
os.symlink(
os.path.join(
@@ -323,7 +339,8 @@ def symlink_bootstrap():
@when_not("autopkgtest-web.runtime-dir-created")
def make_runtime_tmpfiles():
- with open("/etc/tmpfiles.d/autopkgtest-web-runtime.conf", "w") as r:
+ """Makes all of the necessary tmp files for autopkgtest-web"""
+ with open("/etc/tmpfiles.d/autopkgtest-web-runtime.conf", "w", encoding='utf-8') as r:
r.write("D %t/autopkgtest_webcontrol 0755 www-data www-data\n")
subprocess.check_call(["systemd-tmpfiles", "--create"])
set_flag("autopkgtest-web.runtime-dir-created")
@@ -331,6 +348,7 @@ def make_runtime_tmpfiles():
@when_not("autopkgtest-web.running-json-symlinked")
def symlink_running():
+ """Symlinks to json files"""
try:
os.symlink(
os.path.join(
@@ -345,6 +363,7 @@ def symlink_running():
@when_not("autopkgtest-web.public-db-symlinked")
def symlink_public_db():
+ """Creates symlink to public database on host"""
try:
publicdir = os.path.expanduser("~ubuntu/public/")
os.makedirs(publicdir)
@@ -361,12 +380,14 @@ def symlink_public_db():
@when("leadership.is_leader")
@when_not("autopkgtest-cloud.leadership_flag_written")
def write_leadership_flag():
- with open("/run/autopkgtest-web-is-leader", "w") as _:
+ """Sets juju leadership status"""
+ with open("/run/autopkgtest-web-is-leader", "w", encoding='utf-8') as _:
set_flag("autopkgtest-cloud.leadership_flag_written")
@when_not("leadership.is_leader")
@when("autopkgtest-cloud.leadership_flag_written")
def clear_leadership_flag():
+ """Clears juju leadership"""
os.unlink("/run/autopkgtest-web-is-leader")
clear_flag("autopkgtest-cloud.leadership_flag_written")
diff --git a/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector b/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector
index 66a9ed4..514bac4 100755
--- a/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector
+++ b/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector
@@ -1,6 +1,9 @@
#!/usr/bin/python3
-# Pick up running tests, their status and logtail from the "teststatus" fanout
-# queue, and regularly write it into /run/running.json
+#pylint: disable=invalid-name, import-error, too-many-function-args
+'''
+Pick up running tests, their status and logtail from the "teststatus" fanout
+queue, and regularly write it into /run/running.json
+'''
import os
import json
@@ -17,11 +20,11 @@ running_name = os.path.join(os.path.sep,
'run',
'amqp-status-collector',
'running.json')
-running_name_new = "{}.new".format(running_name)
+running_name_new = f"{running_name}.new"
# package -> runhash -> release -> arch -> (params, duration, logtail)
running_tests = {}
-last_update = 0
+LAST_UPDATE = 0
def amqp_connect():
@@ -33,19 +36,17 @@ def amqp_connect():
parts = urllib.parse.urlsplit(amqp_uri, allow_fragments=False)
amqp_con = amqp.Connection(parts.hostname, userid=parts.username,
password=parts.password)
- logging.info('Connected to AMQP server at %s@%s' % (parts.username, parts.hostname))
+ logging.info('Connected to AMQP server at %s@%s', parts.username, parts.hostname)
return amqp_con
-def update_output(amqp_channel, force_update=False):
+def update_output(force_update=False):
'''Update report'''
- global last_update
-
# update at most every 10 s
now = time.time()
- if not force_update and now - last_update < 10:
+ if not force_update and now - LAST_UPDATE < 10:
return
with open(running_name_new, 'w', encoding='utf-8') as f:
@@ -64,12 +65,13 @@ def process_message(msg):
runhash = ''
params = info.get('params', {})
for p in sorted(params):
- runhash += '%s_%s;' % (p, params[p])
+ runhash += f'{p}_{params[p]};'
if info['running']:
running_tests.setdefault(info['package'], {}).setdefault(
runhash, {}).setdefault(
- info['release'], {})[info['architecture']] = (params, info.get('duration', 0), info['logtail'])
+ info['release'], {})[info['architecture']] = \
+ (params, info.get('duration', 0), info['logtail'])
else:
try:
del running_tests[info['package']][runhash][info['release']][info['architecture']]
@@ -93,15 +95,15 @@ def process_message(msg):
logging.basicConfig(level=('DEBUG' in os.environ and logging.DEBUG or logging.INFO))
-amqp_con = amqp_connect()
-status_ch = amqp_con.channel()
+amqp_connection = amqp_connect()
+status_ch = amqp_connection.channel()
status_ch.access_request('/data', active=True, read=True, write=False)
status_ch.exchange_declare(exchange_name, 'fanout', durable=False, auto_delete=True)
-queue_name = 'running-listener-%s' % socket.getfqdn()
+queue_name = f'running-listener-{socket.getfqdn()}'
status_ch.queue_declare(queue_name, durable=False, auto_delete=True)
status_ch.queue_bind(queue_name, exchange_name, queue_name)
-logging.info('Listening to requests on %s' % queue_name)
+logging.info('Listening to requests on %s', queue_name)
status_ch.basic_consume('', callback=process_message, no_ack=True)
while status_ch.callbacks:
status_ch.wait()
diff --git a/charms/focal/autopkgtest-web/webcontrol/cache-amqp b/charms/focal/autopkgtest-web/webcontrol/cache-amqp
index c32ea40..4804aba 100755
--- a/charms/focal/autopkgtest-web/webcontrol/cache-amqp
+++ b/charms/focal/autopkgtest-web/webcontrol/cache-amqp
@@ -1,4 +1,7 @@
#!/usr/bin/python3
+#pylint: disable=invalid-name, import-error, consider-using-f-string, too-many-locals, bad-option-value
+
+"""Handles the amqp cache"""
import argparse
import configparser
@@ -18,6 +21,7 @@ AMQP_CONTEXTS = ["ubuntu", "huge", "ppa", "upstream"]
class AutopkgtestQueueContents:
+ """Class to handle and store the contents of the autopkgtest queue"""
def __init__(self, amqp_uri, database):
assert amqp_uri is not None
assert database is not None
@@ -42,7 +46,7 @@ class AutopkgtestQueueContents:
self.amqp_channel.queue_declare(
queue_name, durable=True, passive=True
)
- logger.info(f"Semaphore queue '{queue_name}' exists")
+ logger.info("Semaphore queue %s exists", queue_name)
except AMQPChannelException as e:
(code, _, _, _) = e.args
if code != 404:
@@ -52,7 +56,7 @@ class AutopkgtestQueueContents:
self.amqp_channel = self.amqp_con.channel()
# queue does not exist, create it
logger.info(
- f"Semaphore queue {queue_name} does not exist, initialising..."
+ "Semaphore queue %s does not exist, initialising...", queue_name
)
self.amqp_channel.queue_declare(
queue_name, durable=True
@@ -62,9 +66,10 @@ class AutopkgtestQueueContents:
routing_key=queue_name,
)
else: # not the leader
- logging.error(
- "We are not the leader, and there is no semaphore queue yet, we can't do anything - exiting."
- )
+ logging.error("%s%s",
+ "We are not the leader, and there is no semaphore ",
+ "queue yet, we can't do anything - exiting."
+ )
sys.exit(0)
@property
@@ -82,7 +87,7 @@ class AutopkgtestQueueContents:
releases.append(row[0])
for r in releases:
for row in db_con.execute(
- "SELECT DISTINCT arch from test WHERE release=?", (r,)
+ "SELECT DISTINCT arch from test WHERE release=?", (r,)
):
release_arches.setdefault(r, []).append(row[0])
return release_arches
@@ -122,7 +127,6 @@ class AutopkgtestQueueContents:
res.append(r)
except (ValueError, IndexError):
logging.error('Received invalid request format "%s"', r)
- return
return res
def get_queue_contents(self):
@@ -142,7 +146,8 @@ class AutopkgtestQueueContents:
f"semaphore-{context}-{release}-{arch}"
)
logging.info(
- f"Trying to lock semaphore queue {semaphore_queue_name}..."
+ "Trying to lock semaphore queue %s...",
+ semaphore_queue_name
)
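+ # the semaphore queue holds a single message; holding it unacknowledged
+ # acts as an exclusive lock until it is requeued via basic_reject below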
r = None
while r is None:
@@ -177,7 +182,8 @@ class AutopkgtestQueueContents:
}
all_arches.add(arch)
logging.debug(
- f"Releasing semaphore lock {semaphore_queue_name}"
+ "Releasing semaphore lock %s",
+ semaphore_queue_name
)
channel.basic_reject(r.delivery_tag, True)
@@ -233,22 +239,22 @@ if __name__ == "__main__":
logger.setLevel(logging.INFO)
try:
- amqp_uri = cp["amqp"]["uri"]
+ amqpuri = cp["amqp"]["uri"]
except KeyError:
print("No AMQP URI found", file=sys.stderr)
sys.exit(1)
try:
- database = cp["web"]["database_ro"]
+ db = cp["web"]["db_ro"]
except KeyError:
- print("No database found", file=sys.stderr)
+ print("No db found", file=sys.stderr)
sys.exit(1)
- aq = AutopkgtestQueueContents(amqp_uri, database)
+ aq = AutopkgtestQueueContents(amqpuri, db)
queue_contents = aq.get_queue_contents()
with tempfile.NamedTemporaryFile(
- mode="w", dir=os.path.dirname(args.output), delete=False
+ mode="w", dir=os.path.dirname(args.output), delete=False
) as tf:
try:
json.dump(queue_contents, tf, indent=2)
diff --git a/charms/focal/autopkgtest-web/webcontrol/download-all-results b/charms/focal/autopkgtest-web/webcontrol/download-all-results
index ab0a1e3..aebff04 100755
--- a/charms/focal/autopkgtest-web/webcontrol/download-all-results
+++ b/charms/focal/autopkgtest-web/webcontrol/download-all-results
@@ -1,14 +1,17 @@
#!/usr/bin/python3
+#pylint: disable=invalid-name, consider-using-with, too-many-locals, too-many-branches, consider-using-f-string, no-member, bad-option-value
-# Authors: Iain Lane, Martin Pitt
+'''
+Authors: Iain Lane, Martin Pitt
-# This script uses the OpenStack Swift API to list all of the contents of our
-# containers, diffs that against the local database, and then downloads any
-# missing results.
+This script uses the OpenStack Swift API to list all of the contents of our
+containers, diffs that against the local database, and then downloads any
+missing results.
-# While in normal operation the download-results script is supposed to receive
-# notification of completed jobs, in case of bugs or network outages etc, this
-# script can be used to find any results which were missed and insert them.
+While in normal operation the download-results script is supposed to receive
+notification of completed jobs, in case of bugs or network outages etc, this
+script can be used to find any results which were missed and insert them.
+'''
import os
import sys
@@ -21,10 +24,10 @@ import configparser
import urllib.parse
import time
import http
+from urllib.request import urlopen
from distro_info import UbuntuDistroInfo
from helpers.utils import get_test_id, init_db
-from urllib.request import urlopen
LOGGER = logging.getLogger(__name__)
@@ -33,6 +36,9 @@ db_con = None
def list_remote_container(container_url):
+ """
+ Lists the result objects stored in a remote swift container
+ """
LOGGER.debug("Listing container %s", container_url)
out = []
@@ -42,10 +48,10 @@ def list_remote_container(container_url):
url += f"&marker={urllib.parse.quote(start)}"
LOGGER.debug('Retrieving "%s"', url)
- for retry in range(5):
+ for _ in range(5):
try:
resp = urlopen(url)
- except http.client.RemoteDisconnected as e:
+ except http.client.RemoteDisconnected:
LOGGER.debug('Got disconnected, sleeping')
time.sleep(5)
continue
@@ -79,20 +85,22 @@ def list_remote_container(container_url):
return ret
-def list_our_results(release):
- LOGGER.debug("Finding already recorded results for %s", release)
+def list_our_results(specified_release):
+ """Shows results for a specific release"""
+ LOGGER.debug("Finding already recorded results for %s", specified_release)
c = db_con.cursor()
c.execute(
- "SELECT run_id FROM result INNER JOIN test ON test.id = result.test_id WHERE test.release=?",
- (release,),
+ "SELECT run_id FROM result INNER JOIN test ON " + \
+ "test.id = result.test_id WHERE test.release=?",
+ (specified_release,),
)
return {run_id for (run_id,) in c.fetchall()}
def fetch_one_result(url):
"""Download one result URL from swift and add it to the DB"""
- (release, arch, _, src, run_id, _) = url.split("/")[-6:]
- test_id = get_test_id(db_con, release, arch, src)
+ (release_fetch, arch, _, src, run_id, _) = url.split("/")[-6:]
+ test_id = get_test_id(db_con, release_fetch, arch, src)
try:
f = urlopen(url, timeout=30)
@@ -119,7 +127,7 @@ def fetch_one_result(url):
srcver = (
tar.extractfile("testpkg-version").read().decode().strip()
)
- except KeyError as e:
+ except KeyError:
# not found
if exitcode in (4, 12, 20):
# repair it
@@ -135,7 +143,7 @@ def fetch_one_result(url):
# requester
try:
requester = tar.extractfile("requester").read().decode().strip()
- except KeyError as e:
+ except KeyError:
requester = ""
except (KeyError, ValueError, tarfile.TarError) as e:
LOGGER.debug("%s is damaged, ignoring: %s", url, str(e))
@@ -161,7 +169,7 @@ def fetch_one_result(url):
LOGGER.debug(
"Fetched test result for %s/%s/%s/%s %s (triggers: %s): exit code %i",
- release,
+ release_fetch,
arch,
src,
ver,
@@ -189,13 +197,11 @@ def fetch_one_result(url):
LOGGER.info("%s was already recorded - skipping", run_id)
-def fetch_container(release, container_url):
+def fetch_container(release_to_fetch, container_url):
"""Download new results from a swift container"""
- marker = ""
-
try:
- our_results = list_our_results(release)
+ our_results = list_our_results(release_to_fetch)
known_results = list_remote_container(container_url)
need_to_fetch = set(known_results.keys()) - our_results
@@ -206,7 +212,7 @@ def fetch_container(release, container_url):
fetch_one_result(os.path.join(container_url, known_results[run_id]))
except urllib.error.HTTPError as e:
if e.code == 401:
- LOGGER.warning(f"Couldn't access {container_url} - doesn't exist?")
+ LOGGER.warning("Couldn't access %s - doesn't exist?", container_url)
return
raise
diff --git a/charms/focal/autopkgtest-web/webcontrol/download-results b/charms/focal/autopkgtest-web/webcontrol/download-results
index b8d4188..0d1348b 100755
--- a/charms/focal/autopkgtest-web/webcontrol/download-results
+++ b/charms/focal/autopkgtest-web/webcontrol/download-results
@@ -1,4 +1,8 @@
#!/usr/bin/python3
+#pylint: disable=invalid-name, import-error, missing-function-docstring, too-many-locals
+'''
+Listens for finished-test notifications over AMQP and records them in the database
+'''
import configparser
import json
@@ -7,9 +11,7 @@ import os
import socket
import sqlite3
import urllib.parse
-
from helpers.utils import get_test_id, init_db
-from urllib.request import urlopen
import amqplib.client_0_8 as amqp
@@ -27,7 +29,7 @@ def amqp_connect():
parts.hostname, userid=parts.username, password=parts.password
)
logging.info(
- "Connected to AMQP server at %s@%s" % (parts.username, parts.hostname)
+ "Connected to AMQP server at %s@%s", parts.username, parts.hostname
)
return amqp_con
@@ -48,7 +50,7 @@ def process_message(msg, db_con):
if isinstance(body, bytes):
body = body.decode("UTF-8", errors="replace")
info = json.loads(body)
- logging.info("Received notification of completed test {}".format(info))
+ logging.info("Received notification of completed test %s", info)
arch = info["architecture"]
container = info["container"]
@@ -61,8 +63,8 @@ def process_message(msg, db_con):
(_, _, _, _, run_id) = info["swift_dir"].split("/")
# we don't handle PPA requests
- if container != ("autopkgtest-{}".format(release)):
- logging.debug("Ignoring non-distro request: {}".format(info))
+ if container != (f"autopkgtest-{release}"):
+ logging.debug("Ignoring non-distro request: %s", info)
msg.channel.basic_ack(msg.delivery_tag)
return
@@ -100,20 +102,20 @@ if __name__ == "__main__":
level=("DEBUG" in os.environ and logging.DEBUG or logging.INFO)
)
- db_con = db_connect()
- amqp_con = amqp_connect()
- status_ch = amqp_con.channel()
+ db_connection = db_connect()
+ amqp_connection = amqp_connect()
+ status_ch = amqp_connection.channel()
status_ch.access_request("/complete", active=True, read=True, write=False)
status_ch.exchange_declare(
EXCHANGE_NAME, "fanout", durable=True, auto_delete=False
)
- queue_name = "complete-listener-%s" % socket.getfqdn()
+ queue_name = "complete-listener-" + socket.getfqdn()
status_ch.queue_declare(queue_name, durable=True, auto_delete=False)
status_ch.queue_bind(queue_name, EXCHANGE_NAME, queue_name)
- logging.info("Listening to requests on %s" % queue_name)
+ logging.info("Listening to requests on %s", queue_name)
status_ch.basic_consume(
- "", callback=lambda msg: process_message(msg, db_con)
+ "", callback=lambda msg: process_message(msg, db_connection)
)
while status_ch.callbacks:
status_ch.wait()
diff --git a/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py b/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py
index ec74d8f..336da33 100644
--- a/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py
+++ b/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py
@@ -1,7 +1,7 @@
'''
utilities for autopkgtest-web webcontrol
'''
-#pylint: disable=protected-access
+#pylint: disable=protected-access, invalid-name
import logging
import os
import sqlite3
diff --git a/charms/focal/autopkgtest-web/webcontrol/private_results/app.py b/charms/focal/autopkgtest-web/webcontrol/private_results/app.py
index 95cec50..6696d10 100644
--- a/charms/focal/autopkgtest-web/webcontrol/private_results/app.py
+++ b/charms/focal/autopkgtest-web/webcontrol/private_results/app.py
@@ -1,11 +1,15 @@
"""Test Result Fetcher Flask App"""
+#pylint: disable=import-error, consider-using-f-string, too-many-arguments, wrong-import-position, bad-option-value
import os
import sys
import logging
-import swiftclient
import configparser
-
from html import escape
+import swiftclient
+
from flask import (Flask, Response, request, session, redirect,
render_template_string)
from flask_openid import OpenID
@@ -13,9 +17,6 @@ from werkzeug.middleware.proxy_fix import ProxyFix
sys.path.append('..')
from helpers.utils import setup_key
from request.submit import Submit
-
HTML = """
<!doctype html>
@@ -40,21 +41,20 @@ LOGIN = """
DENIED = "Unprivileged or unavailable."
-def swift_get_object(connection, container, path):
+def swift_get_object(swift_connection, container, path):
"""Fetch an object from swift."""
try:
- _, contents = connection.get_object(container, path)
- except swiftclient.exceptions.ClientException as e:
- logging.error('Failed to fetch %s from container (%s)' %
- (path, str(e)))
+ _, contents = swift_connection.get_object(container, path)
+ except swiftclient.exceptions.ClientException as swift_error:
+ logging.error('Failed to fetch %s from container (%s)', path, str(swift_error))
return None
return contents
-def validate_user_path(connection, container, nick, path):
+def validate_user_path(swift_connection, container, nick, path):
"""Return true if user is allowed to view files under the given path."""
# First we need to check if this result is actually sharable
- allowed_file = swift_get_object(connection, container, path)
+ allowed_file = swift_get_object(swift_connection, container, path)
if not allowed_file:
return False
allowed = allowed_file.decode('utf-8').splitlines()
@@ -66,10 +66,10 @@ def validate_user_path(connection, container, nick, path):
for entity in allowed:
(code, response) = Submit.lp_request('~%s/participants' % entity, {})
if code != 200:
- logging.error('Unable to validate user %s (%s)' % (nick, code))
+ logging.error('Unable to validate user %s (%s)', nick, code)
return False
- for e in response.get('entries', []):
- if e.get('name') == nick:
+ for response_entry in response.get('entries', []):
+ if response_entry.get('name') == nick:
return True
return False
@@ -152,13 +152,11 @@ def index_result(container, series, arch, group, src, runid, file):
content_type = 'text/plain; charset=UTF-8'
headers = {'Content-Encoding': 'gzip'}
return Response(result, content_type=content_type, headers=headers)
- else:
- return result
- else:
- # XXX: render_template_string urlencodes its context values, so it's
- # not really possible to have 'nested HTML' rendered properly.
- return HTML.replace("{{ content }}",
- render_template_string(LOGIN, **session))
+ return result
+ # render_template_string urlencodes its context values, so it's
+ # not really possible to have 'nested HTML' rendered properly.
+ return HTML.replace("{{ content }}",
+ render_template_string(LOGIN, **session))
@app.route('/login', methods=['GET', 'POST'])
diff --git a/charms/focal/autopkgtest-web/webcontrol/publish-db b/charms/focal/autopkgtest-web/webcontrol/publish-db
index a44b04a..5eadb58 100755
--- a/charms/focal/autopkgtest-web/webcontrol/publish-db
+++ b/charms/focal/autopkgtest-web/webcontrol/publish-db
@@ -1,14 +1,15 @@
#!/usr/bin/python3
-# download/read Sources.gz for all known releases and write them into
-# a copy of autopkgtest.db, which is then published to the public location.
-# This is being used for statistics.
+'''
+download/read Sources.gz for all known releases and write them into
+a copy of autopkgtest.db, which is then published to the public location.
+This is being used for statistics.
+'''
+#pylint: disable=invalid-name, consider-using-f-string, c-extension-no-member, bad-option-value
import configparser
-import fcntl
import gzip
import logging
import os
-import shutil
import sqlite3
import tempfile
import urllib.request
@@ -75,7 +76,7 @@ def init_db(path, path_current, path_rw):
current_version_copied = True
except sqlite3.OperationalError as e:
if "no such column: pocket" not in str(
- e
+ e
) and "no such column: component" not in str(
e
): # schema upgrade
@@ -111,8 +112,9 @@ def init_db(path, path_current, path_rw):
return db
-def get_last_checked(db_con, url):
- c = db_con.cursor()
+def get_last_checked(db_connection, url):
+ """Get the time the database was last checked"""
+ c = db_connection.cursor()
c.execute("SELECT timestamp FROM url_last_checked WHERE url=?", (url,))
try:
@@ -122,14 +124,15 @@ def get_last_checked(db_con, url):
return None
-def get_sources(db_con, release):
+def get_sources(db_connection, release):
+ """Gets sources.gz for specific release"""
for component in components:
for pocket in (release, release + "-updates"):
logging.debug("Processing %s/%s", pocket, component)
try:
url = f"{archive_url}/dists/{pocket}/{component}/source/Sources.gz"
request = urllib.request.Request(url)
- last_checked = get_last_checked(db_con, url)
+ last_checked = get_last_checked(db_connection, url)
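+ # send If-Modified-Since so the archive can reply 304 Not Modified
+ # (caught below) when Sources.gz is unchanged since the last run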
if last_checked:
request.add_header("If-Modified-Since", last_checked)
@@ -139,7 +142,7 @@ def get_sources(db_con, release):
temp_file.write(response.read())
last_modified = response.getheader("Last-Modified")
if last_modified:
- db_con.execute(
+ db_connection.execute(
"INSERT INTO url_last_checked (url, timestamp) "
"VALUES (:url, :timestamp) "
"ON CONFLICT (url) DO "
@@ -147,7 +150,7 @@ def get_sources(db_con, release):
{'url': url, 'timestamp': last_modified}
)
- db_con.execute(
+ db_connection.execute(
"DELETE FROM current_version "
"WHERE pocket = ? AND component = ?",
(pocket, component),
@@ -155,7 +158,7 @@ def get_sources(db_con, release):
temp_file.seek(0)
with gzip.open(temp_file) as fd:
for section in apt_pkg.TagFile(fd):
- db_con.execute(
+ db_connection.execute(
"INSERT INTO current_version "
"(release, pocket, component, package, version) "
"VALUES "
@@ -171,11 +174,10 @@ def get_sources(db_con, release):
'version': section["Version"],
},
)
- db_con.commit()
+ db_connection.commit()
except urllib.error.HTTPError as e:
if e.code == 304:
- logging.debug("url {} not modified".format(url))
- pass
+ logging.debug("url %s not modified", url)
if __name__ == "__main__":
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/app.py b/charms/focal/autopkgtest-web/webcontrol/request/app.py
index 15c5b5c..b66cfad 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/app.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/app.py
@@ -1,4 +1,5 @@
"""Test Request Flask App"""
+#pylint: disable=import-error, consider-using-f-string, invalid-name, too-many-locals, no-else-return, no-value-for-parameter, too-many-return-statements, too-many-branches, too-many-statements, bad-option-value
import os
import logging
import hmac
@@ -58,24 +59,24 @@ SUCCESS = """
"""
-def check_github_sig(request):
- """Validate github signature of request.
+def check_github_sig(req):
+ """Validate github signature of req.
See https://developer.github.com/webhooks/securing/
"""
# load key
keyfile = os.path.expanduser('~/github-secrets.json')
- package = request.args.get('package')
+ package = req.args.get('package')
try:
- with open(keyfile) as f:
- keymap = json.load(f)
+ with open(keyfile, encoding='utf-8') as key_file:
+ keymap = json.load(key_file)
key = keymap[package].encode('ASCII')
- except (IOError, ValueError, KeyError, UnicodeEncodeError) as e:
- logging.error('Failed to load GitHub key for package %s: %s', package, e)
+ except (IOError, ValueError, KeyError, UnicodeEncodeError) as github_sig_error:
+ logging.error('Failed to load GitHub key for package %s: %s', package, github_sig_error)
return False
- sig_sha1 = request.headers.get('X-Hub-Signature', '')
- payload_sha1 = 'sha1=' + hmac.new(key, request.data, 'sha1').hexdigest()
+ sig_sha1 = req.headers.get('X-Hub-Signature', '')
+ payload_sha1 = 'sha1=' + hmac.new(key, req.data, 'sha1').hexdigest()
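+ # compare_digest performs a constant-time comparison, so the check does
+ # not leak the expected signature through timing differences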
if hmac.compare_digest(sig_sha1, payload_sha1):
return True
logging.error('check_github_sig: signature mismatch! received: %s calculated: %s',
@@ -89,7 +90,7 @@ def invalid(message, code=400):
html = LOGOUT.format(**session)
else:
html = ''
- html += '<p>You submitted an invalid request: %s</p>' % maybe_escape(str(message))
+ html += f"<p>You submitted an invalid request: {maybe_escape(str(message))}</p>"
logging.error('Request failed with %i: %s', code, message)
return HTML.format(html), code
@@ -128,16 +129,16 @@ def index_root():
del params[getarg]
except KeyError:
pass
- l = request.args.getlist(getarg)
- if l:
- params[paramname] = [maybe_escape(p) for p in l]
+ req_list = request.args.getlist(getarg)
+ if req_list:
+ params[paramname] = [maybe_escape(p) for p in req_list]
# split "VAR1=value;VAR2=value" --env arguments, as some frameworks don't
# allow multipe "env="
if 'env' in params:
splitenv = []
- for e in params['env']:
- splitenv += e.split(';')
+ for env_param in params['env']:
+ splitenv += env_param.split(';')
params['env'] = splitenv
# request from github?
@@ -155,7 +156,8 @@ def index_root():
s = Submit()
try:
- params.setdefault('env', []).append('UPSTREAM_PULL_REQUEST=%i' % int(github_params['number']))
+ params.setdefault('env', []).append('UPSTREAM_PULL_REQUEST=%i'
+ % int(github_params['number']))
statuses_url = github_params['pull_request']['statuses_url']
params['env'].append('GITHUB_STATUSES_URL=' + statuses_url)
@@ -177,7 +179,7 @@ def index_root():
with open(os.path.join(PATH, 'github-pending', '%s-%s-%s-%s-%s' %
(params['release'], params['arch'],
params['package'], github_params['number'],
- os.path.basename(statuses_url))), 'w') as f:
+ os.path.basename(statuses_url))), 'w', encoding='utf-8') as f:
f.write(json.dumps(params))
# tell GitHub that the test is pending
@@ -213,7 +215,7 @@ def index_root():
return HTML.format(LOGOUT +
"<p>Deleted {} requests</p>".format(count)).format(
- **ChainMap(session, params))
+ **ChainMap(session, params))
if params.get('ppas'):
s.send_amqp_request(context='ppa', **params)
@@ -223,7 +225,7 @@ def index_root():
if not params.get('ppas'):
url = 'https://autopkgtest.ubuntu.com/packages/{}/{}/{}'.format(
params['package'], params['release'], params['arch'])
- params['Result history'] = '<a href="{}">{}</a>'.format(url, url)
+ params['Result history'] = '<a href="{0}">{0}</a>'.format(url)
success = SUCCESS.format(
EMPTY.join(ROW.format(key, val) for key, val in sorted(params.items()))
)
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/submit.py b/charms/focal/autopkgtest-web/webcontrol/request/submit.py
index 0e35b3f..f221d97 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/submit.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/submit.py
@@ -2,6 +2,7 @@
Author: Martin Pitt <martin.pitt@xxxxxxxxxx>
"""
+#pylint: disable=protected-access, invalid-name, consider-using-f-string, import-error, no-member, dangerous-default-value, too-many-arguments, too-many-locals, too-many-branches, too-many-statements, unused-argument, no-value-for-parameter, inconsistent-return-statements, bad-classmethod-argument, bad-option-value
import os
import json
@@ -33,6 +34,9 @@ ALLOWED_USERS_PERPACKAGE = {'snapcraft': ['snappy-m-o']}
class Submit:
+ '''
+ Class to submit an autopkgtest request
+ '''
def __init__(self):
cp = configparser.ConfigParser()
cp.read(os.path.expanduser('~ubuntu/autopkgtest-cloud.conf'))
@@ -40,7 +44,7 @@ class Submit:
# read valid releases and architectures from DB
self.db_con = sqlite3.connect('file:%s?mode=ro' % cp['web']['database_ro'], uri=True)
self.releases = set(UbuntuDistroInfo().supported() + UbuntuDistroInfo().supported_esm())
- logging.debug('Valid releases: %s' % self.releases)
+ logging.debug('Valid releases: %s', self.releases)
self.architectures = set()
c = self.db_con.cursor()
@@ -50,13 +54,13 @@ class Submit:
if row is None:
break
self.architectures.add(row[0])
- logging.debug('Valid architectures: %s' % self.architectures)
+ logging.debug('Valid architectures: %s', self.architectures)
# dissect AMQP URL
self.amqp_creds = urllib.parse.urlsplit(cp['amqp']['uri'],
allow_fragments=False)
assert self.amqp_creds.scheme == 'amqp'
- logging.debug('AMQP credentials: %s' % repr(self.amqp_creds))
+ logging.debug('AMQP credentials: %s', repr(self.amqp_creds))
def validate_distro_request(self, release, arch, package, triggers,
requester, ppas=[], **kwargs):
@@ -103,7 +107,9 @@ class Submit:
raise ValueError('Unknown PPA ' + ppa)
# allow kernel tests for EOL vivid
skip_result_check = (release == 'vivid' and triggers and triggers[0].startswith('linux'))
- if not self.is_valid_package_with_results(None if (ppas or skip_result_check) else release, arch, package):
+ if not self.is_valid_package_with_results(None if (ppas or skip_result_check) else release,
+ arch,
+ package):
raise ValueError('Package %s does not have any test results' %
package)
@@ -117,8 +123,8 @@ class Submit:
for trigger in triggers:
try:
trigsrc, trigver = trigger.split('/')
- except ValueError:
- raise ValueError('Malformed trigger, must be srcpackage/version')
+ except ValueError as exc:
+ raise ValueError('Malformed trigger, must be srcpackage/version') from exc
# Debian Policy 5.6.1 and 5.6.12
if not NAME.match(trigsrc) or not VERSION.match(trigver):
raise ValueError('Malformed trigger')
@@ -245,7 +251,7 @@ class Submit:
routing_key=queue)
@classmethod
- def post_json(klass, url, data, auth_file, project):
+ def post_json(cls, url, data, auth_file, project):
"""Send POST request with JSON data via basic auth.
'data' is a dictionary which will be posted to 'url' in JSON encoded
@@ -259,7 +265,7 @@ class Submit:
HTTPError if POST request fails.
"""
# look up project in auth_file
- with open(auth_file) as f:
+ with open(auth_file, encoding='utf-8') as f:
contents = f.read()
for l in contents.splitlines():
if l.startswith(project + ':'):
@@ -287,7 +293,7 @@ class Submit:
"""Check if a ppa exists"""
team, _, name = ppa.partition('/')
if not NAME.match(team) or not NAME.match(name):
- return None
+ return False
# https://launchpad.net/+apidoc/1.0.html#person-getPPAByName
(code, response) = self.lp_request('~' + team, {
'ws.op': 'getPPAByName',
@@ -298,7 +304,7 @@ class Submit:
'is_valid_ppa(%s): code %u, response %s',
ppa, code, repr(response))
if code < 200 or code >= 300:
- return None
+ return False
if response.get('name') == name:
return True
@@ -349,8 +355,7 @@ class Submit:
release, package, version, code, repr(response))
if response.get('total_size', 0) > 0:
return response['entries'][0]['component_name']
- else:
- return None
+ return None
def can_upload(self, person, release, component, package):
"""Check if person can upload package into Ubuntu release"""
@@ -366,13 +371,13 @@ class Submit:
})
logging.debug('can_upload(%s, %s, %s, %s): (%u, %s)',
person, release, component, package, code, repr(response))
- return code >= 200 and code < 300
+        return 200 <= code < 300
def in_allowed_team(self, person, package=[], teams=[]):
"""Check if person is in ALLOWED_TEAMS"""
for team in (teams or ALLOWED_TEAMS):
- (code, response) = self.lp_request('~%s/participants' % team, {})
+ (_, response) = self.lp_request('~%s/participants' % team, {})
for e in response.get('entries', []):
if e.get('name') == person:
return True
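
The logging changes above swap eager %-formatting for lazy formatting, which is what
pylint's logging-not-lazy check asks for: the format arguments are handed to the
logging call and only interpolated if the record is actually emitted. A minimal
standalone sketch of the difference (illustrative, not part of the diff):

    import logging

    logging.basicConfig(level=logging.WARNING)
    releases = {'jammy', 'noble'}

    # Eager: the message string is built even though DEBUG is disabled.
    logging.debug('Valid releases: %s' % releases)

    # Lazy: logging interpolates only when the record is emitted, so
    # disabled debug calls cost almost nothing.
    logging.debug('Valid releases: %s', releases)
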
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py
index c3bd16a..6edda1e 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py
@@ -1,4 +1,5 @@
"""Test the Flask app."""
+# pylint: disable=no-value-for-parameter, missing-function-docstring, no-member, too-few-public-methods
import os
@@ -10,6 +11,9 @@ from request.submit import Submit
class AppTestBase(TestCase):
+    """
+    Base class for testing the app
+    """
def setUp(self):
request.app.app.config['TESTING'] = True
self.app = request.app.app.test_client()
@@ -38,7 +42,8 @@ class DistroRequestTests(AppTestBase):
@patch('request.app.Submit')
def test_nickname(self, mock_submit):
"""Hitting / with a nickname in the session prompts for logout."""
- mock_submit.return_value.validate_distro_request.side_effect = ValueError('not 31337 enough')
+ mock_submit.return_value.validate_distro_request.side_effect = \
+ ValueError('not 31337 enough')
with self.app.session_transaction() as session:
session['nickname'] = 'person'
ret = self.app.get('/')
@@ -47,7 +52,8 @@ class DistroRequestTests(AppTestBase):
@patch('request.app.Submit')
def test_missing_request(self, mock_submit):
"""Missing GET params should return 400."""
- mock_submit.return_value.validate_distro_request.side_effect = ValueError('not 31337 enough')
+ mock_submit.return_value.validate_distro_request.side_effect = \
+ ValueError('not 31337 enough')
self.prep_session()
ret = self.app.get('/')
self.assertEqual(ret.status_code, 400)
@@ -56,7 +62,8 @@ class DistroRequestTests(AppTestBase):
@patch('request.app.Submit')
def test_invalid_request(self, mock_submit):
"""Invalid GET params should return 400."""
- mock_submit.return_value.validate_distro_request.side_effect = ValueError('not 31337 enough')
+ mock_submit.return_value.validate_distro_request.side_effect = \
+ ValueError('not 31337 enough')
self.prep_session()
ret = self.app.get('/?arch=i386&package=hi&release=testy&trigger=foo/1')
self.assertEqual(ret.status_code, 400)
@@ -91,7 +98,8 @@ class DistroRequestTests(AppTestBase):
def test_valid_request_with_ppas(self, mock_submit):
"""Return success with all params & ppas."""
self.prep_session()
- ret = self.app.get('/?arch=i386&package=hi&release=testy&trigger=foo/1&ppa=train/overlay&ppa=train/001')
+        ret = self.app.get('/?arch=i386&package=hi&release=testy'
+                           '&trigger=foo/1&ppa=train/overlay&ppa=train/001')
self.assertEqual(ret.status_code, 200)
self.assertIn(b'ubmitted', ret.data)
mock_submit.return_value.validate_distro_request.assert_called_once_with(
@@ -127,7 +135,8 @@ class GitHubRequestTests(AppTestBase):
def test_ping(self):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=cb59904bf33c619ad2c52095deb405c86cc5adfd'),
+ headers=[('X-Hub-Signature',
+ 'sha1=cb59904bf33c619ad2c52095deb405c86cc5adfd'),
('X-GitHub-Event', 'ping')],
data=b'{"info": "https://api.github.com/xx"}')
self.assertEqual(ret.status_code, 200, ret.data)
@@ -139,8 +148,10 @@ class GitHubRequestTests(AppTestBase):
def test_invalid_secret_file(self, mock_submit):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=8572f239e05c652710a4f85d2061cc0fcbc7b127')],
- data=b'{"action": "opened", "number": 2, "pr": "https://api.github.com/xx"}')
+ headers=[('X-Hub-Signature',
+ 'sha1=8572f239e05c652710a4f85d2061cc0fcbc7b127')],
+ data=b'{"action": "opened", "number":' + \
+ ' 2, "pr": "https://api.github.com/xx"}')
self.assertEqual(ret.status_code, 403, ret.data)
self.assertIn(b'GitHub signature verification failed', ret.data)
@@ -177,7 +188,8 @@ class GitHubRequestTests(AppTestBase):
mock_check_github_sig.return_value = True
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- data=b'{"action": "boring", "number": 2, "pr": "https://api.github.com/xx"}')
+ data=b'{"action": "boring", "number": ' + \
+ '2, "pr": "https://api.github.com/xx"}')
self.assertEqual(ret.status_code, 200, ret.data)
self.assertIn(b'GitHub PR action boring is not relevant for testing', ret.data)
self.assertFalse(mock_submit.return_value.validate_git_request.called)
@@ -208,7 +220,8 @@ class GitHubRequestTests(AppTestBase):
def test_valid_simple(self, mock_submit):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
+ headers=[('X-Hub-Signature',
+ 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/two"}}')
@@ -226,7 +239,8 @@ class GitHubRequestTests(AppTestBase):
# we recorded the request
request.app.open.assert_called_with(
os.path.join(request.app.PATH, 'github-pending', 'testy-C51-hi-2-two'), 'w')
- self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two', str(request.app.open().write.call_args))
+ self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two',
+ str(request.app.open().write.call_args))
self.assertIn('"arch": "C51"', str(request.app.open().write.call_args))
# we told GitHub about it
@@ -245,7 +259,8 @@ class GitHubRequestTests(AppTestBase):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo&'
'ppa=joe/stuff&ppa=mary/misc&env=THIS=a;THAT=b&env=THERE=c',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=f9041325575127310c304bb65f9befb0d13b1ce6')],
+ headers=[('X-Hub-Signature',
+ 'sha1=f9041325575127310c304bb65f9befb0d13b1ce6')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/2"}}')
@@ -271,7 +286,8 @@ class GitHubRequestTests(AppTestBase):
def test_valid_generated_url(self, mock_submit):
ret = self.app.post('/?arch=C51&package=hi&release=testy',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=427a20827d46f5fe8e18f08b9a7fa09ba915ea08')],
+ headers=[('X-Hub-Signature',
+ 'sha1=427a20827d46f5fe8e18f08b9a7fa09ba915ea08')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/two",'
b' "base": {"repo": {"clone_url": "https://github.com/joe/x.git"}}}}')
@@ -315,9 +331,11 @@ class GitHubRequestTests(AppTestBase):
mock_open(None, '{"hi": "1111111111111111111111111111111111111111"}'),
create=True)
def test_valid_testname(self, mock_submit):
- ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo&testname=first',
+        ret = self.app.post('/?arch=C51&package=hi&release=testy'
+                            '&build-git=http://x.com/foo&testname=first',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
+ headers=[('X-Hub-Signature',
+ 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/two"}}')
@@ -335,7 +353,8 @@ class GitHubRequestTests(AppTestBase):
# we recorded the request
request.app.open.assert_called_with(
os.path.join(request.app.PATH, 'github-pending', 'testy-C51-hi-2-two'), 'w')
- self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two', str(request.app.open().write.call_args))
+ self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two',
+ str(request.app.open().write.call_args))
self.assertIn('"testname": "first"', str(request.app.open().write.call_args))
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py
index bae9185..d326e46 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py
@@ -1,7 +1,7 @@
"""Submit Tests
-
 Test all things related to verifying input arguments and sending AMQP requests.
"""
+# pylint: disable=arguments-differ, consider-using-f-string, bad-option-value
import sqlite3
@@ -33,7 +33,7 @@ class SubmitTestBase(TestCase):
# mock config values
cfg = {'amqp': {'uri': 'amqp://user:s3kr1t@1.2.3.4'},
'web': {'database': '/ignored', 'database_ro': '/ignored'},
- 'autopkgtest' : { 'releases': 'testy grumpy' }}
+ 'autopkgtest' : {'releases': 'testy grumpy'}}
mock_configparser.return_value = MagicMock()
mock_configparser.return_value.__getitem__.side_effect = cfg.get
@@ -60,100 +60,105 @@ class DistroRequestValidationTests(SubmitTestBase):
def test_bad_release(self):
"""Unknown release"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('fooly', 'C51', 'blue', ['ab/1'], 'joe')
- self.assertEqual(str(cme.exception), 'Unknown release fooly')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown release fooly')
def test_bad_arch(self):
"""Unknown architecture"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'wut', 'blue', ['ab/1'], 'joe')
- self.assertEqual(str(cme.exception), 'Unknown architecture wut')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown architecture wut')
def test_bad_package(self):
"""Unknown package"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'badpkg', ['ab/1'], 'joe')
- self.assertIn('Package badpkg', str(cme.exception))
+ self.assertIn('Package badpkg', str(mock_response_error.exception))
def test_bad_argument(self):
"""Unknown argument"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1'], 'joe', foo='bar')
- self.assertIn('Invalid argument foo', str(cme.exception))
+ self.assertIn('Invalid argument foo', str(mock_response_error.exception))
def test_invalid_trigger_syntax(self):
"""Invalid syntax in trigger"""
# invalid trigger format
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab'], 'joe')
- self.assertIn('Malformed trigger', str(cme.exception))
+ self.assertIn('Malformed trigger', str(mock_response_error.exception))
# invalid trigger source package name chars
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['a!b/1'], 'joe')
# invalid trigger version chars
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1!1'], 'joe')
- self.assertIn('Malformed trigger', str(cme.exception))
+ self.assertIn('Malformed trigger', str(mock_response_error.exception))
def test_disallowed_testname(self):
"""testname not allowed for distro tests"""
# we only allow this for GitHub requests; with distro requests it would
# be cheating as proposed-migration would consider those
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe', testname='first')
- self.assertIn('Invalid argument testname', str(cme.exception))
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_distro_request('testy',
+ 'C51',
+ 'blue',
+ ['ab/1.2'],
+ 'joe',
+ testname='first')
+ self.assertIn('Invalid argument testname', str(mock_response_error.exception))
@patch('request.submit.urllib.request.urlopen')
def test_ppa(self, mock_urlopen):
"""PPA does not exist"""
# invalid name don't even call lp
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['b~ad/ppa'])
- self.assertEqual(str(cme.exception), 'Unknown PPA b~ad/ppa')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA b~ad/ppa')
self.assertEqual(mock_urlopen.call_count, 0)
# mock Launchpad response: successful form, but no match
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.return_value = b'{}'
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.return_value = b'{}'
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['bad/ppa'])
- self.assertEqual(str(cme.exception), 'Unknown PPA bad/ppa')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA bad/ppa')
self.assertEqual(mock_urlopen.call_count, 1)
# success
- cm.read.return_value = b'{"name": "there"}'
+ mock_response.read.return_value = b'{"name": "there"}'
self.assertTrue(self.submit.is_valid_ppa('hi/there'))
# broken JSON response
- cm.read.return_value = b'not { json}'
- with self.assertRaises(ValueError) as cme:
+ mock_response.read.return_value = b'not { json}'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['broke/ness'])
# same, but entirely failing query -- let's be on the safe side
- cm.getcode.return_value = 404
- cm.read.return_value = b'<html>not found</html>'
- with self.assertRaises(ValueError) as cme:
+ mock_response.getcode.return_value = 404
+ mock_response.read.return_value = b'<html>not found</html>'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['bro/ken'])
- self.assertEqual(str(cme.exception), 'Unknown PPA bro/ken')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA bro/ken')
@patch('request.submit.urllib.request.urlopen')
def test_nonexisting_trigger(self, mock_urlopen):
@@ -161,30 +166,30 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, but no matching
# source/version
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.return_value = b'{"total_size": 0}'
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.return_value = b'{"total_size": 0}'
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
- self.assertEqual(str(cme.exception), 'ab/1.2 is not published in testy')
+ self.assertEqual(str(mock_response_error.exception), 'ab/1.2 is not published in testy')
self.assertEqual(mock_urlopen.call_count, 1)
# broken JSON response
- cm.read.return_value = b'not { json}'
- with self.assertRaises(ValueError) as cme:
+ mock_response.read.return_value = b'not { json}'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
# same, but entirely failing query -- let's be on the safe side
- cm.getcode.return_value = 404
- cm.read.return_value = b'<html>not found</html>'
- with self.assertRaises(ValueError) as cme:
+ mock_response.getcode.return_value = 404
+ mock_response.read.return_value = b'<html>not found</html>'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
- self.assertEqual(str(cme.exception), 'ab/1.2 is not published in testy')
+ self.assertEqual(str(mock_response_error.exception), 'ab/1.2 is not published in testy')
@patch('request.submit.urllib.request.urlopen')
def test_bad_package_ppa(self, mock_urlopen):
@@ -192,19 +197,20 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, but no matching
# source/version
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.side_effect = [b'{"name": "overlay"}',
- b'{"name": "goodstuff"}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.side_effect = [b'{"name": "overlay"}',
+ b'{"name": "goodstuff"}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'badpkg', ['ab/1.2'], 'joe',
ppas=['team/overlay', 'joe/goodstuff'])
- self.assertEqual(str(cme.exception), 'Package badpkg does not have any test results')
+ self.assertEqual(str(mock_response_error.exception),
+ 'Package badpkg does not have any test results')
self.assertEqual(mock_urlopen.call_count, 2)
@patch('request.submit.urllib.request.urlopen')
@@ -213,20 +219,21 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, but no matching
# source/version
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.side_effect = [b'{"name": "overlay"}',
- b'{"name": "goodstuff"}',
- b'{"total_size": 0}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.side_effect = [b'{"name": "overlay"}',
+ b'{"name": "goodstuff"}',
+ b'{"total_size": 0}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe',
ppas=['team/overlay', 'joe/goodstuff'])
- self.assertEqual(str(cme.exception), 'ab/1.2 is not published in PPA joe/goodstuff testy')
+ self.assertEqual(str(mock_response_error.exception),
+ 'ab/1.2 is not published in PPA joe/goodstuff testy')
self.assertEqual(mock_urlopen.call_count, 3)
@patch('request.submit.urllib.request.urlopen')
@@ -235,19 +242,20 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload not allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- b'{"total_size": 1, "entries": [{"name": "joe2"}]}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = \
+ [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
+ HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
+ HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
+ b'{"total_size": 1, "entries": [{"name": "joe2"}]}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
- self.assertIn('not allowed to upload blue or ab', str(cme.exception))
+ self.assertIn('not allowed to upload blue or ab', str(mock_response_error.exception))
self.assertEqual(mock_urlopen.call_count, 4)
@patch('request.submit.urllib.request.urlopen')
@@ -256,13 +264,14 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- b'true']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ b'true']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
self.assertEqual(mock_urlopen.call_count, 2)
@@ -273,13 +282,14 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- b'true']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ b'true']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'],
'joe', **{'all-proposed': '1'})
@@ -288,10 +298,10 @@ class DistroRequestValidationTests(SubmitTestBase):
def test_distro_all_proposed_bad_value(self):
"""Valid distro request with invalid all-proposed value"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'],
'joe', **{'all-proposed': 'bogus'})
- self.assertIn('nvalid all-proposed value', str(cme.exception))
+ self.assertIn('nvalid all-proposed value', str(mock_response_error.exception))
@patch('request.submit.urllib.request.urlopen')
def test_validate_distro_whitelisted_team(self, mock_urlopen):
@@ -299,15 +309,18 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- b'{"total_size": 1, "entries": [{"name": "joe"}]}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ HTTPError('https://lp/checkUpload',
+ 403, 'Forbidden', {}, None),
+ HTTPError('https://lp/checkUpload',
+ 403, 'Forbidden', {}, None),
+ b'{"total_size": 1, "entries": [{"name": "joe"}]}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
self.assertEqual(mock_urlopen.call_count, 4)
@@ -318,18 +331,20 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"name": "overlay"}',
- b'{"name": "goodstuff"}',
- # check if package is published in PPA
- b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- # component name in Ubuntu archive
- b'{"total_size": 1, "entries": [{"component_name": "universe"}]}',
- b'true']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"name": "overlay"}',
+ b'{"name": "goodstuff"}',
+ # check if package is published in PPA
+ b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ # component name in Ubuntu archive
+ b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "universe"}]}',
+ b'true']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe',
ppas=['team/overlay', 'joe/goodstuff'])
@@ -340,105 +355,132 @@ class GitRequestValidationTests(SubmitTestBase):
"""Test verification of git branch test requests"""
def test_bad_release(self):
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('fooly', 'C51', 'ab', **{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Unknown release fooly')
+ """Tests invalid release"""
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('fooly',
+ 'C51',
+ 'ab',
+ **{'build-git': 'https://x.com/proj'})
+ self.assertEqual(str(mock_response_error.exception), 'Unknown release fooly')
def test_bad_arch(self):
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('testy', 'wut', 'a!b', **{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Unknown architecture wut')
+ """Tests invalid architecture"""
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('testy',
+ 'wut',
+ 'a!b',
+ **{'build-git': 'https://x.com/proj'})
+ self.assertEqual(str(mock_response_error.exception), 'Unknown architecture wut')
def test_bad_package(self):
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('testy', 'C51', 'a!b', **{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Malformed package')
+ """Tests invalid package"""
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('testy',
+ 'C51',
+ 'a!b',
+ **{'build-git': 'https://x.com/proj'})
+ self.assertEqual(str(mock_response_error.exception), 'Malformed package')
@patch('request.submit.urllib.request.urlopen')
def test_unknown_ppa(self, mock_urlopen):
+ """Tests invalid ppa"""
# mock Launchpad response: successful form, but no match
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.return_value = b'{}'
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.return_value = b'{}'
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab', ['bad/ppa'],
**{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Unknown PPA bad/ppa')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA bad/ppa')
self.assertEqual(mock_urlopen.call_count, 1)
@patch('request.submit.Submit.is_valid_ppa')
def test_bad_env(self, is_valid_ppa):
+ """Tests invalid env variables"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
env=['foo=1', 'bar=1\n='],
**{'build-git': 'https://x.com/proj',
'ppas': ['a/b']})
- self.assertIn('Invalid environment', str(cme.exception))
- self.assertIn('bar=1', str(cme.exception))
+ self.assertIn('Invalid environment', str(mock_response_error.exception))
+ self.assertIn('bar=1', str(mock_response_error.exception))
def test_no_ppa(self):
"""No PPA"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Must specify at least one PPA (to associate results with)')
+ self.assertEqual(str(mock_response_error.exception),
+ 'Must specify at least one PPA (to associate results with)')
@patch('request.submit.Submit.is_valid_ppa')
def test_bad_git_url(self, is_valid_ppa):
+ """Tests invalid git url"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('testy', 'C51', 'ab', **{'build-git': 'foo://x.com/proj',
- 'ppas': ['a/b']})
- self.assertEqual(str(cme.exception), 'Malformed build-git')
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('testy',
+ 'C51',
+ 'ab',
+ **{'build-git': 'foo://x.com/proj',
+ 'ppas': ['a/b']})
+ self.assertEqual(str(mock_response_error.exception), 'Malformed build-git')
@patch('request.submit.Submit.is_valid_ppa')
def test_unknown_param(self, is_valid_ppa):
+ """Tests unknown parameter passed"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj', 'ppas': ['a/b'],
'foo': 'bar'})
- self.assertEqual(str(cme.exception), 'Unsupported arguments: foo')
+ self.assertEqual(str(mock_response_error.exception), 'Unsupported arguments: foo')
@patch('request.submit.Submit.is_valid_ppa')
def test_bad_testname(self, is_valid_ppa):
+ """Tests a janky testname"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj', 'testname': 'a !',
'ppas': ['a/b']})
- self.assertEqual(str(cme.exception), 'Malformed testname')
+ self.assertEqual(str(mock_response_error.exception), 'Malformed testname')
@patch('request.submit.Submit.is_valid_ppa')
def test_valid(self, is_valid_ppa):
+ """Tests a valid test to make sure it works"""
is_valid_ppa.return_value = True
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj',
- 'env': ['STATUS_URL=https://api.github.com/proj/123deadbeef'],
+ 'env': ['STATUS_URL=' + \
+ 'https://api.github.com/proj/123deadbeef'],
'ppas': ['a/b']})
@patch('request.submit.Submit.is_valid_ppa')
def test_branch(self, is_valid_ppa):
+ """Tests a specific branch of a package"""
is_valid_ppa.return_value = True
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj#refs/pull/2/head',
- 'env': ['STATUS_URL=https://api.github.com/proj/123deadbeef'],
+ 'env': ['STATUS_URL=' + \
+ 'https://api.github.com/proj/123deadbeef'],
'ppas': ['a/b']})
@patch('request.submit.Submit.is_valid_ppa')
def test_valid_testname(self, is_valid_ppa):
+ """Tests with a valid testname"""
is_valid_ppa.return_value = True
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj',
'testname': 'first',
- 'env': ['STATUS_URL=https://api.github.com/proj/123deadbeef'],
+ 'env': ['STATUS_URL=' + \
+ 'https://api.github.com/proj/123deadbeef'],
'ppas': ['a/b']})
@@ -448,6 +490,7 @@ class SendAMQPTests(SubmitTestBase):
@patch('request.submit.amqp.Connection')
@patch('request.submit.amqp.Message')
def test_valid_request(self, message_con, mock_con):
+ """Tests a completely valid package"""
# mostly a passthrough, but ensure that we do wrap the string in Message()
message_con.side_effect = lambda x: '>%s<' % x
@@ -465,6 +508,7 @@ class SendAMQPTests(SubmitTestBase):
@patch('request.submit.amqp.Connection')
@patch('request.submit.amqp.Message')
def test_valid_request_context(self, message_con, mock_con):
+ """Tests with valid context"""
# mostly a passthrough, but ensure that we do wrap the string in Message()
message_con.side_effect = lambda x: '>%s<' % x
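
The cm-to-mock_response renames above all build the same fixture: one MagicMock
stands in for the urlopen return value, its context manager, and the HTTP
response object. A minimal standalone sketch of that pattern (the URL and
payload are illustrative, not from the diff):

    from unittest.mock import MagicMock, patch
    import urllib.request

    mock_response = MagicMock()
    # 'with urlopen(...) as f:' should yield the mock itself.
    mock_response.__enter__.return_value = mock_response
    mock_response.getcode.return_value = 200
    mock_response.read.return_value = b'{"name": "there"}'

    with patch('urllib.request.urlopen', return_value=mock_response):
        with urllib.request.urlopen('http://mock.launchpad.net') as f:
            assert f.getcode() == 200
            assert b'there' in f.read()
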
diff --git a/charms/focal/autopkgtest-web/webcontrol/update-github-jobs b/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
index f2bb430..d4cd611 100755
--- a/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
+++ b/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
@@ -1,5 +1,8 @@
#!/usr/bin/python3
-
+"""
+Processes autopkgtest-cloud jobs requested from GitHub
+"""
+# pylint: disable=no-value-for-parameter, consider-using-f-string, consider-using-ternary, invalid-name, bad-option-value
import os
import json
import configparser
@@ -8,7 +11,6 @@ import time
import io
import sys
import tarfile
-import time
import urllib.request
import urllib.parse
from urllib.error import HTTPError
@@ -17,13 +19,13 @@ from request.submit import Submit
PENDING_DIR = '/run/autopkgtest_webcontrol/github-pending'
-swift_url = None
-external_url = None
+SWIFT_URL = None
+EXTERNAL_URL = None
def result_matches_job(result_url, params):
- # download result.tar and get exit code and testinfo
- for retry in range(5):
+ """download result.tar and get exit code and testinfo"""
+ for _ in range(5):
try:
with urllib.request.urlopen(result_url + '/result.tar') as f:
tar_bytes = io.BytesIO(f.read())
@@ -33,7 +35,7 @@ def result_matches_job(result_url, params):
time.sleep(1)
else:
logging.error('failed to download result %s', result_url)
- return
+            return None
try:
with tarfile.open(None, 'r', tar_bytes) as tar:
@@ -41,18 +43,18 @@ def result_matches_job(result_url, params):
info = json.loads(tar.extractfile('testinfo.json').read().decode())
except (KeyError, ValueError, tarfile.TarError) as e:
logging.error('broken result %s: %s', result_url, e)
- return
+        return None
try:
result_env = info['custom_environment']
except KeyError:
logging.info('result has no custom_environment, ignoring')
- return
+        return None
     # if the test result has the same parameters as the job, we have a winner
if result_env != params['env']:
logging.debug('exit code: %i, ignoring due to different test env: %s',
exitcode, result_env)
- return
+        return None
logging.debug('exit code: %i, test env matches job: %s',
exitcode, result_env)
@@ -88,8 +90,9 @@ def finish_job(jobfile, params, code, log_url):
def process_job(jobfile):
+ """Processes requested github job"""
try:
- with open(jobfile) as f:
+ with open(jobfile, encoding='utf-8') as f:
params = json.load(f)
mtime = os.fstat(f.fileno()).st_mtime
except json.decoder.JSONDecodeError as e:
@@ -105,7 +108,7 @@ def process_job(jobfile):
container += '-' + params['ppas'][-1].replace('/', '-')
except (KeyError, IndexError):
pass
- container_url = os.path.join(swift_url, container)
+ container_url = os.path.join(SWIFT_URL, container)
package = params['package']
pkghash = package.startswith('lib') and package[:4] or package[0]
timestamp = time.strftime('%Y%m%d_%H%M%S', time.gmtime(mtime))
@@ -126,7 +129,7 @@ def process_job(jobfile):
code = result_matches_job(result_url, params)
if code is not None:
finish_job(jobfile, params, code,
- result_url.replace(swift_url, external_url) + '/log.gz')
+ result_url.replace(SWIFT_URL, EXTERNAL_URL) + '/log.gz')
break
except HTTPError as e:
logging.error('job %s URL %s failed: %s', os.path.basename(jobfile), query_url, e)
@@ -143,11 +146,11 @@ if __name__ == '__main__':
config = configparser.ConfigParser()
config.read(os.path.expanduser('~ubuntu/autopkgtest-cloud.conf'))
- swift_url = config['web']['SwiftURL']
+ SWIFT_URL = config['web']['SwiftURL']
try:
- external_url = config['web']['ExternalURL']
+ EXTERNAL_URL = config['web']['ExternalURL']
except KeyError:
- external_url = swift_url
+ EXTERNAL_URL = SWIFT_URL
jobs = sys.argv[1:]
@@ -156,4 +159,3 @@ if __name__ == '__main__':
for job in jobs:
process_job(os.path.join(PENDING_DIR, job))
-
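
result_matches_job keeps its for/else retry loop, and returning None explicitly
(rather than via a bare return) satisfies pylint's inconsistent-return-statements
check while keeping the caller's 'if code is not None' guard in process_job
intact. A minimal sketch of that shape, with a hypothetical fetch_with_retries
helper:

    import logging
    import time
    import urllib.request

    def fetch_with_retries(url, attempts=5):
        """Return the response body, or None if every attempt failed."""
        for _ in range(attempts):
            try:
                with urllib.request.urlopen(url) as f:
                    data = f.read()
                break
            except IOError:
                time.sleep(1)
        else:
            # Reached only when the loop never hit 'break'.
            logging.error('failed to download %s', url)
            return None
        return data
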
diff --git a/ci/lint_test b/ci/lint_test
index e52edc4..06a4133 100755
--- a/ci/lint_test
+++ b/ci/lint_test
@@ -1,5 +1,5 @@
#!/usr/bin/python3
-# pylint: disable = invalid-name, broad-except, subprocess-run-check
+# pylint: disable = broad-except, unnecessary-dict-index-lookup, bad-option-value
'''
Script to lint the scripts in the autopkgtest-cloud repository in CI
'''
@@ -8,33 +8,33 @@ import os
import sys
import logging
import subprocess
+import argparse
-def check_for_extension(input_list, output_list, extension):
+def check_for_extension(input_list, output_list, file_extension):
'''
- Checks filepaths in a list for a given extension
+ Checks filepaths in a list for a given file_extension
'''
- for a in input_list:
- if os.path.isfile(a):
- # if str(a)[-3:] == extension:
- if extension in str(a)[-6:]:
- output_list.append(str(a))
+ for filepath in input_list:
+ if os.path.isfile(filepath):
+            if str(filepath).endswith(file_extension):
+ output_list.append(str(filepath))
return output_list
-def check_for_shebang(input_list, output_list, shebang):
+def check_for_shebang(input_list, output_list, shebang_for_check):
'''
- Checks filepaths in a given list for a given shebang
+ Checks filepaths in a given list for a given shebang_for_check
'''
- for b in input_list:
- if os.path.isfile(b):
+ for filepath in input_list:
+ if os.path.isfile(filepath):
try:
- with open(b, 'r', encoding='utf-8') as myfile:
+ with open(filepath, 'r', encoding='utf-8') as myfile:
file = myfile.read()
into_list = file.splitlines()
if len(into_list) > 1:
- if into_list[0] == shebang:
- output_list.append(str(b))
+ if into_list[0] == shebang_for_check:
+ output_list.append(str(filepath))
except Exception as _:
pass
return output_list
@@ -44,42 +44,56 @@ def remove_list_from_list(input_list, remove_list):
'''
Removes elements from remove_list from input_list
'''
- for ff in input_list:
- if os.path.isfile(ff):
- if str(ff) in remove_list:
- input_list.remove(ff)
+    # iterate over a copy so removals don't skip the following element
+    for list_elem in list(input_list):
+ if os.path.isfile(list_elem):
+ if str(list_elem) in remove_list:
+ input_list.remove(list_elem)
return input_list
def run_lint_command(files_to_lint, lint_command, arguments=None):
'''
- Runs a given lint command over a list of filepaths and stores output
+    Runs a given lint command over a list of filepaths, storing the output
and exit code
'''
- exit_codes = 0
- lint_output = ""
- # check lint command exists
- for f in files_to_lint:
- if arguments is None:
- cmd = [lint_command, f]
- result = subprocess.run(cmd, stdout=subprocess.PIPE)
- else:
- cmd = [lint_command]
- for arg in arguments.split(" "):
- cmd.append(arg)
- cmd.append(f)
- result = subprocess.run(cmd, stdout=subprocess.PIPE)
- lint_output += result.stdout.decode("utf-8") + "\n"
- exit_codes += result.returncode
- return lint_output, exit_codes
+ exit_codes = []
+ lint_output = []
+ lint_success = True
+ check_for_cmd = subprocess.run(["which", lint_command], stdout=subprocess.PIPE, check=False)
+ if check_for_cmd.returncode != 0:
+ logger.error("%s not present on system - please amend before using this script.",
+ lint_command)
+ sys.exit(1)
+ for file in files_to_lint:
+ if ".git" not in file:
+ if arguments is None:
+ cmd = [lint_command, file]
+ result = subprocess.run(cmd, stdout=subprocess.PIPE, check=False)
+ else:
+ cmd = [lint_command]
+ for arg in arguments.split(" "):
+ cmd.append(arg)
+ cmd.append(file)
+ result = subprocess.run(cmd, stdout=subprocess.PIPE, check=False)
+ lint_output.append(result.stdout.decode("utf-8") + "\n")
+ exit_codes.append(result.returncode)
+ if result.returncode != 0:
+ lint_success = False
+ return lint_output, exit_codes, lint_success
-if __name__=="__main__":
+if __name__ == "__main__":
+ # pylint: disable=invalid-name
+ parser = argparse.ArgumentParser(description="Args for lint test")
+ parser.add_argument('-v',
+ '--verbose',
+ help="Verbose output from lint test (y/n)",
+ action='store_true')
+ args = parser.parse_args()
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('autopkgtest-cloud-linter')
- start_dir = "../"
- repo_dir = pathlib.Path(start_dir)
+ repo_dir = pathlib.Path("../")
repo_dir.rglob("*")
final_list_of_python_files = []
@@ -90,25 +104,28 @@ if __name__=="__main__":
"files": [],
"extensions": [".py"],
"shebangs": ["#!/usr/bin/python3"],
- "args": None,
- "output": "",
- "code": 0
+ "args": "--disable=E0012",
+ "output": [],
+ "code": [],
+ "success": False
},
"shellcheck": {
"files": [],
"extensions": [".sh", ".bash"],
"shebangs": ["#!/bin/bash", "#!/bin/sh"],
"args": None,
- "output": "",
- "code": 0
+ "output": [],
+ "code": [],
+ "success": False
},
'yamllint': {
"files": ["../"],
"extensions": None,
"shebangs": None,
"args": "--no-warnings",
- "output": "",
- "code": 0
+ "output": [],
+ "code": [],
+ "success": False
}
}
@@ -122,19 +139,23 @@ if __name__=="__main__":
data[key]["files"] = check_for_shebang(all_files, data[key]["files"], shebang)
all_files = remove_list_from_list(all_files, data[key]["files"])
data[key]["output"], \
- data[key]["code"] = run_lint_command(data[key]["files"], key, data[key]["args"])
- ecodesum = 0
- for _, oec in data.items():
- ecodesum += oec["code"]
- if ecodesum > 0:
+ data[key]["code"], \
+ data[key]["success"] = run_lint_command(data[key]["files"], key, data[key]["args"])
+
+    exit_code_sum = sum(sum(item["code"]) for item in data.values())
+ if exit_code_sum > 0:
for key, item in data.items():
- if item["code"] > 0:
- # logger.info("%s output: \n%s", key, item["output"])
+ if not item["success"]:
logger.info("%s failed!", key)
- # sys.exit(1)
- # temporary exit code, will be set back to 1 when python and bash scripts have been linted
- # right now we are just checking yaml files
- if key == "yamllint" and item["code"] != 0:
- sys.exit(1)
- sys.exit(0)
- sys.exit(0)
\ No newline at end of file
+                if args.verbose:
+                    for return_code, output in zip(item["code"], item["output"]):
+                        if return_code != 0:
+                            logger.info("%s", output)
+            else:
+                logger.info("%s passed!", key)
+        sys.exit(1)
+ logger.info("All the following linting tests passed: %s\n", list(data.keys()))
+ sys.exit(0)
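
run_lint_command now returns per-file output, per-file exit codes, and an overall
success flag instead of a concatenated string and a summed integer. A minimal
sketch of the same pattern, assuming the linter binary is on PATH (shutil.which
stands in for the subprocess 'which' call):

    import shutil
    import subprocess

    def run_linter(files, command, args=()):
        """Run one lint command over many files, collecting output and exit codes."""
        if shutil.which(command) is None:
            raise RuntimeError(command + ' is not installed')
        outputs, codes = [], []
        for path in files:
            result = subprocess.run([command, *args, path],
                                    stdout=subprocess.PIPE, check=False)
            outputs.append(result.stdout.decode('utf-8'))
            codes.append(result.returncode)
        return outputs, codes, all(code == 0 for code in codes)
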
diff --git a/ci/trigger_readthedocs_build b/ci/trigger_readthedocs_build
deleted file mode 100755
index 46762dd..0000000
--- a/ci/trigger_readthedocs_build
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/python3
-#pylint: disable=invalid-name
-'''
-Script intended to be run after commit to master in the autopkgtest-cloud repository
-Triggers a build of the readthedocs repo for this repo
-'''
-import sys
-import time
-import logging
-import requests
-import git
-
-
-def get_latest_commit(repo_url):
- '''
- Acquire latest commit to master
- '''
- git.Repo.clone_from(repo_url, "./new_repo")
- return git.Git("./new_repo").log('-1', '-X')
-
-
-def check_if_commit_affected_docs():
- '''
- Checks if latest commit to master of autopkgtest-cloud
- had any changes under the docs directory
- '''
- latest_commit_message = get_latest_commit("https://git.launchpad.net/autopkgtest-cloud")
-
- arr = latest_commit_message.split("\n")
- for a in arr:
- x = a.split(" ")
- if len(x) > 2:
- if "%" in x[1]:
- dir_of_percent = x[2]
- if dir_of_percent == "docs/":
- return True
- return False
-
-
-if __name__=="__main__":
- logging.basicConfig(level=logging.INFO)
- logger = logging.getLogger('readthedocs_build_triggerer')
- auth_token="replace-with-working-token"
- rtd_headers={'Authorization': f'token {auth_token}'}
- build_tr_url='https://readthedocs.org/api/v3/projects/autopkgtest-cloud/versions/latest/builds/'
-
- if not check_if_commit_affected_docs():
- logger.info("Latest commit to master didn't affect docs directory, not triggering build.")
- sys.exit(0)
- else:
- logger.info("Latest commit did affect docs directory, building on readthedocs")
- try:
- response1 = requests.post(build_tr_url, headers=rtd_headers)
- except Exception as _:
- logger.info("Tried to trigger readthedocs build but it failed!")
- sys.exit(0)
- build_no = str(response1.json()["build"]["id"])
- build_info_url='https://readthedocs.org/api/v3/projects/autopkgtest-cloud/builds/' + build_no
- logger.info("Build started at: %s", build_info_url)
- timeout=300
- start = time.time()
- while True:
- response2 = requests.get(build_info_url, headers=rtd_headers).json()
- build_state = response2["state"]["code"]
- if response2["finished"] is not None:
- logger.info("Build finished")
- exit_code = response2["success"]
- if exit_code:
- logger.info("Build success")
- sys.exit(0)
- else:
- logger.error("Build failed")
- sys.exit(1)
- time.sleep(1)
- if (time.time() - start) > timeout:
- logger.error("Build timed out")
- sys.exit(1)
diff --git a/docs/conf.py b/docs/conf.py
index a46ffdc..b4b923c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,7 +1,7 @@
'''
Configuration file for the Sphinx documentation builder.
'''
-#pylint: disable=redefined-builtin
+# pylint: disable=redefined-builtin, invalid-name
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
diff --git a/lxc-slave-admin/cmd b/lxc-slave-admin/cmd
index ff991ff..0700964 100755
--- a/lxc-slave-admin/cmd
+++ b/lxc-slave-admin/cmd
@@ -1,6 +1,6 @@
#!/bin/sh
set -e
-MYDIR=`dirname $0`
+MYDIR=$(dirname "${0}")
if [ -z "$1" ]; then
echo "Usage: $0 <hosts> <commands or .commands file>" >&2
@@ -8,11 +8,11 @@ if [ -z "$1" ]; then
fi
if [ "$1" = "all" ]; then
- for f in $MYDIR/*.hosts; do
+ for f in "${MYDIR}"/*.hosts; do
hosts="$hosts -h $f";
done
else
- if [ -e ${1} ]; then
+ if [ -e "${1}" ]; then
hosts="-h ${1}"
elif [ -e "${1}.hosts" ]; then
hosts="-h ${1}.hosts"
@@ -29,8 +29,8 @@ if [ "${1%.commands}" != "$1" ]; then
exit 1
fi
# command file
- cat "$1" | parallel-ssh -x "-F $MYDIR/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes $hosts -p8 -t 0 -i -I
+    # shellcheck disable=SC2086  # ${hosts} holds repeated "-h <file>" options and must word-split
+    parallel-ssh -x "-F ${MYDIR}/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes ${hosts} -p8 -t 0 -i -I < "${1}"
else
# command
- parallel-ssh -x "-F $MYDIR/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes $hosts -p8 -t 0 -i -- "$@"
+    # shellcheck disable=SC2086  # ${hosts} holds repeated "-h <file>" options and must word-split
+    parallel-ssh -x "-F ${MYDIR}/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes ${hosts} -p8 -t 0 -i -- "$@"
fi
diff --git a/mojo/make-lxd-secgroup b/mojo/make-lxd-secgroup
index 634e598..1c1d961 100755
--- a/mojo/make-lxd-secgroup
+++ b/mojo/make-lxd-secgroup
@@ -1,5 +1,5 @@
#!/bin/sh
-
+# shellcheck disable=SC1090
set -eu
# there's apparently no way to get this dynamically
@@ -24,4 +24,4 @@ done
if [ -n "${ROUTER_IP:-}" ]; then
nova secgroup-add-rule lxd tcp 8443 8443 "${ROUTER_IP}/32" 2>/dev/null || true # perhaps it already existed
-fi
+fi
\ No newline at end of file
diff --git a/mojo/postdeploy b/mojo/postdeploy
index 0f857ae..4b0f88f 100755
--- a/mojo/postdeploy
+++ b/mojo/postdeploy
@@ -11,5 +11,6 @@ if [ "${MOJO_STAGE_NAME}" == "staging" ]; then
fi
echo "Setting up the floating IP address of the front end..."
-$(dirname $0)/add-floating-ip haproxy
-$(dirname $0)/add-floating-ip rabbitmq-server
+directory=$(dirname "{0}")
+"${directory}"/add-floating-ip haproxy
+"${directory}"/add-floating-ip rabbitmq-server