canonical-ubuntu-qa team mailing list archive
Message #00561
[Merge] ~andersson123/autopkgtest-cloud:lint_autopkgtest-web into autopkgtest-cloud:master
Tim Andersson has proposed merging ~andersson123/autopkgtest-cloud:lint_autopkgtest-web into autopkgtest-cloud:master.
Commit message:
Lint only autopkgtest-web directory for easier testing
Requested reviews:
Canonical's Ubuntu QA (canonical-ubuntu-qa)
For more details, see:
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/444162
Lint only autopkgtest-web directory for easier testing
--
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~andersson123/autopkgtest-cloud:lint_autopkgtest-web into autopkgtest-cloud:master.
diff --git a/.launchpad.yaml b/.launchpad.yaml
index 6dca924..4f6669e 100755
--- a/.launchpad.yaml
+++ b/.launchpad.yaml
@@ -6,4 +6,4 @@ jobs:
series: focal
architectures: amd64
packages: [pylint, python3, shellcheck, yamllint]
-    run: ./ci/lint_test
+    run: ./ci/lint_test -v
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-test-instances b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/create-test-instances
old mode 100755
new mode 100644
diff --git a/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py b/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py
index fc87317..a6efd32 100644
--- a/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py
+++ b/charms/focal/autopkgtest-web/reactive/autopkgtest_web.py
@@ -1,3 +1,14 @@
+"""
+Web app for autopkgtest-cloud
+"""
+#pylint: disable=import-error, possibly-unused-variable, consider-using-f-string, invalid-name, bad-option-value
+from textwrap import dedent
+import glob
+import os
+import shutil
+import subprocess
+
+from charmhelpers.core.hookenv import charm_dir, config
from charms.layer import status
from charms.reactive import (
when,
@@ -6,16 +17,8 @@ from charms.reactive import (
when_not,
set_flag,
clear_flag,
- hook,
)
-from charmhelpers.core.hookenv import charm_dir, config
-from textwrap import dedent
-
-import glob
-import os
-import shutil
-import subprocess
AUTOPKGTEST_CLOUD_CONF = os.path.expanduser("~ubuntu/autopkgtest-cloud.conf")
GITHUB_SECRETS_PATH = os.path.expanduser("~ubuntu/github-secrets.json")
@@ -29,6 +32,7 @@ SWIFT_WEB_CREDENTIALS_PATH = os.path.expanduser(
@when_not("autopkgtest-web.autopkgtest_web_symlinked")
def symlink_autopkgtest_cloud():
+ """Creates a symbolic link to webcontrol dir in autopkgtest-cloud repository"""
try:
autopkgtest_cloud = os.path.join(charm_dir(), "webcontrol")
os.symlink(autopkgtest_cloud, os.path.expanduser("~ubuntu/webcontrol"))
@@ -40,6 +44,7 @@ def symlink_autopkgtest_cloud():
@when("amqp.connected")
@when_not("amqp.available")
def setup_rabbitmq(rabbitmq):
+    """Sets up access to the RabbitMQ queueing server"""
rabbitmq.request_access("webcontrol", "/")
status.waiting("Waiting on RabbitMQ to configure vhost")
@@ -50,14 +55,15 @@ def setup_rabbitmq(rabbitmq):
"config.set.hostname",
)
def write_autopkgtest_cloud_conf(rabbitmq):
+    """Writes autopkgtest-cloud.conf with the web and AMQP settings"""
swiftinternal = config().get("storage-url-internal")
hostname = config().get("hostname")
rabbituser = rabbitmq.username()
rabbitpassword = rabbitmq.password()
rabbithost = rabbitmq.private_address()
clear_flag("autopkgtest-web.config-written")
- with open(f"{AUTOPKGTEST_CLOUD_CONF}.new", "w") as f:
- f.write(
+ with open(f"{AUTOPKGTEST_CLOUD_CONF}.new", "w", encoding='utf-8') as config_file:
+ config_file.write(
dedent(
"""\
[web]
@@ -68,7 +74,7 @@ def write_autopkgtest_cloud_conf(rabbitmq):
[amqp]
uri=amqp://{rabbituser}:{rabbitpassword}@{rabbithost}""".format(
- **locals()
+ **locals()
)
)
)
@@ -81,6 +87,7 @@ def write_autopkgtest_cloud_conf(rabbitmq):
"autopkgtest-web.config-written",
)
def set_up_systemd_units():
+ """Sets up systemd units for autopkgtest-web services"""
any_changed = False
for unit in glob.glob(os.path.join(charm_dir(), "units", "*")):
base = os.path.basename(unit)
@@ -111,6 +118,7 @@ def set_up_systemd_units():
)
@when_not("autopkgtest-web.website-initially-configured")
def initially_configure_website(website):
+ """Sets initial variables for autopkgtest-web website"""
set_up_web_config(website)
@@ -127,8 +135,9 @@ def initially_configure_website(website):
"autopkgtest-web.website-initially-configured"
)
def set_up_web_config(apache):
+ """Sets up proxies and filepaths for website"""
webcontrol_dir = os.path.join(charm_dir(), "webcontrol")
- sn = config().get("hostname")
+ ser_name = config().get("hostname")
https_proxy = config().get("https-proxy")
no_proxy = config().get("no-proxy")
@@ -154,10 +163,10 @@ def set_up_web_config(apache):
pass
if https_proxy:
- with open(https_proxy_env, "w") as f:
- f.write("https_proxy={}".format(https_proxy))
- with open(https_proxy_apache, "w") as f:
- f.write("SetEnv https_proxy {}".format(https_proxy))
+ with open(https_proxy_env, "w", encoding='utf-8') as https_proxy_file:
+ https_proxy_file.write("https_proxy={}".format(https_proxy))
+ with open(https_proxy_apache, "w", encoding='utf-8') as https_proxy_apache_file:
+ https_proxy_apache_file.write("SetEnv https_proxy {}".format(https_proxy))
no_proxy_env = os.path.join(environment_d, "no_proxy.conf")
no_proxy_apache = os.path.join(conf_enabled, "no_proxy.conf")
@@ -172,12 +181,12 @@ def set_up_web_config(apache):
pass
if no_proxy:
- with open(no_proxy_env, "w") as f:
+ with open(no_proxy_env, "w", encoding='utf-8') as f:
f.write("no_proxy={}".format(no_proxy))
- with open(no_proxy_apache, "w") as f:
+ with open(no_proxy_apache, "w", encoding='utf-8') as f:
f.write("SetEnv no_proxy {}".format(no_proxy))
- server_name = "ServerName {}".format(sn) if sn else ""
+ server_name = "ServerName {}".format(ser_name) if ser_name else ""
apache.send_site_config(
dedent(
"""\
@@ -205,7 +214,7 @@ def set_up_web_config(apache):
{server_name}
</VirtualHost>
""".format(
- **locals()
+ **locals()
)
)
)
@@ -216,9 +225,10 @@ def set_up_web_config(apache):
@when_all("config.changed.github-secrets", "config.set.github-secrets")
def write_github_secrets():
+ """Writes github secrets to file"""
github_secrets = config().get("github-secrets")
- with open(GITHUB_SECRETS_PATH, "w") as f:
+ with open(GITHUB_SECRETS_PATH, "w", encoding='utf-8') as f:
f.write(github_secrets)
try:
@@ -232,6 +242,7 @@ def write_github_secrets():
@when_not("config.set.github-secrets")
def clear_github_secrets():
+ """Removes symlink to github secrets file"""
try:
os.unlink(GITHUB_SECRETS_PATH)
except FileNotFoundError:
@@ -246,9 +257,10 @@ def clear_github_secrets():
@when_all("config.changed.swift-web-credentials",
"config.set.swift-web-credentials")
def write_swift_web_credentials():
+ """Writes swift credentials to file"""
swift_credentials = config().get("swift-web-credentials")
- with open(SWIFT_WEB_CREDENTIALS_PATH, "w") as f:
+ with open(SWIFT_WEB_CREDENTIALS_PATH, "w", encoding='utf-8') as f:
f.write(swift_credentials)
try:
@@ -262,6 +274,7 @@ def write_swift_web_credentials():
@when_not("config.set.swift-web-credentials")
def clear_swift_web_credentials():
+ """Removes symlink to swift web creds file"""
try:
os.unlink(SWIFT_WEB_CREDENTIALS_PATH)
except FileNotFoundError:
@@ -278,9 +291,10 @@ def clear_swift_web_credentials():
"config.set.github-status-credentials",
)
def write_github_status_credentials():
+ """Writes github status creds to file and symlinks them"""
github_status_credentials = config().get("github-status-credentials")
- with open(GITHUB_STATUS_CREDENTIALS_PATH, "w") as f:
+ with open(GITHUB_STATUS_CREDENTIALS_PATH, "w", encoding='utf-8') as f:
f.write(github_status_credentials)
try:
@@ -294,6 +308,7 @@ def write_github_status_credentials():
@when_not("config.set.github-status-credentials")
def clear_github_status_credentials():
+ """Removes symlink to github status credentials"""
try:
os.unlink(GITHUB_STATUS_CREDENTIALS_PATH)
except FileNotFoundError:
@@ -309,6 +324,7 @@ def clear_github_status_credentials():
@when_not("autopkgtest-web.bootstrap-symlinked")
def symlink_bootstrap():
+ """Symlinks to bootstrap file"""
try:
os.symlink(
os.path.join(
@@ -323,7 +339,8 @@ def symlink_bootstrap():
@when_not("autopkgtest-web.runtime-dir-created")
def make_runtime_tmpfiles():
- with open("/etc/tmpfiles.d/autopkgtest-web-runtime.conf", "w") as r:
+ """Makes all of the necessary tmp files for autopkgtest-web"""
+ with open("/etc/tmpfiles.d/autopkgtest-web-runtime.conf", "w", encoding='utf-8') as r:
r.write("D %t/autopkgtest_webcontrol 0755 www-data www-data\n")
subprocess.check_call(["systemd-tmpfiles", "--create"])
set_flag("autopkgtest-web.runtime-dir-created")
@@ -331,6 +348,7 @@ def make_runtime_tmpfiles():
@when_not("autopkgtest-web.running-json-symlinked")
def symlink_running():
+ """Symlinks to json files"""
try:
os.symlink(
os.path.join(
@@ -345,6 +363,7 @@ def symlink_running():
@when_not("autopkgtest-web.public-db-symlinked")
def symlink_public_db():
+ """Creates symlink to public database on host"""
try:
publicdir = os.path.expanduser("~ubuntu/public/")
os.makedirs(publicdir)
@@ -361,12 +380,14 @@ def symlink_public_db():
@when("leadership.is_leader")
@when_not("autopkgtest-cloud.leadership_flag_written")
def write_leadership_flag():
- with open("/run/autopkgtest-web-is-leader", "w") as _:
+    """Writes a flag file marking this unit as the Juju leader"""
+ with open("/run/autopkgtest-web-is-leader", "w", encoding='utf-8') as _:
set_flag("autopkgtest-cloud.leadership_flag_written")
@when_not("leadership.is_leader")
@when("autopkgtest-cloud.leadership_flag_written")
def clear_leadership_flag():
+    """Removes the Juju leadership flag file"""
os.unlink("/run/autopkgtest-web-is-leader")
clear_flag("autopkgtest-cloud.leadership_flag_written")
diff --git a/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector b/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector
index 66a9ed4..514bac4 100755
--- a/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector
+++ b/charms/focal/autopkgtest-web/webcontrol/amqp-status-collector
@@ -1,6 +1,9 @@
#!/usr/bin/python3
-# Pick up running tests, their status and logtail from the "teststatus" fanout
-# queue, and regularly write it into /run/running.json
+#pylint: disable=invalid-name, import-error, too-many-function-args
+'''
+Pick up running tests, their status and logtail from the "teststatus" fanout
+queue, and regularly write it into /run/running.json
+'''
import os
import json
@@ -17,11 +20,11 @@ running_name = os.path.join(os.path.sep,
'run',
'amqp-status-collector',
'running.json')
-running_name_new = "{}.new".format(running_name)
+running_name_new = f"{running_name}.new"
# package -> runhash -> release -> arch -> (params, duration, logtail)
running_tests = {}
-last_update = 0
+LAST_UPDATE = 0
def amqp_connect():
@@ -33,19 +36,17 @@ def amqp_connect():
parts = urllib.parse.urlsplit(amqp_uri, allow_fragments=False)
amqp_con = amqp.Connection(parts.hostname, userid=parts.username,
password=parts.password)
- logging.info('Connected to AMQP server at %s@%s' % (parts.username, parts.hostname))
+ logging.info('Connected to AMQP server at %s@%s', parts.username, parts.hostname)
return amqp_con
-def update_output(amqp_channel, force_update=False):
+def update_output(force_update=False):
'''Update report'''
- global last_update
-
# update at most every 10 s
now = time.time()
- if not force_update and now - last_update < 10:
+ if not force_update and now - LAST_UPDATE < 10:
return
with open(running_name_new, 'w', encoding='utf-8') as f:
@@ -64,12 +65,13 @@ def process_message(msg):
runhash = ''
params = info.get('params', {})
for p in sorted(params):
- runhash += '%s_%s;' % (p, params[p])
+ runhash += f'{p}_{params[p]};'
if info['running']:
running_tests.setdefault(info['package'], {}).setdefault(
runhash, {}).setdefault(
- info['release'], {})[info['architecture']] = (params, info.get('duration', 0), info['logtail'])
+ info['release'], {})[info['architecture']] = \
+ (params, info.get('duration', 0), info['logtail'])
else:
try:
del running_tests[info['package']][runhash][info['release']][info['architecture']]
@@ -93,15 +95,15 @@ def process_message(msg):
logging.basicConfig(level=('DEBUG' in os.environ and logging.DEBUG or logging.INFO))
-amqp_con = amqp_connect()
-status_ch = amqp_con.channel()
+amqp_connection = amqp_connect()
+status_ch = amqp_connection.channel()
status_ch.access_request('/data', active=True, read=True, write=False)
status_ch.exchange_declare(exchange_name, 'fanout', durable=False, auto_delete=True)
-queue_name = 'running-listener-%s' % socket.getfqdn()
+queue_name = f'running-listener-{socket.getfqdn()}'
status_ch.queue_declare(queue_name, durable=False, auto_delete=True)
status_ch.queue_bind(queue_name, exchange_name, queue_name)
-logging.info('Listening to requests on %s' % queue_name)
+logging.info('Listening to requests on %s', queue_name)
status_ch.basic_consume('', callback=process_message, no_ack=True)
while status_ch.callbacks:
status_ch.wait()
diff --git a/charms/focal/autopkgtest-web/webcontrol/cache-amqp b/charms/focal/autopkgtest-web/webcontrol/cache-amqp
index c32ea40..4804aba 100755
--- a/charms/focal/autopkgtest-web/webcontrol/cache-amqp
+++ b/charms/focal/autopkgtest-web/webcontrol/cache-amqp
@@ -1,4 +1,7 @@
#!/usr/bin/python3
+#pylint: disable=invalid-name, import-error, consider-using-f-string, too-many-locals, bad-option-value
+
+"""Handles the amqp cache"""
import argparse
import configparser
@@ -18,6 +21,7 @@ AMQP_CONTEXTS = ["ubuntu", "huge", "ppa", "upstream"]
class AutopkgtestQueueContents:
+ """Class to handle and store the contents of the autopkgtest queue"""
def __init__(self, amqp_uri, database):
assert amqp_uri is not None
assert database is not None
@@ -42,7 +46,7 @@ class AutopkgtestQueueContents:
self.amqp_channel.queue_declare(
queue_name, durable=True, passive=True
)
- logger.info(f"Semaphore queue '{queue_name}' exists")
+ logger.info("Semaphore queue %s exists", queue_name)
except AMQPChannelException as e:
(code, _, _, _) = e.args
if code != 404:
@@ -52,7 +56,7 @@ class AutopkgtestQueueContents:
self.amqp_channel = self.amqp_con.channel()
# queue does not exist, create it
logger.info(
- f"Semaphore queue {queue_name} does not exist, initialising..."
+ "Semaphore queue %s does not exist, initialising...", queue_name
)
self.amqp_channel.queue_declare(
queue_name, durable=True
@@ -62,9 +66,10 @@ class AutopkgtestQueueContents:
routing_key=queue_name,
)
else: # not the leader
- logging.error(
- "We are not the leader, and there is no semaphore queue yet, we can't do anything - exiting."
- )
+            logging.error(
+                "We are not the leader, and there is no semaphore "
+                "queue yet, we can't do anything - exiting."
+            )
sys.exit(0)
@property
@@ -82,7 +87,7 @@ class AutopkgtestQueueContents:
releases.append(row[0])
for r in releases:
for row in db_con.execute(
- "SELECT DISTINCT arch from test WHERE release=?", (r,)
+ "SELECT DISTINCT arch from test WHERE release=?", (r,)
):
release_arches.setdefault(r, []).append(row[0])
return release_arches
@@ -122,7 +127,6 @@ class AutopkgtestQueueContents:
res.append(r)
except (ValueError, IndexError):
logging.error('Received invalid request format "%s"', r)
- return
return res
def get_queue_contents(self):
@@ -142,7 +146,8 @@ class AutopkgtestQueueContents:
f"semaphore-{context}-{release}-{arch}"
)
logging.info(
- f"Trying to lock semaphore queue {semaphore_queue_name}..."
+ "Trying to lock semaphore queue %s...",
+ semaphore_queue_name
)
r = None
while r is None:
@@ -177,7 +182,8 @@ class AutopkgtestQueueContents:
}
all_arches.add(arch)
logging.debug(
- f"Releasing semaphore lock {semaphore_queue_name}"
+ "Releasing semaphore lock %s",
+ semaphore_queue_name
)
channel.basic_reject(r.delivery_tag, True)
@@ -233,22 +239,22 @@ if __name__ == "__main__":
logger.setLevel(logging.INFO)
try:
- amqp_uri = cp["amqp"]["uri"]
+ amqpuri = cp["amqp"]["uri"]
except KeyError:
print("No AMQP URI found", file=sys.stderr)
sys.exit(1)
try:
- database = cp["web"]["database_ro"]
+        db = cp["web"]["database_ro"]
except KeyError:
- print("No database found", file=sys.stderr)
+ print("No db found", file=sys.stderr)
sys.exit(1)
- aq = AutopkgtestQueueContents(amqp_uri, database)
+ aq = AutopkgtestQueueContents(amqpuri, db)
queue_contents = aq.get_queue_contents()
with tempfile.NamedTemporaryFile(
- mode="w", dir=os.path.dirname(args.output), delete=False
+ mode="w", dir=os.path.dirname(args.output), delete=False
) as tf:
try:
json.dump(queue_contents, tf, indent=2)
diff --git a/charms/focal/autopkgtest-web/webcontrol/download-all-results b/charms/focal/autopkgtest-web/webcontrol/download-all-results
index ab0a1e3..aebff04 100755
--- a/charms/focal/autopkgtest-web/webcontrol/download-all-results
+++ b/charms/focal/autopkgtest-web/webcontrol/download-all-results
@@ -1,14 +1,17 @@
#!/usr/bin/python3
+#pylint: disable=invalid-name, consider-using-with, too-many-locals, too-many-branches, consider-using-f-string, no-member, bad-option-value
-# Authors: Iain Lane, Martin Pitt
+'''
+Authors: Iain Lane, Martin Pitt
-# This script uses the OpenStack Swift API to list all of the contents of our
-# containers, diffs that against the local database, and then downloads any
-# missing results.
+This script uses the OpenStack Swift API to list all of the contents of our
+containers, diffs that against the local database, and then downloads any
+missing results.
-# While in normal operation the download-results script is supposed to receive
-# notification of completed jobs, in case of bugs or network outages etc, this
-# script can be used to find any results which were missed and insert them.
+While in normal operation the download-results script is supposed to receive
+notification of completed jobs, in case of bugs or network outages etc, this
+script can be used to find any results which were missed and insert them.
+'''
import os
import sys
@@ -21,10 +24,10 @@ import configparser
import urllib.parse
import time
import http
+from urllib.request import urlopen
from distro_info import UbuntuDistroInfo
from helpers.utils import get_test_id, init_db
-from urllib.request import urlopen
LOGGER = logging.getLogger(__name__)
@@ -33,6 +36,9 @@ db_con = None
def list_remote_container(container_url):
+ """
+    Lists the contents of a remote Swift container
+ """
LOGGER.debug("Listing container %s", container_url)
out = []
@@ -42,10 +48,10 @@ def list_remote_container(container_url):
url += f"&marker={urllib.parse.quote(start)}"
LOGGER.debug('Retrieving "%s"', url)
- for retry in range(5):
+ for _ in range(5):
try:
resp = urlopen(url)
- except http.client.RemoteDisconnected as e:
+ except http.client.RemoteDisconnected as _:
LOGGER.debug('Got disconnected, sleeping')
time.sleep(5)
continue
@@ -79,20 +85,22 @@ def list_remote_container(container_url):
return ret
-def list_our_results(release):
- LOGGER.debug("Finding already recorded results for %s", release)
+def list_our_results(specified_release):
+    """Returns the set of run IDs already recorded for the given release"""
+ LOGGER.debug("Finding already recorded results for %s", specified_release)
c = db_con.cursor()
c.execute(
- "SELECT run_id FROM result INNER JOIN test ON test.id = result.test_id WHERE test.release=?",
- (release,),
+ "SELECT run_id FROM result INNER JOIN test ON " + \
+ "test.id = result.test_id WHERE test.release=?",
+ (specified_release,),
)
return {run_id for (run_id,) in c.fetchall()}
def fetch_one_result(url):
"""Download one result URL from swift and add it to the DB"""
- (release, arch, _, src, run_id, _) = url.split("/")[-6:]
- test_id = get_test_id(db_con, release, arch, src)
+ (release_fetch, arch, _, src, run_id, _) = url.split("/")[-6:]
+ test_id = get_test_id(db_con, release_fetch, arch, src)
try:
f = urlopen(url, timeout=30)
@@ -119,7 +127,7 @@ def fetch_one_result(url):
srcver = (
tar.extractfile("testpkg-version").read().decode().strip()
)
- except KeyError as e:
+ except KeyError as _:
# not found
if exitcode in (4, 12, 20):
# repair it
@@ -135,7 +143,7 @@ def fetch_one_result(url):
# requester
try:
requester = tar.extractfile("requester").read().decode().strip()
- except KeyError as e:
+ except KeyError as _:
requester = ""
except (KeyError, ValueError, tarfile.TarError) as e:
LOGGER.debug("%s is damaged, ignoring: %s", url, str(e))
@@ -161,7 +169,7 @@ def fetch_one_result(url):
LOGGER.debug(
"Fetched test result for %s/%s/%s/%s %s (triggers: %s): exit code %i",
- release,
+ release_fetch,
arch,
src,
ver,
@@ -189,13 +197,11 @@ def fetch_one_result(url):
LOGGER.info("%s was already recorded - skipping", run_id)
-def fetch_container(release, container_url):
+def fetch_container(release_to_fetch, container_url):
"""Download new results from a swift container"""
- marker = ""
-
try:
- our_results = list_our_results(release)
+ our_results = list_our_results(release_to_fetch)
known_results = list_remote_container(container_url)
need_to_fetch = set(known_results.keys()) - our_results
@@ -206,7 +212,7 @@ def fetch_container(release, container_url):
fetch_one_result(os.path.join(container_url, known_results[run_id]))
except urllib.error.HTTPError as e:
if e.code == 401:
- LOGGER.warning(f"Couldn't access {container_url} - doesn't exist?")
+ LOGGER.warning("Couldn't access %s - doesn't exist?", container_url)
return
raise
diff --git a/charms/focal/autopkgtest-web/webcontrol/download-results b/charms/focal/autopkgtest-web/webcontrol/download-results
index b8d4188..0d1348b 100755
--- a/charms/focal/autopkgtest-web/webcontrol/download-results
+++ b/charms/focal/autopkgtest-web/webcontrol/download-results
@@ -1,4 +1,8 @@
#!/usr/bin/python3
+#pylint: disable=invalid-name, import-error, missing-function-docstring, too-many-locals
+'''
+Receives notifications of completed tests over AMQP and records the results in the local database
+'''
import configparser
import json
@@ -7,9 +11,7 @@ import os
import socket
import sqlite3
import urllib.parse
-
from helpers.utils import get_test_id, init_db
-from urllib.request import urlopen
import amqplib.client_0_8 as amqp
@@ -27,7 +29,7 @@ def amqp_connect():
parts.hostname, userid=parts.username, password=parts.password
)
logging.info(
- "Connected to AMQP server at %s@%s" % (parts.username, parts.hostname)
+ "Connected to AMQP server at %s@%s", parts.username, parts.hostname
)
return amqp_con
@@ -48,7 +50,7 @@ def process_message(msg, db_con):
if isinstance(body, bytes):
body = body.decode("UTF-8", errors="replace")
info = json.loads(body)
- logging.info("Received notification of completed test {}".format(info))
+ logging.info("Received notification of completed test %s", info)
arch = info["architecture"]
container = info["container"]
@@ -61,8 +63,8 @@ def process_message(msg, db_con):
(_, _, _, _, run_id) = info["swift_dir"].split("/")
# we don't handle PPA requests
- if container != ("autopkgtest-{}".format(release)):
- logging.debug("Ignoring non-distro request: {}".format(info))
+ if container != (f"autopkgtest-{release}"):
+ logging.debug("Ignoring non-distro request: %s", info)
msg.channel.basic_ack(msg.delivery_tag)
return
@@ -100,20 +102,20 @@ if __name__ == "__main__":
level=("DEBUG" in os.environ and logging.DEBUG or logging.INFO)
)
- db_con = db_connect()
- amqp_con = amqp_connect()
- status_ch = amqp_con.channel()
+ db_connection = db_connect()
+ amqp_connection = amqp_connect()
+ status_ch = amqp_connection.channel()
status_ch.access_request("/complete", active=True, read=True, write=False)
status_ch.exchange_declare(
EXCHANGE_NAME, "fanout", durable=True, auto_delete=False
)
- queue_name = "complete-listener-%s" % socket.getfqdn()
+ queue_name = "complete-listener-" + socket.getfqdn()
status_ch.queue_declare(queue_name, durable=True, auto_delete=False)
status_ch.queue_bind(queue_name, EXCHANGE_NAME, queue_name)
- logging.info("Listening to requests on %s" % queue_name)
+ logging.info("Listening to requests on %s", queue_name)
status_ch.basic_consume(
- "", callback=lambda msg: process_message(msg, db_con)
+ "", callback=lambda msg: process_message(msg, db_connection)
)
while status_ch.callbacks:
status_ch.wait()
diff --git a/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py b/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py
index ec74d8f..336da33 100644
--- a/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py
+++ b/charms/focal/autopkgtest-web/webcontrol/helpers/utils.py
@@ -1,7 +1,7 @@
'''
utilities for autopkgtest-web webcontrol
'''
-#pylint: disable=protected-access
+#pylint: disable=protected-access, invalid-name
import logging
import os
import sqlite3
diff --git a/charms/focal/autopkgtest-web/webcontrol/private_results/app.py b/charms/focal/autopkgtest-web/webcontrol/private_results/app.py
index 95cec50..6696d10 100644
--- a/charms/focal/autopkgtest-web/webcontrol/private_results/app.py
+++ b/charms/focal/autopkgtest-web/webcontrol/private_results/app.py
@@ -1,11 +1,15 @@
"""Test Result Fetcher Flask App"""
+#pylint: disable=import-error, consider-using-f-string, too-many-arguments, bad-option-value
import os
import sys
import logging
-import swiftclient
import configparser
-
from html import escape
+from helpers.utils import setup_key
+from request.submit import Submit
+
+import swiftclient
+
from flask import (Flask, Response, request, session, redirect,
render_template_string)
from flask_openid import OpenID
@@ -13,9 +17,6 @@ from werkzeug.middleware.proxy_fix import ProxyFix
sys.path.append('..')
-from helpers.utils import setup_key
-from request.submit import Submit
-
HTML = """
<!doctype html>
@@ -40,21 +41,20 @@ LOGIN = """
DENIED = "Unprivileged or unavailable."
-def swift_get_object(connection, container, path):
+def swift_get_object(swift_connection, container, path):
"""Fetch an object from swift."""
try:
- _, contents = connection.get_object(container, path)
- except swiftclient.exceptions.ClientException as e:
- logging.error('Failed to fetch %s from container (%s)' %
- (path, str(e)))
+ _, contents = swift_connection.get_object(container, path)
+ except swiftclient.exceptions.ClientException as swift_error:
+ logging.error('Failed to fetch %s from container (%s)', path, str(swift_error))
return None
return contents
-def validate_user_path(connection, container, nick, path):
+def validate_user_path(swift_connection, container, nick, path):
"""Return true if user is allowed to view files under the given path."""
# First we need to check if this result is actually sharable
- allowed_file = swift_get_object(connection, container, path)
+ allowed_file = swift_get_object(swift_connection, container, path)
if not allowed_file:
return False
allowed = allowed_file.decode('utf-8').splitlines()
@@ -66,10 +66,10 @@ def validate_user_path(connection, container, nick, path):
for entity in allowed:
(code, response) = Submit.lp_request('~%s/participants' % entity, {})
if code != 200:
- logging.error('Unable to validate user %s (%s)' % (nick, code))
+ logging.error('Unable to validate user %s (%s)', nick, code)
return False
- for e in response.get('entries', []):
- if e.get('name') == nick:
+ for response_entry in response.get('entries', []):
+ if response_entry.get('name') == nick:
return True
return False
@@ -152,13 +152,11 @@ def index_result(container, series, arch, group, src, runid, file):
content_type = 'text/plain; charset=UTF-8'
headers = {'Content-Encoding': 'gzip'}
return Response(result, content_type=content_type, headers=headers)
- else:
- return result
- else:
- # XXX: render_template_string urlencodes its context values, so it's
- # not really possible to have 'nested HTML' rendered properly.
- return HTML.replace("{{ content }}",
- render_template_string(LOGIN, **session))
+ return result
+ # render_template_string urlencodes its context values, so it's
+ # not really possible to have 'nested HTML' rendered properly.
+ return HTML.replace("{{ content }}",
+ render_template_string(LOGIN, **session))
@app.route('/login', methods=['GET', 'POST'])
diff --git a/charms/focal/autopkgtest-web/webcontrol/publish-db b/charms/focal/autopkgtest-web/webcontrol/publish-db
index a44b04a..5eadb58 100755
--- a/charms/focal/autopkgtest-web/webcontrol/publish-db
+++ b/charms/focal/autopkgtest-web/webcontrol/publish-db
@@ -1,14 +1,15 @@
#!/usr/bin/python3
-# download/read Sources.gz for all known releases and write them into
-# a copy of autopkgtest.db, which is then published to the public location.
-# This is being used for statistics.
+'''
+download/read Sources.gz for all known releases and write them into
+a copy of autopkgtest.db, which is then published to the public location.
+This is being used for statistics.
+'''
+#pylint: disable=invalid-name, consider-using-f-string, c-extension-no-member, bad-option-value
import configparser
-import fcntl
import gzip
import logging
import os
-import shutil
import sqlite3
import tempfile
import urllib.request
@@ -75,7 +76,7 @@ def init_db(path, path_current, path_rw):
current_version_copied = True
except sqlite3.OperationalError as e:
if "no such column: pocket" not in str(
- e
+ e
) and "no such column: component" not in str(
e
): # schema upgrade
@@ -111,8 +112,9 @@ def init_db(path, path_current, path_rw):
return db
-def get_last_checked(db_con, url):
- c = db_con.cursor()
+def get_last_checked(db_connection, url):
+ """Get the time the database was last checked"""
+ c = db_connection.cursor()
c.execute("SELECT timestamp FROM url_last_checked WHERE url=?", (url,))
try:
@@ -122,14 +124,15 @@ def get_last_checked(db_con, url):
return None
-def get_sources(db_con, release):
+def get_sources(db_connection, release):
+ """Gets sources.gz for specific release"""
for component in components:
for pocket in (release, release + "-updates"):
logging.debug("Processing %s/%s", pocket, component)
try:
url = f"{archive_url}/dists/{pocket}/{component}/source/Sources.gz"
request = urllib.request.Request(url)
- last_checked = get_last_checked(db_con, url)
+ last_checked = get_last_checked(db_connection, url)
if last_checked:
request.add_header("If-Modified-Since", last_checked)
@@ -139,7 +142,7 @@ def get_sources(db_con, release):
temp_file.write(response.read())
last_modified = response.getheader("Last-Modified")
if last_modified:
- db_con.execute(
+ db_connection.execute(
"INSERT INTO url_last_checked (url, timestamp) "
"VALUES (:url, :timestamp) "
"ON CONFLICT (url) DO "
@@ -147,7 +150,7 @@ def get_sources(db_con, release):
{'url': url, 'timestamp': last_modified}
)
- db_con.execute(
+ db_connection.execute(
"DELETE FROM current_version "
"WHERE pocket = ? AND component = ?",
(pocket, component),
@@ -155,7 +158,7 @@ def get_sources(db_con, release):
temp_file.seek(0)
with gzip.open(temp_file) as fd:
for section in apt_pkg.TagFile(fd):
- db_con.execute(
+ db_connection.execute(
"INSERT INTO current_version "
"(release, pocket, component, package, version) "
"VALUES "
@@ -171,11 +174,10 @@ def get_sources(db_con, release):
'version': section["Version"],
},
)
- db_con.commit()
+ db_connection.commit()
except urllib.error.HTTPError as e:
if e.code == 304:
- logging.debug("url {} not modified".format(url))
- pass
+ logging.debug("url %s not modified", url)
if __name__ == "__main__":
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/app.py b/charms/focal/autopkgtest-web/webcontrol/request/app.py
index 15c5b5c..b66cfad 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/app.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/app.py
@@ -1,4 +1,5 @@
"""Test Request Flask App"""
+#pylint: disable=import-error, consider-using-f-string, invalid-name, too-many-locals, no-else-return, no-value-for-parameter, too-many-return-statements, too-many-branches, too-many-statements, bad-option-value
import os
import logging
import hmac
@@ -58,24 +59,24 @@ SUCCESS = """
"""
-def check_github_sig(request):
- """Validate github signature of request.
+def check_github_sig(req):
+ """Validate github signature of req.
See https://developer.github.com/webhooks/securing/
"""
# load key
keyfile = os.path.expanduser('~/github-secrets.json')
- package = request.args.get('package')
+ package = req.args.get('package')
try:
- with open(keyfile) as f:
- keymap = json.load(f)
+ with open(keyfile, encoding='utf-8') as keyfile:
+ keymap = json.load(keyfile)
key = keymap[package].encode('ASCII')
- except (IOError, ValueError, KeyError, UnicodeEncodeError) as e:
- logging.error('Failed to load GitHub key for package %s: %s', package, e)
+ except (IOError, ValueError, KeyError, UnicodeEncodeError) as github_sig_error:
+ logging.error('Failed to load GitHub key for package %s: %s', package, github_sig_error)
return False
- sig_sha1 = request.headers.get('X-Hub-Signature', '')
- payload_sha1 = 'sha1=' + hmac.new(key, request.data, 'sha1').hexdigest()
+ sig_sha1 = req.headers.get('X-Hub-Signature', '')
+ payload_sha1 = 'sha1=' + hmac.new(key, req.data, 'sha1').hexdigest()
if hmac.compare_digest(sig_sha1, payload_sha1):
return True
logging.error('check_github_sig: signature mismatch! received: %s calculated: %s',
@@ -89,7 +90,7 @@ def invalid(message, code=400):
html = LOGOUT.format(**session)
else:
html = ''
- html += '<p>You submitted an invalid request: %s</p>' % maybe_escape(str(message))
+ html += f"<p>You submitted an invalid request: {maybe_escape(str(message))}</p>"
logging.error('Request failed with %i: %s', code, message)
return HTML.format(html), code
@@ -128,16 +129,16 @@ def index_root():
del params[getarg]
except KeyError:
pass
- l = request.args.getlist(getarg)
- if l:
- params[paramname] = [maybe_escape(p) for p in l]
+ req_list = request.args.getlist(getarg)
+ if req_list:
+ params[paramname] = [maybe_escape(p) for p in req_list]
# split "VAR1=value;VAR2=value" --env arguments, as some frameworks don't
# allow multipe "env="
if 'env' in params:
splitenv = []
- for e in params['env']:
- splitenv += e.split(';')
+ for env_param in params['env']:
+ splitenv += env_param.split(';')
params['env'] = splitenv
# request from github?
@@ -155,7 +156,8 @@ def index_root():
s = Submit()
try:
- params.setdefault('env', []).append('UPSTREAM_PULL_REQUEST=%i' % int(github_params['number']))
+ params.setdefault('env', []).append('UPSTREAM_PULL_REQUEST=%i' % \
+ int(github_params['number']))
statuses_url = github_params['pull_request']['statuses_url']
params['env'].append('GITHUB_STATUSES_URL=' + statuses_url)
@@ -177,7 +179,7 @@ def index_root():
with open(os.path.join(PATH, 'github-pending', '%s-%s-%s-%s-%s' %
(params['release'], params['arch'],
params['package'], github_params['number'],
- os.path.basename(statuses_url))), 'w') as f:
+ os.path.basename(statuses_url))), 'w', encoding='utf-8') as f:
f.write(json.dumps(params))
# tell GitHub that the test is pending
@@ -213,7 +215,7 @@ def index_root():
return HTML.format(LOGOUT +
"<p>Deleted {} requests</p>".format(count)).format(
- **ChainMap(session, params))
+ **ChainMap(session, params))
if params.get('ppas'):
s.send_amqp_request(context='ppa', **params)
@@ -223,7 +225,7 @@ def index_root():
if not params.get('ppas'):
url = 'https://autopkgtest.ubuntu.com/packages/{}/{}/{}'.format(
params['package'], params['release'], params['arch'])
- params['Result history'] = '<a href="{}">{}</a>'.format(url, url)
+ params['Result history'] = '<a href="{0}">{0}</a>'.format(url)
success = SUCCESS.format(
EMPTY.join(ROW.format(key, val) for key, val in sorted(params.items()))
)
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/submit.py b/charms/focal/autopkgtest-web/webcontrol/request/submit.py
index 0e35b3f..f221d97 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/submit.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/submit.py
@@ -2,6 +2,7 @@
Author: Martin Pitt <martin.pitt@xxxxxxxxxx>
"""
+#pylint: disable=protected-access, invalid-name, consider-using-f-string, import-error, no-member, dangerous-default-value, too-many-arguments, too-many-locals, too-many-branches, too-many-statements, unused-argument, no-value-for-parameter, inconsistent-return-statements, bad-classmethod-argument, bad-option-value
import os
import json
@@ -33,6 +34,9 @@ ALLOWED_USERS_PERPACKAGE = {'snapcraft': ['snappy-m-o']}
class Submit:
+ '''
+ Class to submit an autopkgtest request
+ '''
def __init__(self):
cp = configparser.ConfigParser()
cp.read(os.path.expanduser('~ubuntu/autopkgtest-cloud.conf'))
@@ -40,7 +44,7 @@ class Submit:
# read valid releases and architectures from DB
self.db_con = sqlite3.connect('file:%s?mode=ro' % cp['web']['database_ro'], uri=True)
self.releases = set(UbuntuDistroInfo().supported() + UbuntuDistroInfo().supported_esm())
- logging.debug('Valid releases: %s' % self.releases)
+ logging.debug('Valid releases: %s', self.releases)
self.architectures = set()
c = self.db_con.cursor()
@@ -50,13 +54,13 @@ class Submit:
if row is None:
break
self.architectures.add(row[0])
- logging.debug('Valid architectures: %s' % self.architectures)
+ logging.debug('Valid architectures: %s', self.architectures)
# dissect AMQP URL
self.amqp_creds = urllib.parse.urlsplit(cp['amqp']['uri'],
allow_fragments=False)
assert self.amqp_creds.scheme == 'amqp'
- logging.debug('AMQP credentials: %s' % repr(self.amqp_creds))
+ logging.debug('AMQP credentials: %s', repr(self.amqp_creds))
def validate_distro_request(self, release, arch, package, triggers,
requester, ppas=[], **kwargs):
@@ -103,7 +107,9 @@ class Submit:
raise ValueError('Unknown PPA ' + ppa)
# allow kernel tests for EOL vivid
skip_result_check = (release == 'vivid' and triggers and triggers[0].startswith('linux'))
- if not self.is_valid_package_with_results(None if (ppas or skip_result_check) else release, arch, package):
+ if not self.is_valid_package_with_results(None if (ppas or skip_result_check) else release,
+ arch,
+ package):
raise ValueError('Package %s does not have any test results' %
package)
@@ -117,8 +123,8 @@ class Submit:
for trigger in triggers:
try:
trigsrc, trigver = trigger.split('/')
- except ValueError:
- raise ValueError('Malformed trigger, must be srcpackage/version')
+ except ValueError as exc:
+ raise ValueError('Malformed trigger, must be srcpackage/version') from exc
# Debian Policy 5.6.1 and 5.6.12
if not NAME.match(trigsrc) or not VERSION.match(trigver):
raise ValueError('Malformed trigger')
@@ -245,7 +251,7 @@ class Submit:
routing_key=queue)
@classmethod
- def post_json(klass, url, data, auth_file, project):
+    def post_json(cls, url, data, auth_file, project):
"""Send POST request with JSON data via basic auth.
'data' is a dictionary which will be posted to 'url' in JSON encoded
@@ -259,7 +265,7 @@ class Submit:
HTTPError if POST request fails.
"""
# look up project in auth_file
- with open(auth_file) as f:
+ with open(auth_file, encoding='utf-8') as f:
contents = f.read()
for l in contents.splitlines():
if l.startswith(project + ':'):
@@ -287,7 +293,7 @@ class Submit:
"""Check if a ppa exists"""
team, _, name = ppa.partition('/')
if not NAME.match(team) or not NAME.match(name):
- return None
+ return False
# https://launchpad.net/+apidoc/1.0.html#person-getPPAByName
(code, response) = self.lp_request('~' + team, {
'ws.op': 'getPPAByName',
@@ -298,7 +304,7 @@ class Submit:
'is_valid_ppa(%s): code %u, response %s',
ppa, code, repr(response))
if code < 200 or code >= 300:
- return None
+ return False
if response.get('name') == name:
return True
@@ -349,8 +355,7 @@ class Submit:
release, package, version, code, repr(response))
if response.get('total_size', 0) > 0:
return response['entries'][0]['component_name']
- else:
- return None
+ return None
def can_upload(self, person, release, component, package):
"""Check if person can upload package into Ubuntu release"""
@@ -366,13 +371,13 @@ class Submit:
})
logging.debug('can_upload(%s, %s, %s, %s): (%u, %s)',
person, release, component, package, code, repr(response))
- return code >= 200 and code < 300
+        return 200 <= code < 300
def in_allowed_team(self, person, package=[], teams=[]):
"""Check if person is in ALLOWED_TEAMS"""
for team in (teams or ALLOWED_TEAMS):
- (code, response) = self.lp_request('~%s/participants' % team, {})
+ (_, response) = self.lp_request('~%s/participants' % team, {})
for e in response.get('entries', []):
if e.get('name') == person:
return True
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py
index c3bd16a..6edda1e 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_app.py
@@ -1,4 +1,5 @@
"""Test the Flask app."""
+#pylint: disable=no-value-for-parameter, missing-function-docstring, no-member, too-few-public-methods
import os
@@ -10,6 +11,9 @@ from request.submit import Submit
class AppTestBase(TestCase):
+ '''
+ Base class for testing the app
+ '''
def setUp(self):
request.app.app.config['TESTING'] = True
self.app = request.app.app.test_client()
@@ -38,7 +42,8 @@ class DistroRequestTests(AppTestBase):
@patch('request.app.Submit')
def test_nickname(self, mock_submit):
"""Hitting / with a nickname in the session prompts for logout."""
- mock_submit.return_value.validate_distro_request.side_effect = ValueError('not 31337 enough')
+ mock_submit.return_value.validate_distro_request.side_effect = \
+ ValueError('not 31337 enough')
with self.app.session_transaction() as session:
session['nickname'] = 'person'
ret = self.app.get('/')
@@ -47,7 +52,8 @@ class DistroRequestTests(AppTestBase):
@patch('request.app.Submit')
def test_missing_request(self, mock_submit):
"""Missing GET params should return 400."""
- mock_submit.return_value.validate_distro_request.side_effect = ValueError('not 31337 enough')
+ mock_submit.return_value.validate_distro_request.side_effect = \
+ ValueError('not 31337 enough')
self.prep_session()
ret = self.app.get('/')
self.assertEqual(ret.status_code, 400)
@@ -56,7 +62,8 @@ class DistroRequestTests(AppTestBase):
@patch('request.app.Submit')
def test_invalid_request(self, mock_submit):
"""Invalid GET params should return 400."""
- mock_submit.return_value.validate_distro_request.side_effect = ValueError('not 31337 enough')
+ mock_submit.return_value.validate_distro_request.side_effect = \
+ ValueError('not 31337 enough')
self.prep_session()
ret = self.app.get('/?arch=i386&package=hi&release=testy&trigger=foo/1')
self.assertEqual(ret.status_code, 400)
@@ -91,7 +98,8 @@ class DistroRequestTests(AppTestBase):
def test_valid_request_with_ppas(self, mock_submit):
"""Return success with all params & ppas."""
self.prep_session()
- ret = self.app.get('/?arch=i386&package=hi&release=testy&trigger=foo/1&ppa=train/overlay&ppa=train/001')
+ ret = self.app.get('/?arch=i386&package=hi&release=testy' + \
+ '&trigger=foo/1&ppa=train/overlay&ppa=train/001')
self.assertEqual(ret.status_code, 200)
self.assertIn(b'ubmitted', ret.data)
mock_submit.return_value.validate_distro_request.assert_called_once_with(
@@ -127,7 +135,8 @@ class GitHubRequestTests(AppTestBase):
def test_ping(self):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=cb59904bf33c619ad2c52095deb405c86cc5adfd'),
+ headers=[('X-Hub-Signature',
+ 'sha1=cb59904bf33c619ad2c52095deb405c86cc5adfd'),
('X-GitHub-Event', 'ping')],
data=b'{"info": "https://api.github.com/xx"}')
self.assertEqual(ret.status_code, 200, ret.data)
@@ -139,8 +148,10 @@ class GitHubRequestTests(AppTestBase):
def test_invalid_secret_file(self, mock_submit):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=8572f239e05c652710a4f85d2061cc0fcbc7b127')],
- data=b'{"action": "opened", "number": 2, "pr": "https://api.github.com/xx"}')
+ headers=[('X-Hub-Signature',
+ 'sha1=8572f239e05c652710a4f85d2061cc0fcbc7b127')],
+ data=b'{"action": "opened", "number":' + \
+                            b' 2, "pr": "https://api.github.com/xx"}')
self.assertEqual(ret.status_code, 403, ret.data)
self.assertIn(b'GitHub signature verification failed', ret.data)
@@ -177,7 +188,8 @@ class GitHubRequestTests(AppTestBase):
mock_check_github_sig.return_value = True
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- data=b'{"action": "boring", "number": 2, "pr": "https://api.github.com/xx"}')
+ data=b'{"action": "boring", "number": ' + \
+                                b'2, "pr": "https://api.github.com/xx"}')
self.assertEqual(ret.status_code, 200, ret.data)
self.assertIn(b'GitHub PR action boring is not relevant for testing', ret.data)
self.assertFalse(mock_submit.return_value.validate_git_request.called)
@@ -208,7 +220,8 @@ class GitHubRequestTests(AppTestBase):
def test_valid_simple(self, mock_submit):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
+ headers=[('X-Hub-Signature',
+ 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/two"}}')
@@ -226,7 +239,8 @@ class GitHubRequestTests(AppTestBase):
# we recorded the request
request.app.open.assert_called_with(
os.path.join(request.app.PATH, 'github-pending', 'testy-C51-hi-2-two'), 'w')
- self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two', str(request.app.open().write.call_args))
+ self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two',
+ str(request.app.open().write.call_args))
self.assertIn('"arch": "C51"', str(request.app.open().write.call_args))
# we told GitHub about it
@@ -245,7 +259,8 @@ class GitHubRequestTests(AppTestBase):
ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo&'
'ppa=joe/stuff&ppa=mary/misc&env=THIS=a;THAT=b&env=THERE=c',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=f9041325575127310c304bb65f9befb0d13b1ce6')],
+ headers=[('X-Hub-Signature',
+ 'sha1=f9041325575127310c304bb65f9befb0d13b1ce6')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/2"}}')
@@ -271,7 +286,8 @@ class GitHubRequestTests(AppTestBase):
def test_valid_generated_url(self, mock_submit):
ret = self.app.post('/?arch=C51&package=hi&release=testy',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=427a20827d46f5fe8e18f08b9a7fa09ba915ea08')],
+ headers=[('X-Hub-Signature',
+ 'sha1=427a20827d46f5fe8e18f08b9a7fa09ba915ea08')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/two",'
b' "base": {"repo": {"clone_url": "https://github.com/joe/x.git"}}}}')
@@ -315,9 +331,11 @@ class GitHubRequestTests(AppTestBase):
mock_open(None, '{"hi": "1111111111111111111111111111111111111111"}'),
create=True)
def test_valid_testname(self, mock_submit):
- ret = self.app.post('/?arch=C51&package=hi&release=testy&build-git=http://x.com/foo&testname=first',
+ ret = self.app.post('/?arch=C51&package=hi&release=testy&build' + \
+ '-git=http://x.com/foo&testname=first',
content_type='application/json',
- headers=[('X-Hub-Signature', 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
+ headers=[('X-Hub-Signature',
+ 'sha1=1dae67d4406d21b498806968a3def61754498a21')],
data=b'{"action": "opened", "number": 2, "pull_request":'
b' {"statuses_url": "https://api.github.com/two"}}')
@@ -335,7 +353,8 @@ class GitHubRequestTests(AppTestBase):
# we recorded the request
request.app.open.assert_called_with(
os.path.join(request.app.PATH, 'github-pending', 'testy-C51-hi-2-two'), 'w')
- self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two', str(request.app.open().write.call_args))
+ self.assertIn('GITHUB_STATUSES_URL=https://api.github.com/two',
+ str(request.app.open().write.call_args))
self.assertIn('"testname": "first"', str(request.app.open().write.call_args))
diff --git a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py
index bae9185..d326e46 100644
--- a/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py
+++ b/charms/focal/autopkgtest-web/webcontrol/request/tests/test_submit.py
@@ -1,7 +1,7 @@
"""Submit Tests
-
Test all things related verifying input arguments and sending AMQP requests.
"""
+# pylint: disable=arguments-differ, consider-using-f-string, bad-option-value
import sqlite3
@@ -33,7 +33,7 @@ class SubmitTestBase(TestCase):
# mock config values
cfg = {'amqp': {'uri': 'amqp://user:s3kr1t@1.2.3.4'},
'web': {'database': '/ignored', 'database_ro': '/ignored'},
- 'autopkgtest' : { 'releases': 'testy grumpy' }}
+ 'autopkgtest' : {'releases': 'testy grumpy'}}
mock_configparser.return_value = MagicMock()
mock_configparser.return_value.__getitem__.side_effect = cfg.get
@@ -60,100 +60,105 @@ class DistroRequestValidationTests(SubmitTestBase):
def test_bad_release(self):
"""Unknown release"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('fooly', 'C51', 'blue', ['ab/1'], 'joe')
- self.assertEqual(str(cme.exception), 'Unknown release fooly')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown release fooly')
def test_bad_arch(self):
"""Unknown architecture"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'wut', 'blue', ['ab/1'], 'joe')
- self.assertEqual(str(cme.exception), 'Unknown architecture wut')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown architecture wut')
def test_bad_package(self):
"""Unknown package"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'badpkg', ['ab/1'], 'joe')
- self.assertIn('Package badpkg', str(cme.exception))
+ self.assertIn('Package badpkg', str(mock_response_error.exception))
def test_bad_argument(self):
"""Unknown argument"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1'], 'joe', foo='bar')
- self.assertIn('Invalid argument foo', str(cme.exception))
+ self.assertIn('Invalid argument foo', str(mock_response_error.exception))
def test_invalid_trigger_syntax(self):
"""Invalid syntax in trigger"""
# invalid trigger format
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab'], 'joe')
- self.assertIn('Malformed trigger', str(cme.exception))
+ self.assertIn('Malformed trigger', str(mock_response_error.exception))
# invalid trigger source package name chars
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['a!b/1'], 'joe')
# invalid trigger version chars
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1!1'], 'joe')
- self.assertIn('Malformed trigger', str(cme.exception))
+ self.assertIn('Malformed trigger', str(mock_response_error.exception))
def test_disallowed_testname(self):
"""testname not allowed for distro tests"""
# we only allow this for GitHub requests; with distro requests it would
# be cheating as proposed-migration would consider those
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe', testname='first')
- self.assertIn('Invalid argument testname', str(cme.exception))
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_distro_request('testy',
+ 'C51',
+ 'blue',
+ ['ab/1.2'],
+ 'joe',
+ testname='first')
+ self.assertIn('Invalid argument testname', str(mock_response_error.exception))
@patch('request.submit.urllib.request.urlopen')
def test_ppa(self, mock_urlopen):
"""PPA does not exist"""
# invalid name don't even call lp
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['b~ad/ppa'])
- self.assertEqual(str(cme.exception), 'Unknown PPA b~ad/ppa')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA b~ad/ppa')
self.assertEqual(mock_urlopen.call_count, 0)
# mock Launchpad response: successful form, but no match
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.return_value = b'{}'
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.return_value = b'{}'
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['bad/ppa'])
- self.assertEqual(str(cme.exception), 'Unknown PPA bad/ppa')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA bad/ppa')
self.assertEqual(mock_urlopen.call_count, 1)
# success
- cm.read.return_value = b'{"name": "there"}'
+ mock_response.read.return_value = b'{"name": "there"}'
self.assertTrue(self.submit.is_valid_ppa('hi/there'))
# broken JSON response
- cm.read.return_value = b'not { json}'
- with self.assertRaises(ValueError) as cme:
+ mock_response.read.return_value = b'not { json}'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['broke/ness'])
# same, but entirely failing query -- let's be on the safe side
- cm.getcode.return_value = 404
- cm.read.return_value = b'<html>not found</html>'
- with self.assertRaises(ValueError) as cme:
+ mock_response.getcode.return_value = 404
+ mock_response.read.return_value = b'<html>not found</html>'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request(
'testy', 'C51', 'foo', ['ab/1.2'], 'joe', ['bro/ken'])
- self.assertEqual(str(cme.exception), 'Unknown PPA bro/ken')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA bro/ken')
@patch('request.submit.urllib.request.urlopen')
def test_nonexisting_trigger(self, mock_urlopen):
@@ -161,30 +166,30 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, but no matching
# source/version
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.return_value = b'{"total_size": 0}'
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.return_value = b'{"total_size": 0}'
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
- self.assertEqual(str(cme.exception), 'ab/1.2 is not published in testy')
+ self.assertEqual(str(mock_response_error.exception), 'ab/1.2 is not published in testy')
self.assertEqual(mock_urlopen.call_count, 1)
# broken JSON response
- cm.read.return_value = b'not { json}'
- with self.assertRaises(ValueError) as cme:
+ mock_response.read.return_value = b'not { json}'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
# same, but entirely failing query -- let's be on the safe side
- cm.getcode.return_value = 404
- cm.read.return_value = b'<html>not found</html>'
- with self.assertRaises(ValueError) as cme:
+ mock_response.getcode.return_value = 404
+ mock_response.read.return_value = b'<html>not found</html>'
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
- self.assertEqual(str(cme.exception), 'ab/1.2 is not published in testy')
+ self.assertEqual(str(mock_response_error.exception), 'ab/1.2 is not published in testy')
@patch('request.submit.urllib.request.urlopen')
def test_bad_package_ppa(self, mock_urlopen):
@@ -192,19 +197,20 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, but no matching
# source/version
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.side_effect = [b'{"name": "overlay"}',
- b'{"name": "goodstuff"}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.side_effect = [b'{"name": "overlay"}',
+ b'{"name": "goodstuff"}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'badpkg', ['ab/1.2'], 'joe',
ppas=['team/overlay', 'joe/goodstuff'])
- self.assertEqual(str(cme.exception), 'Package badpkg does not have any test results')
+ self.assertEqual(str(mock_response_error.exception),
+ 'Package badpkg does not have any test results')
self.assertEqual(mock_urlopen.call_count, 2)
@patch('request.submit.urllib.request.urlopen')
@@ -213,20 +219,21 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, but no matching
# source/version
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.side_effect = [b'{"name": "overlay"}',
- b'{"name": "goodstuff"}',
- b'{"total_size": 0}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.side_effect = [b'{"name": "overlay"}',
+ b'{"name": "goodstuff"}',
+ b'{"total_size": 0}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe',
ppas=['team/overlay', 'joe/goodstuff'])
- self.assertEqual(str(cme.exception), 'ab/1.2 is not published in PPA joe/goodstuff testy')
+ self.assertEqual(str(mock_response_error.exception),
+ 'ab/1.2 is not published in PPA joe/goodstuff testy')
self.assertEqual(mock_urlopen.call_count, 3)
@patch('request.submit.urllib.request.urlopen')
@@ -235,19 +242,20 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload not allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- b'{"total_size": 1, "entries": [{"name": "joe2"}]}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = \
+ [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
+ HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
+ HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
+ b'{"total_size": 1, "entries": [{"name": "joe2"}]}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
- self.assertIn('not allowed to upload blue or ab', str(cme.exception))
+ self.assertIn('not allowed to upload blue or ab', str(mock_response_error.exception))
self.assertEqual(mock_urlopen.call_count, 4)
@patch('request.submit.urllib.request.urlopen')
@@ -256,13 +264,14 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- b'true']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ b'true']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
self.assertEqual(mock_urlopen.call_count, 2)
@@ -273,13 +282,14 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- b'true']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ b'true']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'],
'joe', **{'all-proposed': '1'})
@@ -288,10 +298,10 @@ class DistroRequestValidationTests(SubmitTestBase):
def test_distro_all_proposed_bad_value(self):
"""Valid distro request with invalid all-proposed value"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'],
'joe', **{'all-proposed': 'bogus'})
- self.assertIn('nvalid all-proposed value', str(cme.exception))
+ self.assertIn('nvalid all-proposed value', str(mock_response_error.exception))
@patch('request.submit.urllib.request.urlopen')
def test_validate_distro_whitelisted_team(self, mock_urlopen):
@@ -299,15 +309,18 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),
- b'{"total_size": 1, "entries": [{"name": "joe"}]}']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ HTTPError('https://lp/checkUpload',
+ 403, 'Forbidden', {}, None),
+ HTTPError('https://lp/checkUpload',
+ 403, 'Forbidden', {}, None),
+ b'{"total_size": 1, "entries": [{"name": "joe"}]}']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe')
self.assertEqual(mock_urlopen.call_count, 4)
@@ -318,18 +331,20 @@ class DistroRequestValidationTests(SubmitTestBase):
# mock Launchpad response: successful form, matching
# source/version, upload allowed
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.read.side_effect = [b'{"name": "overlay"}',
- b'{"name": "goodstuff"}',
- # check if package is published in PPA
- b'{"total_size": 1, "entries": [{"component_name": "main"}]}',
- # component name in Ubuntu archive
- b'{"total_size": 1, "entries": [{"component_name": "universe"}]}',
- b'true']
- cm.return_value = cm
- mock_urlopen.return_value = cm
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.read.side_effect = [b'{"name": "overlay"}',
+ b'{"name": "goodstuff"}',
+ # check if package is published in PPA
+ b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "main"}]}',
+ # component name in Ubuntu archive
+ b'{"total_size": 1, "entries": ' + \
+ '[{"component_name": "universe"}]}',
+ b'true']
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
self.submit.validate_distro_request('testy', 'C51', 'blue', ['ab/1.2'], 'joe',
ppas=['team/overlay', 'joe/goodstuff'])
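Several hunks above stack byte strings and HTTPError instances in a single `read.side_effect` list, so a brief illustrative sketch of that standard `unittest.mock` behaviour may help while reviewing (this is not code from the branch): values are handed back in order, one per call, and any exception instance in the list is raised instead of returned.

# Sketch of the side_effect semantics the tests above rely on.
from unittest.mock import MagicMock
from urllib.error import HTTPError

mock_response = MagicMock()
mock_response.read.side_effect = [
    b'{"name": "overlay"}',                                           # first read() returns this
    HTTPError('https://lp/checkUpload', 403, 'Forbidden', {}, None),  # second read() raises this
    b'true',                                                          # third read() returns this
]

assert mock_response.read() == b'{"name": "overlay"}'
try:
    mock_response.read()
except HTTPError as error:
    assert error.code == 403
assert mock_response.read() == b'true'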
@@ -340,105 +355,132 @@ class GitRequestValidationTests(SubmitTestBase):
"""Test verification of git branch test requests"""
def test_bad_release(self):
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('fooly', 'C51', 'ab', **{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Unknown release fooly')
+ """Tests invalid release"""
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('fooly',
+ 'C51',
+ 'ab',
+ **{'build-git': 'https://x.com/proj'})
+ self.assertEqual(str(mock_response_error.exception), 'Unknown release fooly')
def test_bad_arch(self):
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('testy', 'wut', 'a!b', **{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Unknown architecture wut')
+ """Tests invalid architecture"""
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('testy',
+ 'wut',
+ 'a!b',
+ **{'build-git': 'https://x.com/proj'})
+ self.assertEqual(str(mock_response_error.exception), 'Unknown architecture wut')
def test_bad_package(self):
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('testy', 'C51', 'a!b', **{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Malformed package')
+ """Tests invalid package"""
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('testy',
+ 'C51',
+ 'a!b',
+ **{'build-git': 'https://x.com/proj'})
+ self.assertEqual(str(mock_response_error.exception), 'Malformed package')
@patch('request.submit.urllib.request.urlopen')
def test_unknown_ppa(self, mock_urlopen):
+ """Tests invalid ppa"""
# mock Launchpad response: successful form, but no match
- cm = MagicMock()
- cm.__enter__.return_value = cm
- cm.getcode.return_value = 200
- cm.geturl.return_value = 'http://mock.launchpad.net'
- cm.read.return_value = b'{}'
- cm.return_value = cm
- mock_urlopen.return_value = cm
-
- with self.assertRaises(ValueError) as cme:
+ mock_response = MagicMock()
+ mock_response.__enter__.return_value = mock_response
+ mock_response.getcode.return_value = 200
+ mock_response.geturl.return_value = 'http://mock.launchpad.net'
+ mock_response.read.return_value = b'{}'
+ mock_response.return_value = mock_response
+ mock_urlopen.return_value = mock_response
+
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab', ['bad/ppa'],
**{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Unknown PPA bad/ppa')
+ self.assertEqual(str(mock_response_error.exception), 'Unknown PPA bad/ppa')
self.assertEqual(mock_urlopen.call_count, 1)
@patch('request.submit.Submit.is_valid_ppa')
def test_bad_env(self, is_valid_ppa):
+ """Tests invalid env variables"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
env=['foo=1', 'bar=1\n='],
**{'build-git': 'https://x.com/proj',
'ppas': ['a/b']})
- self.assertIn('Invalid environment', str(cme.exception))
- self.assertIn('bar=1', str(cme.exception))
+ self.assertIn('Invalid environment', str(mock_response_error.exception))
+ self.assertIn('bar=1', str(mock_response_error.exception))
def test_no_ppa(self):
"""No PPA"""
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'https://x.com/proj'})
- self.assertEqual(str(cme.exception), 'Must specify at least one PPA (to associate results with)')
+ self.assertEqual(str(mock_response_error.exception),
+ 'Must specify at least one PPA (to associate results with)')
@patch('request.submit.Submit.is_valid_ppa')
def test_bad_git_url(self, is_valid_ppa):
+ """Tests invalid git url"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
- self.submit.validate_git_request('testy', 'C51', 'ab', **{'build-git': 'foo://x.com/proj',
- 'ppas': ['a/b']})
- self.assertEqual(str(cme.exception), 'Malformed build-git')
+ with self.assertRaises(ValueError) as mock_response_error:
+ self.submit.validate_git_request('testy',
+ 'C51',
+ 'ab',
+ **{'build-git': 'foo://x.com/proj',
+ 'ppas': ['a/b']})
+ self.assertEqual(str(mock_response_error.exception), 'Malformed build-git')
@patch('request.submit.Submit.is_valid_ppa')
def test_unknown_param(self, is_valid_ppa):
+ """Tests unknown parameter passed"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj', 'ppas': ['a/b'],
'foo': 'bar'})
- self.assertEqual(str(cme.exception), 'Unsupported arguments: foo')
+ self.assertEqual(str(mock_response_error.exception), 'Unsupported arguments: foo')
@patch('request.submit.Submit.is_valid_ppa')
def test_bad_testname(self, is_valid_ppa):
+ """Tests a janky testname"""
is_valid_ppa.return_value = True
- with self.assertRaises(ValueError) as cme:
+ with self.assertRaises(ValueError) as mock_response_error:
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj', 'testname': 'a !',
'ppas': ['a/b']})
- self.assertEqual(str(cme.exception), 'Malformed testname')
+ self.assertEqual(str(mock_response_error.exception), 'Malformed testname')
@patch('request.submit.Submit.is_valid_ppa')
def test_valid(self, is_valid_ppa):
+ """Tests a valid test to make sure it works"""
is_valid_ppa.return_value = True
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj',
- 'env': ['STATUS_URL=https://api.github.com/proj/123deadbeef'],
+ 'env': ['STATUS_URL=' + \
+ 'https://api.github.com/proj/123deadbeef'],
'ppas': ['a/b']})
@patch('request.submit.Submit.is_valid_ppa')
def test_branch(self, is_valid_ppa):
+ """Tests a specific branch of a package"""
is_valid_ppa.return_value = True
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj#refs/pull/2/head',
- 'env': ['STATUS_URL=https://api.github.com/proj/123deadbeef'],
+ 'env': ['STATUS_URL=' + \
+ 'https://api.github.com/proj/123deadbeef'],
'ppas': ['a/b']})
@patch('request.submit.Submit.is_valid_ppa')
def test_valid_testname(self, is_valid_ppa):
+ """Tests with a valid testname"""
is_valid_ppa.return_value = True
self.submit.validate_git_request('testy', 'C51', 'ab',
**{'build-git': 'http://x.com/proj',
'testname': 'first',
- 'env': ['STATUS_URL=https://api.github.com/proj/123deadbeef'],
+ 'env': ['STATUS_URL=' + \
+ 'https://api.github.com/proj/123deadbeef'],
'ppas': ['a/b']})
@@ -448,6 +490,7 @@ class SendAMQPTests(SubmitTestBase):
@patch('request.submit.amqp.Connection')
@patch('request.submit.amqp.Message')
def test_valid_request(self, message_con, mock_con):
+ """Tests a completely valid package"""
# mostly a passthrough, but ensure that we do wrap the string in Message()
message_con.side_effect = lambda x: '>%s<' % x
@@ -465,6 +508,7 @@ class SendAMQPTests(SubmitTestBase):
@patch('request.submit.amqp.Connection')
@patch('request.submit.amqp.Message')
def test_valid_request_context(self, message_con, mock_con):
+ """Tests with valid context"""
# mostly a passthrough, but ensure that we do wrap the string in Message()
message_con.side_effect = lambda x: '>%s<' % x
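For reviewers less familiar with the pattern repeated throughout these tests, here is a minimal, self-contained sketch (not part of the diff; `fetch_name()` is a hypothetical stand-in for the Launchpad queries in request/submit.py) of why each mock sets `__enter__.return_value` to itself: the code under test calls `urllib.request.urlopen()` as a context manager, so the same object must act both as the return value of `urlopen()` and as the object bound by the `with` statement.

# Minimal sketch of mocking urlopen() as a context manager; fetch_name() is hypothetical.
import json
import urllib.request
from unittest.mock import MagicMock, patch

def fetch_name(url):
    # consumes urlopen the same way request/submit.py does: inside a "with" block
    with urllib.request.urlopen(url) as response:
        return json.loads(response.read().decode())["name"]

with patch('urllib.request.urlopen') as mock_urlopen:
    mock_response = MagicMock()
    mock_response.__enter__.return_value = mock_response   # the "with" block binds the mock itself
    mock_response.getcode.return_value = 200
    mock_response.read.return_value = b'{"name": "there"}'
    mock_urlopen.return_value = mock_response
    assert fetch_name('http://mock.launchpad.net') == 'there'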
diff --git a/charms/focal/autopkgtest-web/webcontrol/update-github-jobs b/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
index f2bb430..d4cd611 100755
--- a/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
+++ b/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
@@ -1,5 +1,8 @@
#!/usr/bin/python3
-
+'''
+Processes autopkgtest-cloud jobs requested from GitHub
+'''
+#pylint: disable=no-value-for-parameter, consider-using-f-string, consider-using-ternary, invalid-name, bad-option-value
import os
import json
import configparser
@@ -8,7 +11,6 @@ import time
import io
import sys
import tarfile
-import time
import urllib.request
import urllib.parse
from urllib.error import HTTPError
@@ -17,13 +19,13 @@ from request.submit import Submit
PENDING_DIR = '/run/autopkgtest_webcontrol/github-pending'
-swift_url = None
-external_url = None
+SWIFT_URL = None
+EXTERNAL_URL = None
def result_matches_job(result_url, params):
- # download result.tar and get exit code and testinfo
- for retry in range(5):
+ """download result.tar and get exit code and testinfo"""
+ for _ in range(5):
try:
with urllib.request.urlopen(result_url + '/result.tar') as f:
tar_bytes = io.BytesIO(f.read())
@@ -33,7 +35,7 @@ def result_matches_job(result_url, params):
time.sleep(1)
else:
logging.error('failed to download result %s', result_url)
- return
+ return -1
try:
with tarfile.open(None, 'r', tar_bytes) as tar:
@@ -41,18 +43,18 @@ def result_matches_job(result_url, params):
info = json.loads(tar.extractfile('testinfo.json').read().decode())
except (KeyError, ValueError, tarfile.TarError) as e:
logging.error('broken result %s: %s', result_url, e)
- return
+ return -1
try:
result_env = info['custom_environment']
except KeyError:
logging.info('result has no custom_environment, ignoring')
- return
+ return -1
# if the test result has the same parameters as the job, we have a winner
if result_env != params['env']:
logging.debug('exit code: %i, ignoring due to different test env: %s',
exitcode, result_env)
- return
+ return -1
logging.debug('exit code: %i, test env matches job: %s',
exitcode, result_env)
@@ -88,8 +90,9 @@ def finish_job(jobfile, params, code, log_url):
def process_job(jobfile):
+ """Processes requested github job"""
try:
- with open(jobfile) as f:
+ with open(jobfile, encoding='utf-8') as f:
params = json.load(f)
mtime = os.fstat(f.fileno()).st_mtime
except json.decoder.JSONDecodeError as e:
@@ -105,7 +108,7 @@ def process_job(jobfile):
container += '-' + params['ppas'][-1].replace('/', '-')
except (KeyError, IndexError):
pass
- container_url = os.path.join(swift_url, container)
+ container_url = os.path.join(SWIFT_URL, container)
package = params['package']
pkghash = package.startswith('lib') and package[:4] or package[0]
timestamp = time.strftime('%Y%m%d_%H%M%S', time.gmtime(mtime))
@@ -126,7 +129,7 @@ def process_job(jobfile):
code = result_matches_job(result_url, params)
if code is not None:
finish_job(jobfile, params, code,
- result_url.replace(swift_url, external_url) + '/log.gz')
+ result_url.replace(SWIFT_URL, EXTERNAL_URL) + '/log.gz')
break
except HTTPError as e:
logging.error('job %s URL %s failed: %s', os.path.basename(jobfile), query_url, e)
@@ -143,11 +146,11 @@ if __name__ == '__main__':
config = configparser.ConfigParser()
config.read(os.path.expanduser('~ubuntu/autopkgtest-cloud.conf'))
- swift_url = config['web']['SwiftURL']
+ SWIFT_URL = config['web']['SwiftURL']
try:
- external_url = config['web']['ExternalURL']
+ EXTERNAL_URL = config['web']['ExternalURL']
except KeyError:
- external_url = swift_url
+ EXTERNAL_URL = SWIFT_URL
jobs = sys.argv[1:]
@@ -156,4 +159,3 @@ if __name__ == '__main__':
for job in jobs:
process_job(os.path.join(PENDING_DIR, job))
-
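The for/else retry idiom touched by the result_matches_job() hunks can be hard to read in diff form, so here is a standalone sketch of the same construct (simplified and illustrative only; the exception type is assumed to be HTTPError, and the real function also handles broken tarballs): the else branch runs only when the loop finishes all attempts without hitting break.

# Sketch of the retry idiom used by result_matches_job(); details are simplified.
import logging
import time
import urllib.request
from urllib.error import HTTPError

def fetch_result_tar(result_url):
    """Return the raw result tarball bytes, or None after five failed attempts."""
    for _ in range(5):
        try:
            with urllib.request.urlopen(result_url + '/result.tar') as f:
                tar_bytes = f.read()
            break                      # success: leave the loop and skip the else
        except HTTPError:
            time.sleep(1)              # transient failure: wait, then retry
    else:
        # only reached if the loop never hit "break", i.e. every attempt failed
        logging.error('failed to download result %s', result_url)
        return None
    return tar_bytes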
diff --git a/ci/lint_test b/ci/lint_test
index e52edc4..06a4133 100755
--- a/ci/lint_test
+++ b/ci/lint_test
@@ -1,5 +1,5 @@
#!/usr/bin/python3
-# pylint: disable = invalid-name, broad-except, subprocess-run-check
+# pylint: disable = broad-except, unnecessary-dict-index-lookup, bad-option-value
'''
Script to lint the scripts in the autopkgtest-cloud repository in CI
'''
@@ -8,33 +8,33 @@ import os
import sys
import logging
import subprocess
+import argparse
-def check_for_extension(input_list, output_list, extension):
+def check_for_extension(input_list, output_list, file_extension):
'''
- Checks filepaths in a list for a given extension
+ Checks filepaths in a list for a given file_extension
'''
- for a in input_list:
- if os.path.isfile(a):
- # if str(a)[-3:] == extension:
- if extension in str(a)[-6:]:
- output_list.append(str(a))
+ for filepath in input_list:
+ if os.path.isfile(filepath):
+ if file_extension in str(filepath)[-6:]:
+ output_list.append(str(filepath))
return output_list
-def check_for_shebang(input_list, output_list, shebang):
+def check_for_shebang(input_list, output_list, shebang_for_check):
'''
- Checks filepaths in a given list for a given shebang
+ Checks filepaths in a given list for a given shebang_for_check
'''
- for b in input_list:
- if os.path.isfile(b):
+ for filepath in input_list:
+ if os.path.isfile(filepath):
try:
- with open(b, 'r', encoding='utf-8') as myfile:
+ with open(filepath, 'r', encoding='utf-8') as myfile:
file = myfile.read()
into_list = file.splitlines()
if len(into_list) > 1:
- if into_list[0] == shebang:
- output_list.append(str(b))
+ if into_list[0] == shebang_for_check:
+ output_list.append(str(filepath))
except Exception as _:
pass
return output_list
@@ -44,42 +44,56 @@ def remove_list_from_list(input_list, remove_list):
'''
Removes elements from remove_list from input_list
'''
- for ff in input_list:
- if os.path.isfile(ff):
- if str(ff) in remove_list:
- input_list.remove(ff)
+ for list_elem in input_list:
+ if os.path.isfile(list_elem):
+ if str(list_elem) in remove_list:
+ input_list.remove(list_elem)
return input_list
def run_lint_command(files_to_lint, lint_command, arguments=None):
'''
- Runs a given lint command over a list of filepaths and stores output
+ Runs given lint commands over a list of filepaths and stores output
and exit code
'''
- exit_codes = 0
- lint_output = ""
- # check lint command exists
- for f in files_to_lint:
- if arguments is None:
- cmd = [lint_command, f]
- result = subprocess.run(cmd, stdout=subprocess.PIPE)
- else:
- cmd = [lint_command]
- for arg in arguments.split(" "):
- cmd.append(arg)
- cmd.append(f)
- result = subprocess.run(cmd, stdout=subprocess.PIPE)
- lint_output += result.stdout.decode("utf-8") + "\n"
- exit_codes += result.returncode
- return lint_output, exit_codes
+ exit_codes = []
+ lint_output = []
+ lint_success = True
+ check_for_cmd = subprocess.run(["which", lint_command], stdout=subprocess.PIPE, check=False)
+ if check_for_cmd.returncode != 0:
+ logger.error("%s not present on system - please amend before using this script.",
+ lint_command)
+ sys.exit(1)
+ for file in files_to_lint:
+ if ".git" not in file:
+ if arguments is None:
+ cmd = [lint_command, file]
+ result = subprocess.run(cmd, stdout=subprocess.PIPE, check=False)
+ else:
+ cmd = [lint_command]
+ for arg in arguments.split(" "):
+ cmd.append(arg)
+ cmd.append(file)
+ result = subprocess.run(cmd, stdout=subprocess.PIPE, check=False)
+ lint_output.append(result.stdout.decode("utf-8") + "\n")
+ exit_codes.append(result.returncode)
+ if result.returncode != 0:
+ lint_success = False
+ return lint_output, exit_codes, lint_success
-if __name__=="__main__":
+if __name__ == "__main__":
+ # pylint: disable=invalid-name
+ parser = argparse.ArgumentParser(description="Args for lint test")
+ parser.add_argument('-v',
+ '--verbose',
+ help="Verbose output from lint test (y/n)",
+ action='store_true')
+ args = parser.parse_args()
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('autopkgtest-cloud-linter')
- start_dir = "../"
- repo_dir = pathlib.Path(start_dir)
+ repo_dir = pathlib.Path("../")
repo_dir.rglob("*")
final_list_of_python_files = []
@@ -90,25 +104,28 @@ if __name__=="__main__":
"files": [],
"extensions": [".py"],
"shebangs": ["#!/usr/bin/python3"],
- "args": None,
- "output": "",
- "code": 0
+ "args": "--disable=E0012",
+ "output": [],
+ "code": [],
+ "success": False
},
"shellcheck": {
"files": [],
"extensions": [".sh", ".bash"],
"shebangs": ["#!/bin/bash", "#!/bin/sh"],
"args": None,
- "output": "",
- "code": 0
+ "output": [],
+ "code": [],
+ "success": False
},
'yamllint': {
"files": ["../"],
"extensions": None,
"shebangs": None,
"args": "--no-warnings",
- "output": "",
- "code": 0
+ "output": [],
+ "code": [],
+ "success": False
}
}
@@ -122,19 +139,23 @@ if __name__=="__main__":
data[key]["files"] = check_for_shebang(all_files, data[key]["files"], shebang)
all_files = remove_list_from_list(all_files, data[key]["files"])
data[key]["output"], \
- data[key]["code"] = run_lint_command(data[key]["files"], key, data[key]["args"])
- ecodesum = 0
- for _, oec in data.items():
- ecodesum += oec["code"]
- if ecodesum > 0:
+ data[key]["code"], \
+ data[key]["success"] = run_lint_command(data[key]["files"], key, data[key]["args"])
+
+ exit_code_sum = 0
+ for _, oec_s in data.items():
+ for e_c in oec_s["code"]:
+ exit_code_sum += e_c
+ if exit_code_sum > 0:
for key, item in data.items():
- if item["code"] > 0:
- # logger.info("%s output: \n%s", key, item["output"])
+ if not item["success"]:
logger.info("%s failed!", key)
- # sys.exit(1)
- # temporary exit code, will be set back to 1 when python and bash scripts have been linted
- # right now we are just checking yaml files
- if key == "yamllint" and item["code"] != 0:
- sys.exit(1)
- sys.exit(0)
- sys.exit(0)
\ No newline at end of file
+ if args.verbose:
+ for i in range(len(item["code"])):
+ if item["code"][i] != 0:
+ logger.info("%s", item["output"][i])
+ else:
+ logger.info("%s passed!", key)
+ sys.exit(1)
+ logger.info("All the following linting tests passed: %s\n", list(data.keys()))
+ sys.exit(0)
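As a quick reference for how the reworked run_lint_command() is intended to behave, the sketch below is a simplified stand-in, not the patch itself (`shutil.which()` replaces the patch's `subprocess.run(["which", ...])` probe): it runs one linter over a list of files and returns per-file output, per-file exit codes, and an overall success flag.

# Simplified sketch mirroring the behaviour of run_lint_command() in ci/lint_test.
import shutil
import subprocess
import sys

def run_linter(files, command, extra_args=None):
    if shutil.which(command) is None:
        print(f"{command} not present on system", file=sys.stderr)
        sys.exit(1)
    outputs, codes, success = [], [], True
    for path in files:
        cmd = [command] + (extra_args or []) + [path]
        result = subprocess.run(cmd, stdout=subprocess.PIPE, check=False)
        outputs.append(result.stdout.decode("utf-8"))
        codes.append(result.returncode)
        if result.returncode != 0:
            success = False
    return outputs, codes, success

# e.g.: run_linter(["../ci/lint_test"], "pylint", ["--disable=E0012"])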
diff --git a/docs/conf.py b/docs/conf.py
index a46ffdc..b4b923c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,7 +1,7 @@
'''
Configuration file for the Sphinx documentation builder.
'''
-#pylint: disable=redefined-builtin
+#pylint: disable=redefined-builtin, invalid-name
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
diff --git a/lxc-slave-admin/cmd b/lxc-slave-admin/cmd
index ff991ff..0700964 100755
--- a/lxc-slave-admin/cmd
+++ b/lxc-slave-admin/cmd
@@ -1,6 +1,6 @@
#!/bin/sh
set -e
-MYDIR=`dirname $0`
+MYDIR=$(dirname "${0}")
if [ -z "$1" ]; then
echo "Usage: $0 <hosts> <commands or .commands file>" >&2
@@ -8,11 +8,11 @@ if [ -z "$1" ]; then
fi
if [ "$1" = "all" ]; then
- for f in $MYDIR/*.hosts; do
+ for f in "${MYDIR}"/*.hosts; do
hosts="$hosts -h $f";
done
else
- if [ -e ${1} ]; then
+ if [ -e "${1}" ]; then
hosts="-h ${1}"
elif [ -e "${1}.hosts" ]; then
hosts="-h ${1}.hosts"
@@ -29,8 +29,8 @@ if [ "${1%.commands}" != "$1" ]; then
exit 1
fi
# command file
- cat "$1" | parallel-ssh -x "-F $MYDIR/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes $hosts -p8 -t 0 -i -I
+ parallel-ssh -x "-F ${MYDIR}/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes "${hosts}" -p8 -t 0 -i -I < "${1}"
else
# command
- parallel-ssh -x "-F $MYDIR/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes $hosts -p8 -t 0 -i -- "$@"
+ parallel-ssh -x "-F ${MYDIR}/ssh_config" -OUserKnownHostsFile=/dev/null -OStrictHostKeyChecking=no -OIdentitiesOnly=yes "${hosts}" -p8 -t 0 -i -- "$@"
fi
diff --git a/mojo/make-lxd-secgroup b/mojo/make-lxd-secgroup
index 634e598..1c1d961 100755
--- a/mojo/make-lxd-secgroup
+++ b/mojo/make-lxd-secgroup
@@ -1,5 +1,5 @@
#!/bin/sh
-
+# shellcheck disable=SC1090
set -eu
# there's apparently no way to get this dynamically
@@ -24,4 +24,4 @@ done
if [ -n "${ROUTER_IP:-}" ]; then
nova secgroup-add-rule lxd tcp 8443 8443 "${ROUTER_IP}/32" 2>/dev/null || true # perhaps it already existed
-fi
+fi
\ No newline at end of file
diff --git a/mojo/postdeploy b/mojo/postdeploy
index 0f857ae..4b0f88f 100755
--- a/mojo/postdeploy
+++ b/mojo/postdeploy
@@ -11,5 +11,6 @@ if [ "${MOJO_STAGE_NAME}" == "staging" ]; then
fi
echo "Setting up the floating IP address of the front end..."
-$(dirname $0)/add-floating-ip haproxy
-$(dirname $0)/add-floating-ip rabbitmq-server
+directory=$(dirname "{0}")
+"${directory}"/add-floating-ip haproxy
+"${directory}"/add-floating-ip rabbitmq-server