[Merge] ~hyask/autopkgtest-cloud:skia/local_dev_worker into autopkgtest-cloud:master
Skia has proposed merging ~hyask/autopkgtest-cloud:skia/local_dev_worker into autopkgtest-cloud:master.
Requested reviews:
Canonical's Ubuntu QA (canonical-ubuntu-qa)
For more details, see:
https://code.launchpad.net/~hyask/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/473523
A set of changes to allow easily running the worker locally, with sane defaults for development.
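In short, the local development flow added here looks roughly like this (a condensed sketch of the new README below; the release and package are just examples):

    # dependencies and a local autopkgtest LXD image
    sudo apt install rabbitmq-server python3-swiftclient python3-influxdb python3-amqp
    autopkgtest-build-lxd ubuntu:noble

    # from charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/
    source ../rabbit.creds.rc
    ./worker

    # from charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/
    source ../rabbit.creds.rc
    ./run-autopkgtest -s noble -a amd64 gzip --trigger migration-reference/0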
--
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~hyask/autopkgtest-cloud:skia/local_dev_worker into autopkgtest-cloud:master.
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/rabbit.creds.env b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/rabbit.creds.env
new file mode 100644
index 0000000..4fab7af
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/rabbit.creds.env
@@ -0,0 +1,3 @@
+RABBIT_HOST=127.0.0.1
+RABBIT_USER=guest
+RABBIT_PASSWORD=guest
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/rabbit.creds.rc b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/rabbit.creds.rc
new file mode 100644
index 0000000..05fccc8
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/rabbit.creds.rc
@@ -0,0 +1,3 @@
+export RABBIT_HOST=127.0.0.1
+export RABBIT_USER=guest
+export RABBIT_PASSWORD=guest
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/README.md b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/README.md
new file mode 100644
index 0000000..36a481f
--- /dev/null
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/README.md
@@ -0,0 +1,59 @@
+# autopkgtest-cloud worker
+
+
+## Developing the worker locally
+
+First install the dependencies:
+`sudo apt install rabbitmq-server python3-swiftclient python3-influxdb python3-amqp`
+
+With the default config (see below), you need an autopkgtest LXD image:
+`autopkgtest-build-lxd ubuntu:noble`
+
+The default installation of RabbitMQ will do. The default credentials are the
+ones in `../rabbit.creds.rc`.
+
+If you don't want RabbitMQ running permanently on your local machine, you can
+disable its systemd service and only start it again when needed.
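+For example (a sketch using the `rabbitmq-server` unit shipped by the Ubuntu
+package):
+
+```
+# stop RabbitMQ and keep it from starting at boot
+sudo systemctl disable --now rabbitmq-server
+# start it again whenever you want to run the worker
+sudo systemctl start rabbitmq-server
+```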
+
+### Run the worker
+
+From this folder:
+
+* `source ../rabbit.creds.rc` to load the environment variables for RabbitMQ.
+* Then simply run `./worker` to launch the worker locally.
+
+You can have a look at `./worker.conf`, which is the configuration used when no
+other arguments are supplied.
+You'll see that it defaults to running tests with the LXD autopkgtest
+virt server, which means that you first need to run
+`autopkgtest-build-lxd ubuntu:$RELEASE`, specifying the release codename you
+want to run the tests for.
+
+The default is also to run without depending on a `swift` server, keeping the
+results locally instead. Watch the worker logs to find out where they are stored.
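+For example, the worker logs a `Keeping results in ...` line with the path, so
+something like this shows it (a sketch; adapt to how you actually run the worker):
+
+```
+# when running ./worker directly in a terminal
+./worker 2>&1 | tee /tmp/worker.log
+grep "Keeping results in" /tmp/worker.log
+
+# when running as the systemd user unit described below
+journalctl --user -u worker | grep "Keeping results in"
+```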
+
+#### As a systemd unit
+
+If for some reason you need to run it as a systemd unit, e.g. to test the
+systemd integration, you can quickly set up a user unit with the following
+(note that the `$(pwd)` calls are expanded by your shell when the unit file is
+written, so run this from the worker folder):
+```
+$ mkdir -p ~/.config/systemd/user
+$ cat - > ~/.config/systemd/user/worker.service <<EOF
+[Unit]
+Description=Autopkgtest-cloud worker
+
+[Service]
+EnvironmentFile=$(dirname $(pwd))/rabbit.creds.env
+ExecStart=$(pwd)/worker
+EOF
+$ systemctl --user daemon-reload
+$ systemctl --user restart worker
+$ journalctl --user -u worker --since "15 min ago" -f
+```
+
+
+### Send a job
+
+From the `../tools` folder:
+
+* `source ../rabbit.creds.rc` to load the environment variables for RabbitMQ.
+* Schedule a job with `./run-autopkgtest -s noble -a amd64 gzip --trigger migration-reference/0`.
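+
+To check that the job actually reached RabbitMQ, you can list the queues and
+their message counts (a sketch; the exact queue names depend on the releases
+and architectures configured in `worker.conf`):
+
+```
+sudo rabbitmqctl list_queues name messages
+```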
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker
index 2a55bce..8b88281 100755
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker
@@ -64,6 +64,7 @@ root_path = os.path.dirname(os.path.abspath(my_path))
args = None
cfg = None
swift_creds = {}
+swift_upload = True
exit_requested = None
running_test = False
status_exchange_name = "teststatus.fanout"
@@ -626,7 +627,8 @@ def cleanup_and_sleep(out_dir):
"""Empty the output dir for the next run, otherwise autopkgtest complains"""
shutil.rmtree(out_dir, ignore_errors=True)
os.mkdir(out_dir)
- time.sleep(300)
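+    # Retry delay is configurable: the charm sets 300s in production, and
+    # worker.conf can use a shorter value for local development.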
+ retry_delay = int(cfg.get("autopkgtest", "retry_delay", fallback=30))
+ time.sleep(retry_delay)
def kill_openstack_server(test_uuid: str):
@@ -1476,89 +1478,101 @@ def request(msg):
release, architecture, prefix, pkgname, run_id
)
- # publish results into swift
- logging.info("Putting results into swift %s %s", container, swift_dir)
+ global swift_upload
+ if swift_upload:
+ # publish results into swift
+ logging.info(
+ "Putting results into swift %s %s", container, swift_dir
+ )
- # create it if it does not exist yet
- swift_con = swiftclient.Connection(**swift_creds)
- try:
- swift_con.get_container(container, limit=1)
- except swiftclient.exceptions.ClientException:
- logging.info("container %s does not exist, creating it", container)
- if private:
- # private result, share only with swiftuser
- swift_con.put_container(
- container,
- headers={"X-Container-Read": "*:%s" % params["swiftuser"]},
- )
- else:
- # make it publicly readable
- swift_con.put_container(
- container, headers={"X-Container-Read": ".rlistings,.r:*"}
+ # create it if it does not exist yet
+ swift_con = swiftclient.Connection(**swift_creds)
+ try:
+ swift_con.get_container(container, limit=1)
+ except swiftclient.exceptions.ClientException:
+ logging.info(
+ "container %s does not exist, creating it", container
)
- # wait until it exists
- timeout = 50
- while timeout > 0:
- try:
- swift_con.get_container(container, limit=1)
- logging.debug(
- "newly created container %s exists now", container
- )
- break
- except swiftclient.exceptions.ClientException:
- logging.debug(
- "newly created container %s does not exist yet, continuing poll",
+ if private:
+ # private result, share only with swiftuser
+ swift_con.put_container(
container,
+ headers={
+ "X-Container-Read": "*:%s" % params["swiftuser"]
+ },
)
- time.sleep(1)
- timeout -= 1
- else:
- logging.error(
- "timed out waiting for newly created container %s",
- container,
- )
- sys.exit(1)
-
- for f in os.listdir(out_dir):
- path = os.path.join(out_dir, f)
- with open(path, "rb") as fd:
- if path.endswith("log.gz"):
- content_type = "text/plain; charset=UTF-8"
- headers = {"Content-Encoding": "gzip"}
else:
- content_type = None
- headers = None
-
- sleep_time = 10
- for retry in reversed(range(5)):
+ # make it publicly readable
+ swift_con.put_container(
+ container,
+ headers={"X-Container-Read": ".rlistings,.r:*"},
+ )
+ # wait until it exists
+ timeout = 50
+ while timeout > 0:
try:
- # swift_con.put_object() is missing the name kwarg
- swiftclient.put_object(
- swift_con.url,
- token=swift_con.token,
- container=container,
- name=os.path.join(swift_dir, f),
- contents=fd,
- content_type=content_type,
- headers=headers,
- content_length=os.path.getsize(path),
+ swift_con.get_container(container, limit=1)
+ logging.debug(
+ "newly created container %s exists now", container
)
break
- except Exception as e:
- if retry > 0:
- logging.info(
- "Failed to upload %s to swift (%s), retrying in %s seconds..."
- % (path, str(e), sleep_time)
- )
- time.sleep(sleep_time)
- sleep_time *= 2
- continue
-
- raise
+ except swiftclient.exceptions.ClientException:
+ logging.debug(
+ "newly created container %s does not exist yet, continuing poll",
+ container,
+ )
+ time.sleep(1)
+ timeout -= 1
+ else:
+ logging.error(
+ "timed out waiting for newly created container %s",
+ container,
+ )
+ sys.exit(1)
+
+ for f in os.listdir(out_dir):
+ path = os.path.join(out_dir, f)
+ with open(path, "rb") as fd:
+ if path.endswith("log.gz"):
+ content_type = "text/plain; charset=UTF-8"
+ headers = {"Content-Encoding": "gzip"}
+ else:
+ content_type = None
+ headers = None
- swift_con.close()
+ sleep_time = 10
+ for retry in reversed(range(5)):
+ try:
+ # swift_con.put_object() is missing the name kwarg
+ swiftclient.put_object(
+ swift_con.url,
+ token=swift_con.token,
+ container=container,
+ name=os.path.join(swift_dir, f),
+ contents=fd,
+ content_type=content_type,
+ headers=headers,
+ content_length=os.path.getsize(path),
+ )
+ break
+ except Exception as e:
+ if retry > 0:
+ logging.info(
+ "Failed to upload %s to swift (%s), retrying in %s seconds..."
+ % (path, str(e), sleep_time)
+ )
+ time.sleep(sleep_time)
+ sleep_time *= 2
+ continue
+
+ raise
+
+ swift_con.close()
finally:
- shutil.rmtree(work_dir, ignore_errors=True)
+ if swift_upload:
+ shutil.rmtree(work_dir, ignore_errors=True)
+ else:
+ logging.info("Keeping results in %s", work_dir)
global amqp_con
complete_amqp = amqp_con.channel()
@@ -1630,6 +1644,8 @@ def amqp_connect(cfg, callback):
"No architectures in configuration, defaulting to %s", my_arch
)
arches = [my_arch]
+ else:
+ logging.info("Loaded architectures from configuration: %s", arches)
# avoid preferring the same architecture on all workers
queues = []
@@ -1655,7 +1671,7 @@ def amqp_connect(cfg, callback):
def main():
"""Main program"""
- global cfg, args, swift_creds
+ global cfg, args, swift_creds, swift_upload
args = parse_args()
@@ -1677,39 +1693,51 @@ def main():
with open(args.config, "r") as f:
cfg.read_file(f)
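+    # INVOCATION_ID is set by systemd for service units, so only use the
+    # journal handler in that case.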
+ handlers = None
+ if "INVOCATION_ID" in os.environ:
+ handlers = [systemd_logging_handler]
logging.basicConfig(
level=(args.debug and logging.DEBUG or logging.INFO),
format="%(levelname)s: %(message)s",
- handlers=[systemd_logging_handler],
+ handlers=handlers,
)
- auth_version = os.environ["SWIFT_AUTH_VERSION"]
-
- if auth_version == "2":
- swift_creds = {
- "authurl": os.environ["SWIFT_AUTH_URL"],
- "user": os.environ["SWIFT_USERNAME"],
- "key": os.environ["SWIFT_PASSWORD"],
- "tenant_name": os.environ["SWIFT_TENANT"],
- "os_options": {"region_name": os.environ["SWIFT_REGION"]},
- "auth_version": os.environ["SWIFT_AUTH_VERSION"],
- }
- else: # 3
- swift_creds = {
- "authurl": os.environ["SWIFT_AUTH_URL"],
- "user": os.environ["SWIFT_USERNAME"],
- "key": os.environ["SWIFT_PASSWORD"],
- "os_options": {
- "region_name": os.environ["SWIFT_REGION"],
- "project_domain_name": os.environ["SWIFT_PROJECT_DOMAIN_NAME"],
- "project_name": os.environ["SWIFT_PROJECT_NAME"],
- "user_domain_name": os.environ["SWIFT_USER_DOMAIN_NAME"],
- },
- "auth_version": auth_version,
- }
+ try:
+ auth_version = os.environ["SWIFT_AUTH_VERSION"]
+ except KeyError:
+ logging.warning(
+ "No SWIFT_AUTH_VERSION detected, disabling swift upload and keeping results locally instead"
+ )
+ swift_upload = False
+
+ if swift_upload:
+ if auth_version == "2":
+ swift_creds = {
+ "authurl": os.environ["SWIFT_AUTH_URL"],
+ "user": os.environ["SWIFT_USERNAME"],
+ "key": os.environ["SWIFT_PASSWORD"],
+ "tenant_name": os.environ["SWIFT_TENANT"],
+ "os_options": {"region_name": os.environ["SWIFT_REGION"]},
+ "auth_version": os.environ["SWIFT_AUTH_VERSION"],
+ }
+ else: # 3
+ swift_creds = {
+ "authurl": os.environ["SWIFT_AUTH_URL"],
+ "user": os.environ["SWIFT_USERNAME"],
+ "key": os.environ["SWIFT_PASSWORD"],
+ "os_options": {
+ "region_name": os.environ["SWIFT_REGION"],
+ "project_domain_name": os.environ[
+ "SWIFT_PROJECT_DOMAIN_NAME"
+ ],
+ "project_name": os.environ["SWIFT_PROJECT_NAME"],
+ "user_domain_name": os.environ["SWIFT_USER_DOMAIN_NAME"],
+ },
+ "auth_version": auth_version,
+ }
- # ensure that we can connect to swift
- swiftclient.Connection(**swift_creds).close()
+ # ensure that we can connect to swift
+ swiftclient.Connection(**swift_creds).close()
# connect to AMQP queues
queue = amqp_connect(cfg, request)
diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker.conf b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker.conf
index 12f818a..16f8a80 100644
--- a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker.conf
+++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/worker/worker.conf
@@ -1,23 +1,26 @@
-[amqp]
-host = 127.0.0.1
-user = guest
-password = guest
-
-[swift]
-# anything commented out will default to $OS_* environment
-region_name = RegionOne
-auth_url = http://127.0.0.1:5000/v2.0/
-username = joe
-tenant = myproject
-password = s3kr1t
-
[autopkgtest]
+# Defaults to empty, which means the first `autopkgtest` found in the $PATH,
+# usually the one installed from the package.
checkout_dir =
-releases = trusty xenial
-architectures = i386 amd64
+# To use a version of `autopkgtest` cloned next to `autopkgtest-cloud`, use the following.
+# checkout_dir = ../../../../../../autopkgtest
+
+# Defaults to the two most recent LTS releases for convenience.
+releases = jammy noble
+
+# Defaults to empty, which means automatic detection of the machine arch.
+architectures =
+# To explicitly use another set of architectures, use the following.
+# architectures = i386 amd64
+
setup_command =
# setup_command2 = $CHECKOUTDIR/setup-commands/setup-testbed
-per_package_config_dir = ~/autopkgtest-package-configs/
+
+# This defaults to `autopkgtest-package-configs` being cloned next to `autopkgtest-cloud`.
+per_package_config_dir = ../../../../../../autopkgtest-package-configs/
+
+# Time between retries when a test fails (the charm sets 300s in production; 5s is more convenient for local development)
+retry_delay = 5
[virt]
# normal packages get $PACKAGESIZE set to this value
@@ -29,4 +32,4 @@ package_size_big = m1.large
# args = ssh -s $CHECKOUTDIR/ssh-setup/nova -- --flavor $PACKAGESIZE --name adt-$RELEASE-$ARCHITECTURE-$PACKAGENAME-$TIMESTAMP-$HOSTNAME --image ubuntu/ubuntu-$RELEASE-.*-$ARCHITECTURE-server --keyname testbedkey
# example args for LXC runner
-# args = lxc --sudo autopkgtest-$RELEASE
+args = lxd autopkgtest/ubuntu/$RELEASE/$ARCHITECTURE
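+# This should match the image alias created by `autopkgtest-build-lxd ubuntu:$RELEASE`
+# (typically autopkgtest/ubuntu/$RELEASE/$ARCHITECTURE).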
diff --git a/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py b/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
index 6afbffc..831b3fa 100644
--- a/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
+++ b/charms/focal/autopkgtest-cloud-worker/reactive/autopkgtest_cloud_worker.py
@@ -592,6 +592,7 @@ def write_worker_config():
"stable_release_percentage": str(
config().get("stable-release-percentage")
),
+ "retry_delay": "300",
},
"virt": {
"args": config().get("worker-args"),