canonical-ubuntu-qa team mailing list archive
-
canonical-ubuntu-qa team
-
Mailing list archive
-
Message #02988
Re: [Merge] ~andersson123/autopkgtest-cloud:sqlite-db-backup into autopkgtest-cloud:master
Diff comments:
> diff --git a/charms/focal/autopkgtest-web/webcontrol/db-backup b/charms/focal/autopkgtest-web/webcontrol/db-backup
> new file mode 100755
> index 0000000..273e153
> --- /dev/null
> +++ b/charms/focal/autopkgtest-web/webcontrol/db-backup
> @@ -0,0 +1,201 @@
> +#!/usr/bin/python3
> +"""
> +This script periodically backs up the sqlite3 db to swift storage
> +and clears up old backups
> +"""
> +
> +import atexit
> +import configparser
> +import datetime
> +import gzip
> +import logging
> +import os
> +import sqlite3
> +import sys
> +from pathlib import Path
> +
> +import swiftclient
> +from helpers.utils import init_db
> +
> +DB_PATH = ""
> +DB_COPY_LOCATION = ""
> +DB_NAME = ""
> +CONTAINER_NAME = "db-backups"
> +MAX_DAYS = 7
> +SQLITE_DUMP_CMD = (
> + 'sqlite3 /home/ubuntu/autopkgtest.db ".backup autopkgtest.db.bak"'
> +)
> +DB_BACKUP_NAME = ""
> +DB_BACKUP_PATH = ""
> +
> +
def db_connect() -> sqlite3.Connection:
    """
    Read the autopkgtest-cloud config, derive the backup filenames from
    the configured database location, and return a connection to the db.

    Side effect: populates the module globals DB_PATH, DB_NAME,
    DB_BACKUP_NAME and DB_BACKUP_PATH for use by the other functions.
    """
    global DB_PATH
    global DB_NAME
    global DB_BACKUP_NAME
    global DB_BACKUP_PATH
    # NOTE(review): this mirrors how ConfigParser is used everywhere
    # else in autopkgtest-cloud; if that pattern changes, change it
    # here too.
    config = configparser.ConfigParser()
    config.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
    database = config["web"]["database"]

    DB_PATH = Path(database)
    DB_NAME = DB_PATH.name
    DB_BACKUP_NAME = "%s.bak" % DB_NAME
    # The working copy lives under /tmp until it has been uploaded.
    DB_BACKUP_PATH = Path("/tmp") / DB_BACKUP_NAME

    return init_db(database)
> +
> +
def backup_db(db_con: sqlite3.Connection, backup_path=None):
    """
    Write a consistent snapshot of the live database to backup_path
    using sqlite3's online backup API.

    :param db_con: open connection to the database being backed up.
    :param backup_path: destination file; defaults to the module-level
        DB_BACKUP_PATH (set by db_connect()).
    """
    if backup_path is None:
        backup_path = DB_BACKUP_PATH
    db_backup_con = sqlite3.connect(backup_path)
    try:
        # pages=1 copies a single page per step, letting other
        # connections interleave with the backup instead of locking
        # the source db for the whole copy.
        with db_backup_con:
            db_con.backup(db_backup_con, pages=1)
    finally:
        # Close the destination even if backup() raises, so we don't
        # leak the connection (the original leaked it on error).
        db_backup_con.close()
> +
> +
def compress_db(path=None):
    """
    gzip-compress the database backup at *path*, producing "<path>.gz"
    alongside it.

    :param path: file to compress; defaults to the module-level
        DB_BACKUP_PATH (set by db_connect()).
    """
    if path is None:
        path = DB_BACKUP_PATH
    with open(path, "rb") as f_in, gzip.open(
        "%s.gz" % path, "wb"
    ) as f_out:
        # copyfileobj streams in fixed-size buffers. The previous
        # writelines(f_in) iterated a *binary* sqlite file by b"\n"
        # chunks, which can pull arbitrarily large spans into memory.
        shutil.copyfileobj(f_in, f_out)
> +
> +
def init_swift_con() -> swiftclient.Connection:
    """
    Build a keystone-v3-authenticated connection to swift storage from
    the standard OS_* environment variables.
    """
    env = os.environ
    os_options = {
        "region_name": env["OS_REGION_NAME"],
        "project_domain_name": env["OS_PROJECT_DOMAIN_NAME"],
        "project_name": env["OS_PROJECT_NAME"],
        "user_domain_name": env["OS_USER_DOMAIN_NAME"],
    }
    return swiftclient.Connection(
        authurl=env["OS_AUTH_URL"],
        user=env["OS_USERNAME"],
        key=env["OS_PASSWORD"],
        os_options=os_options,
        auth_version=3,
    )
> +
> +
def create_container_if_it_doesnt_exist(swift_conn: swiftclient.Connection):
    """
    Ensure the db-backups container exists, creating it on demand.
    """
    try:
        # EAFP: probe for the container and create it only if the
        # lookup fails.
        swift_conn.get_container(CONTAINER_NAME)
    except swiftclient.exceptions.ClientException:
        swift_conn.put_container(CONTAINER_NAME)
> +
> +
def upload_backup_to_db(
    swift_conn: swiftclient.Connection,
) -> swiftclient.Connection:
    """
    Upload the compressed database backup to the db-backups container
    under a UTC-timestamped object path.

    :param swift_conn: established swift connection.
    :returns: the swift connection, which may have been re-established
        if uploads had to be retried.
    """
    # Use UTC explicitly so the object path agrees with swift's UTC
    # last_modified used by delete_old_backups, regardless of the
    # host's timezone (currently the host is UTC, but don't rely on it).
    now = datetime.datetime.now(datetime.timezone.utc).strftime(
        "%Y/%m/%d/%H_%M_%S"
    )
    object_path = "%s/%s" % (now, DB_PATH.name + ".gz")
    compressed = "%s.gz" % DB_BACKUP_PATH
    # Connectivity to swift is occasionally lost entirely, and the
    # client's internal retry mechanism doesn't re-establish the
    # connection -- hence this reconnect-and-retry loop.
    for _ in range(5):
        try:
            # Pass an open file object so the *contents* of the backup
            # are uploaded. Passing the path string (as the original
            # did) uploads the literal path text as the object body.
            with open(compressed, "rb") as backup_file:
                swift_conn.put_object(
                    CONTAINER_NAME,
                    object_path,
                    backup_file,
                    content_type="text/plain; charset=UTF-8",
                    headers={"Content-Encoding": "gzip"},
                )
            break
        except swiftclient.exceptions.ClientException as e:
            logging.info("exception: %s" % str(e))
            swift_conn = init_swift_con()
    else:
        # Don't fail silently when all five attempts are exhausted.
        logging.error("failed to upload %s after 5 attempts", object_path)
    return swift_conn
> +
> +
def delete_old_backups(
    swift_conn: swiftclient.Connection,
) -> swiftclient.Connection:
    """
    Delete objects in the db-backups container older than MAX_DAYS days.

    :param swift_conn: established swift connection.
    :returns: the swift connection, which may have been re-established
        if deletions had to be retried.
    """
    logging.info("Removing old db backups...")
    _, objects = swift_conn.get_container(CONTAINER_NAME)
    # Swift reports last_modified in UTC, so compare against a UTC
    # "now"; a naive local datetime would skew the cutoff by the host's
    # UTC offset.
    now = datetime.datetime.now(datetime.timezone.utc)

    for obj in objects:
        # e.g. "2024-01-31T04:00:00.123456" -> drop fractional seconds.
        last_modified = obj["last_modified"].split(".")[0]
        timestamp = datetime.datetime.strptime(
            last_modified, "%Y-%m-%dT%H:%M:%S"
        ).replace(tzinfo=datetime.timezone.utc)
        if now - timestamp > datetime.timedelta(days=MAX_DAYS):
            logging.info("Deleting %s" % obj["name"])
            # Reconnect-and-retry: swift connectivity can drop entirely
            # and the client's built-in retries won't re-establish it.
            for _ in range(5):
                try:
                    swift_conn.delete_object(CONTAINER_NAME, obj["name"])
                    break
                except swiftclient.exceptions.ClientException:
                    swift_conn = init_swift_con()
            else:
                # Surface exhausted retries instead of moving on mutely.
                logging.error("failed to delete %s", obj["name"])
    return swift_conn
> +
> +
def cleanup(path=None):
    """
    Delete the backup copy of the db and its gzip from /tmp, tolerating
    files that are already gone.

    :param path: backup file to remove (along with "<path>.gz");
        defaults to the module-level DB_BACKUP_PATH.
    """
    # Bug fix: this previously checked DB_COPY_LOCATION, which is never
    # assigned anywhere in the script (it stays ""), so the temporary
    # files in /tmp were never actually removed.
    if path is None:
        path = DB_BACKUP_PATH
    for candidate in (str(path), "%s.gz" % path):
        if os.path.isfile(candidate):
            os.remove(candidate)
> +
> +
if __name__ == "__main__":
    logging.basicConfig(level="INFO")
    # Only the leader unit runs backups, to avoid duplicate uploads
    # from every web unit.
    if not os.path.isfile("/run/autopkgtest-web-is-leader"):
        logging.info("unit is not leader, exiting...")
        sys.exit(0)
    # connect to db (this also derives DB_BACKUP_PATH from the config)
    logging.info("Connecting to db")
    db_con = db_connect()
    # Register cleanup as soon as the backup paths are known, so a
    # failure during backup or compression still removes any partial
    # files left in /tmp. (Previously this was registered only *after*
    # compression succeeded, defeating its stated purpose.)
    logging.info("Registering cleanup function")
    atexit.register(cleanup)
    # backup the db
    logging.info("Creating a backup of the db...")
    backup_db(db_con)
    # compress it
    logging.info("Compressing db")
    compress_db()
    # initialise swift conn
    logging.info("Setting up swift connection")
    swift_conn = init_swift_con()
    # create container if it doesn't exist
    create_container_if_it_doesnt_exist(swift_conn)
    # upload to swift container
    logging.info("Uploading db to swift!")
    swift_conn = upload_backup_to_db(swift_conn)
    # Remove old results
    logging.info("Pruning old database backups")
    swift_conn = delete_old_backups(swift_conn)
    # run cleanup now; the atexit hook is a no-op on missing files
    cleanup()
--
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/460043
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~andersson123/autopkgtest-cloud:sqlite-db-backup into autopkgtest-cloud:master.
References