[Merge] ~alexsander-souza/maas-ci/+git/maas-ci-internal:use_proxy into maas-ci:maas-submodules-sync
Alexsander de Souza has proposed merging ~alexsander-souza/maas-ci/+git/maas-ci-internal:use_proxy into maas-ci:maas-submodules-sync.
Commit message:
keep proxy settings
we added 'ps5.canonical.com' to no_proxy in the jenkins node
Requested reviews:
MAAS Committers (maas-committers)
For more details, see:
https://code.launchpad.net/~alexsander-souza/maas-ci/+git/maas-ci-internal/+merge/443825
--
Your team MAAS Committers is requested to review the proposed merge of ~alexsander-souza/maas-ci/+git/maas-ci-internal:use_proxy into maas-ci:maas-submodules-sync.
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9b58760
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,5 @@
+.ve
+.vscode
+jenkins_jobs.ini
+jenkins-jobs
+output*
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..18e89d6
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,35 @@
+ARG SERIES=bionic
+FROM ubuntu:${SERIES}
+
+ARG CACHE_BUST
+ARG MAKE_TARGET="install-dependencies"
+
+ENV MAKE_TARGET=${MAKE_TARGET}
+ENV DEBIAN_FRONTEND=noninteractive
+
+WORKDIR /work
+
+ADD checkout checkout
+RUN set -ex ;\
+ if [ ! -z $http_proxy ]; then \
+ echo "Acquire::https::proxy \"$http_proxy\";" > /etc/apt/apt.conf.d/maas-proxy ;\
+ echo "Acquire::http::proxy \"$http_proxy\";" >> /etc/apt/apt.conf.d/maas-proxy ;\
+ fi
+
+RUN echo ${CACHE_BUST} > /root/.cache_buster
+
+RUN set -ex ;\
+ apt-get -q update ;\
+ apt-get -q -y upgrade ;\
+ apt-get -q -y install \
+ git \
+ make \
+ python3-launchpadlib \
+ python3-yaml \
+ socat \
+ sudo \
+ wget
+
+RUN set -ex ;\
+ make -C /work/checkout ${MAKE_TARGET} ;\
+ rm -rf /work/checkout
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..beaf66e
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,40 @@
+VIRTUALENV := .ve
+CHECK_OUTPUT = output-check
+
+.DEFAULT_GOAL := setup
+
+install-dependencies:
+ sudo apt-get -y install python3-venv
+
+clean:
+ rm -rf $(VIRTUALENV) $(CHECK_OUTPUT)
+
+setup: py-dep jenkins-jobs
+
+check: setup
+ rm -rf $(CHECK_OUTPUT)
+ ./jenkins-jobs test -r --config-xml -o $(CHECK_OUTPUT) jobs/
+
+py-dep: $(VIRTUALENV)
+ $(VIRTUALENV)/bin/pip install -r requirements.txt
+
+jenkins-jobs: $(VIRTUALENV)/.dev-bin
+ ln -fs $(VIRTUALENV)/bin/jenkins-jobs
+
+
+$(VIRTUALENV):
+ python3 -m venv --clear $(VIRTUALENV)
+
+$(VIRTUALENV)/.dev-bin:
+ $(MAKE) py-dep
+ touch $@
+
+py-freeze: VENV := .freeze_ve
+py-freeze: PIP := $(VENV)/bin/pip
+py-freeze:
+ rm -rf $(VENV)
+ python3 -m venv $(VENV)
+ $(PIP) install jenkins-job-builder
+ $(PIP) freeze | grep -E -v '^pkg-resources' > requirements.txt
+ rm -rf $(VENV)
+.PHONY: py-freeze
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..3ae74a5
--- /dev/null
+++ b/README.md
@@ -0,0 +1,102 @@
+# Job definitions for http://maas-ci.internal:8080/
+
+Note that the slaves need to have the following environment variables
+configured:
+
+ HTTP_PROXY=http://squid.internal:3128/
+ HTTPS_PROXY=http://squid.internal:3128/
+
+## Setting up Jenkins Job Builder
+
+
+1. Run `make install-dependencies setup` to install it in a virtualenv
+
+
+2. Create a configuration file under `~/.config/jenkins_jobs/jenkins_jobs.ini`
+ with a config similar to the following:
+
+```
+[job_builder]
+recursive=True
+
+[jenkins]
+url=http://maas-ci.internal:8080
+user=<USERNAME>
+password=<PASSWORD>
+```
+
+You can create an [API token](http://maas-ci.internal:8080/user/$username/configure) and use it as a password.
+
+
+3. Test generating definitions
+
+```
+./jenkins-jobs test -o outdir/ $JOB_DEF_FILE:launchpad-ci/launchpad-ci.yaml
+```
+
+`JOB_DEF_FILE` is the name of the job definition file you want to test.
+
+To test all jobs, run `make check` instead.
+
+
+4. Update a job in Jenkins:
+
+```
+./jenkins-jobs update $JOB_DEF_FILE:launchpad-ci/launchpad-ci.yaml
+```
+
+To update all jobs, run `./jenkins-jobs update ./` instead.
+
+5. Once all jobs are updated, you need to approve them [here](http://maas-ci.internal:8080/scriptApproval)
+
+
+## Jobs setup
+
+Next we need to create the jobs that will review and land our merge
+proposals. There are three jobs that need to be created: a reviewer, a lander,
+and a tester. They are defined in a job group, so all you need to do to add CI
+for your project is to create a YAML file (e.g. `jobs/myproject-ci.yaml`) that looks
+like this:
+
+```yaml
+- project:
+ name: myproject
+ repo_lp_path: ~myteam/myproject/+git/myrepo # or ~myteam/myproject
+ repo_lp_branch: main # optional, defaults to 'master'
+ ubuntu_series: '22.04'
+ deps_command: 'make dep'
+ build_command: 'make build'
+ lint_command: 'make lint'
+ test_command: 'make test'
+ jobs:
+ - '{name}-launchpad-ci'
+```
+
+Now if you run the following command, it will create the jobs:
+
+```
+jenkins-jobs update jobs/myproject-ci.yaml:launchpad-ci/launchpad-ci.yaml
+```
+
+Three jobs should have been created: `myproject-lander`, `myproject-reviewer`,
+and `myproject-tester`. The lander and reviewer are configured to run every two
+minutes; they pick up merge proposals that need to be reviewed or landed and
+push them to the tester job to be tested.
+
+As for slaves, the only special requirement is that the docker.io package is
+installed on them, so that Docker containers can be run.
+
+If the slaves need to use proxies, they should be defined as environment
+variables. `HTTP_PROXY` and `HTTPS_PROXY` will be passed through to all the
+commands, and will also be used as the apt proxy.
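+
+As an illustrative sketch only (the proxy address mirrors the example at the top
+of this README; the exact hosts in `NO_PROXY` depend on your environment), the
+variables a slave needs would look like this when exported in a shell:
+
+```bash
+# Illustrative only: make the proxy visible to build commands and to apt
+export HTTP_PROXY=http://squid.internal:3128/
+export HTTPS_PROXY=http://squid.internal:3128/
+# Hosts that must be reached directly, bypassing the proxy
+export NO_PROXY=localhost,127.0.0.1,maas-ci.internal
+```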
+
+
+## Credentials for snap upload/publishing
+
+For the snap jobs to work, the `crbs-snap-edge-upload` credential must be available in Jenkins; it should contain an exported snapcraft login (a macaroon that allows viewing, uploading and releasing the snap).
+
+This can be created with:
+
+ snapcraft export-login --snaps canonical-rbac --expires YYYY-MM-DDTHH:MM:SS --acls package_access,package_upload,package_release --channels=edge* canonical-rbac.login
+
+and it should then be uploaded to Jenkins to replace the current credentials when they expire.
diff --git a/docker-registry.md b/docker-registry.md
new file mode 100644
index 0000000..bbd3953
--- /dev/null
+++ b/docker-registry.md
@@ -0,0 +1,86 @@
+# Running a local Docker registry
+
+## Pre-requisites
+
+* Ubuntu host
+* A recent Docker install (tested on 20.10.2)
+* OpenSSL
+
+## Preparing the host
+
+Create a directory to hold the registry data
+
+```bash
+export REGISTRY_HOME=/var/lib/docker-registry
+export HOSTNAME=maas-ci.internal
+export PASSWD=$(openssl rand -base64 32)
+
+sudo mkdir -p ${REGISTRY_HOME}/data ${REGISTRY_HOME}/certs ${REGISTRY_HOME}/auth
+```
+
+Create self-signed certs
+
+```bash
+cd ${REGISTRY_HOME}
+
+# use HOSTNAME as CN in the certificate
+sudo openssl req \
+ -newkey rsa:4096 -nodes -sha256 -keyout certs/registry.key \
+ -addext "subjectAltName = DNS:${HOSTNAME}" \
+ -x509 -days 365 -out certs/registry.crt
+
+# enable the use of this certificate by docker
+sudo mkdir -p /etc/docker/certs.d/${HOSTNAME}\:5443
+sudo cp certs/registry.crt /etc/docker/certs.d/${HOSTNAME}\:5443/ca.crt
+
+# enable this for the host
+sudo cp certs/registry.crt /usr/local/share/ca-certificates/docker-registry.crt
+sudo update-ca-certificates
+```
+
+## Create users
+
+```bash
+docker run \
+ --entrypoint htpasswd \
+ httpd:2 -Bbn maasci ${PASSWD} | sudo tee ${REGISTRY_HOME}/auth/htpasswd
+```
+
+## Start docker registry
+
+```bash
+docker run -d \
+ --restart=always \
+ --name registry \
+ -v "${REGISTRY_HOME}"/auth:/auth \
+ -v "${REGISTRY_HOME}"/certs:/certs \
+ -v "${REGISTRY_HOME}"/data:/var/lib/registry \
+ -e "REGISTRY_AUTH=htpasswd" \
+ -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" \
+ -e "REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd" \
+ -e "REGISTRY_HTTP_TLS_CERTIFICATE=/certs/registry.crt" \
+ -e "REGISTRY_HTTP_TLS_KEY=/certs/registry.key" \
+ -e "REGISTRY_HTTP_ADDR=0.0.0.0:5443" \
+ -e "REGISTRY_HTTP_HOST=https://${HOSTNAME}:5443" \
+ -p 5443:5443 \
+ registry:2
+```
+
+## Testing the local registry
+
+```bash
+# download image from docker hub
+docker pull ubuntu:20.04
+
+docker login -u maasci -p ${PASSWD} "${HOSTNAME}":5443
+
+# tag and push to local registry
+docker tag ubuntu:20.04 "${HOSTNAME}":5443/ubuntu:my-tag
+docker push "${HOSTNAME}":5443/ubuntu:my-tag
+```
+
+## Troubleshooting
+
+### HTTP proxy issues
+
+Remember to include `HOSTNAME` as an exception in `/etc/systemd/system/docker.service.d/no-proxy.conf`
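+
+A minimal sketch of such a drop-in, assuming the registry hostname used in this
+guide (the host list is illustrative; adjust it for your environment):
+
+```bash
+# Illustrative only: exempt the registry host from Docker's HTTP proxy
+sudo mkdir -p /etc/systemd/system/docker.service.d
+cat <<'EOF' | sudo tee /etc/systemd/system/docker.service.d/no-proxy.conf
+[Service]
+Environment="NO_PROXY=localhost,127.0.0.1,maas-ci.internal"
+EOF
+sudo systemctl daemon-reload
+sudo systemctl restart docker
+```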
diff --git a/jobs/build-env-maas-site-manager.groovy b/jobs/build-env-maas-site-manager.groovy
new file mode 100644
index 0000000..c7d6577
--- /dev/null
+++ b/jobs/build-env-maas-site-manager.groovy
@@ -0,0 +1,45 @@
+pipeline {
+ agent {
+ node {
+ label 'maas-tester'
+ }
+ }
+ stages {
+ stage('Checkout') {
+ steps {
+ withCredentials([sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -l $SSHUSER"
+ env.targetShortHash = sh(returnStdout: true, script: "git ls-remote git+ssh://${LP_REPOS} -b ${LP_BRANCH} | cut -f1").trim()
+ }
+ sh '''
+ rm -rf ci-utils launchpad-ci maas-site-manager maas_target_os_release
+ git clone git+ssh://git.launchpad.net/{{ git_ci_repos }} --single-branch --branch {{ git_ci_branch }} --recurse-submodules --depth 1 ci-utils
+ if [ "${SERIES}" != "auto" ]; then
+ echo ${SERIES} > maas_target_os_release
+ else
+ ci-utils/jobs/launchpad-ci/repo-ubuntu-release ${LP_REPOS} ${LP_BRANCH} > maas_target_os_release
+ fi
+ '''
+ }
+ script {
+ def shortHash = sh(returnStdout: true, script: "git -C ci-utils rev-parse --short HEAD").trim()
+ currentBuild.description = "Build environment: maas-ci-utils ${shortHash} - maas ${targetShortHash}"
+ }
+ }
+ }
+ stage('Build image') {
+ steps {
+ checkout changelog: false, poll: false, scm: scmGit(branches: [[name: "${LP_BRANCH}"]], extensions: [cloneOption(depth: 1, honorRefspec: true, noTags: true, reference: '', shallow: true), [$class: 'RelativeTargetDirectory', relativeTargetDir: 'checkout']], userRemoteConfigs: [[credentialsId: 'launchpad-ci-ssh-key', url: "git+ssh://${LP_REPOS}"]])
+ script {
+ def os_release = sh(script:'cat maas_target_os_release', returnStdout: true).trim()
+ def os_series = sh(script:"awk -F, \"/\$(echo ${os_release} | sed -e's/./\\./')/{ print \$3 }\" /usr/share/distro-info/ubuntu.csv", returnStdout: true).trim()
+ docker.withRegistry("{{ docker_registry }}", "{{ docker_registry_cred }}") {
+ def img = docker.build("build-env-{{ target }}-{{ series }}:${os_release}", "--build-arg 'SERIES=${os_release}' --build-arg CACHE_BUST='${BUILD_ID}' -f ci-utils/Dockerfile .")
+ img.push()
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/build-env-maas-site-manager.yaml b/jobs/build-env-maas-site-manager.yaml
new file mode 100644
index 0000000..a433224
--- /dev/null
+++ b/jobs/build-env-maas-site-manager.yaml
@@ -0,0 +1,47 @@
+- project:
+ name: build-env-site-manager
+ target: maas-site-manager
+ lp-repos: "git.launchpad.net/maas-site-manager"
+ ubuntu: "22.10"
+ series:
+ - main:
+ build-freq: 'H H(0-2) * * *'
+ jobs:
+ - "build-env-maas-site-manager-{series}-{ubuntu}":
+ lp-branch: "{series}"
+ views:
+ - "build-env-maas-site-manager"
+
+- view-template:
+ name: "build-env-maas-site-manager"
+ description: "Build env builder for MAAS Site Manager"
+ view-type: list
+ regex: "build-env-maas-site-manager-.*"
+
+- job-template:
+ name: "build-env-maas-site-manager-{series}-{ubuntu}"
+ build-freq: 'H H * * 6'
+ git_ci_repos: ~maas-committers/maas-ci/+git/maas-ci-internal
+ git_ci_branch: main
+ lp-branch: "main"
+ parameters:
+ - string:
+ description: "Source repos"
+ name: "LP_REPOS"
+ default: "{lp-repos}"
+ - string:
+ description: "Source branch"
+ name: "LP_BRANCH"
+ default: "{lp-branch}"
+ - string:
+ description: |
+ Ubuntu series to run tests on.
+ If set to 'auto', defaults to the target series for the branch.
+ name: "SERIES"
+ default: "{ubuntu}"
+ triggers:
+ - timed: "{build-freq}"
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 5
+ dsl: !include-jinja2: build-env-maas-site-manager.groovy
diff --git a/jobs/build-env-maas.groovy b/jobs/build-env-maas.groovy
new file mode 100644
index 0000000..1cbabcb
--- /dev/null
+++ b/jobs/build-env-maas.groovy
@@ -0,0 +1,49 @@
+pipeline {
+ agent {
+ node {
+ label 'maas-tester'
+ }
+ }
+ stages {
+ stage('Checkout') {
+ steps {
+ withCredentials([sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -l $SSHUSER"
+ }
+ sh '''
+ rm -rf ci-utils launchpad-ci maas maas_target_os_release
+ git clone git+ssh://git.launchpad.net/{{ git_ci_repos }} --single-branch --branch main --recurse-submodules --depth 1 ci-utils
+ if [ "${SERIES}" != "auto" ]; then
+ echo ${SERIES} > maas_target_os_release
+ else
+ ci-utils/jobs/launchpad-ci/repo-ubuntu-release ${LP_REPOS} ${LP_BRANCH} > maas_target_os_release
+ fi
+ '''
+ }
+ script {
+ def shortHash = sh(returnStdout: true, script: "git -C ci-utils rev-parse --short HEAD").trim()
+ def maasShortHash = sh(returnStdout: true, script: "git ls-remote git://${LP_REPOS} -b ${LP_BRANCH} | cut -f1").trim()
+ currentBuild.description = "Build environment: maas-ci-utils ${shortHash} - maas ${maasShortHash}"
+ }
+ }
+ }
+ stage('Build image') {
+ steps {
+ checkout changelog: false, poll: false, scm: scmGit(branches: [[name: "${LP_BRANCH}"]], extensions: [cloneOption(depth: 1, honorRefspec: true, noTags: true, reference: '', shallow: true), [$class: 'RelativeTargetDirectory', relativeTargetDir: 'checkout']], userRemoteConfigs: [[credentialsId: 'launchpad-ci-ssh-key', url: "git://${LP_REPOS}"]])
+ script {
+ def os_release = sh(script:'cat maas_target_os_release', returnStdout: true).trim()
+ def os_series = sh(script:"sed -n 's/^${os_release}[^,]\\+,[^,]\\+,\\([^,]\\+\\),.*/\\1/p' /usr/share/distro-info/ubuntu.csv", returnStdout: true).trim()
+
+ docker.withRegistry("{{ docker_registry }}", "{{ docker_registry_cred }}") {
+ def img = docker.build("build-env-{{ target }}-{{ series }}:${os_release}", "--build-arg 'SERIES=${os_release}' --build-arg CACHE_BUST='${BUILD_ID}' -f ci-utils/Dockerfile .")
+ img.push()
+ if (os_series) {
+ img.push("${os_series}")
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/build-env-maas.yaml b/jobs/build-env-maas.yaml
new file mode 100644
index 0000000..50613c6
--- /dev/null
+++ b/jobs/build-env-maas.yaml
@@ -0,0 +1,62 @@
+- project:
+ name: build-env-maas
+ target: maas
+ lp-repos: "git.launchpad.net/maas"
+ ubuntu: "auto"
+ series:
+ - master:
+ build-freq: 'H H(0-2) * * *'
+ - 3.3
+ - 3.2
+ - 3.1
+ - 3.0
+ - 2.9
+ jobs:
+ - "build-env-{target}-{series}-{ubuntu}":
+ lp-branch: "{series}"
+ views:
+ - "build-env-{target}"
+
+- project:
+ name: build-env-maas-next
+ target: maas
+ lp-repos: "git.launchpad.net/maas"
+ ubuntu: "23.10"
+ series:
+ - master
+ jobs:
+ - "build-env-{target}-{series}-{ubuntu}":
+ lp-branch: "{series}"
+
+- view-template:
+ name: "build-env-{target}"
+ description: "Build env builder"
+ view-type: list
+ regex: "build-env-{target}-.*"
+
+- job-template:
+ name: "build-env-{target}-{series}-{ubuntu}"
+ build-freq: 'H H * * 6'
+ git_ci_repos: ~maas-committers/maas-ci/+git/maas-ci-internal
+ lp-branch: "master"
+ parameters:
+ - string:
+ description: "Source repos"
+ name: "LP_REPOS"
+ default: "{lp-repos}"
+ - string:
+ description: "Source branch"
+ name: "LP_BRANCH"
+ default: "{lp-branch}"
+ - string:
+ description: |
+ Ubuntu series to run tests on.
+ If set to 'auto', defaults to the target series for the branch.
+ name: "SERIES"
+ default: "{ubuntu}"
+ triggers:
+ - timed: "{build-freq}"
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 5
+ dsl: !include-jinja2: build-env-maas.groovy
diff --git a/jobs/crbs-ci.yaml b/jobs/crbs-ci.yaml
new file mode 100644
index 0000000..64cf2e7
--- /dev/null
+++ b/jobs/crbs-ci.yaml
@@ -0,0 +1,10 @@
+- project:
+ name: crbs
+ repo_lp_path: ~crbs/crbs
+ ubuntu_series: '20.04'
+ deps_command: 'make ci-dep'
+ build_command: 'make ci-build'
+ lint_command: 'make ci-lint'
+ test_command: 'make ci-test'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/crbs-deb.yaml b/jobs/crbs-deb.yaml
new file mode 100644
index 0000000..dabc032
--- /dev/null
+++ b/jobs/crbs-deb.yaml
@@ -0,0 +1,106 @@
+- job:
+ name: 'crbs-deb-builder'
+ parameters:
+ - string:
+ name: SNAP_REVISION
+ description: Revision of the snap to build the .deb package for
+ - string:
+ name: PACKAGE_BUILD_REVISION
+ description: Build revision for the .deb package
+ default: "1"
+ - string:
+ name: TARGET_RELEASES
+ description: Target ubuntu releases
+ default: bionic focal
+ - string:
+ name: UPLOAD_PPA
+ description: |
+ The PPA to upload the package to. If not specified, files will be
+ only archived as artifacts
+ logrotate:
+ artifactNumsToKeep: 20
+ dsl: |
+ pipeline {
+ agent {
+ label 'snap-builder-local'
+ }
+ stages {
+ stage('Checkout') {
+ steps {
+ withCredentials([sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -l $SSHUSER"
+ if (env.GIT_SSH_COMMAND_EXTRA) {
+ env.GIT_SSH_COMMAND += " ${env.GIT_SSH_COMMAND_EXTRA}"
+ }
+ }
+ sh '''
+ rm -rf crbs-deb
+ git clone git+ssh://git.launchpad.net/~crbs/crbs/+git/crbs-deb
+ '''
+ script {
+ currentBuild.description = "Deb package for snap revision ${SNAP_REVISION}"
+ }
+ }
+ }
+ }
+ stage('Deps') {
+ steps {
+ sh '''
+ sudo apt install --yes dpkg-dev devscripts debhelper
+ '''
+ }
+ }
+ stage('Download snap') {
+ steps {
+ withCredentials([
+ file(credentialsId: 'crbs-snap-edge-upload', variable: 'SNAPCRAFT_CREDS_FILE')
+ ]) {
+ // this needs to be run via script because snapcraft doesn't
+ // work well when a TTY isn't available.
+ sh '''
+ cd crbs-deb
+ script --return -c "./download-snap -l $SNAPCRAFT_CREDS_FILE $SNAP_REVISION" /dev/null
+ '''
+ }
+ }
+ }
+ stage('Build source packages') {
+ steps {
+ withCredentials([
+ file(credentialsId: 'maas-lander-gpg-key', variable: 'GPG_KEY_FILE')
+ ]) {
+ sh '''
+ export GNUPGHOME="$(mktemp -d)"
+ gpg --import "$GPG_KEY_FILE"
+
+ export DEBEMAIL="MAAS Lander <maas-lander@xxxxxxxxxxxxx>"
+ cd crbs-deb
+ for ubuntu_release in $TARGET_RELEASES; do
+ ./build-package -s -S -d packages/source -P "$PACKAGE_BUILD_REVISION" "$SNAP_REVISION" "$ubuntu_release"
+ done
+
+ if [ -n "$UPLOAD_PPA" ]; then
+ dput "$UPLOAD_PPA" packages/source/*.changes
+ fi
+
+ pkill -f "gpg-agent --homedir $GNUPGHOME"
+ rm -rf "$GNUPGHOME"
+ '''
+ }
+ archiveArtifacts artifacts: 'crbs-deb/packages/source/*'
+ }
+ }
+ stage('Build binary packages') {
+ steps {
+ sh '''
+ cd crbs-deb
+ for ubuntu_release in $TARGET_RELEASES; do
+ ./build-package -d packages/binary -P "$PACKAGE_BUILD_REVISION" "$SNAP_REVISION" "$ubuntu_release"
+ done
+ '''
+ archiveArtifacts artifacts: 'crbs-deb/packages/binary/*'
+ }
+ }
+ }
+ }
diff --git a/jobs/crbs-snap.yaml b/jobs/crbs-snap.yaml
new file mode 100644
index 0000000..33aad45
--- /dev/null
+++ b/jobs/crbs-snap.yaml
@@ -0,0 +1,141 @@
+- job:
+ name: 'crbs-snap-builder'
+ project-type: pipeline
+ parameters:
+ - string:
+ name: LP_GIT_REPO
+ default: crbs
+ description: 'LP path of git repo to build the snap for'
+ - string:
+ name: GIT_BRANCH
+ default: master
+ description: 'Git branch name to build the snap for'
+ - string:
+ name: GIT_REVISION
+ default: HEAD
+ description: 'Git revision to build the snap for'
+ logrotate:
+ artifactNumsToKeep: 20
+ dsl: |
+ pipeline {
+ agent {
+ label 'snap-builder'
+ }
+ stages {
+ stage('Checkout') {
+ steps {
+ sh "rm -rf crbs-checkout"
+ withCredentials([sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -l $SSHUSER"
+ if (env.GIT_SSH_COMMAND_EXTRA) {
+ env.GIT_SSH_COMMAND += " ${env.GIT_SSH_COMMAND_EXTRA}"
+ }
+ }
+ sh '''
+ git clone --recurse-submodules git+ssh://git.launchpad.net/${LP_GIT_REPO} crbs-checkout
+ cd crbs-checkout
+ if [ "${GIT_REVISION}" = "HEAD" ]; then
+ git checkout origin/${GIT_BRANCH}
+ else
+ git checkout ${GIT_REVISION}
+ fi
+ git submodule foreach git reset --hard HEAD
+ '''
+ script {
+ def shortHash = sh(returnStdout: true, script: "cd crbs-checkout && git rev-parse --short HEAD").trim()
+ currentBuild.description = "${LP_GIT_REPO}/${GIT_BRANCH} - ${shortHash}"
+ }
+ }
+ }
+ }
+ stage('Snap') {
+ steps {
+ withCredentials([
+ sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER'),
+ file(credentialsId: 'crbs-snap-edge-upload', variable: 'SNAPCRAFT_CREDS_FILE')
+ ]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -l $SSHUSER"
+ if (env.GIT_SSH_COMMAND_EXTRA) {
+ env.GIT_SSH_COMMAND += " ${env.GIT_SSH_COMMAND_EXTRA}"
+ }
+ }
+
+ // this needs to be run via script because snapcraft doesn't
+ // work well when a TTY isn't available.
+ sh 'script --return -qc "make -C crbs-checkout snap-clean snap" /dev/null'
+ archiveArtifacts artifacts: 'crbs-checkout/*.snap'
+ sh '''
+ cd crbs-checkout
+ GIT_HASH=$(git rev-parse --short HEAD)
+ script --return -qc " \
+ PATH="$PATH:/snap/bin"; \
+ snapcraft login --with $SNAPCRAFT_CREDS_FILE; \
+ snapcraft upload *.snap --release edge/ci-${GIT_HASH}; \
+ " /dev/null
+ '''
+ }
+ }
+ }
+ }
+ }
+
+
+- job:
+ name: 'crbs-snap-master'
+ project-type: pipeline
+ triggers:
+ - pollscm:
+ cron: "*/1 * * * *"
+ dsl: |
+ pipeline {
+ agent any
+ options {
+ disableConcurrentBuilds()
+ }
+ environment {
+ lp_git_repo = 'crbs'
+ git_branch = 'master'
+ shortHash = null
+ upload_ppa = 'ppa:crbs/edge'
+ }
+ stages {
+ stage('Setup') {
+ steps {
+ git poll: true, branch:git_branch, credentialsId: 'launchpad-ci-ssh-key', url: 'git+ssh://git.launchpad.net/' + lp_git_repo
+ script {
+ shortHash = sh(returnStdout: true, script: "git rev-parse --short HEAD").trim()
+ currentBuild.description = shortHash
+ }
+ }
+ }
+ stage('Build snap') {
+ steps {
+ withCredentials([file(credentialsId: 'crbs-snap-edge-upload', variable: 'SNAPCRAFT_CREDS_FILE')]) {
+ script {
+ build job:'crbs-snap-builder', parameters: [
+ [$class: 'StringParameterValue', name: 'LP_GIT_REPO', value: lp_git_repo],
+ [$class: 'StringParameterValue', name: 'GIT_BRANCH', value: git_branch],
+ [$class: 'StringParameterValue', name: 'GIT_REVISION', value: shortHash]
+ ]
+ def GIT_HASH = sh(
+ script: 'git rev-parse --short HEAD',
+ returnStdout: true
+ ).trim()
+ sh 'script --return -qc "snapcraft login --with $SNAPCRAFT_CREDS_FILE" /dev/null'
+ def SNAP_REVISION = sh(
+ script: "script --return -qc \"snapcraft revisions canonical-rbac | grep edge/ci-${GIT_HASH} | cut -d' ' -f1 | head -n1\" /dev/null",
+ returnStdout: true
+ ).trim()
+ sh "script --return -qc \"snapcraft release canonical-rbac ${SNAP_REVISION} edge\" /dev/null"
+ build job:'crbs-deb-builder', parameters: [
+ [$class: 'StringParameterValue', name: 'SNAP_REVISION', value: SNAP_REVISION],
+ [$class: 'StringParameterValue', name: 'UPLOAD_PPA', value: upload_ppa]
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
diff --git a/jobs/github-ci/github-ci.groovy b/jobs/github-ci/github-ci.groovy
new file mode 100644
index 0000000..e51db73
--- /dev/null
+++ b/jobs/github-ci/github-ci.groovy
@@ -0,0 +1,149 @@
+pipeline {
+ agent {
+ docker {
+ {% if use_custom_build_image %}
+ image "build-env-{{ name }}-${GH_BRANCH}:${SERIES}"
+ {% else %}
+ image "ubuntu:${SERIES}"
+ {% endif %}
+ args '-u 0:0 -m 4g'
+ label '{{ tester_agent_label }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ reuseNode true
+ }
+ }
+ options {
+ disableConcurrentBuilds()
+ }
+ stages {
+ stage('Prepare') {
+ steps{
+ script {
+ currentBuild.description = "${GH_REPO}/${GH_BRANCH}"
+ }
+ withCredentials([
+ sshUserPrivateKey(credentialsId: '{{ ssh_key }}', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+ ]) {
+ sh '''
+ if [ ! -z \$http_proxy ]; then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/github-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/github-ci-proxy
+
+ mkdir -p ~/.ssh/
+ echo "Host github.com\n HostName ssh.github.com\n Port 443\n ProxyCommand /usr/bin/nc -X connect -x squid.internal %h %p\n" >> ~/.ssh/config
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get -y update
+ apt install -y git make sudo wget socat netcat
+
+ useradd ubuntu -d /home/ubuntu
+ mkdir -p /home/ubuntu
+ chown ubuntu:ubuntu /home/ubuntu
+ echo "ubuntu ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/90-docker
+
+ rm -rf ci maas-ci-internal
+ export GIT_SSH_COMMAND="ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ git clone git+ssh://${SSHUSER}@git.launchpad.net/~maas-committers/maas-ci/+git/maas-ci-internal --branch main --depth 1
+ '''
+ }
+ }
+ }
+ stage('Checkout') {
+ steps {
+ withCredentials([sshUserPrivateKey(credentialsId: '{{ ssh_key}}', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ sh '''
+ export GIT_SSH_COMMAND="ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ mkdir -p /run/build/
+ git clone -b "${GH_BRANCH}" "git@xxxxxxxxxx:${GH_ORG}/${GH_REPO}.git" /run/build/checkout
+ chown -R ubuntu:ubuntu /run/build/checkout
+ if [ ! -z "${GH_TEST_REPO}" ]; then
+ git clone -b "${GH_TEST_BRANCH}" "git@xxxxxxxxxx:${GH_ORG}/${GH_TEST_REPO}.git" /run/build/tests
+ chown -R ubuntu:ubuntu /run/build/tests
+ fi
+ '''
+ }
+ }
+ }
+ stage('Install deps') {
+ steps {
+ sh '''
+ cd {{ deps_dir }}
+ sudo -E -H -u ubuntu {{ deps_command }}
+ '''
+ }
+ }
+ stage('Build') {
+ steps {
+ sh '''
+ cd {{ build_dir }}
+ sudo -E -H -u ubuntu {{ build_command }}
+ '''
+ }
+ }
+ stage('Lint') {
+ steps {
+ sh '''
+ cd {{ lint_dir }}
+ sudo -E -H -u ubuntu {{ lint_command }}
+ '''
+ }
+ }
+ stage('Setup Env') {
+ steps {
+ withCredentials([string(credentialsId: '{{ maas_api_key }}', variable: '{{ api_token_env_var }}')]){
+ sh '''
+ cd {{ setup_env_dir }}
+ export no_proxy="${no_proxy:-'localhost'},${MAAS_HOST}"
+ export NO_PROXY="${NO_PROXY:-'localhost'},${MAAS_HOST}"
+ sudo -E -H -u ubuntu {{ setup_env_command }}
+ '''
+ }
+ }
+ }
+ stage('Test') {
+ steps {
+ withCredentials([string(credentialsId: 'jenkins-maas-token', variable: '{{ api_token_env_var }}')]) {
+ {% if test_lock is defined and test_lock %}
+ lock('{{ test_lock }}') {
+ sh '''
+ export no_proxy="${no_proxy:-'localhost'},${MAAS_HOST}"
+ export NO_PROXY="${NO_PROXY:-'localhost'},${MAAS_HOST}"
+ cd {{ test_dir }}
+ sudo -E -H -u ubuntu {{ test_command }}
+ '''
+ }
+ {% else %}
+ sh '''
+ export no_proxy="${no_proxy:-'localhost'},${MAAS_HOST}"
+ export NO_PROXY="${NO_PROXY:-'localhost'},${MAAS_HOST}"
+ cd {{ test_dir }}
+ sudo -E -H -u ubuntu {{ test_command }}
+ '''
+ {% endif %}
+ }
+ }
+ post {
+ always {
+ sh '''
+ if [ -f /run/build/tests/junit.xml ]; then
+ cp /run/build/tests/junit*.xml ${WORKSPACE}
+ else
+ [ -f /run/build/checkout/junit.xml ] || echo '<testsuite tests="0" />' > /run/build/checkout/junit.xml
+ cp /run/build/checkout/junit*.xml ${WORKSPACE}
+ fi
+ '''
+ archiveArtifacts artifacts: 'junit*.xml'
+ junit allowEmptyResults: true, testResults: 'junit*.xml'
+ withCredentials([
+ string(credentialsId: 'jenkins-maas-token', variable: '{{ api_token_env_var }}'),
+ sshUserPrivateKey(credentialsId: 'maas-lander-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+ ]) {
+ {{ post_test_actions }}
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/github-ci/github-ci.yaml b/jobs/github-ci/github-ci.yaml
new file mode 100644
index 0000000..5160b93
--- /dev/null
+++ b/jobs/github-ci/github-ci.yaml
@@ -0,0 +1,89 @@
+- job_group:
+ name: '{name}-github-ci'
+ jobs:
+ - 'gh-{name}-tester'
+
+- job-template:
+ name: 'gh-{name}-tester'
+ description: |
+ Github CI tester for {name}
+ dsl: !include-jinja2: github-ci.groovy
+ common_parameters:
+ - string:
+ name: GH_REPO
+ description: 'Github repo to clone'
+ default: '{repo_gh_path}'
+ - string:
+ name: GH_BRANCH
+ description: 'Github branch to clone'
+ default: '{repo_gh_branch}'
+ - string:
+ name: GH_ORG
+ description: 'Github org that owns the repo to clone'
+ default: '{repo_gh_org}'
+ - string:
+ name: GH_TEST_REPO
+ description: 'Github repo to clone if there are separate tests'
+ default: '{repo_gh_test_path}'
+ - string:
+ name: GH_TEST_BRANCH
+ description: 'Github branch to clone if there are separate tests'
+ default: '{repo_gh_test_branch}'
+ - string:
+ name: SERIES
+ description: 'The Ubuntu series to build and test with'
+ default: '{ubuntu_series}'
+ - string:
+ name: GH_USER_NAME
+ description: 'The name to use to commit results and push them'
+ default: '{lander_name}'
+ - string:
+ name: GH_USER_EMAIL
+ description: 'The email to use to commit results and push them'
+ default: '{lander_email}'
+ - string:
+ name: MAAS_HOST
+ description: "The IP or Hostname used in the MAAS_URL, to be added to NO_PROXY"
+ default: '10.245.136.7'
+ parameters: !j2-yaml: |
+ {% for param in common_parameters %}
+ {% for key, value in param.items() %}
+ - {{ key }}: {{value|tojson}}
+ {% endfor %}
+ {% endfor %}
+ {% for param in extra_parameters %}
+ {% for key, value in param.items() %}
+ - {{ key }}: {{value|tojson}}
+ {% endfor %}
+ {% endfor %}
+ # overridable parameters
+ extra_parameters: []
+ repo_gh_branch: master
+ repo_gh_org: maas
+ tester_agent_label: ''
+ ssh_key: ''
+ # if set to true, an image named 'build-env-{name}-$LP_BRANCH_DEST:$SERIES'
+ # is used
+ use_custom_build_image: false
+ deps_dir: ''
+ build_dir: ''
+ lint_dir: ''
+ setup_env_dir: ''
+ test_dir: ''
+ deps_command: '/bin/true'
+ build_command: '/bin/true'
+ lint_command: '/bin/true'
+ setup_env_command: '/bin/true'
+ test_command: '/bin/true'
+ post_test_actions: ''
+ ubuntu_series: '20.04'
+ lander_name: 'MAAS Lander'
+ lander_email: 'maas-lander@xxxxxxxxxxxxx'
+
+- view-template:
+ name: 'gh-{name}'
+ description: |
+ All the jobs related to {name}.
+ view-type: list
+ job-name:
+ - 'gh-{name}-tester'
diff --git a/jobs/globals.yaml b/jobs/globals.yaml
new file mode 100644
index 0000000..7970206
--- /dev/null
+++ b/jobs/globals.yaml
@@ -0,0 +1,6 @@
+- defaults:
+ name: global
+ project-type: pipeline
+ docker_registry: https://maas-ci.internal:5443
+ docker_registry_cred: maas-ci-registry
+ series: '22.04'
diff --git a/jobs/grafana-dashboards.groovy b/jobs/grafana-dashboards.groovy
new file mode 100644
index 0000000..fc72d8a
--- /dev/null
+++ b/jobs/grafana-dashboards.groovy
@@ -0,0 +1,56 @@
+pipeline {
+ agent {
+ docker {
+ image 'ubuntu:{{ series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'maas-tester'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+
+ stages {
+ stage('Prepare') {
+ steps {
+ sh '''
+ if [ ! -z \$http_proxy ]; then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt update
+ apt install -y git make sudo
+ '''
+ }
+ }
+ stage('Checkout') {
+ steps {
+ withCredentials([file(credentialsId: 'lp-lander-sshkey', variable: 'SSHKEY')]) {
+ withEnv(["GIT_SSH_COMMAND=ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"]) {
+ sh '''
+ rm -rf checkout
+ git clone --depth 1 --single-branch --branch ${LP_BRANCH} git+ssh://maas-lander@git.launchpad.net/${LP_REPO} checkout
+ '''
+ }
+ }
+ }
+ }
+ stage('Setup') {
+ steps {
+ sh '''
+ make -C checkout deps
+ '''
+ }
+ }
+ stage('Generate dashboards') {
+ steps {
+ sh '''
+ make -C checkout dashboards DATASOURCE="maas-influxdb.cloud.kpi.internal"
+ '''
+ archiveArtifacts artifacts: 'checkout/generated/*.json'
+ }
+ }
+ }
+}
diff --git a/jobs/grafana-dashboards.yaml b/jobs/grafana-dashboards.yaml
new file mode 100644
index 0000000..006325a
--- /dev/null
+++ b/jobs/grafana-dashboards.yaml
@@ -0,0 +1,22 @@
+- project:
+ name: grafana-dashboards
+ jobs:
+ - grafana-dashboards
+
+- job-template:
+ name: grafana-dashboards
+ parameters:
+ - string:
+ name: LP_REPO
+ description: 'Git repository'
+ default: '~maas-committers/maas-kpi/+git/maas-kpi'
+ - string:
+ name: LP_BRANCH
+ description: 'Git branch'
+ default: 'master'
+ triggers:
+ - timed: '@daily'
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 20
+ dsl: !include-jinja2: grafana-dashboards.groovy
diff --git a/jobs/initial-sql-update.groovy b/jobs/initial-sql-update.groovy
new file mode 100644
index 0000000..5ec515f
--- /dev/null
+++ b/jobs/initial-sql-update.groovy
@@ -0,0 +1,77 @@
+pipeline {
+ agent {
+ docker {
+ image 'build-env-maas-{{ repo_lp_branch }}:{{ series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'maas-tester'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+
+ stages {
+ stage('Prepare') {
+ steps {
+ cleanWs()
+ sh '''
+ if [ ! -z \$http_proxy ]; then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get update
+ apt -y install git make sudo lsb-release language-pack-en
+
+ useradd -m ubuntu
+ '''
+ }
+ }
+ stage("Update") {
+ steps {
+ withCredentials([file(credentialsId: 'lp-lander-sshkey', variable: 'SSHKEY')]) {
+ withEnv(["GIT_SSH_COMMAND=ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"]) {
+ lock(resource: "maas-${LP_BRANCH_DEST}") {
+ sh '''
+ mkdir -p /run/build
+ cd /run/build
+ # Get the latest 10 revisions to speed things up. If the source branch is more than 10 revisions
+ # behind the destination, the merge will fail, and the branch will need to be rebased manually.
+ git clone --depth 10 --single-branch --branch ${LP_BRANCH_DEST} git+ssh://maas-lander@git.launchpad.net/${LP_REPO_DEST} maas
+ git config --global --add safe.directory /run/build/maas
+ chown -R ubuntu:ubuntu /run/build
+
+ env -C /run/build/maas make install-dependencies
+ env -C /run/build/maas sudo -u ubuntu -E -H make build
+ '''
+ script {
+ env.USER_EMAIL = sh(script:'env -C /run/build/maas sudo -u ubuntu -E -H git show --format="%aE" -q', returnStdout: true).trim()
+ env.USER_NAME = sh(script:'env -C /run/build/maas sudo -u ubuntu -E -H git show --format="%aN" -q', returnStdout: true).trim()
+ }
+ sh '''
+ cd /run/build/maas
+ git config user.email "${USER_EMAIL}"
+ git config user.name "${USER_NAME}"
+ sudo -u ubuntu -E -H make update-initial-sql
+ git add -u
+ git status
+ # Don't fail if the commit is empty.
+ git commit -m "Update initial SQL" || exit 0
+ git push origin HEAD:${LP_BRANCH_DEST}
+ '''
+ }
+ }
+ }
+ }
+ }
+ }
+ post {
+ success {
+ mattermostSend (color: 'green', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :success: Updated initial SQL :postgres: into ${env.LP_REPO_DEST}:${env.LP_BRANCH_DEST}")
+ }
+ failure {
+ mattermostSend (color: 'red', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :fire: Failed to update initial SQL :postgres: into ${env.LP_REPO_DEST}:${env.LP_BRANCH_DEST}")
+ }
+ }
+}
diff --git a/jobs/initial-sql-update.yaml b/jobs/initial-sql-update.yaml
new file mode 100644
index 0000000..0e302b6
--- /dev/null
+++ b/jobs/initial-sql-update.yaml
@@ -0,0 +1,26 @@
+- project:
+ name: initial-sql-update
+ jobs:
+ - initial-sql-update
+
+- job-template:
+ name: initial-sql-update
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - string:
+ name: LP_REPO_DEST
+ description: 'Git repository to merge into.'
+ default: '{repo_lp_path}'
+ - string:
+ name: LP_BRANCH_DEST
+ description: 'Git branch to merge into.'
+ default: '{repo_lp_branch}'
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 10
+ dsl: !include-jinja2: initial-sql-update.groovy
+
+ # parameters
+ repo_lp_path: ~maas-committers/maas
+ repo_lp_branch: master
diff --git a/jobs/is-maas-fast-yet.groovy b/jobs/is-maas-fast-yet.groovy
new file mode 100644
index 0000000..240f416
--- /dev/null
+++ b/jobs/is-maas-fast-yet.groovy
@@ -0,0 +1,106 @@
+pipeline {
+ agent {
+ docker {
+ image "build-env-maas-{{ series }}:{{ os }}"
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'maas-tester'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+ stages {
+ stage('Prepare') {
+ steps {
+ script {
+ currentBuild.displayName = "Is MAAS with {{ machines }} machines fast yet?"
+ }
+ sh '''
+ apt-get -y update
+ apt-get install -y git make sudo python3 python3-pip
+ pip3 install influxdb-client
+ useradd ubuntu -d /home/ubuntu
+ mkdir -p /home/ubuntu
+ chown ubuntu:ubuntu /home/ubuntu
+ echo "ubuntu ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/90-docker
+ '''
+ }
+ }
+ stage('Checkout') {
+ steps {
+ sh "rm -rf checkout ci-utils sampledata"
+ withCredentials([sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -l $SSHUSER"
+ if (env.GIT_SSH_COMMAND_EXTRA) {
+ env.GIT_SSH_COMMAND += " ${env.GIT_SSH_COMMAND_EXTRA}"
+ }
+ }
+ sh '''
+ git clone git+ssh://git.launchpad.net/{{ lp_git_ci_repo }} --single-branch --branch main --depth 1 ci-utils
+ git clone --recurse-submodules git+ssh://git.launchpad.net/{{ lp_git_repo }} --single-branch --branch {{ series }} --depth 1 checkout
+ chown -R ubuntu:ubuntu checkout
+ '''
+ script {
+ def shortHash = sh(returnStdout: true, script: "cd checkout && sudo -E -H -u ubuntu git rev-parse --short HEAD").trim()
+ currentBuild.description = "{{ os }}/{{ lp_git_repo }}/{{ series }} - ${shortHash}"
+ }
+ }
+ }
+ }
+ stage('Install deps') {
+ steps {
+ sh '''
+ sudo -E -H -u ubuntu make -C checkout install-dependencies
+ '''
+ }
+ }
+ stage('Build') {
+ steps {
+ sh '''
+ sudo -E -H -u ubuntu make -C checkout build bin/database
+ '''
+ }
+ }
+ stage('Import sample data') {
+ steps {
+ script {
+ copyArtifacts projectName: 'maas-sampledata-dumper-{{ os }}-{{ series }}-{{ machines }}', target: 'sampledata', flatten: true
+ }
+ script {
+ env.DBDUMP = '../sampledata/maasdb-{{ os }}-{{ series }}-{{ machines }}.dump'
+ }
+ sh '''
+ cd checkout
+ sudo -E -H -u ubuntu bin/database --preserve run -- pg_restore -O -x --disable-triggers --create --clean --if-exists -d postgres ${DBDUMP}
+ '''
+ }
+ }
+ stage('Perf') {
+ steps {
+ script {
+ env.OUTPUT_FILE = '{{ result_file }}'
+ }
+ lock(resource: "performance-tests") {
+ sh '''
+ sudo -E -H -u ubuntu checkout/utilities/run-perf-tests-ci ${DBDUMP}
+ '''
+ }
+ archiveArtifacts allowEmptyArchive: true, artifacts: "checkout/junit*.xml, sampledata/*.dump, checkout/{{ result_file }}"
+ junit allowEmptyResults: true, testResults: 'checkout/junit*.xml'
+ }
+ }
+ stage('Export results') {
+ steps {
+ withCredentials([file(credentialsId: 'maas-influxdb', variable: 'INFLUX_CRED')]) {
+ script {
+ sh '''
+ export DATASET="{{ os }}-{{ series }}-{{ machines }}"
+ ci-utils/utilities/export-perf-to-influxdb --credential=$INFLUX_CRED --system-id=$NODE_NAME --dataset=$DATASET --database=$INFLUX_DB --host=$INFLUX_URI checkout/{{ result_file }}
+ '''
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/is-maas-fast-yet.yaml b/jobs/is-maas-fast-yet.yaml
new file mode 100644
index 0000000..2ad4613
--- /dev/null
+++ b/jobs/is-maas-fast-yet.yaml
@@ -0,0 +1,66 @@
+- project:
+ name: is-maas-fast-yet
+ machines:
+ - 10
+ - 100
+ - 1000
+ series:
+ - master:
+ build-freq: 'H H(3-6) * * *'
+ - 3.3
+ - 3.2
+ - 3.1
+ os:
+ - 20.04
+ - 22.04
+ exclude:
+ - series: master
+ os: 20.04
+ - series: 3.3
+ os: 20.04
+ - series: 3.2
+ os: 22.04
+ - series: 3.1
+ os: 22.04
+ jobs:
+ - '{name}-tester-{os}-{series}-{machines}'
+ views:
+ - '{name}-tester'
+
+- view-template:
+ name: '{name}-tester'
+ description: 'MAAS performance tests'
+ view-type: list
+ regex: '{name}-tester-.*'
+
+- job-template:
+ name: '{name}-tester-{os}-{series}-{machines}'
+ build-freq: 'H H(12-18) * * 7'
+ parameters:
+ - string:
+ description: "Random seed for factories"
+ name: "MAAS_RAND_SEED"
+ default: ""
+ - string:
+ description: "Random seed for hash"
+ name: "PYTHONHASHSEED"
+ default: ""
+ - string:
+ description: "InfluxDB database"
+ name: "INFLUX_DB"
+ default: "maas_ci_perf"
+ - string:
+ description: "InfluxDB uri"
+ name: "INFLUX_URI"
+ default: "http://maas-influxdb.cloud.kpi.internal:8086"
+ triggers:
+ - timed: "{build-freq}"
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 20
+ dsl: !include-jinja2: is-maas-fast-yet.groovy
+
+ # parameters
+ lp_git_repo: maas
+ lp_git_ci_repo: ~maas-committers/maas-ci/+git/maas-ci-internal
+ result_file: maas-perf-results.json
diff --git a/jobs/launchpad-ci/job-lander.groovy b/jobs/launchpad-ci/job-lander.groovy
new file mode 100644
index 0000000..a7b0556
--- /dev/null
+++ b/jobs/launchpad-ci/job-lander.groovy
@@ -0,0 +1,90 @@
+def createBuildContextFromJob(job) {
+ ret_build = {
+ def local_job = job
+ def series = local_job.SERIES
+ def test_build = makeTestBuild(local_job.NAME, series, local_job)
+ def test_result = test_build.getResult()
+ def test_build_url = test_build.getAbsoluteUrl()
+ if (test_result != 'SUCCESS') {
+ println "Failed job for $local_job.LP_BRANCH_SRC into $local_job.LP_BRANCH_DEST."
+ sh "ci/launchpad --credentials $CREDS mark-mp --fail-merge ${test_build_url}console --repo-src $local_job.LP_REPO_SRC --branch-src $local_job.LP_BRANCH_SRC --repo-dest $local_job.LP_REPO_DEST --branch-dest $local_job.LP_BRANCH_DEST"
+ mattermostSend(color: 'red', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :fire: Failed to merge [${local_job.LP_REPO_SRC}:${local_job.LP_BRANCH_SRC} into ${local_job.LP_REPO_DEST}:${local_job.LP_BRANCH_DEST}](${local_job.LP_MP_LINK})\n \n${local_job.LP_COMMIT_MSG}")
+ } else {
+ try {
+ lock(resource: "$local_job.NAME-${local_job.LP_BRANCH_DEST}") {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ }
+ writeFile(file: "message.txt", text: local_job.LP_COMMIT_MSG)
+ sh """
+ work_dir=\$(mktemp -d)
+ ci/launchpad merge --work-dir \$work_dir --repo-dest git+ssh://${SSHUSER}@git.launchpad.net/${local_job.LP_REPO_DEST} --repo-src git+ssh://${SSHUSER}@git.launchpad.net/${local_job.LP_REPO_SRC} --branch-dest ${local_job.LP_BRANCH_DEST} --branch-src ${local_job.LP_BRANCH_SRC} --push --commit-msg-file message.txt
+ rm -rf \$work_dir message.txt
+ ci/launchpad --credentials $CREDS mark-mp --succeed-merge --repo-src $local_job.LP_REPO_SRC --branch-src $local_job.LP_BRANCH_SRC --repo-dest $local_job.LP_REPO_DEST --branch-dest $local_job.LP_BRANCH_DEST
+ """
+ }
+ mattermostSend(color: 'green', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :success: Merged [${local_job.LP_REPO_SRC}:${local_job.LP_BRANCH_SRC} into ${local_job.LP_REPO_DEST}:${local_job.LP_BRANCH_DEST}](${local_job.LP_MP_LINK})\n \n${local_job.LP_COMMIT_MSG}")
+ } catch (e) {
+ sh """
+ rm -f message.txt
+ ci/launchpad --credentials $CREDS mark-mp --fail-merge ${test_build_url}console --repo-src $local_job.LP_REPO_SRC --branch-src $local_job.LP_BRANCH_SRC --repo-dest $local_job.LP_REPO_DEST --branch-dest $local_job.LP_BRANCH_DEST
+ """
+ mattermostSend(color: 'red', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :fire: Failed to merge [${local_job.LP_REPO_SRC}:${local_job.LP_BRANCH_SRC} into ${local_job.LP_REPO_DEST}:${local_job.LP_BRANCH_DEST}](${local_job.LP_MP_LINK})\n \n${local_job.LP_COMMIT_MSG}")
+ }
+ }
+ }
+ return ret_build
+}
+
+pipeline {
+ agent {
+ label ''
+ }
+
+ options {
+ buildDiscarder(logRotator(daysToKeepStr: '2'))
+ disableConcurrentBuilds()
+ timeout(time: 1, unit: 'HOURS')
+ }
+
+ stages {
+ stage('Prepare') {
+ steps {
+ cleanWs()
+ withCredentials([
+ file(credentialsId: 'lp-lander-credentials', variable: 'CREDS'),
+ sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+ ]) {
+ sh '''
+ rm -rf ci maas-ci-internal
+ export GIT_SSH_COMMAND="ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ git clone git+ssh://${SSHUSER}@git.launchpad.net/~maas-committers/maas-ci/+git/maas-ci-internal --branch main --depth 1
+ mv maas-ci-internal/jobs/launchpad-ci ci
+ '''
+ }
+ }
+ }
+
+ stage('Land Branches') {
+ steps {
+ withCredentials([
+ file(credentialsId: 'lp-lander-credentials', variable: 'CREDS'),
+ sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+ ]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ def output = run('ci/launchpad --credentials $CREDS mergeable-jobs maas-ci-internal/jobs/')
+ def jobs = parseJobs(output)
+ if(jobs.size() > 0) {
+ def builds = [:]
+ for(job in jobs) {
+ builds[job.LP_BRANCH_DEST] = createBuildContextFromJob(job)
+ }
+ parallel builds
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/launchpad-ci/job-reviewer.groovy b/jobs/launchpad-ci/job-reviewer.groovy
new file mode 100644
index 0000000..128642f
--- /dev/null
+++ b/jobs/launchpad-ci/job-reviewer.groovy
@@ -0,0 +1,86 @@
+def createBuildContextForJob(job) {
+ ret_build = {
+ def local_job = job
+ def series = local_job.SERIES
+ sh "ci/launchpad --credentials $CREDS mark-mp --start-review --repo-src $local_job.LP_REPO_SRC --branch-src $local_job.LP_BRANCH_SRC --repo-dest $local_job.LP_REPO_DEST --branch-dest $local_job.LP_BRANCH_DEST"
+ def test_build = makeTestBuild(local_job.NAME, series, local_job)
+ def test_result = test_build.getResult()
+ def test_build_url = test_build.getAbsoluteUrl()
+ if (test_result != 'SUCCESS') {
+ println "Failed job for $local_job.LP_BRANCH_SRC into $local_job.LP_BRANCH_DEST on $series."
+ sh "ci/launchpad --credentials $CREDS mark-mp --fail-review ${test_build_url}console --repo-src $local_job.LP_REPO_SRC --branch-src $local_job.LP_BRANCH_SRC --repo-dest $local_job.LP_REPO_DEST --branch-dest $local_job.LP_BRANCH_DEST --commit $local_job.LP_COMMIT_SHA1"
+ } else {
+ sh "ci/launchpad --credentials $CREDS mark-mp --succeed-review --repo-src $local_job.LP_REPO_SRC --branch-src $local_job.LP_BRANCH_SRC --repo-dest $local_job.LP_REPO_DEST --branch-dest $local_job.LP_BRANCH_DEST --commit $local_job.LP_COMMIT_SHA1"
+ }
+ }
+ return ret_build
+}
+
+pipeline {
+ agent {
+ label ''
+ }
+
+ options {
+ buildDiscarder(logRotator(numToKeepStr: '25'))
+ disableConcurrentBuilds()
+ timeout(time: 1, unit: 'HOURS')
+ }
+
+
+ stages {
+ stage('Prepare') {
+ steps {
+ cleanWs()
+ withCredentials([
+ file(credentialsId: 'lp-lander-credentials', variable: 'CREDS'),
+ sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+ ]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ }
+ sh '''
+ rm -rf ci maas-ci-internal
+ export GIT_SSH_COMMAND="ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ git clone git+ssh://${SSHUSER}@git.launchpad.net/~maas-committers/maas-ci/+git/maas-ci-internal --branch main --depth 1
+ mv maas-ci-internal/jobs/launchpad-ci ci
+ '''
+ }
+ }
+ }
+
+ stage('Find MPs that need review') {
+ steps {
+ withCredentials([
+ file(credentialsId: 'lp-lander-credentials', variable: 'CREDS'),
+ sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+ ]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ def output = run('ci/launchpad --credentials $CREDS reviewable-jobs maas-ci-internal/jobs/')
+ env._mps_to_review = output
+ }
+ }
+ }
+ }
+ stage('Test MPs') {
+ when { not { environment name: '_mps_to_review', value: '[]' }}
+ steps {
+ withCredentials([
+ file(credentialsId: 'lp-lander-credentials', variable: 'CREDS'),
+ sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+ ]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ def jobs = parseJobs(env._mps_to_review)
+ def builds = [:]
+ for(job in jobs) {
+ builds[job.LP_BRANCH_SRC] = createBuildContextForJob(job)
+ }
+ parallel builds
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/launchpad-ci/job-tester.groovy b/jobs/launchpad-ci/job-tester.groovy
new file mode 100644
index 0000000..705b1e9
--- /dev/null
+++ b/jobs/launchpad-ci/job-tester.groovy
@@ -0,0 +1,103 @@
+{% import 'launchpad-ci.groovy' as common %}
+{% include 'launchpad-ci-utils.groovy' %}
+
+pipeline {
+ agent {
+ docker {
+ {% if use_custom_build_image %}
+ image "build-env-{{ name }}-${LP_BRANCH_DEST}:${SERIES}"
+ {% else %}
+ image "ubuntu:${SERIES}"
+ {% endif %}
+ label '{{ tester_agent_label }}'
+ args '-u 0:0 -m 4g'
+ reuseNode true
+ }
+ }
+ options {
+ disableConcurrentBuilds()
+ timeout(time: 1, unit: 'HOURS')
+ }
+ stages {
+ stage('Prepare') {
+ steps {
+ script {
+ currentBuild.description = "${LP_REPO_SRC}/${LP_BRANCH_SRC}"
+ }
+ sh '''
+ if [ ! -z \$http_proxy ]
+ then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get -y update
+ apt install -y git make sudo wget socat python3-launchpadlib python3-yaml
+
+ useradd ubuntu -d /home/ubuntu
+ mkdir -p /home/ubuntu
+ chown ubuntu:ubuntu /home/ubuntu
+ echo "ubuntu ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/90-docker
+ rm -f ${WORKSPACE}/junit*.xml
+ '''
+ {{ common.clone_ci_repo_step() }}
+ }
+ }
+ stage('Checkout') {
+ steps {
+ withCredentials([sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ sh '''
+ export GIT_SSH_COMMAND="ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ mkdir -p /run/build/checkout
+ ci/launchpad merge --work-dir /run/build/checkout --repo-dest git+ssh://${SSHUSER}@git.launchpad.net/${LP_REPO_DEST} --repo-src git+ssh://${SSHUSER}@git.launchpad.net/${LP_REPO_SRC} --branch-dest ${LP_BRANCH_DEST} --branch-src ${LP_BRANCH_SRC}
+ chown -R ubuntu:ubuntu /run/build
+ '''
+ }
+ }
+ }
+ stage('Install deps') {
+ steps {
+ sh '''
+ cd /run/build/checkout
+ sudo -E -H -u ubuntu {{ deps_command }}
+ '''
+ }
+ }
+ stage('Build') {
+ steps {
+ sh '''
+ cd /run/build/checkout
+ sudo -E -H -u ubuntu {{ build_command }}
+ '''
+ }
+ }
+ stage('Lint') {
+ steps {
+ sh '''
+ cd /run/build/checkout
+ sudo -E -H -u ubuntu {{ lint_command }}
+ '''
+ }
+ }
+ stage('Test') {
+ steps {
+ sh '''
+ cd /run/build/checkout
+ sudo -E -H -u ubuntu {{ test_command }}
+ '''
+ }
+ post {
+ always {
+ sh '''
+ [ -f /run/build/checkout/junit.xml ] || echo '<testsuite tests="0" />' > /run/build/checkout/junit.xml
+ cp /run/build/checkout/junit*.xml ${WORKSPACE}
+ '''
+ archiveArtifacts artifacts: 'junit*.xml'
+ junit allowEmptyResults: true, testResults: 'junit*.xml'
+ {{ post_test_actions }}
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/launchpad-ci/launchpad b/jobs/launchpad-ci/launchpad
new file mode 100755
index 0000000..2e01883
--- /dev/null
+++ b/jobs/launchpad-ci/launchpad
@@ -0,0 +1,644 @@
+#!/usr/bin/env python3
+
+import argparse
+from concurrent import futures
+from dataclasses import dataclass
+from itertools import chain
+import json
+import logging
+import os
+from pathlib import Path
+from random import shuffle
+import re
+import subprocess
+import sys
+import tempfile
+from datetime import datetime
+
+import yaml
+from launchpadlib.launchpad import Launchpad
+
+logging.basicConfig(level=logging.DEBUG)
+
+logger = logging.getLogger("jenkins-launchpad")
+
+
+def git(*args, return_output=False):
+ """Call a git command."""
+ command = ["git", *args]
+ if return_output:
+ return subprocess.check_output(command)
+ subprocess.check_call(command)
+
+
+def login_to_lp(credentials):
+ if credentials:
+ lp = Launchpad.login_with(
+ "launchpad-ci", "production", credentials_file=credentials, version="devel"
+ )
+ else:
+ lp = Launchpad.login_anonymously("launchpad-ci", "production", version="devel")
+ return lp
+
+
+def get_repository(lp, repo_path):
+ return lp.git_repositories.getByPath(path=repo_path)
+
+
+def get_prerequisite_proposals(proposal):
+ """
+ Given a proposal, return all prerequisite proposals that are not in
+ the superseded state. There ideally should be one and only one here
+ (or zero), but sometimes there are not, depending on developer habits.
+ """
+ prerequisite_repo = proposal.prerequisite_git_repository
+ prerequisite_ref = proposal.prerequisite_git_path
+ # We only consider MPs which are against the same target repo
+ # and reference (eg. "main").
+ target_repo = proposal.target_git_repository
+ target_ref = proposal.target_git_path
+ if (
+ not prerequisite_repo
+ or not prerequisite_ref
+ or not target_repo.landing_candidates
+ ):
+ return []
+
+ prerequisite_repo_name = prerequisite_repo.unique_name
+ # Go through the list of MPs against the target repository and
+ # ensure they are against the same ref and that they match the
+ # prerequisite repo/ref.
+ return [
+ mp
+ for mp in target_repo.landing_candidates
+ if (
+ mp.target_git_path == target_ref
+ and mp.source_git_repository.unique_name == prerequisite_repo_name
+ and mp.source_git_path == prerequisite_ref
+ and mp.queue_status != "Superseded"
+ )
+ ]
+
+
+def is_approved(proposal):
+ target = proposal.target_git_repository
+ votes = proposal.votes
+ for vote in votes:
+ if not target.isPersonTrustedReviewer(reviewer=vote.reviewer):
+ continue
+ if vote.is_pending:
+ continue
+ if vote.comment.vote == "Approve":
+ return True
+ return False
+
+
+def get_latest_commit_sha1(proposal):
+ branch = proposal.source_git_repository.getRefByPath(path=proposal.source_git_path)
+ return branch.commit_sha1
+
+
+def extract_commit_from_comment(comment):
+ regex = r"COMMIT: \b(?P<sha1>[0-9a-f]{5,40})\b"
+ matches = re.search(regex, comment.message_body)
+ if matches is None:
+ return None
+ return matches.group("sha1")
+
+
+def has_test_marker(comment):
+ for line in comment.message_body.splitlines():
+ if line.startswith("jenkins: !test"):
+ return True
+ return False
+
+
+def generate_mergable_proposals(lp, git_repo):
+ approved_proposals = list(git_repo.getMergeProposals(status="Approved"))
+ shuffle(approved_proposals)
+ for proposal in approved_proposals:
+ if not is_approved(proposal):
+ continue
+ if not proposal.commit_message:
+ job_info = get_job_info(proposal)
+ branch_info = get_branch_info(job_info)
+ subject = "Re: [Merge] %s - MISSING COMMIT MESSAGE" % branch_info
+ comment = "UNABLE TO START LANDING\n\nSTATUS: MISSING COMMIT MESSAGE"
+ proposal.createComment(subject=subject, content=comment)
+ proposal.setStatus(status="Needs review")
+ proposal.lp_save()
+ continue
+
+ prereqs = get_prerequisite_proposals(proposal)
+ if len(prereqs) == 1 and prereqs[0].queue_status != "Merged":
+ continue
+
+ yield proposal
+
+
+def _get_mp(lp, git_repo_path, proposal_address, repo_logger):
+ git_repo = lp.git_repositories.getByPath(path=git_repo_path)
+ proposal_idx = {
+ mp["address"]: i for i, mp in enumerate(git_repo.landing_candidates.entries)
+ }
+ try:
+ return git_repo.landing_candidates[proposal_idx[proposal_address]]
+ except IndexError:
+ repo_logger.warning(f"Having to hydrate proposals for {git_repo.display_name}")
+ for proposal in git_repo.landing_candidates:
+ if proposal.address == proposal_address:
+ return proposal
+
+
+def should_review(args, git_repo_path, proposal_address, repo_logger):
+ lp = login_to_lp(args.credentials)
+ proposal = _get_mp(lp, git_repo_path, proposal_address, repo_logger)
+ repo_logger.debug(f"Considering {proposal.web_link}")
+ hit_test_marker = False
+ reviewed_commits = []
+ for comment in proposal.all_comments:
+ if has_test_marker(comment):
+ hit_test_marker = True
+ if lp.me == comment.author:
+ commit = extract_commit_from_comment(comment)
+ if commit:
+ reviewed_commits.append(commit)
+ if hit_test_marker:
+ # Test marker was set but a following comment holds a test, so
+ # the test has already been run for this proposal.
+ hit_test_marker = False
+ if hit_test_marker:
+ repo_logger.debug(f"Found marker on {proposal.web_link}; Testing!")
+ return True
+ # If WIP, see if this has been marked to be tested.
+ if proposal.queue_status == "Work in progress":
+ repo_logger.debug(f"Skipping {proposal.web_link}: WiP but no marker found")
+ return False
+ # See if the latest commit has already been reviewed.
+ latest_commit = get_latest_commit_sha1(proposal)
+ if latest_commit in reviewed_commits:
+ repo_logger.debug(
+ f"Skipping {proposal.web_link}: {latest_commit} has already been reviewed"
+ )
+ return False
+ else:
+ return True
+
+
+def generate_reviewable_proposals(args, git_repo, repo_logger):
+ needs_review_proposals = list(git_repo.getMergeProposals(status="Needs review"))
+ wip_proposals = list(git_repo.getMergeProposals(status="Work in progress"))
+ shuffle(needs_review_proposals)
+ shuffle(wip_proposals)
+ ex = futures.ThreadPoolExecutor()
+ results = ex.map(
+ lambda mp: mp
+ if should_review(args, git_repo.unique_name, mp.address, repo_logger)
+ else None,
+ chain(needs_review_proposals, wip_proposals),
+ )
+
+ for proposal in results:
+ if proposal:
+ repo_logger.debug(f"Testing {proposal.web_link} !")
+ yield proposal
+
+
+def get_nice_repo_name(repo):
+ if repo.target_default:
+ return repo.unique_name.split("+")[0][:-1]
+ else:
+ return repo.unique_name
+
+
+def get_job_info(proposal):
+ branch_dest = proposal.target_git_path.split("refs/heads/")[1]
+ return {
+ "LP_REPO_SRC": get_nice_repo_name(proposal.source_git_repository),
+ "LP_BRANCH_SRC": proposal.source_git_path.split("refs/heads/")[1],
+ "LP_REPO_DEST": get_nice_repo_name(proposal.target_git_repository),
+ "LP_BRANCH_DEST": branch_dest,
+ "LP_COMMIT_MSG": proposal.commit_message,
+ "LP_COMMIT_SHA1": get_latest_commit_sha1(proposal),
+ "LP_MP_LINK": proposal.web_link,
+ "MP": proposal,
+ }
+
+
+def generate_jobs(proposals):
+ for proposal in proposals:
+ try:
+ yield get_job_info(proposal)
+ except Exception:
+ logger.debug(f"Skipping unknown job info for {proposal.web_link}")
+
+
+def is_job(job, job_info):
+ for key in ["LP_REPO_SRC", "LP_REPO_DEST", "LP_BRANCH_SRC", "LP_BRANCH_DEST"]:
+ if job[key] != job_info[key]:
+ return False
+ return True
+
+
+def find_proposal(lp, job_info):
+ source_repo = get_repository(lp, job_info["LP_REPO_SRC"])
+ source_ref = source_repo.getRefByPath(path=job_info["LP_BRANCH_SRC"])
+ for job in generate_jobs(source_ref.landing_targets):
+ if is_job(job, job_info):
+ return job["MP"]
+
+
+def get_branch_info(job_info):
+ return "-b %s lp:%s into -b %s lp:%s" % (
+ job_info["LP_BRANCH_SRC"],
+ job_info["LP_REPO_SRC"],
+ job_info["LP_BRANCH_DEST"],
+ job_info["LP_REPO_DEST"],
+ )
+
+
+def mark_bugs_fix_committed(proposal):
+ project = proposal.target_git_repository.target
+ if project.resource_type_link != "https://api.launchpad.net/devel/#project":
+ return
+ series_name = proposal.target_git_path.split("refs/heads/")[1]
+ series = project.getSeries(name=series_name)
+ if not series:
+ series = project.development_focus
+ if not series:
+ return
+
+ def find_task_for_target(bug, target):
+ for task in bug.bug_tasks:
+ if task.target == target:
+ return task
+ return None
+
+ for bug in proposal.bugs:
+ task = find_task_for_target(bug, series)
+ if not task and series == project.development_focus:
+ task = find_task_for_target(bug, project)
+ if task:
+ task.status = "Fix Committed"
+ set_milestone_on_task(project, task)
+ task.lp_save()
+
+
+def set_milestone_on_task(project, task):
+ """
+ Attempt to auto-determine the milestone and set it on the given task.
+ If the task already has a milestone set, this is a no-op.
+ Only processed if the config setting `set_milestone` is True.
+ """
+ task_milestone = task.milestone
+ if task_milestone is not None:
+ return
+ now = datetime.utcnow()
+ target_milestone = find_target_milestone(project, now)
+ task.milestone = target_milestone
+
+
+def find_target_milestone(project, now):
+ """
+ Find a target milestone when resolving a bug task.
+
+ Compare the given datetime `now` to the project's active milestones.
+ Return the milestone whose `date_targeted` is the closest one after
+ `now`. If `now` is later than all dated milestones, fall back to the
+ choices described below.
+
+ Milestones without a targeted date effectively sort after all the
+ dated ones, so the lexically smallest of them is chosen once the
+ future dated milestones are exhausted.
+
+ In other words, pick one of the milestones for the target. Preference:
+ 1) closest milestone (by date) in the future
+ 2) lexically smallest milestone without a targeted date (by name)
+ 3) the most recent milestone in the past (covers the len()==1 case).
+ """
+ earliest_after = latest_before = untargeted = None
+ for milestone in project.active_milestones:
+ if milestone.date_targeted is None:
+ if untargeted is not None:
+ if milestone.name < untargeted.name:
+ untargeted = milestone
+ else:
+ untargeted = milestone
+ elif milestone.date_targeted > now:
+ if earliest_after is not None:
+ if earliest_after.date_targeted > milestone.date_targeted:
+ earliest_after = milestone
+ else:
+ earliest_after = milestone
+ elif milestone.date_targeted < now:
+ if latest_before is not None:
+ if latest_before.date_targeted < milestone.date_targeted:
+ latest_before = milestone
+ else:
+ latest_before = milestone
+ if earliest_after is not None:
+ return earliest_after
+ elif untargeted is not None:
+ return untargeted
+ else:
+ return latest_before
+
+
+def _get_launchpad_ci_files(jobs_cfg_dir: Path):
+ launchpad_ci_files = []
+ # First pass of the yaml files to look for projects that are
+ # `{name}-launchpad-ci`, meaning they use launchpad-ci.yaml's
+ # job-group
+ grep = subprocess.run(
+ ["grep", "-l", "--", "-launchpad-ci"] + list(jobs_cfg_dir.glob("*.yaml")),
+ text=True,
+ check=True,
+ capture_output=True,
+ )
+ for line in grep.stdout.splitlines():
+ launchpad_ci_files.append(Path(line))
+ return launchpad_ci_files
+
+
+@dataclass
+class Repo:
+ name: str
+ series: str
+ lp_path: str
+
+
+def generate_repos(jobs_cfg_dir):
+ yaml_files = _get_launchpad_ci_files(jobs_cfg_dir)
+ shuffle(yaml_files)
+ for yaml_file in yaml_files:
+
+ data_logger = logging.getLogger(yaml_file.name)
+ try:
+ with yaml_file.open() as fh:
+ data = yaml.safe_load(fh)[0]
+ except (yaml.YAMLError, IndexError):
+ data_logger.error("Unable to load job config")
+ continue
+ else:
+ try:
+ project = data["project"]
+ name = project["name"]
+ lp_path = project["repo_lp_path"]
+ series = project.get("ubuntu_series")
+ except KeyError:
+ data_logger.error(
+ "...doesn't look like a launchpad-ci config, skipping"
+ )
+ continue
+ else:
+ repo = Repo(name, lp_path=lp_path, series=series)
+ data_logger.debug(f"Found {repo}")
+ yield repo, data_logger
+
+
+def handle_reviewable_jobs(args, lp):
+ """Check for MPs that can be reviewed."""
+ job_list = []
+ for repo, repo_logger in generate_repos(args.jobs_cfg_dir):
+ git_repo = get_repository(lp, repo.lp_path)
+ if git_repo is None:
+ repo_logger.error(f"Unable to load git repo at {repo.lp_path}")
+ continue
+ for proposal in generate_reviewable_proposals(args, git_repo, repo_logger):
+ job = get_job_info(proposal)
+ del job["MP"]
+ job["NAME"] = repo.name
+ job["SERIES"] = repo.series
+ job_list.append(job)
+ print(json.dumps(job_list))
+ return 0
+
+
+def handle_mergable_jobs(args, lp):
+ """Check for MPs that can be merged."""
+ job_list = []
+ for repo, repo_logger in generate_repos(args.jobs_cfg_dir):
+ git_repo = get_repository(lp, repo.lp_path)
+ if git_repo is None:
+ repo_logger.error(f"Unable to load git repo at {repo.lp_path}")
+ continue
+ for proposal in generate_mergable_proposals(lp, git_repo):
+ job = get_job_info(proposal)
+ del job["MP"]
+ job["NAME"] = repo.name
+ job["SERIES"] = repo.series
+ job_list.append(job)
+ print(json.dumps(job_list))
+ return 0
+
+
+def handle_mark_mp(args, lp):
+ job_info = {
+ "LP_REPO_SRC": args.repo_src,
+ "LP_BRANCH_SRC": args.branch_src,
+ "LP_REPO_DEST": args.repo_dest,
+ "LP_BRANCH_DEST": args.branch_dest,
+ "LP_COMMIT_SHA1": args.commit,
+ }
+ branch_info = get_branch_info(job_info)
+ proposal = find_proposal(lp, job_info)
+ if not proposal:
+ print("Unable to find merge proposal.")
+ return 1
+
+ if args.start_review:
+ lp.me = lp.me
+ for vote in proposal.votes:
+ if lp.me == vote.reviewer:
+ # Already a reviewer.
+ return 0
+
+ # New review set as the person running unit tests.
+ proposal.nominateReviewer(review_type="unittests", reviewer=lp.me)
+ proposal.lp_save()
+ return 0
+
+ if args.fail_review:
+ subject = "Re: [UNITTESTS] %s - TESTS FAILED" % branch_info
+ comment = "UNIT TESTS\n%s\n\nSTATUS: FAILED\nLOG: %s\nCOMMIT: %s" % (
+ branch_info,
+ args.fail_review,
+ args.commit,
+ )
+ proposal.createComment(subject=subject, content=comment, vote="Needs Fixing")
+ proposal.lp_save()
+ return 0
+
+ if args.succeed_review:
+ subject = "Re: [UNITTESTS] %s - TESTS PASS" % branch_info
+ comment = "UNIT TESTS\n%s\n\nSTATUS: SUCCESS\nCOMMIT: %s" % (
+ branch_info,
+ args.commit,
+ )
+ proposal.createComment(subject=subject, content=comment, vote="Approve")
+ proposal.lp_save()
+ return 0
+
+ if args.start_merge:
+ subject = "Re: [Merge] %s - LANDING STARTED" % branch_info
+ comment = "LANDING\n%s\n\nSTATUS: QUEUED\nLOG: %s" % (
+ branch_info,
+ args.start_merge,
+ )
+ proposal.createComment(subject=subject, content=comment)
+ return 0
+
+ if args.fail_merge:
+ subject = "Re: [Merge] %s - LANDING FAILED" % branch_info
+ comment = "LANDING\n%s\n\nSTATUS: FAILED BUILD\nLOG: %s" % (
+ branch_info,
+ args.fail_merge,
+ )
+ proposal.createComment(subject=subject, content=comment)
+ proposal.setStatus(status="Needs review")
+ proposal.lp_save()
+ return 0
+
+ if args.succeed_merge:
+ proposal.setStatus(status="Merged")
+ proposal.lp_save()
+ mark_bugs_fix_committed(proposal)
+ return 0
+
+ return 1
+
+
+def handle_merge(args, lp):
+ work_dir = args.work_dir if args.work_dir else tempfile.mkdtemp()
+ # allow specifying the same target to just run tests on a branch
+ same_target = (
+ (args.repo_dest, args.branch_dest) == (args.repo_src, args.branch_src)
+ ) or (not args.repo_src and not args.branch_src)
+
+ os.chdir(work_dir)
+ git("clone", args.repo_dest, ".", "--branch", args.branch_dest)
+ if same_target:
+ return
+ git("remote", "add", "source", args.repo_src)
+ git("fetch", "source")
+ name = git(
+ "show", '--format="%aN"', f"source/{args.branch_src}", "-q", return_output=True
+ )
+ email = git(
+ "show", '--format="%aE"', f"source/{args.branch_src}", "-q", return_output=True
+ )
+ git("config", "user.name", name)
+ git("config", "user.email", email)
+ git("merge", "--squash", f"source/{args.branch_src}")
+ if args.commit_msg_file:
+ commit_msg = args.commit_msg_file.read()
+ else:
+ commit_msg = "Merge into destination for testing the build."
+ git("commit", "-a", "-m", commit_msg)
+ if args.push:
+ git("push", "origin", f"HEAD:{args.branch_dest}")
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Communicate with Launchpad.")
+ parser.add_argument(
+ "--credentials", help="Credentials file to login to login to launchpad."
+ )
+
+ subcommands = parser.add_subparsers(help="sub-command help")
+ reviewable_jobs_parser = subcommands.add_parser("reviewable-jobs")
+ reviewable_jobs_parser.add_argument(
+ "jobs_cfg_dir",
+ type=Path,
+ help="Path to directory containing yaml launchpad-ci configs listing the repos to look for open MPs in.",
+ )
+ reviewable_jobs_parser.set_defaults(func=handle_reviewable_jobs)
+
+ mergeable_jobs_parser = subcommands.add_parser("mergeable-jobs")
+ mergeable_jobs_parser.add_argument(
+ "jobs_cfg_dir",
+ type=Path,
+ help="Path to directory containing yaml launchpad-ci configs listing the repos to look for open MPs in.",
+ )
+ mergeable_jobs_parser.set_defaults(func=handle_mergable_jobs)
+
+ mark_mp_parser = subcommands.add_parser("mark-mp")
+ mark_mp_parser.add_argument(
+ "--start-review",
+ action="store_true",
+ default=False,
+ help="Mark the merge proposal that unit testing has started.",
+ )
+ mark_mp_parser.add_argument(
+ "--fail-review", help="Mark the merge proposal as failed unit testing."
+ )
+ mark_mp_parser.add_argument(
+ "--succeed-review",
+ action="store_true",
+ default=False,
+ help="Mark the merge proposal as passed testing.",
+ )
+ mark_mp_parser.add_argument(
+ "--start-merge",
+ action="store_true",
+ default=False,
+ help="Mark the merge proposal that unit testing has started.",
+ )
+ mark_mp_parser.add_argument(
+ "--fail-merge", help="Mark the merge proposal as failed unit testing."
+ )
+ mark_mp_parser.add_argument(
+ "--succeed-merge",
+ action="store_true",
+ default=False,
+ help="Mark the merge proposal as passed testing.",
+ )
+ mark_mp_parser.add_argument(
+ "--repo-src", help="Source repository of the merge proposal."
+ )
+ mark_mp_parser.add_argument(
+ "--branch-src", help="Source branch of the merge proposal."
+ )
+ mark_mp_parser.add_argument(
+ "--repo-dest", help="Destination repository of the merge proposal."
+ )
+ mark_mp_parser.add_argument(
+ "--branch-dest", help="Destination branch of the merge proposal."
+ )
+ mark_mp_parser.add_argument("--commit", help="SHA1 commit hash that was tested.")
+ mark_mp_parser.set_defaults(func=handle_mark_mp)
+
+ merge_parser = subcommands.add_parser("merge")
+ merge_parser.add_argument(
+ "--work-dir", help="Directory where the merge will take place"
+ )
+ merge_parser.add_argument(
+ "--repo-src", help="Source repository of the merge proposal."
+ )
+ merge_parser.add_argument(
+ "--branch-src", help="Source branch of the merge proposal."
+ )
+ merge_parser.add_argument(
+ "--repo-dest", help="Destination repository of the merge proposal."
+ )
+ merge_parser.add_argument(
+ "--branch-dest", help="Destination branch of the merge proposal."
+ )
+ merge_parser.add_argument(
+ "--commit-msg-file", help="Commit message file.", type=argparse.FileType("r")
+ )
+ merge_parser.add_argument(
+ "--push",
+ help="Whether to push the commits.",
+ action="store_true",
+ default=False,
+ )
+ merge_parser.set_defaults(func=handle_merge)
+
+ args = parser.parse_args()
+ lp = login_to_lp(args.credentials)
+ return args.func(args, lp)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
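
For reference, here is a minimal standalone sketch (not part of this MP) of the milestone-selection order that `find_target_milestone()` implements, using stand-in milestone objects that carry only `name` and `date_targeted`:

```
from dataclasses import dataclass
from datetime import datetime
from typing import Optional


@dataclass
class FakeMilestone:
    # stand-in for a Launchpad milestone; only the fields the algorithm uses
    name: str
    date_targeted: Optional[datetime]


def pick_milestone(milestones, now):
    # 1) closest future milestone, 2) lexically smallest undated one,
    # 3) most recent past milestone (None if there are no milestones at all)
    future = [m for m in milestones if m.date_targeted and m.date_targeted > now]
    if future:
        return min(future, key=lambda m: m.date_targeted)
    undated = [m for m in milestones if m.date_targeted is None]
    if undated:
        return min(undated, key=lambda m: m.name)
    past = [m for m in milestones if m.date_targeted and m.date_targeted < now]
    return max(past, key=lambda m: m.date_targeted) if past else None


milestones = [
    FakeMilestone("3.3.1", datetime(2023, 3, 1)),
    FakeMilestone("3.4.0", datetime(2023, 10, 1)),
    FakeMilestone("3.5.0", None),
]
print(pick_milestone(milestones, datetime(2023, 6, 1)).name)  # -> 3.4.0
```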
diff --git a/jobs/launchpad-ci/launchpad-ci-utils.groovy b/jobs/launchpad-ci/launchpad-ci-utils.groovy
new file mode 100644
index 0000000..8808e3f
--- /dev/null
+++ b/jobs/launchpad-ci/launchpad-ci-utils.groovy
@@ -0,0 +1,58 @@
+import groovy.json.JsonSlurper
+
+@NonCPS
+def parseJobs(jsonString) {
+ def parsed_jobs = new JsonSlurper().parseText(jsonString)
+
+ // JsonSlurper returns a non-serializable LazyMap,
+ // so copy it into a regular map before returning
+ if(parsed_jobs.size() > 0) {
+ def jobs = [parsed_jobs.size()]
+ for(i = 0; i < parsed_jobs.size(); i += 1) {
+ def job = [:]
+ job['LP_REPO_SRC'] = parsed_jobs[i]['LP_REPO_SRC']
+ job['LP_BRANCH_SRC'] = parsed_jobs[i]['LP_BRANCH_SRC']
+ job['LP_REPO_DEST'] = parsed_jobs[i]['LP_REPO_DEST']
+ job['LP_BRANCH_DEST'] = parsed_jobs[i]['LP_BRANCH_DEST']
+ job['LP_COMMIT_MSG'] = parsed_jobs[i]['LP_COMMIT_MSG']
+ // This is only set in the reviewer job
+ job['LP_COMMIT_SHA1'] = parsed_jobs[i]['LP_COMMIT_SHA1']
+ job['LP_MP_LINK'] = parsed_jobs[i]['LP_MP_LINK']
+ job['SERIES'] = parsed_jobs[i]['SERIES']
+ job['NAME'] = parsed_jobs[i]['NAME']
+ jobs[i] = job
+ }
+ return jobs
+ } else {
+ return []
+ }
+}
+
+// create a tester job
+def makeTestBuild(name, series, job) {
+ if (! series) {
+ series = repoUbuntuRelease(job.LP_REPO_SRC, job.LP_BRANCH_SRC)
+ }
+ println("Running job for $job.LP_BRANCH_SRC into $job.LP_BRANCH_DEST on $series")
+ return build(
+ job: "$job.NAME-tester",
+ propagate: false,
+ parameters: [
+ [$class: 'StringParameterValue', name: 'LP_REPO_SRC', value: job.LP_REPO_SRC],
+ [$class: 'StringParameterValue', name: 'LP_REPO_DEST', value: job.LP_REPO_DEST],
+ [$class: 'StringParameterValue', name: 'LP_BRANCH_SRC', value: job.LP_BRANCH_SRC],
+ [$class: 'StringParameterValue', name: 'LP_BRANCH_DEST', value: job.LP_BRANCH_DEST],
+ [$class: 'StringParameterValue', name: 'SERIES', value: series]
+ ]
+ )
+}
+
+// run a command and return the output
+def run(command) {
+ return sh(script:command, returnStdout: true).trim()
+}
+
+// return the target ubuntu release for the repository/branch
+def repoUbuntuRelease(repository, branch) {
+ return run("ci/repo-ubuntu-release $repository $branch")
+}
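
For context, `parseJobs()` above consumes the JSON list that the `reviewable-jobs` and `mergeable-jobs` subcommands print via `json.dumps(job_list)`. A sketch of one entry, with hypothetical values, looks like this:

```
import json

job_list = [
    {
        "LP_REPO_SRC": "~example-dev/maas",        # hypothetical source repo
        "LP_BRANCH_SRC": "fix-something",          # hypothetical source branch
        "LP_REPO_DEST": "~maas-committers/maas",
        "LP_BRANCH_DEST": "master",
        "LP_COMMIT_MSG": "Fix something",          # hypothetical
        "LP_COMMIT_SHA1": "0123456789abcdef",      # hypothetical
        "LP_MP_LINK": "https://code.launchpad.net/...",  # elided
        "NAME": "maas",
        "SERIES": "22.04",
    }
]
print(json.dumps(job_list))
```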
diff --git a/jobs/launchpad-ci/launchpad-ci.groovy b/jobs/launchpad-ci/launchpad-ci.groovy
new file mode 100644
index 0000000..9258b92
--- /dev/null
+++ b/jobs/launchpad-ci/launchpad-ci.groovy
@@ -0,0 +1,13 @@
+{% macro clone_ci_repo_step() %}
+withCredentials([
+ file(credentialsId: 'lp-lander-credentials', variable: 'CREDS'),
+ sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')
+]) {
+ sh '''
+ rm -rf ci maas-ci-internal
+ export GIT_SSH_COMMAND="ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+ git clone git+ssh://${SSHUSER}@git.launchpad.net/~maas-committers/maas-ci/+git/maas-ci-internal --branch main --depth 1
+ mv maas-ci-internal/jobs/launchpad-ci ci
+ '''
+}
+{% endmacro %}
diff --git a/jobs/launchpad-ci/launchpad-ci.yaml b/jobs/launchpad-ci/launchpad-ci.yaml
new file mode 100644
index 0000000..e058165
--- /dev/null
+++ b/jobs/launchpad-ci/launchpad-ci.yaml
@@ -0,0 +1,91 @@
+- job-group:
+ name: '{name}-launchpad-ci'
+ jobs:
+ - '{name}-tester'
+ - 'reviewer'
+ - 'lander'
+
+- job:
+ name: 'reviewer'
+ description: |
+ Launchpad CI reviewer.
+
+ Maintained in lp:~maas-committers/maas-ci/+git/maas-ci-internal.
+ project-type: pipeline
+ # Can't use #!include-jinja2 on a job, just a job-template since
+ # a job doesn't have context variables to render the template
+ dsl: !include-raw:
+ - launchpad-ci-utils.groovy
+ - job-reviewer.groovy
+ triggers:
+ - timed: 'H/2 * * * *'
+
+
+- job:
+ name: 'lander'
+ description: |
+ Launchpad CI lander.
+
+ Maintained in lp:~maas-committers/maas-ci/+git/maas-ci-internal.
+ project-type: pipeline
+ # Can't use #!include-jinja2 on a job, just a job-template since
+ # a job doesn't have context variables to render the template
+ dsl: !include-raw:
+ - launchpad-ci-utils.groovy
+ - job-lander.groovy
+ triggers:
+ - timed: 'H/2 * * * *'
+
+ # overridable parameters
+ lander_agent_label: ''
+
+
+- job-template:
+ name: '{name}-tester'
+ description: |
+ Launchpad CI tester for {name}.
+
+ Maintained in lp:~maas-committers/maas-ci/+git/maas-ci-internal.
+ project-type: pipeline
+ dsl: !include-jinja2: job-tester.groovy
+ parameters:
+ - string:
+ name: LP_REPO_SRC
+ description: 'Git repository to merge from.'
+ - string:
+ name: LP_BRANCH_SRC
+ description: 'Git branch to merge from.'
+ - string:
+ name: LP_REPO_DEST
+ description: 'Git repository to merge into.'
+ default: '{repo_lp_path}'
+ - string:
+ name: LP_BRANCH_DEST
+ description: 'Git branch to merge into.'
+ default: '{repo_lp_branch}'
+ - string:
+ description: 'Ubuntu series to run tests on.'
+ name: SERIES
+
+ # overridable parameters
+ repo_lp_branch: master
+ tester_agent_label: ''
+ # if set to true, an image named 'build-env-{name}-$LP_BRANCH_DEST:$SERIES'
+ # is used
+ use_custom_build_image: false
+ deps_command: '/bin/true'
+ build_command: '/bin/true'
+ lint_command: '/bin/true'
+ test_command: '/bin/true'
+ post_test_actions: ''
+
+
+- view:
+ name: 'lp-ci'
+ description: |
+ All the launchpad-ci jobs.
+ view-type: list
+ regex: '.*-tester'
+ job-name:
+ - 'reviewer'
+ - 'lander'
diff --git a/jobs/launchpad-ci/repo-ubuntu-release b/jobs/launchpad-ci/repo-ubuntu-release
new file mode 100755
index 0000000..1a34bee
--- /dev/null
+++ b/jobs/launchpad-ci/repo-ubuntu-release
@@ -0,0 +1,67 @@
+#!/bin/bash -e
+#
+# Return the target Ubuntu release number for the specified Launchpad repository/branch.
+#
+# The SSHUSER variable is used as the username for the repository
+#
+
+clone_repo() {
+ local repository="$1"
+ local branch="$2"
+ local repo_path="$3"
+
+ local user_part
+ [ "$SSHUSER" ] && user_part="${SSHUSER}@" || user_part=""
+
+ trap 'rm -rf $repo_path' EXIT
+ git clone -q "git+ssh://${user_part}${repository}" --single-branch --branch "$branch" --depth 1 "$repo_path"
+}
+
+series_from_deb() {
+ local repo="$1"
+
+ local changelog="${repo}/debian/changelog"
+ [ -f "$changelog" ] || return 0
+ sed -n '1 s/.* \([a-z]\+\);.*/\1/p' "$changelog"
+}
+
+release_from_snap() {
+ local repo="$1"
+
+ local snapcraft_yaml="${repo}/snap/snapcraft.yaml"
+ [ -f "$snapcraft_yaml" ] || return 0
+
+ local series_year
+ series_year="$(sed -n 's/^base: core\([0-9]\+\)/\1/p' "$snapcraft_yaml")"
+ [ "$series_year" ] || return 0
+ echo "${series_year}.04"
+}
+
+ubuntu_release_from_series() {
+ local series="$1"
+ ubuntu-distro-info --series="$series" -r | cut -d' ' -f1
+}
+
+
+repository="$1"
+branch="$2"
+
+if [ -z "$branch" ]; then
+ echo "Usage: $(basename "$0") <repository> <branch>" >&2
+ exit 1
+fi
+
+# accept both full and repo-only URLs
+if [[ ! "$repository" =~ ^git\.launchpad\.net ]]; then
+ repository="git.launchpad.net/$repository"
+fi
+
+repo_path="$(mktemp -d)"
+clone_repo "$repository" "$branch" "$repo_path"
+
+series="$(series_from_deb "$repo_path")"
+if [ "$series" ]; then
+ ubuntu_release_from_series "$series"
+else
+ release_from_snap "$repo_path"
+fi
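
For reference, a small Python sketch (with hypothetical input lines) of the two extraction rules used above: the series codename from the first debian/changelog line, and the release number from a snapcraft `base: coreNN` line.

```
import re

# hypothetical first line of debian/changelog
changelog_line = "maas (1:3.4.0-0ubuntu1) jammy; urgency=medium"
m = re.search(r" ([a-z]+);", changelog_line)
print(m.group(1) if m else "")  # -> jammy; ubuntu-distro-info then maps it to 22.04

# hypothetical base line from snap/snapcraft.yaml
base_line = "base: core22"
m = re.match(r"base: core(\d+)", base_line)
print(f"{m.group(1)}.04" if m else "")  # -> 22.04
```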
diff --git a/jobs/maas-ci-internal.yaml b/jobs/maas-ci-internal.yaml
new file mode 100644
index 0000000..c0bb693
--- /dev/null
+++ b/jobs/maas-ci-internal.yaml
@@ -0,0 +1,9 @@
+- project:
+ name: maas-ci-internal
+ repo_lp_path: ~maas-committers/maas-ci/+git/maas-ci-internal
+ ubuntu_series: '22.04'
+ deps_command: 'make install-dependencies'
+ build_command: 'make setup'
+ test_command: 'make check'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/maas-ci.yaml b/jobs/maas-ci.yaml
new file mode 100644
index 0000000..678ec32
--- /dev/null
+++ b/jobs/maas-ci.yaml
@@ -0,0 +1,13 @@
+- project:
+ name: maas
+ repo_lp_path: ~maas-committers/maas
+ use_custom_build_image: true
+ deps_command: 'make install-dependencies'
+ build_command: 'make build'
+ lint_command: 'make lint'
+ test_command: 'make test'
+ post_test_actions: |
+ sh 'gzip </run/build/checkout/db/backend.log >$WORKSPACE/postgres.log.gz || echo "unable to collect Postgres logs"'
+ archiveArtifacts artifacts: 'postgres.log.gz', allowEmptyArchive: true
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/maas-kpi.yaml b/jobs/maas-kpi.yaml
new file mode 100644
index 0000000..3487d79
--- /dev/null
+++ b/jobs/maas-kpi.yaml
@@ -0,0 +1,8 @@
+- project:
+ name: maas-kpi
+ repo_lp_path: maas-kpi
+ ubuntu_series: '22.04'
+ deps_command: 'make deps'
+ lint_command: 'make lint'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/maas-openapi-duplicator.groovy b/jobs/maas-openapi-duplicator.groovy
new file mode 100644
index 0000000..a3914ce
--- /dev/null
+++ b/jobs/maas-openapi-duplicator.groovy
@@ -0,0 +1,90 @@
+// shared between stages and the post section
+def schema
+def schema_updated = false
+
+pipeline {
+ agent {
+ docker {
+ image 'ubuntu:{{ series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'maas-tester'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+
+ stages {
+ stage('Prepare') {
+ steps {
+ cleanWs()
+ sh '''
+ if [ ! -z \$http_proxy ]; then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+ export DEBIAN_FRONTEND=noninteractive
+ apt update
+ apt install -y devscripts tox python3-pip
+ '''
+ }
+ }
+ stage('Checkout') {
+ steps {
+ withCredentials([file(credentialsId: 'lp-lander-sshkey', variable: 'SSHKEY')]) {
+ withEnv(["GIT_SSH_COMMAND=ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"]) {
+ checkout(
+ [
+ $class: "GitSCM",
+ extensions: [[$class: "RelativeTargetDirectory", relativeTargetDir: "maas"]],
+ branches: [[name: LP_BRANCH]],
+ userRemoteConfigs: [
+ [
+ url: "git+ssh://maas-lander@xxxxxxxxxxxxxxxxx/${LP_REPO}",
+ credentialsId: "lp-lander-sshkey",
+ name: "maas"
+ ]
+ ]
+ ]
+ )
+ }
+ }
+ }
+ }
+ stage('Lint Schema') {
+ steps {
+ dir("maas") {
+ sh """
+ make lint-oapi
+ """
+ script {
+ schema = readFile(file: "openapi.yaml")
+ }
+ }
+ }
+ }
+ stage('Push Schema') {
+ steps {
+ script {
+ sh "git clone ${GH_REPO} oapi_folder"
+ dir("oapi_folder") {
+ schema_updated = !fileExists("${GH_FILE_LOC}") || schema != readFile(file: "${GH_FILE_LOC}")
+ if (schema_updated) {
+ writeFile(file: "${GH_FILE_LOC}", text: schema)
+ sh """
+ git add ${GH_FILE_LOC}
+ git commit -m "updating openapi schema"
+ git push
+ """
+ }
+ }
+ sh "rm -rf oapi_folder"
+ }
+ }
+ }
+ }
+ post {
+ success {
+ script {
+ if (schema_updated) {
+ mattermostSend (color: 'green', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :success: updated OpenAPI Schema")
+ }
+ }
+ }
+ failure {
+ mattermostSend (color: 'red', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :fire: failed to update OpenAPI Schema")
+ }
+ }
+}
diff --git a/jobs/maas-openapi-duplicator.yaml b/jobs/maas-openapi-duplicator.yaml
new file mode 100644
index 0000000..3fd24e0
--- /dev/null
+++ b/jobs/maas-openapi-duplicator.yaml
@@ -0,0 +1,30 @@
+- project:
+ name: update-openapi-schema
+ jobs:
+ - update-openapi-schema
+
+- job-template:
+ name: update-openapi-schema
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - string:
+ name: LP_REPO
+ description: 'Target git repository'
+ default: '~maas-committers/maas'
+ - string:
+ name: LP_BRANCH
+ description: 'Target git branch'
+ default: 'master'
+ - string:
+ name: GH_REPO
+ description: 'GitHub repository to push schema to'
+ default: 'https://github.com/maas/maas-openapi-yaml'
+ - string:
+ name: GH_FILE_LOC
+ description: 'Location of file in GitHub repo to push schema to'
+ default: 'openapi.yaml'
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 10
+ dsl: !include-jinja2: maas-openapi-duplicator.groovy
diff --git a/jobs/maas-release-tools.yaml b/jobs/maas-release-tools.yaml
new file mode 100644
index 0000000..b64fd92
--- /dev/null
+++ b/jobs/maas-release-tools.yaml
@@ -0,0 +1,11 @@
+---
+- project:
+ name: 'maas-release-tools'
+ repo_lp_path: '~maas-committers/maas/+git/maas-release-tools'
+ repo_lp_branch: 'main'
+ deps_command: 'make deps'
+ build_command: 'make bins'
+ lint_command: 'make lint'
+ ubuntu_series: '22.04'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/maas-sampledata-dump.groovy b/jobs/maas-sampledata-dump.groovy
new file mode 100644
index 0000000..a331171
--- /dev/null
+++ b/jobs/maas-sampledata-dump.groovy
@@ -0,0 +1,87 @@
+pipeline {
+ agent {
+ docker {
+ image 'build-env-maas-{{ series }}:{{ os }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'maas-tester'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+ stages {
+ stage('Prepare') {
+ steps {
+ sh '''
+ if [ ! -z \$http_proxy ]
+ then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get -y update
+ apt install -y git make sudo
+
+ useradd ubuntu -d /home/ubuntu
+ mkdir -p /home/ubuntu
+ chown ubuntu:ubuntu /home/ubuntu
+ echo "ubuntu ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/90-docker
+ '''
+ }
+ }
+ stage('Checkout') {
+ steps {
+ sh "rm -rf checkout"
+ withCredentials([sshUserPrivateKey(credentialsId: 'launchpad-ci-ssh-key', keyFileVariable: 'SSHKEY', usernameVariable: 'SSHUSER')]) {
+ script {
+ env.GIT_SSH_COMMAND = "ssh -i $SSHKEY -l $SSHUSER"
+ if (env.GIT_SSH_COMMAND_EXTRA) {
+ env.GIT_SSH_COMMAND += " ${env.GIT_SSH_COMMAND_EXTRA}"
+ }
+ }
+ sh '''
+ git clone --recurse-submodules git+ssh://git.launchpad.net/{{ lp_git_repo }} --single-branch --branch {{ series }} --depth 1 checkout
+ chown -R ubuntu:ubuntu checkout
+ '''
+ script {
+ def shortHash = sh(returnStdout: true, script: "cd checkout && sudo -E -H -u ubuntu git rev-parse --short HEAD").trim()
+ currentBuild.description = "{{ os }}/{{ lp_git_repo }}/{{ series }} - ${shortHash}"
+ }
+ }
+ }
+ }
+ stage('Install deps') {
+ steps {
+ sh '''
+ sudo -E -H -u ubuntu make -C checkout install-dependencies
+ '''
+ }
+ }
+ stage('DB setup') {
+ steps {
+ sh '''
+ sudo -E -H -u ubuntu make -C checkout syncdb
+ '''
+ }
+ }
+ stage('Sampledata') {
+ steps {
+ sh '''
+ sudo -E -H -u ubuntu make -C checkout SAMPLEDATA_MACHINES={{ machines }} sampledata
+ '''
+ }
+ }
+ stage('DB dump') {
+ steps {
+ script {
+ env.DB_DUMP = "maasdb-{{ os }}-{{ series }}-{{ machines }}.dump"
+ }
+ sh '''
+ sudo -E -H -u ubuntu make -C checkout dumpdb
+ '''
+ archiveArtifacts artifacts: "checkout/maasdb-{{ os }}-{{ series }}-{{ machines }}.dump"
+ }
+ }
+ }
+}
diff --git a/jobs/maas-sampledata-dump.yaml b/jobs/maas-sampledata-dump.yaml
new file mode 100644
index 0000000..343bbde
--- /dev/null
+++ b/jobs/maas-sampledata-dump.yaml
@@ -0,0 +1,59 @@
+- project:
+ name: maas-sampledata
+ machines:
+ - 10
+ - 100
+ - 1000
+ series:
+ - master:
+ build-freq: 'H H(3-4) * * *'
+ - 3.3
+ - 3.2
+ - 3.1
+ os:
+ - 22.04
+ - 20.04
+ exclude:
+ - series: master
+ os: 20.04
+ - series: 3.3
+ os: 20.04
+ - series: 3.2
+ os: 22.04
+ - series: 3.1
+ os: 22.04
+ jobs:
+ - '{name}-dumper-{os}-{series}-{machines}'
+ views:
+ - '{name}-dumper'
+
+- view-template:
+ name: '{name}-dumper'
+ description: 'MAAS sampledata generators'
+ view-type: list
+ regex: '{name}-dumper-.*'
+
+- job-template:
+ name: '{name}-dumper-{os}-{series}-{machines}'
+ description: "Sampledata dumper for {name}, generating {machines} machines"
+ build-freq: 'H H(6-11) * * 7'
+ triggers:
+ - timed: "{build-freq}"
+ parameters:
+ - string:
+ description: "Random seed for factories"
+ name: "MAAS_RAND_SEED"
+ default: ""
+ - string:
+ description: "Random seed for hash"
+ name: "PYTHONHASHSEED"
+ default: ""
+ logrotate:
+ artifactNumToKeep: 20
+ properties:
+ - copyartifact:
+ projects: "is-maas-fast-yet*"
+ dsl: !include-jinja2: maas-sampledata-dump.groovy
+
+ # parameters
+ lp_git_repo: maas
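
As an aside, the `{name}-dumper-{os}-{series}-{machines}` template expands to one job per axis combination minus the `exclude` pairs; a quick sketch of the resulting matrix (assuming the usual Jenkins Job Builder expansion):

```
from itertools import product

machines = [10, 100, 1000]
series = ["master", "3.3", "3.2", "3.1"]
oses = ["22.04", "20.04"]
excluded = {("master", "20.04"), ("3.3", "20.04"), ("3.2", "22.04"), ("3.1", "22.04")}

jobs = [
    f"maas-sampledata-dumper-{o}-{s}-{m}"
    for s, o, m in product(series, oses, machines)
    if (s, o) not in excluded
]
print(len(jobs))   # 12 jobs
print(jobs[0])     # maas-sampledata-dumper-22.04-master-10
```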
diff --git a/jobs/maas-site-manager-ci.yaml b/jobs/maas-site-manager-ci.yaml
new file mode 100644
index 0000000..2be4bc0
--- /dev/null
+++ b/jobs/maas-site-manager-ci.yaml
@@ -0,0 +1,12 @@
+- project:
+ name: maas-site-manager
+ repo_lp_path: ~maas-committers/maas-site-manager
+ use_custom_build_image: true
+ repo_lp_branch: main
+ ubuntu_series: '22.10'
+ deps_command: 'make ci-dep'
+ build_command: 'make ci-build'
+ lint_command: 'make ci-lint'
+ test_command: 'make ci-test'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/maas-terraform-test.yaml b/jobs/maas-terraform-test.yaml
new file mode 100644
index 0000000..03d86e8
--- /dev/null
+++ b/jobs/maas-terraform-test.yaml
@@ -0,0 +1,86 @@
+---
+- project:
+ name: maas-terraform
+ deps_dir: '/home/ubuntu/workspace/gh-maas-terraform-tester/maas-ci-internal'
+ build_dir: '/home/ubuntu/workspace/gh-maas-terraform-tester/maas-ci-internal'
+ setup_env_dir: '/home/ubuntu/workspace/gh-maas-terraform-tester/maas-ci-internal'
+ test_dir: /run/build/tests
+ deps_command: ./utilities/terraform/terraform_deps
+ build_command: ./utilities/terraform/terraform_build
+ setup_env_command: ./utilities/terraform/terraform_setup_env
+ test_command: ./terraform_test.py
+ ubuntu_series: '22.04'
+ repo_gh_org: maas
+ repo_gh_branch: master
+ repo_gh_path: terraform-provider-maas
+ repo_gh_test_branch: main
+ repo_gh_test_path: maas-terraform-e2e-tests
+ tester_agent_label: 'ci-lab'
+ jobs:
+ - 'gh-{name}-tester'
+ views:
+ - 'gh-{name}'
+ triggers:
+ - timed: '1H H * * *'
+ ssh_key: 'maas-lander-ssh-key'
+ api_token_env_var: 'TF_VAR_apikey'
+ extra_parameters:
+ - string:
+ name: PRIMARY_RACK_CONTROLLER
+ description: "The rack controller to manage the VLAN DHCP will be enabled on"
+ default: "ckbafg"
+ - string:
+ name: TF_VAR_maas_url
+ description: "The MAAS URL for Terraform to connect to"
+ default: 'http://10.245.136.7:5240/MAAS'
+ - string:
+ name: TF_VAR_test_machine_power_type
+ description: |
+ The type of BMC for the physical machine
+ Terraform will spin up
+ default: 'ipmi'
+ - string:
+ name: TF_VAR_test_machine_hostname
+ description: |
+ The hostname of an existing machine in MAAS to deploy
+ - string:
+ name: TF_VAR_test_machine_power_address
+ description: "The IP Address for MAAS to power on the physical machine"
+ default: '10.245.143.121'
+ - string:
+ name: TF_VAR_test_machine_power_user
+ description: "The User to access the BMC"
+ default: 'labmaas'
+ - credentials:
+ name: TF_VAR_test_machine_power_password
+ description: "The Password to access the BMC"
+ type: secrettext
+ default: 'terraform-power-password'
+ - string:
+ name: TF_VAR_test_machine_boot_mac
+ description: |
+ The MAC address for Terraform to provide
+ MAAS with for booting the physical machine
+ default: 18:66:da:6d:fb:3c
+ - string:
+ name: TF_VAR_path_to_block_device_id
+ description: |
+ The path on the physical machine of
+ a block device to define and partition within MAAS
+ - string:
+ name: TF_VAR_block_device_size
+ description: "The size of the block device to test with"
+ - string:
+ name: TF_VAR_block_device_partition_1_size
+ description: "The size of the first partition of the test block device"
+ - string:
+ name: TF_VAR_block_device_partition_2_size
+ description: "The size of the second partition of the test block device"
+ - string:
+ name: TF_VAR_lxd_address
+ description: "The address of LXD for creating a test VM"
+ maas_api_key: jenkins-maas-token
+ post_test_actions: |
+ sh '''
+ sudo -E -H -u ubuntu "$WORKSPACE/maas-ci-internal/utilities/terraform/terraform_post_actions"
+ '''
diff --git a/jobs/maas-test-db-ci.yaml b/jobs/maas-test-db-ci.yaml
new file mode 100644
index 0000000..f78a6c6
--- /dev/null
+++ b/jobs/maas-test-db-ci.yaml
@@ -0,0 +1,8 @@
+- project:
+ name: maas-test-db
+ ubuntu_series: '22.04'
+ repo_lp_path: ~maas-committers/maas/+git/maas-test-db
+ deps_command: 'sudo apt install -y shellcheck'
+ lint_command: 'make lint'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/maas-ubuntu-devel-test.groovy b/jobs/maas-ubuntu-devel-test.groovy
new file mode 100644
index 0000000..7329fa5
--- /dev/null
+++ b/jobs/maas-ubuntu-devel-test.groovy
@@ -0,0 +1,38 @@
+pipeline {
+ agent {
+ docker {
+ image 'build-env-maas-{{ lp_git_branch }}:{{ ubuntu_devel_series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+ stages {
+ stage('Create test job') {
+ steps {
+ script {
+ def test_build = build(
+ job:'maas-tester',
+ propagate: false,
+ parameters: [
+ [$class: 'StringParameterValue', name: 'LP_REPO_SRC', value: '{{ lp_git_repo }}'],
+ [$class: 'StringParameterValue', name: 'LP_REPO_DEST', value: '{{ lp_git_repo }}'],
+ [$class: 'StringParameterValue', name: 'LP_BRANCH_SRC', value: '{{ lp_git_branch }}'],
+ [$class: 'StringParameterValue', name: 'LP_BRANCH_DEST', value: '{{ lp_git_branch }}' ],
+ [$class: 'StringParameterValue', name: 'SERIES', value: '{{ ubuntu_devel_series }}']
+ ]
+ )
+ def test_result = test_build.getResult()
+ def test_build_url = test_build.getAbsoluteUrl()
+ if (test_result != 'SUCCESS') {
+ mattermostSend(
+ color: 'red',
+ message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :fire: Failed test run for {{ lp_git_repo }}:{{ lp_git_branch }} on {{ ubuntu_devel_series }} (${test_build_url}consoleText)"
+ )
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/maas-ubuntu-devel-test.yaml b/jobs/maas-ubuntu-devel-test.yaml
new file mode 100644
index 0000000..242fa39
--- /dev/null
+++ b/jobs/maas-ubuntu-devel-test.yaml
@@ -0,0 +1,15 @@
+- project:
+ name: maas-ubuntu-devel-test
+ jobs:
+ - maas-ubuntu-devel-test
+
+- job-template:
+ name: maas-ubuntu-devel-test
+ triggers:
+ - timed: '@daily'
+ dsl: !include-jinja2: maas-ubuntu-devel-test.groovy
+
+ # parameters
+ lp_git_repo: maas
+ lp_git_branch: master
+ ubuntu_devel_series: '23.10'
diff --git a/jobs/maas-version-bump.groovy b/jobs/maas-version-bump.groovy
new file mode 100644
index 0000000..d45d05d
--- /dev/null
+++ b/jobs/maas-version-bump.groovy
@@ -0,0 +1,62 @@
+pipeline {
+ agent {
+ docker {
+ image 'ubuntu:{{ series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'maas-tester'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+
+ stages {
+ stage('Prepare') {
+ steps {
+ cleanWs()
+ sh '''
+ if [ ! -z \$http_proxy ]; then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt update
+ apt install -y devscripts tox python3-pip
+ '''
+ }
+ }
+ stage('Bump release') {
+ steps {
+ withCredentials([file(credentialsId: 'lp-lander-sshkey', variable: 'SSHKEY')]) {
+ withEnv(["GIT_SSH_COMMAND=ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"]) {
+ lock(resource: "maas-${LP_BRANCH}") {
+ sh '''
+ mkdir -p /run/build
+ cd /run/build
+ export DEBEMAIL DEBFULLNAME
+ git clone --depth 1 --single-branch --branch ${LP_TOOLS_BRANCH} git+ssh://maas-lander@git.launchpad.net/${LP_TOOLS_REPO} maas-release-tools
+ make -C maas-release-tools bins
+ git clone --depth 1 --single-branch --branch ${LP_BRANCH} git+ssh://maas-lander@git.launchpad.net/${LP_REPO} maas
+ cd maas
+
+ git config user.name "$DEBFULLNAME"
+ git config user.email "$DEBEMAIL"
+ ../maas-release-tools/bin/release-prepare "$RELEASE_VERSION" "$DEB_DISTRO_CODENAME"
+ git push origin HEAD:${LP_BRANCH}
+ '''
+ }
+ }
+ }
+ }
+ }
+ }
+ post {
+ success {
+ mattermostSend (color: 'green', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :success: ${env.DEBFULLNAME} bumped version to ${env.RELEASE_VERSION} on ${env.LP_BRANCH}")
+ }
+ failure {
+ mattermostSend (color: 'red', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :fire: ${env.DEBFULLNAME} failed to bump version to ${env.RELEASE_VERSION} on ${env.LP_BRANCH}")
+ }
+ }
+}
diff --git a/jobs/maas-version-bump.yaml b/jobs/maas-version-bump.yaml
new file mode 100644
index 0000000..1f6852a
--- /dev/null
+++ b/jobs/maas-version-bump.yaml
@@ -0,0 +1,40 @@
+- project:
+ name: maas-version-bump
+ jobs:
+ - maas-version-bump
+
+- job-template:
+ name: maas-version-bump
+ parameters:
+ - string:
+ name: LP_REPO
+ description: 'Target git repository'
+ default: '~maas-committers/maas'
+ - string:
+ name: LP_BRANCH
+ description: 'Target git branch'
+ default: 'master'
+ - string:
+ name: LP_TOOLS_REPO
+ description: 'Git repository for release tools'
+ default: '~maas-committers/maas/+git/maas-release-tools'
+ - string:
+ name: LP_TOOLS_BRANCH
+ description: 'Git branch for release tools'
+ default: 'main'
+ - string:
+ name: RELEASE_VERSION
+ description: The MAAS version to bump to. This must be the Python project version
+ - string:
+ name: DEB_DISTRO_CODENAME
+ description: Codename for the target deb package release
+ - string:
+ name: DEBFULLNAME
+ description: Full name of the releaser (for debian/changelog)
+ - string:
+ name: DEBEMAIL
+ description: E-mail of the releaser (for debian/changelog)
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 10
+ dsl: !include-jinja2: maas-version-bump.groovy
diff --git a/jobs/maasperformance-ci.yaml b/jobs/maasperformance-ci.yaml
new file mode 100644
index 0000000..4c1a8d0
--- /dev/null
+++ b/jobs/maasperformance-ci.yaml
@@ -0,0 +1,10 @@
+- project:
+ name: maas-performance
+ repo_lp_path: ~maas-committers/maas/+git/maas-performance
+ ubuntu_series: '20.04'
+ deps_command: 'make deb-dep'
+ build_command: 'make py-dep'
+ lint_command: 'make lint'
+ test_command: 'make coverage'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/piston-ci.yaml b/jobs/piston-ci.yaml
new file mode 100644
index 0000000..fe64232
--- /dev/null
+++ b/jobs/piston-ci.yaml
@@ -0,0 +1,9 @@
+- project:
+ name: piston
+ repo_lp_path: django-piston3
+ ubuntu_series: '22.04'
+ deps_command: 'sudo apt install -y tox'
+ lint_command: 'tox -e lint'
+ test_command: "TOX_SKIP_ENV='(format|lint)' tox"
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/rebase-branch.groovy b/jobs/rebase-branch.groovy
new file mode 100644
index 0000000..ec7b971
--- /dev/null
+++ b/jobs/rebase-branch.groovy
@@ -0,0 +1,51 @@
+pipeline {
+ agent {
+ docker {
+ image 'ubuntu:{{ series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+
+ stages {
+ stage('Setup') {
+ steps {
+ cleanWs()
+ sh '''
+ if [ ! -z \$http_proxy ]; then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get update
+ apt -y install git
+ '''
+ }
+ }
+ stage('Rebase branch') {
+ steps {
+ withCredentials([file(credentialsId: 'lp-lander-sshkey', variable: 'SSHKEY')]) {
+ withEnv(["GIT_SSH_COMMAND=ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"]) {
+ lock(resource: "rebase-branch") {
+ sh '''
+ mkdir -p /run/build
+ cd /run/build
+ git config --global user.name 'MAAS Lander'
+ git config --global user.email 'maas-lander@xxxxxxxxxxxxx'
+ git config --global safe.directory /run/build/repo
+
+ git clone --branch "$LP_BRANCH_SRC" "git+ssh://maas-lander@xxxxxxxxxxxxxxxxx/$LP_REPO" repo
+ cd repo
+ git rebase "origin/$LP_BRANCH_DEST"
+ git push --force origin "$LP_BRANCH_SRC"
+ '''
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/jobs/rebase-branch.yaml b/jobs/rebase-branch.yaml
new file mode 100644
index 0000000..563eafb
--- /dev/null
+++ b/jobs/rebase-branch.yaml
@@ -0,0 +1,19 @@
+- project:
+ name: rebase-branch
+ jobs:
+ - rebase-branch
+
+- job-template:
+ name: rebase-branch
+ project-type: pipeline
+ parameters:
+ - string:
+ name: LP_REPO
+ description: 'Git repository to work on.'
+ - string:
+ name: LP_BRANCH_SRC
+ description: 'Git branch to rebase.'
+ - string:
+ name: LP_BRANCH_DEST
+ description: 'Git branch to rebase onto.'
+ dsl: !include-jinja2: rebase-branch.groovy
diff --git a/jobs/system-tests-ci.yaml b/jobs/system-tests-ci.yaml
new file mode 100644
index 0000000..7313b44
--- /dev/null
+++ b/jobs/system-tests-ci.yaml
@@ -0,0 +1,8 @@
+- project:
+ name: system-tests
+ repo_lp_path: ~maas-committers/maas-ci/+git/system-tests
+ deps_command: 'sudo apt install -y tox'
+ lint_command: 'tox -e lint,mypy'
+ ubuntu_series: '22.04'
+ jobs:
+ - '{name}-launchpad-ci'
diff --git a/jobs/update-kpis.groovy b/jobs/update-kpis.groovy
new file mode 100644
index 0000000..76453a3
--- /dev/null
+++ b/jobs/update-kpis.groovy
@@ -0,0 +1,71 @@
+pipeline {
+ agent {
+ docker {
+ image 'ubuntu:{{ series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'maas-tester'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+
+ stages {
+ stage('Checkout') {
+ steps {
+ git credentialsId: 'launchpad-ci-ssh-key', url: 'git+ssh://git.launchpad.net/maas-kpi'
+ }
+ }
+ stage('System setup') {
+ steps {
+ script {
+ sh '''
+ if [ ! -z $http_proxy ]; then
+ echo "Acquire::http::proxy \\"$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+ export DEBIAN_FRONTEND=noninteractive
+ apt update
+ apt -y install make sudo
+ make deps
+ '''
+ }
+ }
+ }
+ stage('Python setup') {
+ steps {
+ script {
+ sh "make clean apps"
+ }
+ }
+ }
+ stage('Gather') {
+ steps {
+ withCredentials([
+ file(credentialsId: 'lp-lander-credentials', variable: 'LP_CREDENTIALS'),
+ file(credentialsId: 'maas-ro-prodstack-5', variable: 'SWIFT_KEY'),
+ file(credentialsId: 'github-api-token', variable: 'GITHUB_API_TOKEN'),
+ ]) {
+ script {
+ sh "make metrics"
+ }
+ }
+ }
+ }
+ stage('Push') {
+ steps {
+ withCredentials([file(credentialsId: 'maas-influxdb', variable: 'INFLUXDB_CREDENTIALS')]) {
+ script {
+ sh "make push"
+ }
+ }
+ }
+ }
+ }
+
+ post {
+ always {
+ archiveArtifacts artifacts: 'generated/*.metrics'
+ }
+ }
+}
diff --git a/jobs/update-kpis.yaml b/jobs/update-kpis.yaml
new file mode 100644
index 0000000..a10d3d2
--- /dev/null
+++ b/jobs/update-kpis.yaml
@@ -0,0 +1,27 @@
+- project:
+ name: update-kpis
+ jobs:
+ - update-kpis
+
+- job-template:
+ name: update-kpis
+ parameters:
+ - string:
+ name: INFLUXDB_HOST
+ description: Hostname of the InfluxDB
+ default: maas-influxdb.cloud.kpi.internal
+ - string:
+ name: INFLUXDB_DBNAME
+ description: The name of the database to use
+ default: maas
+ - string:
+ name: DAILYSTATS_DAYS
+ description: Number of days back to collect daily stats for
+ default: 3
+
+ triggers:
+ - timed: '@hourly'
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 100
+ dsl: !include-jinja2: update-kpis.groovy
diff --git a/jobs/update-release-branch.groovy b/jobs/update-release-branch.groovy
new file mode 100644
index 0000000..f1ec1e3
--- /dev/null
+++ b/jobs/update-release-branch.groovy
@@ -0,0 +1,61 @@
+pipeline {
+ agent {
+ docker {
+ image 'ubuntu:{{ series }}'
+ registryUrl '{{ docker_registry }}'
+ registryCredentialsId '{{ docker_registry_cred }}'
+ label 'master'
+ args '-u 0:0'
+ reuseNode true
+ }
+ }
+
+ stages {
+ stage('Prepare') {
+ steps {
+ cleanWs()
+ sh '''
+ if [ ! -z \$http_proxy ]; then
+ echo "Acquire::http::proxy \\"\$http_proxy\\"\\;" > /etc/apt/apt.conf.d/launchpad-ci-proxy
+ echo "Acquire::https::proxy \\"\$http_proxy\\"\\;" >> /etc/apt/apt.conf.d/launchpad-ci-proxy
+ fi
+
+ export DEBIAN_FRONTEND=noninteractive
+ apt update
+ apt install -y git
+ '''
+ }
+ }
+ stage('Update branch') {
+ steps {
+ withCredentials([file(credentialsId: 'lp-lander-sshkey', variable: 'SSHKEY')]) {
+ withEnv(["GIT_SSH_COMMAND=ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"]) {
+ lock(resource: "maas-${RELEASE_BRANCH}") {
+ sh '''
+ mkdir -p /run/build
+ cd /run/build
+ git clone --depth 100 git+ssh://maas-lander@git.launchpad.net/${LP_REPO} maas
+ cd maas
+
+ in_master=$(git branch --contains ${MASTER_COMMIT} master)
+ if [ -z "$in_master" ]; then
+ echo "Commit to be update to has be be in master."
+ exit 1
+ fi
+ git push origin ${MASTER_COMMIT}:refs/heads/${RELEASE_BRANCH}
+ '''
+ }
+ }
+ }
+ }
+ }
+ }
+ post {
+ success {
+ mattermostSend (color: 'green', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :success: ${env.LP_REPO}/${env.RELEASE_BRANCH} branch updated to ${env.MASTER_COMMIT} ")
+ }
+ failure {
+ mattermostSend (color: 'red', message: "[${env.JOB_NAME} #${env.BUILD_NUMBER}](${env.BUILD_URL}) :fire: Failure to update ${env.LP_REPO}/${env.RELEASE_BRANCH} to ${env.MASTER_COMMIT}")
+ }
+ }
+}
diff --git a/jobs/update-release-branch.yaml b/jobs/update-release-branch.yaml
new file mode 100644
index 0000000..df5f1e6
--- /dev/null
+++ b/jobs/update-release-branch.yaml
@@ -0,0 +1,22 @@
+- project:
+ name: update-release-branch
+ jobs:
+ - update-release-branch
+
+- job-template:
+ name: update-release-branch
+ parameters:
+ - string:
+ name: LP_REPO
+ description: 'Git repository to update'
+ default: '~maas-committers/maas'
+ - string:
+ name: MASTER_COMMIT
+ description: 'Git commit to update the release branch to'
+ - string:
+ name: RELEASE_BRANCH
+ description: 'Name of the release branch'
+ properties:
+ - build-discarder:
+ artifact-num-to-keep: 10
+ dsl: !include-jinja2: update-release-branch.groovy
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..c21be80
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,15 @@
+certifi==2022.9.24
+charset-normalizer==2.1.1
+fasteners==0.18
+idna==3.4
+jenkins-job-builder==4.1.0
+Jinja2==3.1.2
+MarkupSafe==2.1.1
+multi-key-dict==2.0.3
+pbr==5.11.0
+python-jenkins==1.7.0
+PyYAML==6.0
+requests==2.28.1
+six==1.16.0
+stevedore==4.1.1
+urllib3==1.26.13
diff --git a/utilities/export-perf-to-influxdb b/utilities/export-perf-to-influxdb
new file mode 100755
index 0000000..f0ad2bc
--- /dev/null
+++ b/utilities/export-perf-to-influxdb
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Parses the JUnit results file and push them to InfluxDB"""
+
+import argparse
+import json
+import sys
+from datetime import datetime
+
+from influxdb_client import InfluxDBClient, Point
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+retention_policy = "autogen"
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "results",
+ type=argparse.FileType("r"),
+ help="Path to the JSON file",
+ )
+ parser.add_argument(
+ "--system-id",
+ default="local",
+ help="Test system identification",
+ )
+ parser.add_argument(
+ "--database",
+ default="maas_ci_perf",
+ help="Influx database name",
+ )
+ parser.add_argument(
+ "--host",
+ default="http://localhost:8086",
+ help="Influx DB location",
+ )
+ parser.add_argument(
+ "--credential",
+ type=argparse.FileType("r"),
+ default="influxdb.ini",
+ help="InfluxDB credentials file",
+ )
+ parser.add_argument(
+ "--dataset",
+ default="",
+ help="Dataset ID",
+ )
+ args = parser.parse_args()
+
+ data = json.load(args.results)
+ username, password = args.credential.read().strip().split(" ", 1)
+ bucket = f"{args.database}/{retention_policy}"
+
+ ts = datetime.now()
+
+ with InfluxDBClient(
+ url=args.host, token=f"{username}:{password}", org="-"
+ ) as client:
+ with client.write_api(write_options=SYNCHRONOUS) as write_api:
+ for test_name, test_results in data["tests"].items():
+ p = Point.from_dict(
+ {
+ "measurement": "testcase",
+ "tags": {
+ "system": args.system_id,
+ "dataset": args.dataset,
+ "test": test_name,
+ "revision": data["commit"],
+ },
+ "fields": test_results,
+ "time": ts,
+ }
+ )
+ write_api.write(bucket=bucket, record=p)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
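
For reference, the script expects a JSON results file shaped roughly as below (field names and values here are hypothetical); it would then be invoked as `utilities/export-perf-to-influxdb results.json --credential influxdb.ini`, where the credentials file is a single `username password` line as in utilities/influxdb.ini.example.

```
import json

# hypothetical results file consumed by export-perf-to-influxdb: a commit
# revision plus per-test numeric fields, each test becoming one InfluxDB point
results = {
    "commit": "0123456789abcdef",
    "tests": {
        "test_list_machines": {"duration": 1.23, "memory": 456.0},
    },
}
with open("results.json", "w") as fh:
    json.dump(results, fh)
```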
diff --git a/utilities/influxdb.ini.example b/utilities/influxdb.ini.example
new file mode 100644
index 0000000..aa0acdb
--- /dev/null
+++ b/utilities/influxdb.ini.example
@@ -0,0 +1 @@
+maas secret
\ No newline at end of file
diff --git a/utilities/terraform/terraform_build b/utilities/terraform/terraform_build
new file mode 100755
index 0000000..6116134
--- /dev/null
+++ b/utilities/terraform/terraform_build
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+cd /run/build/checkout
+
+make build
+
+# overriding HOSTNAME to ensure we don't pull the provider from the registry
+# and use the local provider instead
+make install HOSTNAME=terraform.maas.io
+
+cd /run/build/tests
+
+pip install -r ./requirements.txt
diff --git a/utilities/terraform/terraform_deps b/utilities/terraform/terraform_deps
new file mode 100755
index 0000000..88c1162
--- /dev/null
+++ b/utilities/terraform/terraform_deps
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+export DEBIAN_FRONTEND=noninteractive
+
+sudo apt update && sudo apt install -y build-essential software-properties-common gnupg golang python3-pip
+
+sudo mkdir -p /etc/apt/keyrings
+wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /etc/apt/keyrings/hashicorp-archive-keyring.gpg
+
+echo "deb [signed-by=/etc/apt/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list
+
+sudo apt update && sudo apt install -y terraform
+
+pip install git+https://github.com/maas/python-libmaas.git
diff --git a/utilities/terraform/terraform_post_actions b/utilities/terraform/terraform_post_actions
new file mode 100755
index 0000000..a318db7
--- /dev/null
+++ b/utilities/terraform/terraform_post_actions
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+export no_proxy="${no_proxy:-localhost},${MAAS_HOST}"
+export NO_PROXY="${NO_PROXY:-localhost},${MAAS_HOST}"
+export GIT_SSH_COMMAND="ssh -i $SSHKEY -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
+
+
+if [ ! -z $http_proxy ]; then
+ echo "Acquire::http::proxy \"$http_proxy\";" > /etc/apt/apt.conf.d/github-ci-proxy
+ echo "Acquire::https::proxy \"$http_proxy\";" >> /etc/apt/apt.conf.d/github-ci-proxy
+
+ mkdir -p ~/.ssh/
+ echo "Host github.com\n HostName ssh.github.com\n Port 443\n ProxyCommand /usr/bin/nc -X connect -x squid.internal %h %p\n" >> ~/.ssh/config
+fi
+
+python3 <<EOF
+import os
+
+from maas.client import connect
+
+
+if __name__ == "__main__":
+ maas = connect(os.environ["TF_VAR_maas_url"], apikey=os.environ["TF_VAR_apikey"])
+ fabric = maas.fabrics.get(id=0)
+ vlan = fabric.vlans.get_default()
+ vlan.dhcp_on = False
+ vlan.save()
+
+EOF
+
+cd /run/build/tests
+
+export COMMIT_HASH="$(git rev-parse HEAD)"
+
+./update_results
+
+git config user.email "${GH_USER_EMAIL}"
+git config user.name "${GH_USER_NAME}"
+mkdir -p ~/.ssh/
+echo "Host github.com\n HostName ssh.github.com\n Port 443\n ProxyCommand /usr/bin/nc -X connect -x squid.internal %h %p\n" >> ~/.ssh/config
+
+git add results.json
+git commit -m "updating test results for: ${COMMIT_HASH}"
+git push -u origin "${GH_TEST_BRANCH}"
diff --git a/utilities/terraform/terraform_setup_env b/utilities/terraform/terraform_setup_env
new file mode 100755
index 0000000..0cf4ab3
--- /dev/null
+++ b/utilities/terraform/terraform_setup_env
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
+
+import os
+
+from maas.client import connect
+
+
+if __name__ == "__main__":
+ maas = connect(os.environ["TF_VAR_maas_url"], apikey=os.environ["TF_VAR_apikey"])
+
+ rack_controller = maas.rack_controllers.get(system_id=os.environ["PRIMARY_RACK_CONTROLLER"])
+ fabric = maas.fabrics.get(id=0)
+ vlan = fabric.vlans.get_default()
+ vlan.dhcp_on = True
+ vlan.primary_rack = rack_controller
+ vlan.save()