sts-sponsors team mailing list archive
Message #03552
[Merge] ~alexsander-souza/maas/+git/maas-release-tools:add_jenkins into maas:master
Alexsander de Souza has proposed merging ~alexsander-souza/maas/+git/maas-release-tools:add_jenkins into maas:master.
Commit message:
add checks for MAAS system integration tests
Requested reviews:
MAAS Maintainers (maas-maintainers)
For more details, see:
https://code.launchpad.net/~alexsander-souza/maas/+git/maas-release-tools/+merge/433613
--
Your team MAAS Committers is subscribed to branch maas:master.
diff --git a/.gitignore b/.gitignore
index 31cd627..faae3c6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+<<<<<<< .gitignore
*.DS_Store
*.egg
*.egg-info
@@ -63,3 +64,13 @@ __pycache__
\#*\#
.#*
*.*.swp
+=======
+*.pyc *.pyo
+__pycache__
+/*.egg-info
+/.eggs
+/.mypy_cache
+/.tox
+/bin
+/build
+>>>>>>> .gitignore
diff --git a/Makefile b/Makefile
index fbc57fc..b12630b 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,4 @@
+<<<<<<< Makefile
python := python3
snapcraft := SNAPCRAFT_BUILD_INFO=1 snapcraft -v
@@ -456,3 +457,76 @@ snap-tree-sync: $(UI_BUILD) go-bins $(SNAP_UNPACKED_DIR_MARKER)
src/host-info/bin/machine-resources/ \
$(SNAP_UNPACKED_DIR)/usr/share/maas/machine-resources/
.PHONY: snap-tree-sync
+=======
+PY_FILES := setup.py maas_release_tools
+TOX := /usr/bin/tox
+TOX_DIR := .tox
+VE_DIR := $(TOX_DIR)/py
+
+SHELLCHECK := /snap/bin/shellcheck
+SCRIPTS_DIR := scripts
+
+BIN_DIR := bin
+
+export PATH := $(VIRTUALENV)/bin:$(PATH)
+
+.DEFAULT_GOAL := setup
+
+setup: deps bins
+.PHONY: setup
+
+clean:
+ rm -rf $(TOX_DIR) $(BIN_DIR) *.egg-info
+.PHONY: clean
+
+deps: $(TOX) $(SHELLCHECK)
+.PHONY: deps
+
+bins: py-bins shell-bins
+.PHONY: bins
+
+format: py-format
+.PHONY: format
+
+lint: py-lint shell-lint
+.PHONY: lint
+
+check: py-check
+.PHONY: check
+
+$(BIN_DIR):
+ mkdir $@
+
+# Python targets
+
+py-bins: $(VE_DIR) $(BIN_DIR)
+ for bin in $$(PIP_REQUIRE_VIRTUALENV=false python3 setup.py -q console_scripts 2>/dev/null); do \
+ ln -sf ../$(VE_DIR)/bin/$$bin $(BIN_DIR)/$$bin; \
+ done
+.PHONY: py-bins
+
+py-format py-lint py-check:
+ $(TOX) -e $(patsubst py-%,%,$@)
+.PHONY: py-format py-lint py-check
+
+$(VE_DIR):
+ $(TOX) -e py
+
+$(TOX):
+ sudo apt install --yes tox
+
+# Shell targets
+
+shell-bins:
+ for bin in $(wildcard $(SCRIPTS_DIR)/[^_]*); do \
+ ln -sf ../$$bin $(BIN_DIR)/$$(basename $$bin); \
+ done;
+.PHONY: shell-bins
+
+shell-lint: $(SHELLCHECK)
+ $(SHELLCHECK) -x -s bash $(SCRIPTS_DIR)/*
+.PHONY: shell-lint
+
+$(SHELLCHECK):
+ sudo snap install shellcheck
+>>>>>>> Makefile
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..c902c30
--- /dev/null
+++ b/README.md
@@ -0,0 +1,23 @@
+MAAS release tools
+==================
+
+This repo contains a collection of tools to build and manage MAAS releases.
+
+Run `make setup` to collect and install dependencies and generate scripts, which
+will make them available under `bin/`.
+
+
+Development
+-----------
+
+Python scripts are implemented under the `maas_release_tools.scripts` package.
+
+Use `tox` to run `format`, `lint` and `check` commands.
+
+
+Shell scripts are under the `scripts/` directory; common functions and
+variables can be put in `scripts/_lib` and sourced from scripts.
+
+
+There are also top-level makefile commands for `format`, `lint` and `check`
+that can be run for the whole project.
diff --git a/maas_release_tools/__init__.py b/maas_release_tools/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/maas_release_tools/__init__.py
diff --git a/maas_release_tools/actions.py b/maas_release_tools/actions.py
new file mode 100644
index 0000000..a48ec3e
--- /dev/null
+++ b/maas_release_tools/actions.py
@@ -0,0 +1,38 @@
+"""Script actions."""
+
+from argparse import Namespace
+
+from .launchpad import LaunchpadActions
+
+
+class Actions:
+ """Perform actions."""
+
+ def __init__(self, lp_actions: LaunchpadActions, args: Namespace):
+ self.lp_actions = lp_actions
+ self.args = args
+
+ def __call__(self):
+ action_name = self.args.action.replace("-", "_")
+ action = getattr(self, action_name)
+ return action()
+
+ def assign_bugs_to_milestone(self):
+ """Assign bugs to a milestone, re-opening and closing as necessary"""
+ self.lp_actions.assign_bugs_to_milestone(
+ self.args.bugs, self.args.milestone
+ )
+
+ def move_done_bugs(self):
+ """Move completed bugs across milestones."""
+ self.lp_actions.move_done_bugs(
+ self.args.origin, self.args.dest, self.args.fixed_before
+ )
+
+ def move_unfinished_bugs(self):
+ """Move bugs not completed across milestones."""
+ self.lp_actions.move_unfinished_bugs(self.args.origin, self.args.dest)
+
+ def release_milestone(self):
+ """Release a milestone."""
+ self.lp_actions.release_milestone(self.args.milestone)
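For reference, a minimal sketch of the dash-to-underscore dispatch the Actions class above performs; the Namespace values are hypothetical and a real run needs Launchpad credentials:

    from argparse import Namespace

    from maas_release_tools.actions import Actions
    from maas_release_tools.launchpad import LaunchpadActions

    lp_actions = LaunchpadActions("maas", dry_run=True)  # logs in to Launchpad
    # "move-done-bugs" is dispatched to Actions.move_done_bugs()
    args = Namespace(
        action="move-done-bugs", origin="3.3.0", dest="3.3.1", fixed_before=None
    )
    Actions(lp_actions, args)()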
diff --git a/maas_release_tools/git.py b/maas_release_tools/git.py
new file mode 100644
index 0000000..5e377c0
--- /dev/null
+++ b/maas_release_tools/git.py
@@ -0,0 +1,83 @@
+"""Git helpers."""
+
+import re
+import subprocess
+from typing import Dict, List, NamedTuple
+from urllib.parse import ParseResult, urlparse
+
+_REMOTE_URL_RE = re.compile(r"(?P<name>.+)\t(?P<url>.+) \((?P<type>.+)\)$")
+
+
+class GitCommandResult(NamedTuple):
+ output: str
+ error: str
+ code: int
+
+ @property
+ def succeeded(self) -> bool:
+ return self.code == 0
+
+
+class Git:
+ """A wrapper around the git CLI."""
+
+ def get_short_rev(self, ref: str) -> str:
+ """Return the short revision for a reference."""
+ result = self._run("rev-parse", "--short", ref)
+ return result.output
+
+ def get_tag_commit(self, tag: str) -> str:
+ """Return the commit hash for a tag."""
+ result = self._run("rev-list", "-n", "1", f"tags/{tag}")
+ return result.output
+
+ def get_remote_branches_containing(self, ref: str) -> List[List[str]]:
+ result = self._run("branch", "-r", "--contains", ref)
+ return [
+ branch.strip().split("/", 1)
+ for branch in result.output.splitlines()
+ ]
+
+ def get_remote_urls(self) -> Dict[str, ParseResult]:
+ """Return a dict mapping remote names to their fetch URLs."""
+ result = self._run("remote", "-v")
+ urls = {}
+ for line in result.output.splitlines():
+ match = _REMOTE_URL_RE.match(line)
+ if not match:
+ continue
+ entry = match.groupdict()
+ if entry["type"] != "fetch":
+ continue
+ urls[entry["name"]] = urlparse(entry["url"])
+ return urls
+
+ def has_uncommited_changes(self) -> bool:
+ result = self._run("diff-index", "--quiet", "HEAD")
+ return not result.succeeded
+
+ def list_from_remote(
+ self, repo: str, heads: bool = True, tags: bool = True
+ ):
+ """List ref heads and tags from a repo that is not necessarily cloned locally"""
+ run_args = ["ls-remote"]
+ if heads:
+ run_args.append("-h")
+ if tags:
+ run_args.append("-t")
+ run_args.append(repo)
+ output = self._run(*run_args).output
+ return output.split("\n")
+
+ def _run(self, *args) -> GitCommandResult:
+ proc = subprocess.run(
+ ["git", *args],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True,
+ )
+ return GitCommandResult(
+ output=proc.stdout.strip(),
+ error=proc.stderr.strip(),
+ code=proc.returncode,
+ )
diff --git a/maas_release_tools/launchpad.py b/maas_release_tools/launchpad.py
new file mode 100644
index 0000000..f070d9a
--- /dev/null
+++ b/maas_release_tools/launchpad.py
@@ -0,0 +1,170 @@
+"""Interact with Launchpad API."""
+
+from contextlib import contextmanager
+from datetime import datetime, timezone
+from functools import cached_property
+import logging
+from pathlib import Path
+from typing import List, Optional, Sequence
+
+from launchpadlib.launchpad import Launchpad
+
+DONE_BUGS = ("Invalid", "Won't Fix", "Fix Committed", "Fix Released")
+UNFINISHED_BUGS = ("New", "Confirmed", "Triaged", "In Progress", "Incomplete")
+
+
+class UnknownLaunchpadEntry(Exception):
+ def __init__(self, entry_type: str, identifier: str):
+ super().__init__(f"Unknown {entry_type}: {identifier}")
+
+
+class LaunchpadActions:
+
+ lp: Launchpad
+ logger: logging.Logger
+ dry_run: bool
+
+ def __init__(
+ self,
+ project: str,
+ credentials_file: Optional[Path] = None,
+ dry_run: bool = False,
+ ):
+ self.lp = self._get_client(credentials_file=credentials_file)
+ self.logger = logging.getLogger("launchpad")
+ self.dry_run = dry_run
+ self._project = self.lp.projects[project]
+ if not self._project:
+ raise UnknownLaunchpadEntry("project", project)
+
+ @cached_property
+ def me(self):
+ """Return the logged in user user from LP."""
+ return self.lp.me
+
+ def move_done_bugs(
+ self,
+ origin_milestone: str,
+ dest_milestone: str,
+ fixed_before: Optional[str] = None,
+ ):
+ """Move bugs that are done from a milestone to another."""
+ origin = self._get_milestone(origin_milestone)
+ dest = self._get_milestone(dest_milestone)
+ limit = (
+ datetime.fromisoformat(fixed_before).replace(tzinfo=timezone.utc)
+ if fixed_before
+ else None
+ )
+ self._move_bugs(
+ DONE_BUGS, origin, dest, dry_run=self.dry_run, fixed_before=limit
+ )
+
+ def move_unfinished_bugs(self, origin_milestone: str, dest_milestone: str):
+ """Move bugs that are not done from a milestone to another."""
+ origin = self._get_milestone(origin_milestone)
+ dest = self._get_milestone(dest_milestone)
+ self._move_bugs(UNFINISHED_BUGS, origin, dest, dry_run=self.dry_run)
+
+ def assign_bugs_to_milestone(
+ self, bugs: List[str], milestone_name: str
+ ) -> None:
+ """Assign bugs to a milestone, re-opening and closing as necessary."""
+ with self._active_milestone(milestone_name) as milestone:
+ for bug_number in bugs:
+ bug = self.lp.bugs[bug_number]
+ for task in bug.bug_tasks:
+ if task.target == self._project:
+ self.logger.info(
+ f"assigning bug {bug_number} to milestone {milestone.name}"
+ )
+ task.milestone = milestone
+ if not self.dry_run:
+ task.lp_save()
+ break
+ else:
+ self.logger.error(
+ f"No task found for {bug_number} on project {self._project}"
+ )
+
+ def release_milestone(self, name: str):
+ """Release a milestone marking finished bug as released."""
+ milestone = self._get_milestone(name)
+ bug_tasks = milestone.searchTasks(status="Fix Committed")
+ for bug_task in bug_tasks:
+ self.logger.info(
+ f"marking bug {bug_task.bug.id} as fix released: {bug_task.bug.title}"
+ )
+ bug_task.status = "Fix Released"
+ if not self.dry_run:
+ bug_task.lp_save()
+
+ self.logger.info(f"closing milestone {milestone.name}")
+ milestone.is_active = False
+ if not self.dry_run:
+ milestone.lp_save()
+
+ if milestone.release is None:
+ self.logger.info(f"releasing milestone {milestone.name}")
+ if not self.dry_run:
+ milestone.createProductRelease(date_released=datetime.utcnow())
+ else:
+ self.logger.info(f"milestone {milestone.name} already released")
+
+ def _get_client(
+ self, credentials_file: Optional[Path] = None
+ ) -> Launchpad:
+ """Return a Launchpad API client."""
+ kwargs = {
+ "service_root": "https://api.launchpad.net",
+ "version": "devel",
+ }
+ if credentials_file:
+ kwargs["credentials_file"] = str(credentials_file)
+ return Launchpad.login_with("maas-release-tools", **kwargs)
+
+ def _get_milestone(self, name: str):
+ milestone = self._project.getMilestone(name=name)
+ if not milestone:
+ raise UnknownLaunchpadEntry("milestone", name)
+ return milestone
+
+ @contextmanager
+ def _active_milestone(self, name):
+ """Allows callers to temporarily open a milestone."""
+ milestone = self._get_milestone(name)
+ was_active = milestone.is_active
+ if not was_active:
+ self.logger.info(f"marking milestone {name} as temporarily active")
+ if not self.dry_run:
+ milestone.is_active = True
+ if not was_active:
+ milestone.lp_save()
+ yield milestone
+ if not was_active:
+ self.logger.info(f"marking milestone {name} inactive")
+ milestone.is_active = False
+ if not self.dry_run:
+ milestone.lp_save()
+
+ def _move_bugs(
+ self,
+ statuses: Sequence[str],
+ orig_milestone,
+ dest_milestone,
+ dry_run: bool = False,
+ fixed_before: Optional[datetime] = None,
+ ):
+ bug_tasks = orig_milestone.searchTasks(status=statuses)
+ for bug_task in bug_tasks:
+ if fixed_before and bug_task.date_fix_committed > fixed_before:
+ self.logger.debug(
+ f"skiping bug {bug_task.bug.id}, fixed after the release date"
+ )
+ continue
+ self.logger.info(
+ f"retargeting bug {bug_task.bug.id}: {bug_task.bug.title}"
+ )
+ bug_task.milestone = dest_milestone
+ if not dry_run:
+ bug_task.lp_save()
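A dry-run sketch of the LaunchpadActions workflow that the release-manage script wraps; the milestone names are hypothetical:

    import logging

    from maas_release_tools.launchpad import LaunchpadActions

    logging.basicConfig(level=logging.INFO)
    lp = LaunchpadActions("maas", dry_run=True)
    # retarget bugs that were not finished in 3.3.0 to the next milestone
    lp.move_unfinished_bugs("3.3.0", "3.3.1")
    # mark Fix Committed bugs as Fix Released and close the milestone
    lp.release_milestone("3.3.0")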
diff --git a/maas_release_tools/maasci.py b/maas_release_tools/maasci.py
new file mode 100644
index 0000000..099df77
--- /dev/null
+++ b/maas_release_tools/maasci.py
@@ -0,0 +1,53 @@
+"""Interact with Jenkins API"""
+import configparser
+from functools import cached_property
+import logging
+from os.path import expanduser
+from pathlib import Path
+from typing import Optional, Tuple
+
+from jenkins import Jenkins
+
+JJB_CONFIG = Path("~/.config/jenkins_jobs/jenkins_jobs.ini")
+JJB_SECTION = "maas-integration-ci"
+
+
+class JenkinsActions:
+ def __init__(
+ self,
+ server_section: Optional[str] = None,
+ jenkins_config: Optional[Path] = None,
+ dry_run: bool = False,
+ ):
+ self._jenkins = self._get_client(
+ server_section=server_section, jenkins_config=jenkins_config
+ )
+ self.logger = logging.getLogger("jenkins")
+ self.dry_run = dry_run
+
+ def _get_client(
+ self,
+ server_section: Optional[str] = None,
+ jenkins_config: Optional[Path] = None,
+ ) -> Jenkins:
+ """Return a Jenkins API client."""
+ jenkins_config = jenkins_config or Path(expanduser(JJB_CONFIG))
+ server_section = server_section or JJB_SECTION
+ config = configparser.ConfigParser()
+ config.read(jenkins_config)
+ url = config[server_section]["url"]
+ kwargs = {
+ "username": config[server_section]["user"],
+ "password": config[server_section]["password"],
+ }
+ return Jenkins(url, **kwargs)
+
+ @cached_property
+ def me(self) -> str:
+ return self._jenkins.get_whoami()["id"]
+
+ def get_last_build_result(self, job_name: str) -> Tuple[str, str]:
+ job = self._jenkins.get_job_info(job_name)
+ last_build = job["lastCompletedBuild"]["number"]
+ build_info = self._jenkins.get_build_info(job_name, last_build)
+ return str(build_info["result"]), str(build_info["url"])
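JenkinsActions above reads its server URL and credentials from the jenkins-job-builder config; a sketch of the expected layout and a query against the job name used later in release_status.py (config values are hypothetical):

    from pathlib import Path

    from maas_release_tools.maasci import JenkinsActions

    # ~/.config/jenkins_jobs/jenkins_jobs.ini is expected to contain a section like:
    #   [maas-integration-ci]
    #   url = https://jenkins.example.com   (hypothetical)
    #   user = some-user
    #   password = some-api-token
    jenkins = JenkinsActions(
        jenkins_config=Path("~/.config/jenkins_jobs/jenkins_jobs.ini").expanduser()
    )
    result, url = jenkins.get_last_build_result("maas-system-tests")
    print(result, url)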
diff --git a/maas_release_tools/scripts/__init__.py b/maas_release_tools/scripts/__init__.py
new file mode 100644
index 0000000..f6db5a9
--- /dev/null
+++ b/maas_release_tools/scripts/__init__.py
@@ -0,0 +1,33 @@
+from argparse import Namespace
+from pathlib import Path
+from typing import IO, List, Tuple
+
+import pkg_resources
+from setuptools import Command
+
+
+class PrintConsoleScripts(Command):
+ """Print list of console_scripts."""
+
+ description = __doc__
+ user_options: List[Tuple] = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ for entry in pkg_resources.iter_entry_points("console_scripts"):
+ if not entry.module_name.startswith("maas_release_tools"):
+ continue
+ print(entry.name)
+
+
+def convert_file_descriptors_to_path(ns: Namespace):
+ """Replace file descriptor attributes in a Namespace with Path objects."""
+ for name, value in ns._get_kwargs():
+ if isinstance(value, IO):
+ value.close()
+ setattr(ns, "name", Path(value.name))
diff --git a/maas_release_tools/scripts/missingestimates.py b/maas_release_tools/scripts/missingestimates.py
new file mode 100644
index 0000000..36d473a
--- /dev/null
+++ b/maas_release_tools/scripts/missingestimates.py
@@ -0,0 +1,95 @@
+"""Find tickets in Evaluated with missing original estimates."""
+
+import argparse
+import json
+
+from jira import JIRA
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument(
+ "--verbose",
+ action="store_true",
+ help="Show tickets with estimates too",
+ )
+ parser.add_argument(
+ "--jira-json",
+ type=argparse.FileType("r"),
+ default="jira.token",
+ help="Path to JIRA credentials JSON blob matching {'jira-server': , 'jira-login': , 'jira-token': }",
+ )
+ ns = parser.parse_args()
+ try:
+ with ns.jira_json as fh:
+ config = json.load(fh)
+ jira_server, login, token = (
+ config["jira-server"],
+ config["jira-login"],
+ config["jira-token"],
+ )
+ except (KeyError, json.JSONDecodeError):
+ parser.error(
+ f"Unable to read JIRA credentials from {ns.jira_json.name}"
+ )
+ return ns, jira_server, login, token
+
+
+def find_estimated_issues(jira, project, component):
+ return jira.search_issues(
+ f"project={project} AND component={component} AND status=Evaluated ORDER BY rank"
+ )
+
+
+def calculate_wsjf(issue):
+ return round(
+ issue.fields["cost_of_delay"] / issue.fields["story_points"], 3
+ )
+
+
+def pythonise_propertyholder(fields_by_name):
+ """Make fields behave like a dict."""
+ # If we have a request for a custom field by slug name, then
+ # lookup the actual id and get it from the __dict__
+ def getitem(self2, key, *args, **kwargs):
+ under_key = fields_by_name.get(key, key)
+ return self2.__dict__.__getitem__(under_key)
+
+ from jira.resources import PropertyHolder
+
+ PropertyHolder.__getitem__ = getitem
+ PropertyHolder.__setitem__ = lambda s, k, v: s.__dict__.__setitem__(k, v)
+ PropertyHolder.__delitem__ = lambda s, k: s.__dict__.__delitem__(k)
+
+
+def main():
+ args, jira_server, login, token = parse_args()
+ jira = JIRA(jira_server, basic_auth=(login, token))
+
+ # Get custom fields
+ all_fields = jira.fields()
+
+ def slugify(name):
+ return name.lower().replace(" ", "_").replace("-", "_")
+
+ fields_by_name = {
+ slugify(field["name"]): field["id"] for field in all_fields
+ }
+
+ # Make the fields dict-like and friendly
+ pythonise_propertyholder(fields_by_name)
+
+ for issue in find_estimated_issues(jira, project="PF", component="MAAS"):
+ original_estimate = issue.fields["original_estimate"]
+ if original_estimate is not None:
+ if args.verbose:
+ print(
+ f"""\
+[{issue.key}] {issue.fields.summary} ✅
+ {issue.fields['story_points']}: {issue.fields.timeoriginalestimate}"""
+ )
+ else:
+ print(
+ f"[{issue.key}] {issue.fields.summary} ❌ missing original estimate"
+ )
+ return 0
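The field-mapping trick above turns JIRA custom-field display names into dict-style keys; a standalone sketch of the same mapping, with made-up field ids:

    def slugify(name):
        return name.lower().replace(" ", "_").replace("-", "_")

    # as returned by jira.fields(), trimmed to the relevant entries (ids are hypothetical)
    all_fields = [
        {"name": "Story Points", "id": "customfield_10024"},
        {"name": "Cost of Delay", "id": "customfield_10031"},
    ]
    fields_by_name = {slugify(f["name"]): f["id"] for f in all_fields}
    # issue.fields["story_points"] then resolves to customfield_10024
    print(fields_by_name["story_points"])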
diff --git a/maas_release_tools/scripts/release_manage.py b/maas_release_tools/scripts/release_manage.py
new file mode 100644
index 0000000..23db009
--- /dev/null
+++ b/maas_release_tools/scripts/release_manage.py
@@ -0,0 +1,95 @@
+"""Manage releases and milestones."""
+
+from argparse import ArgumentParser, FileType
+import logging
+import sys
+
+from . import convert_file_descriptors_to_path
+from ..actions import Actions
+from ..launchpad import LaunchpadActions
+
+
+def parse_args():
+ """Return parsed arguments for the script."""
+
+ def add_move_across_milestones_args(parser):
+ parser.add_argument("origin", help="origin milestone")
+ parser.add_argument("dest", help="destination milestone")
+
+ parser = ArgumentParser(
+ description="Manage project releases with Launchpad."
+ )
+ parser.add_argument(
+ "--debug",
+ action="store_true",
+ default=False,
+ help="debug log",
+ )
+ parser.add_argument(
+ "--credentials",
+ default=None,
+ type=FileType(),
+ help="Launchpad credentials file",
+ )
+ parser.add_argument(
+ "--dry-run",
+ action="store_true",
+ default=False,
+ help="don't actually perform actions",
+ )
+ parser.add_argument("project", help="the project to manage releases for")
+
+ subparsers = parser.add_subparsers(
+ metavar="ACTION", dest="action", help="action to perform"
+ )
+ subparsers.required = True
+
+ assign_bugs = subparsers.add_parser(
+ "assign-bugs-to-milestone",
+ help=Actions.assign_bugs_to_milestone.__doc__,
+ )
+ assign_bugs.add_argument("milestone", help="the milestone to assign")
+ assign_bugs.add_argument("bugs", nargs="+", help="the bugs to assign")
+
+ move_done_bugs = subparsers.add_parser(
+ "move-done-bugs", help="move done bugs from a milestone to another"
+ )
+ add_move_across_milestones_args(move_done_bugs)
+ move_done_bugs.add_argument(
+ "--fixed-before",
+ help="select bugs fixed before this date",
+ default=None,
+ )
+
+ move_unfinished_bugs = subparsers.add_parser(
+ "move-unfinished-bugs",
+ help="move unfinished bugs from a milestone to another",
+ )
+ add_move_across_milestones_args(move_unfinished_bugs)
+
+ release = subparsers.add_parser(
+ "release-milestone",
+ help="release a milestone, marking done bugs are released",
+ )
+ release.add_argument("milestone", help="the milestone to release")
+
+ ns = parser.parse_args()
+ convert_file_descriptors_to_path(ns)
+ return ns
+
+
+def main():
+ args = parse_args()
+ logging.basicConfig(
+ format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
+ level=logging.DEBUG if args.debug else logging.INFO,
+ stream=sys.stdout,
+ )
+
+ launchpad_actions = LaunchpadActions(
+ args.project,
+ credentials_file=args.credentials,
+ dry_run=args.dry_run,
+ )
+ actions = Actions(launchpad_actions, args)
+ sys.exit(actions())
diff --git a/maas_release_tools/scripts/release_status.py b/maas_release_tools/scripts/release_status.py
new file mode 100644
index 0000000..ba2a9b0
--- /dev/null
+++ b/maas_release_tools/scripts/release_status.py
@@ -0,0 +1,858 @@
+"""Show the current progress of the release process.
+
+You should use this script when releasing a new version of MAAS. The
+idea is that it will show you where you are in the release process, and
+tell you what to do next.
+
+It's still a work in progress. It tells you the status of various
+release tasks, but it doesn't do a good job at telling you what needs to
+be done.
+
+Please improve this script as you do a release, so that the next release
+will go smoother.
+"""
+
+from abc import ABC, abstractmethod
+from argparse import ArgumentParser, FileType
+import base64
+from functools import lru_cache
+import glob
+import json
+import subprocess
+import sys
+from typing import Iterable, Optional
+
+from debian.changelog import Changelog
+from jenkins import JenkinsException
+from lazr.restfulclient.errors import NotFound
+from pymacaroons import Macaroon
+import requests
+
+from . import convert_file_descriptors_to_path
+from ..git import Git
+from ..launchpad import DONE_BUGS, LaunchpadActions, UnknownLaunchpadEntry
+from ..maasci import JenkinsActions
+from ..version import get_branch_setup_version, ReleaseVersion
+
+MAAS_SNAP_ID = "shY22YTZ3RhJJDOj0MfmShTNZTEb1Jiq"
+BUILD_ARCHS = ("amd64", "arm64", "ppc64el", "s390x")
+
+
+def get_macaroon_refresh_help():
+ """Get help on refreshing the macaroon that can be copied&pasted."""
+ return (
+ " Please refresh it:\n"
+ " rm -f release.macaroon\n"
+ " snapcraft export-login --snaps maas,maas-test-db \\\n"
+ " --acls package_release,package_access release.macaroon"
+ )
+
+
+def get_macaroon_auth_error(res, snap_name):
+ if res.status_code in [200, 404]:
+ return None
+ error_message = res.text
+ if res.status_code in [401, 403]:
+ result = res.json()
+ for error in result.get("error_list", []):
+ if error.get("code") == "macaroon-needs-refresh":
+ error_message = (
+ f"Macaroon has expired.\n{get_macaroon_refresh_help()}"
+ )
+ break
+ if error.get("code") == "macaroon-permission-required":
+ missing_permission = error["extra"]["permission"]
+ error_message = (
+ f"Macaroon doesn't have {missing_permission} "
+ f"for {snap_name}.\n"
+ f"{get_macaroon_refresh_help()}"
+ )
+ break
+ return error_message
+
+
+def get_official_maas_remote(git: Git) -> Optional[str]:
+ for name, url in git.get_remote_urls().items():
+ if url.path == "/maas":
+ return name
+ return None
+
+
+@lru_cache(maxsize=1)
+def get_ubuntu_series() -> str:
+ try:
+ with open("debian/changelog", "r") as fh:
+ ch = Changelog(fh, max_blocks=1)
+ return ch.distributions
+ except OSError:
+ return "None"
+
+
+class ReleasePreparer:
+ def __init__(
+ self,
+ version: ReleaseVersion,
+ snapstore_auth,
+ launchpad: LaunchpadActions,
+ jenkins: JenkinsActions,
+ ):
+ self.launchpad = launchpad
+ self.jenkins = jenkins
+ self.version = version
+ self.snapstore_auth = snapstore_auth
+ self.git_short_rev = Git().get_short_rev("HEAD")
+
+ def run(self, args):
+ all_good = True
+ for step in self.steps:
+ if step.skip():
+ continue
+ print(step.title, end=": ")
+ success, message = step.check()
+ if not success:
+ all_good = False
+ print("\N{large red circle}")
+ else:
+ print("\N{check mark}")
+ if message:
+ for line in message.splitlines():
+ print(" " + str(line))
+
+ print()
+ if all_good:
+ print("All checks PASSED!")
+ else:
+ return "Some checks FAILED, proceed with caution"
+
+
+class ReleaseStep(ABC):
+ def __init__(self, preparer):
+ self.preparer = preparer
+ self.git = Git()
+
+ @property
+ @abstractmethod
+ def title(self):
+ """A one-line title of the release step."""
+
+ def skip(self):
+ """Return whether this step should be performed for this version grade."""
+ return False
+
+ @abstractmethod
+ def check(self):
+ """Return whether the step has already been performed.
+
+ It returns a tuple of (succeeded, message), where succeeded is a
+ boolean indicating whether the check passed, and message is a
+ multi-line message to be displayed.
+ """
+
+
+class NoUncommittedChanges(ReleaseStep):
+ @property
+ def title(self):
+ return "No uncommitted changes"
+
+ def check(self):
+ if self.git.has_uncommited_changes():
+ return False, "Commit and push all changes before releasing."
+ else:
+ return True, None
+
+
+class CommitInRemoteBranch(ReleaseStep):
+ @property
+ def title(self):
+ return "Release commit in remote branch"
+
+ def check(self):
+ release_branch_name = self.preparer.version.major
+ if self.preparer.version.grade in ("alpha", "beta"):
+ # alpha and beta releases are released from master
+ release_branch_name = "master"
+ official_maas_remote = get_official_maas_remote(self.git)
+ if not official_maas_remote:
+ return False, "Official MAAS remote not found"
+ remote_branches = self.git.get_remote_branches_containing("HEAD")
+ for remote, branch_name in remote_branches:
+ if (
+ remote == official_maas_remote
+ and branch_name == release_branch_name
+ ):
+ return True, None
+ else:
+ error_message = (
+ "Current HEAD is not in "
+ f"{official_maas_remote}/{release_branch_name}"
+ )
+ return False, error_message
+
+
+class MAASVersion(ReleaseStep):
+ @property
+ def title(self):
+ return "MAAS version set in branch"
+
+ def check(self):
+ setup_version = get_branch_setup_version()
+ if setup_version != self.preparer.version.python_version:
+ error_message = (
+ f"setup.cfg has {setup_version}, run 'release-prepare'"
+ )
+ return False, error_message
+ proc = subprocess.run(
+ ["dpkg-parsechangelog", "-S", "version"],
+ stdout=subprocess.PIPE,
+ text=True,
+ )
+ no_epoch_version = proc.stdout.split(":", 1)[-1]
+ no_ubuntu_version = no_epoch_version.split("-", 1)[0]
+ package_version = no_ubuntu_version.replace("~", "-")
+
+ if package_version != self.preparer.version.version:
+ error_message = (
+ f"changelog has {package_version} (expected {self.preparer.version.version}), run "
+ "utilities/release-prepare"
+ )
+ return False, error_message
+
+ return True, None
+
+
+class SnapTrack(ReleaseStep):
+ def __init__(self, preparer, snap_name):
+ super().__init__(preparer)
+ self.snap_name = snap_name
+
+ @property
+ def title(self):
+ return f"{self.snap_name} {self.preparer.version.major} snap track"
+
+ def check(self):
+ request_data = {
+ "name": self.snap_name,
+ "track": self.preparer.version.major,
+ }
+ # Note that while the API call is called snap-track-update, it will
+ # give you the state of the track if "pattern" isn't sent.
+ res = requests.post(
+ "https://dashboard.snapcraft.io/dev/api/snap-track-update/",
+ json=request_data,
+ headers={
+ "Authorization": self.preparer.snapstore_auth,
+ "Accept": "application/json",
+ },
+ )
+ auth_error = get_macaroon_auth_error(res, self.snap_name)
+ return res.status_code == 200, auth_error
+
+
+class MAASPPA(ReleaseStep):
+ def __init__(self, preparer, ppa_type):
+ super().__init__(preparer)
+ assert ppa_type in ["stable", "candidate", "release-preparation"]
+ self.ppa_type = ppa_type
+ self.ppa_name = self.preparer.version.major
+ self.ppa_owner = self.preparer.launchpad.lp.people["maas"]
+ if ppa_type == "candidate":
+ self.ppa_name += "-next"
+ elif ppa_type == "release-preparation":
+ self.ppa_name = "maas-" + self.ppa_name + "-next"
+ self.ppa_owner = self.preparer.launchpad.me
+ self.ppa_path = f"{self.ppa_owner.name}/{self.ppa_name}"
+ ubuntu = self.preparer.launchpad.lp.distributions["ubuntu"]
+ self.current_series = ubuntu.getSeries(
+ name_or_version=get_ubuntu_series()
+ )
+
+ @property
+ def title(self):
+ return f"MAAS {self.ppa_type} PPA ({self.ppa_path})"
+
+ def check(self):
+ try:
+ ppa = self.ppa_owner.getPPAByName(name=self.ppa_name)
+ except NotFound:
+ return (
+ False,
+ f"ppa:{self.ppa_owner.name}/{self.ppa_name} couldn't be found.",
+ )
+ else:
+ ppa_archs = set(processor.name for processor in ppa.processors)
+ missing_archs = sorted(set(BUILD_ARCHS).difference(ppa_archs))
+ if missing_archs:
+ return False, (
+ f"Missing build architectures: {', '.join(missing_archs)}"
+ )
+
+ return True, None
+
+ def _check_packages_copied(self, source_ppa, target_ppa):
+ target_packages = list(
+ (package.source_package_name, package.source_package_version)
+ for package in target_ppa.getPublishedSources(
+ status="Published", distro_series=self.current_series
+ )
+ )
+ missing_packages = set()
+ for package in source_ppa.getPublishedSources(
+ status="Published", distro_series=self.current_series
+ ):
+ name, version = (
+ package.source_package_name,
+ package.source_package_version,
+ )
+ if (name, version) not in target_packages:
+ missing_packages.add((name, version))
+
+ if missing_packages:
+ error_message = "\n".join(
+ f"{name} {version} has not been copied"
+ for name, version in sorted(missing_packages)
+ )
+ error_message += f"\nGo to {source_ppa.web_link}/+copy-packages"
+ return False, error_message
+ else:
+ return True, None
+
+
+class MAASPackagePublished(MAASPPA):
+ def __init__(self, preparer):
+ super().__init__(preparer, "release-preparation")
+
+ @property
+ def title(self):
+ return f"MAAS package published in ({self.ppa_path})"
+
+ def check(self):
+ try:
+ ppa = self.ppa_owner.getPPAByName(name=self.ppa_name)
+ except NotFound:
+ return (
+ False,
+ f"ppa:{self.ppa_path} couldn't be found.",
+ )
+ else:
+ sources = list(
+ ppa.getPublishedSources(
+ source_name="maas",
+ status="Published",
+ distro_series=self.current_series,
+ )
+ )
+ if not sources:
+ return False, (
+ "Source package hasn't been published or uploaded yet."
+ )
+ [package] = sources
+ if not self._check_version(package.source_package_version):
+ expected = self.preparer.version.deb_version
+ return False, (
+ f"Currently published source version is {package.source_package_version}. Expected {expected}"
+ )
+ binaries = list(
+ ppa.getPublishedBinaries(
+ binary_name="maas",
+ exact_match=True,
+ status="Published",
+ )
+ )
+ if not binaries:
+ return False, "Binary packages haven't been published yet."
+ published_architectures = set()
+ for binary in binaries:
+ arch = binary.distro_arch_series_link.split("/")[-1]
+ if self._check_version(binary.binary_package_version):
+ published_architectures.add(arch)
+
+ non_published_architectures = sorted(
+ set(BUILD_ARCHS).difference(published_architectures)
+ )
+ if non_published_architectures:
+ return False, (
+ "Binary package hasn't been published for: "
+ f"{non_published_architectures}"
+ )
+
+ return True, None
+
+ def _check_version(self, package_version):
+ expected_package_version = self.preparer.version.deb_version
+ if ":" in package_version:
+ package_version = package_version.split(":", 1)[-1]
+ version_parts = package_version.split("-")
+ return (
+ version_parts[0] == expected_package_version
+ and version_parts[2] == f"g.{self.preparer.git_short_rev}"
+ )
+
+
+class PackagesCopiedFromDeps(MAASPPA):
+ def __init__(self, preparer):
+ super().__init__(preparer, "release-preparation")
+
+ @property
+ def title(self):
+ return "Packages copied from ppa:maas-committers/latest-deps"
+
+ def check(self):
+ try:
+ source_ppa = self.preparer.launchpad.lp.people[
+ "maas-committers"
+ ].getPPAByName(name="latest-deps")
+ except NotFound:
+ return False, "ppa:maas-committers/latest-deps couldn't be found."
+ try:
+ target_ppa = self.ppa_owner.getPPAByName(name=self.ppa_name)
+ except NotFound:
+ return (
+ False,
+ f"ppa:{self.ppa_path} couldn't be found.",
+ )
+ else:
+ return self._check_packages_copied(source_ppa, target_ppa)
+
+
+class PackagesCopiedToReleasePPA(MAASPPA):
+ @property
+ def title(self):
+ return f"Packages copied to ppa:{self.ppa_path}"
+
+ def skip(self):
+ return (
+ self.preparer.version.grade == "beta" and self.ppa_type == "stable"
+ )
+
+ def check(self):
+ sources = {
+ "stable": MAASPPA(self.preparer, "candidate"),
+ "candidate": MAASPPA(self.preparer, "release-preparation"),
+ }
+ source = sources[self.ppa_type]
+ try:
+ source_ppa = source.ppa_owner.getPPAByName(name=source.ppa_name)
+ except NotFound:
+ return (
+ False,
+ f"ppa:{self.ppa_path} couldn't be found.",
+ )
+ try:
+ target_ppa = self.ppa_owner.getPPAByName(name=self.ppa_name)
+ except NotFound:
+ return (
+ False,
+ f"ppa:{self.ppa_path} couldn't be found.",
+ )
+ else:
+ return self._check_packages_copied(source_ppa, target_ppa)
+
+
+def macaroon_auth(macaroons):
+ """Format a macaroon and its associated discharge.
+
+ :return: A string suitable to use in an Authorization header.
+
+ """
+ root_macaroon = Macaroon.deserialize(macaroons["r"])
+ discharged_macaroon = Macaroon.deserialize(macaroons["d"])
+ bound_macaroon = root_macaroon.prepare_for_request(
+ discharged_macaroon
+ ).serialize()
+ return f"Macaroon root={macaroons['r']}, discharge={bound_macaroon}"
+
+
+class PackageBuilt(ReleaseStep):
+ @property
+ def title(self):
+ return "MAAS source package is built"
+
+ def check(self):
+ short_rev = self.preparer.git_short_rev
+ package_version = self.preparer.version.deb_version
+ tar_gzs = glob.glob(
+ f"build_pkg/maas_{package_version}-*-g.{short_rev}.orig.tar.gz"
+ )
+ if len(tar_gzs) == 0:
+ return False, (
+ "No orig.tar.gz could be found for the current revision.\n"
+ "Run release-build."
+ )
+ [orig_tgz] = tar_gzs
+ return True, None
+
+
+class SnapsUploaded(ReleaseStep):
+
+ snap_name = "maas"
+
+ @property
+ def title(self):
+ return "Snaps have been built and uploaded to the store."
+
+ def _get_revisisions(self):
+ # XXX: This considers only the last 500 uploaded revisions. That's fine
+ # if you're currently working on the release, but it will
+ # fail if you check back later. We should probably make the
+ # different checks less fine grained. There's no need to
+ # check if the snap has been uploaded, if the snap is
+ # already released to a channel.
+ res = requests.get(
+ f"https://dashboard.snapcraft.io/dev/api/snaps/{MAAS_SNAP_ID}/"
+ + "history",
+ headers={
+ "Authorization": self.preparer.snapstore_auth,
+ "Accept": "application/json",
+ },
+ )
+ auth_error = get_macaroon_auth_error(res, self.snap_name)
+ if auth_error:
+ return None, auth_error
+ revision_map = {arch: [] for arch in BUILD_ARCHS}
+ for revision in res.json():
+ version = revision["version"]
+ if not version.startswith(self.preparer.version.deb_version):
+ continue
+ if not version.endswith(self.preparer.git_short_rev):
+ continue
+ revision_map[revision["arch"]].append(revision)
+ return revision_map, None
+
+ def check(self):
+ revision_map, error_message = self._get_revisisions()
+ if revision_map is None:
+ return False, error_message
+
+ missing_archs = sorted(
+ arch for arch, revision in revision_map.items() if not revision
+ )
+ if missing_archs:
+ return False, f"Missing builds for: {', '.join(missing_archs)}"
+
+ revision_info = []
+ for arch, revisions in revision_map.items():
+ latest_revision = max(
+ revision["revision"] for revision in revisions
+ )
+ revision_info.append(
+ f"Latest revision for {arch}: {latest_revision}"
+ )
+
+ return True, "\n".join(revision_info)
+
+
+class SnapsInChannel(SnapsUploaded):
+
+ snap_name = "maas"
+
+ def __init__(self, preparer, channel):
+ super().__init__(preparer)
+ self.channel = channel
+
+ @property
+ def title(self):
+ return f"Snaps have been released to {self.channel}"
+
+ def check(self):
+ revision_map, error_message = self._get_revisisions()
+ if revision_map is None:
+ return False, error_message
+ released_archs = set()
+ for arch, revisions in revision_map.items():
+ for revision in revisions:
+ for channel in revision["channels"]:
+ if channel == self.channel:
+ released_archs.add(arch)
+ break
+
+ missing_archs = sorted(set(BUILD_ARCHS).difference(released_archs))
+ if missing_archs:
+ return False, (f"Missing releases for: {', '.join(missing_archs)}")
+ return True, None
+
+
+class ReleaseTagged(ReleaseStep):
+ @property
+ def title(self):
+ return "Release has been tagged"
+
+ def check(self):
+ tag_name = self.preparer.version.version
+ tagged_revision = self.git.get_short_rev(
+ self.git.get_tag_commit(tag_name)
+ )
+ if tagged_revision == self.git.get_short_rev(tag_name):
+ return False, f"The {tag_name} isn't an annotated tag"
+ if not tagged_revision:
+ return False, "Release hasn't been tagged yet."
+
+ if tagged_revision != self.preparer.git_short_rev:
+ return False, (
+ f"{tag_name} points to {tagged_revision} instead of "
+ f"{self.preparer.git_short_rev}"
+ )
+
+ official_maas_remote = get_official_maas_remote(self.git)
+ remote_branches = self.git.get_remote_branches_containing(tag_name)
+ remotes = [remote for remote, _ in remote_branches]
+ if official_maas_remote not in remotes:
+ return False, (
+ f"{tag_name} tag is not pushed to {official_maas_remote}"
+ )
+
+ return True, None
+
+
+class VersionBranch(ReleaseStep):
+ def __init__(
+ self,
+ preparer: ReleasePreparer,
+ repo: str,
+ skip: Optional[Iterable[str]] = None,
+ ):
+ super().__init__(preparer)
+ self.repo = repo
+ self._skip_grades = skip or []
+
+ @property
+ def title(self) -> str:
+ return f"Git branch for {self._branch_version} created in remote repo {self.repo}"
+
+ @property
+ def _branch_version(self) -> str:
+ drop_idx = self.preparer.version.version.rfind(".")
+ return self.preparer.version.version[:drop_idx]
+
+ @property
+ def _ref_version(self) -> str:
+ return f"refs/heads/{self._branch_version}"
+
+ def skip(self):
+ return self.preparer.version.grade in self._skip_grades
+
+ def check(self):
+ refs = self.git.list_from_remote(self.repo, tags=False)
+ for branch in refs:
+ if self._ref_version in branch:
+ return True, None
+ return (
+ False,
+ f"{self._branch_version} was not found in remote repo {self.repo}",
+ )
+
+
+class MilestoneExist(ReleaseStep):
+ @property
+ def title(self):
+ return "Milestone created on Launchpad"
+
+ def check(self):
+ tag_name = self.preparer.version.version
+ try:
+ self.preparer.launchpad._get_milestone(tag_name)
+ except UnknownLaunchpadEntry:
+ return (
+ False,
+ f"Milestone {tag_name} was not found in Launchpad",
+ )
+ else:
+ return True, None
+
+
+class BugMovedToMilestone(ReleaseStep):
+ @property
+ def title(self):
+ return "Bugs moved to Milestone on Launchpad"
+
+ def check(self):
+ tag_name = self.preparer.version.version
+ try:
+ ms = self.preparer.launchpad._get_milestone(tag_name)
+ bug_tasks = ms.searchTasks(status=DONE_BUGS)
+ if len(bug_tasks) == 0:
+ return (
+ False,
+ f"Bugs not copied to milestone {tag_name}, use 'release-manage move-done-bugs' to fix this",
+ )
+ except UnknownLaunchpadEntry:
+ return (
+ False,
+ f"Milestone {tag_name} was not found in Launchpad",
+ )
+ else:
+ return True, None
+
+
+class MilestoneReleased(ReleaseStep):
+ @property
+ def title(self):
+ return "Milestone released on Launchpad"
+
+ def check(self):
+ tag_name = self.preparer.version.version
+ try:
+ ms = self.preparer.launchpad._get_milestone(tag_name)
+ if ms.is_active or len(ms.searchTasks(status="Fix Committed")) > 0:
+ return (
+ False,
+ "Milestone not released, use 'release-manage release-milestone' to fix this",
+ )
+ except UnknownLaunchpadEntry:
+ return (
+ False,
+ f"Milestone {tag_name} was not found in Launchpad",
+ )
+ else:
+ return True, None
+
+
+class DebianChangelogUpdated(ReleaseStep):
+ @property
+ def title(self):
+ return "Debian Changelog updated"
+
+ def check(self):
+ ver = self.preparer.version.deb_version
+ with open("debian/changelog", "r") as fh:
+ ch = Changelog(fh, max_blocks=1)
+ if ch.upstream_version == ver:
+ return True, None
+ return (
+ False,
+ f"Changelog doesn't contain {ver} entry",
+ )
+
+
+class SystemIntegrationTests(ReleaseStep):
+ def __init__(
+ self,
+ preparer: ReleasePreparer,
+ job_name: str,
+ ):
+ super().__init__(preparer)
+ self._job = job_name
+
+ @property
+ def title(self):
+ return f"System Integration '{self._job}' result"
+
+ def check(self):
+ try:
+ result, url = self.preparer.jenkins.get_last_build_result(
+ self._job
+ )
+ if result == "FAILURE":
+ return (
+ False,
+ f"Last build has failed, check {url}",
+ )
+ except JenkinsException:
+ return (
+ False,
+ "Failed to communicate with Jenkins, check your credentials",
+ )
+ else:
+ return True, None
+
+
+def parse_args():
+ parser = ArgumentParser(description=__doc__)
+ parser.add_argument("version", help="The version of MAAS to be released")
+ parser.add_argument(
+ "--dry-run",
+ action="store_true",
+ dest="dry_run",
+ help="Don't execute actions",
+ )
+ parser.add_argument(
+ "--launchpad-credentials",
+ default=None,
+ type=FileType(),
+ help="Launchpad credentials file",
+ )
+ parser.add_argument(
+ "--jenkins-config",
+ default=None,
+ type=FileType(),
+ help="Jenkins configuration file",
+ )
+ parser.add_argument(
+ "--jenkins-section",
+ default=None,
+ help="Jenkins server section name",
+ )
+
+ ns = parser.parse_args()
+ convert_file_descriptors_to_path(ns)
+ return ns
+
+
+def main():
+ args = parse_args()
+ try:
+ with open("release.macaroon", "r") as credentials_file:
+ raw = base64.b64decode(credentials_file.read())
+ macaroons = json.loads(raw)["v"]
+ except (OSError, json.decoder.JSONDecodeError):
+ print("Macaroon couldn't be found", file=sys.stderr)
+ print(get_macaroon_refresh_help(), file=sys.stderr)
+ return 1
+
+ launchpad = LaunchpadActions(
+ "maas",
+ credentials_file=args.launchpad_credentials,
+ dry_run=args.dry_run,
+ )
+ jenkins = JenkinsActions(
+ dry_run=args.dry_run,
+ jenkins_config=args.jenkins_config,
+ server_section=args.jenkins_section,
+ )
+ release_version = ReleaseVersion(args.version)
+ preparer = ReleasePreparer(
+ release_version,
+ macaroon_auth(macaroons),
+ launchpad=launchpad,
+ jenkins=jenkins,
+ )
+ preparer.steps = [
+ MAASVersion(preparer),
+ NoUncommittedChanges(preparer),
+ CommitInRemoteBranch(preparer),
+ DebianChangelogUpdated(preparer),
+ VersionBranch(
+ preparer,
+ f"git+ssh://{launchpad.lp.me.name}@git.launchpad.net/maas",
+ skip=["beta"],
+ ),
+ VersionBranch(
+ preparer,
+ f"git+ssh://{launchpad.lp.me.name}@git.launchpad.net/~maas-committers/maas/+git/maas-test-db",
+ ),
+ SnapTrack(preparer, "maas"),
+ SnapTrack(preparer, "maas-test-db"),
+ MAASPPA(preparer, "stable"),
+ MAASPPA(preparer, "candidate"),
+ MAASPPA(preparer, "release-preparation"),
+ PackagesCopiedFromDeps(preparer),
+ PackageBuilt(preparer),
+ MAASPackagePublished(preparer),
+ SnapsUploaded(preparer),
+ SnapsInChannel(
+ preparer,
+ release_version.snap_channels[0] + "/release-prep",
+ ),
+ SystemIntegrationTests(preparer, "maas-system-tests"),
+ SystemIntegrationTests(preparer, "maas-system-tests-snap"),
+ PackagesCopiedToReleasePPA(preparer, "candidate"),
+ *[
+ SnapsInChannel(preparer, snap_channel)
+ for snap_channel in release_version.snap_channels
+ ],
+ PackagesCopiedToReleasePPA(preparer, "stable"),
+ ReleaseTagged(preparer),
+ MilestoneExist(preparer),
+ BugMovedToMilestone(preparer),
+ MilestoneReleased(preparer),
+ ]
+ return preparer.run(args)
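Every entry in preparer.steps follows the ReleaseStep contract of returning a (succeeded, message) tuple from check(); for comparison with the new SystemIntegrationTests check, here is a minimal hypothetical step that is not part of the diff:

    class ChangelogMentionsVersion(ReleaseStep):
        """Hypothetical example step, not part of this merge proposal."""

        @property
        def title(self):
            return "Version mentioned in debian/changelog"

        def check(self):
            version = self.preparer.version.deb_version
            with open("debian/changelog") as fh:
                if version in fh.read():
                    return True, None
            return False, f"debian/changelog has no entry for {version}"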
diff --git a/maas_release_tools/scripts/release_upload.py b/maas_release_tools/scripts/release_upload.py
new file mode 100644
index 0000000..f426659
--- /dev/null
+++ b/maas_release_tools/scripts/release_upload.py
@@ -0,0 +1,113 @@
+"""Validate and upload a MAAS deb release to a PPA."""
+
+from argparse import ArgumentParser, ArgumentTypeError, FileType, Namespace
+from pathlib import Path
+import re
+from subprocess import CalledProcessError, check_output, PIPE
+import sys
+from typing import cast, Optional
+
+from packaging.version import Version
+
+
+class PPAURL:
+ """A PPA URL."""
+
+ url: str
+ release: Version
+ pocket: Optional[str]
+
+ _PPA_RE = re.compile(r"^ppa:maas/(?P<release>[0-9.]+)(-(?P<pocket>.*))?$")
+
+ def __init__(self, url: str):
+ match = self._PPA_RE.match(url)
+ if not match:
+ raise ArgumentTypeError("Invalid MAAS PPA URL")
+
+ self.url = url
+ matches = match.groupdict()
+ self.release = Version(matches["release"])
+ self.pocket = matches["pocket"]
+
+ def __str__(self) -> str:
+ return self.url
+
+ @property
+ def is_stable(self) -> bool:
+ return not self.pocket
+
+
+class ChangesFile(FileType):
+
+ version: Version
+
+ _FILE_NAME_RE = re.compile(r"^maas_(?P<version>[^-]+)-.*_source.changes$")
+
+ def __call__(self, path: str):
+ fileobj = cast(ChangesFile, super().__call__(path))
+ base_path = Path(path).name
+ match = self._FILE_NAME_RE.match(base_path)
+ if not match:
+ raise ArgumentTypeError("Invalid changes file name.")
+ # add version
+ version = match.groupdict()["version"].replace("~", "")
+ fileobj.version = Version(version)
+ return fileobj
+
+
+def parse_args() -> Namespace:
+ parser = ArgumentParser(description=__doc__)
+ parser.add_argument(
+ "ppa_url",
+ type=PPAURL,
+ help="PPA URL, e.g. ppa:maas/2.9",
+ metavar="PPA_URL",
+ )
+ parser.add_argument(
+ "changes_file",
+ type=ChangesFile(),
+ help="Path to .changes file to upload",
+ metavar="CHANGES_FILE",
+ )
+ parser.add_argument(
+ "--validate-only",
+ action="store_true",
+ help="Dry run, don't actually upload",
+ )
+ return parser.parse_args()
+
+
+def upload_ppa(ppa_url: str, changes_file: str):
+ try:
+ output = check_output(["dput", ppa_url, changes_file], stderr=PIPE)
+ except CalledProcessError as e:
+ sys.exit("Upload failed with message:\n" + e.stderr.decode())
+ print(output.decode(), end="")
+
+
+def release_version(version: Version):
+ """Return version as major.minor only."""
+ return "{}.{}".format(*version.release[:2])
+
+
+def main():
+ args = parse_args()
+ version = args.changes_file.version
+ ppa_version = args.ppa_url.release
+ changes_release = release_version(version)
+ ppa_release = release_version(ppa_version)
+ if changes_release != ppa_release:
+ sys.exit(
+ f"PPA ({ppa_release}) and changes file ({changes_release}) "
+ "versions don't match"
+ )
+ if (
+ version > ppa_version
+ and version.is_prerelease
+ and args.ppa_url.is_stable
+ ):
+ sys.exit(
+ "Can't upload prerelease version of a point release to stable PPA."
+ )
+ if not args.validate_only:
+ upload_ppa(str(args.ppa_url), args.changes_file.name)
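The upload script only calls dput after matching the .changes version against the target PPA as major.minor; a sketch of that comparison in isolation, using a hypothetical PPA and version:

    from packaging.version import Version

    from maas_release_tools.scripts.release_upload import PPAURL, release_version

    ppa = PPAURL("ppa:maas/3.3-next")               # release 3.3, pocket "next"
    print(ppa.release, ppa.pocket, ppa.is_stable)   # 3.3 next False
    # versions compare as major.minor only, so a 3.3.1 changes file matches a 3.3 PPA
    print(release_version(Version("3.3.1")) == release_version(ppa.release))  # True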
diff --git a/maas_release_tools/scripts/wsjfify.py b/maas_release_tools/scripts/wsjfify.py
new file mode 100644
index 0000000..8db0a3f
--- /dev/null
+++ b/maas_release_tools/scripts/wsjfify.py
@@ -0,0 +1,99 @@
+"""Recalculate WSJF for tickets with Cost of Delay and Story Points."""
+
+
+import argparse
+import json
+
+from jira import JIRA
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument(
+ "--dry-run",
+ action="store_true",
+ help="Dry run, don't actually change tickets",
+ )
+ parser.add_argument(
+ "--verbose",
+ action="store_true",
+ help="Verbose, show all tickets",
+ )
+ parser.add_argument(
+ "--jira-json",
+ type=argparse.FileType("r"),
+ default="jira.token",
+ help="Path to JIRA credentials JSON blob matching {'jira-server': , 'jira-login': , 'jira-token': }",
+ )
+ ns = parser.parse_args()
+ try:
+ with ns.jira_json as fh:
+ config = json.load(fh)
+ jira_server, login, token = (
+ config["jira-server"],
+ config["jira-login"],
+ config["jira-token"],
+ )
+ except (KeyError, json.JSONDecodeError):
+ parser.error(
+ f"Unable to read JIRA credentials from {ns.jira_json.name}"
+ )
+ return ns, jira_server, login, token
+
+
+def find_estimated_issues(jira, project, component):
+ return jira.search_issues(
+ f"project={project} AND component={component} AND status=Evaluated ORDER BY rank"
+ )
+
+
+def calculate_wsjf(issue):
+ return round(
+ issue.fields["cost_of_delay"] / issue.fields["story_points"], 3
+ )
+
+
+def pythonise_propertyholder(fields_by_name):
+ """Make fields behave like a dict."""
+ # If we have a request for a custom field by slug name, then
+ # lookup the actual id and get it from the __dict__
+ def getitem(self2, key, *args, **kwargs):
+ under_key = fields_by_name.get(key, key)
+ return self2.__dict__.__getitem__(under_key)
+
+ from jira.resources import PropertyHolder
+
+ PropertyHolder.__getitem__ = getitem
+ PropertyHolder.__setitem__ = lambda s, k, v: s.__dict__.__setitem__(k, v)
+ PropertyHolder.__delitem__ = lambda s, k: s.__dict__.__delitem__(k)
+
+
+def main():
+ args, jira_server, login, token = parse_args()
+ jira = JIRA(jira_server, basic_auth=(login, token))
+
+ # Get custom fields
+ all_fields = jira.fields()
+
+ def slugify(name):
+ return name.lower().replace(" ", "_").replace("-", "_")
+
+ fields_by_name = {
+ slugify(field["name"]): field["id"] for field in all_fields
+ }
+
+ # Make the fields dict-like and friendly
+ pythonise_propertyholder(fields_by_name)
+
+ for issue in find_estimated_issues(jira, project="PF", component="MAAS"):
+ existing_wsjf = issue.fields["wsjf"]
+ wsjf = calculate_wsjf(issue)
+ if wsjf != existing_wsjf:
+ print(
+ f"[{issue.key}] setting WSJF to {wsjf} (was {existing_wsjf})"
+ )
+ if not args.dry_run:
+ issue.update(fields={fields_by_name["WSJF"]: wsjf})
+ elif args.verbose:
+ print(f"[{issue.key}] {issue.fields.summary}: {wsjf} ✅")
+ return 0
diff --git a/maas_release_tools/version.py b/maas_release_tools/version.py
new file mode 100644
index 0000000..3b5b5e0
--- /dev/null
+++ b/maas_release_tools/version.py
@@ -0,0 +1,82 @@
+"""Utilities to deal with versions."""
+
+from configparser import ConfigParser
+from dataclasses import dataclass
+from pathlib import Path
+
+from packaging.version import Version
+
+
+class InvalidReleaseVersion(Exception):
+ """Provided release version is invalid."""
+
+
+@dataclass(init=False)
+class ReleaseVersion:
+
+ version: str
+ major: str
+ python_version: Version
+ grade: str
+ snap_channels: list
+ deb_version: str
+
+ def __init__(self, version: str):
+ self.version = version
+ self.major = self.version.rsplit(".", 1)[0]
+ self.python_version = self._python_version()
+ self.grade = self._grade()
+ self.snap_channels = self._snap_channels()
+ self.deb_version = self.version.replace("-", "~")
+
+ def _python_version(self) -> Version:
+ string_version = (
+ self.version.replace("-alpha", "a")
+ .replace("-beta", "b")
+ .replace("-rc", "rc")
+ )
+ return Version(string_version)
+
+ def _grade(self) -> str:
+ if "-" not in self.version:
+ return "final"
+ suffix = self.version.split("-")[1]
+ for grade in ("alpha", "beta", "rc"):
+ if suffix.startswith(grade):
+ return grade
+ else:
+ raise InvalidReleaseVersion(f"Unknown version suffix: {suffix}")
+
+ def _snap_channels(self) -> list:
+ grade_map = {
+ "final": "stable",
+ "rc": "candidate",
+ "beta": "beta",
+ }
+ # All releases should go to "beta" first to be tested by solqa.
+ channels = [] if self.grade == "beta" else [f"{self.major}/beta"]
+ try:
+ channels.append(f"{self.major}/{grade_map[self.grade]}")
+ except KeyError:
+ raise InvalidReleaseVersion(f"Unknown version grade: {self.grade}")
+ else:
+ return channels
+
+
+def get_branch_setup_version() -> Version:
+ """Return a Python version from the project setup config."""
+ config = ConfigParser()
+ config.read("setup.cfg")
+ try:
+ return Version(config["metadata"]["version"])
+ except KeyError:
+ # In 2.9 and earlier, the version is in setup.py.
+ for line in Path("setup.py").read_text().splitlines():
+ if "=" not in line:
+ continue
+ key, value = line.strip().split("=", 1)
+ if key == "version":
+ # value should look like '"2.9.3",'
+ return Version(value[1:-2])
+ else:
+ raise RuntimeError("Version not found in setup.py")
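A worked example of how ReleaseVersion above decomposes a version string; the expected values follow directly from the code, and the version itself is arbitrary:

    from maas_release_tools.version import ReleaseVersion

    v = ReleaseVersion("3.3.0-rc1")
    print(v.major)           # "3.3"
    print(v.grade)           # "rc"
    print(v.python_version)  # 3.3.0rc1
    print(v.deb_version)     # "3.3.0~rc1"
    print(v.snap_channels)   # ["3.3/beta", "3.3/candidate"]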
diff --git a/pyproject.toml b/pyproject.toml
index abafc45..0f1e58a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,4 @@
+<<<<<<< pyproject.toml
[build-system]
# With setuptools 50.0.0, 'make .ve' fails.
requires = ["setuptools < 50.0.0", "wheel"]
@@ -5,10 +6,14 @@ build-backend = "setuptools.build_meta"
[tool.black]
line-length = 79
+=======
+[tool.black]
+>>>>>>> pyproject.toml
exclude = """
/.egg
/.git
/.mypy_cache
+<<<<<<< pyproject.toml
/.ve
/build
/dist
@@ -58,3 +63,27 @@ markers = [
"perftest: marks tests for performance testing"
]
addopts = "--reuse-db"
+=======
+/.tox
+"""
+line-length = 79
+
+[tool.isort]
+force_grid_wrap = 0
+force_sort_within_sections = true
+from_first = false
+include_trailing_comma = true
+known_first_party = "maas_release_tools"
+line_length = 79
+multi_line_output = 3
+order_by_type = false
+profile = "black"
+use_parentheses = true
+
+[tool.mypy]
+ignore_missing_imports = true
+install_types = true
+non_interactive = true
+warn_return_any = true
+warn_unused_configs = true
+>>>>>>> pyproject.toml
diff --git a/scripts/_lib b/scripts/_lib
new file mode 100644
index 0000000..180007c
--- /dev/null
+++ b/scripts/_lib
@@ -0,0 +1,14 @@
+# -*- mode: sh -*-
+
+exit_error() {
+ echo "$@" >&2
+ exit 1
+}
+
+check_is_maas_tree() {
+ local package_name
+ package_name="$(python3 setup.py --name 2>/dev/null || true)"
+ if [ "$package_name" != "maas" ]; then
+ exit_error "Current directory is not a maas tree"
+ fi
+}
diff --git a/scripts/release-build b/scripts/release-build
new file mode 100755
index 0000000..9093fea
--- /dev/null
+++ b/scripts/release-build
@@ -0,0 +1,92 @@
+#!/bin/bash -e
+#
+# Build release packages for the host Ubuntu release
+#
+# Usage:
+# ./release-build
+#
+# It's possible to override the package build revision with DEB_BUILD_REV=n (by
+# default it's 1)
+#
+# If SKIP_UI_BUILD=1 is specified, the UI tree won't be rebuilt.
+#
+
+export DEBFULLNAME="${DEBFULLNAME:-$(git config user.name)}"
+export DEBEMAIL="${DEBEMAIL:-$(git config user.email)}"
+
+. /etc/lsb-release
+
+# shellcheck source=scripts/_lib
+. "$(dirname "$(realpath "$0")")/_lib"
+
+
+# The package build revision
+DEB_BUILD_REV=${DEB_BUILD_REV:-1}
+# Whether to skip the UI build
+SKIP_UI_BUILD=${SKIP_UI_BUILD:-0}
+
+ROOTDIR="$PWD"
+BUILDDIR="$ROOTDIR/build_pkg"
+PACKAGE_BUILD_AREA="$ROOTDIR/../build-area"
+
+
+git_tree_clean() {
+ git diff-index --quiet HEAD
+}
+
+
+build_source_package() {
+ if [ "$SKIP_UI_BUILD" != 1 ]; then
+ make -C "$ROOTDIR/src/maasui" clean build # ensure the UI is updated
+ fi
+ make -C "$ROOTDIR/src/maas-offline-docs" # ensure offline docs are updated
+ make -C "$ROOTDIR" package-tree
+}
+
+# ensure name and email address are correct for the signing (must have DEBEMAIL
+# and DEBFULLNAME set)
+ensure_changelog_author() {
+ dch -a "" --release-heuristic log --nomultimaint
+}
+
+update_changelog_version() {
+ deb_version="$(dpkg-parsechangelog -S Version)"
+ local new_version="${deb_version}~${DISTRIB_RELEASE}.${DEB_BUILD_REV}"
+ sed -i "1 s/(.*;/($new_version) ${DISTRIB_CODENAME};/" debian/changelog
+}
+
+check_build_dependencies() {
+ dpkg-checkbuilddeps debian/control
+}
+
+
+# Main
+check_is_maas_tree
+
+if ! git_tree_clean; then
+ exit_error "Git tree is not clean, please reset."
+fi
+
+
+# Check build dependencies early. The script would fail eventually without
+# this check, but it might take a few minutes of work first. Better to
+# fail early.
+check_build_dependencies
+
+mkdir -p "$BUILDDIR"
+
+build_source_package
+# copy source package in the build dir
+source_package_dir=$(
+ find "$PACKAGE_BUILD_AREA" -mindepth 1 -maxdepth 1 -type d | head -n1)
+source_package_dir="$(basename "$source_package_dir")"
+package_dir="$BUILDDIR/$source_package_dir"
+rm -rf "$package_dir"
+cp -a "$PACKAGE_BUILD_AREA"/* "$BUILDDIR"
+
+(
+ cd "$package_dir"
+ ensure_changelog_author
+ update_changelog_version
+ debuild -S -sa
+)
diff --git a/scripts/release-prepare b/scripts/release-prepare
new file mode 100755
index 0000000..f9e6164
--- /dev/null
+++ b/scripts/release-prepare
@@ -0,0 +1,115 @@
+#!/bin/bash -e
+#
+# Prepare a MAAS release by doing the following:
+#
+# - update python project version
+# - add d/changelog entry for the release
+# - commit changes
+#
+# The script must be called with a single argument for the MAAS python version
+# to update to.
+#
+
+# shellcheck source=scripts/_lib
+. "$(dirname "$(realpath "$0")")/_lib"
+
+export DEBFULLNAME="${DEBFULLNAME:-$(git config user.name)}"
+export DEBEMAIL="${DEBEMAIL:-$(git config user.email)}"
+
+git_tree_clean() {
+ git diff-index --quiet HEAD
+}
+
+git_show_commit() {
+ git show HEAD
+}
+
+version_changed() {
+ ! git diff -s --exit-code setup.cfg
+}
+
+deb_version() {
+ local version
+ version="$(echo "$1" | sed 's/a/~alpha/; tend; s/b/~beta/; tend; s/rc/~rc/; :end')"
+ epoch="$(head -1 "debian/changelog" | sed -n 's|maas (\([1-9]*\):.*|\1|p')"
+ if [ -n "$epoch" ]; then
+ echo "${epoch}:${version}-0ubuntu1"
+ else
+ echo "${version}-0ubuntu1"
+ fi
+}
+
+verbose_version() {
+ echo "$1" | sed 's/a/ alpha/; tend; s/b/ beta/; tend; s/rc/ RC/; :end'
+}
+
+tag_version() {
+ echo "$1" | sed 's/a/-alpha/; tend; s/b/-beta/; tend; s/rc/-rc/; :end'
+}
+
+replace_setup_version() {
+ local version="$1"
+ sed -i 's/\bversion = .*$/version = '"$version"'/' setup.cfg
+}
+
+add_debian_changelog() {
+ local version="$1"
+ local distro="$2"
+
+ local distro_opt
+ [ "$distro" ] && distro_opt="-D $distro" || distro_opt=""
+ # shellcheck disable=SC2086
+ dch $distro_opt -v "$(deb_version "$version")" \
+ "New upstream release, MAAS $(verbose_version "$version")."
+ dch -r ""
+}
+
+commit() {
+ local version="$1"
+ local message
+ message="Prepare for $(verbose_version "$version") release"
+
+ git commit -a -m "$message"
+}
+
+exit_usage() {
+ local script
+ script="$(basename "$0")"
+ exit_error "Usage $script <major>.<minor>.<micro>[{a,b,rc}<num>]"
+}
+
+
+# Main
+version="$1"
+distro="$2" # optional
+
+major_version="$(echo "${version}" | cut -d'.' -f-2)"
+current_branch="$(git branch --show-current)"
+
+check_is_maas_tree
+
+if [ -z "$version" ]; then
+ exit_usage
+elif ! echo "$version" | grep -Eq "^[2-9]+\.[0-9]+\.[0-9]+((a|b|rc)[0-9]+)?$"; then
+ echo "Invalid version!" >&2
+ exit_usage
+elif [[ "$major_version" != *${current_branch}* ]]; then
+ # Verify tags are created from the branch for that version if it exists.
+ for branch in $(git ls-remote --heads origin | awk -F/ '{ print $3 }'); do
+ if [[ "$major_version" == *${branch}* ]]; then
+ exit_error "Branch ${branch} exists for version ${version}. Refusing to tag ${current_branch}."
+ fi
+ done
+fi
+
+if ! git_tree_clean; then
+ exit_error "Git tree is not clean, please reset."
+fi
+
+replace_setup_version "$version"
+if ! version_changed; then
+ exit_error "The version is already set to $1"
+fi
+add_debian_changelog "$version" "$distro"
+commit "$version"
+git_show_commit
diff --git a/setup.cfg b/setup.cfg
index 402ce94..8e77553 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,4 +1,5 @@
[metadata]
+<<<<<<< setup.cfg
name = maas
version = 3.3.0b3
description = Metal As A Service
@@ -137,3 +138,75 @@ commands =
deps = {[globals]deps_oapi}
commands =
python3 utilities/check-oapi openapi.yaml
+=======
+name = maas-release-tools
+version = 0.0.1
+description = MAAS release tools
+long_description = MAAS release tools
+
+[options]
+packages = find:
+install_requires =
+ jira
+ launchpadlib
+ packaging
+ pymacaroons
+ requests
+ python-debian
+
+[options.entry_points]
+console_scripts =
+ release-manage = maas_release_tools.scripts.release_manage:main
+ release-status = maas_release_tools.scripts.release_status:main
+ release-upload = maas_release_tools.scripts.release_upload:main
+ wsjfify = maas_release_tools.scripts.wsjfify:main
+ missing-estimates = maas_release_tools.scripts.missingestimates:main
+distutils.commands =
+ console_scripts = maas_release_tools.scripts:PrintConsoleScripts
+
+[options.extras_require]
+format =
+ black
+ flake8
+ isort
+check =
+ mypy
+
+[flake8]
+ignore = E203, E266, E501, W503
+exclude = .git, .tox, dist, *egg
+
+[globals]
+lint_files = setup.py maas_release_tools
+
+[tox:tox]
+minversion = 1.6
+envlist = lint, check
+skipsdist = True
+
+[testenv]
+usedevelop = True
+
+[testenv:format]
+deps =
+ .[format]
+commands=
+ {envbindir}/isort {[globals]lint_files}
+ {envbindir}/black -q {[globals]lint_files}
+
+[testenv:lint]
+deps =
+ black
+ flake8
+ isort
+commands =
+ {envbindir}/isort --check-only --diff {[globals]lint_files}
+ {envbindir}/black --check {[globals]lint_files}
+ {envbindir}/flake8 {[globals]lint_files}
+
+[testenv:check]
+deps =
+ .[check]
+commands =
+ mypy -p maas_release_tools {posargs}
+>>>>>>> setup.cfg
diff --git a/setup.py b/setup.py
index 9280621..07723fd 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,9 @@
+<<<<<<< setup.py
# Copyright 2012-2021 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
+=======
+>>>>>>> setup.py
from setuptools import setup
setup()