launchpad-reviewers team mailing list archive
Message #19658
[Merge] lp:~cjwatson/charms/trusty/turnipcake/build-label into lp:~canonical-launchpad-branches/charms/trusty/turnipcake/devel
Colin Watson has proposed merging lp:~cjwatson/charms/trusty/turnipcake/build-label into lp:~canonical-launchpad-branches/charms/trusty/turnipcake/devel.
Commit message:
Allow updating the code payload separately from the charm using a build label.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/charms/trusty/turnipcake/build-label/+merge/275469
Allow updating the code payload separately from the charm using a build label. This parallels https://code.launchpad.net/~cjwatson/charms/trusty/turnip/build-label/+merge/275468.
The approach used here is based heavily on the software-center-agent charm. The preferred workflow is to store the payload in Swift (Canonical developers can use their Canonistack credentials for this), but you can also use "make deploy" or "make rollout" to do an initial deployment or an updated code rollout respectively; in that case the payload is pushed to the unit manually via juju scp. The previous payload is kept in place for the sake of quick rollouts, but at the moment we prune all but the previous and current payloads: this has the benefit of not having to work out which payloads are the newest.
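As an illustration of the two workflows (the make targets, config option names and build_label setting come from this branch's Makefile.common and config.yaml; the credentials and container values below are placeholders, not anything from this proposal):

    # Manual path: build the tarball and push it to the unit by hand.
    # REV selects the source revision to fetch, as in the check-rev target.
    REV=10 make deploy      # initial deployment
    REV=11 make rollout     # roll out an updated payload later

    # Preferred path: give the charm Swift credentials so it fetches builds
    # itself, then just point it at a build label (a bzr revision-id).
    juju set turnipcake \
        swift_username=<user> swift_password=<password> \
        swift_auth_url=<keystone-url> swift_region_name=<region> \
        swift_tenant_name=<tenant> swift_container_name=<container>
    juju set turnipcake build_label=<bzr revision-id of the build>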
The virtualenv moves inside the payload directory so that each payload gets its own. /srv/turnipcake/code remains as a symlink to the current payload, which is convenient.
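For reference, a sketch of the on-disk layout this produces (directory names taken from hooks/actions.py in this diff; the build labels are placeholders for bzr revision-ids):

    /srv/turnipcake/
        payloads/
            <previous-build-label>/   # kept so rolling back to it is quick
            <current-build-label>/
                env/                  # per-payload virtualenv
        code -> payloads/<current-build-label>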
There are no tests directly here, but it'll at least get integration testing by way of the corresponding changes to the Mojo spec.
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/charms/trusty/turnipcake/build-label into lp:~canonical-launchpad-branches/charms/trusty/turnipcake/devel.
=== modified file '.bzrignore'
--- .bzrignore 2015-02-11 01:09:24 +0000
+++ .bzrignore 2015-10-22 22:09:21 +0000
@@ -1,3 +1,2 @@
lib/charmhelpers
-files/*.tar.gz
-files/*.tgz
+files/*
=== modified file 'Makefile.common'
--- Makefile.common 2015-03-26 02:18:32 +0000
+++ Makefile.common 2015-10-22 22:09:21 +0000
@@ -7,13 +7,13 @@
TEST_PREFIX := PYTHONPATH=$(HOOKS_DIR)
TEST_DIR := $(PWD)/tests
SOURCE_DIR ?= $(shell dirname $(PWD))/.source/$(APP_NAME)
-PIP_CACHE := $(PWD)/files/pip-cache
+FILES_DIR := $(PWD)/files
-ifeq ($(PIP_SOURCE_DIR),)
-PIP_CACHE_ARGS :=
-else
-PIP_CACHE_ARGS := --no-index --find-links=file://$(PIP_SOURCE_DIR)
-endif
+BUILD_LABEL = $(shell bzr log -rlast: --show-ids $(SOURCE_DIR) | sed -n 's/^revision-id: //p')
+TARBALL = $(APP_NAME).tar.gz
+ASSET = $(FILES_DIR)/$(BUILD_LABEL)/$(TARBALL)
+UNIT = $(APP_NAME)/0
+CHARM_UNIT_PATH := /var/lib/juju/agents/unit-$(APP_NAME)-0/charm
all: setup lint test
@@ -22,9 +22,22 @@
@juju upgrade-charm --repository=../.. $(APP_NAME)
-deploy: tarball pip-cache
+deploy: payload
@echo "Deploying $(APP_NAME)..."
@juju deploy --repository=../.. local:trusty/$(APP_NAME)
+ @$(MAKE) rollout SKIP_BUILD=true
+
+
+# deploy a new revision/branch
+rollout: _PATH=$(CHARM_UNIT_PATH)/files/$(BUILD_LABEL)
+rollout:
+ifneq ($(SKIP_BUILD),true)
+ $(MAKE) payload
+endif
+ # manually copy our asset to be in the right place, rather than upgrade-charm
+ juju scp $(ASSET) $(UNIT):$(TARBALL)
+ juju ssh $(UNIT) 'sudo mkdir -p $(_PATH) && sudo mv $(TARBALL) $(_PATH)/'
+ juju set $(APP_NAME) build_label=$(BUILD_LABEL)
ifeq ($(NO_FETCH_CODE),)
@@ -41,23 +54,14 @@
endif
-pip-cache: fetch-code
- @echo "Updating python dependency cache..."
- @mkdir -p $(PIP_CACHE)
- @pip install $(PIP_CACHE_ARGS) --no-use-wheel --download $(PIP_CACHE) \
- -r $(SOURCE_DIR)/requirements.txt \
- -r deploy-requirements.txt
-
-
check-rev:
ifndef REV
$(error Revision number required to fetch source: e.g. $ REV=10 make deploy)
endif
-tarball: fetch-code
- @echo "Creating tarball for deploy..."
- @mkdir -p files/
- @tar czf files/$(APP_NAME).tar.gz -C $(SOURCE_DIR) .
+payload: fetch-code
+ @echo "Building asset for $(BUILD_LABEL)..."
+ @$(MAKE) -C $(SOURCE_DIR) build-tarball TARBALL_BUILDS_DIR=$(FILES_DIR)
# The following targets are for charm maintenance.
@@ -67,7 +71,6 @@
@find . -depth -name '__pycache__' -exec rm -rf '{}' \;
@rm -f .coverage
@rm -rf $(SOURCE_DIR)
- @rm -rf $(PIP_CACHE)
@rm -rf .venv
@@ -100,4 +103,4 @@
@echo "Starting tests..."
@$(TEST_PREFIX) .venv/bin/coverage run -m unittest discover -s unit_tests
-.PHONY: clean lint setup tarball test upgrade
+.PHONY: clean lint setup payload test upgrade
=== modified file 'config.yaml'
--- config.yaml 2015-03-26 02:18:32 +0000
+++ config.yaml 2015-10-22 22:09:21 +0000
@@ -3,6 +3,10 @@
type: string
default: 'turnipcake'
description: Name of this application.
+ build_label:
+ type: string
+ default: ""
+ description: Build label to run.
nagios_context:
default: "juju"
type: string
@@ -37,3 +41,27 @@
type: string
default: turnipcake
description: The service will run under this group.
+ swift_username:
+ type: string
+ default: ""
+ description: Username to use when accessing Swift.
+ swift_password:
+ type: string
+ default: ""
+ description: Password to use when accessing Swift.
+ swift_auth_url:
+ type: string
+ default: ""
+ description: URL for authenticating against Keystone.
+ swift_region_name:
+ type: string
+ default: ""
+ description: Swift region.
+ swift_tenant_name:
+ type: string
+ default: ""
+ description: Entity that owns resources.
+ swift_container_name:
+ type: string
+ default: ""
+ description: Container to put objects in.
=== removed file 'deploy-requirements.txt'
--- deploy-requirements.txt 2015-03-27 01:39:30 +0000
+++ deploy-requirements.txt 1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
-gunicorn==19.3.0
=== modified file 'hooks/actions.py'
--- hooks/actions.py 2015-03-27 07:41:51 +0000
+++ hooks/actions.py 2015-10-22 22:09:21 +0000
@@ -1,6 +1,8 @@
+import errno
import grp
import os
import pwd
+import shutil
import subprocess
from charmhelpers import fetch
@@ -16,14 +18,22 @@
# Globals
CHARM_FILES_DIR = os.path.join(hookenv.charm_dir(), 'files')
-REQUIRED_PACKAGES = ['python-virtualenv', 'python-dev', 'make']
+REQUIRED_PACKAGES = [
+ 'python-virtualenv',
+ 'python-dev',
+ 'make',
+ 'python-swiftclient',
+ ]
BASE_DIR = config['base_dir']
+PAYLOADS_DIR = os.path.join(BASE_DIR, 'payloads')
CODE_DIR = os.path.join(BASE_DIR, 'code')
-VENV_DIR = os.path.join(BASE_DIR, 'venv')
+VENV_DIR = os.path.join(CODE_DIR, 'env')
+OLD_VENV_DIR = os.path.join(BASE_DIR, 'venv')
LOGS_DIR = os.path.join(BASE_DIR, 'logs')
DATA_DIR = os.path.join(BASE_DIR, 'data')
# XXX: Should really move this outside the code dir.
DB_DIR = os.path.join(BASE_DIR, 'code', 'db')
+CODE_TARBALL = 'turnipcake.tar.gz'
CODE_USER = config['code_user']
CODE_GROUP = config['code_group']
@@ -43,7 +53,7 @@
def make_srv_location():
hookenv.log('Creating directories...')
- for dir in (BASE_DIR, CODE_DIR):
+ for dir in (BASE_DIR, PAYLOADS_DIR):
host.mkdir(dir, owner=CODE_USER, group=CODE_GROUP, perms=0o755)
for dir in (LOGS_DIR, DATA_DIR):
host.mkdir(dir, owner=USER, group=GROUP, perms=0o755)
@@ -63,28 +73,159 @@
host.add_user_to_group(USER, GROUP)
-def unpack_source(service_name):
- hookenv.log('Deploying source...')
-
+def get_swift_creds(config):
+ return {
+ 'user': config['swift_username'],
+ 'project': config['swift_tenant_name'],
+ 'password': config['swift_password'],
+ 'authurl': config['swift_auth_url'],
+ 'region': config['swift_region_name'],
+ }
+
+
+def swift_base_cmd(**swift_creds):
+ return [
+ 'swift',
+ '--os-username=' + swift_creds['user'],
+ '--os-tenant-name=' + swift_creds['project'],
+ '--os-password=' + swift_creds['password'],
+ '--os-auth-url=' + swift_creds['authurl'],
+ '--os-region-name=' + swift_creds['region'],
+ ]
+
+
+def swift_get_etag(name, container=None, **swift_creds):
+ cmd = swift_base_cmd(**swift_creds) + ['stat', container, name]
+ file_stat = subprocess.check_output(cmd).splitlines()
+ for line in file_stat:
+ words = line.split()
+ if words[0] == 'ETag:':
+ return words[1]
+
+
+def swift_fetch(source, target, container=None, **swift_creds):
+ cmd = swift_base_cmd(**swift_creds) + [
+ 'download', '--output=' + target, container, source]
+ subprocess.check_call(cmd)
+
+
+def unlink_force(path):
+ """Unlink path, without worrying about whether it exists."""
+ try:
+ os.unlink(path)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+
+def symlink_force(source, link_name):
+ """Create symlink link_name -> source, even if link_name exists."""
+ unlink_force(link_name)
+ os.symlink(source, link_name)
+
+
+def install_python_packages(target_dir):
+ hookenv.log('Installing Python dependencies...')
+ subprocess.check_call(
+ ['sudo', '-u', CODE_USER, 'make', '-C', target_dir, 'build',
+ 'PIP_SOURCE_DIR=%s' % os.path.join(target_dir, 'pip-cache')])
+
+
+def prune_payloads(keep):
+ for entry in os.listdir(PAYLOADS_DIR):
+ if entry in keep:
+ continue
+ entry_path = os.path.join(PAYLOADS_DIR, entry)
+ if os.path.isdir(entry_path):
+ hookenv.log('Purging old build in %s...' % entry_path)
+ shutil.rmtree(entry_path)
+
+
+def migrate_db():
+ hookenv.log('Migrating database...')
+ path = '%s:%s' % (os.path.join(VENV_DIR, 'bin'), os.environ['PATH'])
+
+ with host.chdir(CODE_DIR):
+ subprocess.check_call([
+ 'sudo', '-u', USER, 'PATH=%s' % path, 'make', 'migrate'])
+
+
+def deploy_code(service_name):
make_srv_location()
+ current_build_label = None
+ if os.path.islink(CODE_DIR):
+ current_build_label = os.path.basename(os.path.realpath(CODE_DIR))
+ elif os.path.isdir(os.path.join(CODE_DIR, '.bzr')):
+ log_output = subprocess.check_output(
+ ['bzr', 'log', '-rlast:', '--show-ids', CODE_DIR])
+ for line in log_output.splitlines():
+ if line.startswith('revision-id: '):
+ current_build_label = line[len('revision-id: '):]
+ desired_build_label = config['build_label']
+ if not desired_build_label:
+ if current_build_label is not None:
+ hookenv.log(
+ 'No desired build label, but build %s is already deployed' %
+ current_build_label)
+ # We're probably upgrading from a charm that used old-style code
+ # assets, so make sure we at least have a virtualenv available
+ # from the current preferred location.
+ if not os.path.isdir(VENV_DIR) and os.path.isdir(OLD_VENV_DIR):
+ os.symlink(OLD_VENV_DIR, VENV_DIR)
+ return
+ else:
+ raise AssertionError('Build label unset, so cannot deploy code')
+ if current_build_label == desired_build_label:
+ hookenv.log('Build %s already deployed' % desired_build_label)
+ return
+ hookenv.log('Deploying build %s...' % desired_build_label)
+
# Copy source archive
- archive_path = os.path.join(BASE_DIR, 'turnipcake.tar.gz')
-
- with open(os.path.join(CHARM_FILES_DIR, 'turnipcake.tar.gz')) as file:
- host.write_file(archive_path, file.read(), perms=0o644)
-
- # Unpack source
- archive.extract_tarfile(archive_path, CODE_DIR)
- os.chown(
- CODE_DIR,
- pwd.getpwnam(CODE_USER).pw_uid, grp.getgrnam(CODE_GROUP).gr_gid)
- host.lchownr(CODE_DIR, CODE_USER, CODE_GROUP)
-
- # Ensure the DB is writable by the app user. It really shouldn't
- # live in the code tree.
- os.chown(DB_DIR, pwd.getpwnam(USER).pw_uid, grp.getgrnam(GROUP).gr_gid)
- host.lchownr(DB_DIR, USER, GROUP)
+ archive_path = os.path.join(PAYLOADS_DIR, desired_build_label + '.tar.gz')
+ object_name = os.path.join(desired_build_label, CODE_TARBALL)
+
+ try:
+ if config['swift_container_name']:
+ swift_creds = get_swift_creds(config)
+ swift_container = config['swift_container_name']
+ swift_fetch(
+ os.path.join('turnipcake-builds', object_name), archive_path,
+ container=swift_container, **swift_creds)
+ else:
+ with open(os.path.join(CHARM_FILES_DIR, object_name)) as file:
+ host.write_file(archive_path, file.read(), perms=0o644)
+
+ # Unpack source
+ target_dir = os.path.join(PAYLOADS_DIR, desired_build_label)
+ if os.path.isdir(target_dir):
+ shutil.rmtree(target_dir)
+ archive.extract_tarfile(archive_path, target_dir)
+ os.chown(
+ target_dir,
+ pwd.getpwnam(CODE_USER).pw_uid, grp.getgrnam(CODE_GROUP).gr_gid)
+ host.lchownr(target_dir, CODE_USER, CODE_GROUP)
+
+ # Ensure the DB is writable by the app user. It really shouldn't
+ # live in the code tree.
+ os.chown(DB_DIR, pwd.getpwnam(USER).pw_uid, grp.getgrnam(GROUP).gr_gid)
+ host.lchownr(DB_DIR, USER, GROUP)
+
+ install_python_packages(target_dir)
+
+ if not os.path.islink(CODE_DIR) and os.path.isdir(CODE_DIR):
+ old_payload_dir = os.path.join(PAYLOADS_DIR, current_build_label)
+ if os.path.exists(old_payload_dir):
+ shutil.rmtree(CODE_DIR)
+ else:
+ os.rename(CODE_DIR, old_payload_dir)
+ symlink_force(
+ os.path.relpath(target_dir, os.path.dirname(CODE_DIR)), CODE_DIR)
+ prune_payloads([desired_build_label, current_build_label])
+ finally:
+ unlink_force(archive_path)
+
+ migrate_db()
def install_packages(service_name):
@@ -93,42 +234,11 @@
fetch.apt_install(REQUIRED_PACKAGES, fatal=True)
-def install_python_packages(service_name):
- hookenv.log('Installing Python dependencies...')
- pip_cache = os.path.join(CHARM_FILES_DIR, 'pip-cache')
- code_reqs = os.path.join(CODE_DIR, 'requirements.txt')
- deploy_reqs = os.path.join(hookenv.charm_dir(), 'deploy-requirements.txt')
-
- pip_bin = os.path.join(VENV_DIR, 'bin', 'pip')
-
- subprocess.call([
- 'sudo', '-u', CODE_USER, 'virtualenv', '--system-site-packages',
- VENV_DIR])
- subprocess.check_call([
- 'sudo', '-u', CODE_USER, pip_bin, 'install', '--no-index',
- '--find-links={}'.format(pip_cache), '-r', code_reqs,
- '-r', deploy_reqs])
- subprocess.check_call([
- 'sudo', '-u', CODE_USER, pip_bin, 'install', '--no-deps',
- '-e', CODE_DIR])
-
-
-def migrate_db(service_name):
- hookenv.log('Migrating database...')
- path = '%s:%s' % (os.path.join(VENV_DIR, 'bin'), os.environ['PATH'])
-
- with host.chdir(CODE_DIR):
- subprocess.check_call([
- 'sudo', '-u', USER, 'PATH=%s' % path, 'make', 'migrate'])
-
-
def publish_wsgi_relations(self):
# Publish the wsgi-file relation so the gunicorn subordinate can
# serve us. Other WSGI containers could be made to work, as the most
# gunicorn-specific thing is the --paste hack.
config = hookenv.config()
- code_dir = os.path.join(config['base_dir'], 'code')
- venv_bin = os.path.join(config['base_dir'], 'venv', 'bin')
# XXX We only support a single related turnip-api unit so far.
turnip_api_rid = sorted(hookenv.relation_ids('turnip-api'))[0]
turnip_api_unit = sorted(hookenv.related_units(turnip_api_rid))[0]
@@ -136,7 +246,7 @@
rid=turnip_api_rid, unit=turnip_api_unit)
env = {
- 'PATH': '%s:$PATH' % venv_bin,
+ 'PATH': '%s:$PATH' % os.path.join(VENV_DIR, 'bin'),
'TURNIP_ENDPOINT': 'http://%s:%s' % (
turnip_api_data['turnip_api_host'],
turnip_api_data['turnip_api_port']),
@@ -144,7 +254,7 @@
for relid in hookenv.relation_ids('wsgi-file'):
hookenv.relation_set(
relid,
- working_dir=code_dir,
+ working_dir=CODE_DIR,
wsgi_wsgi_file='--paste turnipcake.ini', # XXX: Gross.
wsgi_user=config['user'],
wsgi_group=config['group'],
=== modified file 'hooks/services.py'
--- hooks/services.py 2015-03-27 02:43:39 +0000
+++ hooks/services.py 2015-10-22 22:09:21 +0000
@@ -12,9 +12,8 @@
actions.execd_preinstall('turnipcake')
actions.install_packages('turnipcake')
actions.create_users('turnipcake')
- actions.unpack_source('turnipcake')
- actions.install_python_packages('turnipcake')
- actions.migrate_db('turnipcake')
+ if hookenv.hook_name() in ('install', 'upgrade-charm', 'config-changed'):
+ actions.deploy_code('turnipcake')
config = hookenv.config()
manager = ServiceManager([