← Back to team overview

cf-charmers team mailing list archive

Re: [Merge] lp:~johnsca/charms/trusty/cloudfoundry/raindance into lp:~cf-charmers/charms/trusty/cloudfoundry/trunk

 

Review: Approve

LGTM +1 — just some minor inline comments.

Diff comments:

> === modified file 'cloudfoundry/jobs.py'
> --- cloudfoundry/jobs.py	2014-09-29 22:12:51 +0000
> +++ cloudfoundry/jobs.py	2014-09-30 21:20:24 +0000
> @@ -11,8 +11,8 @@
>  from cloudfoundry import health_checks
>  from cloudfoundry.services import SERVICES
>  
> -PACKAGES_BASE_DIR = path('/var/vcap/packages')
> -RELEASES_DIR = path('/var/vcap/releases')
> +BASE_DIR = path('/var/vcap/packages')
> +RELEASES_DIR = BASE_DIR / 'releases'
>  
>  
>  def job_manager(service_name):
> @@ -41,7 +41,7 @@
>                  tasks.mount_nfs,
>                  tasks.fetch_job_artifacts,
>                  partial(tasks.install_job_packages,
> -                        PACKAGES_BASE_DIR, RELEASES_DIR),
> +                        BASE_DIR, RELEASES_DIR),
>                  tasks.job_templates(job.get('mapping', {})),
>                  tasks.set_script_permissions,
>                  tasks.monit.svc_force_reload,
> 
> === modified file 'cloudfoundry/tasks.py'
> --- cloudfoundry/tasks.py	2014-09-29 22:12:51 +0000
> +++ cloudfoundry/tasks.py	2014-09-30 21:20:24 +0000
> @@ -2,7 +2,6 @@
>  import re
>  import shutil
>  import subprocess
> -import tarfile
>  import yaml
>  import stat
>  import tempfile
> @@ -18,11 +17,14 @@
>  from cloudfoundry import templating
>  from cloudfoundry import utils
>  from .path import path
> +from raindance.package import PackageArchive
>  
>  logger = logging.getLogger(__name__)
>  
>  TEMPLATES_BASE_DIR = path('/var/vcap/jobs')
>  ARTIFACTS_DIR = path('/srv/artifacts')
> +SOFTWARE = 'cf'
> +ARCH = 'amd64'  # FIXME: determine from system

This is declared in multiple places. The hooks/common definition could come from cloudfoundry/config.py, since that is common code.

>  
>  
>  @hookenv.cached
> @@ -33,7 +35,7 @@
>  
>  @hookenv.cached
>  def job_path():

I think this is the job of pa.build_mirror_section, no? Looking only at the diff it's hard to say, but I don't think this should be needed anymore.

> -    return '/'.join(['cf-%s' % release_version(), 'amd64'])
> +    return '/'.join(['cf-%s' % release_version(), ARCH])
>  
>  
>  def install_base_dependencies():
> @@ -95,57 +97,29 @@
>  
>  def fetch_job_artifacts(job_name):
>      orch = contexts.OrchestratorRelation()['orchestrator'][0]
> -    url = urlparse(orch['artifacts_url'])
> -    if url.scheme == 'nfs':
> -        return
> -    job_url = '/'.join([orch['artifacts_url'], job_path(), job_name])
> -    job_archive = ARTIFACTS_DIR / job_path() / job_name
> -    if job_archive.exists():
> -        return
> -    (ARTIFACTS_DIR / job_path()).makedirs_p()
> -    retry = True
> -    while retry:
> -        hookenv.log('Downloading {}.tgz from {}'.format(job_name, job_url))
> -        try:
> -            subprocess.check_call(['wget', '-t0', '-c', '-nv', job_url, '-O', job_archive])
> -        except subprocess.CalledProcessError as e:
> -            if e.returncode == 4:  # always retry network errors
> -                hookenv.log('Network error, retrying download', hookenv.WARNING)
> -                retry = True
> -            else:
> -                raise
> -        else:
> -            retry = False
> -
> -
> -def install_job_packages(pkg_base_dir, releases_dir, job_name):
> -    job_archive = ARTIFACTS_DIR / job_path() / job_name
> -    job_extract = path(hookenv.charm_dir()) / 'jobs' / release_version() / job_name
> -    if not job_extract.exists():
> -        try:
> -            job_extract.makedirs()
> -            with job_extract:
> -                subprocess.check_call(['tar', '-xzf', job_archive])
> -        except Exception as e:
> -            hookenv.log(str(e), hookenv.ERROR)
> -            job_extract.remove_p()
> -            raise
> -    package_path = job_extract / 'packages'
> -    version = release_version()
> -    if not pkg_base_dir.exists():
> -        pkg_base_dir.makedirs_p(mode=0755)
> -
> -    for package in package_path.files('*.tgz'):
> -        pkgname = package.basename().rsplit('-', 1)[0]
> -        pkgpath = releases_dir / version / 'packages' / pkgname
> -        if not pkgpath.exists():
> -            pkgpath.makedirs(mode=0755)
> -            with pkgpath:
> -                subprocess.check_call(['tar', '-xzf', package])
> -
> -        pkgdest = pkg_base_dir / pkgname
> -        if not pkgdest.exists():
> -            pkgpath.symlink(pkgdest)
> +    url = orch['artifacts_url']
> +    version = orch['cf_version']
> +    if urlparse(url).scheme == 'nfs':
> +        return
> +    pa = PackageArchive(url)
> +    mirror = pa.build_mirror_section(ARTIFACTS_DIR, SOFTWARE, [(version, ARCH)], [job_name])
> +    for filename in mirror:
> +        pass  # just need to iterate to force the (lazy) download
> +
> +
> +def install_job_packages(base_dir, releases_dir, job_name):
> +    orch = contexts.OrchestratorRelation()['orchestrator'][0]
> +    url = orch['artifacts_url']
> +    version = orch['cf_version']
> +    pa = PackageArchive(url)
> +    job = pa.setup_job(ARTIFACTS_DIR, releases_dir / version,
> +                       SOFTWARE, version, ARCH, job_name)
> +    for job_file in job:
> +        link_source = releases_dir / version / job_file
> +        link_file = base_dir / job_file
> +        link_file.remove_p()
> +        link_file.makedirs_p()
> +        link_source.symlink(link_file)
>  
>  
>  def set_script_permissions(job_name, tmplt_base_dir=TEMPLATES_BASE_DIR):
> @@ -166,8 +140,6 @@
>          return yaml.safe_load(fp)
>  
>  
> -
> -
>  def _enable_swapaccounting(s):
>      output = []
>      for line in s.split('\n'):
> @@ -200,6 +172,8 @@
>          fp.write(output)
>      subprocess.call('update-grub')
>      _reboot()
> +
> +
>  def patch_dea(job_name):
>      DEA_PATCH = """
>  --- container.rb   2014-08-01 15:49:04.472289999 +0000
> 
> === modified file 'config.yaml'
> --- config.yaml	2014-09-29 18:08:00 +0000
> +++ config.yaml	2014-09-30 21:20:24 +0000
> @@ -11,7 +11,7 @@
>          description: >
>              The URL from which the artifacts can be retrieved.  You will not be
>              able to use Juju to deploy Cloud Foundry until this is properly set.
> -        default: "http://cf-compiled-packages.s3-website-us-east-1.amazonaws.com"
> +        default: "http://cf-packages.s3-website-us-east-1.amazonaws.com"
>      cf_version:
>          type: string
>          description: >
> 
> === modified file 'hooks/common.py'
> --- hooks/common.py	2014-09-29 22:12:51 +0000
> +++ hooks/common.py	2014-09-30 21:20:24 +0000
> @@ -4,6 +4,7 @@
>  import yaml
>  import shutil
>  import subprocess
> +from urlparse import urlparse
>  
>  from charmhelpers.core import hookenv
>  from charmhelpers.core import services
> @@ -22,40 +23,41 @@
>  from deployer.action.importer import Importer
>  from deployer.utils import setup_logging
>  from jujuclient import EnvError
> +from raindance.package import PackageArchive
> +
> +
> +SOFTWARE = 'cf'
> +ARCH = 'amd64'  # FIXME: determine from system
> +
> +
> +@hookenv.cached
> +def get_version():
> +    config = hookenv.config()
> +    version = config.get('cf_version')
> +    if not version or version == 'latest':
> +        version = RELEASES[0]['releases'][1]
> +    return str(version)
>  
>  
>  def precache_job_artifacts(s):
>      config = hookenv.config()
> -    version = config.get('cf_version')
> -    if not version or version == 'latest':
> -        version = RELEASES[0]['releases'][1]
> -    prefix = path('cf-{}'.format(version)) / 'amd64'
> -    base_url = path(config['artifacts_url']) / prefix
> -    base_path = path('/srv/data') / prefix
> -    base_path.makedirs_p(mode=0755)
> -    for service in SERVICES.values():
> -        for job in service['jobs']:
> -            job_name = job['job_name']
> -            url = base_url / job_name
> -            artifact = base_path / job_name
> -            if not artifact.exists():
> -                subprocess.check_call(['wget', '-nv', url, '-O', artifact])
> -
> -
> -def export_nfs(s):
> -    subprocess.check_call([
> -        'exportfs', '-o', 'rw,sync,no_root_squash,no_all_squash', '*:/srv/data',
> -    ])
> +    if not config['mirror_artifacts']:
> +        return
> +    version = config['cf_version']
> +    url = config['artifacts_url']
> +    assert urlparse(url).scheme != 'nfs', 'NFS URLs not supported with mirror_artifacts'

While I don't disagree with the assert here, it would be nice to have a way to tell the user at config-set time that the settings would be invalid and not applied. This would now require a resolve phase.

> +    pa = PackageArchive(url)
> +    genmirror = pa.build_mirror_section(path('/srv/data'), SOFTWARE, [(version, ARCH)])
> +    files = [x for x in genmirror]
> +    return files
>  
>  
>  def generate(s):
>      config = hookenv.config()
> -    version = config.get('cf_version')
> +    version = get_version()
> +    build_dir = path(hookenv.charm_dir()) / 'build' / version
>      from_charmstore = not config['generate_dependents']
>      cs_namespace = config['charmstore_namespace']
> -    if not version or version == 'latest':
> -        version = RELEASES[0]['releases'][1]
> -    build_dir = os.path.join(hookenv.charm_dir(), 'build', str(version))
>      if os.path.exists(build_dir):
>          shutil.rmtree(build_dir)
>      generator = CharmGenerator(RELEASES, SERVICES, from_charmstore, cs_namespace)
> 
> === modified file 'hooks/install'
> --- hooks/install	2014-09-29 22:12:51 +0000
> +++ hooks/install	2014-09-30 21:20:24 +0000
> @@ -1,6 +1,5 @@
>  #!/usr/bin/env python
>  
> -import os
>  import subprocess
>  from charmhelpers import fetch
>  from charmhelpers.core import hookenv
> @@ -13,3 +12,5 @@
>      'ssh-keygen',
>      '-f', path(hookenv.charm_dir()) / 'orchestrator-key',
>      '-N', ''])
> +subprocess.check_call([
> +    'pip', 'install', '--use-wheel', '-f', './wheelhouse', 'raindance'])
> 
> === modified file 'tests/test_tasks.py'
> --- tests/test_tasks.py	2014-09-29 22:12:51 +0000
> +++ tests/test_tasks.py	2014-09-30 21:20:24 +0000
> @@ -70,93 +70,48 @@
>              tasks.enable_monit_http_interface()
>              assert not confd().write_text.called
>  
> -    @mock.patch('os.remove')
> -    @mock.patch('charmhelpers.core.hookenv.log')
> -    @mock.patch('cloudfoundry.tasks.ARTIFACTS_DIR')
> -    @mock.patch('subprocess.check_call')
> -    @mock.patch('cloudfoundry.tasks.job_path')
> -    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> -    def test_fetch_job_artifacts(self, OrchRelation, job_path, check_call,
> -                                 adir, log, remove):
> -        OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version',
> -                                     'artifacts_url': 'http://url'}]}
> -        adir.__div__.return_value = adir
> -        adir.exists.return_value = False
> -        job_path.return_value = 'job_path'
> -        tasks.fetch_job_artifacts('job_name')
> -        assert adir.makedirs_p.called
> -        check_call.assert_called_once_with([
> -            'wget', '-t0', '-c', '-nv',
> -            'http://url/job_path/job_name',
> -            '-O', adir])
> -
> -    @mock.patch('subprocess.check_call')
> -    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> -    def test_fetch_job_artifacts_nfs(self, orch, cc):
> -        orch.return_value = {'orchestrator': [{'cf_version': 'version',
> -                             'artifacts_url': 'nfs://url'}]}
> -        tasks.fetch_job_artifacts('job_name')
> -        assert not cc.called
> -
> -    @mock.patch('subprocess.check_call')
> -    @mock.patch('cloudfoundry.tasks.ARTIFACTS_DIR')
> -    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> -    def test_fetch_job_artifacts_exists(self, orch, adir, cc):
> -        orch.return_value = {'orchestrator': [{'cf_version': 'version',
> -                             'artifacts_url': 'http://url'}]}
> -        adir.__div__.return_value = adir
> -        adir.exists.return_value = True
> -        tasks.fetch_job_artifacts('job_name')
> -        assert not cc.called
> -
> -    @mock.patch('cloudfoundry.tasks.tarfile.open')
> -    @mock.patch('subprocess.check_call')
> -    @mock.patch('os.path.exists')
> -    @mock.patch('cloudfoundry.tasks.job_path')
> -    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> -    def test_fetch_job_artifacts_same_version(self, OrchRelation, get_job_path, exists, check_call, taropen):
> -        OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version',
> -                                     'artifacts_url': 'http://url'}]}
> -        get_job_path.return_value = 'job_path'
> -        exists.return_value = True
> -        tasks.fetch_job_artifacts('job_name')
> -        assert not check_call.called
> -        assert not taropen.called
> -
> -    @mock.patch('os.symlink')
> -    @mock.patch('os.unlink')
> -    @mock.patch('cloudfoundry.tasks.release_version')
> -    @mock.patch('cloudfoundry.tasks.job_path')
> -    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> -    def test_install_job_packages(self, OrchRelation, job_path, rel_ver, unlink, symlink):
> -        job_path.return_value = 'job_path'
> -        rel_ver.return_value = 'version'
> -        OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version'}]}
> -        filename = 'package-123abc.tgz'
> -
> -        script = mock.Mock(name='script', spec=path)
> -        script.stat().st_mode = 33204
> -        script.basename.return_value = filename
> -
> -        with mock.patch('subprocess.check_call') as cc,\
> -                mock.patch('cloudfoundry.tasks.path', spec=path) as pth,\
> -                mock.patch('cloudfoundry.tasks.ARTIFACTS_DIR', spec=path) as adir:
> -            adir.__div__.return_value = adir
> -            pth.return_value = pth.__div__.return_value = pth
> -            pth.exists.return_value = False
> -            pth.files.return_value = [pth]
> -
> -            tasks.install_job_packages(pth, pth,  'job_name')
> -
> -            self.assertEqual(cc.call_count, 2)
> -            cc.assert_has_calls([
> -                mock.call(['tar', '-xzf', adir]),
> -                mock.call(['tar', '-xzf', pth]),
> -            ])
> -
> -            assert pth.makedirs.called
> -            assert pth.makedirs_p.called
> -            pth.symlink.assert_called_once_with(pth)
> +    @mock.patch('cloudfoundry.tasks.PackageArchive')
> +    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> +    def test_fetch_job_artifacts(self, OrchRelation, PackageArchive):
> +        OrchRelation.return_value = {'orchestrator': [{
> +            'cf_version': 'version',
> +            'artifacts_url': 'http://url',
> +        }]}
> +        tasks.fetch_job_artifacts('job_name')
> +        PackageArchive.assert_called_with('http://url')
> +        PackageArchive().build_mirror_section.assert_called()
> +        PackageArchive().build_mirror_section.iter.assert_called()
> +
> +    @mock.patch('cloudfoundry.tasks.PackageArchive')
> +    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> +    def test_fetch_job_artifacts_nfs(self, orch, PA):
> +        orch.return_value = {'orchestrator': [{
> +            'cf_version': 'version',
> +            'artifacts_url': 'nfs://url',
> +        }]}
> +        tasks.fetch_job_artifacts('job_name')
> +        assert not PA.called
> +
> +    @mock.patch('cloudfoundry.tasks.PackageArchive')
> +    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
> +    def test_install_job_packages(self, OrchRelation, PackageArchive):
> +        OrchRelation.return_value = {'orchestrator': [{
> +            'cf_version': 'version',
> +            'artifacts_url': 'url',
> +        }]}
> +        bd = mock.MagicMock(spec=path)
> +        rd = mock.MagicMock(spec=path)
> +        pa = PackageArchive.return_value
> +        ls = rd.__div__().__div__.return_value
> +        lf = bd.__div__.return_value
> +        pa.setup_job.return_value = ['job_file']
> +
> +        tasks.install_job_packages(bd, rd, 'job_name')
> +        PackageArchive.assert_called_with('url')
> +        pa.setup_job.assert_called()
> +        lf.remove_p.assert_called()
> +        lf.makedirs_p.assert_called()
> +        ls.symlink.assert_called_with(lf)
>  
>      @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
>      def test_job_path(self, OrchRelation):
> @@ -241,4 +196,3 @@
>          output = tasks._enable_swapaccounting(sample)
>          self.assertIn("arg1 cgroup_enable=memory swapaccount=1", output)
>          self.assertIn("recovery arg2 cgroup_enable=memory swapaccount=1", output)
> -
> 
> === modified file 'tox.ini'
> --- tox.ini	2014-07-21 07:43:59 +0000
> +++ tox.ini	2014-09-30 21:20:24 +0000
> @@ -24,3 +24,9 @@
>      tornado
>      juju-deployer
>      bzr
> +    raindance
> +    args
> +    boto
> +    clint
> +    path.py
> +    subparse
> 
> === modified file 'wheelhouse/PyYAML-3.11-cp27-none-linux_x86_64.whl'
> Binary files wheelhouse/PyYAML-3.11-cp27-none-linux_x86_64.whl	2014-06-25 07:08:08 +0000 and wheelhouse/PyYAML-3.11-cp27-none-linux_x86_64.whl	2014-09-30 21:20:24 +0000 differ
> === added file 'wheelhouse/args-0.1.0-py2-none-any.whl'
> Binary files wheelhouse/args-0.1.0-py2-none-any.whl	1970-01-01 00:00:00 +0000 and wheelhouse/args-0.1.0-py2-none-any.whl	2014-09-30 21:20:24 +0000 differ
> === added file 'wheelhouse/boto-2.32.1-py2.py3-none-any.whl'
> Binary files wheelhouse/boto-2.32.1-py2.py3-none-any.whl	1970-01-01 00:00:00 +0000 and wheelhouse/boto-2.32.1-py2.py3-none-any.whl	2014-09-30 21:20:24 +0000 differ
> === added file 'wheelhouse/clint-0.3.7-py2-none-any.whl'
> Binary files wheelhouse/clint-0.3.7-py2-none-any.whl	1970-01-01 00:00:00 +0000 and wheelhouse/clint-0.3.7-py2-none-any.whl	2014-09-30 21:20:24 +0000 differ
> === added file 'wheelhouse/path.py-6.2-py2-none-any.whl'
> Binary files wheelhouse/path.py-6.2-py2-none-any.whl	1970-01-01 00:00:00 +0000 and wheelhouse/path.py-6.2-py2-none-any.whl	2014-09-30 21:20:24 +0000 differ
> === added file 'wheelhouse/raindance-0.2dev-py2-none-any.whl'
> Binary files wheelhouse/raindance-0.2dev-py2-none-any.whl	1970-01-01 00:00:00 +0000 and wheelhouse/raindance-0.2dev-py2-none-any.whl	2014-09-30 21:20:24 +0000 differ
> === added file 'wheelhouse/requests-2.4.1-py2.py3-none-any.whl'
> Binary files wheelhouse/requests-2.4.1-py2.py3-none-any.whl	1970-01-01 00:00:00 +0000 and wheelhouse/requests-2.4.1-py2.py3-none-any.whl	2014-09-30 21:20:24 +0000 differ
> === added file 'wheelhouse/subparse-0.3.3-py2-none-any.whl'
> Binary files wheelhouse/subparse-0.3.3-py2-none-any.whl	1970-01-01 00:00:00 +0000 and wheelhouse/subparse-0.3.3-py2-none-any.whl	2014-09-30 21:20:24 +0000 differ


-- 
https://code.launchpad.net/~johnsca/charms/trusty/cloudfoundry/raindance/+merge/236624
Your team Cloud Foundry Charmers is subscribed to branch lp:~cf-charmers/charms/trusty/cloudfoundry/trunk.


References