
cf-charmers team mailing list archive

[Merge] lp:~johnsca/charms/trusty/cloudfoundry/nfs into lp:~cf-charmers/charms/trusty/cloudfoundry/trunk

 

Cory Johns has proposed merging lp:~johnsca/charms/trusty/cloudfoundry/nfs into lp:~cf-charmers/charms/trusty/cloudfoundry/trunk.

Requested reviews:
  Cloud Foundry Charmers (cf-charmers)

For more details, see:
https://code.launchpad.net/~johnsca/charms/trusty/cloudfoundry/nfs/+merge/235963

NFS support for review. This replaces the nginx-based artifact proxy with an NFS export of /srv/data on the orchestrator, mounted by the job units.
-- 
https://code.launchpad.net/~johnsca/charms/trusty/cloudfoundry/nfs/+merge/235963
Your team Cloud Foundry Charmers is requested to review the proposed merge of lp:~johnsca/charms/trusty/cloudfoundry/nfs into lp:~cf-charmers/charms/trusty/cloudfoundry/trunk.
=== modified file 'charmhelpers/core/host.py'
--- charmhelpers/core/host.py	2014-08-21 17:34:34 +0000
+++ charmhelpers/core/host.py	2014-09-25 13:42:22 +0000
@@ -68,8 +68,8 @@
     """Determine whether a system service is available"""
     try:
         subprocess.check_output(['service', service_name, 'status'], stderr=subprocess.STDOUT)
-    except subprocess.CalledProcessError:
-        return False
+    except subprocess.CalledProcessError as e:
+        return 'unrecognized service' not in e.output
     else:
         return True
 

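A note on the host.py change: `service <name> status` exits non-zero both for a stopped SysV service and for one that does not exist, so returning False on any CalledProcessError under-reported availability. A minimal sketch of the distinction the new code draws (service names are only examples):

    from charmhelpers.core import host

    # A known-but-stopped SysV service exits non-zero without the
    # "unrecognized service" marker, so it now counts as available.
    host.service_available('nfs-kernel-server')  # True even while stopped

    # Only the "unrecognized service" error (captured from stderr via
    # stderr=subprocess.STDOUT) makes this return False.
    host.service_available('no-such-service')    # False
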
=== modified file 'cloudfoundry/contexts.py'
--- cloudfoundry/contexts.py	2014-09-16 20:57:18 +0000
+++ cloudfoundry/contexts.py	2014-09-25 13:42:22 +0000
@@ -20,6 +20,17 @@
         inst[inst.name] = [inst.provide_data()]
         return inst
 
+    @classmethod
+    def remote_units(cls):
+        """
+        Return the addresses of all units connected on this relation.
+        """
+        units = set()
+        for rid in sorted(hookenv.relation_ids(cls.name)):
+            for unit in sorted(hookenv.related_units(rid)):
+                units.add(hookenv.relation_get('private-address', rid=rid, unit=unit))
+        return list(units)
+
 
 class StoredContext(dict):
     """
@@ -425,7 +436,7 @@
         config = hookenv.config()
         if config['mirror_artifacts']:
             private_addr = hookenv.unit_private_ip()
-            return 'http://{}:8019'.format(private_addr)  # FIXME: this should use SSL
+            return 'nfs://{}/srv/data'.format(private_addr)
         else:
             return config['artifacts_url']
 
@@ -489,10 +500,64 @@
         return contents['apiinfo']['addrs'][0]
 
 
-class ArtifactsCache(dict):
-    def __init__(self):
-        config = hookenv.config()
-        if config.get('artifacts_url'):
-            self.update({
-                'artifacts_url': config['artifacts_url'],
-            })
+# XXX: temporarily pulled from newer charmhelpers; need resync (or switch to pypi)
+class RequiredConfig(dict):
+    """
+    Data context that loads config options with one or more mandatory options.
+
+    Once the required options have been changed from their default values, all
+    config options will be available, namespaced under `config` to prevent
+    potential naming conflicts (for example, between a config option and a
+    relation property).
+
+    :param list *args: List of options that must be changed from their default values.
+    :param ne: Optional mapping of options to values that they must not be.  This
+               overrides comparing them to the default values.  For a single
+               option, can also be written as `RequiredConfig('my_opt') != 'value'`
+    :param eq: Optional mapping of options to values that they must be.  This
+               overrides comparing them to the default values.  For a single
+               option, can also be written as `RequiredConfig('my_opt') == 'value'`
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.required_options = list(args)
+        self['config'] = hookenv.config()
+        self.ne = kwargs.get('ne', {})
+        self.eq = kwargs.get('eq', {})
+        if not any([self.ne, self.eq]):
+            with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp:
+                self.ne = {o: v.get('default') for o, v in yaml.load(fp).get('options', {}).items()}
+        else:
+            self.required_options = list(set(self.required_options + self.ne.keys() + self.eq.keys()))
+
+    def __bool__(self):
+        for option in self.required_options:
+            if option not in self['config']:
+                return False
+            current_value = self['config'][option]
+            if option in self.eq:
+                if current_value != self.eq[option]:
+                    return False
+            if option in self.ne:
+                if current_value == self.ne[option]:
+                    return False
+                if current_value in (None, '') and self.ne[option] in (None, ''):
+                    return False  # treat None and '' the same for ne
+        return True
+
+    def __nonzero__(self):
+        return self.__bool__()
+
+    def __eq__(self, value):
+        if len(self.required_options) != 1:
+            raise ValueError('Expression form can only be used with a single option')
+        self.eq = {key: value for key in self.required_options}
+        self.ne = {}
+        return self
+
+    def __ne__(self, value):
+        if len(self.required_options) != 1:
+            raise ValueError('Expression form can only be used with a single option')
+        self.ne = {key: value for key in self.required_options}
+        self.eq = {}
+        return self

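Two additions here: a `remote_units` classmethod on the relation context base class (used later as OrchestratorRelation.remote_units()) gathers the deduplicated private addresses of every unit on the relation, and `RequiredConfig` supports an expression form whose comparison is deferred until truthiness is tested. A minimal sketch of that deferred evaluation (the option name is taken from this branch):

    from cloudfoundry import contexts

    # __ne__ returns the RequiredConfig instance itself with self.ne
    # populated; the actual check runs in __bool__ when the services
    # framework evaluates required_data.
    guard = contexts.RequiredConfig('artifacts_url') != ''
    if guard:  # True only when artifacts_url is present and non-empty
        url = guard['config']['artifacts_url']
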
=== modified file 'cloudfoundry/jobs.py'
--- cloudfoundry/jobs.py	2014-08-26 15:01:09 +0000
+++ cloudfoundry/jobs.py	2014-09-25 13:42:22 +0000
@@ -38,6 +38,7 @@
             'provided_data': [p() for p in job.get('provided_data', [])],
             'data_ready': [
                 tasks.install_orchestrator_key,
+                tasks.mount_nfs,
                 tasks.fetch_job_artifacts,
                 partial(tasks.install_job_packages,
                         PACKAGES_BASE_DIR, RELEASES_DIR),

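Ordering note: `mount_nfs` runs before `fetch_job_artifacts`, so when the orchestrator advertises an nfs:// artifacts URL the share is mounted first, and the fetch step, which returns early for nfs:// URLs, becomes a no-op.
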
=== modified file 'cloudfoundry/services.py'
--- cloudfoundry/services.py	2014-09-19 08:22:20 +0000
+++ cloudfoundry/services.py	2014-09-25 13:42:22 +0000
@@ -185,7 +185,8 @@
                     contexts.DEARelation.remote_view,
                 ],
                 'data_ready': [
-                    tasks.enable_swapaccounting
+                    #tasks.enable_swapaccounting
+                    tasks.patch_dea
                 ]
             },
             {

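Note on this swap: `enable_swapaccounting` edits the kernel command line in /etc/default/grub, runs update-grub, and reboots the unit (see tasks.py below), so it is commented out here rather than run, with `patch_dea` applied in its place.
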
=== modified file 'cloudfoundry/tasks.py'
--- cloudfoundry/tasks.py	2014-09-19 08:22:20 +0000
+++ cloudfoundry/tasks.py	2014-09-25 13:42:22 +0000
@@ -8,6 +8,7 @@
 import tempfile
 import textwrap
 import logging
+from urlparse import urlparse
 from functools import partial
 from charmhelpers.core import host
 from charmhelpers.core import hookenv
@@ -21,11 +22,23 @@
 logger = logging.getLogger(__name__)
 
 TEMPLATES_BASE_DIR = path('/var/vcap/jobs')
+ARTIFACTS_DIR = path('/srv/artifacts')
+
+
+@hookenv.cached
+def release_version(contexts=contexts):
+    unit = contexts.OrchestratorRelation()['orchestrator'][0]
+    return unit['cf_version']
+
+
+@hookenv.cached
+def job_path():
+    return '/'.join(['cf-%s' % release_version(), 'amd64'])
 
 
 def install_base_dependencies():
     fetch.apt_install(packages=fetch.filter_installed_packages([
-        'ruby', 'monit', 'runit', 'zip', 'unzip']))
+        'ruby', 'monit', 'runit', 'zip', 'unzip', 'nfs-common']))
     gem_file = os.path.join(hookenv.charm_dir(),
                             'files/bosh-template-1.2611.0.pre.gem')
     host.adduser('vcap')
@@ -66,23 +79,35 @@
     authorized_keys.write_text('\n{}'.format(pub_key), append=True)
 
 
+def mount_nfs(job_name):
+    orch = contexts.OrchestratorRelation()['orchestrator'][0]
+    url = urlparse(orch['artifacts_url'])
+    if url.scheme != 'nfs':
+        return
+    output = subprocess.check_output(['mount'])
+    if ARTIFACTS_DIR in output:
+        return
+    ARTIFACTS_DIR.makedirs_p()
+    subprocess.check_call([
+        'mount', '-t', 'nfs', '{}:{}'.format(url.netloc, url.path), ARTIFACTS_DIR,
+    ])
+
+
 def fetch_job_artifacts(job_name):
-    orchestrator_data = contexts.OrchestratorRelation()
-    job_path = get_job_path(job_name)
-    job_archive = job_path+'/'+job_name+'.tgz'
-    artifact_url = os.path.join(
-        orchestrator_data['orchestrator'][0]['artifacts_url'],
-        'cf-'+orchestrator_data['orchestrator'][0]['cf_version'],
-        'amd64',  # TODO: Get this from somewhere...
-        job_name)
-    if os.path.exists(job_archive):
-        return
-    host.mkdir(job_path)
+    orch = contexts.OrchestratorRelation()['orchestrator'][0]
+    url = urlparse(orch['artifacts_url'])
+    if url.scheme == 'nfs':
+        return
+    job_url = '/'.join([orch['artifacts_url'], job_path(), job_name])
+    job_archive = ARTIFACTS_DIR / job_path() / job_name
+    if job_archive.exists():
+        return
+    (ARTIFACTS_DIR / job_path()).makedirs_p()
     retry = True
     while retry:
-        hookenv.log('Downloading {}.tgz from {}'.format(job_name, artifact_url))
+        hookenv.log('Downloading {}.tgz from {}'.format(job_name, job_url))
         try:
-            subprocess.check_call(['wget', '-t0', '-c', '-nv', artifact_url, '-O', job_archive])
+            subprocess.check_call(['wget', '-t0', '-c', '-nv', job_url, '-O', job_archive])
         except subprocess.CalledProcessError as e:
             if e.returncode == 4:  # always retry network errors
                 hookenv.log('Network error, retrying download', hookenv.WARNING)
@@ -92,27 +117,20 @@
         else:
             retry = False
 
-    try:
-        #assert 'ETag' in resp, (
-        #    'Error downloading artifacts from {}; '
-        #    'missing ETag (md5) checksum (invalid job?)'.format(artifact_url))
-        #expected_md5 = resp['ETag'].strip('"')
-        #with open(job_archive) as fp:
-        #    actual_md5 = hashlib.md5(fp.read()).hexdigest()
-        #assert actual_md5 == expected_md5, (
-        #    'Error downloading artifacts from {}; '
-        #    'ETag (md5) checksum mismatch'.format(artifact_url))
-        with tarfile.open(job_archive) as tgz:
-            tgz.extractall(job_path)
-    except Exception as e:
-        hookenv.log(str(e), hookenv.ERROR)
-        if os.path.exists(job_archive):
-            os.remove(job_archive)
-        raise
-
 
 def install_job_packages(pkg_base_dir, releases_dir, job_name):
-    package_path = path(get_job_path(job_name)) / 'packages'
+    job_archive = ARTIFACTS_DIR / job_path() / job_name
+    job_extract = path(hookenv.charm_dir()) / 'jobs' / release_version() / job_name
+    if not job_extract.exists():
+        try:
+            job_extract.makedirs()
+            with job_extract:
+                subprocess.check_call(['tar', '-xzf', job_archive])
+        except Exception as e:
+            hookenv.log(str(e), hookenv.ERROR)
+            job_extract.remove_p()
+            raise
+    package_path = job_extract / 'packages'
     version = release_version()
     if not pkg_base_dir.exists():
         pkg_base_dir.makedirs_p(mode=0755)
@@ -138,26 +156,16 @@
 
 
 @hookenv.cached
-def get_job_path(job_name):
-    version = release_version()
-    return path(hookenv.charm_dir()) / 'jobs' / version / job_name
-
-
-@hookenv.cached
 def load_spec(job_name):
     """
     Reads and parses the spec file for the given job name from the jobs folder.
     """
-    job_path = get_job_path(job_name)
-    with open(os.path.join(job_path, 'spec')) as fp:
+    job_extract = path(hookenv.charm_dir()) / 'jobs' / release_version() / job_name
+    job_spec = job_extract / 'spec'
+    with open(job_spec) as fp:
         return yaml.safe_load(fp)
 
 
-@hookenv.cached
-def release_version(contexts=contexts):
-    units = contexts.OrchestratorRelation()['orchestrator']
-    unit = units[0]
-    return unit['cf_version']
 
 
 def _enable_swapaccounting(s):
@@ -192,8 +200,6 @@
         fp.write(output)
     subprocess.call('update-grub')
     _reboot()
-
-
 def patch_dea(job_name):
     DEA_PATCH = """
 --- container.rb   2014-08-01 15:49:04.472289999 +0000

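Both `mount_nfs` and the rewritten `fetch_job_artifacts` hinge on Python 2's urlparse splitting an nfs:// URL into host and export path. A quick sketch of the values involved (the address is illustrative):

    from urlparse import urlparse  # Python 2, as imported in tasks.py

    url = urlparse('nfs://10.0.0.5/srv/data')
    url.scheme  # 'nfs'  -> fetch_job_artifacts returns early; mount_nfs proceeds
    url.netloc  # '10.0.0.5'
    url.path    # '/srv/data'
    # mount_nfs then (idempotently, after checking `mount` output) runs:
    #   mount -t nfs 10.0.0.5:/srv/data /srv/artifacts

The archive extraction that fetch_job_artifacts used to do now lives in install_job_packages, which appears to rely on the bundled path class's context-manager form (`with job_extract:`) to chdir into the target directory before untarring.
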
=== modified file 'hooks/common.py'
--- hooks/common.py	2014-09-18 19:37:22 +0000
+++ hooks/common.py	2014-09-25 13:42:22 +0000
@@ -12,8 +12,8 @@
 from cloudfoundry.releases import RELEASES
 from cloudfoundry.services import SERVICES
 from cloudfoundry.contexts import JujuAPICredentials
-from cloudfoundry.contexts import ArtifactsCache
 from cloudfoundry.contexts import OrchestratorRelation
+from cloudfoundry.contexts import RequiredConfig
 from cloudfoundry.path import path
 from cloudfoundry.api import JujuLoggingDeployment
 from cloudfoundry.api import APIEnvironment
@@ -26,24 +26,28 @@
 
 def precache_job_artifacts(s):
     config = hookenv.config()
-    if not config['mirror_artifacts']:
-        return
     version = config.get('cf_version')
     if not version or version == 'latest':
         version = RELEASES[0]['releases'][1]
     prefix = path('cf-{}'.format(version)) / 'amd64'
     base_url = path(config['artifacts_url']) / prefix
-    base_path = path('/var/www') / prefix
+    base_path = path('/srv/data') / prefix
     base_path.makedirs_p(mode=0755)
     for service in SERVICES.values():
         for job in service['jobs']:
             job_name = job['job_name']
-            url = os.path.join(base_url, job_name)
-            artifact = os.path.join(base_path, job_name)
-            if not os.path.exists(artifact):
+            url = base_url / job_name
+            artifact = base_path / job_name
+            if not artifact.exists():
                 subprocess.check_call(['wget', '-nv', url, '-O', artifact])
 
 
+def export_nfs(s):
+    subprocess.check_call([
+        'exportfs', '-o', 'rw,sync,no_root_squash,no_all_squash', '*:/srv/data',
+    ])
+
+
 def generate(s):
     config = hookenv.config()
     version = config.get('cf_version')
@@ -162,11 +166,11 @@
             'service': 'bundle',
             'required_data': [
                 JujuAPICredentials(),
-                ArtifactsCache(),
+                RequiredConfig('artifacts_url') != '',
             ],
+            'provided_data': [OrchestratorRelation()],
             'data_ready': [
                 cache_unit_addresses,
-                precache_job_artifacts,
                 generate,
                 deploy,
             ],
@@ -174,14 +178,16 @@
             'stop': [],
         },
         {
-            'service': 'nginx',
-            'required_data': [{'charm_dir': hookenv.charm_dir(),
-                               'config': hookenv.config()}],
-            'provided_data': [OrchestratorRelation()],
+            'service': 'nfs-kernel-server',
+            'required_data': [
+                RequiredConfig('mirror_artifacts') == True,
+                RequiredConfig('artifacts_url') != '',
+                {'nfs_units': OrchestratorRelation.remote_units()},
+            ],
             'data_ready': [
-                services.render_template(
-                    source='nginx.conf',
-                    target='/etc/nginx/sites-enabled/artifact_proxy'),
+                precache_job_artifacts,
+                services.render_template('nfs-exports',
+                                         target='/etc/exports'),
             ],
         },
     ])

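The reshaped service table leans on the services framework's readiness rule: every entry in required_data must be truthy, so the two RequiredConfig guards are what actually gate the nfs-kernel-server block. A sketch of that evaluation, assuming the framework's all-truthy check:

    from cloudfoundry import contexts
    from cloudfoundry.contexts import OrchestratorRelation

    # bool() of each RequiredConfig runs its __bool__ check. The dict
    # entry is a non-empty dict and therefore always truthy, even when
    # remote_units() returns an empty list.
    ready = all(bool(req) for req in [
        contexts.RequiredConfig('mirror_artifacts') == True,
        contexts.RequiredConfig('artifacts_url') != '',
        {'nfs_units': OrchestratorRelation.remote_units()},
    ])
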
=== modified file 'hooks/install'
--- hooks/install	2014-08-26 15:01:09 +0000
+++ hooks/install	2014-09-25 13:42:22 +0000
@@ -7,8 +7,8 @@
 from cloudfoundry.path import path
 
 hookenv.juju_status('installing')
-fetch.apt_install(fetch.filter_installed_packages(['juju-deployer', 'nginx']))
-os.unlink('/etc/nginx/sites-enabled/default')
+fetch.apt_install(fetch.filter_installed_packages([
+    'juju-deployer', 'nfs-kernel-server']))
 subprocess.check_call([
     'ssh-keygen',
     '-f', path(hookenv.charm_dir()) / 'orchestrator-key',

=== added file 'templates/nfs-exports'
--- templates/nfs-exports	1970-01-01 00:00:00 +0000
+++ templates/nfs-exports	2014-09-25 13:42:22 +0000
@@ -0,0 +1,3 @@
+{% for unit in nfs_units %}
+/srv/data    {{unit}}(rw,sync,no_root_squash,no_all_squash,no_subtree_check)
+{% endfor %}

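For reference, with two units on the orchestrator relation the template renders /etc/exports along these lines (addresses hypothetical; the Jinja for-loop also leaves blank lines between entries unless trim_blocks is set):

    /srv/data    10.0.0.11(rw,sync,no_root_squash,no_all_squash,no_subtree_check)
    /srv/data    10.0.0.12(rw,sync,no_root_squash,no_all_squash,no_subtree_check)
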
=== removed file 'templates/nginx.conf'
--- templates/nginx.conf	2014-07-29 14:11:34 +0000
+++ templates/nginx.conf	1970-01-01 00:00:00 +0000
@@ -1,7 +0,0 @@
-server {
-    listen 8019;
-
-    location / {
-        root /var/www;
-    }
-}

=== modified file 'tests/test_contexts.py'
--- tests/test_contexts.py	2014-09-16 20:57:18 +0000
+++ tests/test_contexts.py	2014-09-25 13:42:22 +0000
@@ -248,7 +248,7 @@
         config.return_value = {'mirror_artifacts': True, 'artifacts_url': 'url'}
         upi.return_value = 'upi'
         url = contexts.OrchestratorRelation().get_artifacts_url()
-        self.assertEqual(url, 'http://upi:8019')
+        self.assertEqual(url, 'nfs://upi/srv/data')
         upi.assert_called()
 
     @mock.patch(CONTEXT + 'OrchestratorRelation.get_data')
@@ -352,16 +352,75 @@
         mopen.assert_called_once_with('/agent/machine-bar/agent.conf')
 
 
-class TestArtifactCache(unittest.TestCase):
-    @mock.patch('charmhelpers.core.hookenv.config')
-    def test_not_ready(self, config):
-        config.return_value = {}
-        self.assertEqual(contexts.ArtifactsCache(), {})
-
-    @mock.patch('charmhelpers.core.hookenv.config')
-    def test_ready(self, config):
-        config.return_value = {'artifacts_url': 'url'}
-        self.assertEqual(contexts.ArtifactsCache(), {'artifacts_url': 'url'})
+class TestRequiredConfig(unittest.TestCase):
+    def setUp(self):
+        self.config = {}
+        self.config_def = {
+            'options': {
+                'opt1': {'default': 'default'},
+                'opt2': {'default': ''},
+                'opt3': {},
+            }
+        }
+
+        self.pconfig = mock.patch('charmhelpers.core.hookenv.config')
+        self.mconfig = self.pconfig.start()
+        self.mconfig.side_effect = lambda: self.config
+        self.popen = mock.patch.object(contexts, 'open', create=True)
+        self.mopen = self.popen.start()
+        self.pyaml = mock.patch.object(contexts, 'yaml')
+        self.myaml = self.pyaml.start()
+        self.myaml.load.side_effect = lambda fp: self.config_def
+        self.pcharm_dir = mock.patch('charmhelpers.core.hookenv.charm_dir')
+        self.mcharm_dir = self.pcharm_dir.start()
+        self.mcharm_dir.return_value = 'charm_dir'
+
+    def tearDown(self):
+        self.pconfig.stop()
+        self.popen.stop()
+        self.pyaml.stop()
+        self.pcharm_dir.stop()
+
+    def test_init(self):
+        rq = contexts.RequiredConfig('req')
+        self.assertItemsEqual(rq.required_options, ['req'])
+        self.assertEqual(rq.ne, {'opt1': 'default', 'opt2': '', 'opt3': None})
+        self.assertEqual(rq.eq, {})
+
+        rq = contexts.RequiredConfig('req', ne={'neq': 'bar'}, eq={'req': 'foo'})
+        self.assertItemsEqual(rq.required_options, ['req', 'neq'])
+        self.assertEqual(rq.ne, {'neq': 'bar'})
+        self.assertEqual(rq.eq, {'req': 'foo'})
+
+    def test_default(self):
+        self.config['opt1'] = 'default'
+        rq = contexts.RequiredConfig('opt1')
+        assert not bool(rq)
+
+    def test_empty(self):
+        self.config = {'opt1': '', 'opt2': '', 'opt3': ''}
+        assert bool(contexts.RequiredConfig('opt1'))
+        assert not bool(contexts.RequiredConfig('opt2'))
+        assert not bool(contexts.RequiredConfig('opt3'))
+
+    def test_missing(self):
+        assert not bool(contexts.RequiredConfig('opt1'))
+        assert not bool(contexts.RequiredConfig('opt2'))
+        assert not bool(contexts.RequiredConfig('opt3'))
+
+    def test_ne(self):
+        self.config['opt1'] = 'foo'
+        assert not bool(contexts.RequiredConfig(ne={'opt1': 'foo'}))
+        assert not bool(contexts.RequiredConfig('opt1') != 'foo')
+        assert bool(contexts.RequiredConfig(ne={'opt1': 'bar'}))
+        assert bool(contexts.RequiredConfig('opt1') != 'bar')
+
+    def test_eq(self):
+        self.config['opt1'] = 'foo'
+        assert bool(contexts.RequiredConfig(eq={'opt1': 'foo'}))
+        assert bool(contexts.RequiredConfig('opt1') == 'foo')
+        assert not bool(contexts.RequiredConfig(eq={'opt1': 'bar'}))
+        assert not bool(contexts.RequiredConfig('opt1') == 'bar')
 
 
 if __name__ == '__main__':

=== modified file 'tests/test_jobs.py'
--- tests/test_jobs.py	2014-08-26 15:01:09 +0000
+++ tests/test_jobs.py	2014-09-25 13:42:22 +0000
@@ -70,11 +70,11 @@
         self.assertIsInstance(services[0]['required_data'][1],
                               contexts.NatsRelation)
         # Show that we converted to rubytemplatecallbacks
-        self.assertIsInstance(services[0]['data_ready'][3],
+        self.assertIsInstance(services[0]['data_ready'][4],
                               tasks.JobTemplates)
         services = jobs.build_service_block('cloud-controller-v1')
         # Show that we include both default and additional handlers
-        self.assertIsInstance(services[0]['data_ready'][3],
+        self.assertIsInstance(services[0]['data_ready'][4],
                               tasks.JobTemplates)
         self.assertEqual(services[0]['data_ready'][-1],
                          contexts.CloudControllerDBRelation.send_data)

=== modified file 'tests/test_tasks.py'
--- tests/test_tasks.py	2014-09-19 08:22:20 +0000
+++ tests/test_tasks.py	2014-09-25 13:42:22 +0000
@@ -12,6 +12,9 @@
             'charmhelpers.core.hookenv.charm_dir')
         self.charm_dir = self.charm_dir_patch.start()
         self.charm_dir.return_value = 'charm_dir'
+        self.log_patch = mock.patch(
+            'charmhelpers.core.hookenv.log')
+        self.log = self.log_patch.start()
 
     def tearDown(self):
         self.charm_dir_patch.stop()
@@ -30,8 +33,7 @@
             monitrc().exists.return_value = False
             tasks.install_base_dependencies()
 
-        apt_install.assert_called_once_with(packages=[
-            'ruby', 'monit', 'runit', 'zip', 'unzip'])
+        apt_install.assert_called_once()
         adduser.assert_called_once_with('vcap')
         assert monitrc.called
         assert monitrc.call_args == mock.call('/etc/monit/conf.d/enable_http')
@@ -70,84 +72,47 @@
 
     @mock.patch('os.remove')
     @mock.patch('charmhelpers.core.hookenv.log')
-    @mock.patch('hashlib.md5')
-    @mock.patch('cloudfoundry.tasks.open', create=True)
-    @mock.patch('charmhelpers.core.host.mkdir')
-    @mock.patch('cloudfoundry.tasks.tarfile.open')
+    @mock.patch('cloudfoundry.tasks.ARTIFACTS_DIR')
     @mock.patch('subprocess.check_call')
-    @mock.patch('cloudfoundry.tasks.get_job_path')
+    @mock.patch('cloudfoundry.tasks.job_path')
     @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
-    def test_fetch_job_artifacts(self, OrchRelation, get_job_path, check_call,
-                                 taropen, mkdir, mopen, md5, log, remove):
+    def test_fetch_job_artifacts(self, OrchRelation, job_path, check_call,
+                                 adir, log, remove):
         OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version',
                                      'artifacts_url': 'http://url'}]}
-        get_job_path.return_value = 'job_path'
-        mopen.return_value.__enter__().read.return_value = 'read'
-        #md5.return_value.hexdigest.return_value = 'deadbeef'
-        tgz = taropen.return_value.__enter__.return_value
+        adir.__div__.return_value = adir
+        adir.exists.return_value = False
+        job_path.return_value = 'job_path'
         tasks.fetch_job_artifacts('job_name')
+        assert adir.makedirs_p.called
         check_call.assert_called_once_with([
             'wget', '-t0', '-c', '-nv',
-            'http://url/cf-version/amd64/job_name',
-            '-O', 'job_path/job_name.tgz'])
-        #md5.assert_called_once_with('read')
-        taropen.assert_called_once_with('job_path/job_name.tgz')
-        tgz.extractall.assert_called_once_with('job_path')
-
-    @mock.patch('os.path.exists')
-    @mock.patch('os.remove')
-    @mock.patch('charmhelpers.core.hookenv.log')
-    @mock.patch('hashlib.md5')
-    @mock.patch('cloudfoundry.tasks.open', create=True)
-    @mock.patch('charmhelpers.core.host.mkdir')
-    @mock.patch('cloudfoundry.tasks.tarfile.open')
-    @mock.patch('subprocess.check_call')
-    @mock.patch('cloudfoundry.tasks.get_job_path')
-    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
-    @unittest.skip('temporarily disabled')
-    def test_fetch_job_artifacts_missing_checksum(
-            self, OrchRelation, get_job_path, check_call,
-            taropen, mkdir, mopen, md5, log, remove, exists):
-        OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version',
-                                     'artifacts_url': 'http://url'}]}
-        get_job_path.return_value = 'job_path'
-        #urlretrieve.return_value = (None, {})
-        mopen.return_value.__enter__().read.return_value = 'read'
-        md5.return_value.hexdigest.return_value = 'deadbeef'
-        exists.side_effect = [False, True]
-        self.assertRaises(AssertionError, tasks.fetch_job_artifacts, 'job_name')
-        assert not taropen.called
-        remove.assert_called_once_with('job_path/job_name.tgz')
-
-    @mock.patch('os.path.exists')
-    @mock.patch('os.remove')
-    @mock.patch('charmhelpers.core.hookenv.log')
-    @mock.patch('hashlib.md5')
-    @mock.patch('cloudfoundry.tasks.open', create=True)
-    @mock.patch('charmhelpers.core.host.mkdir')
-    @mock.patch('cloudfoundry.tasks.tarfile.open')
-    @mock.patch('subprocess.check_call')
-    @mock.patch('cloudfoundry.tasks.get_job_path')
-    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
-    @unittest.skip('temporarily disabled')
-    def test_fetch_job_artifacts_checksum_mismatch(
-            self, OrchRelation, get_job_path, check_call,
-            taropen, mkdir, mopen, md5, log, remove, exists):
-        OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version',
-                                     'artifacts_url': 'http://url'}]}
-        get_job_path.return_value = 'job_path'
-        #urlretrieve.return_value = (None, {'ETag': '"ca11ab1e"'})
-        mopen.return_value.__enter__().read.return_value = 'read'
-        md5.return_value.hexdigest.return_value = 'deadbeef'
-        exists.side_effect = [False, True]
-        self.assertRaises(AssertionError, tasks.fetch_job_artifacts, 'job_name')
-        assert not taropen.called
-        remove.assert_called_once_with('job_path/job_name.tgz')
-
-    @mock.patch('cloudfoundry.tasks.tarfile.open')
-    @mock.patch('subprocess.check_call')
-    @mock.patch('os.path.exists')
-    @mock.patch('cloudfoundry.tasks.get_job_path')
+            'http://url/job_path/job_name',
+            '-O', adir])
+
+    @mock.patch('subprocess.check_call')
+    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
+    def test_fetch_job_artifacts_nfs(self, orch, cc):
+        orch.return_value = {'orchestrator': [{'cf_version': 'version',
+                             'artifacts_url': 'nfs://url'}]}
+        tasks.fetch_job_artifacts('job_name')
+        assert not cc.called
+
+    @mock.patch('subprocess.check_call')
+    @mock.patch('cloudfoundry.tasks.ARTIFACTS_DIR')
+    @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
+    def test_fetch_job_artifacts_exists(self, orch, adir, cc):
+        orch.return_value = {'orchestrator': [{'cf_version': 'version',
+                             'artifacts_url': 'http://url'}]}
+        adir.__div__.return_value = adir
+        adir.exists.return_value = True
+        tasks.fetch_job_artifacts('job_name')
+        assert not cc.called
+
+    @mock.patch('cloudfoundry.tasks.tarfile.open')
+    @mock.patch('subprocess.check_call')
+    @mock.patch('os.path.exists')
+    @mock.patch('cloudfoundry.tasks.job_path')
     @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
     def test_fetch_job_artifacts_same_version(self, OrchRelation, get_job_path, exists, check_call, taropen):
         OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version',
@@ -160,13 +125,13 @@
 
     @mock.patch('os.symlink')
     @mock.patch('os.unlink')
-    @mock.patch('os.path.exists')
-    @mock.patch('cloudfoundry.tasks.get_job_path')
+    @mock.patch('cloudfoundry.tasks.release_version')
+    @mock.patch('cloudfoundry.tasks.job_path')
     @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
-    def test_install_job_packages(self, OrchRelation, get_job_path, exists, unlink, symlink):
-        get_job_path.return_value = 'job_path'
+    def test_install_job_packages(self, OrchRelation, job_path, rel_ver, unlink, symlink):
+        job_path.return_value = 'job_path'
+        rel_ver.return_value = 'version'
         OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version'}]}
-        exists.side_effect = [False, True, True]
         filename = 'package-123abc.tgz'
 
         script = mock.Mock(name='script', spec=path)
@@ -174,37 +139,33 @@
         script.basename.return_value = filename
 
         with mock.patch('subprocess.check_call') as cc,\
-                mock.patch('cloudfoundry.tasks.path', spec=path) as pth:
-            pkgdir = pth('pkgdir')
-            pkgdir.exists.return_value = False
-
-            reldir = pth('reldir')
-            files = (pth() / 'packages').files
-            files.name = 'files'
-            files.return_value = [script]
-            pkgpath = reldir.__div__().__div__().__div__()
-            pkgpath.exists.return_value = False
-            pkgdest = pkgdir.__div__()
-
-            pkgdest.exists.return_value = False
-            tasks.install_job_packages(pkgdir, reldir,  'job_name')
-
-            assert cc.called
-            cc.assert_called_once_with(['tar', '-xzf', script])
-
-            assert reldir.makedirs_p.called
-            pkgpath.symlink.assert_called_once_with(pkgdir / 'package')
+                mock.patch('cloudfoundry.tasks.path', spec=path) as pth,\
+                mock.patch('cloudfoundry.tasks.ARTIFACTS_DIR', spec=path) as adir:
+            adir.__div__.return_value = adir
+            pth.return_value = pth.__div__.return_value = pth
+            pth.exists.return_value = False
+            pth.files.return_value = [pth]
+
+            tasks.install_job_packages(pth, pth, 'job_name')
+
+            self.assertEqual(cc.call_count, 2)
+            cc.assert_has_calls([
+                mock.call(['tar', '-xzf', adir]),
+                mock.call(['tar', '-xzf', pth]),
+            ])
+
+            assert pth.makedirs.called
+            assert pth.makedirs_p.called
+            pth.symlink.assert_called_once_with(pth)
 
     @mock.patch('cloudfoundry.contexts.OrchestratorRelation')
-    def test_get_job_path(self, OrchRelation):
+    def test_job_path(self, OrchRelation):
         OrchRelation.return_value = {'orchestrator': [{'cf_version': 'version'}]}
-        self.assertEqual(tasks.get_job_path('job_name'), 'charm_dir/jobs/version/job_name')
+        self.assertEqual(tasks.job_path(), 'cf-version/amd64')
 
-    @mock.patch('cloudfoundry.tasks.get_job_path')
     @mock.patch('cloudfoundry.tasks.open', create=True)
     @mock.patch('cloudfoundry.tasks.yaml.safe_load')
-    def test_load_spec(self, safe_load, mopen, get_job_path):
-        get_job_path.side_effect = ['job_path1', 'job_path2']
+    def test_load_spec(self, safe_load, mopen):
         safe_load.side_effect = [
             {'p1': 'v1'},
             {'p2': 'v2'},
@@ -217,8 +178,8 @@
         self.assertEqual(mopen.call_count, 2)
         self.assertEqual(safe_load.call_count, 2)
         self.assertEqual(mopen.call_args_list, [
-            mock.call('job_path1/spec'),
-            mock.call('job_path2/spec'),
+            mock.call('charm_dir/jobs/version/job1/spec'),
+            mock.call('charm_dir/jobs/version/job2/spec'),
         ])
 
     @mock.patch('os.path.exists')

