bigdata-dev team mailing list archive

[Merge] lp:~bigdata-dev/charm-helpers/java-installer into lp:~bigdata-dev/charm-helpers/framework

Cory Johns has proposed merging lp:~bigdata-dev/charm-helpers/java-installer into lp:~bigdata-dev/charm-helpers/framework.

Requested reviews:
  Juju Big Data Development (bigdata-dev)

For more details, see:
https://code.launchpad.net/~bigdata-dev/charm-helpers/java-installer/+merge/252837

Converted the Java install logic to use an external script.

I'm not entirely happy with the "magic spec" behavior, but I couldn't figure out a better way to get the Java version into the spec during the same hook run in which Java is installed (the spec relation is instantiated, and thus the spec generated, before the main hook code runs).  Looking for ideas / suggestions.
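
For context, here is a minimal sketch of the contract that the new _java_installer() helper assumes of the java-installer resource, as inferred from the diff below: an optional "check-only" argument, JAVA_HOME and the Java version printed on two separate lines, and a non-zero exit status when Java is absent or cannot be installed.  The sketch is written as a Python script, and its detection logic (find_executable plus parsing `java -version`) is purely illustrative; the actual installer script is not part of this diff.

    #!/usr/bin/env python
    # Hypothetical java-installer sketch; only the interface (arguments,
    # output format, exit codes) comes from the diff below.  The detection
    # logic is a placeholder.
    import os
    import subprocess
    import sys
    from distutils.spawn import find_executable

    def detect_java_home():
        # Placeholder: derive JAVA_HOME from the `java` on the PATH, if any.
        java = find_executable('java')
        if not java:
            return None
        java = os.path.realpath(java)  # follow /etc/alternatives symlinks
        return os.path.dirname(os.path.dirname(java))  # strip bin/java

    def detect_java_version():
        # `java -version` reports on stderr, e.g.: java version "1.7.0_75"
        out = subprocess.check_output(['java', '-version'],
                                      stderr=subprocess.STDOUT)
        return out.decode().split('"')[1]

    if __name__ == '__main__':
        check_only = 'check-only' in sys.argv[1:]
        java_home = detect_java_home()
        if java_home is None:
            if check_only:
                # Caller catches CalledProcessError, returns (None, None).
                sys.exit(1)
            # The real script would install a JDK here and re-detect;
            # this sketch just fails.
            sys.exit(1)
        print(java_home)              # line 1: JAVA_HOME
        print(detect_java_version())  # line 2: Java version
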
-- 
Your team Juju Big Data Development is requested to review the proposed merge of lp:~bigdata-dev/charm-helpers/java-installer into lp:~bigdata-dev/charm-helpers/framework.
=== modified file 'charmhelpers/contrib/bigdata/handlers/apache.py'
--- charmhelpers/contrib/bigdata/handlers/apache.py	2015-03-10 16:49:12 +0000
+++ charmhelpers/contrib/bigdata/handlers/apache.py	2015-03-12 23:02:31 +0000
@@ -15,7 +15,7 @@
 # along with charm-helpers.  If not, see <http://www.gnu.org/licenses/>.
 
 import re
-from subprocess import check_output, check_call
+from subprocess import check_output, CalledProcessError
 import time
 
 from path import Path
@@ -31,21 +31,42 @@
 
 
 class HadoopBase(object):
-    JAVA_VERSION = '7'
-
     def __init__(self):
         self.cpu_arch = check_output(['uname', '-p']).strip()
         self.load_dist_config()
-        self.spec = {
-            'vendor': self.vendor,
-            'hadoop': self.hadoop_version,
-            'java': self.JAVA_VERSION,
-            'arch': self.cpu_arch,
-        }
         self.client_spec = {
             'hadoop': self.hadoop_version,
         }
 
+    def _java_installer(self, check_only=False):
+        java_installer = Path(jujuresources.resource_path('java-installer'))
+        java_installer.chmod(0o755)
+        try:
+            cmd = [java_installer]
+            if check_only:
+                cmd = cmd + ['check-only']
+            output = check_output(cmd)
+            java_home, java_version = map(str.strip, output.strip().split('\n'))
+            return Path(java_home), java_version
+        except CalledProcessError:
+            if not check_only:
+                raise
+            return None, None
+
+    def spec(self):
+        if not jujuresources.verify('java-installer'):
+            return None
+        else:
+            _, java_version = self._java_installer(check_only=True)
+            if not java_version:
+                return None
+            return {
+                'vendor': self.vendor,
+                'hadoop': self.hadoop_version,
+                'java': java_version,
+                'arch': self.cpu_arch,
+            }
+
     def load_dist_config(self, filename='dist.yaml'):
         self._dist_config = yaml.load(Path(filename).text())
 
@@ -81,8 +102,6 @@
 
     def verify_conditional_resources(self):
         conditional_resources = ['hadoop-%s' % self.cpu_arch]
-        if self.cpu_arch == 'ppc64le':
-            conditional_resources.append('ibm-java-installer')
         mirror_url = hookenv.config()['resources_mirror']
         return jujuresources.fetch(conditional_resources, mirror_url=mirror_url)
 
@@ -143,29 +162,11 @@
         with utils.disable_firewall():
             fetch.apt_update()
             fetch.apt_install(self.packages)
-            self.install_jdk()
+            self.install_java()
             self.install_hadoop()
 
-    def find_jdk(self):
-        # ppc64le uses the ibm jdk; other arches use openjdk
-        if self.cpu_arch == 'ppc64le':
-            output = check_output(['find', '/opt/', '-name', 'java-ppc64le-*'])
-        else:
-            output = check_output(['find', '/usr/', '-name', 'java-%s-openjdk-*' % self.JAVA_VERSION])
-        if output.strip():
-            return Path(output.split('\n')[0])
-        else:
-            return None
-
-    def install_jdk(self):
-        if self.find_jdk():
-            return
-        if self.cpu_arch == 'ppc64le':
-            java_installer = Path(jujuresources.resource_path('ibm-java-installer'))
-            java_installer.chmod(0o755)
-            check_call([java_installer, '-i', 'silent'])
-        else:
-            fetch.apt_install(['openjdk-%s-jdk' % self.JAVA_VERSION])
+    def install_java(self):
+        self.java_home, self.java_version = self._java_installer()
 
     def install_hadoop(self):
         jujuresources.install('hadoop-%s' % self.cpu_arch, destination=self.dirs['hadoop'], skip_top_level=True)
@@ -182,15 +183,12 @@
 
     def configure_hadoop(self):
         config = hookenv.config()
-        jdk_path = self.find_jdk()
-        if not jdk_path:
-            raise ValueError('Unable to find JDK')
 
-        jdk_bin = jdk_path / 'bin'
+        jdk_bin = self.java_home / 'bin'
         hadoop_bin = self.dirs['hadoop'] / 'bin'
         hadoop_sbin = self.dirs['hadoop'] / 'sbin'
         with utils.environment_edit_in_place('/etc/environment') as env:
-            env['JAVA_HOME'] = jdk_path
+            env['JAVA_HOME'] = self.java_home
             if jdk_bin not in env['PATH']:
                 env['PATH'] = ':'.join([env['PATH'], jdk_bin])
             if hadoop_bin not in env['PATH']:
@@ -215,7 +213,7 @@
 
         hadoop_env = self.dirs['hadoop_conf'] / 'hadoop-env.sh'
         utils.re_edit_in_place(hadoop_env, {
-            r'export JAVA_HOME *=.*': 'export JAVA_HOME=%s' % jdk_path,
+            r'export JAVA_HOME *=.*': 'export JAVA_HOME=%s' % self.java_home,
         })
 
         core_site = self.dirs['hadoop_conf'] / 'core-site.xml'
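
The relations.py diff below also touches on the spec-matching rule its docstring describes.  As a self-contained illustration of that rule, here is a sketch in which the helper and all field values are made up for the example and are not charm-helpers code:

    # Illustrative helper, not the actual is_ready() implementation: the
    # requires side checks that every key in its own spec is present and
    # exactly equal on the provides side, so it may declare a subset of
    # the provides-side spec but not a superset.
    def specs_match(local_spec, remote_spec):
        return all(remote_spec.get(key) == value
                   for key, value in local_spec.items())

    # Field values are invented for the example.
    provides_spec = {'vendor': 'apache', 'hadoop': '2.4.1',
                     'java': '1.7.0_75', 'arch': 'x86_64'}
    requires_spec = {'hadoop': '2.4.1'}  # shaped like HadoopBase.client_spec

    assert specs_match(requires_spec, provides_spec)      # subset: compatible
    assert not specs_match(provides_spec, requires_spec)  # superset: mismatch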

=== modified file 'charmhelpers/contrib/bigdata/relations.py'
--- charmhelpers/contrib/bigdata/relations.py	2015-03-10 21:41:34 +0000
+++ charmhelpers/contrib/bigdata/relations.py	2015-03-12 23:02:31 +0000
@@ -33,12 +33,13 @@
     This class adds a ``spec`` key to the ``required_keys`` and populates it
     in :meth:`provide`.  The ``spec`` value must be passed in to :meth:`__init__`.
 
-    The ``spec`` should be a mapping which describes all aspects of the charm's
-    environment or configuration that might affect its interoperability with the
-    remote charm.  The charm on the requires side of the relation will verify
-    that all of the keys in its ``spec`` are present and exactly equal on the
-    provides side of the relation.  This does mean that the requires side can
-    be a subset of the provides side, but not the other way around.
+    The ``spec`` should be a mapping (or a callback that returns a mapping)
+    which describes all aspects of the charm's environment or configuration
+    that might affect its interoperability with the remote charm.  The charm
+    on the requires side of the relation will verify that all of the keys in
+    its ``spec`` are present and exactly equal on the provides side of the
+    relation.  This does mean that the requires side can be a subset of the
+    provides side, but not the other way around.
 
     An example spec string might be::
 
@@ -56,27 +57,34 @@
             particulars which can cause issues if mismatched.
         """
         super(SpecMatchingRelation, self).__init__(*args, **kwargs)
-        if not spec:
-            raise ValueError('Empty spec')
-        self.spec = spec
+        self._spec = spec
         self.required_keys = ['spec'] + self.required_keys
 
-    def provide(self, remote_service, ready):
+    @property
+    def spec(self):
+        if callable(self._spec):
+            return self._spec()
+        return self._spec
+
+    def provide(self, remote_service, all_ready):
         """
         Provide the ``spec`` data to the remote service.
 
         Subclasses *must* either delegate to this method (e.g., via `super()`)
         or include ``'spec': self.spec`` in the provided data themselves.
         """
-        data = super(SpecMatchingRelation, self).provide(remote_service, ready)
-        data['spec'] = json.dumps(self.spec)
+        data = super(SpecMatchingRelation, self).provide(remote_service, all_ready)
+        if self.spec:
+            data['spec'] = json.dumps(self.spec)
         return data
 
     def is_ready(self):
         """
         Validate the ``spec`` data from the connected units to ensure that
-        it matches the local ``spec`` exactly.
+        it matches the local ``spec``.
         """
+        if not self.spec:
+            return False
         if not super(SpecMatchingRelation, self).is_ready():
             return False
         for unit, data in self.filtered_data().iteritems():
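
To see why accepting a callable spec addresses the timing problem described at the top of this proposal, here is a standalone sketch of the deferred evaluation the changes above introduce; the Example class and the `installed` dict are stand-ins for illustration, not charm-helpers APIs:

    # Standalone sketch; Example and `installed` are stand-ins.
    class Example(object):
        def __init__(self, spec=None):
            self._spec = spec

        @property
        def spec(self):
            # Mirrors SpecMatchingRelation.spec: call through if callable.
            if callable(self._spec):
                return self._spec()
            return self._spec

    installed = {'java': None}

    def make_spec():
        # Mirrors HadoopBase.spec(): None until the Java version is known.
        if installed['java'] is None:
            return None
        return {'java': installed['java']}

    rel = Example(spec=make_spec)
    assert rel.spec is None           # pre-install: is_ready() stays False
    installed['java'] = '1.7.0_75'    # install_java() runs later in the hook
    assert rel.spec == {'java': '1.7.0_75'}  # spec reflects the real version

Because the spec is generated on each access rather than captured when the relation is instantiated, a relation created before the main hook code runs can still publish a spec that includes the Java version discovered during install_java().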

