bigdata-dev team mailing list archive: Message #00396
[Merge] lp:~bigdata-dev/charms/trusty/apache-spark-notebook/trunk into lp:charms/trusty/apache-spark-notebook
Kevin W Monroe has proposed merging lp:~bigdata-dev/charms/trusty/apache-spark-notebook/trunk into lp:charms/trusty/apache-spark-notebook.
Requested reviews:
Juju Big Data Development (bigdata-dev)
For more details, see:
https://code.launchpad.net/~bigdata-dev/charms/trusty/apache-spark-notebook/trunk/+merge/273764
Changes to tests (simple stuff in the charm; complex stuff in the bundle), and update the mailing list address in the README.
--
Your team Juju Big Data Development is requested to review the proposed merge of lp:~bigdata-dev/charms/trusty/apache-spark-notebook/trunk into lp:charms/trusty/apache-spark-notebook.
=== modified file 'README.md'
--- README.md 2015-08-25 05:25:58 +0000
+++ README.md 2015-10-07 22:32:26 +0000
@@ -63,7 +63,7 @@
## Contact Information
-- <bigdata-dev@xxxxxxxxxxxxxxxxxxx>
+- <bigdata@xxxxxxxxxxxxxxxx>
## Help
=== added file 'resources/python/jujuresources-0.2.11.tar.gz'
Binary files resources/python/jujuresources-0.2.11.tar.gz 1970-01-01 00:00:00 +0000 and resources/python/jujuresources-0.2.11.tar.gz 2015-10-07 22:32:26 +0000 differ
=== removed file 'resources/python/jujuresources-0.2.9.tar.gz'
Binary files resources/python/jujuresources-0.2.9.tar.gz 2015-07-07 23:18:57 +0000 and resources/python/jujuresources-0.2.9.tar.gz 1970-01-01 00:00:00 +0000 differ
=== removed file 'tests/00-setup'
--- tests/00-setup 2015-05-10 17:39:10 +0000
+++ tests/00-setup 1970-01-01 00:00:00 +0000
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-sudo add-apt-repository ppa:juju/stable -y
-sudo apt-get update
-sudo apt-get install python3 amulet -y
=== added file 'tests/01-basic-deployment.py'
--- tests/01-basic-deployment.py 1970-01-01 00:00:00 +0000
+++ tests/01-basic-deployment.py 2015-10-07 22:32:26 +0000
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+import unittest
+import amulet
+
+
+class TestDeploy(unittest.TestCase):
+ """
+ Trivial deployment test for our Spark IPyNotebook.
+
+ This charm cannot do anything useful by itself, so integration testing
+ is done in the bundle.
+ """
+
+ def test_deploy(self):
+ self.d = amulet.Deployment(series='trusty')
+ self.d.add('spark', 'apache-spark')
+ self.d.add('notebook', 'apache-spark-notebook')
+ self.d.relate('spark:spark', 'notebook:spark')
+ self.d.setup(timeout=900)
+ self.d.sentry.wait(timeout=1800)
+ self.unit = self.d.sentry['notebook'][0]
+
+
+if __name__ == '__main__':
+ unittest.main()
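For context, the charm-level test above stays deliberately thin because the notebook charm does nothing useful until it is related to Spark. If a minimal smoke check were ever wanted at the charm level too, it could reuse the notebook process check from the removed integration test below. A sketch only, not part of this merge (class and method names are made up; the amulet calls mirror the ones already used in this diff):

    #!/usr/bin/env python3
    # Sketch: the trivial deployment above, plus the notebook process check
    # carried over from the removed 100-deploy-spark-hdfs-yarn test.
    import unittest
    import amulet


    class TestDeployWithSmokeCheck(unittest.TestCase):

        @classmethod
        def setUpClass(cls):
            # Same minimal topology as tests/01-basic-deployment.py
            cls.d = amulet.Deployment(series='trusty')
            cls.d.add('spark', 'apache-spark')
            cls.d.add('notebook', 'apache-spark-notebook')
            cls.d.relate('spark:spark', 'notebook:spark')
            cls.d.setup(timeout=900)
            cls.d.sentry.wait(timeout=1800)
            cls.unit = cls.d.sentry['notebook'][0]

        def test_notebook_running(self):
            # Same check as the removed bundle-level test: the IPython
            # Notebook daemon should be running on the notebook unit.
            output, code = self.unit.run("pgrep -a python | grep notebook")
            self.assertEqual(code, 0, "IPython Notebook daemon not running")


    if __name__ == '__main__':
        unittest.main()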
=== removed file 'tests/100-deploy-spark-hdfs-yarn'
--- tests/100-deploy-spark-hdfs-yarn 2015-07-17 21:58:17 +0000
+++ tests/100-deploy-spark-hdfs-yarn 1970-01-01 00:00:00 +0000
@@ -1,62 +0,0 @@
-#!/usr/bin/python3
-
-import unittest
-import amulet
-
-
-class TestDeploy(unittest.TestCase):
- """
- Deployment test for Apache Spark using HDFS as shared storage and YARN as
- cluster job manager.
- """
-
- @classmethod
- def setUpClass(cls):
- cls.d = amulet.Deployment(series='trusty')
- # Deploy a hadoop cluster
- cls.d.add('yarn-master', charm='cs:~bigdata-dev/trusty/apache-hadoop-yarn-master')
- cls.d.add('hdfs-master', charm='cs:~bigdata-dev/trusty/apache-hadoop-hdfs-master')
- cls.d.add('compute-slave', charm='cs:~bigdata-dev/trusty/apache-hadoop-compute-slave', units=3)
- cls.d.add('plugin', charm='cs:~bigdata-dev/trusty/apache-hadoop-plugin')
- cls.d.relate('yarn-master:namenode', 'hdfs-master:namenode')
- cls.d.relate('compute-slave:nodemanager', 'yarn-master:nodemanager')
- cls.d.relate('compute-slave:datanode', 'hdfs-master:datanode')
- cls.d.relate('plugin:resourcemanager', 'yarn-master:resourcemanager')
- cls.d.relate('plugin:namenode', 'hdfs-master:namenode')
-
- # Add Spark Service
- cls.d.add('spark', charm='cs:~bigdata-dev/trusty/apache-spark')
- cls.d.relate('spark:hadoop-plugin', 'plugin:hadoop-plugin')
-
- # Add IPythonNotebook
- cls.d.add('notebook', charm='cs:~bigdata-dev/trusty/apache-spark-notebook')
- cls.d.relate('notebook:spark', 'spark:spark')
-
- cls.d.setup(timeout=3600)
- cls.d.sentry.wait()
- cls.unit = cls.d.sentry.unit['notebook/0']
-
-###########################################################################
-# Validate that the Spark HistoryServer is running
-###########################################################################
- def test_spark_status(self):
- o, c = self.unit.run("pgrep -a java | grep HistoryServer")
- assert c == 0, "Spark HistoryServer not running"
-
-###########################################################################
-# Validate that the Notebook process is running
-###########################################################################
- def test_notebook_status(self):
- o, c = self.unit.run("pgrep -a python | grep notebook")
- assert c == 0, "IPython Notebook daemon not running"
-
-###########################################################################
-# Validate Spark commandline operation - run SparkPi
-###########################################################################
- def test_spark_job(self):
- o, c = self.unit.run("su ubuntu -c '/home/ubuntu/sparkpi.sh'")
- assert c == 0, "SparkPi test failed: %s" % o
-
-
-if __name__ == '__main__':
- unittest.main()
=== renamed file 'tests/remote/test_dist_config.py' => 'tests/remote/test_dist_config.py.THIS' (properties changed: +x to -x)
=== modified file 'tests/tests.yaml'
--- tests/tests.yaml 2015-07-09 15:51:08 +0000
+++ tests/tests.yaml 2015-10-07 22:32:26 +0000
@@ -1,10 +1,3 @@
-# Driver for bundletester: https://github.com/juju-solutions/bundletester
-#
-# It may be useful to alter the defaults during manual testing. For example,
-# set 'reset: false' to reuse existing charms instead of redeploying them.
-
-# Allow bootstrap of current env, default: true
-bootstrap: true
-
-# Use juju-deployer to reset env between test, default: true
-reset: true
+reset: false
+packages:
+ - amulet
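Taken together with the removal of tests/00-setup above, the net effect is a much smaller tests.yaml: the packages list presumably lets bundletester install amulet itself, so the separate bootstrap script is no longer needed, and 'reset: false' reuses the environment between tests. For reference, the resulting file as reconstructed from this diff:

    reset: false
    packages:
      - amulet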