Merge lp:~bigdata-dev/charms/trusty/apache-spark-notebook/trunk into lp:charms/trusty/apache-spark-notebook

Proposed by Kevin W Monroe
Status: Merged
Merged at revision: 5
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-spark-notebook/trunk
Merge into: lp:charms/trusty/apache-spark-notebook
Diff against target: 157 lines (+31/-79) (has conflicts)
6 files modified
README.md (+1/-1)
resources.yaml (+1/-1)
tests/00-setup (+0/-5)
tests/01-basic-deployment.py (+26/-0)
tests/100-deploy-spark-hdfs-yarn (+0/-62)
tests/tests.yaml (+3/-10)
Conflict: can't delete tests/remote because it is not empty.  Not deleting.
Conflict because tests/remote is not versioned, but has versioned children.  Versioned directory.
Contents conflict in tests/remote/test_dist_config.py
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-spark-notebook/trunk
Reviewer: Kevin W Monroe
Review status: Approve
Review via email: mp+286948@code.launchpad.net

Description of the change

Update with the latest from bigdata-dev:
- new tests: the heavyweight HDFS/YARN deployment test is replaced with a trivial smoke test (see the sketch below); full integration testing now happens in the bundle
- updated resources: jujubigdata is bumped to the 5.x series, and the bundled charmhelpers and jujuresources tarballs are refreshed
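
As a minimal sketch of where the new smoke test could go next, the deploy check below is extended with the notebook liveness assertion carried over from the removed 100-deploy-spark-hdfs-yarn test. This is illustrative only and not part of the merge: the class name TestDeployWithCheck is hypothetical, and it assumes amulet's unit.run() semantics (an output/exit-code pair, as used by the old test) and the same pgrep match string.

    #!/usr/bin/env python3
    # Illustrative sketch only (not included in this merge): extend the
    # trivial deployment test with the process check from the removed test.

    import unittest
    import amulet


    class TestDeployWithCheck(unittest.TestCase):

        def test_deploy(self):
            d = amulet.Deployment(series='trusty')
            d.add('spark', 'apache-spark')
            d.add('notebook', 'apache-spark-notebook')
            d.relate('spark:spark', 'notebook:spark')
            d.setup(timeout=900)
            d.sentry.wait(timeout=1800)
            unit = d.sentry['notebook'][0]

            # unit.run() returns (output, exit_code); exit code 0 means pgrep
            # found a python process whose command line mentions 'notebook'.
            output, code = unit.run("pgrep -a python | grep notebook")
            self.assertEqual(code, 0, "IPython Notebook daemon not running")


    if __name__ == '__main__':
        unittest.main()

Keeping the shipped test trivial is deliberate: as the new test's docstring notes, the charm cannot do anything useful by itself, so anything heavier belongs in the bundle tests.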

Kevin W Monroe (kwmonroe) wrote:

+1

review: Approve

Preview Diff

=== modified file 'README.md'
--- README.md 2015-08-25 05:25:58 +0000
+++ README.md 2016-02-23 19:53:35 +0000
@@ -63,7 +63,7 @@
 
 ## Contact Information
 
-- <bigdata-dev@lists.launchpad.net>
+- <bigdata@lists.ubuntu.com>
 
 
 ## Help
 
=== modified file 'resources.yaml'
--- resources.yaml 2015-08-25 03:00:54 +0000
+++ resources.yaml 2016-02-23 19:53:35 +0000
@@ -4,4 +4,4 @@
   pathlib:
     pypi: path.py>=7.0
   jujubigdata:
-    pypi: jujubigdata>=4.0.0,<5.0.0
+    pypi: jujubigdata>=5.0.0,<6.0.0
 
=== removed file 'resources/python/charmhelpers-0.3.1.tar.gz'
Binary files resources/python/charmhelpers-0.3.1.tar.gz 2015-07-07 23:18:57 +0000 and resources/python/charmhelpers-0.3.1.tar.gz 1970-01-01 00:00:00 +0000 differ
=== added file 'resources/python/charmhelpers-0.3.2-1.0.0.tar.gz'
Binary files resources/python/charmhelpers-0.3.2-1.0.0.tar.gz 1970-01-01 00:00:00 +0000 and resources/python/charmhelpers-0.3.2-1.0.0.tar.gz 2016-02-23 19:53:35 +0000 differ
=== removed file 'resources/python/jujuresources-0.2.9.tar.gz'
Binary files resources/python/jujuresources-0.2.9.tar.gz 2015-07-07 23:18:57 +0000 and resources/python/jujuresources-0.2.9.tar.gz 1970-01-01 00:00:00 +0000 differ
=== added file 'resources/python/jujuresources-0.3.0.tar.gz'
Binary files resources/python/jujuresources-0.3.0.tar.gz 1970-01-01 00:00:00 +0000 and resources/python/jujuresources-0.3.0.tar.gz 2016-02-23 19:53:35 +0000 differ
=== removed file 'tests/00-setup'
--- tests/00-setup 2015-05-10 17:39:10 +0000
+++ tests/00-setup 1970-01-01 00:00:00 +0000
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-sudo add-apt-repository ppa:juju/stable -y
-sudo apt-get update
-sudo apt-get install python3 amulet -y
 
=== added file 'tests/01-basic-deployment.py'
--- tests/01-basic-deployment.py 1970-01-01 00:00:00 +0000
+++ tests/01-basic-deployment.py 2016-02-23 19:53:35 +0000
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+import unittest
+import amulet
+
+
+class TestDeploy(unittest.TestCase):
+    """
+    Trivial deployment test for our Spark IPyNotebook.
+
+    This charm cannot do anything useful by itself, so integration testing
+    is done in the bundle.
+    """
+
+    def test_deploy(self):
+        self.d = amulet.Deployment(series='trusty')
+        self.d.add('spark', 'apache-spark')
+        self.d.add('notebook', 'apache-spark-notebook')
+        self.d.relate('spark:spark', 'notebook:spark')
+        self.d.setup(timeout=900)
+        self.d.sentry.wait(timeout=1800)
+        self.unit = self.d.sentry['notebook'][0]
+
+
+if __name__ == '__main__':
+    unittest.main()
 
=== removed file 'tests/100-deploy-spark-hdfs-yarn'
--- tests/100-deploy-spark-hdfs-yarn 2015-07-17 21:58:17 +0000
+++ tests/100-deploy-spark-hdfs-yarn 1970-01-01 00:00:00 +0000
@@ -1,62 +0,0 @@
-#!/usr/bin/python3
-
-import unittest
-import amulet
-
-
-class TestDeploy(unittest.TestCase):
-    """
-    Deployment test for Apache Spark using HDFS as shared storage and YARN as
-    cluster job manager.
-    """
-
-    @classmethod
-    def setUpClass(cls):
-        cls.d = amulet.Deployment(series='trusty')
-        # Deploy a hadoop cluster
-        cls.d.add('yarn-master', charm='cs:~bigdata-dev/trusty/apache-hadoop-yarn-master')
-        cls.d.add('hdfs-master', charm='cs:~bigdata-dev/trusty/apache-hadoop-hdfs-master')
-        cls.d.add('compute-slave', charm='cs:~bigdata-dev/trusty/apache-hadoop-compute-slave', units=3)
-        cls.d.add('plugin', charm='cs:~bigdata-dev/trusty/apache-hadoop-plugin')
-        cls.d.relate('yarn-master:namenode', 'hdfs-master:namenode')
-        cls.d.relate('compute-slave:nodemanager', 'yarn-master:nodemanager')
-        cls.d.relate('compute-slave:datanode', 'hdfs-master:datanode')
-        cls.d.relate('plugin:resourcemanager', 'yarn-master:resourcemanager')
-        cls.d.relate('plugin:namenode', 'hdfs-master:namenode')
-
-        # Add Spark Service
-        cls.d.add('spark', charm='cs:~bigdata-dev/trusty/apache-spark')
-        cls.d.relate('spark:hadoop-plugin', 'plugin:hadoop-plugin')
-
-        # Add IPythonNotebook
-        cls.d.add('notebook', charm='cs:~bigdata-dev/trusty/apache-spark-notebook')
-        cls.d.relate('notebook:spark', 'spark:spark')
-
-        cls.d.setup(timeout=3600)
-        cls.d.sentry.wait()
-        cls.unit = cls.d.sentry.unit['notebook/0']
-
-###########################################################################
-# Validate that the Spark HistoryServer is running
-###########################################################################
-    def test_spark_status(self):
-        o, c = self.unit.run("pgrep -a java | grep HistoryServer")
-        assert c == 0, "Spark HistoryServer not running"
-
-###########################################################################
-# Validate that the Notebook process is running
-###########################################################################
-    def test_notebook_status(self):
-        o, c = self.unit.run("pgrep -a python | grep notebook")
-        assert c == 0, "IPython Notebook daemon not running"
-
-###########################################################################
-# Validate Spark commandline operation - run SparkPi
-###########################################################################
-    def test_spark_job(self):
-        o, c = self.unit.run("su ubuntu -c '/home/ubuntu/sparkpi.sh'")
-        assert c == 0, "SparkPi test failed: %s" % o
-
-
-if __name__ == '__main__':
-    unittest.main()
 
=== renamed file 'tests/remote/test_dist_config.py' => 'tests/remote/test_dist_config.py.THIS' (properties changed: +x to -x)
=== modified file 'tests/tests.yaml'
--- tests/tests.yaml 2015-07-09 15:51:08 +0000
+++ tests/tests.yaml 2016-02-23 19:53:35 +0000
@@ -1,10 +1,3 @@
-# Driver for bundletester: https://github.com/juju-solutions/bundletester
-#
-# It may be useful to alter the defaults during manual testing. For example,
-# set 'reset: false' to reuse existing charms instead of redeploying them.
-
-# Allow bootstrap of current env, default: true
-bootstrap: true
-
-# Use juju-deployer to reset env between test, default: true
-reset: true
+reset: false
+packages:
+  - amulet
