Merge lp:~bigdata-dev/charms/trusty/apache-hive/trunk into lp:charms/trusty/apache-hive

Proposed by Cory Johns
Status: Merged
Merged at revision: 48
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-hive/trunk
Merge into: lp:charms/trusty/apache-hive
Diff against target: 272 lines (+30/-193)
7 files modified
README.md (+1/-1)
resources.yaml (+2/-2)
tests/00-setup (+0/-8)
tests/01-basic-deployment.py (+24/-0)
tests/100-deploy-hive-mysql (+0/-100)
tests/remote/test_dist_config.py (+0/-72)
tests/tests.yaml (+3/-10)
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-hive/trunk
Reviewer: Kevin W Monroe
Status: Approve
Review via email: mp+273615@code.launchpad.net

Description of the change

Test cleanups for CWR (Cloud Weather Report), and moving the charm's binary resources to S3
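The binaries move from git.launchpad.net to S3 with the pinned sha256 hashes unchanged, so the relocated tarballs can be spot-checked against resources.yaml. A minimal sketch, assuming curl and coreutils are available locally:

    # Hash the relocated x86_64 tarball; the digest should match the
    # 'hash' value pinned in resources.yaml.
    curl -sL https://s3.amazonaws.com/jujubigdata/apache/x86_64/apache-hive-1.0.0-bin-b8e121f.tar.gz | sha256sum
    # expected: b8e121f435defeb94d810eb6867d2d1c27973e4a3b4099f2716dbffafb274184  -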

Revision history for this message
Kevin W Monroe (kwmonroe) wrote:

LGTM, +1

review: Approve

Preview Diff

=== modified file 'README.md'
--- README.md 2015-08-24 23:23:36 +0000
+++ README.md 2015-10-06 20:46:56 +0000
@@ -84,7 +84,7 @@
 
 ## Contact Information
 
-- <bigdata-dev@lists.launchpad.net>
+- <bigdata@lists.ubuntu.com>
 
 
 ## Help

=== modified file 'resources.yaml'
--- resources.yaml 2015-08-24 23:23:36 +0000
+++ resources.yaml 2015-10-06 20:46:56 +0000
@@ -7,10 +7,10 @@
     pypi: jujubigdata>=4.0.0,<5.0.0
 optional_resources:
   hive-ppc64le:
-    url: https://git.launchpad.net/bigdata-data/plain/apache/ppc64le/apache-hive-0.13.0-bin.tar.gz?id=c34a21c939f5fce9ab89b95d65fe2df50e7bbab0
+    url: https://s3.amazonaws.com/jujubigdata/apache/ppc64le/apache-hive-0.13.0-bin-4c83564.tar.gz
     hash: 4c835644eb72a08df059b86c45fb159b95df08e831334cb57e24654ef078e7ee
     hash_type: sha256
   hive-x86_64:
-    url: https://git.launchpad.net/bigdata-data/plain/apache/x86_64/apache-hive-1.0.0-bin.tar.gz?id=c34a21c939f5fce9ab89b95d65fe2df50e7bbab0
+    url: https://s3.amazonaws.com/jujubigdata/apache/x86_64/apache-hive-1.0.0-bin-b8e121f.tar.gz
     hash: b8e121f435defeb94d810eb6867d2d1c27973e4a3b4099f2716dbffafb274184
     hash_type: sha256

=== removed file 'tests/00-setup'
--- tests/00-setup 2015-09-16 21:43:12 +0000
+++ tests/00-setup 1970-01-01 00:00:00 +0000
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-if ! dpkg -s amulet &> /dev/null; then
-    echo Installing Amulet...
-    sudo add-apt-repository -y ppa:juju/stable
-    sudo apt-get update
-    sudo apt-get -y install amulet
-fi

=== added file 'tests/01-basic-deployment.py'
--- tests/01-basic-deployment.py 1970-01-01 00:00:00 +0000
+++ tests/01-basic-deployment.py 2015-10-06 20:46:56 +0000
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+
+import unittest
+import amulet
+
+
+class TestDeploy(unittest.TestCase):
+    """
+    Trivial deployment test for Apache Hive.
+
+    This charm cannot do anything useful by itself, so integration testing
+    is done in the bundle.
+    """
+
+    def test_deploy(self):
+        self.d = amulet.Deployment(series='trusty')
+        self.d.add('hive', 'apache-hive')
+        self.d.setup(timeout=900)
+        self.d.sentry.wait(timeout=1800)
+        self.unit = self.d.sentry['hive'][0]
+
+
+if __name__ == '__main__':
+    unittest.main()

=== removed file 'tests/100-deploy-hive-mysql'
--- tests/100-deploy-hive-mysql 2015-09-17 14:17:43 +0000
+++ tests/100-deploy-hive-mysql 1970-01-01 00:00:00 +0000
@@ -1,100 +0,0 @@
-#!/usr/bin/env python3
-import unittest
-import amulet
-
-
-class TestDeploy(unittest.TestCase):
-    """
-    Deployment test for Apache Hive using HDFS as shared storage and YARN as
-    cluster job manager.
-    """
-
-    @classmethod
-    def setUpClass(cls):
-        cls.d = amulet.Deployment(series='trusty')
-        # Deploy a hadoop cluster
-        cls.d.add('yarn-master', charm='cs:trusty/apache-hadoop-yarn-master')
-        cls.d.add('hdfs-master', charm='cs:trusty/apache-hadoop-hdfs-master')
-        cls.d.add('compute-slave', charm='cs:trusty/apache-hadoop-compute-slave')
-        cls.d.add('plugin', charm='cs:trusty/apache-hadoop-plugin')
-        cls.d.relate('yarn-master:namenode', 'hdfs-master:namenode')
-        cls.d.relate('compute-slave:nodemanager', 'yarn-master:nodemanager')
-        cls.d.relate('compute-slave:datanode', 'hdfs-master:datanode')
-        cls.d.relate('plugin:resourcemanager', 'yarn-master:resourcemanager')
-        cls.d.relate('plugin:namenode', 'hdfs-master:namenode')
-
-        # Add MySQL service (hive needs binlog-format config)
-        cls.d.add('mysql', 'cs:trusty/mysql')
-        cls.d.configure('mysql', {'binlog-format': 'row'})
-
-        # Add Hive service
-        cls.d.add('hive', charm='cs:trusty/apache-hive')
-        cls.d.relate('hive:db', 'mysql:db')
-        cls.d.relate('hive:hadoop-plugin', 'plugin:hadoop-plugin')
-
-        cls.d.setup(timeout=3600)
-        cls.d.sentry.wait(timeout=3600)
-        cls.unit = cls.d.sentry.unit['hive/0']
-
-
-###########################################################################
-# Validate yarn mapreduce operation from the Hive node
-# 1) validate mapreduce execution - writing to hdfs
-###########################################################################
-    def test_yarn_mapreduce_exe1(self):
-        outdir = "/user/ubuntu/teragenout"
-        o, c = self.unit.run("su ubuntu -c 'hdfs dfs -rm -f -R %s &&"
-                             "hadoop jar /usr/lib/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples*.jar teragen 10000 %s'"
-                             % (outdir, outdir))
-        assert c == 0, "Teragen failed to execute: %s" % o
-
-###########################################################################
-# 2) validate successful mapreduce operation after the execution
-###########################################################################
-        o, c = self.unit.run("su hdfs -c 'hdfs dfs -ls %s/_SUCCESS'" % outdir)
-        assert c == 0, "Teragen executed, but expected output was not found: %s" % o
-
-###########################################################################
-# Validate mapreduce operation from Hive node - validates job chain operation
-# 1) validate mapreduce execution - reading and writing to hdfs
-###########################################################################
-    def test_yarn_mapreduce_exe2(self):
-        indir = "/user/ubuntu/teragenout"
-        outdir = "/user/ubuntu/terasortout"
-        o, c = self.unit.run("su ubuntu -c 'hdfs dfs -rm -f -R %s &&"
-                             "hadoop jar /usr/lib/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples*.jar terasort %s %s'"
-                             % (outdir, indir, outdir))
-        assert c == 0, "Terasort failed to execute: %s" % o
-
-###########################################################################
-# 2) validate a successful mapreduce operation after the execution
-###########################################################################
-        o, c = self.unit.run("su hdfs -c 'hdfs dfs -ls %s/_SUCCESS'" % outdir)
-        assert c == 0, "Terasort executed, but expected output was not found: %s" % o
-
-###########################################################################
-# Validate the service is running
-# Validate the JVM
-###########################################################################
-    def test_jvm_status(self):
-        o, c = self.unit.run("su hive -c 'pgrep -a java | grep HiveServer2'")
-        assert "HiveServer2" in o, "HiveServer2 not running"
-
-###########################################################################
-# Validate HIVE command line operation - create a HIVE table
-###########################################################################
-    def test_hive_create_tables(self):
-        o, c = self.unit.run("su hive -c \"hive -e 'create table test(col1 int, col2 string);' 2>&1\"")
-        assert c == 0, "Hive create table failed: %s" % o
-
-###########################################################################
-# Validate HIVE commandline operation - show HIVE tables
-###########################################################################
-    def test_hive_show_tables(self):
-        o, c = self.unit.run("su hive -c \"hive -e \'show tables;\' 2>&1\"")
-        # Look for the 'test' table from our previous test
-        assert "test" in o, "Hive show tables failed: %s" % o
-
-
-if __name__ == '__main__':
-    unittest.main()

=== removed directory 'tests/remote'
=== removed file 'tests/remote/test_dist_config.py'
--- tests/remote/test_dist_config.py 2015-08-21 21:51:45 +0000
+++ tests/remote/test_dist_config.py 1970-01-01 00:00:00 +0000
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-
-import grp
-import os
-import pwd
-import unittest
-
-import jujubigdata
-
-
-class TestDistConfig(unittest.TestCase):
-    """
-    Test that the ``dist.yaml`` settings were applied properly, such as users, groups, and dirs.
-
-    This is done as a remote test on the deployed unit rather than a regular
-    test under ``tests/`` because filling in the ``dist.yaml`` requires Juju
-    context (e.g., config).
-    """
-    @classmethod
-    def setUpClass(cls):
-        config = None
-        config_dir = os.environ['JUJU_CHARM_DIR']
-        config_file = 'dist.yaml'
-        if os.path.isfile(os.path.join(config_dir, config_file)):
-            config = os.path.join(config_dir, config_file)
-        if not config:
-            raise IOError('Could not find {} in {}'.format(config_file, config_dir))
-        reqs = ['vendor', 'hadoop_version', 'packages', 'groups', 'users',
-                'dirs', 'ports']
-        cls.dist_config = jujubigdata.utils.DistConfig(config, reqs)
-
-    def test_groups(self):
-        for name in self.dist_config.groups:
-            try:
-                grp.getgrnam(name)
-            except KeyError:
-                self.fail('Group {} is missing'.format(name))
-
-    def test_users(self):
-        for username, details in self.dist_config.users.items():
-            try:
-                user = pwd.getpwnam(username)
-            except KeyError:
-                self.fail('User {} is missing'.format(username))
-            for groupname in details['groups']:
-                try:
-                    group = grp.getgrnam(groupname)
-                except KeyError:
-                    self.fail('Group {} referenced by user {} does not exist'.format(
-                        groupname, username))
-                if group.gr_gid != user.pw_gid:
-                    self.assertIn(username, group.gr_mem, 'User {} not in group {}'.format(
-                        username, groupname))
-
-    def test_dirs(self):
-        for name, details in self.dist_config.dirs.items():
-            dirpath = self.dist_config.path(name)
-            self.assertTrue(dirpath.isdir(), 'Dir {} is missing'.format(name))
-            stat = dirpath.stat()
-            owner = pwd.getpwuid(stat.st_uid).pw_name
-            group = grp.getgrgid(stat.st_gid).gr_name
-            perms = stat.st_mode & ~0o40000
-            self.assertEqual(owner, details.get('owner', 'root'),
-                             'Dir {} ({}) has wrong owner: {}'.format(name, dirpath, owner))
-            self.assertEqual(group, details.get('group', 'root'),
-                             'Dir {} ({}) has wrong group: {}'.format(name, dirpath, group))
-            self.assertEqual(perms, details.get('perms', 0o755),
-                             'Dir {} ({}) has wrong perms: 0o{:o}'.format(name, dirpath, perms))
-
-
-if __name__ == '__main__':
-    unittest.main()

=== modified file 'tests/tests.yaml'
--- tests/tests.yaml 2015-06-25 15:00:26 +0000
+++ tests/tests.yaml 2015-10-06 20:46:56 +0000
@@ -1,10 +1,3 @@
-# Driver for bundletester: https://github.com/juju-solutions/bundletester
-#
-# It may be useful to alter the defaults during manual testing. For example,
-# set 'reset: false' to reuse existing charms instead of redeploying them.
-
-# Allow bootstrap of current env, default: true
-bootstrap: true
-
-# Use juju-deployer to reset env between test, default: true
-reset: true
+reset: false
+packages:
+  - amulet
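With tests/00-setup removed, the amulet dependency is now declared through the packages list in tests/tests.yaml, so the test runner (bundletester) installs it rather than a setup script. The remaining smoke test can also be invoked directly; a minimal sketch, assuming a trusty Juju environment has already been bootstrapped:

    # Runs the trivial deployment test on its own; it deploys the charm
    # into the current environment and waits for the unit to settle.
    python3 tests/01-basic-deployment.py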
