Merge lp:~bigdata-dev/charms/trusty/apache-hadoop-compute-slave/trunk into lp:charms/trusty/apache-hadoop-compute-slave

Proposed by Cory Johns
Status: Merged
Merged at revision: 89
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-hadoop-compute-slave/trunk
Merge into: lp:charms/trusty/apache-hadoop-compute-slave
Diff against target: 241 lines (+7/-135)
7 files modified
README.md (+1/-1)
hooks/callbacks.py (+1/-12)
resources.yaml (+2/-2)
tests/00-setup (+0/-8)
tests/01-basic-deployment.py (+0/-40)
tests/remote/test_dist_config.py (+0/-72)
tests/tests.yaml (+3/-0)
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-hadoop-compute-slave/trunk
Reviewer: Kevin W Monroe
Status: Approve
Review via email: mp+273436@code.launchpad.net

Description of the change

Remove trivial test in favor of bundle tests.
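
For reference, the trivial deployment test (re-added in revision 106) is driven by the new tests/tests.yaml (reset: false, packages: amulet), while the process and dist.yaml checks are deferred to the bundle tests. Below is a minimal sketch of running the remaining test by hand, assuming a bootstrapped trusty Juju environment, a local checkout of the charm, and amulet installed from ppa:juju/stable (the same PPA the removed tests/00-setup used); the test harness normally invokes it via tests.yaml:

    # Install amulet; tests/tests.yaml only declares the package, it does not add the PPA
    sudo add-apt-repository -y ppa:juju/stable
    sudo apt-get update && sudo apt-get -y install amulet
    # Run the unittest directly; it deploys the charm standalone and waits for the unit
    cd apache-hadoop-compute-slave
    python3 tests/01-basic-deployment.py -v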

105. By Cory Johns

Removed duplicate status function and fixed lint errors

106. By Cory Johns

Re-added trivial test

107. By Cory Johns

Update mailing list

108. By Cory Johns

Switch to S3 for flume binaries

Revision history for this message
Kevin W Monroe (kwmonroe) wrote:

LGTM, +1

review: Approve

Preview Diff

=== modified file 'README.md'
--- README.md 2015-09-15 18:29:08 +0000
+++ README.md 2015-10-06 18:23:28 +0000
@@ -96,7 +96,7 @@
 
 ## Contact Information
 
-- <bigdata-dev@lists.launchpad.net>
+- <bigdata@lists.ubuntu.com>
 
 
 ## Hadoop

=== modified file 'hooks/callbacks.py'
--- hooks/callbacks.py 2015-08-24 15:15:12 +0000
+++ hooks/callbacks.py 2015-10-06 18:23:28 +0000
@@ -21,7 +21,6 @@
 from jujubigdata.relations import NameNodeMaster, ResourceManagerMaster, Ganglia
 from charmhelpers.core.templating import render
 from functools import partial
-from subprocess import check_call
 
 
 def update_blocked_status():
@@ -61,16 +60,6 @@
     ))
 
 
-def update_working_status():
-    if unitdata.kv().get('charm.active', False):
-        hookenv.status_set('maintenance', 'Updating configuration')
-        return
-    yarn_connected = ResourceManagerMaster().connected_units()
-    hookenv.status_set('maintenance', 'Setting up DataNode%s' % (
-        ' and NodeManager' if yarn_connected else '',
-    ))
-
-
 def update_active_status():
     hdfs_ready = NameNodeMaster().is_ready()
     yarn_connected = ResourceManagerMaster().connected_units()
@@ -112,6 +101,6 @@
     if ganglia_metrics_changed:
         #check_call(['actions/restart-hdfs'])
         # IMPLEMENT RESTART COMPUTE SLAVE?
-        hookenv.log("please manually restart compute slave hadoop components", level=None)
+        hookenv.log("please manually restart compute slave hadoop components", level=None)
 
 purge_ganglia_metrics = partial(conf_ganglia_metrics, purgeConf=True)

=== modified file 'resources.yaml'
--- resources.yaml 2015-09-15 18:29:08 +0000
+++ resources.yaml 2015-10-06 18:23:28 +0000
@@ -19,10 +19,10 @@
     hash: 03ad135835bfe413f85fe176259237a8
     hash_type: md5
   hadoop-ppc64le:
-    url: https://git.launchpad.net/bigdata-data/plain/apache/ppc64le/hadoop-2.4.1-ppc64le.tar.gz?id=c34a21c939f5fce9ab89b95d65fe2df50e7bbab0
+    url: https://s3.amazonaws.com/jujubigdata/apache/ppc64le/hadoop-2.4.1-ppc64le-09942b1.tar.gz
     hash: 09942b168a3db0d183b281477d3dae9deb7b7bc4b5783ba5cda3965b62e71bd5
     hash_type: sha256
   hadoop-x86_64:
-    url: https://git.launchpad.net/bigdata-data/plain/apache/x86_64/hadoop-2.4.1.tar.gz?id=c34a21c939f5fce9ab89b95d65fe2df50e7bbab0
+    url: https://s3.amazonaws.com/jujubigdata/apache/x86_64/hadoop-2.4.1-a790d39.tar.gz
     hash: a790d39baba3a597bd226042496764e0520c2336eedb28a1a3d5c48572d3b672
     hash_type: sha256

=== added directory 'tests'
=== removed directory 'tests'
=== removed file 'tests/00-setup'
--- tests/00-setup 2015-02-17 03:21:13 +0000
+++ tests/00-setup 1970-01-01 00:00:00 +0000
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-if ! dpkg -s amulet &> /dev/null; then
-    echo Installing Amulet...
-    sudo add-apt-repository -y ppa:juju/stable
-    sudo apt-get update
-    sudo apt-get -y install amulet
-fi

=== added file 'tests/01-basic-deployment.py'
--- tests/01-basic-deployment.py 1970-01-01 00:00:00 +0000
+++ tests/01-basic-deployment.py 2015-10-06 18:23:28 +0000
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+
+import unittest
+import amulet
+
+
+class TestDeploy(unittest.TestCase):
+    """
+    Trivial deployment test for Apache Hadoop Compute Slave.
+
+    This charm cannot do anything useful by itself, so integration testing
+    is done in the bundle.
+    """
+
+    def test_deploy(self):
+        self.d = amulet.Deployment(series='trusty')
+        self.d.add('compute-slave', 'apache-hadoop-compute-slave')
+        self.d.setup(timeout=900)
+        self.d.sentry.wait(timeout=1800)
+        self.unit = self.d.sentry['compute-slave'][0]
+
+
+if __name__ == '__main__':
+    unittest.main()

=== removed file 'tests/01-basic-deployment.py'
--- tests/01-basic-deployment.py 2015-09-15 17:14:40 +0000
+++ tests/01-basic-deployment.py 1970-01-01 00:00:00 +0000
@@ -1,40 +0,0 @@
-#!/usr/bin/env python3
-
-import unittest
-import amulet
-
-
-class TestDeploy(unittest.TestCase):
-    """
-    Basic deployment test for Apache Hadoop Compute Slave.
-
-    This charm cannot do anything useful by itself, so integration testing
-    is done in the bundle.
-    """
-
-    @classmethod
-    def setUpClass(cls):
-        cls.d = amulet.Deployment(series='trusty')
-        cls.d.add('apache-hadoop-compute-slave')
-        cls.d.setup(timeout=900)
-        cls.d.sentry.wait(timeout=1800)
-        cls.unit = cls.d.sentry.unit['apache-hadoop-compute-slave/0']
-
-    def test_deploy(self):
-        output, retcode = self.unit.run("pgrep -a java")
-        assert 'ResourceManager' not in output, "ResourceManager should not be started"
-        assert 'JobHistoryServer' not in output, "JobHistoryServer should not be started"
-        assert 'NodeManager' not in output, "NodeManager should not be started"
-        assert 'NameNode' not in output, "NameNode should not be started"
-        assert 'SecondaryNameNode' not in output, "SecondaryNameNode should not be started"
-        assert 'DataNode' not in output, "DataServer should not be started"
-
-    def test_dist_config(self):
-        # test_dist_config.py is run on the deployed unit because it
-        # requires the Juju context to properly validate dist.yaml
-        output, retcode = self.unit.run("tests/remote/test_dist_config.py")
-        self.assertEqual(retcode, 0, 'Remote dist config test failed:\n{}'.format(output))
-
-
-if __name__ == '__main__':
-    unittest.main()

=== removed directory 'tests/remote'
=== removed file 'tests/remote/test_dist_config.py'
--- tests/remote/test_dist_config.py 2015-08-21 21:51:17 +0000
+++ tests/remote/test_dist_config.py 1970-01-01 00:00:00 +0000
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-
-import grp
-import os
-import pwd
-import unittest
-
-from charmhelpers.contrib import bigdata
-
-
-class TestDistConfig(unittest.TestCase):
-    """
-    Test that the ``dist.yaml`` settings were applied properly, such as users, groups, and dirs.
-
-    This is done as a remote test on the deployed unit rather than a regular
-    test under ``tests/`` because filling in the ``dist.yaml`` requires Juju
-    context (e.g., config).
-    """
-    @classmethod
-    def setUpClass(cls):
-        config = None
-        config_dir = os.environ['JUJU_CHARM_DIR']
-        config_file = 'dist.yaml'
-        if os.path.isfile(os.path.join(config_dir, config_file)):
-            config = os.path.join(config_dir, config_file)
-        if not config:
-            raise IOError('Could not find {} in {}'.format(config_file, config_dir))
-        reqs = ['vendor', 'hadoop_version', 'packages', 'groups', 'users',
-                'dirs', 'ports']
-        cls.dist_config = bigdata.utils.DistConfig(config, reqs)
-
-    def test_groups(self):
-        for name in self.dist_config.groups:
-            try:
-                grp.getgrnam(name)
-            except KeyError:
-                self.fail('Group {} is missing'.format(name))
-
-    def test_users(self):
-        for username, details in self.dist_config.users.items():
-            try:
-                user = pwd.getpwnam(username)
-            except KeyError:
-                self.fail('User {} is missing'.format(username))
-            for groupname in details['groups']:
-                try:
-                    group = grp.getgrnam(groupname)
-                except KeyError:
-                    self.fail('Group {} referenced by user {} does not exist'.format(
-                        groupname, username))
-                if group.gr_gid != user.pw_gid:
-                    self.assertIn(username, group.gr_mem, 'User {} not in group {}'.format(
-                        username, groupname))
-
-    def test_dirs(self):
-        for name, details in self.dist_config.dirs.items():
-            dirpath = self.dist_config.path(name)
-            self.assertTrue(dirpath.isdir(), 'Dir {} is missing'.format(name))
-            stat = dirpath.stat()
-            owner = pwd.getpwuid(stat.st_uid).pw_name
-            group = grp.getgrgid(stat.st_gid).gr_name
-            perms = stat.st_mode & ~0o40000
-            self.assertEqual(owner, details.get('owner', 'root'),
-                             'Dir {} ({}) has wrong owner: {}'.format(name, dirpath, owner))
-            self.assertEqual(group, details.get('group', 'root'),
-                             'Dir {} ({}) has wrong group: {}'.format(name, dirpath, group))
-            self.assertEqual(perms, details.get('perms', 0o755),
-                             'Dir {} ({}) has wrong perms: 0o{:o}'.format(name, dirpath, perms))
-
-
-if __name__ == '__main__':
-    unittest.main()

=== added file 'tests/tests.yaml'
--- tests/tests.yaml 1970-01-01 00:00:00 +0000
+++ tests/tests.yaml 2015-10-06 18:23:28 +0000
@@ -0,0 +1,3 @@
+reset: false
+packages:
+  - amulet
