Merge lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk into lp:charms/trusty/apache-hadoop-yarn-master

Proposed by Kevin W Monroe
Status: Merged
Merged at revision: 90
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk
Merge into: lp:charms/trusty/apache-hadoop-yarn-master
Diff against target: 320 lines (+222/-10) (has conflicts)
9 files modified
README.md (+21/-0)
config.yaml (+10/-0)
hooks/callbacks.py (+42/-5)
hooks/common.py (+24/-3)
hooks/ganglia-relation-broken (+26/-0)
hooks/ganglia-relation-changed (+26/-0)
metadata.yaml (+2/-0)
templates/hadoop-metrics2.properties.j2 (+69/-0)
tests/01-basic-deployment.py (+2/-2)
Text conflict in hooks/callbacks.py
Text conflict in hooks/common.py
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk
Reviewer Review Type Date Requested Status
Kevin W Monroe Approve
Review via email: mp+271165@code.launchpad.net
To post a comment you must log in.
Revision history for this message
Kevin W Monroe (kwmonroe) :
review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'README.md'
2--- README.md 2015-08-24 23:16:05 +0000
3+++ README.md 2015-09-15 17:35:17 +0000
4@@ -27,6 +27,24 @@
5 hadoop jar my-job.jar
6
7
8+## Monitoring
9+
10+This charm supports monitoring via Ganglia. To enable monitoring, you must
11+do **both** of the following (the order does not matter):
12+
13+ * Add a relation to the [Ganglia charm][] via the `:master` relation
14+ * Enable the `ganglia_metrics` config option
15+
16+For example:
17+
18+ juju add-relation yarn-master ganglia:master
19+ juju set yarn-master ganglia_metrics=true
20+
 21+Enabling monitoring will restart the ResourceManager and all NodeManager
22+components on all of the related compute-slaves. Take care to ensure that there
23+are no running jobs when enabling monitoring.
24+
25+
26 ## Deploying in Network-Restricted Environments
27
28 The Apache Hadoop charms can be deployed in environments with limited network
29@@ -75,3 +93,6 @@
30 - [Apache Hadoop bug trackers](http://hadoop.apache.org/issue_tracking.html)
31 - [Apache Hadoop mailing lists](http://hadoop.apache.org/mailing_lists.html)
32 - [Apache Hadoop Juju Charm](http://jujucharms.com/?text=hadoop)
33+
34+
35+[Ganglia charm]: http://jujucharms.com/ganglia/
36
37=== modified file 'config.yaml'
38--- config.yaml 2015-04-03 16:49:17 +0000
39+++ config.yaml 2015-09-15 17:35:17 +0000
40@@ -4,3 +4,13 @@
41 default: ''
42 description: |
43 URL from which to fetch resources (e.g., Hadoop binaries) instead of Launchpad.
44+ ganglia_metrics:
45+ type: boolean
46+ default: false
47+ description: |
48+ Enable metrics using Ganglia. Note that enabling this option will
49+ have no effect if the service is not related to a ganglia service
50+ via the ganglia:master relation. Enabling this option with the
51+ relation will issue a restart to the ResourceManager and all
52+ NodeManager components on all related compute-slaves.
53+ See the README for more information.
54
55=== modified file 'hooks/callbacks.py'
56--- hooks/callbacks.py 2015-08-10 22:59:54 +0000
57+++ hooks/callbacks.py 2015-09-15 17:35:17 +0000
58@@ -18,7 +18,10 @@
59
60 from charmhelpers.core import hookenv
61 from charmhelpers.core import unitdata
62-from jujubigdata.relations import NameNode, NodeManager
63+from jujubigdata.relations import NameNode, NodeManager, Ganglia
64+from charmhelpers.core.templating import render
65+from functools import partial
66+from subprocess import check_call
67
68
69 def update_blocked_status():
70@@ -46,7 +49,41 @@
71 hookenv.status_set('waiting', 'Waiting for compute slaves to provide NodeManagers')
72 else:
73 hookenv.status_set('blocked', 'Waiting for relation to compute slaves')
74-
75-
76-def clear_active_flag():
77- unitdata.kv().set('charm.active', False)
78+<<<<<<< TREE
79+
80+
81+def clear_active_flag():
82+ unitdata.kv().set('charm.active', False)
83+=======
84+
85+
86+def clear_active_flag():
87+ unitdata.kv().set('charm.active', False)
88+
89+
90+def conf_ganglia_metrics(purgeConf=False):
91+ """
92+ Send hadoop specific metrics to a ganglia server
93+ """
94+ config = hookenv.config()
95+ ganglia_metrics = config['ganglia_metrics'] and not purgeConf
96+ ganglia_metrics_changed = ganglia_metrics != unitdata.kv().get('ganglia_metrics', False)
97+ unitdata.kv().set('ganglia_metrics', ganglia_metrics)
98+ comment = '#' if not ganglia_metrics else ''
99+ ganglia_host = 'UNSET_BY_JUJU' if not ganglia_metrics else Ganglia().host()
100+ ganglia_sink_str = comment + '*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31'
101+ hookenv.log("Configuring ganglia sink in /etc/hadoop/conf/hadoop-metrics2.properties", level=None)
102+ render(
103+ source='hadoop-metrics2.properties.j2',
104+ target='/etc/hadoop/conf/hadoop-metrics2.properties',
105+ context={
106+ 'ganglia_host': ganglia_host,
107+ 'ganglia_sink_str': ganglia_sink_str,
108+ },
109+ ),
110+ if ganglia_metrics_changed:
111+ check_call(['actions/restart-yarn'])
112+
113+
114+purge_ganglia_metrics = partial(conf_ganglia_metrics, purgeConf=True)
115+>>>>>>> MERGE-SOURCE
116
117=== modified file 'hooks/common.py' (properties changed: +x to -x)
118--- hooks/common.py 2015-08-10 22:59:54 +0000
119+++ hooks/common.py 2015-09-15 17:35:17 +0000
120@@ -113,9 +113,30 @@
121 charmframework.helpers.close_ports(dist_config.exposed_ports('yarn-master')),
122 yarn.stop_resourcemanager,
123 yarn.stop_jobhistory,
124- callbacks.update_blocked_status,
125- ],
126- },
127+<<<<<<< TREE
128+ callbacks.update_blocked_status,
129+ ],
130+ },
131+=======
132+ callbacks.update_blocked_status,
133+ ],
134+ },
135+ {
136+ 'name': 'ganglia',
137+ 'requires': [
138+ hadoop.is_installed,
139+ jujubigdata.relations.Ganglia,
140+ ],
141+ 'callbacks': [
142+ callbacks.conf_ganglia_metrics,
143+ ],
144+ 'cleanup': [
145+ callbacks.purge_ganglia_metrics,
146+ ],
147+
148+ },
149+
150+>>>>>>> MERGE-SOURCE
151 ])
152 manager.manage()
153
154
155=== added file 'hooks/ganglia-relation-broken'
156--- hooks/ganglia-relation-broken 1970-01-01 00:00:00 +0000
157+++ hooks/ganglia-relation-broken 2015-09-15 17:35:17 +0000
158@@ -0,0 +1,26 @@
159+#!/usr/bin/env python
160+# Licensed under the Apache License, Version 2.0 (the "License");
161+# you may not use this file except in compliance with the License.
162+# You may obtain a copy of the License at
163+#
164+# http://www.apache.org/licenses/LICENSE-2.0
165+#
166+# Unless required by applicable law or agreed to in writing, software
167+# distributed under the License is distributed on an "AS IS" BASIS,
168+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
169+# See the License for the specific language governing permissions and
170+# limitations under the License.
171+
172+"""
173+All hooks in this charm are managed by the Charm Framework.
174+The framework helps manage dependencies and preconditions to ensure that
175+steps are only executed when they can be successful. As such, no additional
176+code should be added to this hook; instead, please integrate new functionality
177+into the 'callbacks' list in hooks/common.py. New callbacks can be placed
178+in hooks/callbacks.py, if necessary.
179+
180+See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html
181+for more information.
182+"""
183+import common
184+common.manage()
185
186=== added file 'hooks/ganglia-relation-changed'
187--- hooks/ganglia-relation-changed 1970-01-01 00:00:00 +0000
188+++ hooks/ganglia-relation-changed 2015-09-15 17:35:17 +0000
189@@ -0,0 +1,26 @@
190+#!/usr/bin/env python
191+# Licensed under the Apache License, Version 2.0 (the "License");
192+# you may not use this file except in compliance with the License.
193+# You may obtain a copy of the License at
194+#
195+# http://www.apache.org/licenses/LICENSE-2.0
196+#
197+# Unless required by applicable law or agreed to in writing, software
198+# distributed under the License is distributed on an "AS IS" BASIS,
199+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200+# See the License for the specific language governing permissions and
201+# limitations under the License.
202+
203+"""
204+All hooks in this charm are managed by the Charm Framework.
205+The framework helps manage dependencies and preconditions to ensure that
206+steps are only executed when they can be successful. As such, no additional
207+code should be added to this hook; instead, please integrate new functionality
208+into the 'callbacks' list in hooks/common.py. New callbacks can be placed
209+in hooks/callbacks.py, if necessary.
210+
211+See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html
212+for more information.
213+"""
214+import common
215+common.manage()
216
217=== modified file 'metadata.yaml'
218--- metadata.yaml 2015-04-23 20:48:28 +0000
219+++ metadata.yaml 2015-09-15 17:35:17 +0000
220@@ -10,6 +10,8 @@
221 provides:
222 resourcemanager:
223 interface: mapred
224+ ganglia:
225+ interface: monitor
226 requires:
227 namenode:
228 interface: dfs
229
230=== added file 'resources/python/jujuresources-0.2.11.tar.gz'
231Binary files resources/python/jujuresources-0.2.11.tar.gz 1970-01-01 00:00:00 +0000 and resources/python/jujuresources-0.2.11.tar.gz 2015-09-15 17:35:17 +0000 differ
232=== added directory 'templates'
233=== added file 'templates/hadoop-metrics2.properties.j2'
234--- templates/hadoop-metrics2.properties.j2 1970-01-01 00:00:00 +0000
235+++ templates/hadoop-metrics2.properties.j2 2015-09-15 17:35:17 +0000
236@@ -0,0 +1,69 @@
237+#
238+# Licensed to the Apache Software Foundation (ASF) under one or more
239+# contributor license agreements. See the NOTICE file distributed with
240+# this work for additional information regarding copyright ownership.
241+# The ASF licenses this file to You under the Apache License, Version 2.0
242+# (the "License"); you may not use this file except in compliance with
243+# the License. You may obtain a copy of the License at
244+#
245+# http://www.apache.org/licenses/LICENSE-2.0
246+#
247+# Unless required by applicable law or agreed to in writing, software
248+# distributed under the License is distributed on an "AS IS" BASIS,
249+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
250+# See the License for the specific language governing permissions and
251+# limitations under the License.
252+#
253+
254+# syntax: [prefix].[source|sink].[instance].[options]
255+# See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
256+
257+*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
258+# default sampling period, in seconds
259+*.period=10
260+
261+# Defining sink for Ganglia 3.1
262+{{ ganglia_sink_str }}
263+
264+# Default polling period for GangliaSink
265+*.sink.ganglia.period=10
266+
267+# default for supportsparse is false
268+*.sink.ganglia.supportsparse=true
269+
270+# Directing output to ganglia servers
271+
272+*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
273+*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
274+
275+namenode.sink.ganglia.servers={{ ganglia_host }}:8649
276+datanode.sink.ganglia.servers={{ ganglia_host }}:8649
277+jobtracker.sink.ganglia.servers={{ ganglia_host }}:8649
278+tasktracker.sink.ganglia.servers={{ ganglia_host }}:8649
279+maptask.sink.ganglia.servers={{ ganglia_host }}:8649
280+reducetask.sink.ganglia.servers={{ ganglia_host }}:8649
281+resourcemanager.sink.ganglia.servers={{ ganglia_host }}:8649
282+nodemanager.sink.ganglia.servers={{ ganglia_host }}:8649
283+historyserver.sink.ganglia.servers={{ ganglia_host }}:8649
284+journalnode.sink.ganglia.servers={{ ganglia_host }}:8649
285+resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
286+
 287+# The namenode-metrics.out will contain metrics from all context
288+#namenode.sink.file.filename=namenode-metrics.out
289+# Specifying a special sampling period for namenode:
290+#namenode.sink.*.period=8
291+
292+#datanode.sink.file.filename=datanode-metrics.out
293+
294+# the following example split metrics of different
295+# context to different sinks (in this case files)
296+#jobtracker.sink.file_jvm.context=jvm
297+#jobtracker.sink.file_jvm.filename=jobtracker-jvm-metrics.out
298+#jobtracker.sink.file_mapred.context=mapred
299+#jobtracker.sink.file_mapred.filename=jobtracker-mapred-metrics.out
300+
301+#tasktracker.sink.file.filename=tasktracker-metrics.out
302+
303+#maptask.sink.file.filename=maptask-metrics.out
304+
305+#reducetask.sink.file.filename=reducetask-metrics.out
306
307=== modified file 'tests/01-basic-deployment.py'
308--- tests/01-basic-deployment.py 2015-03-04 01:00:27 +0000
309+++ tests/01-basic-deployment.py 2015-09-15 17:35:17 +0000
310@@ -16,8 +16,8 @@
311 def setUpClass(cls):
312 cls.d = amulet.Deployment(series='trusty')
313 cls.d.add('apache-hadoop-yarn-master')
314- cls.d.setup(timeout=9000)
315- cls.d.sentry.wait()
316+ cls.d.setup(timeout=900)
317+ cls.d.sentry.wait(timeout=1800)
318 cls.unit = cls.d.sentry.unit['apache-hadoop-yarn-master/0']
319
320 def test_deploy(self):

Subscribers

People subscribed via source and target branches