Merge lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/ganglia_metrics into lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk

Proposed by Andrew McLeod
Status: Merged
Merged at revision: 103
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/ganglia_metrics
Merge into: lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk
Diff against target: 252 lines (+178/-1) (has conflicts)
8 files modified
DEV-README.md (+5/-0)
config.yaml (+4/-0)
hooks/callbacks.py (+31/-1)
hooks/common.py (+15/-0)
hooks/ganglia-relation-broken (+26/-0)
hooks/ganglia-relation-changed (+26/-0)
metadata.yaml (+2/-0)
templates/hadoop-metrics2.properties.j2 (+69/-0)
Text conflict in DEV-README.md
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/ganglia_metrics
Reviewer Review Type Date Requested Status
Juju Big Data Development Pending
Review via email: mp+268538@code.launchpad.net

Description of the change

Added capability to send ganglia metrics if relationship exists

To post a comment you must log in.
93. By Andrew McLeod

added ganglia_metrics config value, default false

94. By Andrew McLeod

changed restart-hdfs to restart-yarn

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'DEV-README.md'
2--- DEV-README.md 2015-08-20 21:59:00 +0000
3+++ DEV-README.md 2015-08-24 15:13:00 +0000
4@@ -77,8 +77,13 @@
5
6 ## Manual Deployment
7
8+<<<<<<< TREE
9 The easiest way to deploy an Apache Hadoop platform is to use one of
10 the [apache bundles](https://jujucharms.com/u/bigdata-charmers/#bundles).
11+=======
12+The easiest way to deploy the core Apache Hadoop platform is to use one of
13+the [apache bundles](https://jujucharms.com/u/bigdata-charmers/#bundles).
14+>>>>>>> MERGE-SOURCE
15 However, to manually deploy the base Apache Hadoop platform without using one
16 of the bundles, you can use the following:
17
18
19=== modified file 'config.yaml'
20--- config.yaml 2015-04-03 16:49:17 +0000
21+++ config.yaml 2015-08-24 15:13:00 +0000
22@@ -4,3 +4,7 @@
23 default: ''
24 description: |
25 URL from which to fetch resources (e.g., Hadoop binaries) instead of Launchpad.
26+ ganglia_metrics:
27+ type: boolean
28+ default: false
29+ description: if relationship exists to ganglia:master, enable ganglia metrics
30
31=== modified file 'hooks/callbacks.py'
32--- hooks/callbacks.py 2015-08-10 22:59:54 +0000
33+++ hooks/callbacks.py 2015-08-24 15:13:00 +0000
34@@ -18,7 +18,10 @@
35
36 from charmhelpers.core import hookenv
37 from charmhelpers.core import unitdata
38-from jujubigdata.relations import NameNode, NodeManager
39+from jujubigdata.relations import NameNode, NodeManager, Ganglia
40+from charmhelpers.core.templating import render
41+from functools import partial
42+from subprocess import check_call
43
44
45 def update_blocked_status():
46@@ -50,3 +53,30 @@
47
48 def clear_active_flag():
49 unitdata.kv().set('charm.active', False)
50+
51+
52+def conf_ganglia_metrics(purgeConf=False):
53+ """
54+ Send hadoop specific metrics to a ganglia server
55+ """
56+ config = hookenv.config()
57+ ganglia_metrics = config['ganglia_metrics'] and not purgeConf
58+ ganglia_metrics_changed = ganglia_metrics != unitdata.kv().get('ganglia_metrics', False)
59+ unitdata.kv().set('ganglia_metrics', ganglia_metrics)
60+ comment = '#' if not ganglia_metrics else ''
61+ ganglia_host = 'UNSET_BY_JUJU' if not ganglia_metrics else Ganglia().host()
62+ ganglia_sink_str = comment + '*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31'
63+ hookenv.log("Configuring ganglia sink in /etc/hadoop/conf/hadoop-metrics2.properties", level=None)
64+ render(
65+ source='hadoop-metrics2.properties.j2',
66+ target='/etc/hadoop/conf/hadoop-metrics2.properties',
67+ context={
68+ 'ganglia_host': ganglia_host,
69+ 'ganglia_sink_str': ganglia_sink_str,
70+ },
71+ ),
72+ if ganglia_metrics_changed:
73+ check_call(['actions/restart-yarn'])
74+
75+
76+purge_ganglia_metrics = partial(conf_ganglia_metrics, purgeConf=True)
77
78=== modified file 'hooks/common.py'
79--- hooks/common.py 2015-08-10 22:59:54 +0000
80+++ hooks/common.py 2015-08-24 15:13:00 +0000
81@@ -116,6 +116,21 @@
82 callbacks.update_blocked_status,
83 ],
84 },
85+ {
86+ 'name': 'ganglia',
87+ 'requires': [
88+ hadoop.is_installed,
89+ jujubigdata.relations.Ganglia,
90+ ],
91+ 'callbacks': [
92+ callbacks.conf_ganglia_metrics,
93+ ],
94+ 'cleanup': [
95+ callbacks.purge_ganglia_metrics,
96+ ],
97+
98+ },
99+
100 ])
101 manager.manage()
102
103
104=== added file 'hooks/ganglia-relation-broken'
105--- hooks/ganglia-relation-broken 1970-01-01 00:00:00 +0000
106+++ hooks/ganglia-relation-broken 2015-08-24 15:13:00 +0000
107@@ -0,0 +1,26 @@
108+#!/usr/bin/env python
109+# Licensed under the Apache License, Version 2.0 (the "License");
110+# you may not use this file except in compliance with the License.
111+# You may obtain a copy of the License at
112+#
113+# http://www.apache.org/licenses/LICENSE-2.0
114+#
115+# Unless required by applicable law or agreed to in writing, software
116+# distributed under the License is distributed on an "AS IS" BASIS,
117+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
118+# See the License for the specific language governing permissions and
119+# limitations under the License.
120+
121+"""
122+All hooks in this charm are managed by the Charm Framework.
123+The framework helps manage dependencies and preconditions to ensure that
124+steps are only executed when they can be successful. As such, no additional
125+code should be added to this hook; instead, please integrate new functionality
126+into the 'callbacks' list in hooks/common.py. New callbacks can be placed
127+in hooks/callbacks.py, if necessary.
128+
129+See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html
130+for more information.
131+"""
132+import common
133+common.manage()
134
135=== added file 'hooks/ganglia-relation-changed'
136--- hooks/ganglia-relation-changed 1970-01-01 00:00:00 +0000
137+++ hooks/ganglia-relation-changed 2015-08-24 15:13:00 +0000
138@@ -0,0 +1,26 @@
139+#!/usr/bin/env python
140+# Licensed under the Apache License, Version 2.0 (the "License");
141+# you may not use this file except in compliance with the License.
142+# You may obtain a copy of the License at
143+#
144+# http://www.apache.org/licenses/LICENSE-2.0
145+#
146+# Unless required by applicable law or agreed to in writing, software
147+# distributed under the License is distributed on an "AS IS" BASIS,
148+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
149+# See the License for the specific language governing permissions and
150+# limitations under the License.
151+
152+"""
153+All hooks in this charm are managed by the Charm Framework.
154+The framework helps manage dependencies and preconditions to ensure that
155+steps are only executed when they can be successful. As such, no additional
156+code should be added to this hook; instead, please integrate new functionality
157+into the 'callbacks' list in hooks/common.py. New callbacks can be placed
158+in hooks/callbacks.py, if necessary.
159+
160+See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html
161+for more information.
162+"""
163+import common
164+common.manage()
165
166=== modified file 'metadata.yaml'
167--- metadata.yaml 2015-04-23 20:48:28 +0000
168+++ metadata.yaml 2015-08-24 15:13:00 +0000
169@@ -10,6 +10,8 @@
170 provides:
171 resourcemanager:
172 interface: mapred
173+ ganglia:
174+ interface: monitor
175 requires:
176 namenode:
177 interface: dfs
178
179=== added directory 'templates'
180=== added file 'templates/hadoop-metrics2.properties.j2'
181--- templates/hadoop-metrics2.properties.j2 1970-01-01 00:00:00 +0000
182+++ templates/hadoop-metrics2.properties.j2 2015-08-24 15:13:00 +0000
183@@ -0,0 +1,69 @@
184+#
185+# Licensed to the Apache Software Foundation (ASF) under one or more
186+# contributor license agreements. See the NOTICE file distributed with
187+# this work for additional information regarding copyright ownership.
188+# The ASF licenses this file to You under the Apache License, Version 2.0
189+# (the "License"); you may not use this file except in compliance with
190+# the License. You may obtain a copy of the License at
191+#
192+# http://www.apache.org/licenses/LICENSE-2.0
193+#
194+# Unless required by applicable law or agreed to in writing, software
195+# distributed under the License is distributed on an "AS IS" BASIS,
196+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
197+# See the License for the specific language governing permissions and
198+# limitations under the License.
199+#
200+
201+# syntax: [prefix].[source|sink].[instance].[options]
202+# See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
203+
204+*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
205+# default sampling period, in seconds
206+*.period=10
207+
208+# Defining sink for Ganglia 3.1
209+{{ ganglia_sink_str }}
210+
211+# Default polling period for GangliaSink
212+*.sink.ganglia.period=10
213+
214+# default for supportsparse is false
215+*.sink.ganglia.supportsparse=true
216+
217+# Directing output to ganglia servers
218+
219+*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
220+*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
221+
222+namenode.sink.ganglia.servers={{ ganglia_host }}:8649
223+datanode.sink.ganglia.servers={{ ganglia_host }}:8649
224+jobtracker.sink.ganglia.servers={{ ganglia_host }}:8649
225+tasktracker.sink.ganglia.servers={{ ganglia_host }}:8649
226+maptask.sink.ganglia.servers={{ ganglia_host }}:8649
227+reducetask.sink.ganglia.servers={{ ganglia_host }}:8649
228+resourcemanager.sink.ganglia.servers={{ ganglia_host }}:8649
229+nodemanager.sink.ganglia.servers={{ ganglia_host }}:8649
230+historyserver.sink.ganglia.servers={{ ganglia_host }}:8649
231+journalnode.sink.ganglia.servers={{ ganglia_host }}:8649
232+resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
233+
234+# The namenode-metrics.out will contain metrics from all context
235+#namenode.sink.file.filename=namenode-metrics.out
236+# Specifying a special sampling period for namenode:
237+#namenode.sink.*.period=8
238+
239+#datanode.sink.file.filename=datanode-metrics.out
240+
241+# the following example split metrics of different
242+# context to different sinks (in this case files)
243+#jobtracker.sink.file_jvm.context=jvm
244+#jobtracker.sink.file_jvm.filename=jobtracker-jvm-metrics.out
245+#jobtracker.sink.file_mapred.context=mapred
246+#jobtracker.sink.file_mapred.filename=jobtracker-mapred-metrics.out
247+
248+#tasktracker.sink.file.filename=tasktracker-metrics.out
249+
250+#maptask.sink.file.filename=maptask-metrics.out
251+
252+#reducetask.sink.file.filename=reducetask-metrics.out

Subscribers

People subscribed via source and target branches

to all changes: