Merge lp:~bigdata-dev/charms/trusty/apache-hadoop-hdfs-secondary/trunk into lp:charms/trusty/apache-hadoop-hdfs-secondary

Proposed by Kevin W Monroe
Status: Merged
Merged at revision: 69
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-hadoop-hdfs-secondary/trunk
Merge into: lp:charms/trusty/apache-hadoop-hdfs-secondary
Diff against target: 312 lines (+221/-10) (has conflicts)
9 files modified
README.md (+19/-0)
config.yaml (+10/-0)
hooks/callbacks.py (+43/-5)
hooks/common.py (+24/-3)
hooks/ganglia-relation-broken (+26/-0)
hooks/ganglia-relation-changed (+26/-0)
metadata.yaml (+2/-0)
templates/hadoop-metrics2.properties.j2 (+69/-0)
tests/01-basic-deployment.py (+2/-2)
Text conflict in hooks/callbacks.py
Text conflict in hooks/common.py
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-hadoop-hdfs-secondary/trunk
Reviewer: Kevin W Monroe (status: Approve)
Review via email: mp+271163@code.launchpad.net
Kevin W Monroe (kwmonroe):
review: Approve

Preview Diff

=== modified file 'README.md'
--- README.md	2015-08-24 23:12:12 +0000
+++ README.md	2015-09-15 17:34:51 +0000
@@ -29,6 +29,25 @@
     hadoop jar my-job.jar
 
 
+## Monitoring
+
+This charm supports monitoring via Ganglia. To enable monitoring, you must
+do **both** of the following (the order does not matter):
+
+ * Add a relation to the [Ganglia charm][] via the `:master` relation
+ * Enable the `ganglia_metrics` config option
+
+You must **also** enable metrics on [hdfs-master][] to trigger the restart
+of the SecondaryNameNode component so that it begins collecting metrics.
+
+For example:
+
+    juju add-relation secondary-namenode ganglia:master
+    juju add-relation hdfs-master ganglia:master
+    juju set secondary-namenode ganglia_metrics=true
+    juju set hdfs-master ganglia_metrics=true
+
+
 ## Deploying in Network-Restricted Environments
 
 The Apache Hadoop charms can be deployed in environments with limited network

=== modified file 'config.yaml'
--- config.yaml	2015-04-03 16:49:16 +0000
+++ config.yaml	2015-09-15 17:34:51 +0000
@@ -10,3 +10,13 @@
     default: ''
     description: |
       URL from which to fetch resources (e.g., Hadoop binaries) instead of Launchpad.
+  ganglia_metrics:
+    type: boolean
+    default: false
+    description: |
+      Enable metrics using Ganglia. Note that enabling this option has no
+      effect unless the service is related to a ganglia service via the
+      ganglia:master relation. Enabling this option does *not* restart
+      the SecondaryNameNode component, so you must also enable metrics
+      on the hdfs-master service.
+      See the README for more information.

=== modified file 'hooks/callbacks.py'
--- hooks/callbacks.py	2015-08-10 22:58:48 +0000
+++ hooks/callbacks.py	2015-09-15 17:34:51 +0000
@@ -18,7 +18,10 @@
 
 from charmhelpers.core import hookenv
 from charmhelpers.core import unitdata
-from jujubigdata.relations import NameNodeMaster
+from jujubigdata.relations import NameNodeMaster, Ganglia
+from charmhelpers.core.templating import render
+from functools import partial
+from subprocess import check_call
 
 
 def update_blocked_status():
@@ -40,7 +43,42 @@
 def update_active_status():
     unitdata.kv().set('charm.active', True)
     hookenv.status_set('active', 'Ready')
-
-
-def clear_active_flag():
-    unitdata.kv().set('charm.active', False)
+<<<<<<< TREE
+
+
+def clear_active_flag():
+    unitdata.kv().set('charm.active', False)
+=======
+
+
+def clear_active_flag():
+    unitdata.kv().set('charm.active', False)
+
+
+def conf_ganglia_metrics(purgeConf=False):
+    """
+    Send Hadoop-specific metrics to a Ganglia server.
+    """
+    config = hookenv.config()
+    ganglia_metrics = config['ganglia_metrics'] and not purgeConf
+    ganglia_metrics_changed = ganglia_metrics != unitdata.kv().get('ganglia_metrics', False)
+    unitdata.kv().set('ganglia_metrics', ganglia_metrics)
+    comment = '#' if not ganglia_metrics else ''
+    ganglia_host = 'UNSET_BY_JUJU' if not ganglia_metrics else Ganglia().host()
+    ganglia_sink_str = comment + '*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31'
+    hookenv.log("Configuring ganglia sink in /etc/hadoop/conf/hadoop-metrics2.properties", level=None)
+    render(
+        source='hadoop-metrics2.properties.j2',
+        target='/etc/hadoop/conf/hadoop-metrics2.properties',
+        context={
+            'ganglia_host': ganglia_host,
+            'ganglia_sink_str': ganglia_sink_str,
+        },
+    )
+    if ganglia_metrics_changed:
+        # check_call(['actions/restart-hdfs'])
+        # RESTART-HDFS TO BE IMPLEMENTED
+        hookenv.log("please restart hdfs manually", level=None)
+
+purge_ganglia_metrics = partial(conf_ganglia_metrics, purgeConf=True)
+>>>>>>> MERGE-SOURCE

=== modified file 'hooks/common.py' (properties changed: +x to -x)
--- hooks/common.py	2015-08-10 22:58:48 +0000
+++ hooks/common.py	2015-09-15 17:34:51 +0000
@@ -91,9 +91,30 @@
             callbacks.clear_active_flag,
             charmframework.helpers.close_ports(dist_config.exposed_ports('secondary-namenode')),
             hdfs.stop_secondarynamenode,
-            callbacks.update_blocked_status,
-        ],
-    },
+<<<<<<< TREE
+            callbacks.update_blocked_status,
+        ],
+    },
+=======
+            callbacks.update_blocked_status,
+        ],
+    },
+    {
+        'name': 'ganglia',
+        'requires': [
+            hadoop.is_installed,
+            jujubigdata.relations.Ganglia,
+        ],
+        'callbacks': [
+            callbacks.conf_ganglia_metrics,
+        ],
+        'cleanup': [
+            callbacks.purge_ganglia_metrics
+        ],
+
+    },
+
+>>>>>>> MERGE-SOURCE
 ])
 manager.manage()
 

=== added file 'hooks/ganglia-relation-broken'
--- hooks/ganglia-relation-broken	1970-01-01 00:00:00 +0000
+++ hooks/ganglia-relation-broken	2015-09-15 17:34:51 +0000
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+All hooks in this charm are managed by the Charm Framework.
+The framework helps manage dependencies and preconditions to ensure that
+steps are only executed when they can be successful. As such, no additional
+code should be added to this hook; instead, please integrate new functionality
+into the 'callbacks' list in hooks/common.py. New callbacks can be placed
+in hooks/callbacks.py, if necessary.
+
+See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html
+for more information.
+"""
+import common
+common.manage()

=== added file 'hooks/ganglia-relation-changed'
--- hooks/ganglia-relation-changed	1970-01-01 00:00:00 +0000
+++ hooks/ganglia-relation-changed	2015-09-15 17:34:51 +0000
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+All hooks in this charm are managed by the Charm Framework.
+The framework helps manage dependencies and preconditions to ensure that
+steps are only executed when they can be successful. As such, no additional
+code should be added to this hook; instead, please integrate new functionality
+into the 'callbacks' list in hooks/common.py. New callbacks can be placed
+in hooks/callbacks.py, if necessary.
+
+See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html
+for more information.
+"""
+import common
+common.manage()

=== modified file 'metadata.yaml'
--- metadata.yaml	2015-05-12 21:52:31 +0000
+++ metadata.yaml	2015-09-15 17:34:51 +0000
@@ -11,3 +11,5 @@
 provides:
   secondary:
     interface: dfs-secondary
+  ganglia:
+    interface: monitor

=== added file 'resources/python/jujuresources-0.2.11.tar.gz'
Binary files resources/python/jujuresources-0.2.11.tar.gz	1970-01-01 00:00:00 +0000 and resources/python/jujuresources-0.2.11.tar.gz	2015-09-15 17:34:51 +0000 differ
=== added directory 'templates'
=== added file 'templates/hadoop-metrics2.properties.j2'
--- templates/hadoop-metrics2.properties.j2	1970-01-01 00:00:00 +0000
+++ templates/hadoop-metrics2.properties.j2	2015-09-15 17:34:51 +0000
@@ -0,0 +1,69 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# syntax: [prefix].[source|sink].[instance].[options]
+# See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
+
+*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
+# default sampling period, in seconds
+*.period=10
+
+# Defining sink for Ganglia 3.1
+{{ ganglia_sink_str }}
+
+# Default polling period for GangliaSink
+*.sink.ganglia.period=10
+
+# default for supportsparse is false
+*.sink.ganglia.supportsparse=true
+
+# Directing output to ganglia servers
+
+*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
+*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
+
+namenode.sink.ganglia.servers={{ ganglia_host }}:8649
+datanode.sink.ganglia.servers={{ ganglia_host }}:8649
+jobtracker.sink.ganglia.servers={{ ganglia_host }}:8649
+tasktracker.sink.ganglia.servers={{ ganglia_host }}:8649
+maptask.sink.ganglia.servers={{ ganglia_host }}:8649
+reducetask.sink.ganglia.servers={{ ganglia_host }}:8649
+resourcemanager.sink.ganglia.servers={{ ganglia_host }}:8649
+nodemanager.sink.ganglia.servers={{ ganglia_host }}:8649
+historyserver.sink.ganglia.servers={{ ganglia_host }}:8649
+journalnode.sink.ganglia.servers={{ ganglia_host }}:8649
+resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
+
+# The namenode-metrics.out file will contain metrics from all contexts
+#namenode.sink.file.filename=namenode-metrics.out
+# Specifying a special sampling period for namenode:
+#namenode.sink.*.period=8
+
+#datanode.sink.file.filename=datanode-metrics.out
+
+# The following example splits metrics from different
+# contexts to different sinks (in this case files)
+#jobtracker.sink.file_jvm.context=jvm
+#jobtracker.sink.file_jvm.filename=jobtracker-jvm-metrics.out
+#jobtracker.sink.file_mapred.context=mapred
+#jobtracker.sink.file_mapred.filename=jobtracker-mapred-metrics.out
+
+#tasktracker.sink.file.filename=tasktracker-metrics.out
+
+#maptask.sink.file.filename=maptask-metrics.out
+
+#reducetask.sink.file.filename=reducetask-metrics.out

=== modified file 'tests/01-basic-deployment.py'
--- tests/01-basic-deployment.py	2015-03-04 01:42:08 +0000
+++ tests/01-basic-deployment.py	2015-09-15 17:34:51 +0000
@@ -16,8 +16,8 @@
     def setUpClass(cls):
         cls.d = amulet.Deployment(series='trusty')
         cls.d.add('apache-hadoop-hdfs-secondary')
-        cls.d.setup(timeout=9000)
-        cls.d.sentry.wait()
+        cls.d.setup(timeout=900)
+        cls.d.sentry.wait(timeout=1800)
         cls.unit = cls.d.sentry.unit['apache-hadoop-hdfs-secondary/0']
 
     def test_deploy(self):
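
One way to sanity-check the template above without deploying the charm is to render it directly with Jinja2. The snippet below is an illustrative sketch, not part of this merge: the jinja2 package, the literal host 10.0.0.5, and running from the charm root are all assumptions; in the charm itself the file is rendered by charmhelpers' render() inside conf_ganglia_metrics().

    # Hypothetical preview of the rendered hadoop-metrics2.properties.
    # Run from the charm root so the 'templates' directory is found.
    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader('templates'))
    template = env.get_template('hadoop-metrics2.properties.j2')

    # These values mirror what conf_ganglia_metrics passes when metrics are
    # enabled; 10.0.0.5 stands in for the host from the ganglia:master relation.
    sink = '*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31'
    print(template.render(ganglia_host='10.0.0.5', ganglia_sink_str=sink))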
