Merge lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk into lp:charms/trusty/apache-hadoop-yarn-master
- Trusty Tahr (14.04)
- trunk
- Merge into trunk
Proposed by
Kevin W Monroe
Status: | Merged |
---|---|
Merged at revision: | 90 |
Proposed branch: | lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk |
Merge into: | lp:charms/trusty/apache-hadoop-yarn-master |
Diff against target: |
320 lines (+222/-10) (has conflicts) 9 files modified
README.md (+21/-0) config.yaml (+10/-0) hooks/callbacks.py (+42/-5) hooks/common.py (+24/-3) hooks/ganglia-relation-broken (+26/-0) hooks/ganglia-relation-changed (+26/-0) metadata.yaml (+2/-0) templates/hadoop-metrics2.properties.j2 (+69/-0) tests/01-basic-deployment.py (+2/-2) Text conflict in hooks/callbacks.py Text conflict in hooks/common.py |
To merge this branch: | bzr merge lp:~bigdata-dev/charms/trusty/apache-hadoop-yarn-master/trunk |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Kevin W Monroe | Approve | ||
Review via email: mp+271165@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
Kevin W Monroe (kwmonroe) : | # |
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'README.md' | |||
2 | --- README.md 2015-08-24 23:16:05 +0000 | |||
3 | +++ README.md 2015-09-15 17:35:17 +0000 | |||
4 | @@ -27,6 +27,24 @@ | |||
5 | 27 | hadoop jar my-job.jar | 27 | hadoop jar my-job.jar |
6 | 28 | 28 | ||
7 | 29 | 29 | ||
8 | 30 | ## Monitoring | ||
9 | 31 | |||
10 | 32 | This charm supports monitoring via Ganglia. To enable monitoring, you must | ||
11 | 33 | do **both** of the following (the order does not matter): | ||
12 | 34 | |||
13 | 35 | * Add a relation to the [Ganglia charm][] via the `:master` relation | ||
14 | 36 | * Enable the `ganglia_metrics` config option | ||
15 | 37 | |||
16 | 38 | For example: | ||
17 | 39 | |||
18 | 40 | juju add-relation yarn-master ganglia:master | ||
19 | 41 | juju set yarn-master ganglia_metrics=true | ||
20 | 42 | |||
21 | 43 | Enabling monitoring will restart the ResourceManager and all NodeManager | ||
22 | 44 | components on all of the related compute-slaves. Take care to ensure that there | ||
23 | 45 | are no running jobs when enabling monitoring. | ||
24 | 46 | |||
25 | 47 | |||
26 | 30 | ## Deploying in Network-Restricted Environments | 48 | ## Deploying in Network-Restricted Environments |
27 | 31 | 49 | ||
28 | 32 | The Apache Hadoop charms can be deployed in environments with limited network | 50 | The Apache Hadoop charms can be deployed in environments with limited network |
29 | @@ -75,3 +93,6 @@ | |||
30 | 75 | - [Apache Hadoop bug trackers](http://hadoop.apache.org/issue_tracking.html) | 93 | - [Apache Hadoop bug trackers](http://hadoop.apache.org/issue_tracking.html) |
31 | 76 | - [Apache Hadoop mailing lists](http://hadoop.apache.org/mailing_lists.html) | 94 | - [Apache Hadoop mailing lists](http://hadoop.apache.org/mailing_lists.html) |
32 | 77 | - [Apache Hadoop Juju Charm](http://jujucharms.com/?text=hadoop) | 95 | - [Apache Hadoop Juju Charm](http://jujucharms.com/?text=hadoop) |
33 | 96 | |||
34 | 97 | |||
35 | 98 | [Ganglia charm]: http://jujucharms.com/ganglia/ | ||
36 | 78 | 99 | ||
37 | === modified file 'config.yaml' | |||
38 | --- config.yaml 2015-04-03 16:49:17 +0000 | |||
39 | +++ config.yaml 2015-09-15 17:35:17 +0000 | |||
40 | @@ -4,3 +4,13 @@ | |||
41 | 4 | default: '' | 4 | default: '' |
42 | 5 | description: | | 5 | description: | |
43 | 6 | URL from which to fetch resources (e.g., Hadoop binaries) instead of Launchpad. | 6 | URL from which to fetch resources (e.g., Hadoop binaries) instead of Launchpad. |
44 | 7 | ganglia_metrics: | ||
45 | 8 | type: boolean | ||
46 | 9 | default: false | ||
47 | 10 | description: | | ||
48 | 11 | Enable metrics using Ganglia. Note that enabling this option will | ||
49 | 12 | have no effect if the service is not related to a ganglia service | ||
50 | 13 | via the ganglia:master relation. Enabling this option with the | ||
51 | 14 | relation will issue a restart to the ResourceManager and all | ||
52 | 15 | NodeManager components on all related compute-slaves. | ||
53 | 16 | See the README for more information. | ||
54 | 7 | 17 | ||
55 | === modified file 'hooks/callbacks.py' | |||
56 | --- hooks/callbacks.py 2015-08-10 22:59:54 +0000 | |||
57 | +++ hooks/callbacks.py 2015-09-15 17:35:17 +0000 | |||
58 | @@ -18,7 +18,10 @@ | |||
59 | 18 | 18 | ||
60 | 19 | from charmhelpers.core import hookenv | 19 | from charmhelpers.core import hookenv |
61 | 20 | from charmhelpers.core import unitdata | 20 | from charmhelpers.core import unitdata |
63 | 21 | from jujubigdata.relations import NameNode, NodeManager | 21 | from jujubigdata.relations import NameNode, NodeManager, Ganglia |
64 | 22 | from charmhelpers.core.templating import render | ||
65 | 23 | from functools import partial | ||
66 | 24 | from subprocess import check_call | ||
67 | 22 | 25 | ||
68 | 23 | 26 | ||
69 | 24 | def update_blocked_status(): | 27 | def update_blocked_status(): |
70 | @@ -46,7 +49,41 @@ | |||
71 | 46 | hookenv.status_set('waiting', 'Waiting for compute slaves to provide NodeManagers') | 49 | hookenv.status_set('waiting', 'Waiting for compute slaves to provide NodeManagers') |
72 | 47 | else: | 50 | else: |
73 | 48 | hookenv.status_set('blocked', 'Waiting for relation to compute slaves') | 51 | hookenv.status_set('blocked', 'Waiting for relation to compute slaves') |
78 | 49 | 52 | <<<<<<< TREE | |
79 | 50 | 53 | ||
80 | 51 | def clear_active_flag(): | 54 | |
81 | 52 | unitdata.kv().set('charm.active', False) | 55 | def clear_active_flag(): |
82 | 56 | unitdata.kv().set('charm.active', False) | ||
83 | 57 | ======= | ||
84 | 58 | |||
85 | 59 | |||
86 | 60 | def clear_active_flag(): | ||
87 | 61 | unitdata.kv().set('charm.active', False) | ||
88 | 62 | |||
89 | 63 | |||
90 | 64 | def conf_ganglia_metrics(purgeConf=False): | ||
91 | 65 | """ | ||
92 | 66 | Send hadoop specific metrics to a ganglia server | ||
93 | 67 | """ | ||
94 | 68 | config = hookenv.config() | ||
95 | 69 | ganglia_metrics = config['ganglia_metrics'] and not purgeConf | ||
96 | 70 | ganglia_metrics_changed = ganglia_metrics != unitdata.kv().get('ganglia_metrics', False) | ||
97 | 71 | unitdata.kv().set('ganglia_metrics', ganglia_metrics) | ||
98 | 72 | comment = '#' if not ganglia_metrics else '' | ||
99 | 73 | ganglia_host = 'UNSET_BY_JUJU' if not ganglia_metrics else Ganglia().host() | ||
100 | 74 | ganglia_sink_str = comment + '*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31' | ||
101 | 75 | hookenv.log("Configuring ganglia sink in /etc/hadoop/conf/hadoop-metrics2.properties", level=None) | ||
102 | 76 | render( | ||
103 | 77 | source='hadoop-metrics2.properties.j2', | ||
104 | 78 | target='/etc/hadoop/conf/hadoop-metrics2.properties', | ||
105 | 79 | context={ | ||
106 | 80 | 'ganglia_host': ganglia_host, | ||
107 | 81 | 'ganglia_sink_str': ganglia_sink_str, | ||
108 | 82 | }, | ||
109 | 83 | ), | ||
110 | 84 | if ganglia_metrics_changed: | ||
111 | 85 | check_call(['actions/restart-yarn']) | ||
112 | 86 | |||
113 | 87 | |||
114 | 88 | purge_ganglia_metrics = partial(conf_ganglia_metrics, purgeConf=True) | ||
115 | 89 | >>>>>>> MERGE-SOURCE | ||
116 | 53 | 90 | ||
117 | === modified file 'hooks/common.py' (properties changed: +x to -x) | |||
118 | --- hooks/common.py 2015-08-10 22:59:54 +0000 | |||
119 | +++ hooks/common.py 2015-09-15 17:35:17 +0000 | |||
120 | @@ -113,9 +113,30 @@ | |||
121 | 113 | charmframework.helpers.close_ports(dist_config.exposed_ports('yarn-master')), | 113 | charmframework.helpers.close_ports(dist_config.exposed_ports('yarn-master')), |
122 | 114 | yarn.stop_resourcemanager, | 114 | yarn.stop_resourcemanager, |
123 | 115 | yarn.stop_jobhistory, | 115 | yarn.stop_jobhistory, |
127 | 116 | callbacks.update_blocked_status, | 116 | <<<<<<< TREE |
128 | 117 | ], | 117 | callbacks.update_blocked_status, |
129 | 118 | }, | 118 | ], |
130 | 119 | }, | ||
131 | 120 | ======= | ||
132 | 121 | callbacks.update_blocked_status, | ||
133 | 122 | ], | ||
134 | 123 | }, | ||
135 | 124 | { | ||
136 | 125 | 'name': 'ganglia', | ||
137 | 126 | 'requires': [ | ||
138 | 127 | hadoop.is_installed, | ||
139 | 128 | jujubigdata.relations.Ganglia, | ||
140 | 129 | ], | ||
141 | 130 | 'callbacks': [ | ||
142 | 131 | callbacks.conf_ganglia_metrics, | ||
143 | 132 | ], | ||
144 | 133 | 'cleanup': [ | ||
145 | 134 | callbacks.purge_ganglia_metrics, | ||
146 | 135 | ], | ||
147 | 136 | |||
148 | 137 | }, | ||
149 | 138 | |||
150 | 139 | >>>>>>> MERGE-SOURCE | ||
151 | 119 | ]) | 140 | ]) |
152 | 120 | manager.manage() | 141 | manager.manage() |
153 | 121 | 142 | ||
154 | 122 | 143 | ||
155 | === added file 'hooks/ganglia-relation-broken' | |||
156 | --- hooks/ganglia-relation-broken 1970-01-01 00:00:00 +0000 | |||
157 | +++ hooks/ganglia-relation-broken 2015-09-15 17:35:17 +0000 | |||
158 | @@ -0,0 +1,26 @@ | |||
159 | 1 | #!/usr/bin/env python | ||
160 | 2 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
161 | 3 | # you may not use this file except in compliance with the License. | ||
162 | 4 | # You may obtain a copy of the License at | ||
163 | 5 | # | ||
164 | 6 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
165 | 7 | # | ||
166 | 8 | # Unless required by applicable law or agreed to in writing, software | ||
167 | 9 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
168 | 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
169 | 11 | # See the License for the specific language governing permissions and | ||
170 | 12 | # limitations under the License. | ||
171 | 13 | |||
172 | 14 | """ | ||
173 | 15 | All hooks in this charm are managed by the Charm Framework. | ||
174 | 16 | The framework helps manage dependencies and preconditions to ensure that | ||
175 | 17 | steps are only executed when they can be successful. As such, no additional | ||
176 | 18 | code should be added to this hook; instead, please integrate new functionality | ||
177 | 19 | into the 'callbacks' list in hooks/common.py. New callbacks can be placed | ||
178 | 20 | in hooks/callbacks.py, if necessary. | ||
179 | 21 | |||
180 | 22 | See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html | ||
181 | 23 | for more information. | ||
182 | 24 | """ | ||
183 | 25 | import common | ||
184 | 26 | common.manage() | ||
185 | 0 | 27 | ||
186 | === added file 'hooks/ganglia-relation-changed' | |||
187 | --- hooks/ganglia-relation-changed 1970-01-01 00:00:00 +0000 | |||
188 | +++ hooks/ganglia-relation-changed 2015-09-15 17:35:17 +0000 | |||
189 | @@ -0,0 +1,26 @@ | |||
190 | 1 | #!/usr/bin/env python | ||
191 | 2 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
192 | 3 | # you may not use this file except in compliance with the License. | ||
193 | 4 | # You may obtain a copy of the License at | ||
194 | 5 | # | ||
195 | 6 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
196 | 7 | # | ||
197 | 8 | # Unless required by applicable law or agreed to in writing, software | ||
198 | 9 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
199 | 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
200 | 11 | # See the License for the specific language governing permissions and | ||
201 | 12 | # limitations under the License. | ||
202 | 13 | |||
203 | 14 | """ | ||
204 | 15 | All hooks in this charm are managed by the Charm Framework. | ||
205 | 16 | The framework helps manage dependencies and preconditions to ensure that | ||
206 | 17 | steps are only executed when they can be successful. As such, no additional | ||
207 | 18 | code should be added to this hook; instead, please integrate new functionality | ||
208 | 19 | into the 'callbacks' list in hooks/common.py. New callbacks can be placed | ||
209 | 20 | in hooks/callbacks.py, if necessary. | ||
210 | 21 | |||
211 | 22 | See http://big-data-charm-helpers.readthedocs.org/en/latest/examples/framework.html | ||
212 | 23 | for more information. | ||
213 | 24 | """ | ||
214 | 25 | import common | ||
215 | 26 | common.manage() | ||
216 | 0 | 27 | ||
217 | === modified file 'metadata.yaml' | |||
218 | --- metadata.yaml 2015-04-23 20:48:28 +0000 | |||
219 | +++ metadata.yaml 2015-09-15 17:35:17 +0000 | |||
220 | @@ -10,6 +10,8 @@ | |||
221 | 10 | provides: | 10 | provides: |
222 | 11 | resourcemanager: | 11 | resourcemanager: |
223 | 12 | interface: mapred | 12 | interface: mapred |
224 | 13 | ganglia: | ||
225 | 14 | interface: monitor | ||
226 | 13 | requires: | 15 | requires: |
227 | 14 | namenode: | 16 | namenode: |
228 | 15 | interface: dfs | 17 | interface: dfs |
229 | 16 | 18 | ||
230 | === added file 'resources/python/jujuresources-0.2.11.tar.gz' | |||
231 | 17 | Binary files resources/python/jujuresources-0.2.11.tar.gz 1970-01-01 00:00:00 +0000 and resources/python/jujuresources-0.2.11.tar.gz 2015-09-15 17:35:17 +0000 differ | 19 | Binary files resources/python/jujuresources-0.2.11.tar.gz 1970-01-01 00:00:00 +0000 and resources/python/jujuresources-0.2.11.tar.gz 2015-09-15 17:35:17 +0000 differ |
232 | === added directory 'templates' | |||
233 | === added file 'templates/hadoop-metrics2.properties.j2' | |||
234 | --- templates/hadoop-metrics2.properties.j2 1970-01-01 00:00:00 +0000 | |||
235 | +++ templates/hadoop-metrics2.properties.j2 2015-09-15 17:35:17 +0000 | |||
236 | @@ -0,0 +1,69 @@ | |||
237 | 1 | # | ||
238 | 2 | # Licensed to the Apache Software Foundation (ASF) under one or more | ||
239 | 3 | # contributor license agreements. See the NOTICE file distributed with | ||
240 | 4 | # this work for additional information regarding copyright ownership. | ||
241 | 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 | ||
242 | 6 | # (the "License"); you may not use this file except in compliance with | ||
243 | 7 | # the License. You may obtain a copy of the License at | ||
244 | 8 | # | ||
245 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
246 | 10 | # | ||
247 | 11 | # Unless required by applicable law or agreed to in writing, software | ||
248 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
249 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
250 | 14 | # See the License for the specific language governing permissions and | ||
251 | 15 | # limitations under the License. | ||
252 | 16 | # | ||
253 | 17 | |||
254 | 18 | # syntax: [prefix].[source|sink].[instance].[options] | ||
255 | 19 | # See javadoc of package-info.java for org.apache.hadoop.metrics2 for details | ||
256 | 20 | |||
257 | 21 | *.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink | ||
258 | 22 | # default sampling period, in seconds | ||
259 | 23 | *.period=10 | ||
260 | 24 | |||
261 | 25 | # Defining sink for Ganglia 3.1 | ||
262 | 26 | {{ ganglia_sink_str }} | ||
263 | 27 | |||
264 | 28 | # Default polling period for GangliaSink | ||
265 | 29 | *.sink.ganglia.period=10 | ||
266 | 30 | |||
267 | 31 | # default for supportsparse is false | ||
268 | 32 | *.sink.ganglia.supportsparse=true | ||
269 | 33 | |||
270 | 34 | # Directing output to ganglia servers | ||
271 | 35 | |||
272 | 36 | *.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both | ||
273 | 37 | *.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40 | ||
274 | 38 | |||
275 | 39 | namenode.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
276 | 40 | datanode.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
277 | 41 | jobtracker.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
278 | 42 | tasktracker.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
279 | 43 | maptask.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
280 | 44 | reducetask.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
281 | 45 | resourcemanager.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
282 | 46 | nodemanager.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
283 | 47 | historyserver.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
284 | 48 | journalnode.sink.ganglia.servers={{ ganglia_host }}:8649 | ||
285 | 49 | resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue | ||
286 | 50 | |||
287 | 51 | # The namenode-metrics.out will contain metrics from all context | ||
288 | 52 | #namenode.sink.file.filename=namenode-metrics.out | ||
289 | 53 | # Specifying a special sampling period for namenode: | ||
290 | 54 | #namenode.sink.*.period=8 | ||
291 | 55 | |||
292 | 56 | #datanode.sink.file.filename=datanode-metrics.out | ||
293 | 57 | |||
294 | 58 | # the following example split metrics of different | ||
295 | 59 | # context to different sinks (in this case files) | ||
296 | 60 | #jobtracker.sink.file_jvm.context=jvm | ||
297 | 61 | #jobtracker.sink.file_jvm.filename=jobtracker-jvm-metrics.out | ||
298 | 62 | #jobtracker.sink.file_mapred.context=mapred | ||
299 | 63 | #jobtracker.sink.file_mapred.filename=jobtracker-mapred-metrics.out | ||
300 | 64 | |||
301 | 65 | #tasktracker.sink.file.filename=tasktracker-metrics.out | ||
302 | 66 | |||
303 | 67 | #maptask.sink.file.filename=maptask-metrics.out | ||
304 | 68 | |||
305 | 69 | #reducetask.sink.file.filename=reducetask-metrics.out | ||
306 | 0 | 70 | ||
307 | === modified file 'tests/01-basic-deployment.py' | |||
308 | --- tests/01-basic-deployment.py 2015-03-04 01:00:27 +0000 | |||
309 | +++ tests/01-basic-deployment.py 2015-09-15 17:35:17 +0000 | |||
310 | @@ -16,8 +16,8 @@ | |||
311 | 16 | def setUpClass(cls): | 16 | def setUpClass(cls): |
312 | 17 | cls.d = amulet.Deployment(series='trusty') | 17 | cls.d = amulet.Deployment(series='trusty') |
313 | 18 | cls.d.add('apache-hadoop-yarn-master') | 18 | cls.d.add('apache-hadoop-yarn-master') |
316 | 19 | cls.d.setup(timeout=9000) | 19 | cls.d.setup(timeout=900) |
317 | 20 | cls.d.sentry.wait() | 20 | cls.d.sentry.wait(timeout=1800) |
318 | 21 | cls.unit = cls.d.sentry.unit['apache-hadoop-yarn-master/0'] | 21 | cls.unit = cls.d.sentry.unit['apache-hadoop-yarn-master/0'] |
319 | 22 | 22 | ||
320 | 23 | def test_deploy(self): | 23 | def test_deploy(self): |