Merge lp:~bigdata-dev/charms/trusty/apache-zeppelin/trunk into lp:charms/trusty/apache-zeppelin

Proposed by Kevin W Monroe
Status: Merged
Merged at revision: 19
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-zeppelin/trunk
Merge into: lp:charms/trusty/apache-zeppelin
Diff against target: 341 lines (+80/-60)
2 files modified
resources/flume-tutorial/note.json (+79/-59)
tests/100-deploy-spark-hdfs-yarn (+1/-1)
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-zeppelin/trunk
Reviewer Review Type Date Requested Status
Kevin W Monroe Approve
Review via email: mp+271903@code.launchpad.net
Revision history for this message
Kevin W Monroe (kwmonroe) :
review: Approve

Preview Diff

=== modified file 'resources/flume-tutorial/note.json'
--- resources/flume-tutorial/note.json 2015-08-26 12:27:56 +0000
+++ resources/flume-tutorial/note.json 2015-09-22 03:37:16 +0000
@@ -1,7 +1,7 @@
 {
   "paragraphs": [
     {
-      "text": "%md\n## Welcome to Realtime Syslog Analytic Tutorial Powered by Juju.\n### In this live tutorial we will demonstrat three main phases of any big data solution:\n#### 1. Data Ingestion - Apache Flume-syslog -\u003e Apache flume-hdfs\n#### 2. Data Processing - Apache Spark+YARN\n#### 3. Data Visualization - SparkSQL",
+      "text": "%md\n## Welcome to the Realtime Syslog Analytics tutorial, powered by Juju.\n### In this live tutorial we will demonstrate three phases of a big data solution:\n#### 1. Data Ingestion: Flume-Syslog -\u003e Flume-HDFS\n#### 2. Data Processing: Spark+YARN\n#### 3. Data Visualization: SparkSQL+Zeppelin",
       "config": {
         "colWidth": 12.0,
         "graph": {
@@ -12,7 +12,8 @@
12 "values": [],12 "values": [],
13 "groups": [],13 "groups": [],
14 "scatter": {}14 "scatter": {}
15 }15 },
16 "tableHide": false
16 },17 },
17 "settings": {18 "settings": {
18 "params": {},19 "params": {},
@@ -23,17 +24,17 @@
23 "result": {24 "result": {
24 "code": "SUCCESS",25 "code": "SUCCESS",
25 "type": "HTML",26 "type": "HTML",
26 "msg": "\u003ch2\u003eWelcome to Realtime Syslog Analytic Tutorial Powered by Juju.\u003c/h2\u003e\n\u003ch3\u003eIn this live tutorial we will demonstrat three main phases of any big data solution:\u003c/h3\u003e\n\u003ch4\u003e1. Data Ingestion - Apache Flume-syslog -\u003e Apache flume-hdfs\u003c/h4\u003e\n\u003ch4\u003e2. Data Processing - Apache Spark+YARN\u003c/h4\u003e\n\u003ch4\u003e3. Data Visualization - SparkSQL\u003c/h4\u003e\n"27 "msg": "\u003ch2\u003eWelcome to the Realtime Syslog Analytics tutorial, powered by Juju.\u003c/h2\u003e\n\u003ch3\u003eIn this live tutorial we will demonstrate three phases of a big data solution:\u003c/h3\u003e\n\u003ch4\u003e1. Data Ingestion: Flume-Syslog -\u003e Flume-HDFS\u003c/h4\u003e\n\u003ch4\u003e2. Data Processing: Spark+YARN\u003c/h4\u003e\n\u003ch4\u003e3. Data Visualization: SparkSQL+Zeppelin\u003c/h4\u003e\n"
27 },28 },
28 "dateCreated": "Aug 20, 2015 3:14:39 PM",29 "dateCreated": "Aug 20, 2015 3:14:39 PM",
29 "dateStarted": "Aug 25, 2015 9:34:23 AM",30 "dateStarted": "Sep 18, 2015 6:25:43 PM",
30 "dateFinished": "Aug 25, 2015 9:34:23 AM",31 "dateFinished": "Sep 18, 2015 6:25:43 PM",
31 "status": "FINISHED",32 "status": "FINISHED",
32 "progressUpdateIntervalMs": 50033 "progressUpdateIntervalMs": 500
33 },34 },
34 {35 {
35 "title": "Data Ingestion",36 "title": "Generate Data and Verify Ingestion",
36 "text": "import sys.process._\n// Generate syslog messages by running an spakk\n\"/home/ubuntu/sparkpi.sh\" !!\n// Verify that FLume has collected and sent the syslog messages to HDFS\n\"hadoop fs -ls -R /user/flume/flume-syslog\" !!",37 "text": "%sh\n# Generate syslog messages by trying to ssh to the hdfs-master unit.\n# This will likely result in a \u0027publickey denied\u0027 error, but it will\n# be enough to trigger a syslog event on the hdfs-master.\nfor i in `seq 1 10`;\ndo\n ssh -oStrictHostKeyChecking\u003dno hdfs-master-0 uptime \u003e/dev/null 2\u003e\u00261\n sleep 1\ndone\n\n# Check if Flume has collected and sent the syslog messages to HDFS.\n# If no output is seen from this command, wait a few minutes and try\n# again. The amount of time between Flume ingesting the event and it\n# being available in HDFS is controlled by the \u0027roll_interval\u0027\n# configuration option in the flume-hdfs charm.\nhadoop fs -ls -R /user/flume/flume-syslog | tail",
37 "config": {38 "config": {
38 "colWidth": 12.0,39 "colWidth": 12.0,
39 "graph": {40 "graph": {
@@ -45,7 +46,9 @@
45 "groups": [],46 "groups": [],
46 "scatter": {}47 "scatter": {}
47 },48 },
48 "title": true49 "title": true,
50 "tableHide": false,
51 "editorHide": false
49 },52 },
50 "settings": {53 "settings": {
51 "params": {},54 "params": {},
@@ -56,16 +59,17 @@
56 "result": {59 "result": {
57 "code": "SUCCESS",60 "code": "SUCCESS",
58 "type": "TEXT",61 "type": "TEXT",
59 "msg": "" },62 "msg": "drwxr-xr-x - flume supergroup 0 2015-09-22 03:19 /user/flume/flume-syslog/2015-09-22\n-rw-r--r-- 3 flume supergroup 302 2015-09-22 03:12 /user/flume/flume-syslog/2015-09-22/FlumeData.1442891213622\n-rw-r--r-- 3 flume supergroup 2328 2015-09-22 03:19 /user/flume/flume-syslog/2015-09-22/FlumeData.1442891678998\n"
63 },
60 "dateCreated": "Aug 20, 2015 6:09:43 PM",64 "dateCreated": "Aug 20, 2015 6:09:43 PM",
61 "dateStarted": "Aug 24, 2015 10:51:34 PM",65 "dateStarted": "Sep 22, 2015 3:29:15 AM",
62 "dateFinished": "Aug 24, 2015 10:52:11 PM",66 "dateFinished": "Sep 22, 2015 3:29:28 AM",
63 "status": "FINISHED",67 "status": "FINISHED",
64 "progressUpdateIntervalMs": 50068 "progressUpdateIntervalMs": 500
65 },69 },
66 {70 {
67 "title": "Data Processing in python",71 "title": "Simple Data Processing with Scala",
68 "text": "%pyspark\nsc.textFile(\"/user/flume/flume-syslog/*/*/*\").filter(lambda l: \"sshd\" in l).collect()",72 "text": "// Output the number of sshd syslog events\nsc.textFile(\"/user/flume/flume-syslog/*/*\").filter(line \u003d\u003e line.contains(\"sshd\")).count()",
69 "config": {73 "config": {
70 "colWidth": 12.0,74 "colWidth": 12.0,
71 "graph": {75 "graph": {
@@ -90,16 +94,17 @@
90 "result": {94 "result": {
91 "code": "SUCCESS",95 "code": "SUCCESS",
92 "type": "TEXT",96 "type": "TEXT",
93 "msg": "" },97 "msg": "res12: Long \u003d 40\n"
98 },
94 "dateCreated": "Aug 20, 2015 6:11:00 PM",99 "dateCreated": "Aug 20, 2015 6:11:00 PM",
95 "dateStarted": "Aug 24, 2015 10:54:10 PM",100 "dateStarted": "Sep 22, 2015 3:29:45 AM",
96 "dateFinished": "Aug 24, 2015 10:54:15 PM",101 "dateFinished": "Sep 22, 2015 3:29:46 AM",
97 "status": "FINISHED",102 "status": "FINISHED",
98 "progressUpdateIntervalMs": 500103 "progressUpdateIntervalMs": 500
99 },104 },
100 {105 {
101 "title": "Data Processing In Scala",106 "title": "Data processing to enable future queries",
102 "text": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nval reSystemLog \u003d \"\"\"^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\"\"\".r\ncase class SyslogMessage(timestamp: String, host: Option[String], process: String, pid: Int, message: String)\n\nval lines \u003d sc.textFile(\"/user/flume/flume-syslog/*/*/*\")\nval events \u003d lines.flatMap {\n case reSystemLog(timestamp,hostname, proc, pidS, msg) \u003d\u003e\n for {pid \u003c- Try(pidS.toInt).toOption} yield SyslogMessage(timestamp,Some(hostname), proc, pid, msg)\n case _ \u003d\u003e None\n }.toDF()\n\nevents.registerTempTable(\"syslog\")\n",107 "text": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nval reSystemLog \u003d \"\"\"^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\"\"\".r\ncase class SyslogMessage(timestamp: String, host: Option[String], process: String, pid: Int, message: String)\n\nval lines \u003d sc.textFile(\"/user/flume/flume-syslog/*/*\")\nval events \u003d lines.flatMap {\n case reSystemLog(timestamp,hostname, proc, pidS, msg) \u003d\u003e\n for {pid \u003c- Try(pidS.toInt).toOption} yield SyslogMessage(timestamp,Some(hostname), proc, pid, msg)\n case _ \u003d\u003e None\n }.toDF()\n\nevents.registerTempTable(\"syslog\")\n",
103 "config": {108 "config": {
104 "colWidth": 12.0,109 "colWidth": 12.0,
105 "graph": {110 "graph": {
@@ -124,11 +129,11 @@
124 "result": {129 "result": {
125 "code": "SUCCESS",130 "code": "SUCCESS",
126 "type": "TEXT",131 "type": "TEXT",
127 "msg": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nreSystemLog: scala.util.matching.Regex \u003d ^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\ndefined class SyslogMessage\nlines: org.apache.spark.rdd.RDD[String] \u003d /user/flume/flume-syslog/*/*/* MapPartitionsRDD[509] at textFile at \u003cconsole\u003e:73\nevents: org.apache.spark.sql.DataFrame \u003d [timestamp: string, host: string, process: string, pid: int, message: string]\n"132 "msg": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nreSystemLog: scala.util.matching.Regex \u003d ^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\ndefined class SyslogMessage\nlines: org.apache.spark.rdd.RDD[String] \u003d /user/flume/flume-syslog/*/* MapPartitionsRDD[50] at textFile at \u003cconsole\u003e:31\nevents: org.apache.spark.sql.DataFrame \u003d [timestamp: string, host: string, process: string, pid: int, message: string]\n"
128 },133 },
129 "dateCreated": "Aug 21, 2015 12:03:17 AM",134 "dateCreated": "Aug 21, 2015 12:03:17 AM",
130 "dateStarted": "Aug 24, 2015 10:54:28 PM",135 "dateStarted": "Sep 22, 2015 3:23:23 AM",
131 "dateFinished": "Aug 24, 2015 10:54:29 PM",136 "dateFinished": "Sep 22, 2015 3:23:26 AM",
132 "status": "FINISHED",137 "status": "FINISHED",
133 "progressUpdateIntervalMs": 500138 "progressUpdateIntervalMs": 500
134 },139 },
@@ -169,7 +174,9 @@
             }
           }
         },
-        "title": true
+        "title": true,
+        "tableHide": false,
+        "editorHide": false
       },
       "settings": {
         "params": {},
@@ -180,26 +187,26 @@
180 "result": {187 "result": {
181 "code": "SUCCESS",188 "code": "SUCCESS",
182 "type": "TABLE",189 "type": "TABLE",
183 "msg": "process\tvalue\nCRON\t180\nntpdate\t1\nsshd\t6\nsu\t1\nsystemd-logind\t1\n"190 "msg": "process\tvalue\nCRON\t3\nsshd\t20\n"
184 },191 },
185 "dateCreated": "Aug 24, 2015 10:31:38 PM",192 "dateCreated": "Aug 24, 2015 10:31:38 PM",
186 "dateStarted": "Aug 24, 2015 10:54:37 PM",193 "dateStarted": "Sep 22, 2015 3:29:54 AM",
187 "dateFinished": "Aug 24, 2015 10:54:41 PM",194 "dateFinished": "Sep 22, 2015 3:29:57 AM",
188 "status": "FINISHED",195 "status": "FINISHED",
189 "progressUpdateIntervalMs": 500196 "progressUpdateIntervalMs": 500
190 },197 },
191 {198 {
192 "title": "Data Visualization",199 "title": "Data Visualization",
193 "text": "%sql \nselect pid, count(1) value\nfrom syslog\nwhere pid \u003e 5000 and pid \u003c 20000 and timestamp \u003e ${maxDate\u003d\"Aug 24\"}\ngroup by pid \norder by pid\n",200 "text": "%sql \nselect host, count(1) value\nfrom syslog\nwhere timestamp \u003e ${maxDate\u003d\"Sep 15\"}\ngroup by host\n",
194 "config": {201 "config": {
195 "colWidth": 4.0,202 "colWidth": 4.0,
196 "graph": {203 "graph": {
197 "mode": "pieChart",204 "mode": "table",
198 "height": 300.0,205 "height": 300.0,
199 "optionOpen": false,206 "optionOpen": false,
200 "keys": [207 "keys": [
201 {208 {
202 "name": "pid",209 "name": "host",
203 "index": 0.0,210 "index": 0.0,
204 "aggr": "sum"211 "aggr": "sum"
205 }212 }
@@ -213,11 +220,6 @@
           ],
           "groups": [],
           "scatter": {
-            "xAxis": {
-              "name": "pid",
-              "index": 0.0,
-              "aggr": "sum"
-            },
             "yAxis": {
               "name": "value",
               "index": 1.0,
@@ -225,14 +227,17 @@
             }
           }
         },
-        "title": true
+        "title": true,
+        "tableHide": false
       },
       "settings": {
-        "params": {},
+        "params": {
+          "maxDate": "\"Sep 15\""
+        },
         "forms": {
           "maxDate": {
             "name": "maxDate",
-            "defaultValue": "\"Aug 24\"",
+            "defaultValue": "\"Sep 15\"",
             "hidden": false
           }
         }
@@ -242,33 +247,33 @@
242 "result": {247 "result": {
243 "code": "SUCCESS",248 "code": "SUCCESS",
244 "type": "TABLE",249 "type": "TABLE",
245 "msg": "pid\tvalue\n5073\t2\n5074\t1\n5218\t2\n5219\t1\n5374\t2\n5375\t1\n5485\t2\n5881\t2\n5882\t1\n"250 "msg": "host\tvalue\nhdfs-master-0\t23\n"
246 },251 },
247 "dateCreated": "Aug 21, 2015 1:11:17 AM",252 "dateCreated": "Aug 21, 2015 1:11:17 AM",
248 "dateStarted": "Aug 24, 2015 10:54:43 PM",253 "dateStarted": "Sep 22, 2015 3:30:03 AM",
249 "dateFinished": "Aug 24, 2015 10:54:45 PM",254 "dateFinished": "Sep 22, 2015 3:30:05 AM",
250 "status": "FINISHED",255 "status": "FINISHED",
251 "progressUpdateIntervalMs": 500256 "progressUpdateIntervalMs": 500
252 },257 },
253 {258 {
254 "title": "Data Visualization",259 "title": "Data Visualization",
255 "text": "%sql \nselect timestamp, count(1) value\nfrom syslog\nwhere timestamp \u003e ${maxDate\u003d\"Aug 24\"} and process \u003d\u003d \"sshd\"\ngroup by timestamp\norder by timestamp",260 "text": "%sql \nselect process, timestamp, message\nfrom syslog\nwhere timestamp \u003e ${maxDate\u003d\"Sep 15\"}\n",
256 "config": {261 "config": {
257 "colWidth": 4.0,262 "colWidth": 4.0,
258 "graph": {263 "graph": {
259 "mode": "pieChart",264 "mode": "table",
260 "height": 300.0,265 "height": 300.0,
261 "optionOpen": false,266 "optionOpen": false,
262 "keys": [267 "keys": [
263 {268 {
264 "name": "timestamp",269 "name": "process",
265 "index": 0.0,270 "index": 0.0,
266 "aggr": "sum"271 "aggr": "sum"
267 }272 }
268 ],273 ],
269 "values": [274 "values": [
270 {275 {
271 "name": "value",276 "name": "timestamp",
272 "index": 1.0,277 "index": 1.0,
273 "aggr": "sum"278 "aggr": "sum"
274 }279 }
@@ -276,27 +281,23 @@
276 "groups": [],281 "groups": [],
277 "scatter": {282 "scatter": {
278 "xAxis": {283 "xAxis": {
279 "name": "timestamp",284 "name": "process",
280 "index": 0.0,285 "index": 0.0,
281 "aggr": "sum"286 "aggr": "sum"
282 },
283 "yAxis": {
284 "name": "value",
285 "index": 1.0,
286 "aggr": "sum"
287 }287 }
288 }288 }
289 },289 },
290 "title": true290 "title": true,
291 "tableHide": false
291 },292 },
292 "settings": {293 "settings": {
293 "params": {294 "params": {
294 "maxDate": "\"Aug 20\""295 "maxDate": "\"Sep 15\""
295 },296 },
296 "forms": {297 "forms": {
297 "maxDate": {298 "maxDate": {
298 "name": "maxDate",299 "name": "maxDate",
299 "defaultValue": "\"Aug 24\"",300 "defaultValue": "\"Sep 15\"",
300 "hidden": false301 "hidden": false
301 }302 }
302 }303 }
@@ -306,32 +307,51 @@
306 "result": {307 "result": {
307 "code": "SUCCESS",308 "code": "SUCCESS",
308 "type": "TABLE",309 "type": "TABLE",
309 "msg": "timestamp\tvalue\nAug 21 11:20:45\t2\nAug 21 19:58:30\t2\nAug 24 21:59:47\t2\n"310 "msg": "process\ttimestamp\tmessage\nsshd\tSep 22 03:14:23\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:23\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:24\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:24\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:25\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:25\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:26\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:26\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:27\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:27\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:28\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:28\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:29\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:29\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:30\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:30\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:31\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:32\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:33\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:33\tConnection closed by 172.31.13.239 [preauth]\nCRON\tSep 22 03:17:01\tpam_unix(cron:session): session opened for user root by (uid\u003d0)\nCRON\tSep 22 03:17:01\t(root) CMD ( cd / \u0026\u0026 run-parts --report /etc/cron.hourly)\nCRON\tSep 22 03:17:01\tpam_unix(cron:session): session closed for user root\n"
310 },311 },
311 "dateCreated": "Aug 21, 2015 8:29:46 AM",312 "dateCreated": "Aug 21, 2015 8:29:46 AM",
312 "dateStarted": "Aug 24, 2015 10:54:54 PM",313 "dateStarted": "Sep 22, 2015 3:30:26 AM",
313 "dateFinished": "Aug 24, 2015 10:54:55 PM",314 "dateFinished": "Sep 22, 2015 3:30:26 AM",
314 "status": "FINISHED",315 "status": "FINISHED",
315 "progressUpdateIntervalMs": 500316 "progressUpdateIntervalMs": 500
316 },317 },
317 {318 {
318 "config": {},319 "text": "",
320 "config": {
321 "colWidth": 12.0,
322 "graph": {
323 "mode": "table",
324 "height": 300.0,
325 "optionOpen": false,
326 "keys": [],
327 "values": [],
328 "groups": [],
329 "scatter": {}
330 },
331 "tableHide": false
332 },
319 "settings": {333 "settings": {
320 "params": {},334 "params": {},
321 "forms": {}335 "forms": {}
322 },336 },
323 "jobName": "paragraph_1440473909272_653880463",337 "jobName": "paragraph_1440473909272_653880463",
324 "id": "20150824-223829_186145308",338 "id": "20150824-223829_186145308",
339 "result": {
340 "code": "SUCCESS",
341 "type": "TEXT"
342 },
325 "dateCreated": "Aug 24, 2015 10:38:29 PM",343 "dateCreated": "Aug 24, 2015 10:38:29 PM",
326 "status": "READY",344 "dateStarted": "Sep 18, 2015 5:59:44 PM",
345 "dateFinished": "Sep 18, 2015 6:03:23 PM",
346 "status": "FINISHED",
327 "progressUpdateIntervalMs": 500347 "progressUpdateIntervalMs": 500
328 }348 }
329 ],349 ],
330 "name": "Real-time Analytic Tutorial",350 "name": "Zeppelin Flume/HDFS Tutorial",
331 "id": "flume-tutorial",351 "id": "flume-tutorial",
332 "angularObjects": {},352 "angularObjects": {},
333 "config": {353 "config": {
334 "looknfeel": "default"354 "looknfeel": "default"
335 },355 },
336 "info": {}356 "info": {}
337}357}
338\ No newline at end of file358\ No newline at end of file
339359
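
The substance of the notebook change above is the "Data processing to enable future queries" paragraph: it matches each raw Flume record against a syslog regex, turns the matches into a DataFrame, and registers it as the temp table "syslog" that the later %sql paragraphs query. For readers more comfortable in Python, here is a rough pyspark equivalent of that Scala paragraph. It is a sketch for illustration only (the merge ships only the Scala version) and assumes the sc and sqlContext variables that Zeppelin predefines on the Spark 1.x stack these charms deploy:

    # Rough pyspark equivalent of the notebook's Scala parsing paragraph
    # (illustrative sketch; not part of this merge).
    import re
    from pyspark.sql import Row

    # Same pattern the notebook uses:
    # <priority>timestamp host process[pid]: message
    RE_SYSLOG = re.compile(
        r'^<\d+>([A-Za-z0-9, ]+\d{2}:\d{2}:\d{2}(?:\.\d{3})?)\s+'
        r'(\S+)\s+([^\[]+)\[(\d+)\]\s*:?\s*(.*)')

    def parse(line):
        m = RE_SYSLOG.match(line)
        if not m:
            return []  # skip records the regex cannot parse
        ts, host, proc, pid, msg = m.groups()
        return [Row(timestamp=ts, host=host, process=proc,
                    pid=int(pid), message=msg)]

    # sc and sqlContext are provided by the Zeppelin Spark interpreter.
    lines = sc.textFile("/user/flume/flume-syslog/*/*")
    events = sqlContext.createDataFrame(lines.flatMap(parse))
    events.registerTempTable("syslog")  # queried by the %sql paragraphs
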
=== modified file 'tests/100-deploy-spark-hdfs-yarn'
--- tests/100-deploy-spark-hdfs-yarn 2015-09-16 21:28:31 +0000
+++ tests/100-deploy-spark-hdfs-yarn 2015-09-22 03:37:16 +0000
@@ -34,7 +34,7 @@
3434
35 cls.d.setup(timeout=3600)35 cls.d.setup(timeout=3600)
36 cls.d.sentry.wait(timeout=3600)36 cls.d.sentry.wait(timeout=3600)
37 cls.unit = cls.d.sentry.unit['zeppelin/0']37 cls.unit = cls.d.sentry.unit['spark/0']
3838
39###########################################################################39###########################################################################
40# Validate that the Spark HistoryServer is running40# Validate that the Spark HistoryServer is running
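
The test change is a one-line fix: the HistoryServer check must address the spark/0 unit, where that daemon actually runs, rather than zeppelin/0. For context, a minimal amulet sketch of the pattern the test uses follows; the charm URLs, relation endpoints, and pgrep check here are assumptions for illustration, and only the sentry.unit['spark/0'] lookup mirrors the diff:

    # Minimal amulet sketch (assumed topology and check; only the
    # sentry.unit['spark/0'] lookup comes from the diff above).
    import amulet

    d = amulet.Deployment(series='trusty')
    d.add('spark', 'cs:trusty/apache-spark')        # assumed charm URL
    d.add('zeppelin', 'cs:trusty/apache-zeppelin')  # assumed charm URL
    d.relate('zeppelin:spark', 'spark:client')      # assumed endpoints
    d.setup(timeout=3600)
    d.sentry.wait(timeout=3600)

    # Address spark/0, the unit that runs the HistoryServer, rather
    # than zeppelin/0; this is what the one-line fix corrects.
    unit = d.sentry.unit['spark/0']
    output, retcode = unit.run('pgrep -f HistoryServer')
    assert retcode == 0, 'Spark HistoryServer is not running'
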
