Merge lp:~bigdata-dev/charms/trusty/apache-zeppelin/trunk into lp:charms/trusty/apache-zeppelin
Proposed by: Kevin W Monroe
Status: Merged
Merged at revision: 19
Proposed branch: lp:~bigdata-dev/charms/trusty/apache-zeppelin/trunk
Merge into: lp:charms/trusty/apache-zeppelin
Diff against target: 341 lines (+80/-60), 2 files modified:
  resources/flume-tutorial/note.json (+79/-59)
  tests/100-deploy-spark-hdfs-yarn (+1/-1)
To merge this branch: bzr merge lp:~bigdata-dev/charms/trusty/apache-zeppelin/trunk
Related bugs: none
Reviewer: Kevin W Monroe (Approve)
Review via email: mp+271903@code.launchpad.net
Commit message
Description of the change
Revision history for this message
Kevin W Monroe (kwmonroe):
review: Approve
Preview Diff
=== modified file 'resources/flume-tutorial/note.json'
--- resources/flume-tutorial/note.json 2015-08-26 12:27:56 +0000
+++ resources/flume-tutorial/note.json 2015-09-22 03:37:16 +0000
@@ -1,7 +1,7 @@
 {
   "paragraphs": [
     {
-      "text": "%md\n## Welcome to Realtime Syslog Analytic Tutorial Powered by Juju.\n### In this live tutorial we will demonstrat three main phases of any big data solution:\n#### 1. Data Ingestion - Apache Flume-syslog -\u003e Apache flume-hdfs\n#### 2. Data Processing - Apache Spark+YARN\n#### 3. Data Visualization - SparkSQL",
+      "text": "%md\n## Welcome to the Realtime Syslog Analytics tutorial, powered by Juju.\n### In this live tutorial we will demonstrate three phases of a big data solution:\n#### 1. Data Ingestion: Flume-Syslog -\u003e Flume-HDFS\n#### 2. Data Processing: Spark+YARN\n#### 3. Data Visualization: SparkSQL+Zeppelin",
       "config": {
         "colWidth": 12.0,
         "graph": {
@@ -12,7 +12,8 @@
           "values": [],
           "groups": [],
           "scatter": {}
-        }
+        },
+        "tableHide": false
       },
       "settings": {
         "params": {},
@@ -23,17 +24,17 @@
       "result": {
         "code": "SUCCESS",
         "type": "HTML",
-        "msg": "\u003ch2\u003eWelcome to Realtime Syslog Analytic Tutorial Powered by Juju.\u003c/h2\u003e\n\u003ch3\u003eIn this live tutorial we will demonstrat three main phases of any big data solution:\u003c/h3\u003e\n\u003ch4\u003e1. Data Ingestion - Apache Flume-syslog -\u003e Apache flume-hdfs\u003c/h4\u003e\n\u003ch4\u003e2. Data Processing - Apache Spark+YARN\u003c/h4\u003e\n\u003ch4\u003e3. Data Visualization - SparkSQL\u003c/h4\u003e\n"
+        "msg": "\u003ch2\u003eWelcome to the Realtime Syslog Analytics tutorial, powered by Juju.\u003c/h2\u003e\n\u003ch3\u003eIn this live tutorial we will demonstrate three phases of a big data solution:\u003c/h3\u003e\n\u003ch4\u003e1. Data Ingestion: Flume-Syslog -\u003e Flume-HDFS\u003c/h4\u003e\n\u003ch4\u003e2. Data Processing: Spark+YARN\u003c/h4\u003e\n\u003ch4\u003e3. Data Visualization: SparkSQL+Zeppelin\u003c/h4\u003e\n"
       },
       "dateCreated": "Aug 20, 2015 3:14:39 PM",
-      "dateStarted": "Aug 25, 2015 9:34:23 AM",
-      "dateFinished": "Aug 25, 2015 9:34:23 AM",
+      "dateStarted": "Sep 18, 2015 6:25:43 PM",
+      "dateFinished": "Sep 18, 2015 6:25:43 PM",
       "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     },
     {
-      "title": "Data Ingestion",
-      "text": "import sys.process._\n// Generate syslog messages by running an spakk\n\"/home/ubuntu/sparkpi.sh\" !!\n// Verify that FLume has collected and sent the syslog messages to HDFS\n\"hadoop fs -ls -R /user/flume/flume-syslog\" !!",
+      "title": "Generate Data and Verify Ingestion",
+      "text": "%sh\n# Generate syslog messages by trying to ssh to the hdfs-master unit.\n# This will likely result in a \u0027publickey denied\u0027 error, but it will\n# be enough to trigger a syslog event on the hdfs-master.\nfor i in `seq 1 10`;\ndo\n ssh -oStrictHostKeyChecking\u003dno hdfs-master-0 uptime \u003e/dev/null 2\u003e\u00261\n sleep 1\ndone\n\n# Check if Flume has collected and sent the syslog messages to HDFS.\n# If no output is seen from this command, wait a few minutes and try\n# again. The amount of time between Flume ingesting the event and it\n# being available in HDFS is controlled by the \u0027roll_interval\u0027\n# configuration option in the flume-hdfs charm.\nhadoop fs -ls -R /user/flume/flume-syslog | tail",
       "config": {
         "colWidth": 12.0,
         "graph": {
@@ -45,7 +46,9 @@
           "groups": [],
           "scatter": {}
         },
-        "title": true
+        "title": true,
+        "tableHide": false,
+        "editorHide": false
       },
       "settings": {
         "params": {},
@@ -56,16 +59,17 @@
       "result": {
         "code": "SUCCESS",
         "type": "TEXT",
-        "msg": "" },
+        "msg": "drwxr-xr-x - flume supergroup 0 2015-09-22 03:19 /user/flume/flume-syslog/2015-09-22\n-rw-r--r-- 3 flume supergroup 302 2015-09-22 03:12 /user/flume/flume-syslog/2015-09-22/FlumeData.1442891213622\n-rw-r--r-- 3 flume supergroup 2328 2015-09-22 03:19 /user/flume/flume-syslog/2015-09-22/FlumeData.1442891678998\n"
+      },
       "dateCreated": "Aug 20, 2015 6:09:43 PM",
-      "dateStarted": "Aug 24, 2015 10:51:34 PM",
-      "dateFinished": "Aug 24, 2015 10:52:11 PM",
+      "dateStarted": "Sep 22, 2015 3:29:15 AM",
+      "dateFinished": "Sep 22, 2015 3:29:28 AM",
       "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     },
     {
-      "title": "Data Processing in python",
-      "text": "%pyspark\nsc.textFile(\"/user/flume/flume-syslog/*/*/*\").filter(lambda l: \"sshd\" in l).collect()",
+      "title": "Simple Data Processing with Scala",
+      "text": "// Output the number of sshd syslog events\nsc.textFile(\"/user/flume/flume-syslog/*/*\").filter(line \u003d\u003e line.contains(\"sshd\")).count()",
       "config": {
         "colWidth": 12.0,
         "graph": {
@@ -90,16 +94,17 @@
       "result": {
         "code": "SUCCESS",
         "type": "TEXT",
-        "msg": "" },
+        "msg": "res12: Long \u003d 40\n"
+      },
       "dateCreated": "Aug 20, 2015 6:11:00 PM",
-      "dateStarted": "Aug 24, 2015 10:54:10 PM",
-      "dateFinished": "Aug 24, 2015 10:54:15 PM",
+      "dateStarted": "Sep 22, 2015 3:29:45 AM",
+      "dateFinished": "Sep 22, 2015 3:29:46 AM",
       "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     },
     {
-      "title": "Data Processing In Scala",
-      "text": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nval reSystemLog \u003d \"\"\"^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\"\"\".r\ncase class SyslogMessage(timestamp: String, host: Option[String], process: String, pid: Int, message: String)\n\nval lines \u003d sc.textFile(\"/user/flume/flume-syslog/*/*/*\")\nval events \u003d lines.flatMap {\n case reSystemLog(timestamp,hostname, proc, pidS, msg) \u003d\u003e\n for {pid \u003c- Try(pidS.toInt).toOption} yield SyslogMessage(timestamp,Some(hostname), proc, pid, msg)\n case _ \u003d\u003e None\n }.toDF()\n\nevents.registerTempTable(\"syslog\")\n",
+      "title": "Data processing to enable future queries",
+      "text": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nval reSystemLog \u003d \"\"\"^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\"\"\".r\ncase class SyslogMessage(timestamp: String, host: Option[String], process: String, pid: Int, message: String)\n\nval lines \u003d sc.textFile(\"/user/flume/flume-syslog/*/*\")\nval events \u003d lines.flatMap {\n case reSystemLog(timestamp,hostname, proc, pidS, msg) \u003d\u003e\n for {pid \u003c- Try(pidS.toInt).toOption} yield SyslogMessage(timestamp,Some(hostname), proc, pid, msg)\n case _ \u003d\u003e None\n }.toDF()\n\nevents.registerTempTable(\"syslog\")\n",
       "config": {
         "colWidth": 12.0,
         "graph": {
@@ -124,11 +129,11 @@
       "result": {
         "code": "SUCCESS",
         "type": "TEXT",
-        "msg": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nreSystemLog: scala.util.matching.Regex \u003d ^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\ndefined class SyslogMessage\nlines: org.apache.spark.rdd.RDD[String] \u003d /user/flume/flume-syslog/*/*/* MapPartitionsRDD[509] at textFile at \u003cconsole\u003e:73\nevents: org.apache.spark.sql.DataFrame \u003d [timestamp: string, host: string, process: string, pid: int, message: string]\n"
+        "msg": "import org.joda.time.DateTime\nimport org.joda.time.format.{DateTimeFormatterBuilder, DateTimeFormat}\nimport scala.util.Try\nreSystemLog: scala.util.matching.Regex \u003d ^\\\u003c\\d+\\\u003e([A-Za-z0-9, ]+\\d{2}:\\d{2}:\\d{2}(?:\\.\\d{3})?)\\s+(\\S+)\\s+([^\\[]+)\\[(\\d+)\\]\\s*:?\\s*(.*)\ndefined class SyslogMessage\nlines: org.apache.spark.rdd.RDD[String] \u003d /user/flume/flume-syslog/*/* MapPartitionsRDD[50] at textFile at \u003cconsole\u003e:31\nevents: org.apache.spark.sql.DataFrame \u003d [timestamp: string, host: string, process: string, pid: int, message: string]\n"
       },
       "dateCreated": "Aug 21, 2015 12:03:17 AM",
-      "dateStarted": "Aug 24, 2015 10:54:28 PM",
-      "dateFinished": "Aug 24, 2015 10:54:29 PM",
+      "dateStarted": "Sep 22, 2015 3:23:23 AM",
+      "dateFinished": "Sep 22, 2015 3:23:26 AM",
       "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     },
@@ -169,7 +174,9 @@
             }
           }
         },
-        "title": true
+        "title": true,
+        "tableHide": false,
+        "editorHide": false
       },
       "settings": {
         "params": {},
@@ -180,26 +187,26 @@
       "result": {
         "code": "SUCCESS",
         "type": "TABLE",
-        "msg": "process\tvalue\nCRON\t180\nntpdate\t1\nsshd\t6\nsu\t1\nsystemd-logind\t1\n"
+        "msg": "process\tvalue\nCRON\t3\nsshd\t20\n"
       },
       "dateCreated": "Aug 24, 2015 10:31:38 PM",
-      "dateStarted": "Aug 24, 2015 10:54:37 PM",
-      "dateFinished": "Aug 24, 2015 10:54:41 PM",
+      "dateStarted": "Sep 22, 2015 3:29:54 AM",
+      "dateFinished": "Sep 22, 2015 3:29:57 AM",
       "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     },
     {
       "title": "Data Visualization",
-      "text": "%sql \nselect pid, count(1) value\nfrom syslog\nwhere pid \u003e 5000 and pid \u003c 20000 and timestamp \u003e ${maxDate\u003d\"Aug 24\"}\ngroup by pid \norder by pid\n",
+      "text": "%sql \nselect host, count(1) value\nfrom syslog\nwhere timestamp \u003e ${maxDate\u003d\"Sep 15\"}\ngroup by host\n",
       "config": {
         "colWidth": 4.0,
         "graph": {
-          "mode": "pieChart",
+          "mode": "table",
           "height": 300.0,
           "optionOpen": false,
           "keys": [
             {
-              "name": "pid",
+              "name": "host",
               "index": 0.0,
               "aggr": "sum"
             }
@@ -213,11 +220,6 @@
           ],
           "groups": [],
           "scatter": {
-            "xAxis": {
-              "name": "pid",
-              "index": 0.0,
-              "aggr": "sum"
-            },
             "yAxis": {
               "name": "value",
               "index": 1.0,
@@ -225,14 +227,17 @@
             }
           }
         },
-        "title": true
+        "title": true,
+        "tableHide": false
       },
       "settings": {
-        "params": {},
+        "params": {
+          "maxDate": "\"Sep 15\""
+        },
         "forms": {
           "maxDate": {
             "name": "maxDate",
-            "defaultValue": "\"Aug 24\"",
+            "defaultValue": "\"Sep 15\"",
             "hidden": false
           }
         }
@@ -242,33 +247,33 @@
       "result": {
         "code": "SUCCESS",
         "type": "TABLE",
-        "msg": "pid\tvalue\n5073\t2\n5074\t1\n5218\t2\n5219\t1\n5374\t2\n5375\t1\n5485\t2\n5881\t2\n5882\t1\n"
+        "msg": "host\tvalue\nhdfs-master-0\t23\n"
       },
       "dateCreated": "Aug 21, 2015 1:11:17 AM",
-      "dateStarted": "Aug 24, 2015 10:54:43 PM",
-      "dateFinished": "Aug 24, 2015 10:54:45 PM",
+      "dateStarted": "Sep 22, 2015 3:30:03 AM",
+      "dateFinished": "Sep 22, 2015 3:30:05 AM",
       "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     },
     {
       "title": "Data Visualization",
-      "text": "%sql \nselect timestamp, count(1) value\nfrom syslog\nwhere timestamp \u003e ${maxDate\u003d\"Aug 24\"} and process \u003d\u003d \"sshd\"\ngroup by timestamp\norder by timestamp",
+      "text": "%sql \nselect process, timestamp, message\nfrom syslog\nwhere timestamp \u003e ${maxDate\u003d\"Sep 15\"}\n",
       "config": {
         "colWidth": 4.0,
         "graph": {
-          "mode": "pieChart",
+          "mode": "table",
           "height": 300.0,
           "optionOpen": false,
           "keys": [
             {
-              "name": "timestamp",
+              "name": "process",
               "index": 0.0,
               "aggr": "sum"
             }
           ],
           "values": [
             {
-              "name": "value",
+              "name": "timestamp",
               "index": 1.0,
               "aggr": "sum"
             }
@@ -276,27 +281,23 @@
           "groups": [],
           "scatter": {
             "xAxis": {
-              "name": "timestamp",
+              "name": "process",
               "index": 0.0,
               "aggr": "sum"
-            },
-            "yAxis": {
-              "name": "value",
-              "index": 1.0,
-              "aggr": "sum"
             }
           }
         },
-        "title": true
+        "title": true,
+        "tableHide": false
      },
       "settings": {
         "params": {
-          "maxDate": "\"Aug 20\""
+          "maxDate": "\"Sep 15\""
         },
         "forms": {
           "maxDate": {
             "name": "maxDate",
-            "defaultValue": "\"Aug 24\"",
+            "defaultValue": "\"Sep 15\"",
             "hidden": false
           }
         }
@@ -306,32 +307,51 @@
       "result": {
         "code": "SUCCESS",
         "type": "TABLE",
-        "msg": "timestamp\tvalue\nAug 21 11:20:45\t2\nAug 21 19:58:30\t2\nAug 24 21:59:47\t2\n"
+        "msg": "process\ttimestamp\tmessage\nsshd\tSep 22 03:14:23\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:23\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:24\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:24\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:25\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:25\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:26\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:26\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:27\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:27\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:28\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:28\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:29\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:29\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:30\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:30\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:31\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:32\tConnection closed by 172.31.13.239 [preauth]\nsshd\tSep 22 03:14:33\terror: Could not load host key: /etc/ssh/ssh_host_ed25519_key\nsshd\tSep 22 03:14:33\tConnection closed by 172.31.13.239 [preauth]\nCRON\tSep 22 03:17:01\tpam_unix(cron:session): session opened for user root by (uid\u003d0)\nCRON\tSep 22 03:17:01\t(root) CMD ( cd / \u0026\u0026 run-parts --report /etc/cron.hourly)\nCRON\tSep 22 03:17:01\tpam_unix(cron:session): session closed for user root\n"
       },
       "dateCreated": "Aug 21, 2015 8:29:46 AM",
-      "dateStarted": "Aug 24, 2015 10:54:54 PM",
-      "dateFinished": "Aug 24, 2015 10:54:55 PM",
+      "dateStarted": "Sep 22, 2015 3:30:26 AM",
+      "dateFinished": "Sep 22, 2015 3:30:26 AM",
       "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     },
     {
-      "config": {},
+      "text": "",
+      "config": {
+        "colWidth": 12.0,
+        "graph": {
+          "mode": "table",
+          "height": 300.0,
+          "optionOpen": false,
+          "keys": [],
+          "values": [],
+          "groups": [],
+          "scatter": {}
+        },
+        "tableHide": false
+      },
       "settings": {
         "params": {},
         "forms": {}
       },
       "jobName": "paragraph_1440473909272_653880463",
       "id": "20150824-223829_186145308",
+      "result": {
+        "code": "SUCCESS",
+        "type": "TEXT"
+      },
       "dateCreated": "Aug 24, 2015 10:38:29 PM",
-      "status": "READY",
+      "dateStarted": "Sep 18, 2015 5:59:44 PM",
+      "dateFinished": "Sep 18, 2015 6:03:23 PM",
+      "status": "FINISHED",
       "progressUpdateIntervalMs": 500
     }
   ],
-  "name": "Real-time Analytic Tutorial",
+  "name": "Zeppelin Flume/HDFS Tutorial",
   "id": "flume-tutorial",
   "angularObjects": {},
   "config": {
     "looknfeel": "default"
   },
   "info": {}
 }
\ No newline at end of file
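
The key change above is the rewritten processing paragraph: it matches each
raw Flume syslog line against a regex, lifts the fields into a SyslogMessage
case class, and registers the resulting DataFrame as a temp table so the
later %sql paragraphs (with their ${maxDate} dynamic forms) can query it.
For trying the same technique outside Zeppelin, here is a minimal standalone
sketch for a Spark 1.x spark-shell; sc and sqlContext are provided by the
shell, and the HDFS glob and the final query are illustrative assumptions
rather than charm output.

// Parse Flume-delivered syslog lines and expose them to SparkSQL.
import scala.util.Try
import sqlContext.implicits._  // enables .toDF() on RDDs of case classes

// <PRI>Mon DD HH:MM:SS host process[pid]: message
val reSystemLog =
  """^\<\d+\>([A-Za-z0-9, ]+\d{2}:\d{2}:\d{2}(?:\.\d{3})?)\s+(\S+)\s+([^\[]+)\[(\d+)\]\s*:?\s*(.*)""".r

case class SyslogMessage(timestamp: String, host: Option[String],
                         process: String, pid: Int, message: String)

val lines = sc.textFile("/user/flume/flume-syslog/*/*")  // assumed sink path
val events = lines.flatMap {
  case reSystemLog(ts, hostname, proc, pidS, msg) =>
    // Drop events whose pid field is not a valid integer.
    for (pid <- Try(pidS.toInt).toOption)
      yield SyslogMessage(ts, Some(hostname), proc, pid, msg)
  case _ => None
}.toDF()

events.registerTempTable("syslog")  // Spark 1.x API

// Illustrative follow-up, equivalent to the first visualization paragraph:
sqlContext.sql("select host, count(1) value from syslog group by host").show()

Registering the temp table once is what lets every visualization paragraph
run plain SQL against the same parsed data.
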
=== modified file 'tests/100-deploy-spark-hdfs-yarn'
--- tests/100-deploy-spark-hdfs-yarn 2015-09-16 21:28:31 +0000
+++ tests/100-deploy-spark-hdfs-yarn 2015-09-22 03:37:16 +0000
@@ -34,7 +34,7 @@
 
         cls.d.setup(timeout=3600)
         cls.d.sentry.wait(timeout=3600)
-        cls.unit = cls.d.sentry.unit['zeppelin/0']
+        cls.unit = cls.d.sentry.unit['spark/0']
 
     ###########################################################################
     # Validate that the Spark HistoryServer is running