├── README.md
├── kafka-log-appender
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── com
│ │ └── log
│ │ └── kafka
│ │ ├── RogueApplication.java
│ │ ├── appender
│ │ └── KafkaAppender.java
│ │ └── formatter
│ │ ├── Formatter.java
│ │ ├── JsonFormatter.java
│ │ └── MessageFormatter.java
│ └── resources
│ └── logback.xml
└── log-kafka-storm
├── docker-compose.yml
├── pom.xml
├── src
└── main
│ └── java
│ └── com
│ └── log
│ └── kafka
│ ├── other
│ └── TestXMPP.java
│ └── storm
│ ├── common
│ ├── EWMA.java
│ └── NotifyMessageMapper.java
│ ├── filter
│ └── BooleanFilter.java
│ ├── function
│ ├── JsonProjectFunction.java
│ ├── MessageMapper.java
│ ├── MovingAverageFunction.java
│ ├── ThresholdFilterFunction.java
│ └── XMPPFunction.java
│ └── topology
│ └── LogAnalysisTopology.java
└── start-kafka-shell.sh
/README.md:
--------------------------------------------------------------------------------
1 | > 本文主要目的在于将Storm运行环境集成到docker容器中,在一个快速开发模式下运行测试。代码内容来自 [Storm Blueprints: Patterns for Distributed Real-time Computation](http://www.amazon.com/Storm-Blueprints-Distributed-Real-time-Computation/dp/178216829X) 第四章。
2 | ```
3 | 本代码仅供学习。这个样例程序主要是分析日志文件,一旦符合某个threshold,将通过XMPP告警。
4 | - 将日志信息写到Apache Kafka;
5 | - Storm流处理streaming功能负责分析日志;
6 | - 实现移动平均线式的分析;
7 | - Storm将告警信息使用XMPP协议通知给管理员。
8 | ```
9 |
10 |
11 | ## 下载源码
12 | git clone https://github.com/jiekechoo/log-analysis-kafka-storm-docker.git
13 |
14 | - kafka-log-appender:将日志内容写到kafka程序
15 | - log-kafka-storm:docker-compose脚本和storm程序
16 | ## 准备docker环境
17 | ### 启动docker
18 | docker-compose环境搭建过程请查看我的[另一篇博客:CentOS 6 install docker and docker-compose](http://blog.csdn.net/jiekechoo/article/details/48690841)
19 |
20 | 进入log-kafka-storm目录,查看docker-compose.yml文件,假设我们的宿主主机是:**192.168.1.231**,如有需要改成你自己的主机地址,代码里所有的地方都要把地址改一下。
21 |
22 | ```
23 | [root@docker2 log-kafka-storm]# more docker-compose.yml
24 | zookeeper:
25 | image: jplock/zookeeper
26 | ports:
27 | - "2181:2181"
28 | nimbus:
29 | image: wurstmeister/storm-nimbus:0.9.2
30 | ports:
31 | - "3773:3773"
32 | - "3772:3772"
33 | - "6627:6627"
34 | links:
35 | - zookeeper:zk
36 | - kafka:kafka
37 | supervisor:
38 | image: wurstmeister/storm-supervisor:0.9.2
39 | ports:
40 | - "8000:8000"
41 | links:
42 | - nimbus:nimbus
43 | - zookeeper:zk
44 | - kafka:kafka
45 | ui:
46 | image: wurstmeister/storm-ui:0.9.2
47 | ports:
48 | - "8080:8080"
49 | links:
50 | - nimbus:nimbus
51 | - zookeeper:zk
52 | kafka:
53 | image: wurstmeister/kafka:0.8.1
54 | ports:
55 | - "9092:9092"
56 | links:
57 | - zookeeper:zk
58 | environment:
59 | BROKER_ID: 1
60 | HOST_IP: 192.168.1.231
61 | PORT: 9092
62 | volumes:
63 | - /var/run/docker.sock:/var/run/docker.sock
64 | openfire:
65 | image: mdouglas/openfire
66 | ports:
67 | - "5222:5222"
68 | - "5223:5223"
69 | - "9091:9091"
70 | - "9090:9090"
71 | ```
72 | 启动docker-compose
73 | ```
74 | [root@docker2 log-kafka-storm]# docker-compose up -d
75 | Creating logkafkastorm_zookeeper_1...
76 | Creating logkafkastorm_openfire_1...
77 | Creating logkafkastorm_kafka_1...
78 | Creating logkafkastorm_nimbus_1...
79 | Creating logkafkastorm_ui_1...
80 | Creating logkafkastorm_supervisor_1...
81 | ```
82 | 启动完成后应该都是 Up 状态
83 | ```
84 | [root@docker2 log-kafka-storm]# docker-compose ps
85 | Name Command State Ports
86 | -----------------------------------------------------------------------------------------------------------------------------
87 | logkafkastorm_kafka_1 /bin/sh -c start-kafka.sh Up 0.0.0.0:9092->9092/tcp
88 | logkafkastorm_nimbus_1 /bin/sh -c /usr/bin/start- Up 0.0.0.0:3772->3772/tcp,
89 | ... 0.0.0.0:3773->3773/tcp,
90 | 0.0.0.0:6627->6627/tcp
91 | logkafkastorm_openfire_1 /start.sh Up 0.0.0.0:5222->5222/tcp,
92 | 0.0.0.0:5223->5223/tcp,
93 | 0.0.0.0:9090->9090/tcp,
94 | 0.0.0.0:9091->9091/tcp
95 | logkafkastorm_supervisor_1 /bin/sh -c /usr/bin/start- Up 6700/tcp, 6701/tcp, 6702/tcp,
96 | ... 6703/tcp,
97 | 0.0.0.0:8000->8000/tcp
98 | logkafkastorm_ui_1 /bin/sh -c /usr/bin/start- Up 0.0.0.0:8080->8080/tcp
99 | ...
100 | logkafkastorm_zookeeper_1 /opt/zookeeper-3.4.5/bin/z Up 0.0.0.0:2181->2181/tcp,
101 | ... 2888/tcp, 3888/tcp
102 | ```
103 | 打开浏览器查询Storm UI状态,查看 http://192.168.1.231:8080/ 应该看到下面的画面,没有报错就可以了。
104 | 
105 |
106 | 至此,docker算是启动完毕。
107 |
108 | ### 配置kafka
109 | 运行 start-kafka-shell.sh 脚本,主要是两个工作:
110 |
111 | - 1.进入kafka容器;
112 | - 2.创建topic名为 log-analysis
113 |
114 | ```
115 | [root@docker2 log-kafka-storm]# ./start-kafka-shell.sh
116 | root@4c7a5d233991:/# $KAFKA_HOME/bin/kafka-topics.sh --create --topic log-analysis --partitions 1 --zookeeper zk --replication-factor 1
117 | Created topic "log-analysis".
118 | ```
119 | 查看kafka的topic是否创建成功
120 | ```
121 | root@4c7a5d233991:/# $KAFKA_HOME/bin/kafka-topics.sh --describe --zookeeper zk
122 | Topic:log-analysis PartitionCount:1 ReplicationFactor:1 Configs:
123 | Topic: log-analysis Partition: 0 Leader: 9092 Replicas: 9092 Isr: 9092
124 | ```
125 | ### 配置Openfire
126 | 浏览器打开 http://192.168.1.231:9090/ ,首先是初始化,域使用你自己的,我们这里是 sectong.com 。删除 服务器证书,否则android和ios客户端登录不上。
127 | 
128 | 创建两个用户,邮箱用 @sectong.com 结尾:
129 |
130 | - 用户名 *storm* 密码 *storm* ,用于发送消息;
131 | - 用户名 *alarm* 密码 *alarm* ,用于接收消息;
132 |
133 | 下载一个windows版本的IM客户端,[Spark](http://www.igniterealtime.org/projects/spark/) windows版本安装后如下图所示:
134 |
135 | 
136 |
137 | # 编译程序
138 | ## Storm运行程序
139 | 编译Storm程序
140 | ```
141 | [root@docker2 log-kafka-storm]# mvn clean package
142 | ...此处省略过程
143 | [INFO] ------------------------------------------------------------------------
144 | [INFO] BUILD SUCCESS
145 | [INFO] ------------------------------------------------------------------------
146 | [INFO] Total time: 16.359 s
147 | [INFO] Finished at: 2015-09-25T23:53:29+08:00
148 | [INFO] Final Memory: 31M/373M
149 | [INFO] ------------------------------------------------------------------------
150 |
151 | [root@docker2 log-kafka-storm]# ll target/
152 | total 20M
153 | drwxr-xr-x+ 1 ppl None 0 Sep 25 23:53 archive-tmp/
154 | drwxr-xr-x+ 1 ppl None 0 Sep 25 23:53 classes/
155 | drwxr-xr-x+ 1 ppl None 0 Sep 25 23:53 generated-sources/
156 | -rwxr-xr-x 1 ppl None 20M Sep 25 23:53 log-kafka-storm-1.0-SNAPSHOT-jar-with-dependencies.jar
157 | -rwxr-xr-x 1 ppl None 17K Sep 25 23:53 log-kafka-storm-1.0-SNAPSHOT.jar
158 | drwxr-xr-x+ 1 ppl None 0 Sep 25 23:53 maven-archiver/
159 |
160 | ```
161 | 上传至Storm集群
162 | ```
163 | [root@docker2 log-kafka-storm]# storm jar ./target/log-kafka-storm-1.0-SNAPSHOT-jar-with-dependencies.jar com.log.kafka.storm.topology.LogAnalysisTopology 192.168.1.231 log-analysis-topology
164 |
165 | ...省略
166 | 36971 [main] INFO backtype.storm.StormSubmitter - Submitting topology log-analysis-topology in distributed mode with conf {"storm.xmpp.server":"192.168.1.231","nimbus.host":"192.168.1.231","storm.xmpp.password":"storm","topology.workers":3,"storm.zookeeper.port":2181,"storm.xmpp.user":"storm","storm.xmpp.to":"ahenrick@sectong.com","nimbus.thrift.port":6627,"storm.zookeeper.servers":["192.168.1.231"],"topology.max.spout.pending":5,"topology.trident.batch.emit.interval.millis":2000}
167 | 37663 [main] INFO backtype.storm.StormSubmitter - Finished submitting topology: log-analysis-topology
168 | ```
169 | jar包上传完毕,Storm UI 应该增加一个 Topology
170 |
171 | 
172 | 点击,进入
173 | 
174 |
175 | 到这里,Storm程序已经上传完毕。
176 |
177 | ## 日志写入kafka程序
178 |
179 | 进入 kafka-log-appender 目录,编译程序
180 | ```
181 | [root@docker2 kafka-log-appender]# mvn clean package
182 |
183 | ...省略
184 | [INFO] ------------------------------------------------------------------------
185 | [INFO] BUILD SUCCESS
186 | [INFO] ------------------------------------------------------------------------
187 | [INFO] Total time: 3.287 s
188 | [INFO] Finished at: 2015-09-26T00:13:35+08:00
189 | [INFO] Final Memory: 20M/180M
190 | [INFO] ------------------------------------------------------------------------
191 |
192 | [root@docker2 kafka-log-appender]# ll target/
193 | total 8.0K
194 | drwxr-xr-x+ 1 ppl None 0 Sep 26 00:13 classes/
195 | -rwxr-xr-x 1 ppl None 7.0K Sep 26 00:13 kafka-log-appender-1.0-SNAPSHOT.jar
196 | drwxr-xr-x+ 1 ppl None 0 Sep 26 00:13 maven-archiver/
197 | drwxr-xr-x+ 1 ppl None 0 Sep 26 00:13 maven-status/
198 | ```
199 |
200 | 运行jar包程序,向kafka发送日志内容。这里需要注意:运行此jar包的主机需要得到kafka容器的主机名和ip,写入hosts文件。
201 |
202 | ```
203 | [root@docker2 kafka-log-appender]# java -cp target/kafka-log-appender-1.0-SNAPSHOT.jar com.log.kafka.RogueApplication
204 |
205 | 913 [main] INFO kafka.client.ClientUtils$ - Fetching metadata from broker id:0,host:192.168.1.231,port:9092 with correlation id 0 for 1 topic(s) Set(log-analysis)
206 | 931 [main] INFO kafka.producer.SyncProducer - Connected to 192.168.1.231:9092 for producing
207 | 989 [main] INFO kafka.producer.SyncProducer - Disconnecting from 192.168.1.231:9092
208 | 1053 [main] INFO kafka.producer.SyncProducer - Connected to c470c1bea851:9092 for producing
209 | 796 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
210 | 1566 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
211 | 2069 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
212 | 2572 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
213 | 3074 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
214 | 3578 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
215 | 4080 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
216 | 4183 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
217 | 4286 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
218 | 4389 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
219 | 4494 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
220 | 4598 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
221 | 4702 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
222 | 4806 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
223 | 4912 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
224 | 5018 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
225 | 5137 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
226 | 5240 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
227 | 5343 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
228 | 5446 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
229 | 5550 [main] WARN com.log.kafka.RogueApplication - This is a warning (rapid state).
230 | 5653 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
231 | 6157 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
232 | 6660 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
233 | 7162 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
234 | 7664 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
235 | 8166 [main] WARN com.log.kafka.RogueApplication - This is a warning (slow state).
236 | ```
237 | 发送成功后,Storm UI 相应数字增加,证明已经运行成功
238 |
239 | 
240 |
241 | Spark IM 客户端也会收到相应的告警信息
242 |
243 | 
244 |
245 | Android手机下载 [Xabber](http://www.xabber.com/)客户端,iOS下载[Monal IM](http://monal.im),登录IM,可以实时收到推送告警消息
246 |
247 | 
248 |
249 | 谢谢观赏!
250 |
--------------------------------------------------------------------------------
/kafka-log-appender/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | com.kafka.log.appender
8 | kafka-log-appender
9 | 1.0-SNAPSHOT
10 |
11 |
12 | UTF-8
13 |
14 |
15 |
16 |
17 | org.apache.kafka
18 | kafka_2.9.2
19 | 0.8.1.1
20 |
21 |
22 | org.apache.zookeeper
23 | zookeeper
24 |
25 |
26 | log4j
27 | log4j
28 |
29 |
30 |
31 |
32 |
33 | org.slf4j
34 | slf4j-api
35 | 1.6.6
36 |
37 |
38 |
39 | org.slf4j
40 | log4j-over-slf4j
41 | 1.6.6
42 |
43 |
44 |
45 | ch.qos.logback
46 | logback-classic
47 | 1.1.2
48 |
49 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/kafka-log-appender/src/main/java/com/log/kafka/RogueApplication.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka;
2 |
3 | import org.slf4j.Logger;
4 | import org.slf4j.LoggerFactory;
5 |
6 | /**
7 | * Created by ahenrick on 7/30/14.
8 | */
9 | public class RogueApplication {
10 |
11 | private static final Logger LOG = LoggerFactory.getLogger(RogueApplication.class);
12 |
13 | public static void main(String[] args) throws Exception {
14 | int slowCount = 6;
15 | int fastCount = 15;
16 | // slow state
17 | for (int i = 0; i < slowCount; i++) {
18 | LOG.warn("This is a warning (slow state).");
19 | Thread.sleep(500);
20 | }
21 | // enter rapid state
22 | for (int i = 0; i < fastCount; i++) {
23 | LOG.warn("This is a warning (rapid state).");
24 | Thread.sleep(100);
25 | }
26 | // return to slow state
27 | for (int i = 0; i < slowCount; i++) {
28 | LOG.warn("This is a warning (slow state).");
29 | Thread.sleep(500);
30 | }
31 | }
32 | }
--------------------------------------------------------------------------------
/kafka-log-appender/src/main/java/com/log/kafka/appender/KafkaAppender.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.appender;
2 |
3 | /**
4 | * Created by ahenrick on 8/3/14.
5 | */
6 |
7 | import ch.qos.logback.classic.spi.ILoggingEvent;
8 | import ch.qos.logback.core.AppenderBase;
9 | import com.log.kafka.formatter.Formatter;
10 | import com.log.kafka.formatter.MessageFormatter;
11 | import kafka.javaapi.producer.Producer;
12 | import kafka.producer.KeyedMessage;
13 | import kafka.producer.ProducerConfig;
14 |
15 | import java.util.Properties;
16 |
17 | public class KafkaAppender extends AppenderBase {
18 |
19 | private String zookeeperHost;
20 | private Producer producer;
21 | private Formatter formatter;
22 | private String brokerList;
23 | private String topic;
24 |
25 | public String getTopic() {
26 | return topic;
27 | }
28 |
29 | public void setTopic(String topic) {
30 | this.topic = topic;
31 | }
32 |
33 | public void setBrokerList(String s) {
34 | this.brokerList = s;
35 | }
36 |
37 | public String getBrokerList() {
38 | return this.brokerList;
39 | }
40 |
41 | public String getZookeeperHost() {
42 | return zookeeperHost;
43 | }
44 |
45 | public void setZookeeperHost(String zookeeperHost) {
46 | this.zookeeperHost = zookeeperHost;
47 | }
48 |
49 | public Formatter getFormatter() {
50 | return formatter;
51 | }
52 |
53 | public void setFormatter(Formatter formatter) {
54 | this.formatter = formatter;
55 | }
56 |
57 | @Override
58 | public void start() {
59 | if (this.formatter == null) {
60 | this.formatter = new MessageFormatter();
61 | }
62 | super.start();
63 | Properties props = new Properties();
64 | props.put("zk.connect", this.zookeeperHost);
65 | props.put("metadata.broker.list", this.brokerList);
66 | props.put("serializer.class", "kafka.serializer.StringEncoder");
67 | ProducerConfig config = new ProducerConfig(props);
68 | this.producer = new Producer(config);
69 | }
70 |
71 | @Override
72 | public void stop() {
73 | super.stop();
74 | this.producer.close();
75 | }
76 |
77 | @Override
78 | protected void append(ILoggingEvent event) {
79 | String payload = this.formatter.format(event);
80 | this.producer.send(new KeyedMessage(getTopic(), payload));
81 | }
82 |
83 | }
--------------------------------------------------------------------------------
/kafka-log-appender/src/main/java/com/log/kafka/formatter/Formatter.java:
--------------------------------------------------------------------------------
package com.log.kafka.formatter;

/**
 * Created by ahenrick on 8/3/14.
 */
import ch.qos.logback.classic.spi.ILoggingEvent;

/**
 * Strategy for converting a Logback logging event into the string payload
 * that KafkaAppender publishes to Kafka.
 */
public interface Formatter {
    /** Returns the Kafka payload to send for the given logging event. */
    String format(ILoggingEvent event);
}
--------------------------------------------------------------------------------
/kafka-log-appender/src/main/java/com/log/kafka/formatter/JsonFormatter.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.formatter;
2 |
3 | import ch.qos.logback.classic.spi.ILoggingEvent;
4 |
5 | /**
6 | * Created by ahenrick on 8/3/14.
7 | */
8 | public class JsonFormatter implements Formatter {
9 | private static final String QUOTE = "\"";
10 | private static final String COLON = ":";
11 | private static final String COMMA = ",";
12 |
13 | private boolean expectJson = false;
14 |
15 | public String format(ILoggingEvent event) {
16 | StringBuilder sb = new StringBuilder();
17 | sb.append("{");
18 | fieldName("level", sb);
19 | quote(event.getLevel().levelStr, sb);
20 | sb.append(COMMA);
21 | fieldName("logger", sb);
22 | quote(event.getLoggerName(), sb);
23 | sb.append(COMMA);
24 | fieldName("timestamp", sb);
25 | sb.append(event.getTimeStamp());
26 | sb.append(COMMA);
27 | fieldName("message", sb);
28 | if (this.expectJson) {
29 | sb.append(event.getFormattedMessage());
30 | } else {
31 | quote(event.getFormattedMessage(), sb);
32 | }
33 |
34 | sb.append("}");
35 | return sb.toString();
36 | }
37 |
38 | private static void fieldName(String name, StringBuilder sb) {
39 | quote(name, sb);
40 | sb.append(COLON);
41 | }
42 |
43 | private static void quote(String value, StringBuilder sb) {
44 | sb.append(QUOTE);
45 | sb.append(value);
46 | sb.append(QUOTE);
47 | }
48 |
49 | public boolean isExpectJson() {
50 | return expectJson;
51 | }
52 |
53 | public void setExpectJson(boolean expectJson) {
54 | this.expectJson = expectJson;
55 | }
56 | }
--------------------------------------------------------------------------------
/kafka-log-appender/src/main/java/com/log/kafka/formatter/MessageFormatter.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.formatter;
2 |
3 | import ch.qos.logback.classic.spi.ILoggingEvent;
4 |
5 | /**
6 | * Created by ahenrick on 8/3/14.
7 | */
8 | public class MessageFormatter implements Formatter {
9 |
10 | public String format(ILoggingEvent event) {
11 | return event.getFormattedMessage();
12 | }
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/kafka-log-appender/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
6 |
7 | %-4relative [%thread] %-5level %logger{35} - %msg %n
8 |
9 |
10 |
12 | log-analysis
13 | 192.168.1.231:2181
14 | 192.168.1.231:9092
15 |
16 |
17 |
20 | false
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/log-kafka-storm/docker-compose.yml:
--------------------------------------------------------------------------------
1 | zookeeper:
2 | image: jplock/zookeeper
3 | ports:
4 | - "2181:2181"
5 | nimbus:
6 | image: wurstmeister/storm-nimbus:0.9.2
7 | ports:
8 | - "3773:3773"
9 | - "3772:3772"
10 | - "6627:6627"
11 | links:
12 | - zookeeper:zk
13 | - kafka:kafka
14 | supervisor:
15 | image: wurstmeister/storm-supervisor:0.9.2
16 | ports:
17 | - "8000:8000"
18 | links:
19 | - nimbus:nimbus
20 | - zookeeper:zk
21 | - kafka:kafka
22 | ui:
23 | image: wurstmeister/storm-ui:0.9.2
24 | ports:
25 | - "8080:8080"
26 | links:
27 | - nimbus:nimbus
28 | - zookeeper:zk
29 | kafka:
30 | image: wurstmeister/kafka:0.8.1
31 | ports:
32 | - "9092:9092"
33 | links:
34 | - zookeeper:zk
35 | environment:
36 | BROKER_ID: 1
37 | HOST_IP: 192.168.1.231
38 | PORT: 9092
39 | volumes:
40 | - /var/run/docker.sock:/var/run/docker.sock
41 | openfire:
42 | image: mdouglas/openfire
43 | ports:
44 | - "5222:5222"
45 | - "5223:5223"
46 | - "9091:9091"
47 | - "9090:9090"
48 |
--------------------------------------------------------------------------------
/log-kafka-storm/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | com.log.kafka.exmaple
8 | log-kafka-storm
9 | 1.0-SNAPSHOT
10 | jar
11 | log-kafka-storm
12 |
13 |
14 | UTF-8
15 |
16 |
17 |
18 | clojars
19 | https://clojars.org/repo/
20 |
21 | true
22 |
23 |
24 | true
25 |
26 |
27 |
28 |
29 |
30 |
31 | org.apache.storm
32 | storm-core
33 | 0.9.2-incubating
34 | provided
35 |
36 |
37 | org.apache.storm
38 | storm-kafka
39 | 0.9.2-incubating
40 |
41 |
42 | org.apache.kafka
43 | kafka_2.9.2
44 | 0.8.1.1
45 |
46 |
47 | org.apache.zookeeper
48 | zookeeper
49 |
50 |
51 | log4j
52 | log4j
53 |
54 |
55 |
56 |
57 | com.hazelcast
58 | hazelcast
59 | 3.0
60 |
61 |
62 | commons-collections
63 | commons-collections
64 | 3.2.1
65 |
66 |
67 | jivesoftware
68 | smack
69 | 3.1.0
70 |
71 |
72 | com.google.guava
73 | guava
74 | 13.0.1
75 |
76 |
77 |
78 |
79 |
80 | maven-assembly-plugin
81 |
82 |
83 | jar-with-dependencies
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 | make-assembly
94 | package
95 |
96 | single
97 |
98 |
99 |
100 |
101 |
102 | org.apache.maven.plugins
103 | maven-compiler-plugin
104 | 2.5
105 |
106 | 1.6
107 | 1.6
108 | UTF-8
109 |
110 |
111 |
112 |
113 |
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/other/TestXMPP.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.other;
2 |
3 | import org.jivesoftware.smack.ConnectionConfiguration;
4 | import org.jivesoftware.smack.XMPPConnection;
5 | import org.jivesoftware.smack.XMPPException;
6 | import org.jivesoftware.smack.packet.Message;
7 |
8 | public class TestXMPP {
9 |
10 | public static void main(String args[]) throws Exception {
11 |
12 | ConnectionConfiguration config = new ConnectionConfiguration("192.168.1.231");
13 | XMPPConnection xmppConnection = new XMPPConnection(config);
14 | try {
15 | xmppConnection.connect();
16 | xmppConnection.login("storm", "storm");
17 | Message msg = new Message("alarm@sectong.com", Message.Type.normal);
18 | msg.setBody("Test Message");
19 | xmppConnection.sendPacket(msg);
20 | xmppConnection.disconnect();
21 | } catch (XMPPException e) {
22 | e.printStackTrace();
23 | }
24 | }
25 | }
26 |
27 |
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/common/EWMA.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.common;
2 |
3 | import java.io.Serializable;
4 |
5 | /**
6 | * Created by ahenrick on 7/30/14.
7 | */
8 | public class EWMA implements Serializable {
9 |
10 | public static enum Time {
11 | MILLISECONDS(1), SECONDS(1000), MINUTES(SECONDS.getTime() * 60), HOURS(MINUTES.getTime() * 60), DAYS(HOURS
12 | .getTime() * 24), WEEKS(DAYS.getTime() * 7);
13 |
14 | private long millis;
15 |
16 | private Time(long millis) {
17 | this.millis = millis;
18 | }
19 |
20 | public long getTime() {
21 | return this.millis;
22 | }
23 | }
24 |
25 | // Unix load average-style alpha constants
26 | public static final double ONE_MINUTE_ALPHA = 1 - Math.exp(-5d / 60d / 1d);
27 | public static final double FIVE_MINUTE_ALPHA = 1 - Math.exp(-5d / 60d / 5d);
28 | public static final double FIFTEEN_MINUTE_ALPHA = 1 - Math.exp(-5d / 60d / 15d);
29 |
30 | private long window;
31 | private long alphaWindow;
32 | private long last;
33 | private double average;
34 | private double alpha = -1D;
35 | private boolean sliding = false;
36 |
37 | public EWMA() {
38 | }
39 |
40 | public EWMA sliding(double count, Time time) {
41 | return this.sliding((long) (time.getTime() * count));
42 | }
43 |
44 | public EWMA sliding(long window) {
45 | this.sliding = true;
46 | this.window = window;
47 | return this;
48 | }
49 |
50 | public EWMA withAlpha(double alpha) {
51 | if (!(alpha > 0.0D && alpha <= 1.0D)) {
52 | throw new IllegalArgumentException("Alpha must be between 0.0 and 1.0");
53 | }
54 | this.alpha = alpha;
55 | return this;
56 | }
57 |
58 | public EWMA withAlphaWindow(long alphaWindow) {
59 | this.alpha = -1;
60 | this.alphaWindow = alphaWindow;
61 | return this;
62 | }
63 |
64 | public EWMA withAlphaWindow(double count, Time time) {
65 | return this.withAlphaWindow((long) (time.getTime() * count));
66 | }
67 |
68 | public void mark() {
69 | mark(System.currentTimeMillis());
70 | }
71 |
72 | public synchronized void mark(long time) {
73 | if (this.sliding) {
74 | if (time - this.last > this.window) {
75 | // reset the sliding window
76 | this.last = 0;
77 | }
78 | }
79 | if (this.last == 0) {
80 | this.average = 0;
81 | this.last = time;
82 | }
83 | long diff = time - this.last;
84 | double alpha = this.alpha != -1.0 ? this.alpha : Math.exp(-1.0 * ((double) diff / this.alphaWindow));
85 | this.average = (1.0 - alpha) * diff + alpha * this.average;
86 | this.last = time;
87 | }
88 |
89 | public double getAverage() {
90 | return this.average;
91 | }
92 |
93 | public double getAverageIn(Time time) {
94 | return this.average == 0.0 ? this.average : this.average / time.getTime();
95 | }
96 |
97 | public double getAverageRatePer(Time time) {
98 | return this.average == 0.0 ? this.average : time.getTime() / this.average;
99 | }
100 |
101 | }
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/common/NotifyMessageMapper.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.common;
2 |
3 | import com.log.kafka.storm.function.MessageMapper;
4 | import storm.trident.tuple.TridentTuple;
5 |
6 | import java.util.Date;
7 |
8 | /**
9 | * Created by ahenrick on 7/30/14.
10 | */
11 | public class NotifyMessageMapper implements MessageMapper {
12 |
13 | public String toMessageBody(TridentTuple tuple) {
14 | StringBuilder sb = new StringBuilder();
15 | sb.append("On " + new Date(tuple.getLongByField("timestamp")) + " ");
16 | sb.append("the application \"" + tuple.getStringByField("logger") + "\" ");
17 | sb.append("changed alert state based on a threshold of " + tuple.getDoubleByField("threshold") + ".\n");
18 | sb.append("The last value was " + tuple.getDoubleByField("average") + "\n");
19 | sb.append("The last message was \"" + tuple.getStringByField("message") + "\"");
20 | return sb.toString();
21 | }
22 | }
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/filter/BooleanFilter.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.filter;
2 |
3 | import storm.trident.operation.BaseFilter;
4 | import storm.trident.tuple.TridentTuple;
5 |
6 | /**
7 | * Created by ahenrick on 7/30/14.
8 | */
9 | public class BooleanFilter extends BaseFilter {
10 | public boolean isKeep(TridentTuple tuple) {
11 | return tuple.getBoolean(0);
12 | }
13 | }
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/function/JsonProjectFunction.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.function;
2 |
3 | import backtype.storm.tuple.Fields;
4 | import backtype.storm.tuple.Values;
5 | import org.json.simple.JSONValue;
6 | import storm.trident.operation.BaseFunction;
7 | import storm.trident.operation.TridentCollector;
8 | import storm.trident.tuple.TridentTuple;
9 |
10 | import java.util.Map;
11 |
12 | /**
13 | * Created by ahenrick on 7/30/14.
14 | */
15 | public class JsonProjectFunction extends BaseFunction {
16 |
17 | private Fields fields;
18 |
19 | public JsonProjectFunction(Fields fields) {
20 | this.fields = fields;
21 | }
22 |
23 | public void execute(TridentTuple tuple, TridentCollector collector) {
24 | String json = tuple.getString(0);
25 | Map map = (Map) JSONValue.parse(json);
26 | Values values = new Values();
27 | for (int i = 0; i < this.fields.size(); i++) {
28 | values.add(map.get(this.fields.get(i)));
29 | }
30 | collector.emit(values);
31 | }
32 |
33 | }
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/function/MessageMapper.java:
--------------------------------------------------------------------------------
package com.log.kafka.storm.function;

import storm.trident.tuple.TridentTuple;

import java.io.Serializable;

/**
 * Created by ahenrick on 7/30/14.
 */
/**
 * Strategy for turning a Trident tuple into the text body of an outgoing
 * notification (used by XMPPFunction). Serializable because Storm ships
 * function instances to worker nodes.
 */
public interface MessageMapper extends Serializable {
    /** Returns the notification body derived from the given tuple. */
    public String toMessageBody(TridentTuple tuple);
}
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/function/MovingAverageFunction.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.function;
2 |
3 | import backtype.storm.tuple.Values;
4 | import com.log.kafka.storm.common.EWMA;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import storm.trident.operation.BaseFunction;
8 | import storm.trident.operation.TridentCollector;
9 | import storm.trident.tuple.TridentTuple;
10 | import static com.log.kafka.storm.common.EWMA.Time;
11 |
12 | /**
13 | * Created by ahenrick on 7/30/14.
14 | */
15 | public class MovingAverageFunction extends BaseFunction {
16 | private static final Logger LOG = LoggerFactory.getLogger(BaseFunction.class);
17 |
18 | private EWMA ewma;
19 | private Time emitRatePer;
20 |
21 | public MovingAverageFunction(EWMA ewma, Time emitRatePer) {
22 | this.ewma = ewma;
23 | this.emitRatePer = emitRatePer;
24 | }
25 |
26 | public void execute(TridentTuple tuple, TridentCollector collector) {
27 | this.ewma.mark(tuple.getLong(0));
28 | LOG.debug("Rate: {}", this.ewma.getAverageRatePer(this.emitRatePer));
29 | collector.emit(new Values(this.ewma.getAverageRatePer(this.emitRatePer)));
30 | }
31 | }
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/function/ThresholdFilterFunction.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.function;
2 |
3 | import backtype.storm.tuple.Values;
4 | import org.slf4j.Logger;
5 | import org.slf4j.LoggerFactory;
6 | import storm.trident.operation.BaseFunction;
7 | import storm.trident.operation.TridentCollector;
8 | import storm.trident.tuple.TridentTuple;
9 |
10 | /**
11 | * Created by ahenrick on 7/30/14.
12 | */
13 | public class ThresholdFilterFunction extends BaseFunction {
14 | private static final Logger LOG = LoggerFactory.getLogger(ThresholdFilterFunction.class);
15 |
16 | private static enum State {
17 | BELOW, ABOVE;
18 | }
19 |
20 | private State last = State.BELOW;
21 | private double threshold;
22 |
23 | public ThresholdFilterFunction(double threshold) {
24 | this.threshold = threshold;
25 | }
26 |
27 | public void execute(TridentTuple tuple, TridentCollector collector) {
28 | double val = tuple.getDouble(0);
29 | State newState = val < this.threshold ? State.BELOW : State.ABOVE;
30 | boolean stateChange = this.last != newState;
31 | collector.emit(new Values(stateChange, threshold));
32 | this.last = newState;
33 | LOG.debug("State change? --> {}", stateChange);
34 | }
35 | }
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/function/XMPPFunction.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.function;
2 |
3 | import org.jivesoftware.smack.ConnectionConfiguration;
4 | import org.jivesoftware.smack.XMPPConnection;
5 | import org.jivesoftware.smack.XMPPException;
6 | import org.jivesoftware.smack.packet.Message;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 | import storm.trident.operation.BaseFunction;
10 | import storm.trident.operation.TridentCollector;
11 | import storm.trident.operation.TridentOperationContext;
12 | import storm.trident.tuple.TridentTuple;
13 |
14 | import java.util.Map;
15 |
16 | /**
17 | * Created by ahenrick on 7/30/14.
18 | */
19 | public class XMPPFunction extends BaseFunction {
20 | private static final Logger LOG = LoggerFactory.getLogger(XMPPFunction.class);
21 |
22 | public static final String XMPP_TO = "storm.xmpp.to";
23 | public static final String XMPP_USER = "storm.xmpp.user";
24 | public static final String XMPP_PASSWORD = "storm.xmpp.password";
25 | public static final String XMPP_SERVER = "storm.xmpp.server";
26 |
27 | private XMPPConnection xmppConnection;
28 | private String to;
29 | private MessageMapper mapper;
30 |
31 | public XMPPFunction(MessageMapper mapper) {
32 | this.mapper = mapper;
33 | }
34 |
35 | @Override
36 | public void prepare(Map conf, TridentOperationContext context) {
37 | LOG.debug("Prepare: {}", conf);
38 | super.prepare(conf, context);
39 | this.to = (String) conf.get(XMPP_TO);
40 | ConnectionConfiguration config = new ConnectionConfiguration((String) conf.get(XMPP_SERVER));
41 | this.xmppConnection = new XMPPConnection(config);
42 | try {
43 | this.xmppConnection.connect();
44 | this.xmppConnection.login((String) conf.get(XMPP_USER), (String) conf.get(XMPP_PASSWORD));
45 | } catch (XMPPException e) {
46 | LOG.warn("Error initializing XMPP Channel", e);
47 | }
48 | }
49 |
50 | public void execute(TridentTuple tuple, TridentCollector collector) {
51 | Message msg = new Message(this.to, Message.Type.normal);
52 | msg.setBody(this.mapper.toMessageBody(tuple));
53 | this.xmppConnection.sendPacket(msg);
54 |
55 | }
56 |
57 | }
58 |
--------------------------------------------------------------------------------
/log-kafka-storm/src/main/java/com/log/kafka/storm/topology/LogAnalysisTopology.java:
--------------------------------------------------------------------------------
1 | package com.log.kafka.storm.topology;
2 |
3 | import backtype.storm.Config;
4 | import backtype.storm.LocalCluster;
5 | import backtype.storm.StormSubmitter;
6 | import backtype.storm.generated.StormTopology;
7 | import backtype.storm.spout.SchemeAsMultiScheme;
8 | import backtype.storm.tuple.Fields;
9 | import com.log.kafka.storm.common.EWMA;
10 | import com.log.kafka.storm.common.NotifyMessageMapper;
11 | import com.log.kafka.storm.filter.BooleanFilter;
12 | import com.log.kafka.storm.function.JsonProjectFunction;
13 | import com.log.kafka.storm.function.MovingAverageFunction;
14 | import com.log.kafka.storm.function.ThresholdFilterFunction;
15 | import com.log.kafka.storm.function.XMPPFunction;
16 | import storm.kafka.BrokerHosts;
17 | import storm.kafka.StringScheme;
18 | import storm.kafka.ZkHosts;
19 | import storm.kafka.trident.OpaqueTridentKafkaSpout;
20 | import storm.kafka.trident.TridentKafkaConfig;
21 | import storm.trident.Stream;
22 | import storm.trident.TridentTopology;
23 |
24 | import java.util.Arrays;
25 |
26 | public class LogAnalysisTopology {
27 |
28 | private final BrokerHosts brokerHosts;
29 |
30 | public LogAnalysisTopology(String kafkaZookeeper) {
31 | brokerHosts = new ZkHosts(kafkaZookeeper);
32 | }
33 |
34 | public StormTopology buildTopology() {
35 |
36 | TridentTopology topology = new TridentTopology();
37 |
38 | TridentKafkaConfig kafkaConfig = new TridentKafkaConfig(brokerHosts, "log-analysis","storm");
39 | kafkaConfig.scheme = new SchemeAsMultiScheme(new StringScheme());
40 | kafkaConfig.forceFromStart = true;
41 |
42 | OpaqueTridentKafkaSpout kafkaSpout = new OpaqueTridentKafkaSpout(kafkaConfig);
43 |
44 | Stream spoutStream = topology.newStream("kafka-stream", kafkaSpout);
45 |
46 | Fields jsonFields = new Fields("level", "timestamp", "message", "logger");
47 | Stream parsedStream = spoutStream.each(new Fields("str"), new JsonProjectFunction(jsonFields), jsonFields);
48 |
49 | // drop the unparsed JSON to reduce tuple size
50 | parsedStream = parsedStream.project(jsonFields);
51 |
52 | EWMA ewma = new EWMA().sliding(1.0, EWMA.Time.MINUTES).withAlpha(EWMA.ONE_MINUTE_ALPHA);
53 | Stream averageStream = parsedStream.each(new Fields("timestamp"),
54 | new MovingAverageFunction(ewma, EWMA.Time.MINUTES), new Fields("average"));
55 |
56 | ThresholdFilterFunction tff = new ThresholdFilterFunction(50D);
57 | Stream thresholdStream = averageStream.each(new Fields("average"), tff, new Fields("change", "threshold"));
58 |
59 | Stream filteredStream = thresholdStream.each(new Fields("change"), new BooleanFilter());
60 |
61 | filteredStream.each(filteredStream.getOutputFields(), new XMPPFunction(new NotifyMessageMapper()), new Fields());
62 |
63 | return topology.build();
64 | }
65 |
66 | public static void main(String[] args) throws Exception {
67 |
68 | String dockerIp = args[0];
69 |
70 | Config conf = new Config();
71 | conf.put(XMPPFunction.XMPP_USER, "storm");
72 | conf.put(XMPPFunction.XMPP_PASSWORD, "storm");
73 | conf.put(XMPPFunction.XMPP_SERVER, dockerIp);
74 | conf.put(XMPPFunction.XMPP_TO, "alarm@sectong.com");
75 | conf.setMaxSpoutPending(5);
76 | conf.put(Config.TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS, 2000);
77 |
78 | if (args.length > 1) {
79 | LogAnalysisTopology logAnalysisTopology = new LogAnalysisTopology(args[0]+":2181");
80 | conf.setNumWorkers(3);
81 | conf.put(Config.NIMBUS_HOST, dockerIp);
82 | conf.put(Config.NIMBUS_THRIFT_PORT, 6627);
83 | conf.put(Config.STORM_ZOOKEEPER_PORT, 2181);
84 | conf.put(Config.STORM_ZOOKEEPER_SERVERS, Arrays.asList(dockerIp));
85 | StormSubmitter.submitTopology(args[1], conf, logAnalysisTopology.buildTopology());
86 | } else {
87 | LogAnalysisTopology logAnalysisTopology = new LogAnalysisTopology(dockerIp);
88 | LocalCluster cluster = new LocalCluster();
89 | cluster.submitTopology("log-analysis", conf, logAnalysisTopology.buildTopology());
90 | }
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/log-kafka-storm/start-kafka-shell.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Open an interactive shell inside a Kafka 0.8.1 container, linked (as "zk")
# to the zookeeper container started by docker-compose in this project.
docker run --link logkafkastorm_zookeeper_1:zk -it wurstmeister/kafka:0.8.1 /bin/bash
--------------------------------------------------------------------------------