├── LICENSE
├── README.md
├── pom.xml
└── src
    └── main
        ├── filters
        │   ├── log-dev.properties
        │   ├── log-product.properties
        │   └── log-test.properties
        ├── java
        │   ├── com
        │   │   └── xuele
        │   │       └── log
        │   │           └── send
        │   │               └── kafka
        │   │                   ├── KafkaAppender.java
        │   │                   └── formatter
        │   │                       ├── Formatter.java
        │   │                       ├── JsonFormatter.java
        │   │                       └── MessageFormatter.java
        │   └── test
        │       └── SendTest.java
        └── resources
            ├── conf.properties
            └── logback.xml

/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2018 qindongliang

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
Collects application logs in real time and ships them to Kafka.

Command for viewing the data in Kafka:

````
kafka-console-consumer --zookeeper 192.168.10.5:2181 --topic temp_log_kafka
````

(1) JSON messages as they arrive in Kafka:
````json
{"method":"main","level":"INFO","line":"16","message":"es数据:1=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:41"}
{"method":"main","level":"INFO","line":"16","message":"es数据:2=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:41"}
{"method":"main","level":"INFO","line":"16","message":"es数据:3=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:41"}
{"method":"main","level":"INFO","line":"16","message":"es数据:4=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:42"}
{"method":"main","level":"INFO","line":"16","message":"es数据:5=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:42"}
{"method":"main","level":"INFO","line":"16","message":"es数据:6=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:42"}
{"method":"main","level":"INFO","line":"16","message":"es数据:7=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:42"}
{"method":"main","level":"INFO","line":"16","message":"es数据:8=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:42"}
{"method":"main","level":"INFO","line":"16","message":"es数据:9=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:42"}
{"method":"main","level":"INFO","line":"16","message":"es数据:10=>100","class":"test.SendTest","timestamp":"2017-02-07 16:52:42"}
````
(2) Plain-text messages as they arrive in Kafka:
````
hadoop数据:350
hadoop数据:351
hadoop数据:352
hadoop数据:353
hadoop数据:354
hadoop数据:355
hadoop数据:356
hadoop数据:357
hadoop数据:358
hadoop数据:359
hadoop数据:360
````

## Blog links

(5) [iteye (all posts before May 2018, abandoned afterwards)]()

## My WeChat public account (woshigcs)

If you have questions, follow my public account and leave a message.

![image](https://github.com/qindongliang/answer_sheet_scan/blob/master/imgs/gcs.jpg)
--------------------------------------------------------------------------------
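The console command above uses the old Zookeeper-based consumer, which matches the Kafka 0.9 client this project builds against. To inspect the topic from code instead, a minimal consumer might look like the sketch below (the broker address and group id are illustrative placeholders, not values taken from this repo):

````java
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class TopicCheck {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.10.5:9092"); // placeholder broker
        props.put("group.id", "log-topic-check");            // hypothetical consumer group
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("temp_log_kafka"));
        while (true) {
            // Each record value is one log line, formatted by the configured Formatter.
            ConsumerRecords<String, String> records = consumer.poll(1000);
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
}
````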
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.xuele.bigdata</groupId>
    <artifactId>log_to_kafka</artifactId>
    <version>1.0.0-SNAPSHOT</version>

    <properties>
        <scala.version>2.11.8</scala.version>
        <kafka.version>0.9.0.0</kafka.version>
        <guava.version>12.0</guava.version>
        <joda-time.version>2.9.4</joda-time.version>
        <fast.json>1.2.31</fast.json>
        <logback.version>1.2.13</logback.version>
        <json-simple.version>1.1.1</json-simple.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
            <version>${json-simple.version}</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>${logback.version}</version>
        </dependency>
        <dependency>
            <groupId>joda-time</groupId>
            <artifactId>joda-time</artifactId>
            <version>${joda-time.version}</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>${fast.json}</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>${guava.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>${kafka.version}</version>
        </dependency>
    </dependencies>

    <build>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>net.alchim31.maven</groupId>
                    <artifactId>scala-maven-plugin</artifactId>
                    <version>3.2.1</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-compiler-plugin</artifactId>
                    <version>2.0.2</version>
                </plugin>
            </plugins>
        </pluginManagement>
        <plugins>
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <id>scala-compile-first</id>
                        <phase>process-resources</phase>
                        <goals>
                            <goal>add-source</goal>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>scala-test-compile</id>
                        <phase>process-test-resources</phase>
                        <goals>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <executions>
                    <execution>
                        <phase>compile</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
        </plugins>

        <filters>
            <filter>src/main/filters/log-${build.profile.id}.properties</filter>
        </filters>
        <resources>
            <resource>
                <directory>src/main/resources</directory>
                <includes>
                    <include>**/*</include>
                </includes>
                <filtering>true</filtering>
            </resource>
        </resources>
    </build>

    <profiles>
        <profile>
            <id>dev</id>
            <activation>
                <activeByDefault>true</activeByDefault>
            </activation>
            <properties>
                <build.profile.id>dev</build.profile.id>
            </properties>
        </profile>
        <profile>
            <id>test</id>
            <properties>
                <build.profile.id>test</build.profile.id>
            </properties>
        </profile>
        <profile>
            <id>product</id>
            <properties>
                <build.profile.id>product</build.profile.id>
            </properties>
        </profile>
    </profiles>
</project>
--------------------------------------------------------------------------------
/src/main/filters/log-dev.properties:
--------------------------------------------------------------------------------
kafka.brokers=192.168.201.6:9092,192.168.201.7:9092,192.168.201.8:9092
kafka.log.topic=temp_log_kafka
kafka.log.write.timeout=3000
kafka.syncSend=true
--------------------------------------------------------------------------------
/src/main/filters/log-product.properties:
--------------------------------------------------------------------------------
kafka.brokers=192.168.201.6:9092,192.168.201.7:9092,192.168.201.8:9092
kafka.log.topic=temp_log_kafka
kafka.log.write.timeout=3000
kafka.syncSend=true
--------------------------------------------------------------------------------
/src/main/filters/log-test.properties:
--------------------------------------------------------------------------------
kafka.brokers=192.168.201.6:9092,192.168.201.7:9092,192.168.201.8:9092
kafka.log.topic=temp_log_kafka
kafka.log.write.timeout=3000
kafka.syncSend=true
--------------------------------------------------------------------------------
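The three filter files above feed Maven resource filtering: the profile selected with `-P` at build time decides which `log-<profile>.properties` supplies the values for the `${...}` placeholders under `src/main/resources`. A small sketch to confirm which values a packaged build actually picked up (`ConfCheck` is a made-up class name, not part of this repo):

````java
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class ConfCheck {
    public static void main(String[] args) throws IOException {
        Properties conf = new Properties();
        // conf.properties ships with ${...} placeholders; after e.g. `mvn package -P product`,
        // the copy on the classpath contains the values from log-product.properties.
        try (InputStream in = ConfCheck.class.getResourceAsStream("/conf.properties")) {
            conf.load(in);
        }
        System.out.println("brokers = " + conf.getProperty("kafka.brokers"));
        System.out.println("topic   = " + conf.getProperty("kafka.log.topic"));
    }
}
````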
/src/main/java/com/xuele/log/send/kafka/KafkaAppender.java:
--------------------------------------------------------------------------------
package com.xuele.log.send.kafka;

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
import com.xuele.log.send.kafka.formatter.Formatter;
import com.xuele.log.send.kafka.formatter.JsonFormatter;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;
import java.util.concurrent.Future;

/**
 * Logback appender that publishes every logging event to a Kafka topic.
 *
 * Created by QinDongLiang on 2017/1/9.
 */
public class KafkaAppender extends AppenderBase<ILoggingEvent> {

    private static final Logger log = LoggerFactory.getLogger(KafkaAppender.class);

    private Formatter formatter = new JsonFormatter();
    private boolean logToSystemOut = false; // stored for configuration; append() does not currently echo to System.out
    private String kafkaProducerProperties; // logged at startup, not merged into the producer config
    private String topic;
    private String brokers;
    private KafkaProducer<String, String> producer;
    private String timeout;
    private boolean syncSend = true;

    @Override
    public void start() {
        super.start();
        log.info("Starting KafkaAppender...");
        Properties props = new Properties();
        try {
            props.put("bootstrap.servers", brokers);
            // Apply the single configured timeout to every producer timeout setting.
            props.put("timeout.ms", timeout);
            props.put("request.timeout.ms", timeout);
            props.put("metadata.fetch.timeout.ms", timeout);

            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

            producer = new KafkaProducer<>(props);
        } catch (Exception e) {
            log.error("Failed to initialize KafkaAppender: {}", e.getMessage(), e);
        }
        log.info("kafkaProducerProperties = {}", kafkaProducerProperties);
        log.info("Kafka Producer Properties = {}", props);
    }

    @Override
    public void stop() {
        super.stop();
        log.info("Stopping KafkaAppender...");
        if (producer != null) {
            producer.close();
        }
    }

    @Override
    protected void append(ILoggingEvent event) {
        String message = this.formatter.format(event);
        try {
            ProducerRecord<String, String> record = new ProducerRecord<>(topic, message);
            Future<RecordMetadata> future = producer.send(record);
            if (syncSend) {
                // Block until the broker acknowledges the record, so log lines
                // are not lost if the JVM exits right after logging.
                future.get();
            }
        } catch (Exception e) {
            // Report via System.out rather than the logging framework:
            // logging a failure from inside the appender would recurse.
            System.out.println("KafkaAppender: Exception sending message: '" + e + " : " + e.getMessage() + "'.");
            e.printStackTrace();
        }
    }

    public Formatter getFormatter() {
        return formatter;
    }

    public void setFormatter(Formatter formatter) {
        this.formatter = formatter;
    }

    public String getTopic() {
        return topic;
    }

    public void setTopic(String topic) {
        this.topic = topic;
    }

    public boolean isSyncSend() {
        return syncSend;
    }

    public void setSyncSend(boolean syncSend) {
        this.syncSend = syncSend;
    }

    public String getLogToSystemOut() {
        return logToSystemOut + "";
    }

    public void setLogToSystemOut(String logToSystemOutString) {
        this.logToSystemOut = "true".equalsIgnoreCase(logToSystemOutString);
    }

    public String getKafkaProducerProperties() {
        return kafkaProducerProperties;
    }

    public void setKafkaProducerProperties(String kafkaProducerProperties) {
        this.kafkaProducerProperties = kafkaProducerProperties;
    }

    public String getBrokers() {
        return brokers;
    }

    public void setBrokers(String brokers) {
        this.brokers = brokers;
    }

    public String getTimeout() {
        return timeout;
    }

    public void setTimeout(String timeout) {
        this.timeout = timeout;
    }
}
--------------------------------------------------------------------------------
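With `syncSend=true`, every logging call blocks on `future.get()` until the broker acknowledges the record, which favors durability over throughput. The same kafka-clients API also accepts a callback, keeping the send asynchronous while still surfacing failures. A sketch of that variant, not part of this repo (`AsyncSendSketch` is a hypothetical helper):

````java
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

// Hypothetical alternative to the blocking branch in append(): the send stays
// asynchronous, but failures are reported instead of being silently dropped.
class AsyncSendSketch {
    static void send(KafkaProducer<String, String> producer, String topic, String message) {
        producer.send(new ProducerRecord<>(topic, message), new Callback() {
            @Override
            public void onCompletion(RecordMetadata metadata, Exception exception) {
                if (exception != null) {
                    // Still avoid the logging framework here to prevent recursion.
                    System.out.println("KafkaAppender: async send failed: " + exception);
                }
            }
        });
    }
}
````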
/src/main/java/com/xuele/log/send/kafka/formatter/Formatter.java:
--------------------------------------------------------------------------------
package com.xuele.log.send.kafka.formatter;

import ch.qos.logback.classic.spi.ILoggingEvent;

public interface Formatter {
    String format(ILoggingEvent event);
}
--------------------------------------------------------------------------------
/src/main/java/com/xuele/log/send/kafka/formatter/JsonFormatter.java:
--------------------------------------------------------------------------------
package com.xuele.log.send.kafka.formatter;

import ch.qos.logback.classic.spi.ILoggingEvent;
import org.joda.time.DateTime;
import org.json.simple.JSONObject;

import java.io.IOException;
import java.io.StringReader;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Formatter implementation that renders each logging event as a JSON object.
 */
public class JsonFormatter implements Formatter {

    private boolean expectJsonMessage = false; // configurable, but not yet applied in format()
    private boolean includeMethodAndLineNumber = false;
    private Map<String, String> extraPropertiesMap = null;

    @SuppressWarnings("unchecked")
    public String format(ILoggingEvent event) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("level", event.getLevel().levelStr);
        jsonObject.put("class", event.getLoggerName());
        jsonObject.put("timestamp", new DateTime(event.getTimeStamp()).toString("yyyy-MM-dd HH:mm:ss"));
        jsonObject.put("message", event.getFormattedMessage());

        if (includeMethodAndLineNumber) {
            // Caller data is computed lazily by logback and is relatively expensive,
            // so it is only requested when explicitly enabled.
            StackTraceElement[] callerDataArray = event.getCallerData();
            if (callerDataArray != null && callerDataArray.length > 0) {
                StackTraceElement stackTraceElement = callerDataArray[0];
                jsonObject.put("method", stackTraceElement.getMethodName());
                jsonObject.put("line", stackTraceElement.getLineNumber() + "");
            }
        }
        if (this.extraPropertiesMap != null) {
            jsonObject.putAll(extraPropertiesMap);
        }
        return jsonObject.toJSONString();
    }

    public boolean getExpectJsonMessage() {
        return expectJsonMessage;
    }

    public void setExpectJsonMessage(boolean expectJsonMessage) {
        this.expectJsonMessage = expectJsonMessage;
    }

    public boolean getIncludeMethodAndLineNumber() {
        return includeMethodAndLineNumber;
    }

    public void setIncludeMethodAndLineNumber(boolean includeMethodAndLineNumber) {
        this.includeMethodAndLineNumber = includeMethodAndLineNumber;
    }

    public void setExtraProperties(String thatExtraProperties) {
        // The value is parsed with java.util.Properties, so the logback configuration
        // can supply newline-separated key=value pairs to merge into every event.
        final Properties properties = new Properties();
        try {
            properties.load(new StringReader(thatExtraProperties));
            Enumeration<?> enumeration = properties.propertyNames();
            extraPropertiesMap = new HashMap<String, String>();
            while (enumeration.hasMoreElements()) {
                String name = (String) enumeration.nextElement();
                String value = properties.getProperty(name);
                extraPropertiesMap.put(name, value);
            }
        } catch (IOException e) {
            System.out.println("There was a problem reading the extra properties configuration: " + e.getMessage());
            e.printStackTrace();
        }
    }
}
--------------------------------------------------------------------------------
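To see exactly what `JsonFormatter` puts on the topic, it can be driven by hand with a synthetic logback event. A minimal sketch (`FormatterDemo`, the extra properties, and the sample message are illustrative only; JSON field order may vary because `JSONObject` is hash-based):

````java
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.LoggingEvent;
import com.xuele.log.send.kafka.formatter.JsonFormatter;

public class FormatterDemo {
    public static void main(String[] args) {
        // Build a synthetic event; logger name and message mimic the README samples.
        LoggerContext context = new LoggerContext();
        LoggingEvent event = new LoggingEvent(
                FormatterDemo.class.getName(),
                context.getLogger("test.SendTest"),
                Level.INFO,
                "es数据:1=>100",
                null,
                null);

        JsonFormatter formatter = new JsonFormatter();
        formatter.setExtraProperties("app=log_to_kafka\nenv=dev"); // hypothetical extra fields
        System.out.println(formatter.format(event));
        // Prints something like (field order may differ):
        // {"level":"INFO","class":"test.SendTest","timestamp":"...","message":"es数据:1=>100","app":"log_to_kafka","env":"dev"}
    }
}
````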
/src/main/java/com/xuele/log/send/kafka/formatter/MessageFormatter.java:
--------------------------------------------------------------------------------
package com.xuele.log.send.kafka.formatter;

import ch.qos.logback.classic.spi.ILoggingEvent;

/**
 * Formatter implementation that simply returns the logback message.
 *
 * @author tgoetz
 */
public class MessageFormatter implements Formatter {

    public String format(ILoggingEvent event) {
        return event.getFormattedMessage();
    }
}
--------------------------------------------------------------------------------
/src/main/java/test/SendTest.java:
--------------------------------------------------------------------------------
package test;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Created by QinDongLiang on 2017/1/9.
 */
public class SendTest {

    static Logger logger = LoggerFactory.getLogger(SendTest.class);

    public static void main(String[] args) {
        for (int i = 500; i <= 510; i++) {
            logger.info("hadoop数据:{}", i);
        }
    }
}
--------------------------------------------------------------------------------
/src/main/resources/conf.properties:
--------------------------------------------------------------------------------
kafka.brokers=${kafka.brokers}
kafka.log.topic=${kafka.log.topic}
kafka.log.write.timeout=${kafka.log.write.timeout}
kafka.syncSend=${kafka.syncSend}
--------------------------------------------------------------------------------
/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<configuration>

    <!-- The ${kafka.*} placeholders below are replaced at build time by Maven
         resource filtering with the values from src/main/filters. -->

    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <encoder>
            <pattern>[%-5level] [%date{yyyy-MM-dd HH:mm:ss}] %logger{96}:%line - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>logs/kp_diag_streaming-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>64 MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>DEBUG</level>
        </filter>
        <append>true</append>
    </appender>

    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>[ %-5level] [%date{yyyy-MM-dd HH:mm:ss}] %logger{96} [%line] - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
    </appender>

    <appender name="KAFKA" class="com.xuele.log.send.kafka.KafkaAppender">
        <topic>${kafka.log.topic}</topic>
        <brokers>${kafka.brokers}</brokers>
        <timeout>${kafka.log.write.timeout}</timeout>
        <syncSend>${kafka.syncSend}</syncSend>
        <formatter class="com.xuele.log.send.kafka.formatter.JsonFormatter">
            <includeMethodAndLineNumber>true</includeMethodAndLineNumber>
        </formatter>
    </appender>

    <appender name="ASYNC_KAFKA" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Caller data must be captured here, or method/line are lost once the
             event crosses the async boundary. -->
        <includeCallerData>true</includeCallerData>
        <appender-ref ref="KAFKA"/>
    </appender>

    <root level="INFO">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="FILE"/>
        <appender-ref ref="ASYNC_KAFKA"/>
    </root>
</configuration>
--------------------------------------------------------------------------------
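Since `logback.xml` is itself a filtered resource, the `${kafka.*}` placeholders are only resolved in the copy Maven writes to `target/classes`; running against the raw source file would hand the appender the literal placeholder strings. A quick way to confirm that logback parsed the configuration and attached the appenders is to list them off the root logger (`WiringCheck` is a made-up class; the printed names depend on the appender names chosen in the config above):

````java
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Appender;
import org.slf4j.LoggerFactory;

import java.util.Iterator;

public class WiringCheck {
    public static void main(String[] args) {
        // Touching the slf4j binding makes logback parse logback.xml from the classpath.
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        Logger root = context.getLogger(Logger.ROOT_LOGGER_NAME);
        Iterator<Appender<ILoggingEvent>> it = root.iteratorForAppenders();
        while (it.hasNext()) {
            System.out.println("attached appender: " + it.next().getName());
        }
    }
}
````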