├── .gitignore
├── LICENSE
├── README.md
├── pom.xml
├── spring-boot-starter-kafka-consumer
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── goudai
│ │ └── starter
│ │ └── kafka
│ │ └── consumer
│ │ ├── KafkaBeanPostProcessor.java
│ │ ├── SkipMessageException.java
│ │ ├── ThreadKit.java
│ │ ├── annotation
│ │ ├── EnableKafka.java
│ │ └── KafkaListener.java
│ │ └── autoconfigure
│ │ └── KafkaConsumerAutoConfiguration.java
│ └── resources
│ └── META-INF
│ └── spring.factories
├── spring-boot-starter-kafka-core
├── pom.xml
└── src
│ └── main
│ └── java
│ └── io
│ └── goudai
│ └── starter
│ └── kafka
│ └── core
│ ├── JsonUtils.java
│ ├── NamedThreadFactory.java
│ └── StringUtils.java
├── spring-boot-starter-kafka-producer
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── goudai
│ │ └── starter
│ │ └── kafka
│ │ └── producer
│ │ └── autoconfigure
│ │ └── KafkaProducerAutoConfiguration.java
│ └── resources
│ └── META-INF
│ └── spring.factories
└── spring-boot-starter-kafka-transaction-producer
├── pom.xml
└── src
└── main
├── java
└── io
│ └── github
│ └── goudai
│ └── kafka
│ └── transaction
│ └── producer
│ ├── EventSenderRunner.java
│ ├── IdGenerator.java
│ ├── KafkaEvent.java
│ ├── KafkaEventDatasource.java
│ ├── KafkaTransactionProducerAutoConfiguration.java
│ ├── TransactionProducer.java
│ └── TransactionProducerImpl.java
└── resources
└── META-INF
└── spring.factories
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | *.class
3 | target/
4 | .project
5 | .settings/
6 | .classpath
7 | .springBeans
8 | .idea/
9 | *Test.java
10 | /jvm.log
11 | *dependency-reduced-pom.xml
12 | *.log.*
13 | *.log
14 | 1.jpg
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 rushmore
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dependency
2 | * kafka client 2.2.0
3 | * spring boot
4 | * goudai runner
5 | # Usage
6 |
7 | ## Update
8 |
9 | * V1.0.1
10 | 1.支持故障重启,默认启用故障重启,默认重启间隔为20秒
11 | 2.支持idea自动补全
12 | * V1.0.2
13 | 修改groupId 发布到中央仓库
14 | * V2.2.1
15 | 支持最新版kafka-clients(2.2.0)
16 |
17 | ## consumer
18 |
19 | * add dependency to maven
20 |
21 | ```xml
22 |
23 | io.github.goudai
24 | spring-boot-starter-kafka-consumer
25 | 2.2.1
26 |
27 | ```
28 |
29 | * using on spring boot
30 |
31 | ```yaml
32 | # application.yml
33 | goudai:
34 | kafka:
35 | consumer:
36 | bootstrap-servers: ${KAFKA_SERVERS:localhost:9092}
37 | auto-restart:
38 | enabled: false # 默认为true 设置为false 表示关闭故障重启
39 | interval: 20 # 默认间隔20s
40 | ```
41 | ```java
42 | /**
43 | * 括号中指定group
44 | */
45 | @EnableKafka("user-consumer")
46 | public class UserConsumer {
47 |
48 | @KafkaListener(topic = "xxx")
49 | public void onUserRegisterCouponGranted(ConsumerRecord consumerRecord) {
50 | System.out.println(JsonUtils.toJson(consumerRecord));
51 | }
52 | }
53 |
54 | ```
55 |
56 |
57 | ## producer
58 |
59 | * add dependency to maven
60 |
61 | ```xml
62 |
63 | io.github.goudai
64 | spring-boot-starter-kafka-producer
65 | 2.2.1
66 |
67 | ```
68 |
69 | * using on spring boot
70 |
71 | ```yaml
72 | # application.yml
73 | goudai:
74 | kafka:
75 | producer:
76 | bootstrap-servers: ${KAFKA_SERVERS:localhost:9092}
77 |
78 | ```
79 | ```java
80 |
81 | @Component
82 | public class UserProducer {
83 |
84 | @Autowired
85 | Producer producer;
86 |
87 | public void sendMsg() {
88 |
89 | producer.send(new ProducerRecord("topic","kafkaContext json"));
90 |
91 |
92 | }
93 | }
94 |
95 | ```
96 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 | 4.0.0
3 |
4 |
5 | org.springframework.boot
6 | spring-boot-starter-parent
7 | 2.1.4.RELEASE
8 |
9 |
10 |
11 | io.github.goudai
12 | spring-boot-starter-kafka
13 | pom
14 | 2.2.3-SNAPSHOT
15 |
16 |
17 |
18 |
19 | qingmu
20 | 247687009@qq.com
21 |
22 |
23 |
24 |
25 |
26 | MIT License
27 | http://www.opensource.org/licenses/mit-license.php
28 | repo
29 |
30 |
31 |
32 |
33 | spring-boot-starter-kafka-core
34 | spring-boot-starter-kafka-consumer
35 | spring-boot-starter-kafka-producer
36 | spring-boot-starter-kafka-transaction-producer
37 |
38 |
39 |
40 | UTF-8
41 | 1.8
42 |
43 |
44 |
45 |
46 | scm:git:https://github.com/goudai/spring-boot-starter-kafka.git
47 | scm:git:https://github.com/goudai/spring-boot-starter-kafka.git
48 | scm:git:https://github.com/goudai/spring-boot-starter-kafka.git
49 | HEAD
50 |
51 |
52 |
53 |
54 |
55 | org.apache.kafka
56 | kafka-clients
57 | 2.2.0
58 |
59 |
60 | org.projectlombok
61 | lombok
62 | 1.18.6
63 | provided
64 |
65 |
66 | com.fasterxml.jackson.core
67 | jackson-databind
68 | 2.10.0.pr1
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 | org.apache.maven.plugins
77 | maven-source-plugin
78 | 3.0.1
79 |
80 |
81 | attach-sources
82 | verify
83 |
84 | jar-no-fork
85 |
86 |
87 |
88 |
89 |
90 | org.apache.maven.plugins
91 | maven-jar-plugin
92 | 3.1.0
93 |
94 |
95 |
96 | org.apache.maven.plugins
97 | maven-release-plugin
98 |
99 |
100 | org.apache.maven.scm
101 | maven-scm-provider-gitexe
102 | 1.9
103 |
104 |
105 |
106 |
107 | org.apache.maven.plugins
108 | maven-scm-plugin
109 | 1.9.2
110 |
111 |
112 | jgit
113 |
114 |
115 |
116 |
117 | org.apache.maven.scm
118 | maven-scm-provider-jgit
119 | 1.9.2
120 |
121 |
122 |
123 |
124 | org.apache.maven.plugins
125 | maven-source-plugin
126 | 3.0.1
127 |
128 |
129 | attach-sources
130 | verify
131 |
132 | jar-no-fork
133 |
134 |
135 |
136 |
137 |
138 | org.apache.maven.plugins
139 | maven-compiler-plugin
140 | 3.8.0
141 |
142 | UTF-8
143 | 1.8
144 | 1.8
145 |
146 | -parameters
147 |
148 |
149 |
150 |
151 |
152 |
153 |
157 |
158 |
159 | release
160 |
161 |
162 |
163 |
164 | org.apache.maven.plugins
165 | maven-javadoc-plugin
166 | 2.9.1
167 |
168 |
169 | package
170 |
171 | jar
172 |
173 |
174 | -Xdoclint:none
175 |
176 |
177 |
178 |
179 |
180 |
181 | org.apache.maven.plugins
182 | maven-gpg-plugin
183 | 1.6
184 |
185 |
186 | verify
187 |
188 | sign
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 | oss
198 | https://oss.sonatype.org/content/repositories/snapshots/
199 |
200 |
201 | oss
202 | https://oss.sonatype.org/service/local/staging/deploy/maven2/
203 |
204 |
205 |
206 |
207 |
208 |
209 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | io.github.goudai
4 | spring-boot-starter-kafka
5 | 2.2.3-SNAPSHOT
6 |
7 | 4.0.0
8 |
9 | spring-boot-starter-kafka-consumer
10 | jar
11 |
12 |
13 |
14 | UTF-8
15 |
16 |
17 |
18 |
19 | org.apache.kafka
20 | kafka-clients
21 |
22 |
23 | org.springframework.boot
24 | spring-boot
25 |
26 |
27 | org.projectlombok
28 | lombok
29 | provided
30 |
31 |
32 | io.github.goudai
33 | spring-boot-starter-kafka-core
34 | 2.2.3-SNAPSHOT
35 |
36 |
37 | org.springframework.boot
38 | spring-boot-configuration-processor
39 | true
40 |
41 |
42 | org.springframework
43 | spring-web
44 |
45 |
46 | org.springframework.boot
47 | spring-boot-autoconfigure
48 |
49 |
50 | org.jodd
51 | jodd-mail
52 | 5.0.4
53 |
54 |
55 |
56 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/src/main/java/io/goudai/starter/kafka/consumer/KafkaBeanPostProcessor.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.consumer;
2 |
3 | import io.goudai.starter.kafka.consumer.annotation.EnableKafka;
4 | import io.goudai.starter.kafka.consumer.annotation.KafkaListener;
5 | import io.goudai.starter.kafka.consumer.autoconfigure.KafkaConsumerAutoConfiguration;
6 | import io.goudai.starter.kafka.core.StringUtils;
7 | import jodd.mail.Email;
8 | import jodd.mail.SendMailSession;
9 | import jodd.mail.SmtpServer;
10 | import lombok.*;
11 | import org.apache.kafka.clients.consumer.*;
12 | import org.apache.kafka.common.TopicPartition;
13 | import org.slf4j.Logger;
14 | import org.slf4j.LoggerFactory;
15 | import org.springframework.beans.BeansException;
16 | import org.springframework.beans.factory.DisposableBean;
17 | import org.springframework.beans.factory.annotation.Autowired;
18 | import org.springframework.beans.factory.annotation.Value;
19 | import org.springframework.beans.factory.config.BeanPostProcessor;
20 | import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
21 | import org.springframework.core.env.Environment;
22 |
23 | import javax.annotation.PostConstruct;
24 | import java.io.PrintWriter;
25 | import java.io.StringWriter;
26 | import java.lang.reflect.InvocationTargetException;
27 | import java.lang.reflect.Method;
28 | import java.util.Map;
29 | import java.util.Properties;
30 | import java.util.concurrent.*;
31 | import java.util.concurrent.atomic.AtomicBoolean;
32 | import java.util.concurrent.atomic.AtomicInteger;
33 |
34 | import static java.util.Arrays.asList;
35 | import static java.util.Collections.singletonMap;
36 |
37 | /**
38 | * Created by freeman on 17/2/21.
39 | */
40 | @Setter
41 | public class KafkaBeanPostProcessor implements BeanPostProcessor, DisposableBean {
42 |
43 | private Logger logger = LoggerFactory.getLogger(KafkaBeanPostProcessor.class);
44 | private Properties properties;
45 | private Map consumerRunningCache = new ConcurrentHashMap<>();
46 | private Map consumerRestartedCount = new ConcurrentHashMap<>();
47 | private AtomicInteger POOL_SEQ = new AtomicInteger(1);
48 | private long timeout;
49 | private KafkaConsumerAutoConfiguration.KafkaConsumerProperties.AutoRestart autoRestart;
50 | private KafkaConsumerAutoConfiguration.KafkaConsumerProperties kafkaConsumerProperties;
51 | private BlockingQueue queue = new LinkedBlockingQueue<>();
52 |
53 | private BlockingQueue emailQueue;
54 | @Value("${spring.application.name}")
55 | private String applicationName;
56 |
57 | @Autowired
58 | Environment environment;
59 | private SmtpServer smtpServer;
60 |
61 | public KafkaBeanPostProcessor(Properties properties, KafkaConsumerAutoConfiguration.KafkaConsumerProperties kafkaConsumerProperties) {
62 | this.properties = properties;
63 | this.autoRestart = kafkaConsumerProperties.getAutoRestart();
64 | this.kafkaConsumerProperties = kafkaConsumerProperties;
65 | if (kafkaConsumerProperties.getEmail().isEnabled()) {
66 | this.emailQueue = new ArrayBlockingQueue<>(kafkaConsumerProperties.getEmail().getEmailQueueSize());
67 | final KafkaConsumerAutoConfiguration.KafkaConsumerProperties.Email.Smtp smtp = kafkaConsumerProperties.getEmail().getSmtp();
68 | this.smtpServer = SmtpServer.create()
69 | .host(smtp.getHost())
70 | .port(smtp.getPort())
71 | .ssl(smtp.isUseSSL())
72 | .auth(smtp.getUsername(), smtp.getPassword())
73 | .debugMode(smtp.isDebugMode())
74 | .buildSmtpMailServer();
75 | } else {
76 | this.emailQueue = new ArrayBlockingQueue<>(0);
77 | }
78 | this.timeout = kafkaConsumerProperties.getTimeout();
79 |
80 |
81 | }
82 |
83 | @Override
84 | public Object postProcessBeforeInitialization(Object o, String s) throws BeansException {
85 | return o;
86 | }
87 |
88 | @Override
89 | public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
90 | val aClass = bean.getClass();
91 | val enableKafka = aClass.getAnnotation(EnableKafka.class);
92 | if (enableKafka != null) {
93 | for (val method : aClass.getMethods()) {
94 | val consumerAnnotation = method.getAnnotation(KafkaListener.class);
95 | if (consumerAnnotation != null) {
96 | val topic = consumerAnnotation.topic();
97 | if (StringUtils.isBlank(topic)) {
98 | throw new IllegalArgumentException(String.format("topic is required by %s.%s @KafkaListener", aClass.getName(), method.getName()));
99 | }
100 | String group = consumerAnnotation.group();
101 | if (StringUtils.isBlank(group)) {
102 | logger.info("group is null,using default value in @EnableKafka");
103 | if (StringUtils.isBlank(enableKafka.value())) {
104 | throw new IllegalArgumentException(String.format("group is required by %s.%s @KafkaListener ", aClass.getName(), method.getName()));
105 | } else {
106 | group = enableKafka.value();
107 | }
108 | }
109 | val config = (Properties) properties.clone();
110 | config.put(ConsumerConfig.GROUP_ID_CONFIG, group);
111 | ConsumeMetadata metadata = ConsumeMetadata.builder()
112 | .bean(bean)
113 | .config(config)
114 | .group(group)
115 | .topic(topic)
116 | .method(method)
117 | .build();
118 | queue.add(metadata);
119 |
120 | }
121 | }
122 | }
123 | return bean;
124 | }
125 |
126 | @Override
127 | public void destroy() {
128 | this.autoRestart.setEnabled(false);
129 | for (AtomicBoolean atomicBoolean : consumerRunningCache.values()) {
130 | atomicBoolean.set(false);
131 | }
132 | }
133 |
134 | @PostConstruct
135 | public void start() {
136 | ThreadKit.cachedThreadPool.execute(() -> {
137 | Thread thread = Thread.currentThread();
138 | thread.setName("QUEUE-Consumer-Thread " + POOL_SEQ.getAndIncrement());
139 | while (this.autoRestart.isEnabled()) {
140 | try {
141 | ConsumeMetadata take = queue.take();
142 | String key = take.group + ":" + take.topic;
143 | startConsumer(take);
144 | AtomicInteger atomicInteger = consumerRestartedCount.get(key);
145 | if (atomicInteger == null) {
146 | consumerRestartedCount.put(key, new AtomicInteger(0));
147 | logger.info("starting consumer group {}, topic {},metadata {} successfully ", take.getGroup(), take.getTopic(), take);
148 | } else {
149 | logger.error("The {} restart of the consumer {}-{} succeeded", atomicInteger.get(), key);
150 | }
151 |
152 | } catch (InterruptedException e) {
153 | logger.error(e.getMessage(), e);
154 | }
155 | }
156 | });
157 | }
158 |
159 | @PostConstruct
160 | @ConditionalOnProperty(name = "goudai.kafka.consumer.email.enabled", havingValue = "true")
161 | public void startEmail() {
162 | final String[] activeProfiles = environment.getActiveProfiles();
163 | final String activeProfile = activeProfiles.length == 1 ? activeProfiles[0] : environment.getDefaultProfiles()[0];
164 | ThreadKit.cachedThreadPool.execute(() -> {
165 | Thread thread = Thread.currentThread();
166 | thread.setName("Email-Consumer-Thread " + POOL_SEQ.getAndIncrement());
167 | while (kafkaConsumerProperties.getEmail().isEnabled()) {
168 | try {
169 | final EmailMate take = emailQueue.take();
170 |
171 | try (final StringWriter out = new StringWriter();
172 | final PrintWriter printWriter = new PrintWriter(out);
173 | final SendMailSession session = smtpServer.createSession();) {
174 | session.open();
175 | take.throwable.printStackTrace(printWriter);
176 | final KafkaConsumerAutoConfiguration.KafkaConsumerProperties.Email.Smtp smtp = kafkaConsumerProperties.getEmail().getSmtp();
177 | session.sendMail(Email.create()
178 | .from(smtp.getFrom())
179 | .to(smtp.getTo().toArray(new String[smtp.getTo().size()]))
180 | .subject(activeProfile + "-狗带kafka Consumer报警!" + applicationName)
181 | .htmlMessage(
182 | String.format("%s", out.toString().replaceAll("(\r\n|\n)", "
"))
184 | , "utf-8")
185 | );
186 | }
187 | } catch (Exception e) {
188 | logger.error(e.getMessage(), e);
189 | }
190 | }
191 | });
192 | }
193 |
194 |
195 | public void startConsumer(ConsumeMetadata metadata) {
196 | ThreadKit.cachedThreadPool.execute(() -> {
197 | String group = metadata.group;
198 | String topic = metadata.topic;
199 | Method method = metadata.method;
200 | Object bean = metadata.bean;
201 |
202 | val consumer = new KafkaConsumer(metadata.config);
203 | consumer.subscribe(asList(topic));
204 |
205 | val key = group + topic;
206 | consumerRunningCache.put(key, new AtomicBoolean(true));
207 | Thread.currentThread().setName(group + "-" + topic + "-" + POOL_SEQ.getAndIncrement());
208 | boolean isException = false;
209 | while (consumerRunningCache.get(key).get()) {
210 | ConsumerRecords consumerRecords = consumer.poll(timeout);
211 | isException = false;
212 | try {
213 | handleRecord(topic, method, bean, consumer, consumerRecords);
214 | } catch (Throwable e) {
215 | isException = true;
216 | handeException(metadata, group, topic, consumer, key, e);
217 | }
218 | }
219 | if (!isException) {
220 | consumer.close();
221 | }
222 |
223 | });
224 | }
225 |
226 | private void handleRecord(String topic, Method method, Object bean, KafkaConsumer consumer, ConsumerRecords consumerRecords) throws Throwable {
227 | for (ConsumerRecord consumerRecord : consumerRecords) {
228 | if (topic.equals(consumerRecord.topic())) {
229 | final String value = consumerRecord.value();
230 | if (StringUtils.isNotBlank(value)) {
231 | try {
232 | method.invoke(bean, consumerRecord);
233 | } catch (InvocationTargetException e) {
234 | if (e.getCause() != null && e.getCause() instanceof SkipMessageException) {
235 | logger.warn("skip message " + e.getCause().getMessage(), e.getCause());
236 | } else {
237 | throw e.getCause() != null ? e.getCause() : e;
238 | }
239 | }
240 | }
241 | }
242 |
243 | consumer.commitSync(singletonMap(
244 | new TopicPartition(consumerRecord.topic(), consumerRecord.partition()),
245 | new OffsetAndMetadata(consumerRecord.offset() + 1)));
246 | }
247 | }
248 |
249 | private void handeException(ConsumeMetadata metadata, String group, String topic, KafkaConsumer consumer, String key, Throwable e) {
250 | AtomicInteger atomicInteger = consumerRestartedCount.get(key);
251 | if (atomicInteger == null) {
252 | consumerRestartedCount.put(key, atomicInteger = new AtomicInteger(0));
253 | }
254 | final String message = "consumer发生故障 当前第 " + atomicInteger.getAndIncrement() + "次故障";
255 | if (kafkaConsumerProperties.getEmail().isEnabled()) {
256 | emailQueue.offer(EmailMate.builder().message(message).throwable(e).build());
257 | }
258 | logger.error(message, e);
259 | consumerRunningCache.get(key).set(false);
260 | try {
261 | consumer.close();
262 | } catch (Exception e1) {
263 | logger.error("关闭close异常", e1);
264 | }
265 |
266 | try {
267 | //故障之后sleep10s 避免死循环
268 | logger.info("enter sleep {} s", this.autoRestart.getInterval());
269 | TimeUnit.SECONDS.sleep(this.autoRestart.getInterval());
270 | } catch (InterruptedException e1) {
271 | logger.error(e.getMessage(), e);
272 | }
273 | if (this.autoRestart.isEnabled()) {
274 | //重新放回启动队列
275 | queue.add(metadata);
276 | logger.info("将" + metadata + "重新放回队列");
277 | } else {
278 | logger.warn("autoRestart is disabled,consumer close ,{},{}", topic, group);
279 | }
280 | }
281 |
282 | @Builder
283 | @AllArgsConstructor
284 | @NoArgsConstructor
285 | @Setter
286 | @Getter
287 | @ToString
288 | public static class ConsumeMetadata {
289 | private Method method;
290 | private Object bean;
291 | private String group;
292 | private String topic;
293 | private Properties config;
294 | }
295 |
296 | @Builder
297 | @AllArgsConstructor
298 | @NoArgsConstructor
299 | @Setter
300 | @Getter
301 | @ToString
302 | public static class EmailMate {
303 | private Throwable throwable;
304 | private String message;
305 | }
306 |
307 |
308 | }
309 |
310 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/src/main/java/io/goudai/starter/kafka/consumer/SkipMessageException.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.consumer;
2 |
/**
 * Thrown by a listener method to signal that the current record should be
 * logged and skipped rather than treated as a consumer failure.
 */
public class SkipMessageException extends RuntimeException {

    public SkipMessageException() {
        super();
    }

    public SkipMessageException(Throwable cause) {
        super(cause);
    }

    public SkipMessageException(String message) {
        super(message);
    }

    public SkipMessageException(String message, Throwable cause) {
        super(message, cause);
    }

    protected SkipMessageException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
25 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/src/main/java/io/goudai/starter/kafka/consumer/ThreadKit.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.consumer;
2 |
3 |
4 | import io.goudai.starter.kafka.core.NamedThreadFactory;
5 |
6 | import java.util.concurrent.ExecutorService;
7 | import java.util.concurrent.Executors;
8 |
9 | /**
10 | * Created by freeman on 16/7/21.
11 | */
12 | public class ThreadKit {
13 | public static final ExecutorService cachedThreadPool = Executors.newCachedThreadPool(new NamedThreadFactory("consumer", true));
14 | }
15 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/src/main/java/io/goudai/starter/kafka/consumer/annotation/EnableKafka.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.consumer.annotation;
2 |
3 | import org.springframework.stereotype.Component;
4 |
5 | import java.lang.annotation.*;
6 |
/**
 * Marks a class as a kafka consumer container. Annotated classes are
 * registered as spring components (meta-annotated with @Component) and are
 * scanned by KafkaBeanPostProcessor for {@link KafkaListener} methods.
 *
 * Created by freeman on 17/2/21.
 */
@Component
@Inherited
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface EnableKafka {
    /** Default consumer group applied to listener methods that set no group. */
    String value() default "";
}
17 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/src/main/java/io/goudai/starter/kafka/consumer/annotation/KafkaListener.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.consumer.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
/**
 * Marks a method of an @EnableKafka class as a kafka record listener; the
 * method is invoked with each ConsumerRecord by KafkaBeanPostProcessor.
 *
 * Created by freeman on 17/2/21.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD})
public @interface KafkaListener {

    /** Consumer group; when blank, falls back to the class-level @EnableKafka value. */
    String group() default "";

    /** Topic to subscribe to. Required — a blank topic is rejected at startup. */
    String topic();

    /** Free-form human-readable description; not used at runtime. */
    String description() default "";

}
22 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/src/main/java/io/goudai/starter/kafka/consumer/autoconfigure/KafkaConsumerAutoConfiguration.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.consumer.autoconfigure;
2 |
3 | import io.goudai.starter.kafka.consumer.KafkaBeanPostProcessor;
4 | import io.goudai.starter.kafka.core.JsonUtils;
5 | import lombok.Getter;
6 | import lombok.Setter;
7 | import lombok.extern.slf4j.Slf4j;
8 | import org.apache.kafka.clients.consumer.ConsumerConfig;
9 | import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
10 | import org.springframework.boot.context.properties.ConfigurationProperties;
11 | import org.springframework.boot.context.properties.EnableConfigurationProperties;
12 | import org.springframework.context.annotation.Bean;
13 | import org.springframework.context.annotation.Configuration;
14 |
15 | import java.util.Arrays;
16 | import java.util.List;
17 | import java.util.Properties;
18 |
19 | @Configuration
20 | @EnableConfigurationProperties(KafkaConsumerAutoConfiguration.KafkaConsumerProperties.class)
21 | @Slf4j
22 | public class KafkaConsumerAutoConfiguration {
23 |
24 |
25 | @Bean
26 | @ConditionalOnMissingBean
27 | public KafkaBeanPostProcessor kafkaBeanPostProcessor(KafkaConsumerProperties kafkaConsumerProperties) {
28 | Properties properties = new Properties();
29 | properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConsumerProperties.bootstrapServers);
30 | properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, String.valueOf(kafkaConsumerProperties.enableAutoCommit));
31 | properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, kafkaConsumerProperties.autoOffsetReset);
32 | properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, kafkaConsumerProperties.keyDeserializer);
33 | properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, kafkaConsumerProperties.valueDeserializer);
34 | log.info("initing KafkaBeanPostProcessor using properties : {}", JsonUtils.toJson(properties));
35 | KafkaBeanPostProcessor kafkaBeanPostProcessor = new KafkaBeanPostProcessor(properties, kafkaConsumerProperties);
36 | log.info("inited KafkaBeanPostProcessor successfully {} using properties : {}", kafkaBeanPostProcessor, JsonUtils.toJson(properties));
37 | return kafkaBeanPostProcessor;
38 | }
39 |
40 |
41 | @Setter
42 | @Getter
43 | @ConfigurationProperties(prefix = "goudai.kafka.consumer")
44 | public static class KafkaConsumerProperties {
45 |
46 | private String bootstrapServers;
47 |
48 | private long timeout = 3000L;
49 |
50 | private boolean enableAutoCommit = false;
51 |
52 | private String autoOffsetReset = "earliest";
53 |
54 | private String keyDeserializer = "org.apache.kafka.common.serialization.StringDeserializer";
55 |
56 | private String valueDeserializer = "org.apache.kafka.common.serialization.StringDeserializer";
57 |
58 | private AutoRestart autoRestart = new AutoRestart();
59 |
60 |
61 | private Email email = new Email();
62 |
63 | @Setter
64 | @Getter
65 | public static class AutoRestart {
66 | private boolean enabled = true;
67 | private int interval = 20;
68 | }
69 |
70 |
71 | @Setter
72 | @Getter
73 | public static class Email {
74 | private boolean enabled = true;
75 | private Smtp smtp = new Smtp();
76 | private int emailQueueSize = 100;
77 |
78 | @Setter
79 | @Getter
80 | public static class Smtp {
81 | private String host = "";
82 | private int port = 465;
83 | private String username = "";
84 | private String password = "";
85 | private boolean useSSL = true;
86 | private boolean debugMode = false;
87 | private String from = "";
88 | private List to = Arrays.asList("");
89 |
90 | }
91 | }
92 | }
93 |
94 | }
95 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-consumer/src/main/resources/META-INF/spring.factories:
--------------------------------------------------------------------------------
1 | org.springframework.boot.autoconfigure.EnableAutoConfiguration=io.goudai.starter.kafka.consumer.autoconfigure.KafkaConsumerAutoConfiguration
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | io.github.goudai
4 | spring-boot-starter-kafka
5 | 2.2.3-SNAPSHOT
6 |
7 | 4.0.0
8 |
9 | spring-boot-starter-kafka-core
10 | jar
11 |
12 |
13 |
14 | UTF-8
15 |
16 |
17 |
18 |
19 | org.apache.kafka
20 | kafka-clients
21 |
22 |
23 | org.springframework.boot
24 | spring-boot
25 |
26 |
27 | org.projectlombok
28 | lombok
29 |
30 |
31 | junit
32 | junit
33 |
34 |
35 | com.fasterxml.jackson.core
36 | jackson-databind
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-core/src/main/java/io/goudai/starter/kafka/core/JsonUtils.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.core;
2 |
3 | import com.fasterxml.jackson.annotation.JsonInclude;
4 | import com.fasterxml.jackson.core.type.TypeReference;
5 | import com.fasterxml.jackson.databind.DeserializationFeature;
6 | import com.fasterxml.jackson.databind.ObjectMapper;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 | import org.springframework.util.Assert;
10 |
11 | import java.io.InputStream;
12 | import java.text.SimpleDateFormat;
13 | import java.util.Collection;
14 | import java.util.TimeZone;
15 |
16 | public final class JsonUtils {
17 |
18 | private static Logger logger = LoggerFactory.getLogger(JsonUtils.class);
19 |
20 | private static ObjectMapper mapper = new ObjectMapper();
21 |
22 | static {
23 | mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
24 | mapper.enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT);
25 | mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
26 | mapper.enable(DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL);
27 | mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
28 | mapper.setTimeZone(TimeZone.getTimeZone("GMT+8:00"));
29 | }
30 |
31 | public static String toJson(Object object) {
32 | try {
33 | return mapper.writeValueAsString(object);
34 | } catch (Exception e) {
35 | logger.error("write to json string error:" + object, e);
36 | throw new RuntimeException(e);
37 | }
38 | }
39 |
40 | public static T fromJson(String jsonString, Class valueType) {
41 | Assert.notNull(valueType,"valueType is null ");
42 | if (StringUtils.isBlank(jsonString)) {
43 | return null;
44 | }
45 | try {
46 | return mapper.readValue(jsonString, valueType);
47 | } catch (Exception e) {
48 | logger.error("parse json string error:" + jsonString, e);
49 | throw new RuntimeException(e);
50 | }
51 | }
52 |
53 | public static T fromJson(InputStream is, Class valueType) {
54 | Assert.notNull(valueType,"valueType is null ");
55 | Assert.notNull(is,"inputStream is null");
56 | try {
57 | return mapper.readValue(is, valueType);
58 | } catch (Exception e) {
59 | logger.error("parse json string error", e);
60 | throw new RuntimeException(e);
61 | }
62 | }
63 |
64 | public static , S> T fromJson(String jsonString, Class collectionType, Class elementType) {
65 | Assert.notNull(collectionType,"collectionType is null");
66 | Assert.notNull(elementType,"elementType is null");
67 | if (StringUtils.isBlank(jsonString)) {
68 | return null;
69 | }
70 | try {
71 | return mapper.readValue(jsonString, mapper.getTypeFactory().constructCollectionType(collectionType, elementType));
72 | } catch (Exception e) {
73 | logger.error("parse json string error:" + jsonString, e);
74 | throw new RuntimeException(e);
75 | }
76 | }
77 |
78 | public static T fromJson(String jsonString, TypeReference typeReference) {
79 | Assert.notNull(typeReference,"typeReference is null");
80 | if (StringUtils.isEmpty(jsonString)) {
81 | return null;
82 | }
83 | try {
84 | return mapper.readValue(jsonString, typeReference);
85 | } catch (Exception e) {
86 | logger.error("parse json string error:" + jsonString, e);
87 | throw new RuntimeException(e);
88 | }
89 | }
90 |
91 | }
92 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-core/src/main/java/io/goudai/starter/kafka/core/NamedThreadFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 1999-2011 Alibaba Group.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package io.goudai.starter.kafka.core;
17 |
18 | import java.util.concurrent.ThreadFactory;
19 | import java.util.concurrent.atomic.AtomicInteger;
20 |
/**
 * A {@link ThreadFactory} that names each created thread with a caller-supplied
 * prefix followed by a per-factory sequence number, e.g. {@code myPool-thread-1}.
 * Daemon status is fixed at construction time and applied to every thread.
 */
public class NamedThreadFactory implements ThreadFactory {

    /** Global sequence used only to derive a unique default prefix per factory. */
    private static final AtomicInteger POOL_SEQ = new AtomicInteger(1);

    /** Per-factory counter appended to each thread name. */
    private final AtomicInteger threadNumber = new AtomicInteger(1);

    private final String namePrefix;
    private final boolean daemon;
    private final ThreadGroup group;

    /** Creates a factory with an auto-generated prefix ("pool-N") and non-daemon threads. */
    public NamedThreadFactory() {
        this("pool-" + POOL_SEQ.getAndIncrement(), false);
    }

    /** Creates a factory producing non-daemon threads named {@code prefix-thread-N}. */
    public NamedThreadFactory(String prefix) {
        this(prefix, false);
    }

    /**
     * Creates a factory producing threads named {@code prefix-thread-N}.
     *
     * @param prefix base name for created threads
     * @param daemon whether created threads are daemons
     */
    public NamedThreadFactory(String prefix, boolean daemon) {
        this.namePrefix = prefix + "-thread-";
        this.daemon = daemon;
        // Prefer the SecurityManager's thread group when one is installed,
        // otherwise fall back to the creating thread's group.
        SecurityManager securityManager = System.getSecurityManager();
        this.group = (securityManager == null)
                ? Thread.currentThread().getThreadGroup()
                : securityManager.getThreadGroup();
    }

    @Override
    public Thread newThread(Runnable runnable) {
        String name = namePrefix + threadNumber.getAndIncrement();
        Thread thread = new Thread(group, runnable, name, 0);
        thread.setDaemon(daemon);
        return thread;
    }

    /** Returns the thread group every created thread belongs to. */
    public ThreadGroup getThreadGroup() {
        return group;
    }
}
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-core/src/main/java/io/goudai/starter/kafka/core/StringUtils.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.core;
2 |
3 |
/**
 * Minimal string helpers used by the kafka starters, mirroring the
 * commons-lang semantics of blank/empty checks.
 */
public class StringUtils {

    /**
     * Returns {@code true} when the sequence is {@code null}, empty,
     * or consists solely of whitespace characters.
     */
    public static boolean isBlank(CharSequence cs) {
        if (cs == null) {
            return true;
        }
        for (int i = 0, len = cs.length(); i < len; i++) {
            if (!Character.isWhitespace(cs.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /** Negation of {@link #isBlank(CharSequence)}. */
    public static boolean isNotBlank(CharSequence cs) {
        return !isBlank(cs);
    }

    /** Returns {@code true} when the sequence is {@code null} or has length zero. */
    public static boolean isEmpty(CharSequence cs) {
        return cs == null || cs.length() == 0;
    }
}
29 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-producer/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | spring-boot-starter-kafka
4 | io.github.goudai
5 | 2.2.3-SNAPSHOT
6 |
7 | 4.0.0
8 |
9 | spring-boot-starter-kafka-producer
10 | jar
11 |
12 |
13 |
14 | UTF-8
15 |
16 |
17 |
18 | 
19 | org.apache.kafka
20 | kafka-clients
21 | 
26 |
27 | org.springframework.boot
28 | spring-boot
29 |
30 |
31 | org.projectlombok
32 | lombok
33 |
34 |
35 | io.github.goudai
36 | spring-boot-starter-kafka-core
37 | 2.2.3-SNAPSHOT
38 |
39 |
40 | org.springframework.boot
41 | spring-boot-configuration-processor
42 | true
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-producer/src/main/java/io/goudai/starter/kafka/producer/autoconfigure/KafkaProducerAutoConfiguration.java:
--------------------------------------------------------------------------------
1 | package io.goudai.starter.kafka.producer.autoconfigure;
2 |
3 | import io.goudai.starter.kafka.core.JsonUtils;
4 | import io.goudai.starter.kafka.core.StringUtils;
5 | import lombok.Getter;
6 | import lombok.Setter;
7 | import lombok.extern.slf4j.Slf4j;
8 | import org.apache.kafka.clients.producer.KafkaProducer;
9 | import org.apache.kafka.clients.producer.Producer;
10 | import org.apache.kafka.clients.producer.ProducerConfig;
11 | import org.springframework.boot.context.properties.ConfigurationProperties;
12 | import org.springframework.boot.context.properties.EnableConfigurationProperties;
13 | import org.springframework.context.annotation.Bean;
14 | import org.springframework.context.annotation.Configuration;
15 |
16 | import java.util.Properties;
17 |
18 | @Configuration
19 | @EnableConfigurationProperties(KafkaProducerAutoConfiguration.KafkaProducerProperties.class)
20 | @Slf4j
21 | public class KafkaProducerAutoConfiguration {
22 |
23 | @Bean
24 | public Producer producer(KafkaProducerProperties producerProperties) {
25 | Properties properties = new Properties();
26 | properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, producerProperties.bootstrapServers);
27 | properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, String.valueOf(producerProperties.enableIdempotence));
28 | if (StringUtils.isNotBlank(producerProperties.transactionalId)) {
29 | properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, producerProperties.transactionalId);
30 | }
31 | properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, producerProperties.keySerializer);
32 | properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, producerProperties.valueSerializer);
33 | if(!producerProperties.enableIdempotence){
34 | properties.put(ProducerConfig.ACKS_CONFIG, producerProperties.acks);
35 | }
36 | log.info("initing stringStringKafkaProducer using properties : {}", JsonUtils.toJson(properties));
37 | KafkaProducer stringStringKafkaProducer = new KafkaProducer<>(properties);
38 | log.info("init stringStringKafkaProducer successfully {} using properties : {}", stringStringKafkaProducer, JsonUtils.toJson(properties));
39 | return stringStringKafkaProducer;
40 | }
41 |
42 |
43 | @Setter
44 | @Getter
45 | @ConfigurationProperties(prefix = "goudai.kafka.producer")
46 | public static class KafkaProducerProperties {
47 |
48 | private String bootstrapServers;
49 |
50 | private boolean enableIdempotence = false;
51 |
52 | private String transactionalId = "";
53 |
54 | private String acks = "-1";
55 |
56 | private String keySerializer = "org.apache.kafka.common.serialization.StringSerializer";
57 |
58 | private String valueSerializer = "org.apache.kafka.common.serialization.StringSerializer";
59 |
60 | }
61 |
62 | }
63 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-producer/src/main/resources/META-INF/spring.factories:
--------------------------------------------------------------------------------
1 | org.springframework.boot.autoconfigure.EnableAutoConfiguration=io.goudai.starter.kafka.producer.autoconfigure.KafkaProducerAutoConfiguration
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | spring-boot-starter-kafka
6 | io.github.goudai
7 | 2.2.3-SNAPSHOT
8 |
9 | 4.0.0
10 |
11 | spring-boot-starter-kafka-transaction-producer
12 |
13 |
14 |
15 | UTF-8
16 | 4.0.17
17 | 1.8
18 | 1.8
19 |
20 |
21 |
22 |
23 | junit
24 | junit
25 | 4.11
26 | test
27 |
28 |
29 | io.github.goudai
30 | spring-boot-starter-runner-zookeeper
31 | 4.0.17
32 |
33 |
34 | org.projectlombok
35 | lombok
36 | compile
37 |
38 |
39 | io.github.goudai
40 | spring-boot-starter-kafka-producer
41 | 2.2.3-SNAPSHOT
42 | compile
43 |
44 |
45 | io.github.goudai
46 | gd-generator-default
47 | 2.0.15
48 |
49 |
50 | javax.validation
51 | validation-api
52 | 2.0.1.Final
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 | maven-clean-plugin
61 | 3.0.0
62 |
63 |
64 |
65 | maven-resources-plugin
66 | 3.0.2
67 |
68 |
69 | maven-compiler-plugin
70 | 3.7.0
71 |
72 |
73 | maven-surefire-plugin
74 | 2.20.1
75 |
76 |
77 | maven-jar-plugin
78 | 3.0.2
79 |
80 |
81 | maven-install-plugin
82 | 2.5.2
83 |
84 |
85 | maven-deploy-plugin
86 | 2.8.2
87 |
88 |
89 |
90 |
91 |
92 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/java/io/github/goudai/kafka/transaction/producer/EventSenderRunner.java:
--------------------------------------------------------------------------------
1 | package io.github.goudai.kafka.transaction.producer;
2 |
3 | import io.goudai.starter.runner.zookeeper.AbstractMultipartRunner;
4 | import lombok.extern.slf4j.Slf4j;
5 | import org.apache.kafka.clients.producer.Producer;
6 | import org.apache.kafka.clients.producer.ProducerRecord;
7 | import org.apache.kafka.clients.producer.RecordMetadata;
8 | import org.springframework.beans.factory.annotation.Autowired;
9 |
10 | import java.util.Arrays;
11 | import java.util.HashSet;
12 | import java.util.List;
13 | import java.util.Set;
14 | import java.util.concurrent.Future;
15 | import java.util.concurrent.TimeUnit;
16 |
17 | /**
18 | * @author qingmu.io
19 | * 2018/11/19
20 | */
21 | @Slf4j
22 | public class EventSenderRunner extends AbstractMultipartRunner {
23 |
24 | @Autowired
25 | private Producer producer;
26 |
27 | @Autowired
28 | private KafkaEventDatasource eventMapper;
29 |
30 | @Override
31 | public void apply(String projectId) throws Exception {
32 | final List unsentEvents = eventMapper.getUnsentEvents();
33 | for (KafkaEvent event : unsentEvents) {
34 | Future future = producer.send(new ProducerRecord<>(event.getTopic(), event.getId(), event.getPayload()));
35 | /* waiting for send result. */
36 | future.get(5, TimeUnit.SECONDS);
37 | log.info("send event [%s] successfully", event);
38 | eventMapper.onSendSuccessful(event);
39 | }
40 | }
41 |
42 | @Override
43 | public Set getAllProjects() {
44 | return new HashSet<>(Arrays.asList("1"));
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/java/io/github/goudai/kafka/transaction/producer/IdGenerator.java:
--------------------------------------------------------------------------------
1 | package io.github.goudai.kafka.transaction.producer;
2 |
3 | import java.util.UUID;
4 |
/**
 * Supplies unique identifiers for outgoing kafka events.
 * The default implementation yields a 32-character, dash-free random UUID.
 */
public interface IdGenerator {

    /** Returns a fresh 32-character hexadecimal identifier. */
    default String nextId() {
        String raw = UUID.randomUUID().toString();
        return raw.replace("-", "");
    }
}
11 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/java/io/github/goudai/kafka/transaction/producer/KafkaEvent.java:
--------------------------------------------------------------------------------
1 | package io.github.goudai.kafka.transaction.producer;
2 |
3 | import io.gd.generator.annotation.Field;
4 | import io.gd.generator.annotation.query.Query;
5 | import io.gd.generator.annotation.view.View;
6 | import io.gd.generator.api.query.Predicate;
7 | import lombok.*;
8 |
9 | import javax.persistence.*;
10 | import javax.validation.constraints.NotBlank;
11 | import javax.validation.constraints.NotNull;
12 | import java.util.Date;
13 |
14 | import static io.gd.generator.api.query.Predicate.EQ;
15 |
16 | /**
17 | * @author qingmu.io
18 | * 2018/11/19
19 | */
@Entity
@Getter
@Setter
@Builder
@AllArgsConstructor
@NoArgsConstructor
@ToString
public class KafkaEvent {

    // Primary key; populated from IdGenerator.nextId() (a 32-char dash-free UUID string).
    // NOTE(review): columnDefinition is "BigInt(20)" while the Java type is String —
    // a 32-char hex id cannot fit a BigInt column; confirm against the actual DDL.
    @Id
    @Field(label = "id")
    @NotBlank
    @Column(columnDefinition = "BigInt(20)")
    private String id;

    // Shard/partition key consumed by EventSenderRunner; set to "1" on insert.
    @Query({EQ})
    @Field(label = "项目id")
    @View
    @NotBlank
    @Column(columnDefinition = "BigInt(20)")
    private String projectId;

    // Destination Kafka topic.
    // NOTE(review): @NotBlank already implies non-null; the extra @NotNull is redundant.
    @NotBlank
    @Field(label = "主题")
    @NotNull
    private String topic;

    // Set when the event row is inserted (see TransactionProducerImpl.send).
    @NotNull
    @Field(label = "创建时间")
    private Date createdTime;

    // JPA optimistic-lock version; initialized to 0 on insert.
    @NotNull
    @Version
    @Field(label = "乐观锁")
    private Integer version;

    // false on insert; expected to flip once the event is delivered to Kafka
    // (implementations update it in KafkaEventDatasource.onSendSuccessful).
    @NotNull
    @Field(label = "是否发送")
    @Query(Predicate.EQ)
    private Boolean isSent;

    // Timestamp of successful delivery, if any.
    @Field(label = "发送时间")
    private Date sentTime;

    // JSON-serialized event body (JsonUtils.toJson of the original payload object).
    @Field(label = "消息体")
    @NotBlank
    @Lob
    private String payload;


}
71 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/java/io/github/goudai/kafka/transaction/producer/KafkaEventDatasource.java:
--------------------------------------------------------------------------------
1 | package io.github.goudai.kafka.transaction.producer;
2 |
3 |
4 | import java.util.List;
5 |
6 | /**
7 | * @author qingmu.io
8 | * 2018/11/19
9 | */
10 |
11 | public interface KafkaEventDatasource {
12 |
13 | /**
14 | * 该接口将事件插入到数据库中,请保证当前这方法与当前业务使用的是同一套ORM框架,
15 | * 即同一条javax.sql.Connection
16 | *
17 | * @param event
18 | */
19 | void insertEvent(KafkaEvent event);
20 |
21 | /**
22 | * 该接口请返回所有的为发送的到mq的事件列表
23 | *
24 | * @return
25 | */
26 | List getUnsentEvents();
27 |
28 | /**
29 | * 该接口会在成功发送到Kafka之后调用,请在该接口更新DB中的event status。
30 | *
31 | * @param event
32 | */
33 | void onSendSuccessful(KafkaEvent event);
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/java/io/github/goudai/kafka/transaction/producer/KafkaTransactionProducerAutoConfiguration.java:
--------------------------------------------------------------------------------
1 | package io.github.goudai.kafka.transaction.producer;
2 |
3 | import lombok.extern.slf4j.Slf4j;
4 | import org.springframework.beans.factory.annotation.Value;
5 | import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
6 | import org.springframework.context.annotation.Bean;
7 | import org.springframework.context.annotation.Configuration;
8 |
9 | @Configuration
10 | @Slf4j
11 | //@EnableConfigurationProperties(KafkaTransactionProducerAutoConfiguration.KafkaTransactionProducerProperties.class)
12 | public class KafkaTransactionProducerAutoConfiguration {
13 |
14 |
15 | @Value("${spring.application.name}")
16 | private String applicationName;
17 |
18 | @Bean
19 | public EventSenderRunner eventSenderRunner() {
20 | return new EventSenderRunner();
21 | }
22 |
23 | @Bean
24 | @ConditionalOnMissingBean
25 | public IdGenerator idGenerator() {
26 | return new IdGenerator() {
27 | };
28 | }
29 |
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/java/io/github/goudai/kafka/transaction/producer/TransactionProducer.java:
--------------------------------------------------------------------------------
1 | package io.github.goudai.kafka.transaction.producer;
2 |
3 | /**
4 | * @author qingmu.io
5 | * 2018/11/19
6 | */
public interface TransactionProducer {


    /**
     * Records the given object as a pending Kafka event inside the caller's
     * current database transaction; the payload is JSON-serialized and later
     * delivered asynchronously by EventSenderRunner once the transaction commits.
     *
     * @param topic  destination Kafka topic
     * @param object payload to serialize and publish
     */
    void send(String topic, Object object);

}
13 |
14 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/java/io/github/goudai/kafka/transaction/producer/TransactionProducerImpl.java:
--------------------------------------------------------------------------------
1 | package io.github.goudai.kafka.transaction.producer;
2 |
3 | import io.goudai.starter.kafka.core.JsonUtils;
4 | import org.springframework.beans.factory.annotation.Autowired;
5 |
6 | import java.util.Date;
7 |
8 | public class TransactionProducerImpl implements TransactionProducer {
9 |
10 | @Autowired
11 | private KafkaEventDatasource goudaiEventMapper;
12 |
13 | @Autowired
14 | private IdGenerator idGenerator;
15 |
16 | @Override
17 | public void send(String topic, Object payload) {
18 | KafkaEvent event = new KafkaEvent();
19 | event.setTopic(topic);
20 | event.setPayload(JsonUtils.toJson(payload));
21 | event.setProjectId("1");
22 | event.setId(idGenerator.nextId());
23 | event.setVersion(0);
24 | event.setIsSent(false);
25 | event.setCreatedTime(new Date());
26 | goudaiEventMapper.insertEvent(event);
27 | }
28 |
29 |
30 | }
31 |
--------------------------------------------------------------------------------
/spring-boot-starter-kafka-transaction-producer/src/main/resources/META-INF/spring.factories:
--------------------------------------------------------------------------------
1 | org.springframework.boot.autoconfigure.EnableAutoConfiguration=io.github.goudai.kafka.transaction.producer.KafkaTransactionProducerAutoConfiguration
--------------------------------------------------------------------------------