├── .gitignore
├── README.md
├── pom.xml
├── src
└── main
│ ├── java
│ ├── distributed
│ │ └── transaction
│ │ │ ├── Application.java
│ │ │ ├── DistributedTranTest.java
│ │ │ ├── mappers
│ │ │ ├── EventProcessMapper.java
│ │ │ ├── EventPublishMapper.java
│ │ │ └── UserMapper.java
│ │ │ ├── model
│ │ │ ├── EventProcess.java
│ │ │ ├── EventPublish.java
│ │ │ ├── User.java
│ │ │ └── Voucher.java
│ │ │ ├── schedule
│ │ │ └── EventPublishSchedule.java
│ │ │ ├── service
│ │ │ ├── EventProcessService.java
│ │ │ ├── EventPublishService.java
│ │ │ └── UserService.java
│ │ │ └── utils
│ │ │ ├── EventProcessStatus.java
│ │ │ ├── EventPublishStatus.java
│ │ │ ├── EventType.java
│ │ │ └── KafkaUtils.java
│ └── kafka
│ │ └── KafkaTest.java
│ └── resources
│ ├── application.properties
│ ├── logback.xml
│ └── mybatis
│ └── mapper
│ ├── EventProcessMapper.xml
│ ├── EventPublishMapper.xml
│ └── UserMapper.xml
└── test.sql
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | /.settings
3 | .classpath
4 | .project
5 | .DS_Store
6 | *.iml
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 说明手册
2 | 本工程是按照https://www.cnblogs.com/520playboy/p/6715438.html (使用事件和消息队列实现分布式事务)这个文章实现的。目前实现了注册用户成功,保存用户表和消息表EventPublish,然后轮询根据消息表发送kafka消息,然后消费者订阅消息并消费消息,把消息入库到EventProcess表。至于后面定时器定时处理EventProcess表中的数据的业务逻辑就没有写了(为了快速消费可以开启多线程,用springboot的异步@Async来并发处理数据)
3 |
4 | ## 工程使用方法:
5 | 1.启动Application,这里会自动启动定时器去扫描相关表,也会启动一个线程作为消费者去连接到kafka接收消息。
6 | 2.执行DistributedTranTest的addUser()可以模拟注册用户的逻辑。
7 |
8 | ## 这篇文章学习心得:
9 | 1.上面文章中的第2个图的EventPublish应该是EventProcess,至于为什么要把EventPublish和EventProcess分开,明明字段都是一样的。因为用户注册端和代金券端是2个微服务,很可能用的是2个库,所以这里用的2个表,如果2个微服务用的同一个库,能不能不分开(待考虑)?
10 | 2.Q:文章中的消费者消费消息,接收到kafka消息后,就根据消息进行实际的业务处理,直到一次接收到的消息全部处理成功,再给kafka手动返回消息偏移量。如果处理过程中失败,只需要下次再重新从kafka接收上次没有处理成功的偏移量开始算起的消息,为什么需要EventProcess表来存一次消息,然后在后面处理?
11 | A:主要是消息可能接收到了一批多个,在业务处理过程中(假设比较复杂),比如大部分都处理成功了,但是有几个就是处理不成功要报异常,那么这批数据的偏移量就都不能提交,因为一旦提交,这几条消息就没机会处理了,但是直接插入EventProcess表,由于业务简单的insert,一般都会成功,然后提交偏移量。后面再依据EventProcess表的数据来进行处理,处理失败也没有关系,下次轮询继续处理那几条处理失败的数据即可。
12 |
13 | ### 思考:
14 | 1.新增用户成功后,如果有那种时效性要求比较高的业务,需要马上知道有用户注册成功了,怎么设计
15 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 |
5 | com.feiynn.study
6 | study-spring-kafka
7 | 0.0.1-SNAPSHOT
8 | jar
9 |
10 | study-spring-kafka
11 | http://maven.apache.org
12 |
13 |
14 | UTF-8
15 | 1.1.11
16 |
17 |
18 |
19 | org.springframework.boot
20 | spring-boot-starter-parent
21 | 2.0.3.RELEASE
22 |
23 |
24 |
25 |
26 |
27 | org.springframework.kafka
28 | spring-kafka
29 | 2.1.7.RELEASE
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 | org.springframework.boot
41 | spring-boot-starter-test
42 |
43 |
44 |
45 | mysql
46 | mysql-connector-java
47 | compile
48 |
49 |
50 |
51 |
52 | com.alibaba
53 | druid-spring-boot-starter
54 | 1.1.10
55 |
56 |
57 |
58 | org.mybatis.spring.boot
59 | mybatis-spring-boot-starter
60 | 1.3.2
61 |
62 |
63 |
64 | ch.qos.logback
65 | logback-core
66 | ${logback.version}
67 |
68 |
69 | ch.qos.logback
70 | logback-classic
71 | ${logback.version}
72 |
73 |
74 |
75 | junit
76 | junit
77 | 4.12
78 |
79 |
80 | org.hamcrest
81 | hamcrest-library
82 | 1.3
83 |
84 |
85 | com.google.guava
86 | guava
87 | 19.0
88 |
89 |
90 | com.google.code.gson
91 | gson
92 | 2.8.5
93 |
94 |
95 |
96 |
97 |
98 |
99 | org.apache.maven.plugins
100 | maven-compiler-plugin
101 |
102 | 1.8
103 | 1.8
104 |
105 |
106 |
107 |
108 |
109 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/Application.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction;
2 |
3 | import org.mybatis.spring.annotation.MapperScan;
4 | import org.springframework.boot.SpringApplication;
5 | import org.springframework.boot.autoconfigure.SpringBootApplication;
6 | import org.springframework.scheduling.annotation.EnableScheduling;
7 |
/**
 * Application entry point.
 *
 * <p>Boots Spring, enables the scheduler (which drives EventPublishSchedule)
 * and scans the MyBatis mapper interfaces under distributed.transaction.mappers.
 *
 * @author Dean
 */
@SpringBootApplication
@EnableScheduling
@MapperScan("distributed.transaction.mappers")
public class Application {
    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
21 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/DistributedTranTest.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction;
2 |
3 | import distributed.transaction.model.User;
4 | import distributed.transaction.service.UserService;
5 | import org.junit.Test;
6 | import org.junit.runner.RunWith;
7 | import org.springframework.boot.test.context.SpringBootTest;
8 | import org.springframework.test.context.junit4.SpringRunner;
9 |
10 | import javax.annotation.Resource;
11 |
12 | /**
13 | * 用kafka实现分布式事务
14 | *
15 | * @author Dean
16 | * @see "使用事件和消息队列实现分布式事务"
17 | */
18 | @RunWith(SpringRunner.class)
19 | @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, classes = Application.class)
20 | public class DistributedTranTest {
21 |
22 | @Resource
23 | private UserService userService;
24 |
25 | @Test
26 | public void addUser() {
27 | int userCount = 10;
28 | for (int i = 0; i < userCount; i++) {
29 | User user = new User("foo" + i);
30 | userService.addUser(user);
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/mappers/EventProcessMapper.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.mappers;
2 |
3 | import distributed.transaction.model.EventProcess;
4 |
/**
 * MyBatis mapper for the consumer-side event table (t_kafka_event_process).
 *
 * @author Dean
 */
public interface EventProcessMapper {
    /**
     * Persists a new EventProcess record (payload, eventType, status).
     */
    void save(EventProcess eventProcess);
}
11 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/mappers/EventPublishMapper.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.mappers;
2 |
3 | import distributed.transaction.model.EventPublish;
4 | import org.apache.ibatis.annotations.Param;
5 |
6 | import java.util.List;
7 | import java.util.Map;
8 |
9 | /**
10 | * @author Dean
11 | */
12 | public interface EventPublishMapper {
13 |
14 | void save(EventPublish eventPublish);
15 |
16 | List list(Map param);
17 |
18 | void updateStatus(@Param("ids") List ids, @Param("status") String status);
19 | }
20 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/mappers/UserMapper.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.mappers;
2 |
3 | import distributed.transaction.model.User;
4 |
/**
 * MyBatis mapper for the user table (t_kafka_user).
 *
 * @author Dean
 */
public interface UserMapper {

    /**
     * Persists a new user (name, createTime).
     */
    void save(User user);
}
12 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/model/EventProcess.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.model;
2 |
3 | import com.google.common.base.MoreObjects;
4 | import distributed.transaction.utils.EventProcessStatus;
5 | import distributed.transaction.utils.EventType;
6 |
7 | /**
8 | * 事件处理记录
9 | *
10 | * @author Dean
11 | */
12 | public class EventProcess {
13 | private Long id;
14 | private String payload;
15 |
16 | /**
17 | * 事件类型
18 | */
19 | private EventType eventType;
20 |
21 | /**
22 | * 事件处理状态
23 | */
24 | private EventProcessStatus status;
25 |
26 | @Override
27 | public String toString() {
28 | return MoreObjects.toStringHelper(this)
29 | .add("id", id)
30 | .add("payload", payload)
31 | .add("eventType", eventType)
32 | .add("status", status)
33 | .toString();
34 | }
35 |
36 | public Long getId() {
37 | return id;
38 | }
39 |
40 | public void setId(Long id) {
41 | this.id = id;
42 | }
43 |
44 | public String getPayload() {
45 | return payload;
46 | }
47 |
48 | public void setPayload(String payload) {
49 | this.payload = payload;
50 | }
51 |
52 | public EventType getEventType() {
53 | return eventType;
54 | }
55 |
56 | public void setEventType(EventType eventType) {
57 | this.eventType = eventType;
58 | }
59 |
60 | public EventProcessStatus getStatus() {
61 | return status;
62 | }
63 |
64 | public void setStatus(EventProcessStatus status) {
65 | this.status = status;
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/model/EventPublish.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.model;
2 |
3 | import com.google.common.base.MoreObjects;
4 | import distributed.transaction.utils.EventPublishStatus;
5 | import distributed.transaction.utils.EventType;
6 |
7 | /**
8 | * 事件发布记录
9 | *
10 | * @author Dean
11 | */
12 | public class EventPublish {
13 |
14 | private Long id;
15 | /**
16 | * 事件内容,保存发送到消息队列的json字符串
17 | * payload单词含义:有效载荷,在计算机中代表一个数据包或者其它传输单元中运载的基本必要数据
18 | */
19 | private String payload;
20 |
21 | /**
22 | * 事件类型
23 | */
24 | private EventType eventType;
25 |
26 | /**
27 | * 事件发布状态
28 | */
29 | private EventPublishStatus status;
30 |
31 | @Override
32 | public String toString() {
33 | return MoreObjects.toStringHelper(this)
34 | .add("id", id)
35 | .add("payload", payload)
36 | .add("eventType", eventType)
37 | .add("status", status)
38 | .toString();
39 | }
40 |
41 | public Long getId() {
42 | return id;
43 | }
44 |
45 | public void setId(Long id) {
46 | this.id = id;
47 | }
48 |
49 | public String getPayload() {
50 | return payload;
51 | }
52 |
53 | public void setPayload(String payload) {
54 | this.payload = payload;
55 | }
56 |
57 | public EventType getEventType() {
58 | return eventType;
59 | }
60 |
61 | public void setEventType(EventType eventType) {
62 | this.eventType = eventType;
63 | }
64 |
65 | public EventPublishStatus getStatus() {
66 | return status;
67 | }
68 |
69 | public void setStatus(EventPublishStatus status) {
70 | this.status = status;
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/model/User.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.model;
2 |
3 | import java.util.Date;
4 |
/**
 * User entity persisted to t_kafka_user.
 *
 * <p>{@code java.util.Date} is mutable, so the timestamp is copied defensively
 * on the way in and out; callers cannot alter this object's internal state
 * through a shared Date reference.
 *
 * @author Dean
 */
public class User {
    private Long id;
    private String name;
    private Date createTime;

    /**
     * Creates a user with the given name; createTime is initialised to "now".
     *
     * @param name user name
     */
    public User(String name) {
        this.name = name;
        this.createTime = new Date();
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return a defensive copy of the creation timestamp (never the internal
     *         instance), or {@code null} if unset
     */
    public Date getCreateTime() {
        return createTime == null ? null : new Date(createTime.getTime());
    }

    public void setCreateTime(Date createTime) {
        // Defensive copy: java.util.Date is mutable.
        this.createTime = createTime == null ? null : new Date(createTime.getTime());
    }
}
42 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/model/Voucher.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.model;
2 |
/**
 * Voucher (代金券) granted to a user.
 *
 * <p>NOTE(review): amount is held as a {@code Double}; for real money values
 * {@code BigDecimal} would avoid binary-rounding surprises — confirm before
 * relying on exact arithmetic.
 *
 * @author Dean
 */
public class Voucher {

    private Long id;
    private Long userId;

    /** Voucher category. */
    private String type;

    /** Monetary amount of the voucher. */
    private Double amount;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getUserId() {
        return userId;
    }

    public void setUserId(Long userId) {
        this.userId = userId;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Double getAmount() {
        return amount;
    }

    public void setAmount(Double amount) {
        this.amount = amount;
    }
}
52 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/schedule/EventPublishSchedule.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.schedule;
2 |
3 | import distributed.transaction.service.EventPublishService;
4 | import org.slf4j.Logger;
5 | import org.slf4j.LoggerFactory;
6 | import org.springframework.scheduling.annotation.Scheduled;
7 | import org.springframework.stereotype.Component;
8 |
9 | import javax.annotation.Resource;
10 |
/**
 * Scheduler that periodically asks EventPublishService to flush NEW
 * EventPublish records to Kafka.
 *
 * @author Dean
 */
@Component
public class EventPublishSchedule {

    private final static Logger LOGGER = LoggerFactory.getLogger(EventPublishSchedule.class);

    @Resource
    private EventPublishService eventPublishService;

    /**
     * Runs every 5000 ms (fixed rate: intervals are measured start-to-start).
     */
    @Scheduled(fixedRate = 5000)
    public void publish() {
        LOGGER.debug("EventPublishSchedule execute.");
        eventPublishService.publish();
    }
}
33 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/service/EventProcessService.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.service;
2 |
3 | import com.google.common.util.concurrent.ThreadFactoryBuilder;
4 | import distributed.transaction.mappers.EventProcessMapper;
5 | import distributed.transaction.model.EventProcess;
6 | import distributed.transaction.utils.EventProcessStatus;
7 | import distributed.transaction.utils.EventType;
8 | import distributed.transaction.utils.KafkaUtils;
9 | import org.springframework.stereotype.Service;
10 |
11 | import javax.annotation.PostConstruct;
12 | import javax.annotation.Resource;
13 | import java.util.concurrent.ExecutorService;
14 | import java.util.concurrent.Executors;
15 | import java.util.concurrent.ThreadFactory;
16 |
/**
 * Consumer side of the flow: pulls USER_CREATED records from Kafka and stores
 * each one in the EventProcess table (status NEW) for later business handling.
 *
 * @author Dean
 */
@Service
public class EventProcessService {

    @Resource
    private EventProcessMapper eventProcessMapper;

    /**
     * Starts a single daemon consumer thread after bean construction.
     * The thread runs {@link KafkaUtils#consume} which never returns, so it
     * lives for the JVM's lifetime; being a daemon it does not block shutdown.
     */
    @PostConstruct
    public void init() {
        ThreadFactory threadFactory = new ThreadFactoryBuilder()
                .setNameFormat("MqMessageConsumeThread-%d")
                .setDaemon(true)
                .build();
        ExecutorService executorService = Executors.newSingleThreadExecutor(threadFactory);
        executorService.execute(new MqMessageConsumeThread());
    }

    /**
     * Inserts every received record into EventProcess with status NEW.
     * The insert is a cheap single statement, so the offset commit performed
     * afterwards in KafkaUtils.consume rarely loses work; failed business
     * processing is retried later from the table, not from Kafka.
     */
    private class MqMessageConsumeThread implements Runnable {
        @Override
        public void run() {
            KafkaUtils.consume(consumerRecord -> {
                EventProcess eventProcess = new EventProcess();
                eventProcess.setPayload(consumerRecord.value());
                eventProcess.setEventType(EventType.USER_CREATED);
                eventProcess.setStatus(EventProcessStatus.NEW);
                eventProcessMapper.save(eventProcess);
            });
        }
    }
}
49 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/service/EventPublishService.java:
--------------------------------------------------------------------------------
package distributed.transaction.service;

import com.google.common.collect.Maps;
import distributed.transaction.mappers.EventPublishMapper;
import distributed.transaction.model.EventPublish;
import distributed.transaction.utils.EventPublishStatus;
import distributed.transaction.utils.KafkaUtils;
import org.assertj.core.util.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
19 |
20 | /**
21 | * @author Dean
22 | */
23 | @Service
24 | public class EventPublishService {
25 |
26 | private final static Logger LOGGER = LoggerFactory.getLogger(EventPublishService.class);
27 |
28 | @Resource
29 | private EventPublishMapper eventPublishMapper;
30 |
31 | @Transactional(rollbackFor = Exception.class)
32 | public void publish() {
33 | //查询所有状态为NEW的事件
34 | Map params = Maps.newHashMap();
35 | params.put("status", EventPublishStatus.NEW.name());
36 | List eventPublishes = eventPublishMapper.list(params);
37 | if (!CollectionUtils.isEmpty(eventPublishes)) {
38 | //发送消息队列
39 | List ids = sendEventPublish(eventPublishes);
40 | if (!CollectionUtils.isEmpty(ids)) {
41 | //更新状态为PUBLISHED
42 | eventPublishMapper.updateStatus(ids, EventPublishStatus.PUBLISHED.name());
43 | }
44 | }
45 | }
46 |
47 | /**
48 | * 发送EventPublish到消息队列
49 | *
50 | * @param eventPublishes EventPublish对象集合
51 | * @return 发送成功的EventPublish的ID集合
52 | */
53 | private static List sendEventPublish(List eventPublishes) {
54 | if (CollectionUtils.isEmpty(eventPublishes)) {
55 | return Collections.emptyList();
56 | }
57 | List ids = Lists.newArrayList();
58 | for (EventPublish eventPublish : eventPublishes) {
59 | try {
60 | KafkaUtils.sendSync(eventPublish.getEventType().name(), eventPublish.getPayload());
61 | ids.add(eventPublish.getId());
62 | } catch (Exception e) {
63 | LOGGER.error("发送Kafka消息失败,eventPublish={}", eventPublish, e);
64 | }
65 | }
66 | LOGGER.debug("发送Kafka消息成功,ids={}", ids);
67 | return ids;
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/service/UserService.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.service;
2 |
3 | import com.google.gson.Gson;
4 | import distributed.transaction.mappers.EventPublishMapper;
5 | import distributed.transaction.mappers.UserMapper;
6 | import distributed.transaction.model.EventPublish;
7 | import distributed.transaction.model.User;
8 | import distributed.transaction.utils.EventPublishStatus;
9 | import distributed.transaction.utils.EventType;
10 | import org.springframework.stereotype.Service;
11 | import org.springframework.transaction.annotation.Transactional;
12 |
13 | import javax.annotation.Resource;
14 |
15 | /**
16 | * @author Dean
17 | */
18 | @Service
19 | public class UserService {
20 |
21 | @Resource
22 | private UserMapper userMapper;
23 |
24 | @Resource
25 | private EventPublishMapper eventPublishMapper;
26 |
27 | @Transactional(rollbackFor = Exception.class)
28 | public void addUser(User user) {
29 | userMapper.save(user);
30 |
31 | EventPublish eventPublish = new EventPublish();
32 | eventPublish.setEventType(EventType.USER_CREATED);
33 | eventPublish.setPayload(new Gson().toJson(user));
34 | eventPublish.setStatus(EventPublishStatus.NEW);
35 | eventPublishMapper.save(eventPublish);
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/utils/EventProcessStatus.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.utils;
2 |
/**
 * Processing state of a consumer-side event (EventProcess).
 *
 * @author Dean
 */
public enum EventProcessStatus {
    /**
     * Not yet processed.
     */
    NEW,
    /**
     * Processing finished.
     */
    PROCESSED
}
16 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/utils/EventPublishStatus.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.utils;
2 |
/**
 * Publication state of a producer-side event (EventPublish).
 *
 * @author Dean
 */
public enum EventPublishStatus {
    /**
     * Waiting to be published to Kafka.
     */
    NEW,
    /**
     * Already published to Kafka.
     */
    PUBLISHED
}
18 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/utils/EventType.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.utils;
2 |
3 |
/**
 * Event types. The enum name doubles as the Kafka topic name
 * (see KafkaUtils.sendSync callers and KafkaUtils.consume's subscribe call).
 *
 * @author Dean
 */
public enum EventType {
    /**
     * A user was created successfully.
     */
    USER_CREATED
}
15 |
--------------------------------------------------------------------------------
/src/main/java/distributed/transaction/utils/KafkaUtils.java:
--------------------------------------------------------------------------------
1 | package distributed.transaction.utils;
2 |
3 | import com.google.common.collect.Lists;
4 | import org.apache.kafka.clients.consumer.CommitFailedException;
5 | import org.apache.kafka.clients.consumer.ConsumerRecord;
6 | import org.apache.kafka.clients.consumer.ConsumerRecords;
7 | import org.apache.kafka.clients.consumer.KafkaConsumer;
8 | import org.apache.kafka.clients.producer.KafkaProducer;
9 | import org.apache.kafka.clients.producer.Producer;
10 | import org.apache.kafka.clients.producer.ProducerRecord;
11 | import org.slf4j.Logger;
12 | import org.slf4j.LoggerFactory;
13 |
14 | import java.util.Properties;
15 | import java.util.concurrent.ExecutionException;
16 | import java.util.function.Consumer;
17 |
18 | /**
19 | * @author Dean
20 | */
21 | public class KafkaUtils {
22 | private final static Logger LOGGER = LoggerFactory.getLogger(KafkaUtils.class);
23 |
24 | private static Producer producer;
25 |
26 | private static KafkaConsumer consumer;
27 |
28 | static {
29 | Properties producerProps = new Properties();
30 | //必需的3个参数
31 | producerProps.put("bootstrap.servers", "localhost:9092");
32 | producerProps.put("key.serializer",
33 | "org.apache.kafka.common.serialization.StringSerializer");
34 | producerProps.put("value.serializer",
35 | "org.apache.kafka.common.serialization.StringSerializer");
36 | producer = new KafkaProducer<>(producerProps);
37 |
38 | Properties consumerProps = new Properties();
39 | consumerProps.put("bootstrap.servers", "localhost:9092");
40 | consumerProps.put("key.deserializer",
41 | "org.apache.kafka.common.serialization.StringDeserializer");
42 | consumerProps.put("value.deserializer",
43 | "org.apache.kafka.common.serialization.StringDeserializer");
44 | consumerProps.put("group.id", "VoucherGroup");
45 | //关闭自动提交offset
46 | consumerProps.put("enable.auto.commit", "false");
47 | consumer = new KafkaConsumer<>(consumerProps);
48 | }
49 |
50 | /**
51 | * 同步发送消息
52 | *
53 | * @param topic topic
54 | * @param value 消息内容
55 | */
56 | public static void sendSync(String topic, String value) throws ExecutionException, InterruptedException {
57 | producer.send(new ProducerRecord<>(topic, value)).get();
58 | }
59 |
60 | /**
61 | * 消费消息
62 | *
63 | * @param c 回调函数,处理消息
64 | */
65 | public static void consume(Consumer> c) {
66 | consumer.subscribe(Lists.newArrayList(EventType.USER_CREATED.name()));
67 | while (true) {
68 | ConsumerRecords records = consumer.poll(100);
69 | for (ConsumerRecord record : records) {
70 | LOGGER.debug("接收到消息,ConsumerRecord={}", record);
71 | c.accept(record);
72 | }
73 | try {
74 | //同步手动提交offset
75 | consumer.commitSync();
76 | } catch (CommitFailedException e) {
77 | LOGGER.error("Kafka消费者提交offset失败", e);
78 | }
79 | }
80 | }
81 |
82 | }
83 |
--------------------------------------------------------------------------------
/src/main/java/kafka/KafkaTest.java:
--------------------------------------------------------------------------------
1 | package kafka;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerRecord;
4 | import org.apache.kafka.clients.consumer.ConsumerRecords;
5 | import org.apache.kafka.clients.consumer.KafkaConsumer;
6 | import org.apache.kafka.clients.producer.KafkaProducer;
7 | import org.apache.kafka.clients.producer.Producer;
8 | import org.apache.kafka.clients.producer.ProducerRecord;
9 | import org.junit.Test;
10 |
11 | import java.util.Arrays;
12 | import java.util.Properties;
13 | import java.util.concurrent.Future;
14 |
15 | /**
16 | * Kafka原生API
17 | * 所有Properties参数含义可以参考官网Producer Configs和Consumer Configs 以及《Kafka权威指南》
18 | *
19 | * @author Dean
20 | */
21 | public class KafkaTest {
22 |
23 | @Test
24 | public void produce() {
25 | String topic = "myTopic1";
26 | Properties props = new Properties();
27 | //必需的3个参数
28 | props.put("bootstrap.servers", "localhost:9092");
29 | props.put("key.serializer",
30 | "org.apache.kafka.common.serialization.StringSerializer");
31 | props.put("value.serializer",
32 | "org.apache.kafka.common.serialization.StringSerializer");
33 |
34 | props.put("acks", "all");
35 | props.put("retries", 0);
36 |
37 | //KafkaProducer是线程安全的,多个线程使用同一个实例效率更高,见KafkaProducer的源码注释
38 | Producer producer = new KafkaProducer<>(props);
39 |
40 | //topic如果不存在会自动创建,topic会保存在zookeeper的znode中
41 | //忽略send()方法返回的Future对象,代表不关心消息是否发送成功
42 | producer.send(new ProducerRecord<>(topic, "a1", "b1"));
43 |
44 | //同步发送消息并等待结果返回
45 | Future future = producer.send(new ProducerRecord<>(topic, "a2", "b2"));
46 | try {
47 | future.get();
48 | } catch (Exception e) {
49 | //出现异常代表服务器端发生错误
50 | e.printStackTrace();
51 | }
52 |
53 | //异步发送消息
54 | producer.send(new ProducerRecord<>(topic, "a2", "b2"), (metadata, exception) -> {
55 | //发送消息出现异常
56 | if (exception != null) {
57 | log(exception);
58 | } else {
59 | log("消息发送成功了");
60 | }
61 | });
62 | producer.close();
63 | }
64 |
65 | @Test
66 | public void consume() {
67 | Properties props = new Properties();
68 | props.put("bootstrap.servers", "localhost:9092");
69 | props.put("group.id", "myGroup1");
70 | props.put("key.deserializer",
71 | "org.apache.kafka.common.serialization.StringDeserializer");
72 | props.put("value.deserializer",
73 | "org.apache.kafka.common.serialization.StringDeserializer");
74 | KafkaConsumer consumer = new KafkaConsumer<>(props);
75 | consumer.subscribe(Arrays.asList("myTopic1", "myTopic2"));
76 | while (true) {
77 | ConsumerRecords records = consumer.poll(100);
78 | for (ConsumerRecord record : records) {
79 | System.out.printf("offset = %d, key = %s, value = %s%n",
80 | record.offset(),
81 | record.key(),
82 | record.value());
83 | }
84 | }
85 |
86 | }
87 |
88 | private void log(Object object) {
89 | System.out.println(object);
90 | }
91 | }
--------------------------------------------------------------------------------
/src/main/resources/application.properties:
--------------------------------------------------------------------------------
#druid
# Druid data source. The password below is stored encrypted and is decrypted
# at runtime by Druid's "config" filter using ${public-key}
# (config.decrypt=true in connectionProperties).
spring.datasource.druid.url=jdbc:mysql://localhost/test
spring.datasource.druid.username=root
spring.datasource.druid.password=fG0UltvDf1/iqAqtuhamkBzwKshosp+/l6AlPOyywMgS/x6keGA5UUhwdfDNZnWnoiSRIewyDF0VzCVLRYLaYA==
public-key=MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAIaZeUjq4jOXEwL/HyxhsAGwOZq4PA2ctfLPONfvRQKpOqeX7upvSmxJ3cr/paScbVaTldaPOjiQUJr2GiAx3KUCAwEAAQ==
spring.datasource.druid.connectionProperties=config.decrypt=true;config.decrypt.key=${public-key}
spring.datasource.druid.filters=config,stat

# Connection-pool sizing and validation settings.
spring.datasource.druid.initial-size=1
spring.datasource.druid.max-active=20
spring.datasource.druid.min-idle=1
spring.datasource.druid.max-wait=60000
spring.datasource.druid.pool-prepared-statements=true
spring.datasource.druid.max-pool-prepared-statement-per-connection-size=20
spring.datasource.druid.validation-query=SELECT 1 FROM DUAL
spring.datasource.druid.test-on-borrow=false
spring.datasource.druid.test-on-return=false
spring.datasource.druid.test-while-idle=true
spring.datasource.druid.time-between-eviction-runs-millis=60000
spring.datasource.druid.min-evictable-idle-time-millis=300000

# Log executable SQL through slf4j.
spring.datasource.druid.filter.slf4j.enabled=true
spring.datasource.druid.filter.slf4j.statement-executable-sql-log-enable=true

#mybatis
# Mapper XML location and package whose classes get short type aliases.
mybatis.mapper-locations=classpath:mybatis/mapper/*.xml
mybatis.type-aliases-package=distributed.transaction.model
--------------------------------------------------------------------------------
/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | [%d] %thread %-5level %logger:%line - %msg%n
5 | true
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/src/main/resources/mybatis/mapper/EventProcessMapper.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | INSERT INTO t_kafka_event_process (payload, eventType, status)
16 | VALUES (#{payload}, #{eventType}, #{status})
17 |
18 |
19 |
--------------------------------------------------------------------------------
/src/main/resources/mybatis/mapper/EventPublishMapper.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | INSERT INTO t_kafka_event_publish (payload, eventType, status)
16 | VALUES (#{payload}, #{eventType}, #{status})
17 |
18 |
19 |
27 |
28 |
29 | UPDATE t_kafka_event_publish
30 | SET status = #{status}
31 |
32 |
33 |
34 | #{id}
35 |
36 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/src/main/resources/mybatis/mapper/UserMapper.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | INSERT INTO t_kafka_user (name, createTime)
16 | VALUES (#{name}, #{createTime})
17 |
18 |
19 |
--------------------------------------------------------------------------------
/test.sql:
--------------------------------------------------------------------------------
/*
Navicat MySQL Data Transfer

Source Server         : mysql
Source Server Version : 50625
Source Host           : localhost
Source Database       : test

Target Server Version : 50625
File Encoding         : utf-8

Date: 07/14/2018 14:47:52 PM
*/

SET NAMES utf8;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for `t_kafka_event_process`
-- ----------------------------
-- Consumer-side event store: Kafka messages are inserted here with status NEW
-- and processed later (maps to the EventProcess entity).
DROP TABLE IF EXISTS `t_kafka_event_process`;
CREATE TABLE `t_kafka_event_process` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  `payload` varchar(2000) NOT NULL,
  `eventType` varchar(30) NOT NULL,
  `status` varchar(30) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Table structure for `t_kafka_event_publish`
-- ----------------------------
-- Producer-side event store: rows written in the same transaction as the
-- business change, then published to Kafka by the scheduler (EventPublish).
DROP TABLE IF EXISTS `t_kafka_event_publish`;
CREATE TABLE `t_kafka_event_publish` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  `payload` varchar(2000) NOT NULL,
  `eventType` varchar(30) NOT NULL,
  `status` varchar(30) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Table structure for `t_kafka_user`
-- ----------------------------
-- Registered users (maps to the User entity).
DROP TABLE IF EXISTS `t_kafka_user`;
CREATE TABLE `t_kafka_user` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  `name` varchar(255) NOT NULL,
  `createTime` datetime NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

SET FOREIGN_KEY_CHECKS = 1;
54 |
--------------------------------------------------------------------------------