├── docs ├── kafka-how-to-not-loss-message.md ├── 4-SpringBoot+Kafka之如何优雅的创建topic.md ├── 5.md ├── 1-大白话带你认识Kafka.md ├── 2-5分钟带你体验一把Kafka.md └── 3-10分钟学会如何在SpringBoot程序中使用Kafka作为消息队列.md ├── kafka-intro-maven-demo ├── .idea │ ├── .gitignore │ ├── vcs.xml │ ├── misc.xml │ ├── compiler.xml │ ├── checkstyle-idea.xml │ └── inspectionProfiles │ │ └── Project_Default.xml ├── target │ └── classes │ │ ├── Main.class │ │ ├── ConsumerCreator.class │ │ ├── KafkaConstants.class │ │ └── ProducerCreator.class ├── src │ └── main │ │ └── java │ │ ├── KafkaConstants.java │ │ ├── ConsumerCreator.java │ │ ├── ProducerCreator.java │ │ └── Main.java └── pom.xml ├── springboot-kafka-03-transaction ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ ├── maven-wrapper.properties │ │ └── MavenWrapperDownloader.java ├── src │ ├── test │ │ └── java │ │ │ └── cn │ │ │ └── javaguide │ │ │ └── demo │ │ │ └── DemoApplicationTests.java │ └── main │ │ ├── java │ │ └── cn │ │ │ └── javaguide │ │ │ ├── entity │ │ │ └── Book.java │ │ │ ├── DemoApplication.java │ │ │ ├── service │ │ │ ├── BookProducerService.java │ │ │ └── BookConsumerService.java │ │ │ ├── controller │ │ │ └── BookController.java │ │ │ └── config │ │ │ └── KafkaConfig.java │ │ └── resources │ │ └── application.yml ├── .gitignore ├── pom.xml ├── mvnw.cmd └── mvnw ├── springboot-kafka-01-send-objects ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ ├── maven-wrapper.properties │ │ └── MavenWrapperDownloader.java ├── src │ ├── test │ │ └── java │ │ │ └── cn │ │ │ └── javaguide │ │ │ └── springbootkafka01sendobjects │ │ │ └── SpringbootKafka01SendObjectsApplicationTests.java │ └── main │ │ ├── resources │ │ └── application.yml │ │ └── java │ │ └── cn │ │ └── javaguide │ │ └── springbootkafka01sendobjects │ │ ├── SpringbootKafka01SendObjectsApplication.java │ │ ├── entity │ │ └── Book.java │ │ ├── service │ │ ├── BookProducerService.java │ │ └── BookConsumerService.java │ │ ├── config │ │ └── KafkaConfig.java │ │ └── controller │ │ └── BookController.java ├── .gitignore ├── pom.xml ├── mvnw.cmd └── mvnw ├── springboot-kafka-02-config-topics ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ ├── maven-wrapper.properties │ │ └── MavenWrapperDownloader.java ├── src │ ├── main │ │ ├── resources │ │ │ └── application.yml │ │ └── java │ │ │ └── cn │ │ │ └── github │ │ │ ├── SpringbootKafka02ConfigTopicsApplication.java │ │ │ └── config │ │ │ ├── TopicConfigurations.java │ │ │ └── TopicAdministrator.java │ └── test │ │ └── java │ │ └── cn │ │ └── github │ │ └── springbootkafka02configtopics │ │ └── SpringbootKafka02ConfigTopicsApplicationTests.java ├── .gitignore ├── pom.xml ├── mvnw.cmd └── mvnw └── README.md /docs/kafka-how-to-not-loss-message.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/.idea/.gitignore: -------------------------------------------------------------------------------- 1 | # Default ignored files 2 | /workspace.xml -------------------------------------------------------------------------------- /kafka-intro-maven-demo/target/classes/Main.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snailclimb/springboot-kafka/HEAD/kafka-intro-maven-demo/target/classes/Main.class -------------------------------------------------------------------------------- /kafka-intro-maven-demo/target/classes/ConsumerCreator.class: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snailclimb/springboot-kafka/HEAD/kafka-intro-maven-demo/target/classes/ConsumerCreator.class -------------------------------------------------------------------------------- /kafka-intro-maven-demo/target/classes/KafkaConstants.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snailclimb/springboot-kafka/HEAD/kafka-intro-maven-demo/target/classes/KafkaConstants.class -------------------------------------------------------------------------------- /kafka-intro-maven-demo/target/classes/ProducerCreator.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snailclimb/springboot-kafka/HEAD/kafka-intro-maven-demo/target/classes/ProducerCreator.class -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snailclimb/springboot-kafka/HEAD/springboot-kafka-03-transaction/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snailclimb/springboot-kafka/HEAD/springboot-kafka-01-send-objects/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snailclimb/springboot-kafka/HEAD/springboot-kafka-02-config-topics/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip 2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 3 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip 2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 3 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip 2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 3 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/.idea/vcs.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/src/main/java/KafkaConstants.java: -------------------------------------------------------------------------------- 1 | public class KafkaConstants { 2 | public static final String BROKER_LIST = "localhost:9092"; 3 | public static final String CLIENT_ID = "client1"; 4 | public static String GROUP_ID_CONFIG="consumerGroup1"; 5 | private KafkaConstants() { 6 | 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/test/java/cn/javaguide/demo/DemoApplicationTests.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.demo; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class DemoApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/main/java/cn/javaguide/entity/Book.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.entity; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @AllArgsConstructor 9 | @NoArgsConstructor 10 | public class Book { 11 | private Long id; 12 | private String name; 13 | 14 | } 15 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 9090 3 | spring: 4 | kafka: 5 | bootstrap-servers: localhost:9092 6 | kafka: 7 | topics: 8 | - name: topic1 9 | num-partitions: 3 10 | replication-factor: 1 11 | - name: topic2 12 | num-partitions: 1 13 | replication-factor: 1 14 | - name: topic3 15 | num-partitions: 2 16 | replication-factor: 1 17 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/main/java/cn/javaguide/DemoApplication.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class DemoApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(DemoApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/test/java/cn/javaguide/springbootkafka01sendobjects/SpringbootKafka01SendObjectsApplicationTests.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.springbootkafka01sendobjects; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class SpringbootKafka01SendObjectsApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- 
/springboot-kafka-02-config-topics/src/test/java/cn/github/springbootkafka02configtopics/SpringbootKafka02ConfigTopicsApplicationTests.java: -------------------------------------------------------------------------------- 1 | package cn.github.springbootkafka02configtopics; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class SpringbootKafka02ConfigTopicsApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 9090 3 | spring: 4 | kafka: 5 | bootstrap-servers: localhost:9092 6 | consumer: 7 | # 配置消费者消息offset是否自动重置(消费者重连会能够接收最开始的消息) 8 | auto-offset-reset: earliest 9 | producer: 10 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 11 | retries: 3 # 重试次数 12 | kafka: 13 | topic: 14 | my-topic: my-topic 15 | my-topic2: my-topic2 16 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/** 5 | !**/src/test/** 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | 30 | ### VS Code ### 31 | .vscode/ 32 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/** 5 | !**/src/test/** 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | 30 | ### VS Code ### 31 | .vscode/ 32 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/src/main/java/cn/github/SpringbootKafka02ConfigTopicsApplication.java: -------------------------------------------------------------------------------- 1 | package cn.github; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class SpringbootKafka02ConfigTopicsApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(SpringbootKafka02ConfigTopicsApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/** 5 | !**/src/test/** 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | 
.factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | 30 | ### VS Code ### 31 | .vscode/ 32 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/main/java/cn/javaguide/springbootkafka01sendobjects/SpringbootKafka01SendObjectsApplication.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.springbootkafka01sendobjects; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class SpringbootKafka01SendObjectsApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(SpringbootKafka01SendObjectsApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/.idea/compiler.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 9090 3 | spring: 4 | kafka: 5 | bootstrap-servers: localhost:9092,localhost:9093,localhost:9094 6 | consumer: 7 | # 配置消费者消息offset是否自动重置(消费者重连会能够接收最开始的消息) 8 | auto-offset-reset: earliest 9 | # 事务隔离级别 10 | isolation-level: read_committed #仅读取已提交的消息 11 | producer: 12 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 13 | retries: 3 # 重试次数 14 | # 启用事务 15 | transaction-id-prefix: my-tx. # 事务编号前缀 16 | kafka: 17 | topic: 18 | topic-test-transaction: topic-test-transaction 19 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/.idea/checkstyle-idea.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 15 | 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > Kafka 是我在疫情期间在游戏之余学的。虽然之前用过 ActiveMQ 和 RabbitMQ,但是在 Kafka 这门技术面前我也算是一个初学者。文章中若有说法有点完善或者不准确的地方敬请指出。 2 | > 3 | > 总结出来的原因如下: 4 | > 5 | > 1. 方便他人的学习的同时也可以巩固自己的所学知识以便后续回顾; 6 | > 2. 想起来补充....... 7 | 8 | **理论:** 9 | 10 | 1. [大白话带你认识Kafka](./docs/1-大白话带你认识Kafka.md) (这篇文章将会用大白话的方式带你认识 Kafka) 11 | 2. [面试官问我如何保证Kafka不丢失消息?我哭了!](./docs/kafka-how-to-not-loss-message/md) 12 | 13 | **实战:** 14 | 15 | 1. [5分钟带你体验一把 Kafka](./docs/2-5分钟带你体验一把Kafka.md) (这篇文章让你学会如何使用 Docker 安装Kafka环境、使用命令行测试消息队列的功能以及如何在 Java 程序中简单使用Kafka) 16 | 2. [10分钟学会如何在SpringBoot程序中使用Kafka作为消息队列?](./docs/3-10分钟学会如何在SpringBoot程序中使用Kafka作为消息队列.md) (教你用正确的姿势整合Kafka 到 Spring Boot 中作为消息队列) 17 | 3. [SpringBoot+Kafka之如何优雅的创建 topic](./docs/4-SpringBoot+Kafka之如何优雅的创建topic.md) (教你用正确的姿势整合Kafka 到 Spring Boot 中作为消息队列) 18 | 4. ...... 
19 | 20 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/main/java/cn/javaguide/springbootkafka01sendobjects/entity/Book.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.springbootkafka01sendobjects.entity; 2 | 3 | public class Book { 4 | private Long id; 5 | private String name; 6 | 7 | public Book() { 8 | } 9 | 10 | public Book(Long id, String name) { 11 | this.id = id; 12 | this.name = name; 13 | } 14 | 15 | public Long getId() { 16 | return id; 17 | } 18 | 19 | public void setId(Long id) { 20 | this.id = id; 21 | } 22 | 23 | public String getName() { 24 | return name; 25 | } 26 | 27 | public void setName(String name) { 28 | this.name = name; 29 | } 30 | 31 | @Override 32 | public String toString() { 33 | return "Book{" + 34 | "id=" + id + 35 | ", name='" + name + '\'' + 36 | '}'; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/src/main/java/cn/github/config/TopicConfigurations.java: -------------------------------------------------------------------------------- 1 | package cn.github.config; 2 | 3 | import lombok.Getter; 4 | import lombok.Setter; 5 | import lombok.ToString; 6 | import org.apache.kafka.clients.admin.NewTopic; 7 | import org.springframework.boot.context.properties.ConfigurationProperties; 8 | import org.springframework.context.annotation.Configuration; 9 | 10 | import java.util.List; 11 | 12 | @Configuration 13 | @ConfigurationProperties(prefix = "kafka") 14 | @Setter 15 | @Getter 16 | @ToString 17 | class TopicConfigurations { 18 | private List topics; 19 | 20 | @Setter 21 | @Getter 22 | @ToString 23 | static class Topic { 24 | String name; 25 | Integer numPartitions = 3; 26 | Short replicationFactor = 1; 27 | 28 | NewTopic toNewTopic() { 29 | return new NewTopic(this.name, this.numPartitions, this.replicationFactor); 30 | } 31 | 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/src/main/java/ConsumerCreator.java: -------------------------------------------------------------------------------- 1 | import org.apache.kafka.clients.consumer.Consumer; 2 | import org.apache.kafka.clients.consumer.ConsumerConfig; 3 | import org.apache.kafka.clients.consumer.KafkaConsumer; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | 6 | import java.util.Properties; 7 | 8 | public class ConsumerCreator { 9 | 10 | public static Consumer createConsumer() { 11 | Properties properties = new Properties(); 12 | properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConstants.BROKER_LIST); 13 | properties.put(ConsumerConfig.GROUP_ID_CONFIG, KafkaConstants.GROUP_ID_CONFIG); 14 | properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); 15 | properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); 16 | return new KafkaConsumer<>(properties); 17 | } 18 | 19 | } 20 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/src/main/java/ProducerCreator.java: -------------------------------------------------------------------------------- 1 | import org.apache.kafka.clients.producer.KafkaProducer; 2 | import org.apache.kafka.clients.producer.Producer; 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import 
org.apache.kafka.common.serialization.StringSerializer; 5 | 6 | import java.util.Properties; 7 | 8 | /** 9 | * @author shuang.kou 10 | */ 11 | public class ProducerCreator { 12 | 13 | 14 | public static Producer createProducer() { 15 | Properties properties = new Properties(); 16 | properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConstants.BROKER_LIST); 17 | properties.put(ProducerConfig.CLIENT_ID_CONFIG, KafkaConstants.CLIENT_ID); 18 | properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 19 | properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 20 | return new KafkaProducer<>(properties); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/main/java/cn/javaguide/service/BookProducerService.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.service; 2 | 3 | import org.springframework.kafka.core.KafkaTemplate; 4 | import org.springframework.stereotype.Service; 5 | import org.springframework.transaction.annotation.Transactional; 6 | 7 | import java.util.ArrayList; 8 | import java.util.List; 9 | 10 | /** 11 | * @author shuang.kou 12 | */ 13 | @Service 14 | public class BookProducerService { 15 | 16 | private List sendedBooks = new ArrayList<>(); 17 | 18 | private final KafkaTemplate kafkaTemplate; 19 | 20 | public BookProducerService(KafkaTemplate kafkaTemplate) { 21 | this.kafkaTemplate = kafkaTemplate; 22 | } 23 | 24 | @Transactional(rollbackFor = Exception.class) 25 | public void sendMessage(String topic, Object o) { 26 | // 发送消息 27 | kafkaTemplate.send(topic, o); 28 | // 模拟发生异常 29 | int a = 1 / 0; 30 | // 模拟业务操作 31 | sendedBooks.add(o); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | cn.javaguide 8 | kafka-intro-maven-demo 9 | 1.0-SNAPSHOT 10 | 11 | 12 | 13 | org.apache.maven.plugins 14 | maven-compiler-plugin 15 | 16 | 8 17 | 8 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | org.apache.kafka 26 | kafka-clients 27 | 2.2.0 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/main/java/cn/javaguide/service/BookConsumerService.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.service; 2 | 3 | import cn.javaguide.entity.Book; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import org.springframework.beans.factory.annotation.Value; 7 | import org.springframework.kafka.annotation.KafkaListener; 8 | import org.springframework.stereotype.Service; 9 | 10 | @Service 11 | public class BookConsumerService { 12 | 13 | @Value("${kafka.topic.topic-test-transaction}") 14 | String topicTestTransaction; 15 | 16 | private final Logger logger = LoggerFactory.getLogger(BookProducerService.class); 17 | 18 | 19 | @KafkaListener(topics = {"${kafka.topic.topic-test-transaction}"}, id = "bookGroup") 20 | public void consumeMessage(Book book) { 21 | logger.info("消费者消费{}的消息 -> {}", topicTestTransaction, book.toString()); 22 | throw new RuntimeException("dlt"); 23 | } 24 | 25 | @KafkaListener(topics = {"${kafka.topic.topic-test-transaction}"}, id = "dltGroup") 26 | public void dltConsumeMessage(Book book) { 27 | 
logger.info("消费者消费{}的消息 -> {}", topicTestTransaction, book.toString()); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/src/main/java/cn/github/config/TopicAdministrator.java: -------------------------------------------------------------------------------- 1 | package cn.github.config; 2 | 3 | import org.apache.kafka.clients.admin.NewTopic; 4 | import org.springframework.beans.factory.InitializingBean; 5 | import org.springframework.context.annotation.Configuration; 6 | import org.springframework.web.context.support.GenericWebApplicationContext; 7 | 8 | import javax.annotation.PostConstruct; 9 | import java.util.List; 10 | 11 | /** 12 | * @author shuang.kou 13 | */ 14 | @Configuration 15 | public class TopicAdministrator { 16 | private final TopicConfigurations configurations; 17 | private final GenericWebApplicationContext context; 18 | 19 | public TopicAdministrator(TopicConfigurations configurations, GenericWebApplicationContext genericContext) { 20 | this.configurations = configurations; 21 | this.context = genericContext; 22 | } 23 | 24 | @PostConstruct 25 | public void init() { 26 | initializeBeans(configurations.getTopics()); 27 | } 28 | 29 | private void initializeBeans(List topics) { 30 | topics.forEach(t -> context.registerBean(t.name, NewTopic.class, t::toNewTopic)); 31 | } 32 | 33 | 34 | } 35 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/main/java/cn/javaguide/springbootkafka01sendobjects/service/BookProducerService.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.springbootkafka01sendobjects.service; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import org.springframework.kafka.core.KafkaTemplate; 6 | import org.springframework.kafka.support.SendResult; 7 | import org.springframework.stereotype.Service; 8 | import org.springframework.util.concurrent.ListenableFuture; 9 | 10 | @Service 11 | public class BookProducerService { 12 | 13 | private static final Logger logger = LoggerFactory.getLogger(BookProducerService.class); 14 | 15 | private final KafkaTemplate kafkaTemplate; 16 | 17 | public BookProducerService(KafkaTemplate kafkaTemplate) { 18 | this.kafkaTemplate = kafkaTemplate; 19 | } 20 | 21 | public void sendMessage(String topic, Object o) { 22 | ListenableFuture> future = kafkaTemplate.send(topic, o); 23 | future.addCallback(result -> logger.info("生产者成功发送消息到topic:{} partition:{}的消息", result.getRecordMetadata().topic(), result.getRecordMetadata().partition()), 24 | ex -> logger.error("生产者发送消失败,原因:{}", ex.getMessage())); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/main/java/cn/javaguide/controller/BookController.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.controller; 2 | 3 | import cn.javaguide.entity.Book; 4 | import cn.javaguide.service.BookProducerService; 5 | import org.springframework.beans.factory.annotation.Value; 6 | import org.springframework.web.bind.annotation.PostMapping; 7 | import org.springframework.web.bind.annotation.RequestMapping; 8 | import org.springframework.web.bind.annotation.RequestParam; 9 | import org.springframework.web.bind.annotation.RestController; 10 | 11 | import java.util.concurrent.atomic.AtomicLong; 12 | 13 | /** 14 | * @author 
shuang.kou 15 | */ 16 | @RestController 17 | @RequestMapping(value = "/book") 18 | public class BookController { 19 | @Value("${kafka.topic.topic-test-transaction}") 20 | String topicTestTransaction; 21 | 22 | private final BookProducerService producer; 23 | private AtomicLong atomicLong = new AtomicLong(); 24 | 25 | BookController(BookProducerService producer) { 26 | this.producer = producer; 27 | } 28 | 29 | @PostMapping 30 | public void sendMessageToKafkaTopic(@RequestParam("name") String name) { 31 | this.producer.sendMessage(topicTestTransaction, new Book(atomicLong.addAndGet(1), name)); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/main/java/cn/javaguide/springbootkafka01sendobjects/config/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.springbootkafka01sendobjects.config; 2 | 3 | import org.apache.kafka.clients.admin.NewTopic; 4 | import org.springframework.beans.factory.annotation.Value; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.context.annotation.Configuration; 7 | import org.springframework.kafka.support.converter.RecordMessageConverter; 8 | import org.springframework.kafka.support.converter.StringJsonMessageConverter; 9 | 10 | /** 11 | * @author shuang.kou 12 | */ 13 | @Configuration 14 | public class KafkaConfig { 15 | 16 | @Value("${kafka.topic.my-topic}") 17 | String myTopic; 18 | @Value("${kafka.topic.my-topic2}") 19 | String myTopic2; 20 | 21 | /** 22 | * JSON消息转换器 23 | */ 24 | @Bean 25 | public RecordMessageConverter jsonConverter() { 26 | return new StringJsonMessageConverter(); 27 | } 28 | 29 | /** 30 | * 通过注入一个 NewTopic 类型的 Bean 来创建 topic,如果 topic 已存在,则会忽略。 31 | */ 32 | @Bean 33 | public NewTopic myTopic() { 34 | return new NewTopic(myTopic, 2, (short) 1); 35 | } 36 | 37 | @Bean 38 | public NewTopic myTopic2() { 39 | return new NewTopic(myTopic2, 1, (short) 1); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/main/java/cn/javaguide/springbootkafka01sendobjects/controller/BookController.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.springbootkafka01sendobjects.controller; 2 | 3 | import cn.javaguide.springbootkafka01sendobjects.entity.Book; 4 | import cn.javaguide.springbootkafka01sendobjects.service.BookProducerService; 5 | import org.springframework.beans.factory.annotation.Value; 6 | import org.springframework.web.bind.annotation.PostMapping; 7 | import org.springframework.web.bind.annotation.RequestMapping; 8 | import org.springframework.web.bind.annotation.RequestParam; 9 | import org.springframework.web.bind.annotation.RestController; 10 | 11 | import java.util.concurrent.atomic.AtomicLong; 12 | 13 | /** 14 | * @author shuang.kou 15 | */ 16 | @RestController 17 | @RequestMapping(value = "/book") 18 | public class BookController { 19 | @Value("${kafka.topic.my-topic}") 20 | String myTopic; 21 | @Value("${kafka.topic.my-topic2}") 22 | String myTopic2; 23 | private final BookProducerService producer; 24 | private AtomicLong atomicLong = new AtomicLong(); 25 | 26 | BookController(BookProducerService producer) { 27 | this.producer = producer; 28 | } 29 | 30 | @PostMapping 31 | public void sendMessageToKafkaTopic(@RequestParam("name") String name) { 32 | this.producer.sendMessage(myTopic, new 
Book(atomicLong.addAndGet(1), name)); 33 | this.producer.sendMessage(myTopic2, new Book(atomicLong.addAndGet(1), name)); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/.idea/inspectionProfiles/Project_Default.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 36 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/src/main/java/cn/javaguide/springbootkafka01sendobjects/service/BookConsumerService.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.springbootkafka01sendobjects.service; 2 | 3 | import cn.javaguide.springbootkafka01sendobjects.entity.Book; 4 | import com.fasterxml.jackson.core.JsonProcessingException; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import org.springframework.beans.factory.annotation.Value; 10 | import org.springframework.kafka.annotation.KafkaListener; 11 | import org.springframework.stereotype.Service; 12 | 13 | @Service 14 | public class BookConsumerService { 15 | 16 | @Value("${kafka.topic.my-topic}") 17 | private String myTopic; 18 | @Value("${kafka.topic.my-topic2}") 19 | private String myTopic2; 20 | private final Logger logger = LoggerFactory.getLogger(BookProducerService.class); 21 | private final ObjectMapper objectMapper = new ObjectMapper(); 22 | 23 | 24 | @KafkaListener(topics = {"${kafka.topic.my-topic}"}, groupId = "group1") 25 | public void consumeMessage(ConsumerRecord bookConsumerRecord) { 26 | try { 27 | Book book = objectMapper.readValue(bookConsumerRecord.value(), Book.class); 28 | logger.info("消费者消费topic:{} partition:{}的消息 -> {}", bookConsumerRecord.topic(), bookConsumerRecord.partition(), book.toString()); 29 | } catch (JsonProcessingException e) { 30 | e.printStackTrace(); 31 | } 32 | } 33 | 34 | @KafkaListener(topics = {"${kafka.topic.my-topic2}"}, groupId = "group2") 35 | public void consumeMessage2(Book book) { 36 | logger.info("消费者消费{}的消息 -> {}", myTopic2, book.toString()); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /kafka-intro-maven-demo/src/main/java/Main.java: -------------------------------------------------------------------------------- 1 | import org.apache.kafka.clients.consumer.Consumer; 2 | import org.apache.kafka.clients.consumer.ConsumerRecord; 3 | import org.apache.kafka.clients.consumer.ConsumerRecords; 4 | import org.apache.kafka.clients.producer.Producer; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | import org.apache.kafka.clients.producer.RecordMetadata; 7 | 8 | import java.time.Duration; 9 | import java.util.Collections; 10 | import java.util.concurrent.ExecutionException; 11 | 12 | public class Main { 13 | private static final String TOPIC = "test-topic"; 14 | 15 | public static void main(String[] args) { 16 | sendMessage(); 17 | consumeMessage(); 18 | } 19 | 20 | static void sendMessage() { 21 | Producer producer = ProducerCreator.createProducer(); 22 | ProducerRecord record = 23 | new ProducerRecord<>(TOPIC, "hello, Kafka!"); 24 | try { 25 | //send message 26 | RecordMetadata metadata = producer.send(record).get(); 27 | System.out.println("Record sent to partition " + metadata.partition() 28 | + " with offset " + metadata.offset()); 29 | } catch 
(ExecutionException | InterruptedException e) { 30 | System.out.println("Error in sending record"); 31 | e.printStackTrace(); 32 | } 33 | producer.close(); 34 | } 35 | 36 | static void consumeMessage() { 37 | Consumer consumer = ConsumerCreator.createConsumer(); 38 | // 循环消费消息 39 | while (true) { 40 | //subscribe topic and consume message 41 | consumer.subscribe(Collections.singletonList(TOPIC)); 42 | 43 | ConsumerRecords consumerRecords = 44 | consumer.poll(Duration.ofMillis(1000)); 45 | for (ConsumerRecord consumerRecord : consumerRecords) { 46 | System.out.println("Consumer consume message:" + consumerRecord.value()); 47 | } 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.2.4.RELEASE 9 | 10 | 11 | cn.javaguide 12 | springboot-kafka-01-send-objects 13 | 0.0.1-SNAPSHOT 14 | springboot-kafka-01-send-objects 15 | Demo project for Spring Boot 16 | 17 | 18 | 1.8 19 | 20 | 21 | 22 | 23 | org.springframework.boot 24 | spring-boot-starter-web 25 | 26 | 27 | org.springframework.kafka 28 | spring-kafka 29 | 30 | 31 | 32 | org.springframework.boot 33 | spring-boot-starter-test 34 | test 35 | 36 | 37 | org.junit.vintage 38 | junit-vintage-engine 39 | 40 | 41 | 42 | 43 | org.springframework.kafka 44 | spring-kafka-test 45 | test 46 | 47 | 48 | 49 | 50 | 51 | 52 | org.springframework.boot 53 | spring-boot-maven-plugin 54 | 55 | 56 | 57 | 58 | 59 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.2.4.RELEASE 9 | 10 | 11 | cn.javaguide 12 | springboot-kafka-03-transaction 13 | 0.0.1-SNAPSHOT 14 | demo 15 | Demo project for Spring Boot 16 | 17 | 18 | 1.8 19 | 20 | 21 | 22 | 23 | org.springframework.boot 24 | spring-boot-starter-web 25 | 26 | 27 | org.springframework.kafka 28 | spring-kafka 29 | 30 | 31 | 32 | org.projectlombok 33 | lombok 34 | true 35 | 36 | 37 | org.springframework.boot 38 | spring-boot-starter-test 39 | test 40 | 41 | 42 | org.junit.vintage 43 | junit-vintage-engine 44 | 45 | 46 | 47 | 48 | org.springframework.kafka 49 | spring-kafka-test 50 | test 51 | 52 | 53 | 54 | 55 | 56 | 57 | org.springframework.boot 58 | spring-boot-maven-plugin 59 | 60 | 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.2.4.RELEASE 9 | 10 | 11 | cn.github 12 | springboot-kafka-02-config-topics 13 | 0.0.1-SNAPSHOT 14 | springboot-kafka-02-config-topics 15 | Demo project for Spring Boot 16 | 17 | 18 | 1.8 19 | 20 | 21 | 22 | 23 | org.springframework.boot 24 | spring-boot-starter-web 25 | 26 | 27 | org.springframework.kafka 28 | spring-kafka 29 | 30 | 31 | org.projectlombok 32 | lombok 33 | 1.18.10 34 | provided 35 | 36 | 37 | org.springframework.boot 38 | spring-boot-starter-test 39 | test 40 | 41 | 42 | org.junit.vintage 43 | junit-vintage-engine 44 | 45 | 46 | 47 | 48 | org.springframework.kafka 49 | spring-kafka-test 50 | test 51 | 52 | 53 | 54 | 
55 | 56 | 57 | org.springframework.boot 58 | spring-boot-maven-plugin 59 | 60 | 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/src/main/java/cn/javaguide/config/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package cn.javaguide.config; 2 | 3 | import org.apache.kafka.clients.admin.NewTopic; 4 | import org.springframework.beans.factory.annotation.Value; 5 | import org.springframework.boot.autoconfigure.kafka.ConcurrentKafkaListenerContainerFactoryConfigurer; 6 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 7 | import org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 10 | import org.springframework.kafka.core.ConsumerFactory; 11 | import org.springframework.kafka.core.KafkaAdmin; 12 | import org.springframework.kafka.core.KafkaTemplate; 13 | import org.springframework.kafka.listener.DeadLetterPublishingRecoverer; 14 | import org.springframework.kafka.listener.SeekToCurrentErrorHandler; 15 | import org.springframework.kafka.support.converter.RecordMessageConverter; 16 | import org.springframework.kafka.support.converter.StringJsonMessageConverter; 17 | import org.springframework.util.backoff.FixedBackOff; 18 | 19 | /** 20 | * @author shuang.kou 21 | */ 22 | @Configuration 23 | public class KafkaConfig { 24 | 25 | @Value("${kafka.topic.topic-test-transaction}") 26 | String topicTestTransaction; 27 | 28 | @Value("${kafka.topic.topic-test-transaction2}") 29 | String topicTestTransaction2; 30 | 31 | @Bean 32 | public ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory( 33 | ConcurrentKafkaListenerContainerFactoryConfigurer configurer, 34 | ConsumerFactory kafkaConsumerFactory, 35 | KafkaTemplate template) { 36 | ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); 37 | configurer.configure(factory, kafkaConsumerFactory); 38 | // dead-letter after 3 tries 39 | FixedBackOff fixedBackOff = new FixedBackOff(0L, 2); 40 | DeadLetterPublishingRecoverer deadLetterPublishingRecoverer = new DeadLetterPublishingRecoverer(template); 41 | SeekToCurrentErrorHandler seekToCurrentErrorHandler = new SeekToCurrentErrorHandler(deadLetterPublishingRecoverer, fixedBackOff); 42 | factory.setErrorHandler(seekToCurrentErrorHandler); 43 | return factory; 44 | } 45 | 46 | /** 47 | * JSON消息转换器 48 | */ 49 | @Bean 50 | public RecordMessageConverter jsonConverter() { 51 | return new StringJsonMessageConverter(); 52 | } 53 | 54 | /** 55 | * 通过注入一个 NewTopic 类型的 Bean 来创建 topic,如果 topic 已存在,则会忽略。 56 | */ 57 | @Bean 58 | public NewTopic myTopic() { 59 | return new NewTopic(topicTestTransaction, 2, (short) 3); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /docs/4-SpringBoot+Kafka之如何优雅的创建topic.md: -------------------------------------------------------------------------------- 1 | 在我们之前的代码中,我们是通过注入 `NewTopic` 类型的对象来创建 Kafka 的 topic 的。当我们的项目需要创建的 topic 逐渐变多的话,通过这种方式创建就不是那么友好了,我觉得主要带来的问题有两个: 2 | 3 | 1. Topic 信息不清晰; 4 | 2. 
代码量变的庞大; 5 | 6 | ```java 7 | /** 8 | * 通过注入一个 NewTopic 类型的 Bean 来创建 topic,如果 topic 已存在,则会忽略。 9 | */ 10 | @Bean 11 | public NewTopic myTopic() { 12 | return new NewTopic(myTopic, 2, (short) 1); 13 | } 14 | 15 | @Bean 16 | public NewTopic myTopic2() { 17 | return new NewTopic(myTopic2, 1, (short) 1); 18 | } 19 | ``` 20 | 21 | 今天说一下我对于这个问题的解决办法: 22 | 23 | 在 `application.xml` 配置文件中配置 Kafka 连接信息以及我们项目中用到的 topic。 24 | 25 | ```yaml 26 | server: 27 | port: 9090 28 | spring: 29 | kafka: 30 | bootstrap-servers: localhost:9092 31 | kafka: 32 | topics: 33 | - name: topic1 34 | num-partitions: 3 35 | replication-factor: 1 36 | - name: topic2 37 | num-partitions: 1 38 | replication-factor: 1 39 | - name: topic3 40 | num-partitions: 2 41 | replication-factor: 1 42 | ``` 43 | 44 | `TopicConfigurations` 类专门用来读取我们的 topic 配置信息: 45 | 46 | ```java 47 | 48 | import lombok.Getter; 49 | import lombok.Setter; 50 | import lombok.ToString; 51 | import org.apache.kafka.clients.admin.NewTopic; 52 | import org.springframework.boot.context.properties.ConfigurationProperties; 53 | import org.springframework.context.annotation.Configuration; 54 | 55 | import java.util.List; 56 | 57 | @Configuration 58 | @ConfigurationProperties(prefix = "kafka") 59 | @Setter 60 | @Getter 61 | @ToString 62 | class TopicConfigurations { 63 | private List topics; 64 | 65 | @Setter 66 | @Getter 67 | @ToString 68 | static class Topic { 69 | String name; 70 | Integer numPartitions = 3; 71 | Short replicationFactor = 1; 72 | 73 | NewTopic toNewTopic() { 74 | return new NewTopic(this.name, this.numPartitions, this.replicationFactor); 75 | } 76 | 77 | } 78 | } 79 | 80 | ``` 81 | 82 | 在 `TopicAdministrator` 类中我们手动将 topic 对象注册到容器中。 83 | 84 | ```java 85 | 86 | import org.apache.kafka.clients.admin.NewTopic; 87 | import org.springframework.beans.factory.InitializingBean; 88 | import org.springframework.context.annotation.Configuration; 89 | import org.springframework.web.context.support.GenericWebApplicationContext; 90 | 91 | import javax.annotation.PostConstruct; 92 | import java.util.List; 93 | 94 | /** 95 | * @author shuang.kou 96 | */ 97 | @Configuration 98 | public class TopicAdministrator { 99 | private final TopicConfigurations configurations; 100 | private final GenericWebApplicationContext context; 101 | 102 | public TopicAdministrator(TopicConfigurations configurations, GenericWebApplicationContext genericContext) { 103 | this.configurations = configurations; 104 | this.context = genericContext; 105 | } 106 | 107 | @PostConstruct 108 | public void init() { 109 | initializeBeans(configurations.getTopics()); 110 | } 111 | 112 | private void initializeBeans(List topics) { 113 | topics.forEach(t -> context.registerBean(t.name, NewTopic.class, t::toNewTopic)); 114 | } 115 | 116 | 117 | } 118 | 119 | ``` 120 | 121 | 这样的话,当我们运行项目之后,就会自动创建 3 个名为:topic1、topic2 和 topic3 的主题了。 -------------------------------------------------------------------------------- /docs/5.md: -------------------------------------------------------------------------------- 1 | [搞定 Spring Boot 整合 Kafka(spring-kafka深入探秘)](https://mp.weixin.qq.com/s/O53swRW1QkaoIg3W4l0F0A) 2 | 3 | [Spring for Apache Kafka](https://docs.spring.io/spring-kafka/docs/2.3.5.RELEASE/reference/html/#preface) 4 | 5 | [芋道 Spring Boot 消息队列 Kafka 入门](http://www.iocoder.cn/Spring-Boot/Kafka/) 6 | 7 | ### 8 | 9 | 由于本次实战需要用到 Kafka 集群,所以我们首先来使用 Docker 搭建 1个节点 Zookeeper+3个节点的 Kafka 的环境。不清楚如何搭建的话,可以参考之前的文章: 10 | 11 | ## Kafka 事务 12 | 13 | **Kafka 的事务消息默认要求你的 Kafka Broker的节点在 3 个以上。** 这也就是为什么第一步要搭建 kafka 
集群的一个原因了。当然你也可以通过修改`transaction.state.log.replication.factor=1`参数来做到单节点 kafka 就支持事务。 14 | 15 | ### 两个重要的配置参数 16 | 17 | 1. `transaction-id-prefix`: 事务编号前缀 18 | 2. ` isolation-level: read_committed` :仅读取已提交的消息 19 | 20 | `application.yml` 配置文件如下 21 | 22 | ```yaml 23 | server: 24 | port: 9090 25 | spring: 26 | kafka: 27 | bootstrap-servers: localhost:9092,localhost:9093,localhost:9094 28 | consumer: 29 | # 配置消费者消息offset是否自动重置(消费者重连会能够接收最开始的消息) 30 | auto-offset-reset: earliest 31 | # 事务隔离级别 32 | isolation-level: read_committed #仅读取已提交的消息 33 | producer: 34 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 35 | retries: 3 # 重试次数 36 | # 启用事务 37 | transaction-id-prefix: my-tx. # 事务编号前缀 38 | kafka: 39 | topic: 40 | topic-test-transaction: topic-test-transaction 41 | 42 | ``` 43 | 44 | ### `executeInTransaction()`方法 45 | 46 | 如何发送带事务的消息呢?一种很简单的方法就是将我们的发送消息的逻辑和业务逻辑放到`KafkaTemplate`的`executeInTransaction()`中。 47 | 48 | `executeInTransaction()`方法参数如下: 49 | 50 | ```java 51 | public T executeInTransaction(OperationsCallback callback) { 52 | ...... 53 | } 54 | ``` 55 | 56 | `OperationsCallback`源码如下: 57 | 58 | ```java 59 | interface OperationsCallback { 60 | 61 | T doInOperations(KafkaOperations operations); 62 | 63 | } 64 | ``` 65 | 66 | 所以我们的代码可以这样写: 67 | 68 | ```java 69 | 70 | @Service 71 | public class BookProducerService { 72 | 73 | private List sendedBooks = new ArrayList<>(); 74 | private static final Logger logger = LoggerFactory.getLogger(BookProducerService.class); 75 | 76 | private final KafkaTemplate kafkaTemplate; 77 | 78 | public BookProducerService(KafkaTemplate kafkaTemplate) { 79 | this.kafkaTemplate = kafkaTemplate; 80 | } 81 | 82 | public void sendMessage(String topic, Object o) { 83 | kafkaTemplate.executeInTransaction(new KafkaOperations.OperationsCallback() { 84 | @Override 85 | public Object doInOperations(KafkaOperations operations) { 86 | // 发送消息 87 | operations.send(topic, o); 88 | // 模拟发生异常 89 | int a = 1 / 0; 90 | // 模拟业务操作 91 | sendedBooks.add(o); 92 | return null; 93 | } 94 | }); 95 | } 96 | } 97 | 98 | ``` 99 | 100 | **上面的代码可以用Java8 的 Lambda 改写,Lambda 忘记的或者不会的速度补起来,源码中的 Java8 的各种骚操作太常见了:** 101 | 102 | ```java 103 | public void sendMessage(String topic, Object o) { 104 | kafkaTemplate.executeInTransaction(kafkaOperations -> { 105 | // 发送消息 106 | kafkaOperations.send(topic, o); 107 | // 模拟发生异常 108 | int a = 1 / 0; 109 | // 模拟业务操作 110 | sendedBooks.add(o); 111 | return null; 112 | }); 113 | } 114 | ``` 115 | 116 | 简单说一下为什么`KafkaTemplate`的`executeInTransaction()`中执行的操作具有事务属性。 117 | 118 | **我们在`executeInTransaction()`方法中传入了一个了回调,如果你看 `executeInTransaction()` 源码的话就会发现实际上这个方法内部已经帮我们把事务操作做好了,避免我们自己写一遍!** 119 | 120 | 截取部分代码帮助理解: 121 | 122 | ```java 123 | try { 124 | //doInOperations就是我们的发送消息的逻辑和业务逻辑代码 125 | T result = callback.doInOperations(this); 126 | try { 127 | // 提交正在执行的事物 128 | producer.commitTransaction(); 129 | } 130 | catch (Exception e) { 131 | throw new SkipAbortException(e); 132 | } 133 | return result; 134 | } 135 | ``` 136 | 137 | ### 配合 `@Transactional`注解使用 138 | 139 | 直接使用 `@Transactional`也可以: 140 | 141 | ```java 142 | @Transactional(rollbackFor = Exception.class) 143 | public void sendMessage(String topic, Object o) { 144 | // 发送消息 145 | kafkaTemplate.send(topic, o); 146 | // 模拟发生异常 147 | int a = 1 / 0; 148 | // 模拟业务操作 149 | sendedBooks.add(o); 150 | } 151 | ``` 152 | 153 | ## Kafka 错误处理 154 | 155 | Spring-Kafka 将这种正常情况下无法被消费的消息称为死信消息(Dead-Letter Message),将存储死信消息的特殊队列称为死信队列(Dead-Letter Queue)。 
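
处理这类死信消息,Spring-Kafka 提供了 `SeekToCurrentErrorHandler` 搭配 `DeadLetterPublishingRecoverer` 的方案:消费失败的消息先在本地按照设定的策略重试,重试耗尽后被转发到死信 topic(默认为原 topic 名加上 `.DLT` 后缀)。下面给出一个配置示意,思路与本仓库 `springboot-kafka-03-transaction` 模块中的 `KafkaConfig` 一致;其中重试次数、泛型参数等仅作演示,并不是唯一写法:

```java
import org.springframework.boot.autoconfigure.kafka.ConcurrentKafkaListenerContainerFactoryConfigurer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
import org.springframework.kafka.listener.SeekToCurrentErrorHandler;
import org.springframework.util.backoff.FixedBackOff;

@Configuration
public class KafkaErrorHandlingConfig {

    /**
     * 消费失败的消息先重试,重试耗尽后发布到死信 topic(原 topic 名 + ".DLT")。
     * FixedBackOff(0L, 2) 表示重试间隔为 0、额外重试 2 次,加上首次消费一共尝试 3 次。
     */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> kafkaConsumerFactory,
            KafkaTemplate<Object, Object> template) {
        ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        // 先套用 Spring Boot 自动配置(application.yml 中的消费者配置)
        configurer.configure(factory, kafkaConsumerFactory);
        // 重试耗尽后,把消息原样发布到对应的死信 topic
        DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(template);
        factory.setErrorHandler(new SeekToCurrentErrorHandler(recoverer, new FixedBackOff(0L, 2)));
        return factory;
    }
}
```

配置生效后,再为对应的 `xxx.DLT` topic 单独写一个 `@KafkaListener` 消费者即可拿到死信消息(具体 topic 名以实际配置为准)。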
-------------------------------------------------------------------------------- /springboot-kafka-03-transaction/.mvn/wrapper/MavenWrapperDownloader.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2007-present the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * https://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import java.net.*; 18 | import java.io.*; 19 | import java.nio.channels.*; 20 | import java.util.Properties; 21 | 22 | public class MavenWrapperDownloader { 23 | 24 | private static final String WRAPPER_VERSION = "0.5.6"; 25 | /** 26 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. 27 | */ 28 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" 29 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; 30 | 31 | /** 32 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to 33 | * use instead of the default one. 34 | */ 35 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH = 36 | ".mvn/wrapper/maven-wrapper.properties"; 37 | 38 | /** 39 | * Path where the maven-wrapper.jar will be saved to. 40 | */ 41 | private static final String MAVEN_WRAPPER_JAR_PATH = 42 | ".mvn/wrapper/maven-wrapper.jar"; 43 | 44 | /** 45 | * Name of the property which should be used to override the default download url for the wrapper. 46 | */ 47 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; 48 | 49 | public static void main(String args[]) { 50 | System.out.println("- Downloader started"); 51 | File baseDirectory = new File(args[0]); 52 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); 53 | 54 | // If the maven-wrapper.properties exists, read it and check if it contains a custom 55 | // wrapperUrl parameter. 56 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); 57 | String url = DEFAULT_DOWNLOAD_URL; 58 | if (mavenWrapperPropertyFile.exists()) { 59 | FileInputStream mavenWrapperPropertyFileInputStream = null; 60 | try { 61 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); 62 | Properties mavenWrapperProperties = new Properties(); 63 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); 64 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); 65 | } catch (IOException e) { 66 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); 67 | } finally { 68 | try { 69 | if (mavenWrapperPropertyFileInputStream != null) { 70 | mavenWrapperPropertyFileInputStream.close(); 71 | } 72 | } catch (IOException e) { 73 | // Ignore ... 
74 | } 75 | } 76 | } 77 | System.out.println("- Downloading from: " + url); 78 | 79 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); 80 | if (!outputFile.getParentFile().exists()) { 81 | if (!outputFile.getParentFile().mkdirs()) { 82 | System.out.println( 83 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); 84 | } 85 | } 86 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); 87 | try { 88 | downloadFileFromURL(url, outputFile); 89 | System.out.println("Done"); 90 | System.exit(0); 91 | } catch (Throwable e) { 92 | System.out.println("- Error downloading"); 93 | e.printStackTrace(); 94 | System.exit(1); 95 | } 96 | } 97 | 98 | private static void downloadFileFromURL(String urlString, File destination) throws Exception { 99 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { 100 | String username = System.getenv("MVNW_USERNAME"); 101 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); 102 | Authenticator.setDefault(new Authenticator() { 103 | @Override 104 | protected PasswordAuthentication getPasswordAuthentication() { 105 | return new PasswordAuthentication(username, password); 106 | } 107 | }); 108 | } 109 | URL website = new URL(urlString); 110 | ReadableByteChannel rbc; 111 | rbc = Channels.newChannel(website.openStream()); 112 | FileOutputStream fos = new FileOutputStream(destination); 113 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); 114 | fos.close(); 115 | rbc.close(); 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/.mvn/wrapper/MavenWrapperDownloader.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2007-present the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * https://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import java.net.*; 18 | import java.io.*; 19 | import java.nio.channels.*; 20 | import java.util.Properties; 21 | 22 | public class MavenWrapperDownloader { 23 | 24 | private static final String WRAPPER_VERSION = "0.5.6"; 25 | /** 26 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. 27 | */ 28 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" 29 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; 30 | 31 | /** 32 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to 33 | * use instead of the default one. 34 | */ 35 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH = 36 | ".mvn/wrapper/maven-wrapper.properties"; 37 | 38 | /** 39 | * Path where the maven-wrapper.jar will be saved to. 
40 | */ 41 | private static final String MAVEN_WRAPPER_JAR_PATH = 42 | ".mvn/wrapper/maven-wrapper.jar"; 43 | 44 | /** 45 | * Name of the property which should be used to override the default download url for the wrapper. 46 | */ 47 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; 48 | 49 | public static void main(String args[]) { 50 | System.out.println("- Downloader started"); 51 | File baseDirectory = new File(args[0]); 52 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); 53 | 54 | // If the maven-wrapper.properties exists, read it and check if it contains a custom 55 | // wrapperUrl parameter. 56 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); 57 | String url = DEFAULT_DOWNLOAD_URL; 58 | if (mavenWrapperPropertyFile.exists()) { 59 | FileInputStream mavenWrapperPropertyFileInputStream = null; 60 | try { 61 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); 62 | Properties mavenWrapperProperties = new Properties(); 63 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); 64 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); 65 | } catch (IOException e) { 66 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); 67 | } finally { 68 | try { 69 | if (mavenWrapperPropertyFileInputStream != null) { 70 | mavenWrapperPropertyFileInputStream.close(); 71 | } 72 | } catch (IOException e) { 73 | // Ignore ... 74 | } 75 | } 76 | } 77 | System.out.println("- Downloading from: " + url); 78 | 79 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); 80 | if (!outputFile.getParentFile().exists()) { 81 | if (!outputFile.getParentFile().mkdirs()) { 82 | System.out.println( 83 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); 84 | } 85 | } 86 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); 87 | try { 88 | downloadFileFromURL(url, outputFile); 89 | System.out.println("Done"); 90 | System.exit(0); 91 | } catch (Throwable e) { 92 | System.out.println("- Error downloading"); 93 | e.printStackTrace(); 94 | System.exit(1); 95 | } 96 | } 97 | 98 | private static void downloadFileFromURL(String urlString, File destination) throws Exception { 99 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { 100 | String username = System.getenv("MVNW_USERNAME"); 101 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); 102 | Authenticator.setDefault(new Authenticator() { 103 | @Override 104 | protected PasswordAuthentication getPasswordAuthentication() { 105 | return new PasswordAuthentication(username, password); 106 | } 107 | }); 108 | } 109 | URL website = new URL(urlString); 110 | ReadableByteChannel rbc; 111 | rbc = Channels.newChannel(website.openStream()); 112 | FileOutputStream fos = new FileOutputStream(destination); 113 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); 114 | fos.close(); 115 | rbc.close(); 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/.mvn/wrapper/MavenWrapperDownloader.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2007-present the original author or authors. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * https://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import java.net.*; 18 | import java.io.*; 19 | import java.nio.channels.*; 20 | import java.util.Properties; 21 | 22 | public class MavenWrapperDownloader { 23 | 24 | private static final String WRAPPER_VERSION = "0.5.6"; 25 | /** 26 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. 27 | */ 28 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" 29 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; 30 | 31 | /** 32 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to 33 | * use instead of the default one. 34 | */ 35 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH = 36 | ".mvn/wrapper/maven-wrapper.properties"; 37 | 38 | /** 39 | * Path where the maven-wrapper.jar will be saved to. 40 | */ 41 | private static final String MAVEN_WRAPPER_JAR_PATH = 42 | ".mvn/wrapper/maven-wrapper.jar"; 43 | 44 | /** 45 | * Name of the property which should be used to override the default download url for the wrapper. 46 | */ 47 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; 48 | 49 | public static void main(String args[]) { 50 | System.out.println("- Downloader started"); 51 | File baseDirectory = new File(args[0]); 52 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); 53 | 54 | // If the maven-wrapper.properties exists, read it and check if it contains a custom 55 | // wrapperUrl parameter. 56 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); 57 | String url = DEFAULT_DOWNLOAD_URL; 58 | if (mavenWrapperPropertyFile.exists()) { 59 | FileInputStream mavenWrapperPropertyFileInputStream = null; 60 | try { 61 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); 62 | Properties mavenWrapperProperties = new Properties(); 63 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); 64 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); 65 | } catch (IOException e) { 66 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); 67 | } finally { 68 | try { 69 | if (mavenWrapperPropertyFileInputStream != null) { 70 | mavenWrapperPropertyFileInputStream.close(); 71 | } 72 | } catch (IOException e) { 73 | // Ignore ... 
74 | } 75 | } 76 | } 77 | System.out.println("- Downloading from: " + url); 78 | 79 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); 80 | if (!outputFile.getParentFile().exists()) { 81 | if (!outputFile.getParentFile().mkdirs()) { 82 | System.out.println( 83 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); 84 | } 85 | } 86 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); 87 | try { 88 | downloadFileFromURL(url, outputFile); 89 | System.out.println("Done"); 90 | System.exit(0); 91 | } catch (Throwable e) { 92 | System.out.println("- Error downloading"); 93 | e.printStackTrace(); 94 | System.exit(1); 95 | } 96 | } 97 | 98 | private static void downloadFileFromURL(String urlString, File destination) throws Exception { 99 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { 100 | String username = System.getenv("MVNW_USERNAME"); 101 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); 102 | Authenticator.setDefault(new Authenticator() { 103 | @Override 104 | protected PasswordAuthentication getPasswordAuthentication() { 105 | return new PasswordAuthentication(username, password); 106 | } 107 | }); 108 | } 109 | URL website = new URL(urlString); 110 | ReadableByteChannel rbc; 111 | rbc = Channels.newChannel(website.openStream()); 112 | FileOutputStream fos = new FileOutputStream(destination); 113 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); 114 | fos.close(); 115 | rbc.close(); 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /docs/1-大白话带你认识Kafka.md: --------------------------------------------------------------------------------

## Preface

> I picked up Kafka during the pandemic, in between gaming sessions. Although I had used ActiveMQ and RabbitMQ before, I am still more or less a beginner when it comes to Kafka. If anything in this article is incomplete or inaccurate, please point it out.

Today let's talk about Kafka: the goal is to reintroduce it and walk through its most important concepts and questions. In later articles I will cover:

1. Some of Kafka's more advanced features, such as its internal workflow.
2. Installing Kafka with Docker and using it to send and consume a few messages.
3. How to use Kafka as a message queue in a Spring Boot application.

These days, whenever we mention Kafka we already take it for granted that it is an excellent message queue, and we routinely compare it with RocketMQ and RabbitMQ. In my view, Kafka's main advantages over other message queues are:

1. **Outstanding performance**: built in Scala and Java, with batching and asynchronous processing used heavily throughout its design; at peak it can handle messages on the order of tens of millions per second.
2. **Unmatched ecosystem compatibility**: Kafka's compatibility with the surrounding ecosystem is the best there is, bar none, especially in the big data and stream-processing space.

In fact, early Kafka was not a qualified message queue. In its early days Kafka was like a kid in ragged clothes in the message-queue world: its features were incomplete and it had a number of small problems, such as losing messages and not guaranteeing message reliability. Of course, this has a lot to do with the fact that LinkedIn originally built Kafka to process massive volumes of logs. It was never meant to be a message queue in the first place; it only stumbled into a solid position in the message-queue world later on.

As Kafka evolved, these shortcomings were gradually fixed. So the claim that **Kafka is unreliable as a message queue is now outdated!**

## Getting to Know Kafka

Let's start with the introduction on the official website, which should be the most authoritative and up to date. It being in English is no problem; I have already extracted the most important information for you.

From the official introduction we can learn the following:

Kafka is a distributed streaming platform. What exactly does that mean?

A streaming platform has three key capabilities:

1. **Message queue**: publish and subscribe to streams of records. This capability is similar to a message queue, and it is also why Kafka is classified as one.
2. **Fault-tolerant, durable storage of record streams**: Kafka persists messages to disk, which effectively guards against the risk of message loss.
3. **Stream processing**: process records as they are published; Kafka ships with a complete stream-processing library.

Kafka has two main classes of applications:

1. **Message queue**: building real-time streaming data pipelines that reliably move data between systems or applications.
2. **Data processing**: building real-time stream-processing applications that transform or react to streams of data.

A few very important concepts in Kafka (a short producer sketch follows this list):

1. Kafka stores streams of records in a `topic`.
2. Each record consists of a key, a value, and a timestamp.
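To make those last two bullets concrete, here is a minimal producer sketch built on the plain `kafka-clients` API. This is an illustration added for this article rather than code from this repository, and the broker address `localhost:9092`, the topic name `my-topic`, and the key `book-1` are assumptions you would replace with your own values. The point it demonstrates: what we hand to Kafka is essentially a key plus a value, and the broker fills in the partition, offset, and timestamp when the record is appended to the topic.

```java
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class QuickStartProducer {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Assumed local broker address; point this at your own cluster.
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // A record is (topic, key, value); the broker assigns the offset and timestamp on append.
            ProducerRecord<String, String> record =
                    new ProducerRecord<>("my-topic", "book-1", "hello kafka");
            RecordMetadata metadata = producer.send(record).get();
            System.out.printf("partition=%d, offset=%d, timestamp=%d%n",
                    metadata.partition(), metadata.offset(), metadata.timestamp());
        }
    }
}
```

Note that the key is part of the record: with Kafka's default partitioner, records that share a key land in the same partition, which is exactly the property the section on message ordering relies on later in this article.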
## The Kafka Messaging Model

> Side note: the early JMS and AMQP specs were standards drawn up by the authoritative bodies in the messaging space; I introduced them in the [JavaGuide](https://github.com/Snailclimb/JavaGuide) article ["Message Queues Are Actually Quite Simple"](https://github.com/Snailclimb/JavaGuide#%E6%95%B0%E6%8D%AE%E9%80%9A%E4%BF%A1%E4%B8%AD%E9%97%B4%E4%BB%B6). However, these standards could not keep up with how quickly message queues evolved, and in practice they are effectively abandoned. The consequence is that each message queue tends to have its own messaging model.

### The queue model: the early messaging model

![](https://my-blog-to-use.oss-cn-beijing.aliyuncs.com/2019-11/队列模型23.png)

**A queue is used as the message transport, following the producer-consumer pattern: a message can be consumed by only one consumer, and unconsumed messages stay in the queue until they are consumed or they time out.** For example, if a producer sends 100 messages and two consumers consume them, the two consumers will normally each take roughly half, in the order the messages were sent (one for you, one for me).

#### Problems with the queue model

Suppose we need to distribute the messages a producer generates to multiple consumers, and every consumer must receive the complete set of messages.

The queue model does not handle this well. Some nitpickers will say: just create a separate queue for each consumer and have the producer send multiple copies. That is a very wasteful approach, and it also defeats the purpose of using a message queue in the first place.

### The publish-subscribe model: Kafka's messaging model

The publish-subscribe model exists mainly to solve the problems of the queue model.

![](https://my-blog-to-use.oss-cn-beijing.aliyuncs.com/2019-11/广播模型21312.png)

The publish-subscribe (Pub-Sub) model uses a **Topic** as the message transport, similar to a **broadcast**: a publisher publishes a message, the message is delivered through the topic to all of its subscribers, and **a user who subscribes only after a message has been broadcast will not receive that message**.

**In the publish-subscribe model, if there is only one subscriber, it behaves essentially the same as the queue model. In other words, at the functional level the publish-subscribe model is backward compatible with the queue model.**

**Kafka uses the publish-subscribe model.**

> **RocketMQ's messaging model is essentially identical to Kafka's. The only difference is that Kafka has no concept of a queue; the corresponding concept is the Partition.**

## Key Kafka Concepts Explained

Kafka delivers the messages published by producers to a **Topic**, and the consumers that need those messages can subscribe to these **Topics**, as shown in the figure below:

![Kafka Topic Partition](https://my-blog-to-use.oss-cn-beijing.aliyuncs.com/2019-11/KafkaTopicPartitioning.png)

The figure above also introduces several of Kafka's most important concepts:

1. **Producer**: the party that produces messages.
2. **Consumer**: the party that consumes messages.
3. **Broker**: can be thought of as a standalone Kafka instance. Multiple Kafka Brokers form a Kafka Cluster.

You have surely also noticed that each Broker in turn contains two further important concepts, Topic and Partition:

- **Topic**: a Producer sends messages to a specific topic, and a Consumer consumes messages by subscribing to a specific Topic.
- **Partition**: a Partition is part of a Topic. A Topic can have multiple Partitions, and the Partitions of the same Topic can be spread across different Brokers, which means that a single Topic can span multiple Brokers. This is exactly what the figure above shows.

> Key point: **a Partition in Kafka essentially corresponds to a queue in a traditional message queue. Does that make it easier to understand?**

There is one more point I consider important: Kafka introduces a multi-replica (Replica) mechanism for Partitions. Among the replicas of a Partition, one is called the leader and the others are called followers. The messages we send go to the leader replica, and only then can the follower replicas pull them from the leader to stay in sync.

> Producers and consumers interact only with the leader replica. You can think of the other replicas as copies of the leader that exist purely to keep the stored messages safe. When the leader replica fails, a new leader is elected from among the followers, but a follower whose synchronization with the leader falls short of the requirement cannot take part in the election.

**What do Kafka's multi-partition and multi-replica mechanisms buy us?**

1. By giving a particular Topic multiple Partitions, each of which can live on a different Broker, Kafka provides good concurrency (load balancing).
2. Each Partition can be given its own number of Replicas, which greatly improves the safety of stored messages and the ability to survive failures, at the cost of the extra storage space required. (A topic-creation sketch follows this list.)
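To show what those two knobs look like in code, here is a small, hypothetical sketch that uses the `AdminClient` from `kafka-clients` to create a topic with 3 partitions and a replication factor of 2. The topic name `my-topic`, the partition and replica counts, and the broker address `localhost:9092` are assumptions chosen purely for illustration, and a replication factor of 2 only works on a cluster with at least 2 brokers.

```java
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class CreateTopicDemo {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Assumed broker address; point this at your own cluster.
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        try (AdminClient admin = AdminClient.create(props)) {
            // 3 partitions for concurrency, replication factor 2 for durability
            // (the cluster must have at least 2 brokers for this to succeed).
            NewTopic topic = new NewTopic("my-topic", 3, (short) 2);
            admin.createTopics(Collections.singletonList(topic)).all().get();
            System.out.println("Created topic: " + topic.name());
        }
    }
}
```

For a Spring Boot flavour of the same idea, see the `springboot-kafka-02-config-topics` module elsewhere in this repository.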
## The Role of Zookeeper in Kafka

> **To really understand what Zookeeper does for Kafka, you should set up a Kafka environment yourself, go into Zookeeper, and look at which folders relate to Kafka and what information each node stores.** Don't just read without practicing, or what you learn will eventually be forgotten!

A later article will cover how to set up a Kafka environment, so no need to rush; once you have read it, setting one up takes about 3 minutes.

> This part draws on and borrows from this article: https://www.jianshu.com/p/a036405f989c .

The figure below is my local Zookeeper, successfully associated with my local Kafka (the folder structure below is shown with the IDEA plugin Zookeeper tool).

ZooKeeper mainly provides metadata management for Kafka.

From the figure we can see that Zookeeper mainly does the following things for Kafka:

1. **Broker registration**: Zookeeper has a dedicated node **for recording the list of Broker servers**. When a Broker starts, it registers itself with Zookeeper by creating its own node under /brokers/ids and recording information such as its IP address and port there.
2. **Topic registration**: in Kafka, **the messages of a Topic are divided into multiple partitions** that are distributed across multiple Brokers, and **this partition information and its mapping to Brokers** is also maintained by Zookeeper. For example, if I create a topic named my-topic with two partitions, Zookeeper ends up with these folders: `/brokers/topics/my-topic/Partitions/0` and `/brokers/topics/my-topic/Partitions/1`.
3. **Load balancing**: as mentioned above, Kafka achieves good concurrency by giving a particular Topic multiple Partitions that can live on different Brokers. For the different Partitions of the same Topic, Kafka tries hard to distribute them across different Broker servers. When producers send messages, they also try to deliver them to Partitions on different Brokers. And when Consumers consume, Zookeeper can balance the load dynamically based on the current number of Partitions and Consumers.
4. ......

## How Does Kafka Guarantee Message Ordering?

When using a message queue we often have business scenarios that require strictly ordered consumption. For example, suppose we send 2 messages back to back whose corresponding database operations are: update the user's membership level, then compute the order price based on that level. If these two messages are consumed in the wrong order, the final result is completely different.

We know that in Kafka the Partition is where messages are actually stored; every message we send ends up there. Partitions in turn belong to Topics, and we can give a particular Topic multiple Partitions.

![](https://my-blog-to-use.oss-cn-beijing.aliyuncs.com/2019-11/KafkaTopicPartionsLayout.png)

Every time a message is added to a Partition it is appended at the tail, as shown in the figure above. Kafka can only guarantee that messages within a Partition are ordered; it cannot guarantee ordering across the Partitions of a Topic.

> When a message is appended to a Partition it is assigned a specific offset. Kafka uses the offset to guarantee ordering within the partition.

So there is one very simple way to guarantee consumption order: **give the Topic exactly one Partition**. That certainly solves the problem, but it goes against the intent of Kafka's design.

When sending a message in Kafka you can specify 4 parameters: topic, partition, key, and data. If you specify the Partition when sending, all such messages are sent to that Partition. In addition, messages with the same key are guaranteed to be sent to the same partition, so we can use a table/object id as the key (the keyed-send sketch earlier in this article shows exactly this).

To sum up, there are two ways to guarantee the order in which Kafka messages are consumed:

1. Use a Topic with only one Partition.
2. (Recommended) Specify a key/Partition when sending.

Of course these are not the only two options; they are just the two I find easiest to understand.

## Recommended Reading

- Apache Kafka using Keys for Partition: [https://linuxhint.com/apache_kafka_partitions/](https://linuxhint.com/apache_kafka_partitions/)
- Spring Boot and Kafka – Practical Configuration Examples: [https://thepracticaldeveloper.com/2018/11/24/spring-boot-kafka-config/](https://thepracticaldeveloper.com/2018/11/24/spring-boot-kafka-config/)
- 一文看懂大数据领域的六年巨变 (Six years of dramatic change in the big data world, in Chinese): [https://www.infoq.cn/article/b8\*EMm6AeiHDfI3SfT11](https://www.infoq.cn/article/b8*EMm6AeiHDfI3SfT11)

-------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM https://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License.
18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 
96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 124 | 125 | FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 
159 | set MAVEN_CMD_LINE_ARGS=%* 160 | 161 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 162 | if ERRORLEVEL 1 goto error 163 | goto end 164 | 165 | :error 166 | set ERROR_CODE=1 167 | 168 | :end 169 | @endlocal & set ERROR_CODE=%ERROR_CODE% 170 | 171 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 172 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 173 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 174 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 175 | :skipRcPost 176 | 177 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 178 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 179 | 180 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 181 | 182 | exit /B %ERROR_CODE% 183 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM https://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. 
to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 124 | 125 | FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 159 | set MAVEN_CMD_LINE_ARGS=%* 160 | 161 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 162 | if ERRORLEVEL 1 goto error 163 | goto end 164 | 165 | :error 166 | set ERROR_CODE=1 167 | 168 | :end 169 | @endlocal & set ERROR_CODE=%ERROR_CODE% 170 | 171 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 172 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 173 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 174 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 175 | :skipRcPost 176 | 177 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 178 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 179 | 180 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 181 | 182 | exit /B %ERROR_CODE% 183 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. 
The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM https://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 
87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 124 | 125 | FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 
159 | set MAVEN_CMD_LINE_ARGS=%* 160 | 161 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 162 | if ERRORLEVEL 1 goto error 163 | goto end 164 | 165 | :error 166 | set ERROR_CODE=1 167 | 168 | :end 169 | @endlocal & set ERROR_CODE=%ERROR_CODE% 170 | 171 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 172 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 173 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 174 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 175 | :skipRcPost 176 | 177 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 178 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 179 | 180 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 181 | 182 | exit /B %ERROR_CODE% 183 | -------------------------------------------------------------------------------- /springboot-kafka-01-send-objects/mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # https://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ]; then 38 | 39 | if [ -f /etc/mavenrc ]; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ]; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 
50 | cygwin=false 51 | darwin=false 52 | mingw=false 53 | case "$(uname)" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true ;; 56 | Darwin*) 57 | darwin=true 58 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 59 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 60 | if [ -z "$JAVA_HOME" ]; then 61 | if [ -x "/usr/libexec/java_home" ]; then 62 | export JAVA_HOME="$(/usr/libexec/java_home)" 63 | else 64 | export JAVA_HOME="/Library/Java/Home" 65 | fi 66 | fi 67 | ;; 68 | esac 69 | 70 | if [ -z "$JAVA_HOME" ]; then 71 | if [ -r /etc/gentoo-release ]; then 72 | JAVA_HOME=$(java-config --jre-home) 73 | fi 74 | fi 75 | 76 | if [ -z "$M2_HOME" ]; then 77 | ## resolve links - $0 may be a link to maven's home 78 | PRG="$0" 79 | 80 | # need this for relative symlinks 81 | while [ -h "$PRG" ]; do 82 | ls=$(ls -ld "$PRG") 83 | link=$(expr "$ls" : '.*-> \(.*\)$') 84 | if expr "$link" : '/.*' >/dev/null; then 85 | PRG="$link" 86 | else 87 | PRG="$(dirname "$PRG")/$link" 88 | fi 89 | done 90 | 91 | saveddir=$(pwd) 92 | 93 | M2_HOME=$(dirname "$PRG")/.. 94 | 95 | # make it fully qualified 96 | M2_HOME=$(cd "$M2_HOME" && pwd) 97 | 98 | cd "$saveddir" 99 | # echo Using m2 at $M2_HOME 100 | fi 101 | 102 | # For Cygwin, ensure paths are in UNIX format before anything is touched 103 | if $cygwin; then 104 | [ -n "$M2_HOME" ] && 105 | M2_HOME=$(cygpath --unix "$M2_HOME") 106 | [ -n "$JAVA_HOME" ] && 107 | JAVA_HOME=$(cygpath --unix "$JAVA_HOME") 108 | [ -n "$CLASSPATH" ] && 109 | CLASSPATH=$(cygpath --path --unix "$CLASSPATH") 110 | fi 111 | 112 | # For Mingw, ensure paths are in UNIX format before anything is touched 113 | if $mingw; then 114 | [ -n "$M2_HOME" ] && 115 | M2_HOME="$( ( 116 | cd "$M2_HOME" 117 | pwd 118 | ))" 119 | [ -n "$JAVA_HOME" ] && 120 | JAVA_HOME="$( ( 121 | cd "$JAVA_HOME" 122 | pwd 123 | ))" 124 | fi 125 | 126 | if [ -z "$JAVA_HOME" ]; then 127 | javaExecutable="$(which javac)" 128 | if [ -n "$javaExecutable" ] && ! [ "$(expr \"$javaExecutable\" : '\([^ ]*\)')" = "no" ]; then 129 | # readlink(1) is not available as standard on Solaris 10. 130 | readLink=$(which readlink) 131 | if [ ! $(expr "$readLink" : '\([^ ]*\)') = "no" ]; then 132 | if $darwin; then 133 | javaHome="$(dirname \"$javaExecutable\")" 134 | javaExecutable="$(cd \"$javaHome\" && pwd -P)/javac" 135 | else 136 | javaExecutable="$(readlink -f \"$javaExecutable\")" 137 | fi 138 | javaHome="$(dirname \"$javaExecutable\")" 139 | javaHome=$(expr "$javaHome" : '\(.*\)/bin') 140 | JAVA_HOME="$javaHome" 141 | export JAVA_HOME 142 | fi 143 | fi 144 | fi 145 | 146 | if [ -z "$JAVACMD" ]; then 147 | if [ -n "$JAVA_HOME" ]; then 148 | if [ -x "$JAVA_HOME/jre/sh/java" ]; then 149 | # IBM's JDK on AIX uses strange locations for the executables 150 | JAVACMD="$JAVA_HOME/jre/sh/java" 151 | else 152 | JAVACMD="$JAVA_HOME/bin/java" 153 | fi 154 | else 155 | JAVACMD="$(which java)" 156 | fi 157 | fi 158 | 159 | if [ ! -x "$JAVACMD" ]; then 160 | echo "Error: JAVA_HOME is not defined correctly." >&2 161 | echo " We cannot execute $JAVACMD" >&2 162 | exit 1 163 | fi 164 | 165 | if [ -z "$JAVA_HOME" ]; then 166 | echo "Warning: JAVA_HOME environment variable is not set." 
167 | fi 168 | 169 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 170 | 171 | # traverses directory structure from process work directory to filesystem root 172 | # first directory with .mvn subdirectory is considered project base directory 173 | find_maven_basedir() { 174 | 175 | if [ -z "$1" ]; then 176 | echo "Path not specified to find_maven_basedir" 177 | return 1 178 | fi 179 | 180 | basedir="$1" 181 | wdir="$1" 182 | while [ "$wdir" != '/' ]; do 183 | if [ -d "$wdir"/.mvn ]; then 184 | basedir=$wdir 185 | break 186 | fi 187 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 188 | if [ -d "${wdir}" ]; then 189 | wdir=$( 190 | cd "$wdir/.." 191 | pwd 192 | ) 193 | fi 194 | # end of workaround 195 | done 196 | echo "${basedir}" 197 | } 198 | 199 | # concatenates all lines of a file 200 | concat_lines() { 201 | if [ -f "$1" ]; then 202 | echo "$(tr -s '\n' ' ' <"$1")" 203 | fi 204 | } 205 | 206 | BASE_DIR=$(find_maven_basedir "$(pwd)") 207 | if [ -z "$BASE_DIR" ]; then 208 | exit 1 209 | fi 210 | 211 | ########################################################################################## 212 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 213 | # This allows using the maven wrapper in projects that prohibit checking in binary data. 214 | ########################################################################################## 215 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then 216 | if [ "$MVNW_VERBOSE" = true ]; then 217 | echo "Found .mvn/wrapper/maven-wrapper.jar" 218 | fi 219 | else 220 | if [ "$MVNW_VERBOSE" = true ]; then 221 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 222 | fi 223 | if [ -n "$MVNW_REPOURL" ]; then 224 | jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 225 | else 226 | jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 227 | fi 228 | while IFS="=" read key value; do 229 | case "$key" in wrapperUrl) 230 | jarUrl="$value" 231 | break 232 | ;; 233 | esac 234 | done <"$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" 235 | if [ "$MVNW_VERBOSE" = true ]; then 236 | echo "Downloading from: $jarUrl" 237 | fi 238 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" 239 | if $cygwin; then 240 | wrapperJarPath=$(cygpath --path --windows "$wrapperJarPath") 241 | fi 242 | 243 | if command -v wget >/dev/null; then 244 | if [ "$MVNW_VERBOSE" = true ]; then 245 | echo "Found wget ... using wget" 246 | fi 247 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 248 | wget "$jarUrl" -O "$wrapperJarPath" 249 | else 250 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" 251 | fi 252 | elif command -v curl >/dev/null; then 253 | if [ "$MVNW_VERBOSE" = true ]; then 254 | echo "Found curl ... using curl" 255 | fi 256 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 257 | curl -o "$wrapperJarPath" "$jarUrl" -f 258 | else 259 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f 260 | fi 261 | 262 | else 263 | if [ "$MVNW_VERBOSE" = true ]; then 264 | echo "Falling back to using Java to download" 265 | fi 266 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" 267 | # For Cygwin, switch paths to Windows format before running javac 268 | if $cygwin; then 269 | javaClass=$(cygpath --path --windows "$javaClass") 270 | fi 271 | if [ -e "$javaClass" ]; then 272 | if [ ! 
-e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 273 | if [ "$MVNW_VERBOSE" = true ]; then 274 | echo " - Compiling MavenWrapperDownloader.java ..." 275 | fi 276 | # Compiling the Java class 277 | ("$JAVA_HOME/bin/javac" "$javaClass") 278 | fi 279 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 280 | # Running the downloader 281 | if [ "$MVNW_VERBOSE" = true ]; then 282 | echo " - Running MavenWrapperDownloader.java ..." 283 | fi 284 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") 285 | fi 286 | fi 287 | fi 288 | fi 289 | ########################################################################################## 290 | # End of extension 291 | ########################################################################################## 292 | 293 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 294 | if [ "$MVNW_VERBOSE" = true ]; then 295 | echo $MAVEN_PROJECTBASEDIR 296 | fi 297 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 298 | 299 | # For Cygwin, switch paths to Windows format before running java 300 | if $cygwin; then 301 | [ -n "$M2_HOME" ] && 302 | M2_HOME=$(cygpath --path --windows "$M2_HOME") 303 | [ -n "$JAVA_HOME" ] && 304 | JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME") 305 | [ -n "$CLASSPATH" ] && 306 | CLASSPATH=$(cygpath --path --windows "$CLASSPATH") 307 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 308 | MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR") 309 | fi 310 | 311 | # Provide a "standardized" way to retrieve the CLI args that will 312 | # work with both Windows and non-Windows executions. 313 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 314 | export MAVEN_CMD_LINE_ARGS 315 | 316 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 317 | 318 | exec "$JAVACMD" \ 319 | $MAVEN_OPTS \ 320 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 321 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 322 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 323 | -------------------------------------------------------------------------------- /springboot-kafka-02-config-topics/mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # https://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 
19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ]; then 38 | 39 | if [ -f /etc/mavenrc ]; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ]; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 50 | cygwin=false 51 | darwin=false 52 | mingw=false 53 | case "$(uname)" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true ;; 56 | Darwin*) 57 | darwin=true 58 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 59 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 60 | if [ -z "$JAVA_HOME" ]; then 61 | if [ -x "/usr/libexec/java_home" ]; then 62 | export JAVA_HOME="$(/usr/libexec/java_home)" 63 | else 64 | export JAVA_HOME="/Library/Java/Home" 65 | fi 66 | fi 67 | ;; 68 | esac 69 | 70 | if [ -z "$JAVA_HOME" ]; then 71 | if [ -r /etc/gentoo-release ]; then 72 | JAVA_HOME=$(java-config --jre-home) 73 | fi 74 | fi 75 | 76 | if [ -z "$M2_HOME" ]; then 77 | ## resolve links - $0 may be a link to maven's home 78 | PRG="$0" 79 | 80 | # need this for relative symlinks 81 | while [ -h "$PRG" ]; do 82 | ls=$(ls -ld "$PRG") 83 | link=$(expr "$ls" : '.*-> \(.*\)$') 84 | if expr "$link" : '/.*' >/dev/null; then 85 | PRG="$link" 86 | else 87 | PRG="$(dirname "$PRG")/$link" 88 | fi 89 | done 90 | 91 | saveddir=$(pwd) 92 | 93 | M2_HOME=$(dirname "$PRG")/.. 94 | 95 | # make it fully qualified 96 | M2_HOME=$(cd "$M2_HOME" && pwd) 97 | 98 | cd "$saveddir" 99 | # echo Using m2 at $M2_HOME 100 | fi 101 | 102 | # For Cygwin, ensure paths are in UNIX format before anything is touched 103 | if $cygwin; then 104 | [ -n "$M2_HOME" ] && 105 | M2_HOME=$(cygpath --unix "$M2_HOME") 106 | [ -n "$JAVA_HOME" ] && 107 | JAVA_HOME=$(cygpath --unix "$JAVA_HOME") 108 | [ -n "$CLASSPATH" ] && 109 | CLASSPATH=$(cygpath --path --unix "$CLASSPATH") 110 | fi 111 | 112 | # For Mingw, ensure paths are in UNIX format before anything is touched 113 | if $mingw; then 114 | [ -n "$M2_HOME" ] && 115 | M2_HOME="$( ( 116 | cd "$M2_HOME" 117 | pwd 118 | ))" 119 | [ -n "$JAVA_HOME" ] && 120 | JAVA_HOME="$( ( 121 | cd "$JAVA_HOME" 122 | pwd 123 | ))" 124 | fi 125 | 126 | if [ -z "$JAVA_HOME" ]; then 127 | javaExecutable="$(which javac)" 128 | if [ -n "$javaExecutable" ] && ! [ "$(expr \"$javaExecutable\" : '\([^ ]*\)')" = "no" ]; then 129 | # readlink(1) is not available as standard on Solaris 10. 130 | readLink=$(which readlink) 131 | if [ ! 
$(expr "$readLink" : '\([^ ]*\)') = "no" ]; then 132 | if $darwin; then 133 | javaHome="$(dirname \"$javaExecutable\")" 134 | javaExecutable="$(cd \"$javaHome\" && pwd -P)/javac" 135 | else 136 | javaExecutable="$(readlink -f \"$javaExecutable\")" 137 | fi 138 | javaHome="$(dirname \"$javaExecutable\")" 139 | javaHome=$(expr "$javaHome" : '\(.*\)/bin') 140 | JAVA_HOME="$javaHome" 141 | export JAVA_HOME 142 | fi 143 | fi 144 | fi 145 | 146 | if [ -z "$JAVACMD" ]; then 147 | if [ -n "$JAVA_HOME" ]; then 148 | if [ -x "$JAVA_HOME/jre/sh/java" ]; then 149 | # IBM's JDK on AIX uses strange locations for the executables 150 | JAVACMD="$JAVA_HOME/jre/sh/java" 151 | else 152 | JAVACMD="$JAVA_HOME/bin/java" 153 | fi 154 | else 155 | JAVACMD="$(which java)" 156 | fi 157 | fi 158 | 159 | if [ ! -x "$JAVACMD" ]; then 160 | echo "Error: JAVA_HOME is not defined correctly." >&2 161 | echo " We cannot execute $JAVACMD" >&2 162 | exit 1 163 | fi 164 | 165 | if [ -z "$JAVA_HOME" ]; then 166 | echo "Warning: JAVA_HOME environment variable is not set." 167 | fi 168 | 169 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 170 | 171 | # traverses directory structure from process work directory to filesystem root 172 | # first directory with .mvn subdirectory is considered project base directory 173 | find_maven_basedir() { 174 | 175 | if [ -z "$1" ]; then 176 | echo "Path not specified to find_maven_basedir" 177 | return 1 178 | fi 179 | 180 | basedir="$1" 181 | wdir="$1" 182 | while [ "$wdir" != '/' ]; do 183 | if [ -d "$wdir"/.mvn ]; then 184 | basedir=$wdir 185 | break 186 | fi 187 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 188 | if [ -d "${wdir}" ]; then 189 | wdir=$( 190 | cd "$wdir/.." 191 | pwd 192 | ) 193 | fi 194 | # end of workaround 195 | done 196 | echo "${basedir}" 197 | } 198 | 199 | # concatenates all lines of a file 200 | concat_lines() { 201 | if [ -f "$1" ]; then 202 | echo "$(tr -s '\n' ' ' <"$1")" 203 | fi 204 | } 205 | 206 | BASE_DIR=$(find_maven_basedir "$(pwd)") 207 | if [ -z "$BASE_DIR" ]; then 208 | exit 1 209 | fi 210 | 211 | ########################################################################################## 212 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 213 | # This allows using the maven wrapper in projects that prohibit checking in binary data. 214 | ########################################################################################## 215 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then 216 | if [ "$MVNW_VERBOSE" = true ]; then 217 | echo "Found .mvn/wrapper/maven-wrapper.jar" 218 | fi 219 | else 220 | if [ "$MVNW_VERBOSE" = true ]; then 221 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 
222 | fi 223 | if [ -n "$MVNW_REPOURL" ]; then 224 | jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 225 | else 226 | jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 227 | fi 228 | while IFS="=" read key value; do 229 | case "$key" in wrapperUrl) 230 | jarUrl="$value" 231 | break 232 | ;; 233 | esac 234 | done <"$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" 235 | if [ "$MVNW_VERBOSE" = true ]; then 236 | echo "Downloading from: $jarUrl" 237 | fi 238 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" 239 | if $cygwin; then 240 | wrapperJarPath=$(cygpath --path --windows "$wrapperJarPath") 241 | fi 242 | 243 | if command -v wget >/dev/null; then 244 | if [ "$MVNW_VERBOSE" = true ]; then 245 | echo "Found wget ... using wget" 246 | fi 247 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 248 | wget "$jarUrl" -O "$wrapperJarPath" 249 | else 250 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" 251 | fi 252 | elif command -v curl >/dev/null; then 253 | if [ "$MVNW_VERBOSE" = true ]; then 254 | echo "Found curl ... using curl" 255 | fi 256 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 257 | curl -o "$wrapperJarPath" "$jarUrl" -f 258 | else 259 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f 260 | fi 261 | 262 | else 263 | if [ "$MVNW_VERBOSE" = true ]; then 264 | echo "Falling back to using Java to download" 265 | fi 266 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" 267 | # For Cygwin, switch paths to Windows format before running javac 268 | if $cygwin; then 269 | javaClass=$(cygpath --path --windows "$javaClass") 270 | fi 271 | if [ -e "$javaClass" ]; then 272 | if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 273 | if [ "$MVNW_VERBOSE" = true ]; then 274 | echo " - Compiling MavenWrapperDownloader.java ..." 275 | fi 276 | # Compiling the Java class 277 | ("$JAVA_HOME/bin/javac" "$javaClass") 278 | fi 279 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 280 | # Running the downloader 281 | if [ "$MVNW_VERBOSE" = true ]; then 282 | echo " - Running MavenWrapperDownloader.java ..." 283 | fi 284 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") 285 | fi 286 | fi 287 | fi 288 | fi 289 | ########################################################################################## 290 | # End of extension 291 | ########################################################################################## 292 | 293 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 294 | if [ "$MVNW_VERBOSE" = true ]; then 295 | echo $MAVEN_PROJECTBASEDIR 296 | fi 297 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 298 | 299 | # For Cygwin, switch paths to Windows format before running java 300 | if $cygwin; then 301 | [ -n "$M2_HOME" ] && 302 | M2_HOME=$(cygpath --path --windows "$M2_HOME") 303 | [ -n "$JAVA_HOME" ] && 304 | JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME") 305 | [ -n "$CLASSPATH" ] && 306 | CLASSPATH=$(cygpath --path --windows "$CLASSPATH") 307 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 308 | MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR") 309 | fi 310 | 311 | # Provide a "standardized" way to retrieve the CLI args that will 312 | # work with both Windows and non-Windows executions. 
313 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 314 | export MAVEN_CMD_LINE_ARGS 315 | 316 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 317 | 318 | exec "$JAVACMD" \ 319 | $MAVEN_OPTS \ 320 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 321 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 322 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 323 | -------------------------------------------------------------------------------- /springboot-kafka-03-transaction/mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # https://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ]; then 38 | 39 | if [ -f /etc/mavenrc ]; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ]; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 
50 | cygwin=false 51 | darwin=false 52 | mingw=false 53 | case "$(uname)" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true ;; 56 | Darwin*) 57 | darwin=true 58 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 59 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 60 | if [ -z "$JAVA_HOME" ]; then 61 | if [ -x "/usr/libexec/java_home" ]; then 62 | export JAVA_HOME="$(/usr/libexec/java_home)" 63 | else 64 | export JAVA_HOME="/Library/Java/Home" 65 | fi 66 | fi 67 | ;; 68 | esac 69 | 70 | if [ -z "$JAVA_HOME" ]; then 71 | if [ -r /etc/gentoo-release ]; then 72 | JAVA_HOME=$(java-config --jre-home) 73 | fi 74 | fi 75 | 76 | if [ -z "$M2_HOME" ]; then 77 | ## resolve links - $0 may be a link to maven's home 78 | PRG="$0" 79 | 80 | # need this for relative symlinks 81 | while [ -h "$PRG" ]; do 82 | ls=$(ls -ld "$PRG") 83 | link=$(expr "$ls" : '.*-> \(.*\)$') 84 | if expr "$link" : '/.*' >/dev/null; then 85 | PRG="$link" 86 | else 87 | PRG="$(dirname "$PRG")/$link" 88 | fi 89 | done 90 | 91 | saveddir=$(pwd) 92 | 93 | M2_HOME=$(dirname "$PRG")/.. 94 | 95 | # make it fully qualified 96 | M2_HOME=$(cd "$M2_HOME" && pwd) 97 | 98 | cd "$saveddir" 99 | # echo Using m2 at $M2_HOME 100 | fi 101 | 102 | # For Cygwin, ensure paths are in UNIX format before anything is touched 103 | if $cygwin; then 104 | [ -n "$M2_HOME" ] && 105 | M2_HOME=$(cygpath --unix "$M2_HOME") 106 | [ -n "$JAVA_HOME" ] && 107 | JAVA_HOME=$(cygpath --unix "$JAVA_HOME") 108 | [ -n "$CLASSPATH" ] && 109 | CLASSPATH=$(cygpath --path --unix "$CLASSPATH") 110 | fi 111 | 112 | # For Mingw, ensure paths are in UNIX format before anything is touched 113 | if $mingw; then 114 | [ -n "$M2_HOME" ] && 115 | M2_HOME="$( ( 116 | cd "$M2_HOME" 117 | pwd 118 | ))" 119 | [ -n "$JAVA_HOME" ] && 120 | JAVA_HOME="$( ( 121 | cd "$JAVA_HOME" 122 | pwd 123 | ))" 124 | fi 125 | 126 | if [ -z "$JAVA_HOME" ]; then 127 | javaExecutable="$(which javac)" 128 | if [ -n "$javaExecutable" ] && ! [ "$(expr \"$javaExecutable\" : '\([^ ]*\)')" = "no" ]; then 129 | # readlink(1) is not available as standard on Solaris 10. 130 | readLink=$(which readlink) 131 | if [ ! $(expr "$readLink" : '\([^ ]*\)') = "no" ]; then 132 | if $darwin; then 133 | javaHome="$(dirname \"$javaExecutable\")" 134 | javaExecutable="$(cd \"$javaHome\" && pwd -P)/javac" 135 | else 136 | javaExecutable="$(readlink -f \"$javaExecutable\")" 137 | fi 138 | javaHome="$(dirname \"$javaExecutable\")" 139 | javaHome=$(expr "$javaHome" : '\(.*\)/bin') 140 | JAVA_HOME="$javaHome" 141 | export JAVA_HOME 142 | fi 143 | fi 144 | fi 145 | 146 | if [ -z "$JAVACMD" ]; then 147 | if [ -n "$JAVA_HOME" ]; then 148 | if [ -x "$JAVA_HOME/jre/sh/java" ]; then 149 | # IBM's JDK on AIX uses strange locations for the executables 150 | JAVACMD="$JAVA_HOME/jre/sh/java" 151 | else 152 | JAVACMD="$JAVA_HOME/bin/java" 153 | fi 154 | else 155 | JAVACMD="$(which java)" 156 | fi 157 | fi 158 | 159 | if [ ! -x "$JAVACMD" ]; then 160 | echo "Error: JAVA_HOME is not defined correctly." >&2 161 | echo " We cannot execute $JAVACMD" >&2 162 | exit 1 163 | fi 164 | 165 | if [ -z "$JAVA_HOME" ]; then 166 | echo "Warning: JAVA_HOME environment variable is not set." 
167 | fi 168 | 169 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 170 | 171 | # traverses directory structure from process work directory to filesystem root 172 | # first directory with .mvn subdirectory is considered project base directory 173 | find_maven_basedir() { 174 | 175 | if [ -z "$1" ]; then 176 | echo "Path not specified to find_maven_basedir" 177 | return 1 178 | fi 179 | 180 | basedir="$1" 181 | wdir="$1" 182 | while [ "$wdir" != '/' ]; do 183 | if [ -d "$wdir"/.mvn ]; then 184 | basedir=$wdir 185 | break 186 | fi 187 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 188 | if [ -d "${wdir}" ]; then 189 | wdir=$( 190 | cd "$wdir/.." 191 | pwd 192 | ) 193 | fi 194 | # end of workaround 195 | done 196 | echo "${basedir}" 197 | } 198 | 199 | # concatenates all lines of a file 200 | concat_lines() { 201 | if [ -f "$1" ]; then 202 | echo "$(tr -s '\n' ' ' <"$1")" 203 | fi 204 | } 205 | 206 | BASE_DIR=$(find_maven_basedir "$(pwd)") 207 | if [ -z "$BASE_DIR" ]; then 208 | exit 1 209 | fi 210 | 211 | ########################################################################################## 212 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 213 | # This allows using the maven wrapper in projects that prohibit checking in binary data. 214 | ########################################################################################## 215 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then 216 | if [ "$MVNW_VERBOSE" = true ]; then 217 | echo "Found .mvn/wrapper/maven-wrapper.jar" 218 | fi 219 | else 220 | if [ "$MVNW_VERBOSE" = true ]; then 221 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 222 | fi 223 | if [ -n "$MVNW_REPOURL" ]; then 224 | jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 225 | else 226 | jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 227 | fi 228 | while IFS="=" read key value; do 229 | case "$key" in wrapperUrl) 230 | jarUrl="$value" 231 | break 232 | ;; 233 | esac 234 | done <"$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" 235 | if [ "$MVNW_VERBOSE" = true ]; then 236 | echo "Downloading from: $jarUrl" 237 | fi 238 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" 239 | if $cygwin; then 240 | wrapperJarPath=$(cygpath --path --windows "$wrapperJarPath") 241 | fi 242 | 243 | if command -v wget >/dev/null; then 244 | if [ "$MVNW_VERBOSE" = true ]; then 245 | echo "Found wget ... using wget" 246 | fi 247 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 248 | wget "$jarUrl" -O "$wrapperJarPath" 249 | else 250 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" 251 | fi 252 | elif command -v curl >/dev/null; then 253 | if [ "$MVNW_VERBOSE" = true ]; then 254 | echo "Found curl ... using curl" 255 | fi 256 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 257 | curl -o "$wrapperJarPath" "$jarUrl" -f 258 | else 259 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f 260 | fi 261 | 262 | else 263 | if [ "$MVNW_VERBOSE" = true ]; then 264 | echo "Falling back to using Java to download" 265 | fi 266 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" 267 | # For Cygwin, switch paths to Windows format before running javac 268 | if $cygwin; then 269 | javaClass=$(cygpath --path --windows "$javaClass") 270 | fi 271 | if [ -e "$javaClass" ]; then 272 | if [ ! 
-e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 273 | if [ "$MVNW_VERBOSE" = true ]; then 274 | echo " - Compiling MavenWrapperDownloader.java ..." 275 | fi 276 | # Compiling the Java class 277 | ("$JAVA_HOME/bin/javac" "$javaClass") 278 | fi 279 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 280 | # Running the downloader 281 | if [ "$MVNW_VERBOSE" = true ]; then 282 | echo " - Running MavenWrapperDownloader.java ..." 283 | fi 284 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") 285 | fi 286 | fi 287 | fi 288 | fi 289 | ########################################################################################## 290 | # End of extension 291 | ########################################################################################## 292 | 293 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 294 | if [ "$MVNW_VERBOSE" = true ]; then 295 | echo $MAVEN_PROJECTBASEDIR 296 | fi 297 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 298 | 299 | # For Cygwin, switch paths to Windows format before running java 300 | if $cygwin; then 301 | [ -n "$M2_HOME" ] && 302 | M2_HOME=$(cygpath --path --windows "$M2_HOME") 303 | [ -n "$JAVA_HOME" ] && 304 | JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME") 305 | [ -n "$CLASSPATH" ] && 306 | CLASSPATH=$(cygpath --path --windows "$CLASSPATH") 307 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 308 | MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR") 309 | fi 310 | 311 | # Provide a "standardized" way to retrieve the CLI args that will 312 | # work with both Windows and non-Windows executions. 313 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 314 | export MAVEN_CMD_LINE_ARGS 315 | 316 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 317 | 318 | exec "$JAVACMD" \ 319 | $MAVEN_OPTS \ 320 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 321 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 322 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 323 | -------------------------------------------------------------------------------- /docs/2-5分钟带你体验一把Kafka.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 前置条件:你的电脑已经安装 Docker 4 | 5 | 主要内容: 6 | 7 | 1. 使用 Docker 安装 8 | 2. 使用命令行测试消息队列的功能 9 | 3. zookeeper和kafka可视化管理工具 10 | 4. 
### Setting up a Kafka environment with Docker

#### Single-node setup

**The demos below use a single-node Kafka. If you are just getting started, the single-node setup is the recommended way to learn.**

> The Docker-based Kafka environment below comes from the open-source project https://github.com/simplesteph/kafka-stack-docker-compose . You can also use the officially provided compose file instead: https://github.com/wurstmeister/kafka-docker/blob/master/docker-compose.yml .

Create a file named `zk-single-kafka-single.yml` with the following content:

```yaml
version: '2.1'

services:
  zoo1:
    image: zookeeper:3.4.9
    hostname: zoo1
    ports:
      - "2181:2181"
    environment:
      ZOO_MY_ID: 1
      ZOO_PORT: 2181
      ZOO_SERVERS: server.1=zoo1:2888:3888
    volumes:
      - ./zk-single-kafka-single/zoo1/data:/data
      - ./zk-single-kafka-single/zoo1/datalog:/datalog

  kafka1:
    image: confluentinc/cp-kafka:5.3.1
    hostname: kafka1
    ports:
      - "9092:9092"
    environment:
      KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
      KAFKA_BROKER_ID: 1
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
    volumes:
      - ./zk-single-kafka-single/kafka1/data:/var/lib/kafka/data
    depends_on:
      - zoo1
```

Run the following command to bring the environment up (it automatically downloads and starts one ZooKeeper container and one Kafka container):

```shell
docker-compose -f zk-single-kafka-single.yml up
```

To stop the Kafka-related containers, run:

```shell
docker-compose -f zk-single-kafka-single.yml down
```
#### Cluster setup

> The Docker-based Kafka environment below also comes from the open-source project https://github.com/simplesteph/kafka-stack-docker-compose .

Create a file named `zk-single-kafka-multiple.yml` with the following content:

```yaml
version: '2.1'

services:
  zoo1:
    image: zookeeper:3.4.9
    hostname: zoo1
    ports:
      - "2181:2181"
    environment:
      ZOO_MY_ID: 1
      ZOO_PORT: 2181
      ZOO_SERVERS: server.1=zoo1:2888:3888
    volumes:
      - ./zk-single-kafka-multiple/zoo1/data:/data
      - ./zk-single-kafka-multiple/zoo1/datalog:/datalog

  kafka1:
    image: confluentinc/cp-kafka:5.4.0
    hostname: kafka1
    ports:
      - "9092:9092"
    environment:
      KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
      KAFKA_BROKER_ID: 1
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
    volumes:
      - ./zk-single-kafka-multiple/kafka1/data:/var/lib/kafka/data
    depends_on:
      - zoo1

  kafka2:
    image: confluentinc/cp-kafka:5.4.0
    hostname: kafka2
    ports:
      - "9093:9093"
    environment:
      KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka2:19093,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9093
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
      KAFKA_BROKER_ID: 2
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
    volumes:
      - ./zk-single-kafka-multiple/kafka2/data:/var/lib/kafka/data
    depends_on:
      - zoo1

  kafka3:
    image: confluentinc/cp-kafka:5.4.0
    hostname: kafka3
    ports:
      - "9094:9094"
    environment:
      KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka3:19094,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9094
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
      KAFKA_BROKER_ID: 3
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
    volumes:
      - ./zk-single-kafka-multiple/kafka3/data:/var/lib/kafka/data
    depends_on:
      - zoo1
```

Run the following command to bring up an environment with 1 ZooKeeper node and 3 Kafka nodes:

```shell
docker-compose -f zk-single-kafka-multiple.yml up
```

To stop the Kafka-related containers, run:

```shell
docker-compose -f zk-single-kafka-multiple.yml down
```

### Testing message production and consumption from the command line

In everyday work you will rarely operate Kafka through the command line, but it is handy for quick checks.

**1. Enter the Kafka container, which ships with Kafka's official command-line tools**

```shell
docker exec -ti docker_kafka1_1 bash
```

**2. List all topics**

```shell
root@kafka1:/# kafka-topics --describe --zookeeper zoo1:2181
```

**3. Create a topic**

```shell
root@kafka1:/# kafka-topics --create --topic test --partitions 3 --zookeeper zoo1:2181 --replication-factor 1
Created topic test.
```

This creates a topic named test with 3 partitions and a replication factor of 1.

**4. Subscribe to the topic with a console consumer**

```shell
root@kafka1:/# kafka-console-consumer --bootstrap-server localhost:9092 --topic test
send hello from console -producer
```

We are now subscribed to the topic named test.

**5. Send a message to the topic with a console producer**

```shell
root@kafka1:/# kafka-console-producer --broker-list localhost:9092 --topic test
>send hello from console -producer
>
```

The `kafka-console-producer` command sends one message to the test topic, with the content "send hello from console -producer".

At this point you will see that the consumer received the message:

```shell
root@kafka1:/# kafka-console-consumer --bootstrap-server localhost:9092 --topic test
send hello from console -producer
```
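For reference, the same administrative operations can also be performed from Java with the `AdminClient` that ships with the kafka-clients library used later in this article. The sketch below is illustrative only: the class name is made up for the example, it assumes the single-node broker from the Docker setup is reachable on `localhost:9092`, and creating the topic will fail if it already exists.

```java
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;
import java.util.Set;

public class TopicAdminExample {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        try (AdminClient admin = AdminClient.create(props)) {
            // Create a "test" topic with 3 partitions and replication factor 1,
            // the programmatic equivalent of the kafka-topics --create command above.
            // (This throws an ExecutionException if the topic already exists.)
            admin.createTopics(Collections.singleton(new NewTopic("test", 3, (short) 1)))
                 .all()
                 .get();

            // List existing topic names, similar to inspecting topics with kafka-topics.
            Set<String> topics = admin.listTopics().names().get();
            topics.forEach(System.out::println);
        }
    }
}
```

Run against the single-node setup, this should print the `test` topic among the existing topic names.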
### Recommended IDEA plugins

#### Zoolytic-Zookeeper tool

This is an IDEA plugin that provides a visual ZooKeeper tool. It is very handy and lets you:

1. Visualize zkNodes and their data
2. Manage zkNodes (add/delete)
3. Edit zkNode data
4. ......

How to use it:

1. Open the tool window: View -> Tool windows -> Zoolytic;
2. Click the "+" button and enter "127.0.0.1:2181" in the dialog to connect to ZooKeeper;
3. After connecting, select the new connection and click the refresh button next to the "+" button.

#### Kafkalytic

An IDEA plugin for managing Kafka visually. It provides features such as:

1. Support for multiple clusters
2. Topic management: create/delete/change partitions
3. Searching topics with regular expressions
4. Publishing string/byte-serialized messages
5. Consuming messages with different strategies

How to use it:

1. Open the tool window: View -> Tool windows -> kafkalytic;

2. Click the "+" button and enter "127.0.0.1:9092" in the dialog to connect.

### Basic Kafka usage from a Java program

> Source code: https://github.com/Snailclimb/springboot-kafka/tree/master/kafka-intro-maven-demo

**Step 1: Create a Maven project**

**Step 2: Add the required dependency to `pom.xml`**

```xml
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.2.0</version>
</dependency>
```

**Step 3: Initialize the consumer and the producer**

The `KafkaConstants` class defines some commonly used Kafka configuration constants.

```java
public class KafkaConstants {
    public static final String BROKER_LIST = "localhost:9092";
    public static final String CLIENT_ID = "client1";
    public static final String GROUP_ID_CONFIG = "consumerGroup1";

    private KafkaConstants() {
    }
}
```

`ProducerCreator` has a `createProducer()` method that returns a `KafkaProducer` instance:

```java
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * @author shuang.kou
 */
public class ProducerCreator {

    public static Producer<String, String> createProducer() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConstants.BROKER_LIST);
        properties.put(ProducerConfig.CLIENT_ID_CONFIG, KafkaConstants.CLIENT_ID);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(properties);
    }
}
```

`ConsumerCreator` has a `createConsumer()` method that returns a `KafkaConsumer` instance:

```java
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;

public class ConsumerCreator {

    public static Consumer<String, String> createConsumer() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConstants.BROKER_LIST);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, KafkaConstants.GROUP_ID_CONFIG);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        return new KafkaConsumer<>(properties);
    }
}
```

**Step 4: Send and consume messages**

The producer sends a message:

```java
private static final String TOPIC = "test-topic";

Producer<String, String> producer = ProducerCreator.createProducer();
ProducerRecord<String, String> record =
        new ProducerRecord<>(TOPIC, "hello, Kafka!");
try {
    // send the message and wait for the broker's acknowledgement
    RecordMetadata metadata = producer.send(record).get();
    System.out.println("Record sent to partition " + metadata.partition()
            + " with offset " + metadata.offset());
} catch (ExecutionException | InterruptedException e) {
    System.out.println("Error in sending record");
    e.printStackTrace();
}
producer.close();
```

The consumer consumes messages:

```java
Consumer<String, String> consumer = ConsumerCreator.createConsumer();
// subscribe to the topic once, then consume messages in a loop
consumer.subscribe(Collections.singletonList(TOPIC));
while (true) {
    ConsumerRecords<String, String> consumerRecords =
            consumer.poll(Duration.ofMillis(1000));
    for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
        System.out.println("Consumer consume message:" + consumerRecord.value());
    }
}
```
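The `while (true)` loop above never closes the consumer. If you want the demo to shut down cleanly, a common pattern is to call `wakeup()` from a JVM shutdown hook and catch the resulting `WakeupException`. The following is a minimal sketch of that pattern (the class name is made up; `ConsumerCreator` and the topic name come from the demo above), not part of the original sample code:

```java
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.errors.WakeupException;

import java.time.Duration;
import java.util.Collections;

public class GracefulConsumerLoop {

    public static void main(String[] args) {
        Consumer<String, String> consumer = ConsumerCreator.createConsumer();
        final Thread mainThread = Thread.currentThread();

        // wakeup() makes a blocked poll() throw WakeupException, which lets us leave the loop
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            consumer.wakeup();
            try {
                mainThread.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }));

        try {
            consumer.subscribe(Collections.singletonList("test-topic"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                records.forEach(r -> System.out.println("Consumer consume message:" + r.value()));
            }
        } catch (WakeupException e) {
            // expected on shutdown, nothing to do
        } finally {
            // releases sockets and triggers a consumer group rebalance promptly
            consumer.close();
        }
    }
}
```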
**Step 5: Test it**

Run the program and the console prints:

```
Record sent to partition 0 with offset 20
Consumer consume message:hello, Kafka!
```

--------------------------------------------------------------------------------
/docs/3-10分钟学会如何在SpringBoot程序中使用Kafka作为消息队列.md:
--------------------------------------------------------------------------------

### Step 1: Create the project

You can create the project either with Spring's official [Spring Initializr](https://start.spring.io/) or directly from IDEA.

![](https://imgkr.cn-bj.ufileos.com/946d907d-f983-4bb0-ad32-76deb15057cb.jpg)

### Step 2: Configure Kafka

Configure the basic Kafka settings in the application.yml configuration file:

```yml
server:
  port: 9090

spring:
  kafka:
    consumer:
      bootstrap-servers: localhost:9092
      # start from the earliest offset when there is no committed offset,
      # so a (re)connecting consumer group can read messages from the beginning
      auto-offset-reset: earliest
    producer:
      bootstrap-servers: localhost:9092
      # serialize the objects we send as JSON
      value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
kafka:
  topic:
    my-topic: my-topic
    my-topic2: my-topic2
```

An additional Kafka configuration class:

```java
package cn.javaguide.springbootkafka01sendobjects.config;

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.support.converter.StringJsonMessageConverter;

/**
 * @author shuang.kou
 */
@Configuration
public class KafkaConfig {

    @Value("${kafka.topic.my-topic}")
    String myTopic;
    @Value("${kafka.topic.my-topic2}")
    String myTopic2;

    /**
     * JSON message converter
     */
    @Bean
    public RecordMessageConverter jsonConverter() {
        return new StringJsonMessageConverter();
    }

    /**
     * Declaring a NewTopic bean creates the topic; if the topic already exists, it is ignored.
     */
    @Bean
    public NewTopic myTopic() {
        return new NewTopic(myTopic, 2, (short) 1);
    }

    @Bean
    public NewTopic myTopic2() {
        return new NewTopic(myTopic2, 1, (short) 1);
    }
}
```

At this point you can already try running the project. Once it starts successfully, you will find that Spring Boot has created two topics for you:

1. my-topic: 2 partitions, replication factor 1
2. my-topic2: 1 partition, replication factor 1

> You can check this with the `kafka-topics --describe --zookeeper zoo1:2181` command from the previous article, or directly with Kafkalytic, the Kafka management plugin for IDEA.
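As an aside, if the spring-kafka version you use is 2.3 or newer, the same two topics could also be declared with the `TopicBuilder` fluent API instead of calling the `NewTopic` constructor directly. This is only an alternative sketch under that version assumption; the sample project itself uses the `NewTopic` beans shown above.

```java
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

// Alternative topic declarations (requires spring-kafka 2.3+), equivalent to the NewTopic beans above.
@Configuration
public class TopicBuilderConfigExample {

    @Bean
    public NewTopic myTopic() {
        return TopicBuilder.name("my-topic")
                .partitions(2)
                .replicas(1)
                .build();
    }

    @Bean
    public NewTopic myTopic2() {
        return TopicBuilder.name("my-topic2")
                .partitions(1)
                .replicas(1)
                .build();
    }
}
```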
### Step 3: Create the entity class for the messages we will send

```java
package cn.javaguide.springbootkafka01sendobjects.entity;

public class Book {
    private Long id;
    private String name;

    public Book() {
    }

    public Book(Long id, String name) {
        this.id = id;
        this.name = name;
    }

    // getters/setters and toString() omitted
}
```

### Step 4: Create the producer that sends messages

> This step is a bit long: we will optimize the producer code step by step.

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

@Service
public class BookProducerService {

    private static final Logger logger = LoggerFactory.getLogger(BookProducerService.class);

    private final KafkaTemplate<String, Object> kafkaTemplate;

    public BookProducerService(KafkaTemplate<String, Object> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    public void sendMessage(String topic, Object o) {
        kafkaTemplate.send(topic, o);
    }
}
```

With the `KafkaTemplate` provided by Spring Kafka, sending a message is as simple as calling `send()` with the target topic and the message payload:

```java
kafkaTemplate.send(topic, o);
```

If we want to know the result of the send, `sendMessage` can be written like this:

```java
public void sendMessage(String topic, Object o) {
    try {
        SendResult<String, Object> sendResult = kafkaTemplate.send(topic, o).get();
        if (sendResult.getRecordMetadata() != null) {
            logger.info("Producer sent message to " + sendResult.getProducerRecord().topic() + " -> " + sendResult.getProducerRecord().value().toString());
        }
    } catch (InterruptedException | ExecutionException e) {
        e.printStackTrace();
    }
}
```

However, blocking on `get()` turns this into a synchronous send, which is not recommended: it throws away the benefit of working with a `Future`.

What `KafkaTemplate#send()` actually returns is a `ListenableFuture`.

The source of the `send()` method looks like this:

```java
@Override
public ListenableFuture<SendResult<K, V>> send(String topic, @Nullable V data) {
    ProducerRecord<K, V> producerRecord = new ProducerRecord<>(topic, data);
    return doSend(producerRecord);
}
```

`ListenableFuture` is a Spring-provided interface that extends `Future`. Its source looks like this:

```java
public interface ListenableFuture<T> extends Future<T> {
    void addCallback(ListenableFutureCallback<? super T> var1);

    void addCallback(SuccessCallback<? super T> var1, FailureCallback var2);

    default CompletableFuture<T> completable() {
        CompletableFuture<T> completable = new DelegatingCompletableFuture<>(this);
        this.addCallback(completable::complete, completable::completeExceptionally);
        return completable;
    }
}
```

Let's keep improving the `sendMessage` method:

```java
public void sendMessage(String topic, Object o) {

    ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(topic, o);
    future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {

        @Override
        public void onSuccess(SendResult<String, Object> sendResult) {
            logger.info("Producer sent message to " + topic + " -> " + sendResult.getProducerRecord().value().toString());
        }

        @Override
        public void onFailure(Throwable throwable) {
            logger.error("Producer failed to send message: {}, reason: {}", o.toString(), throwable.getMessage());
        }
    });
}
```
Optimize it further with a lambda expression:

```java
public void sendMessage(String topic, Object o) {

    ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(topic, o);
    future.addCallback(result -> logger.info("Producer sent message to topic:{} partition:{}", result.getRecordMetadata().topic(), result.getRecordMetadata().partition()),
            ex -> logger.error("Producer failed to send message, reason: {}", ex.getMessage()));
}
```

Now let's take a quick look at the `send(String topic, @Nullable V data)` method.

When we call `send(String topic, @Nullable V data)`, it actually creates a new `ProducerRecord` object and sends that:

```java
@Override
public ListenableFuture<SendResult<K, V>> send(String topic, @Nullable V data) {
    ProducerRecord<K, V> producerRecord = new ProducerRecord<>(topic, data);
    return doSend(producerRecord);
}
```

The `ProducerRecord` class has several constructors:

```java
public ProducerRecord(String topic, V value) {
    this(topic, null, null, null, value, null);
}

public ProducerRecord(String topic, Integer partition, Long timestamp, K key, V value) {
    ......
}
```

If we want to attach a timestamp, a key, and so on when sending, `sendMessage()` can be written like this:

```java
public void sendMessage(String topic, Object o) {
    // it is best to leave the partition number null and let Kafka assign the partition itself
    ProducerRecord<String, Object> producerRecord = new ProducerRecord<>(topic, null, System.currentTimeMillis(), String.valueOf(o.hashCode()), o);

    ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(producerRecord);
    future.addCallback(result -> logger.info("Producer sent message to topic:{} partition:{}", result.getRecordMetadata().topic(), result.getRecordMetadata().partition()),
            ex -> logger.error("Producer failed to send message, reason: {}", ex.getMessage()));
}
```

### Step 5: Create the consumer

Annotating a method with `@KafkaListener` turns it into a listener: when messages arrive they are polled and handed to the method.

```java
import cn.javaguide.springbootkafka01sendobjects.entity.Book;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;

@Service
public class BookConsumerService {

    @Value("${kafka.topic.my-topic}")
    private String myTopic;
    @Value("${kafka.topic.my-topic2}")
    private String myTopic2;
    private final Logger logger = LoggerFactory.getLogger(BookConsumerService.class);
    private final ObjectMapper objectMapper = new ObjectMapper();

    @KafkaListener(topics = {"${kafka.topic.my-topic}"}, groupId = "group1")
    public void consumeMessage(ConsumerRecord<String, String> bookConsumerRecord) {
        try {
            Book book = objectMapper.readValue(bookConsumerRecord.value(), Book.class);
            logger.info("Consumer consumed message from topic:{} partition:{} -> {}", bookConsumerRecord.topic(), bookConsumerRecord.partition(), book.toString());
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
    }

    @KafkaListener(topics = {"${kafka.topic.my-topic2}"}, groupId = "group2")
    public void consumeMessage2(Book book) {
        logger.info("Consumer consumed message from {} -> {}", myTopic2, book.toString());
    }
}
```
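If a listener needs some record metadata but not the whole `ConsumerRecord`, spring-kafka can also inject individual headers into the handler method. The method below is an illustrative variant that would live inside `BookConsumerService` (it reuses its `logger`); if you used it, it would replace `consumeMessage2` rather than sit next to it in the same consumer group. It is not part of the sample project.

```java
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Payload;

// Hypothetical listener variant: the payload is converted to Book by the JSON converter,
// while the topic name and partition number are injected from the record headers.
@KafkaListener(topics = {"${kafka.topic.my-topic2}"}, groupId = "group2")
public void consumeWithHeaders(@Payload Book book,
                               @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
                               @Header(KafkaHeaders.RECEIVED_PARTITION_ID) int partition) {
    logger.info("Consumer consumed message from topic:{} partition:{} -> {}", topic, partition, book.toString());
}
```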
### Step 6: Create a REST controller

```java
import cn.javaguide.springbootkafka01sendobjects.entity.Book;
import cn.javaguide.springbootkafka01sendobjects.service.BookProducerService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.concurrent.atomic.AtomicLong;

/**
 * @author shuang.kou
 */
@RestController
@RequestMapping(value = "/book")
public class BookController {
    @Value("${kafka.topic.my-topic}")
    String myTopic;
    @Value("${kafka.topic.my-topic2}")
    String myTopic2;
    private final BookProducerService producer;
    private AtomicLong atomicLong = new AtomicLong();

    BookController(BookProducerService producer) {
        this.producer = producer;
    }

    @PostMapping
    public void sendMessageToKafkaTopic(@RequestParam("name") String name) {
        this.producer.sendMessage(myTopic, new Book(atomicLong.addAndGet(1), name));
        this.producer.sendMessage(myTopic2, new Book(atomicLong.addAndGet(1), name));
    }
}
```

### Step 7: Test it

Run the command:

```shell
curl -X POST -F 'name=Java' http://localhost:9090/book
```

The console output looks like this:

![](https://my-blog-to-use.oss-cn-beijing.aliyuncs.com/2019-11/springboot-kafka-result.jpg)

**my-topic has 2 partitions. When you send multiple messages, you will see that they are distributed fairly evenly across the partitions.**

### Recommended reading

- [Spring for Apache Kafka reference documentation](https://docs.spring.io/spring-kafka/reference/html/#preface)
- [Spring for Apache Kafka](https://spring.io/projects/spring-kafka#samples)
- [How to Work with Apache Kafka in Your Spring Boot Application](https://www.confluent.io/blog/apache-kafka-spring-boot-application/)
- [Spring Boot and Kafka – Practical Configuration Examples](https://thepracticaldeveloper.com/2018/11/24/spring-boot-kafka-config/)
- [Spring Kafka – JSON Serializer and Deserializer Example](https://memorynotfound.com/spring-kafka-json-serializer-deserializer-example/)
- [Spring Boot Kafka概览、配置及优雅地实现发布订阅](https://cloud.tencent.com/developer/article/1558924)
--------------------------------------------------------------------------------