├── .gitattributes
├── .gitignore
├── 1. start zookeeper.bat
├── 2. start kafka.bat
├── 3. verify startup.bat
├── 4. stop kafka.bat
├── 5. stop zookeeper.bat
├── Notes.txt
├── README.md
├── avro-schema-registry-example
├── docker-compose.yml
├── order-consumer
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── com
│ │ └── howtodoinjava
│ │ └── avro
│ │ └── example
│ │ └── consumer
│ │ └── OrderConsumer.java
├── order-service
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── com
│ │ └── howtodoinjava
│ │ └── avro
│ │ └── example
│ │ └── producer
│ │ └── OrderProducer.java
├── pom.xml
└── schema
│ ├── pom.xml
│ └── src
│ └── main
│ ├── avro
│ └── Order.avsc
│ └── java
│ └── com
│ └── howtodoinjava
│ └── avro
│ └── example
│ └── domain
│ └── generated
│ └── Order.java
├── kafka-mock-producer-consumer
├── docker-compose.yaml
├── pom.xml
└── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── howtodoinjava
│ │ └── app
│ │ ├── KafkaAdminExample.java
│ │ ├── Main.java
│ │ ├── db
│ │ └── OrderDB.java
│ │ ├── kafka
│ │ ├── constants
│ │ │ └── KafkaConstants.java
│ │ ├── consumer
│ │ │ └── MessageConsumer.java
│ │ ├── producer
│ │ │ └── MessageProducer.java
│ │ └── topic
│ │ │ └── CreateTopicService.java
│ │ └── service
│ │ └── OrderService.java
│ └── test
│ └── java
│ └── producer
│ ├── TestMyKafkaConsumer.java
│ └── TestMyKafkaProducer.java
├── kafka-using-docker-compose
├── docker-compose-single-node.yaml
└── docker-compose.yaml
├── spring-boot-kafka-app
├── .gitignore
├── .mvn
│ └── wrapper
│ │ ├── MavenWrapperDownloader.java
│ │ ├── maven-wrapper.jar
│ │ └── maven-wrapper.properties
├── mvnw
├── mvnw.cmd
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ └── com
│ │ │ └── howtodoinjava
│ │ │ └── kafka
│ │ │ └── demo
│ │ │ ├── SpringBootKafkaAppApplication.java
│ │ │ ├── common
│ │ │ └── AppConstants.java
│ │ │ ├── controller
│ │ │ └── KafkaProducerController.java
│ │ │ ├── model
│ │ │ └── User.java
│ │ │ └── service
│ │ │ ├── KafKaConsumerService.java
│ │ │ └── KafKaProducerService.java
│ └── resources
│ │ └── application.properties
│ └── test
│ └── java
│ └── com
│ └── howtodoinjava
│ └── kafka
│ └── demo
│ └── SpringBootKafkaAppApplicationTests.java
└── spring-kafka-java-config
├── pom.xml
└── src
└── main
├── java
└── com
│ └── howtodoinjava
│ └── kafka
│ └── demo
│ ├── SpringBootKafkaAppApplication.java
│ ├── config
│ ├── KafkaConsumerConfig.java
│ ├── KafkaProducerConfig.java
│ └── TopicConfig.java
│ ├── controller
│ └── KafkaProducerController.java
│ ├── model
│ └── User.java
│ └── service
│ ├── KafKaConsumerService.java
│ └── KafKaProducerService.java
└── resources
├── application.properties
└── logback.xml
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
2 | # Compiled class file
3 | *.class
4 |
5 | # Log file
6 | *.log
7 |
8 | # BlueJ files
9 | *.ctxt
10 | tomcat
11 |
12 | # Mobile Tools for Java (J2ME)
13 | .mtj.tmp/
14 |
15 | # Package Files #
16 | # Binaries
17 | *.7z
18 | *.dmg
19 | *.gz
20 | *.iso
21 | *.jar
22 | *.rar
23 | *.tar
24 | *.zip
25 | *.war
26 | *.nar
27 | *.ear
28 | *.zip
29 | *.tar.gz
30 | *.rar
31 | *.sar
32 | *.class
33 |
34 | # Maven
35 | target/
36 |
37 | # eclipse project file
38 | .settings/
39 | .classpath
40 | .project
41 | .metadata
42 |
43 | # OS
44 | .DS_Store
45 |
46 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
47 | hs_err_pid*
48 | # Misc
49 | *.swp
50 | *.prefs
51 |
52 | .idea
53 | *.iml
54 |
--------------------------------------------------------------------------------
/1. start zookeeper.bat:
--------------------------------------------------------------------------------
1 | cd E:\devsetup\bigdata\kafka2.5
2 | start cmd /k bin\windows\zookeeper-server-start.bat config\zookeeper.properties
--------------------------------------------------------------------------------
/2. start kafka.bat:
--------------------------------------------------------------------------------
1 | cd E:\devsetup\bigdata\kafka2.5
2 | start cmd /k bin\windows\kafka-server-start.bat config\server.properties
--------------------------------------------------------------------------------
/3. verify startup.bat:
--------------------------------------------------------------------------------
1 | cd E:\devsetup\bigdata\kafka2.5
2 | start cmd /k bin\windows\kafka-topics.bat --list --bootstrap-server localhost:9092
--------------------------------------------------------------------------------
/4. stop kafka.bat:
--------------------------------------------------------------------------------
1 | cd E:\devsetup\bigdata\kafka2.5
2 | start cmd /k bin\windows\kafka-server-stop.bat
--------------------------------------------------------------------------------
/5. stop zookeeper.bat:
--------------------------------------------------------------------------------
1 | cd E:\devsetup\bigdata\kafka2.5
2 | start cmd /k bin\windows\zookeeper-server-stop.bat
--------------------------------------------------------------------------------
/Notes.txt:
--------------------------------------------------------------------------------
1 | cd E:\devsetup\bigdata\kafka2.5
2 | start cmd /k bin\windows\zookeeper-server-start.bat config\zookeeper.properties
3 | start cmd /k bin\windows\kafka-server-start.bat config\server.properties
4 |
5 | cd E:\devsetup\bigdata\kafka2.5
6 | bin\windows\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic test
7 | bin\windows\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic users-log
8 | bin\windows\kafka-topics.bat --list --bootstrap-server localhost:9092
9 | bin\windows\kafka-console-producer.bat --bootstrap-server localhost:9092 --topic test
10 | bin\windows\kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test --from-beginning
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Related Tutorials
2 |
3 | This repository hosts the projects and source code written for the Kafka tutorials published on [howtodoinjava.com](https://howtodoinjava.com/).
4 |
5 | 1. [Apache Kafka – Introduction](https://howtodoinjava.com/kafka/tutorial-introduction/)
6 | 2. [Apache Kafka – Getting Started on Windows 10](https://howtodoinjava.com/kafka/getting-started-windows-10/)
7 | 3. [Spring Boot with Kafka - Hello World Example](https://howtodoinjava.com/kafka/spring-boot-with-kafka/)
8 | 4. [Spring Boot Kafka JsonSerializer Example](https://howtodoinjava.com/kafka/spring-boot-jsonserializer-example/)
9 | 5. [Spring Boot Kafka Multiple Consumers Example](https://howtodoinjava.com/kafka/multiple-consumers-example/)
--------------------------------------------------------------------------------
/avro-schema-registry-example/docker-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '3.6'
3 | services:
4 | zookeeper:
5 | image: confluentinc/cp-zookeeper:7.1.0
6 | platform: linux/amd64
7 | hostname: zookeeper
8 | container_name: zookeeper
9 | ports:
10 | - "2181:2181"
11 | environment:
12 | ZOOKEEPER_CLIENT_PORT: 2181
13 | ZOOKEEPER_TICK_TIME: 2000
14 |
15 | broker:
16 | image: confluentinc/cp-server:7.1.0
17 | platform: linux/amd64
18 | hostname: broker
19 | container_name: broker
20 | depends_on:
21 | - zookeeper
22 | ports:
23 | - "9092:9092"
24 | - "9101:9101"
25 | environment:
26 | KAFKA_BROKER_ID: 1
27 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
28 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
29 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
30 | KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
31 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
32 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
33 | KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
34 | KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
35 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
36 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
37 | KAFKA_JMX_PORT: 9101
38 | KAFKA_JMX_HOSTNAME: localhost
39 | KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: http://schema-registry:8081
40 | CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:29092
41 | CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
42 | CONFLUENT_METRICS_ENABLE: 'true'
43 | CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
44 |
45 | schema-registry:
46 | image: confluentinc/cp-schema-registry:7.1.0
47 | platform: linux/amd64
48 | hostname: schema-registry
49 | container_name: schema-registry
50 | depends_on:
51 | - broker
52 | ports:
53 | - "8081:8081"
54 | environment:
55 | SCHEMA_REGISTRY_HOST_NAME: schema-registry
56 | SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:29092'
57 | SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
58 |
59 | create-topic:
60 | image: confluentinc/cp-kafka:7.1.0
61 | command: [ "kafka-topics", "--create", "--topic", "orders-sr", "--partitions", "1", "--replication-factor", "1", "--if-not-exists", "--bootstrap-server", "broker:9092" ]
62 | depends_on:
63 | - zookeeper
64 | environment:
65 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092
--------------------------------------------------------------------------------
/avro-schema-registry-example/order-consumer/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |   <modelVersion>4.0.0</modelVersion>
6 |   <parent>
7 |     <groupId>com.howtodoinjava.avro.example</groupId>
8 |     <artifactId>avro-schema-registry-example</artifactId>
9 |     <version>1.0-SNAPSHOT</version>
10 |     <relativePath>../pom.xml</relativePath>
11 |   </parent>
12 |
13 |   <artifactId>order-consumer</artifactId>
14 |
15 |   <properties>
16 |     <maven.compiler.source>17</maven.compiler.source>
17 |     <maven.compiler.target>17</maven.compiler.target>
18 |     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
19 |   </properties>
20 |
21 |   <dependencies>
22 |     <dependency>
23 |       <groupId>com.howtodoinjava.avro.example</groupId>
24 |       <artifactId>schema</artifactId>
25 |       <version>1.0</version>
26 |     </dependency>
27 |   </dependencies>
28 |
29 | </project>
--------------------------------------------------------------------------------
/avro-schema-registry-example/order-consumer/src/main/java/com/howtodoinjava/avro/example/consumer/OrderConsumer.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.avro.example.consumer;
2 |
3 | import com.howtodoinjava.avro.example.domain.generated.Order;
4 | import io.confluent.kafka.serializers.KafkaAvroDeserializer;
5 | import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
6 | import java.time.Duration;
7 | import java.util.Collections;
8 | import java.util.Properties;
9 | import org.apache.kafka.clients.consumer.ConsumerConfig;
10 | import org.apache.kafka.clients.consumer.ConsumerRecord;
11 | import org.apache.kafka.clients.consumer.ConsumerRecords;
12 | import org.apache.kafka.clients.consumer.KafkaConsumer;
13 |
14 | public class OrderConsumer {
15 |
16 | private static final String ORDERS_TOPIC_SR = "orders-sr";
17 |
18 | public static void main(String[] args) {
19 | KafkaConsumer<String, Order> consumer = configureConsumer();
20 |
21 | consumer.subscribe(Collections.singletonList(ORDERS_TOPIC_SR));
22 | System.out.println("Consumer Started");
23 |
24 | while (true) {
25 | ConsumerRecords<String, Order> records = consumer.poll(Duration.ofMillis(2000));
26 | for (ConsumerRecord<String, Order> orderRecord : records) {
27 | Order order = orderRecord.value();
28 | System.out.println(
29 | "Consumed message: \n key: " + orderRecord.key() + ", value: " + order.toString());
30 | }
31 | }
32 | }
33 |
34 | private static KafkaConsumer<String, Order> configureConsumer() {
35 | Properties props = new Properties();
36 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
37 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "order.consumer.sr");
38 | props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
39 | props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
40 | props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
41 | props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
42 |
43 | return new KafkaConsumer<>(props);
44 | }
45 |
46 | }
--------------------------------------------------------------------------------
/avro-schema-registry-example/order-service/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |   <modelVersion>4.0.0</modelVersion>
6 |   <parent>
7 |     <groupId>com.howtodoinjava.avro.example</groupId>
8 |     <artifactId>avro-schema-registry-example</artifactId>
9 |     <version>1.0-SNAPSHOT</version>
10 |     <relativePath>../pom.xml</relativePath>
11 |   </parent>
12 |
13 |   <artifactId>order-service</artifactId>
14 |
15 |   <properties>
16 |     <maven.compiler.source>17</maven.compiler.source>
17 |     <maven.compiler.target>17</maven.compiler.target>
18 |     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
19 |   </properties>
20 |
21 |   <dependencies>
22 |     <dependency>
23 |       <groupId>com.howtodoinjava.avro.example</groupId>
24 |       <artifactId>schema</artifactId>
25 |       <version>1.0</version>
26 |     </dependency>
27 |   </dependencies>
28 |
29 | </project>
--------------------------------------------------------------------------------
/avro-schema-registry-example/order-service/src/main/java/com/howtodoinjava/avro/example/producer/OrderProducer.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.avro.example.producer;
2 |
3 | import com.howtodoinjava.avro.example.domain.generated.Order;
4 | import io.confluent.kafka.serializers.KafkaAvroSerializer;
5 | import io.confluent.kafka.serializers.KafkaAvroSerializerConfig;
6 | import java.time.Instant;
7 | import java.util.Properties;
8 | import java.util.UUID;
9 | import org.apache.kafka.clients.producer.KafkaProducer;
10 | import org.apache.kafka.clients.producer.ProducerConfig;
11 | import org.apache.kafka.clients.producer.ProducerRecord;
12 |
13 | public class OrderProducer {
14 |
15 | private static final String ORDERS_TOPIC_SR = "orders-sr";
16 |
17 | public static void main(String[] args) {
18 |
19 | KafkaProducer<String, Order> producer = configureProducer();
20 | Order order = buildNewOrder();
21 |
22 | ProducerRecord<String, Order> producerRecord =
23 | new ProducerRecord<>(ORDERS_TOPIC_SR, order.getId().toString(), order);
24 |
25 | producer.send(producerRecord, (metadata, exception) -> {
26 | if (exception == null) {
27 | System.out.println("Message produced, record metadata: " + metadata);
28 | System.out.println("Producing message with data: " + producerRecord.value());
29 | } else {
30 | System.err.println("Error producing message: " + exception.getMessage());
31 | }
32 | });
33 |
34 | producer.flush();
35 | producer.close();
36 | }
37 |
38 | private static KafkaProducer<String, Order> configureProducer() {
39 | Properties properties = new Properties();
40 | properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
41 | properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
42 | properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
43 | properties.put(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
44 | return new KafkaProducer<>(properties);
45 | }
46 |
47 | private static Order buildNewOrder() {
48 | return Order.newBuilder()
49 | .setId(UUID.randomUUID())
50 | .setFirstName("John")
51 | .setLastName("Doe")
52 | //.setMiddleName("TestMiddleName")
53 | .setOrderedTime(Instant.now())
54 | .setStatus("NEW")
55 | .build();
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/avro-schema-registry-example/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | org.springframework.boot
8 | spring-boot-starter-parent
9 | 3.1.2
10 |
11 |
12 |
13 | com.howtodoinjava.avro.example
14 | avro-schema-registry-example
15 | 1.0-SNAPSHOT
16 | pom
17 |
18 |
19 | schema
20 | order-service
21 | order-consumer
22 |
23 |
24 |
25 | 17
26 | 17
27 | UTF-8
28 |
29 |
30 |
31 |
32 | org.apache.avro
33 | avro
34 | 1.11.3
35 |
36 |
37 |
38 | io.confluent
39 | kafka-avro-serializer
40 | 5.5.1
41 |
42 |
43 | org.glassfish.jersey.core
44 | jersey-client
45 | 2.34
46 |
47 |
48 | org.glassfish.jersey.core
49 | jersey-common
50 | 2.34
51 |
52 |
53 |
54 |
55 |
56 | confluent
57 | Confluent
58 | https://packages.confluent.io/maven/
59 |
60 |
61 |
62 |
89 |
90 |
--------------------------------------------------------------------------------
/avro-schema-registry-example/schema/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | com.howtodoinjava.avro.example
8 | avro-schema-registry-example
9 | 1.0-SNAPSHOT
10 | ../pom.xml
11 |
12 |
13 | schema
14 | 2.0
15 |
16 |
17 | 17
18 | 17
19 | UTF-8
20 |
21 |
22 |
23 |
24 |
25 | maven-compiler-plugin
26 | 3.8.0
27 |
28 |
29 | org.apache.avro
30 | avro-maven-plugin
31 | 1.9.2
32 |
33 |
34 | generate-sources
35 |
36 | schema
37 |
38 |
39 | ${project.basedir}/src/main/avro/
40 | ${project.basedir}/src/main/java/
41 | true
42 | org.apache.avro.Conversions$UUIDConversion
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/avro-schema-registry-example/schema/src/main/avro/Order.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Order",
3 | "namespace": "com.howtodoinjava.avro.example.domain.generated",
4 | "type": "record",
5 | "fields": [
6 | {
7 | "name": "id",
8 | "type": {
9 | "type": "string",
10 | "logicalType": "uuid"
11 | }
12 | },
13 | {
14 | "name": "firstName",
15 | "type": "string"
16 | },
17 | {
18 | "name": "lastName",
19 | "type": "string"
20 | },
21 | {
22 | "name": "middleName",
23 | "type": "string"
24 | },
25 | {
26 | "name": "ordered_time",
27 | "type": {
28 | "type": "long",
29 | "logicalType": "timestamp-millis"
30 | }
31 | },
32 | {
33 | "name": "status",
34 | "type": "string"
35 | }
36 | ]
37 | }
38 |
--------------------------------------------------------------------------------
/avro-schema-registry-example/schema/src/main/java/com/howtodoinjava/avro/example/domain/generated/Order.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Autogenerated by Avro
3 | *
4 | * DO NOT EDIT DIRECTLY
5 | */
6 | package com.howtodoinjava.avro.example.domain.generated;
7 |
8 | import org.apache.avro.generic.GenericArray;
9 | import org.apache.avro.specific.SpecificData;
10 | import org.apache.avro.util.Utf8;
11 | import org.apache.avro.message.BinaryMessageEncoder;
12 | import org.apache.avro.message.BinaryMessageDecoder;
13 | import org.apache.avro.message.SchemaStore;
14 |
15 | @org.apache.avro.specific.AvroGenerated
16 | public class Order extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
17 | private static final long serialVersionUID = 5756394621186830934L;
18 | public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Order\",\"namespace\":\"com.howtodoinjava.avro.example.domain.generated\",\"fields\":[{\"name\":\"id\",\"type\":{\"type\":\"string\",\"logicalType\":\"uuid\"}},{\"name\":\"firstName\",\"type\":\"string\"},{\"name\":\"lastName\",\"type\":\"string\"},{\"name\":\"middleName\",\"type\":\"string\"},{\"name\":\"ordered_time\",\"type\":{\"type\":\"long\",\"logicalType\":\"timestamp-millis\"}},{\"name\":\"status\",\"type\":\"string\"}]}");
19 | public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
20 |
21 | private static SpecificData MODEL$ = new SpecificData();
22 | static {
23 | MODEL$.addLogicalTypeConversion(new org.apache.avro.Conversions.UUIDConversion());
24 | MODEL$.addLogicalTypeConversion(new org.apache.avro.data.TimeConversions.TimestampMillisConversion());
25 | }
26 |
27 | private static final BinaryMessageEncoder<Order> ENCODER =
28 | new BinaryMessageEncoder<Order>(MODEL$, SCHEMA$);
29 |
30 | private static final BinaryMessageDecoder<Order> DECODER =
31 | new BinaryMessageDecoder<Order>(MODEL$, SCHEMA$);
32 |
33 | /**
34 | * Return the BinaryMessageEncoder instance used by this class.
35 | * @return the message encoder used by this class
36 | */
37 | public static BinaryMessageEncoder<Order> getEncoder() {
38 | return ENCODER;
39 | }
40 |
41 | /**
42 | * Return the BinaryMessageDecoder instance used by this class.
43 | * @return the message decoder used by this class
44 | */
45 | public static BinaryMessageDecoder<Order> getDecoder() {
46 | return DECODER;
47 | }
48 |
49 | /**
50 | * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
51 | * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
52 | * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
53 | */
54 | public static BinaryMessageDecoder<Order> createDecoder(SchemaStore resolver) {
55 | return new BinaryMessageDecoder<Order>(MODEL$, SCHEMA$, resolver);
56 | }
57 |
58 | /**
59 | * Serializes this Order to a ByteBuffer.
60 | * @return a buffer holding the serialized data for this instance
61 | * @throws java.io.IOException if this instance could not be serialized
62 | */
63 | public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
64 | return ENCODER.encode(this);
65 | }
66 |
67 | /**
68 | * Deserializes a Order from a ByteBuffer.
69 | * @param b a byte buffer holding serialized data for an instance of this class
70 | * @return a Order instance decoded from the given buffer
71 | * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
72 | */
73 | public static Order fromByteBuffer(
74 | java.nio.ByteBuffer b) throws java.io.IOException {
75 | return DECODER.decode(b);
76 | }
77 |
78 | @Deprecated public java.util.UUID id;
79 | @Deprecated public java.lang.CharSequence firstName;
80 | @Deprecated public java.lang.CharSequence lastName;
81 | @Deprecated public java.lang.CharSequence middleName;
82 | @Deprecated public java.time.Instant ordered_time;
83 | @Deprecated public java.lang.CharSequence status;
84 |
85 | /**
86 | * Default constructor. Note that this does not initialize fields
87 | * to their default values from the schema. If that is desired then
88 | * one should use newBuilder().
89 | */
90 | public Order() {}
91 |
92 | /**
93 | * All-args constructor.
94 | * @param id The new value for id
95 | * @param firstName The new value for firstName
96 | * @param lastName The new value for lastName
97 | * @param middleName The new value for middleName
98 | * @param ordered_time The new value for ordered_time
99 | * @param status The new value for status
100 | */
101 | public Order(java.util.UUID id, java.lang.CharSequence firstName, java.lang.CharSequence lastName, java.lang.CharSequence middleName, java.time.Instant ordered_time, java.lang.CharSequence status) {
102 | this.id = id;
103 | this.firstName = firstName;
104 | this.lastName = lastName;
105 | this.middleName = middleName;
106 | this.ordered_time = ordered_time.truncatedTo(java.time.temporal.ChronoUnit.MILLIS);
107 | this.status = status;
108 | }
109 |
110 | public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
111 | public org.apache.avro.Schema getSchema() { return SCHEMA$; }
112 | // Used by DatumWriter. Applications should not call.
113 | public java.lang.Object get(int field$) {
114 | switch (field$) {
115 | case 0: return id;
116 | case 1: return firstName;
117 | case 2: return lastName;
118 | case 3: return middleName;
119 | case 4: return ordered_time;
120 | case 5: return status;
121 | default: throw new org.apache.avro.AvroRuntimeException("Bad index");
122 | }
123 | }
124 |
125 | private static final org.apache.avro.Conversion<?>[] conversions =
126 | new org.apache.avro.Conversion<?>[] {
127 | new org.apache.avro.Conversions.UUIDConversion(),
128 | null,
129 | null,
130 | null,
131 | new org.apache.avro.data.TimeConversions.TimestampMillisConversion(),
132 | null,
133 | null
134 | };
135 |
136 | @Override
137 | public org.apache.avro.Conversion<?> getConversion(int field) {
138 | return conversions[field];
139 | }
140 |
141 | // Used by DatumReader. Applications should not call.
142 | @SuppressWarnings(value="unchecked")
143 | public void put(int field$, java.lang.Object value$) {
144 | switch (field$) {
145 | case 0: id = (java.util.UUID)value$; break;
146 | case 1: firstName = (java.lang.CharSequence)value$; break;
147 | case 2: lastName = (java.lang.CharSequence)value$; break;
148 | case 3: middleName = (java.lang.CharSequence)value$; break;
149 | case 4: ordered_time = (java.time.Instant)value$; break;
150 | case 5: status = (java.lang.CharSequence)value$; break;
151 | default: throw new org.apache.avro.AvroRuntimeException("Bad index");
152 | }
153 | }
154 |
155 | /**
156 | * Gets the value of the 'id' field.
157 | * @return The value of the 'id' field.
158 | */
159 | public java.util.UUID getId() {
160 | return id;
161 | }
162 |
163 |
164 | /**
165 | * Sets the value of the 'id' field.
166 | * @param value the value to set.
167 | */
168 | public void setId(java.util.UUID value) {
169 | this.id = value;
170 | }
171 |
172 | /**
173 | * Gets the value of the 'firstName' field.
174 | * @return The value of the 'firstName' field.
175 | */
176 | public java.lang.CharSequence getFirstName() {
177 | return firstName;
178 | }
179 |
180 |
181 | /**
182 | * Sets the value of the 'firstName' field.
183 | * @param value the value to set.
184 | */
185 | public void setFirstName(java.lang.CharSequence value) {
186 | this.firstName = value;
187 | }
188 |
189 | /**
190 | * Gets the value of the 'lastName' field.
191 | * @return The value of the 'lastName' field.
192 | */
193 | public java.lang.CharSequence getLastName() {
194 | return lastName;
195 | }
196 |
197 |
198 | /**
199 | * Sets the value of the 'lastName' field.
200 | * @param value the value to set.
201 | */
202 | public void setLastName(java.lang.CharSequence value) {
203 | this.lastName = value;
204 | }
205 |
206 | /**
207 | * Gets the value of the 'middleName' field.
208 | * @return The value of the 'middleName' field.
209 | */
210 | public java.lang.CharSequence getMiddleName() {
211 | return middleName;
212 | }
213 |
214 |
215 | /**
216 | * Sets the value of the 'middleName' field.
217 | * @param value the value to set.
218 | */
219 | public void setMiddleName(java.lang.CharSequence value) {
220 | this.middleName = value;
221 | }
222 |
223 | /**
224 | * Gets the value of the 'ordered_time' field.
225 | * @return The value of the 'ordered_time' field.
226 | */
227 | public java.time.Instant getOrderedTime() {
228 | return ordered_time;
229 | }
230 |
231 |
232 | /**
233 | * Sets the value of the 'ordered_time' field.
234 | * @param value the value to set.
235 | */
236 | public void setOrderedTime(java.time.Instant value) {
237 | this.ordered_time = value.truncatedTo(java.time.temporal.ChronoUnit.MILLIS);
238 | }
239 |
240 | /**
241 | * Gets the value of the 'status' field.
242 | * @return The value of the 'status' field.
243 | */
244 | public java.lang.CharSequence getStatus() {
245 | return status;
246 | }
247 |
248 |
249 | /**
250 | * Sets the value of the 'status' field.
251 | * @param value the value to set.
252 | */
253 | public void setStatus(java.lang.CharSequence value) {
254 | this.status = value;
255 | }
256 |
257 | /**
258 | * Creates a new Order RecordBuilder.
259 | * @return A new Order RecordBuilder
260 | */
261 | public static com.howtodoinjava.avro.example.domain.generated.Order.Builder newBuilder() {
262 | return new com.howtodoinjava.avro.example.domain.generated.Order.Builder();
263 | }
264 |
265 | /**
266 | * Creates a new Order RecordBuilder by copying an existing Builder.
267 | * @param other The existing builder to copy.
268 | * @return A new Order RecordBuilder
269 | */
270 | public static com.howtodoinjava.avro.example.domain.generated.Order.Builder newBuilder(com.howtodoinjava.avro.example.domain.generated.Order.Builder other) {
271 | if (other == null) {
272 | return new com.howtodoinjava.avro.example.domain.generated.Order.Builder();
273 | } else {
274 | return new com.howtodoinjava.avro.example.domain.generated.Order.Builder(other);
275 | }
276 | }
277 |
278 | /**
279 | * Creates a new Order RecordBuilder by copying an existing Order instance.
280 | * @param other The existing instance to copy.
281 | * @return A new Order RecordBuilder
282 | */
283 | public static com.howtodoinjava.avro.example.domain.generated.Order.Builder newBuilder(com.howtodoinjava.avro.example.domain.generated.Order other) {
284 | if (other == null) {
285 | return new com.howtodoinjava.avro.example.domain.generated.Order.Builder();
286 | } else {
287 | return new com.howtodoinjava.avro.example.domain.generated.Order.Builder(other);
288 | }
289 | }
290 |
291 | /**
292 | * RecordBuilder for Order instances.
293 | */
294 | @org.apache.avro.specific.AvroGenerated
295 | public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<Order>
296 | implements org.apache.avro.data.RecordBuilder<Order> {
297 |
298 | private java.util.UUID id;
299 | private java.lang.CharSequence firstName;
300 | private java.lang.CharSequence lastName;
301 | private java.lang.CharSequence middleName;
302 | private java.time.Instant ordered_time;
303 | private java.lang.CharSequence status;
304 |
305 | /** Creates a new Builder */
306 | private Builder() {
307 | super(SCHEMA$);
308 | }
309 |
310 | /**
311 | * Creates a Builder by copying an existing Builder.
312 | * @param other The existing Builder to copy.
313 | */
314 | private Builder(com.howtodoinjava.avro.example.domain.generated.Order.Builder other) {
315 | super(other);
316 | if (isValidValue(fields()[0], other.id)) {
317 | this.id = data().deepCopy(fields()[0].schema(), other.id);
318 | fieldSetFlags()[0] = other.fieldSetFlags()[0];
319 | }
320 | if (isValidValue(fields()[1], other.firstName)) {
321 | this.firstName = data().deepCopy(fields()[1].schema(), other.firstName);
322 | fieldSetFlags()[1] = other.fieldSetFlags()[1];
323 | }
324 | if (isValidValue(fields()[2], other.lastName)) {
325 | this.lastName = data().deepCopy(fields()[2].schema(), other.lastName);
326 | fieldSetFlags()[2] = other.fieldSetFlags()[2];
327 | }
328 | if (isValidValue(fields()[3], other.middleName)) {
329 | this.middleName = data().deepCopy(fields()[3].schema(), other.middleName);
330 | fieldSetFlags()[3] = other.fieldSetFlags()[3];
331 | }
332 | if (isValidValue(fields()[4], other.ordered_time)) {
333 | this.ordered_time = data().deepCopy(fields()[4].schema(), other.ordered_time);
334 | fieldSetFlags()[4] = other.fieldSetFlags()[4];
335 | }
336 | if (isValidValue(fields()[5], other.status)) {
337 | this.status = data().deepCopy(fields()[5].schema(), other.status);
338 | fieldSetFlags()[5] = other.fieldSetFlags()[5];
339 | }
340 | }
341 |
342 | /**
343 | * Creates a Builder by copying an existing Order instance
344 | * @param other The existing instance to copy.
345 | */
346 | private Builder(com.howtodoinjava.avro.example.domain.generated.Order other) {
347 | super(SCHEMA$);
348 | if (isValidValue(fields()[0], other.id)) {
349 | this.id = data().deepCopy(fields()[0].schema(), other.id);
350 | fieldSetFlags()[0] = true;
351 | }
352 | if (isValidValue(fields()[1], other.firstName)) {
353 | this.firstName = data().deepCopy(fields()[1].schema(), other.firstName);
354 | fieldSetFlags()[1] = true;
355 | }
356 | if (isValidValue(fields()[2], other.lastName)) {
357 | this.lastName = data().deepCopy(fields()[2].schema(), other.lastName);
358 | fieldSetFlags()[2] = true;
359 | }
360 | if (isValidValue(fields()[3], other.middleName)) {
361 | this.middleName = data().deepCopy(fields()[3].schema(), other.middleName);
362 | fieldSetFlags()[3] = true;
363 | }
364 | if (isValidValue(fields()[4], other.ordered_time)) {
365 | this.ordered_time = data().deepCopy(fields()[4].schema(), other.ordered_time);
366 | fieldSetFlags()[4] = true;
367 | }
368 | if (isValidValue(fields()[5], other.status)) {
369 | this.status = data().deepCopy(fields()[5].schema(), other.status);
370 | fieldSetFlags()[5] = true;
371 | }
372 | }
373 |
374 | /**
375 | * Gets the value of the 'id' field.
376 | * @return The value.
377 | */
378 | public java.util.UUID getId() {
379 | return id;
380 | }
381 |
382 |
383 | /**
384 | * Sets the value of the 'id' field.
385 | * @param value The value of 'id'.
386 | * @return This builder.
387 | */
388 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder setId(java.util.UUID value) {
389 | validate(fields()[0], value);
390 | this.id = value;
391 | fieldSetFlags()[0] = true;
392 | return this;
393 | }
394 |
395 | /**
396 | * Checks whether the 'id' field has been set.
397 | * @return True if the 'id' field has been set, false otherwise.
398 | */
399 | public boolean hasId() {
400 | return fieldSetFlags()[0];
401 | }
402 |
403 |
404 | /**
405 | * Clears the value of the 'id' field.
406 | * @return This builder.
407 | */
408 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder clearId() {
409 | id = null;
410 | fieldSetFlags()[0] = false;
411 | return this;
412 | }
413 |
414 | /**
415 | * Gets the value of the 'firstName' field.
416 | * @return The value.
417 | */
418 | public java.lang.CharSequence getFirstName() {
419 | return firstName;
420 | }
421 |
422 |
423 | /**
424 | * Sets the value of the 'firstName' field.
425 | * @param value The value of 'firstName'.
426 | * @return This builder.
427 | */
428 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder setFirstName(java.lang.CharSequence value) {
429 | validate(fields()[1], value);
430 | this.firstName = value;
431 | fieldSetFlags()[1] = true;
432 | return this;
433 | }
434 |
435 | /**
436 | * Checks whether the 'firstName' field has been set.
437 | * @return True if the 'firstName' field has been set, false otherwise.
438 | */
439 | public boolean hasFirstName() {
440 | return fieldSetFlags()[1];
441 | }
442 |
443 |
444 | /**
445 | * Clears the value of the 'firstName' field.
446 | * @return This builder.
447 | */
448 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder clearFirstName() {
449 | firstName = null;
450 | fieldSetFlags()[1] = false;
451 | return this;
452 | }
453 |
454 | /**
455 | * Gets the value of the 'lastName' field.
456 | * @return The value.
457 | */
458 | public java.lang.CharSequence getLastName() {
459 | return lastName;
460 | }
461 |
462 |
463 | /**
464 | * Sets the value of the 'lastName' field.
465 | * @param value The value of 'lastName'.
466 | * @return This builder.
467 | */
468 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder setLastName(java.lang.CharSequence value) {
469 | validate(fields()[2], value);
470 | this.lastName = value;
471 | fieldSetFlags()[2] = true;
472 | return this;
473 | }
474 |
475 | /**
476 | * Checks whether the 'lastName' field has been set.
477 | * @return True if the 'lastName' field has been set, false otherwise.
478 | */
479 | public boolean hasLastName() {
480 | return fieldSetFlags()[2];
481 | }
482 |
483 |
484 | /**
485 | * Clears the value of the 'lastName' field.
486 | * @return This builder.
487 | */
488 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder clearLastName() {
489 | lastName = null;
490 | fieldSetFlags()[2] = false;
491 | return this;
492 | }
493 |
494 | /**
495 | * Gets the value of the 'middleName' field.
496 | * @return The value.
497 | */
498 | public java.lang.CharSequence getMiddleName() {
499 | return middleName;
500 | }
501 |
502 |
503 | /**
504 | * Sets the value of the 'middleName' field.
505 | * @param value The value of 'middleName'.
506 | * @return This builder.
507 | */
508 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder setMiddleName(java.lang.CharSequence value) {
509 | validate(fields()[3], value);
510 | this.middleName = value;
511 | fieldSetFlags()[3] = true;
512 | return this;
513 | }
514 |
515 | /**
516 | * Checks whether the 'middleName' field has been set.
517 | * @return True if the 'middleName' field has been set, false otherwise.
518 | */
519 | public boolean hasMiddleName() {
520 | return fieldSetFlags()[3];
521 | }
522 |
523 |
524 | /**
525 | * Clears the value of the 'middleName' field.
526 | * @return This builder.
527 | */
528 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder clearMiddleName() {
529 | middleName = null;
530 | fieldSetFlags()[3] = false;
531 | return this;
532 | }
533 |
534 | /**
535 | * Gets the value of the 'ordered_time' field.
536 | * @return The value.
537 | */
538 | public java.time.Instant getOrderedTime() {
539 | return ordered_time;
540 | }
541 |
542 |
543 | /**
544 | * Sets the value of the 'ordered_time' field.
545 | * @param value The value of 'ordered_time'.
546 | * @return This builder.
547 | */
548 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder setOrderedTime(java.time.Instant value) {
549 | validate(fields()[4], value);
550 | this.ordered_time = value.truncatedTo(java.time.temporal.ChronoUnit.MILLIS);
551 | fieldSetFlags()[4] = true;
552 | return this;
553 | }
554 |
555 | /**
556 | * Checks whether the 'ordered_time' field has been set.
557 | * @return True if the 'ordered_time' field has been set, false otherwise.
558 | */
559 | public boolean hasOrderedTime() {
560 | return fieldSetFlags()[4];
561 | }
562 |
563 |
564 | /**
565 | * Clears the value of the 'ordered_time' field.
566 | * @return This builder.
567 | */
568 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder clearOrderedTime() {
569 | fieldSetFlags()[4] = false;
570 | return this;
571 | }
572 |
573 | /**
574 | * Gets the value of the 'status' field.
575 | * @return The value.
576 | */
577 | public java.lang.CharSequence getStatus() {
578 | return status;
579 | }
580 |
581 |
582 | /**
583 | * Sets the value of the 'status' field.
584 | * @param value The value of 'status'.
585 | * @return This builder.
586 | */
587 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder setStatus(java.lang.CharSequence value) {
588 | validate(fields()[5], value);
589 | this.status = value;
590 | fieldSetFlags()[5] = true;
591 | return this;
592 | }
593 |
594 | /**
595 | * Checks whether the 'status' field has been set.
596 | * @return True if the 'status' field has been set, false otherwise.
597 | */
598 | public boolean hasStatus() {
599 | return fieldSetFlags()[5];
600 | }
601 |
602 |
603 | /**
604 | * Clears the value of the 'status' field.
605 | * @return This builder.
606 | */
607 | public com.howtodoinjava.avro.example.domain.generated.Order.Builder clearStatus() {
608 | status = null;
609 | fieldSetFlags()[5] = false;
610 | return this;
611 | }
612 |
613 | @Override
614 | @SuppressWarnings("unchecked")
615 | public Order build() {
616 | try {
617 | Order record = new Order();
618 | record.id = fieldSetFlags()[0] ? this.id : (java.util.UUID) defaultValue(fields()[0]);
619 | record.firstName = fieldSetFlags()[1] ? this.firstName : (java.lang.CharSequence) defaultValue(fields()[1]);
620 | record.lastName = fieldSetFlags()[2] ? this.lastName : (java.lang.CharSequence) defaultValue(fields()[2]);
621 | record.middleName = fieldSetFlags()[3] ? this.middleName : (java.lang.CharSequence) defaultValue(fields()[3]);
622 | record.ordered_time = fieldSetFlags()[4] ? this.ordered_time : (java.time.Instant) defaultValue(fields()[4]);
623 | record.status = fieldSetFlags()[5] ? this.status : (java.lang.CharSequence) defaultValue(fields()[5]);
624 | return record;
625 | } catch (org.apache.avro.AvroMissingFieldException e) {
626 | throw e;
627 | } catch (java.lang.Exception e) {
628 | throw new org.apache.avro.AvroRuntimeException(e);
629 | }
630 | }
631 | }
632 |
633 | @SuppressWarnings("unchecked")
634 | private static final org.apache.avro.io.DatumWriter<Order>
635 | WRITER$ = (org.apache.avro.io.DatumWriter<Order>)MODEL$.createDatumWriter(SCHEMA$);
636 |
637 | @Override public void writeExternal(java.io.ObjectOutput out)
638 | throws java.io.IOException {
639 | WRITER$.write(this, SpecificData.getEncoder(out));
640 | }
641 |
642 | @SuppressWarnings("unchecked")
643 | private static final org.apache.avro.io.DatumReader<Order>
644 | READER$ = (org.apache.avro.io.DatumReader<Order>)MODEL$.createDatumReader(SCHEMA$);
645 |
646 | @Override public void readExternal(java.io.ObjectInput in)
647 | throws java.io.IOException {
648 | READER$.read(this, SpecificData.getDecoder(in));
649 | }
650 |
651 | }
652 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | kafka:
5 | image: confluentinc/cp-kafka:latest
6 | environment:
7 | KAFKA_BROKER_ID: 1
8 | KAFKA_LISTENERS: PLAINTEXT://:9092
9 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092
10 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
11 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
12 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
13 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
14 | KAFKA_CONTROLLER_QUORUM_VOTERS: 1@localhost:9093
15 | KAFKA_PROCESS_ROLES: broker,controller
16 | KAFKA_NODE_ID: 1
17 | CLUSTER_ID: "your-unique-cluster-id"
18 | ports:
19 | - "9092:9092"
20 | - "9093:9093"
21 | volumes:
22 | - kafka-data:/var/lib/kafka/data
23 |
24 | volumes:
25 | kafka-data:
26 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |   <modelVersion>4.0.0</modelVersion>
6 |
7 |   <groupId>org.example</groupId>
8 |   <artifactId>Kafka</artifactId>
9 |   <version>1.0-SNAPSHOT</version>
10 |
11 |   <properties>
12 |     <maven.compiler.source>17</maven.compiler.source>
13 |     <maven.compiler.target>17</maven.compiler.target>
14 |     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
15 |   </properties>
16 |
17 |   <dependencies>
18 |
19 |     <dependency>
20 |       <groupId>org.apache.kafka</groupId>
21 |       <artifactId>kafka-clients</artifactId>
22 |       <version>3.6.1</version>
23 |     </dependency>
24 |
25 |     <dependency>
26 |       <groupId>com.fasterxml.jackson.core</groupId>
27 |       <artifactId>jackson-core</artifactId>
28 |       <version>2.16.1</version>
29 |     </dependency>
30 |     <dependency>
31 |       <groupId>com.fasterxml.jackson.core</groupId>
32 |       <artifactId>jackson-databind</artifactId>
33 |       <version>2.16.1</version>
34 |     </dependency>
35 |
36 |     <dependency>
37 |       <groupId>org.junit.jupiter</groupId>
38 |       <artifactId>junit-jupiter-api</artifactId>
39 |       <version>5.10.0</version>
40 |       <scope>test</scope>
41 |     </dependency>
42 |
43 |   </dependencies>
44 |
45 | </project>
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/KafkaAdminExample.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app;
2 |
3 | import org.apache.kafka.clients.admin.AdminClient;
4 | import org.apache.kafka.clients.admin.AdminClientConfig;
5 | import org.apache.kafka.clients.admin.ListTopicsResult;
6 |
7 | import java.util.Properties;
8 | import java.util.concurrent.ExecutionException;
9 |
10 | public class KafkaAdminExample
11 | {
12 | public static void main(String[] args)
13 | {
14 | Properties config = new Properties();
15 | config.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
16 |
17 | try (AdminClient admin = AdminClient.create(config))
18 | {
19 | ListTopicsResult topics = admin.listTopics();
20 | topics.names().get().forEach(System.out::println);
21 | }
22 | catch (InterruptedException | ExecutionException e)
23 | {
24 | e.printStackTrace();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/Main.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app;
2 |
3 | import com.howtodoinjava.app.db.OrderDB;
4 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
5 | import com.howtodoinjava.app.kafka.consumer.MessageConsumer;
6 | import com.howtodoinjava.app.kafka.producer.MessageProducer;
7 | import com.howtodoinjava.app.kafka.topic.CreateTopicService;
8 | import com.howtodoinjava.app.service.OrderService;
9 |
10 | public class Main
11 | {
12 | public static void main(String[] args)
13 | {
14 |
15 | CreateTopicService.createTopic(KafkaConstants.TOPIC_CREATE_ORDER);
16 | System.out.println("Topic created");
17 |
18 | try (var producer = new MessageProducer(); var consumer = new MessageConsumer())
19 | {
20 | System.out.println("MessageProducer and MessageConsumer created");
21 |
22 | OrderDB db = new OrderDB(consumer);
23 | db.startDB();
24 |
25 | OrderService orderService = new OrderService(producer);
26 |
27 |
28 | for (int i = 0; i < 10; i++)
29 | {
30 | orderService.createOrder(i, "Product " + i);
31 |
32 | Thread.sleep(1000);
33 | }
34 |
35 | Runtime.getRuntime().addShutdownHook(new Thread(db::stopDB));
36 | }
37 | catch (Exception e)
38 | {
39 | e.printStackTrace();
40 | }
41 |
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/db/OrderDB.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app.db;
2 |
3 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
4 | import com.howtodoinjava.app.kafka.consumer.MessageConsumer;
5 |
6 | import java.util.Collections;
7 | import java.util.Map;
8 | import java.util.concurrent.ConcurrentHashMap;
9 |
10 | public class OrderDB
11 | {
12 | private final ConcurrentHashMap<Long, String> orders = new ConcurrentHashMap<>();
13 |
14 | private final MessageConsumer consumer;
15 |
16 | private Thread consumerThread;
17 |
18 | public OrderDB(MessageConsumer consumer)
19 | {
20 | this.consumer = consumer;
21 | }
22 |
23 | public void startDB()
24 | {
25 | if (consumerThread == null)
26 | {
27 | consumerThread = new Thread(() -> consumer.startPolling(KafkaConstants.TOPIC_CREATE_ORDER, record ->
28 | {
29 | System.out.println("---------------------Message Received---------------------");
30 | System.out.println("Topic: " + record.topic());
31 | System.out.println("Key: " + record.key());
32 | System.out.println("Value: " + record.value());
33 | System.out.println("Partition: " + record.partition());
34 | System.out.println("Offset: " + record.offset());
35 |
36 | orders.put(record.key(), record.value());
37 | }));
38 |
39 | consumerThread.start();
40 | }
41 |
42 | }
43 |
44 | public void startDB(int partition)
45 | {
46 | if (consumerThread == null)
47 | {
48 | consumerThread = new Thread(() -> consumer.startPolling(KafkaConstants.TOPIC_CREATE_ORDER, partition, record ->
49 | {
50 | System.out.println("---------------------Message Received On Partition " + record.partition() + " ---------------------");
51 | System.out.println("Topic: " + record.topic());
52 | System.out.println("Key: " + record.key());
53 | System.out.println("Value: " + record.value());
54 | System.out.println("Offset: " + record.offset());
55 |
56 | orders.put(record.key(), record.value());
57 | }));
58 |
59 | consumerThread.start();
60 | }
61 | }
62 |
63 | public Map<Long, String> getOrders()
64 | {
65 | return Collections.unmodifiableMap(orders);
66 | }
67 |
68 | public void stopDB()
69 | {
70 | consumer.stopPolling();
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/kafka/constants/KafkaConstants.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app.kafka.constants;
2 |
3 | public class KafkaConstants
4 | {
5 |
6 | public static final String BOOTSTRAP_SERVER = "localhost:9092";
7 |
8 | public static final String TOPIC_CREATE_ORDER = "CreateOrder";
9 |
10 | public static final String GROUP_ID = "coders.group";
11 | }
12 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/kafka/consumer/MessageConsumer.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app.kafka.consumer;
2 |
3 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
4 | import org.apache.kafka.clients.consumer.*;
5 | import org.apache.kafka.common.TopicPartition;
6 | import org.apache.kafka.common.errors.WakeupException;
7 | import org.apache.kafka.common.serialization.LongDeserializer;
8 | import org.apache.kafka.common.serialization.LongSerializer;
9 | import org.apache.kafka.common.serialization.StringDeserializer;
10 |
11 | import java.time.Duration;
12 | import java.util.List;
13 | import java.util.Properties;
14 | import java.util.concurrent.atomic.AtomicBoolean;
15 |
16 | public class MessageConsumer implements AutoCloseable
17 | {
18 | private final Consumer<Long, String> consumer;
19 |
20 | private static final AtomicBoolean HAS_MORE_MESSAGES = new AtomicBoolean(true);
21 |
22 | java.util.function.Consumer<ConsumerRecord<Long, String>> action;
23 |
24 | public MessageConsumer()
25 | {
26 | var properties = new Properties();
27 |
28 | properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConstants.BOOTSTRAP_SERVER);
29 | properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
30 | properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
31 | properties.put(ConsumerConfig.GROUP_ID_CONFIG, KafkaConstants.GROUP_ID);
32 | properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, OffsetResetStrategy.EARLIEST.toString());
33 |
34 | this.consumer = new KafkaConsumer<>(properties);
35 | }
36 |
37 | public MessageConsumer(Consumer<Long, String> consumer)
38 | {
39 | this.consumer = consumer;
40 | }
41 |
42 | public void startPolling(String topic, java.util.function.Consumer<ConsumerRecord<Long, String>> action)
43 | {
44 | this.action = action;
45 |
46 | consumer.subscribe(List.of(topic));
47 |
48 | startPolling();
49 | }
50 |
51 | public void startPolling(String topic, int partition, java.util.function.Consumer<ConsumerRecord<Long, String>> action)
52 | {
53 | this.action = action;
54 |
55 | consumer.assign(List.of(new TopicPartition(topic, partition)));
56 |
57 | startPolling();
58 | }
59 |
60 | private void startPolling()
61 | {
62 | try (consumer)
63 | {
64 | while (HAS_MORE_MESSAGES.get())
65 | {
66 | var consumerRecord = consumer.poll(Duration.ofMillis(300));
67 |
68 | consumerRecord.forEach(action);
69 |
70 | // commits the offset
71 | consumer.commitAsync();
72 | }
73 | }
74 | catch (WakeupException ignored)
75 | {
76 |
77 | }
78 | catch (Exception exception)
79 | {
80 | System.out.println(exception.getMessage());
81 | }
82 | }
83 |
84 | public void stopPolling()
85 | {
86 | System.out.println("Stopping Consumer");
87 |
88 | consumer.wakeup();
89 |
90 | HAS_MORE_MESSAGES.set(false);
91 | }
92 |
93 | @Override
94 | public void close() throws Exception
95 | {
96 | consumer.close();
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/kafka/producer/MessageProducer.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app.kafka.producer;
2 |
3 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
4 | import org.apache.kafka.clients.producer.*;
5 | import org.apache.kafka.common.serialization.LongSerializer;
6 | import org.apache.kafka.common.serialization.StringSerializer;
7 |
8 | import java.util.Properties;
9 | import java.util.concurrent.Future;
10 |
11 | public class MessageProducer implements AutoCloseable
12 | {
13 |
14 | private final Producer<Long, String> producer;
15 |
16 | public MessageProducer()
17 | {
18 | var properties = new Properties();
19 | properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConstants.BOOTSTRAP_SERVER);
20 | properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
21 | properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
22 | this.producer = new KafkaProducer<>(properties);
23 | }
24 |
25 | public MessageProducer(Producer<Long, String> producer)
26 | {
27 | this.producer = producer;
28 | }
29 |
30 | public Future<RecordMetadata> notify(String topic, Long orderId, String orderInfo)
31 | {
32 | try
33 | {
34 | var pRecord = new ProducerRecord<>(topic, orderId, orderInfo);
35 | return producer.send(pRecord);
36 | }
37 | catch (Exception e)
38 | {
39 | e.printStackTrace();
40 | throw new RuntimeException(e);
41 | }
42 | }
43 |
44 | @Override
45 | public void close() throws Exception
46 | {
47 | producer.close();
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/kafka/topic/CreateTopicService.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app.kafka.topic;
2 |
3 |
4 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
5 | import org.apache.kafka.clients.admin.Admin;
6 | import org.apache.kafka.clients.admin.AdminClientConfig;
7 | import org.apache.kafka.clients.admin.NewTopic;
8 |
9 | import java.util.Collections;
10 | import java.util.Properties;
11 |
12 | public class CreateTopicService
13 | {
14 | public static void createTopic(String topicName)
15 | {
16 | Properties properties = new Properties();
17 |
18 | properties.put(
19 | AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConstants.BOOTSTRAP_SERVER
20 | );
21 |
22 | try (Admin admin = Admin.create(properties))
23 | {
24 | int partitions = 3;
25 | short replicationFactor = 1;
26 |
27 | NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor);
28 |
29 | admin.createTopics(
30 | Collections.singleton(newTopic)
31 | );
32 | }
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/main/java/com/howtodoinjava/app/service/OrderService.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.app.service;
2 |
3 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
4 | import com.howtodoinjava.app.kafka.producer.MessageProducer;
5 | import org.apache.kafka.clients.producer.RecordMetadata;
6 |
7 | import java.util.concurrent.Future;
8 |
9 | public class OrderService
10 | {
11 |
12 | private final MessageProducer messageProducer;
13 |
14 | public OrderService(MessageProducer messageProducer)
15 | {
16 | this.messageProducer = messageProducer;
17 | }
18 |
19 | public Future<RecordMetadata> createOrder(long orderId, String orderInfo)
20 | {
21 | return this.messageProducer.notify(KafkaConstants.TOPIC_CREATE_ORDER, orderId, orderInfo);
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/kafka-mock-producer-consumer/src/test/java/producer/TestMyKafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package producer;
2 |
3 | import com.howtodoinjava.app.db.OrderDB;
4 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
5 | import com.howtodoinjava.app.kafka.consumer.MessageConsumer;
6 | import org.apache.kafka.clients.consumer.ConsumerRecord;
7 | import org.apache.kafka.clients.consumer.MockConsumer;
8 | import org.apache.kafka.clients.consumer.OffsetResetStrategy;
9 | import org.apache.kafka.common.TopicPartition;
10 | import org.junit.jupiter.api.Assertions;
11 | import org.junit.jupiter.api.Test;
12 |
13 | import java.util.Map;
14 | import java.util.concurrent.TimeUnit;
15 |
16 | public class TestMyKafkaConsumer
17 | {
18 |
19 | @Test
20 | void addOrder_verifyOrderStored()
21 | {
22 | try (var mockConsumer = new MockConsumer<Long, String>(OffsetResetStrategy.EARLIEST); var consumer = new MessageConsumer(mockConsumer))
23 | {
24 | OrderDB orderDB = new OrderDB(consumer);
25 |
26 | orderDB.startDB(1);
27 |
28 | mockConsumer.updateBeginningOffsets(Map.of(new TopicPartition(KafkaConstants.TOPIC_CREATE_ORDER, 1), 0L));
29 |
30 | mockConsumer.schedulePollTask(() -> mockConsumer.addRecord(new ConsumerRecord<>(KafkaConstants.TOPIC_CREATE_ORDER, 1, 0, 1L, "Product 1")));
31 |
32 | TimeUnit.MILLISECONDS.sleep(200); // wait for consumer to poll
33 |
34 | Assertions.assertEquals(1, orderDB.getOrders().size());
35 | }
36 | catch (Exception e)
37 | {
38 | e.printStackTrace();
39 | }
40 | }
41 |
42 | @Test
43 | void startDBOnDifferentPartition_verifyNoOrderStored()
44 | {
45 | try (var mockConsumer = new MockConsumer<Long, String>(OffsetResetStrategy.EARLIEST); var consumer = new MessageConsumer(mockConsumer))
46 | {
47 | OrderDB orderDB = new OrderDB(consumer);
48 |
49 | orderDB.startDB(1);
50 |
51 | mockConsumer.updateBeginningOffsets(Map.of(new TopicPartition(KafkaConstants.TOPIC_CREATE_ORDER, 1), 0L));
52 |
53 | mockConsumer.schedulePollTask(() -> mockConsumer.addRecord(new ConsumerRecord<>(KafkaConstants.TOPIC_CREATE_ORDER, 2, 0, 1L, "Product 1")));
54 |
55 | TimeUnit.MILLISECONDS.sleep(200); // wait for consumer to poll
56 |
57 | Assertions.assertEquals(0, orderDB.getOrders().size());
58 | }
59 | catch (Exception e)
60 | {
61 | e.printStackTrace();
62 | }
63 | }
64 |
65 | @Test
66 | void addOrderWithDifferentTopic_verifyNoOrderStored()
67 | {
68 | try (var mockConsumer = new MockConsumer<Long, String>(OffsetResetStrategy.EARLIEST); var consumer = new MessageConsumer(mockConsumer))
69 | {
70 | OrderDB orderDB = new OrderDB(consumer);
71 |
72 | orderDB.startDB(1);
73 |
74 | mockConsumer.updateBeginningOffsets(Map.of(new TopicPartition(KafkaConstants.TOPIC_CREATE_ORDER, 1), 0L));
75 |
76 | mockConsumer.schedulePollTask(() -> mockConsumer.addRecord(new ConsumerRecord<>(KafkaConstants.TOPIC_CREATE_ORDER + "-1", 2, 0, 1L, "Product 1")));
77 |
78 | TimeUnit.MILLISECONDS.sleep(200); // wait for consumer to poll
79 |
80 | Assertions.assertEquals(0, orderDB.getOrders().size());
81 | }
82 | catch (Exception e)
83 | {
84 | e.printStackTrace();
85 | }
86 | }
87 |
88 | @Test
89 | void testStopOrderDB()
90 | {
91 | try (var mockConsumer = new MockConsumer<Long, String>(OffsetResetStrategy.EARLIEST); var consumer = new MessageConsumer(mockConsumer))
92 | {
93 | OrderDB orderDB = new OrderDB(consumer);
94 |
95 | orderDB.startDB(1);
96 |
97 | orderDB.stopDB();
98 |
99 | TimeUnit.MILLISECONDS.sleep(200); // wait for consumer to poll
100 |
101 | Assertions.assertTrue(mockConsumer.closed());
102 | }
103 | catch (Exception e)
104 | {
105 | e.printStackTrace();
106 | }
107 | }
108 | }
109 |
--------------------------------------------------------------------------------
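Note on the tests above: MessageConsumer polls in a background thread, which is why each test sleeps briefly before asserting. The snippet below is a minimal, self-contained sketch (not part of the repository sources) of how MockConsumer itself behaves when polled directly; the literal topic name "create-order" is only a stand-in for KafkaConstants.TOPIC_CREATE_ORDER, whose actual value is defined elsewhere in this project.

import java.time.Duration;
import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.MockConsumer;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.TopicPartition;

public class MockConsumerPollSketch {

    public static void main(String[] args) {
        MockConsumer<Long, String> consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
        TopicPartition partition = new TopicPartition("create-order", 1);

        // MockConsumer needs an explicit assignment and beginning offsets before it can poll
        consumer.assign(List.of(partition));
        consumer.updateBeginningOffsets(Map.of(partition, 0L));

        // Records added here are handed out by the next poll(); no broker is involved
        consumer.addRecord(new ConsumerRecord<>("create-order", 1, 0, 1L, "Product 1"));

        ConsumerRecords<Long, String> records = consumer.poll(Duration.ofMillis(100));
        records.forEach(record -> System.out.println("key=" + record.key() + ", value=" + record.value()));

        consumer.close();
    }
}
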
/kafka-mock-producer-consumer/src/test/java/producer/TestMyKafkaProducer.java:
--------------------------------------------------------------------------------
1 | package producer;
2 |
3 | import com.howtodoinjava.app.kafka.constants.KafkaConstants;
4 | import com.howtodoinjava.app.kafka.producer.MessageProducer;
5 | import com.howtodoinjava.app.service.OrderService;
6 | import org.apache.kafka.clients.producer.MockProducer;
7 | import org.apache.kafka.common.Cluster;
8 | import org.apache.kafka.common.PartitionInfo;
9 | import org.apache.kafka.common.errors.InvalidTopicException;
10 | import org.apache.kafka.common.serialization.LongSerializer;
11 | import org.apache.kafka.common.serialization.StringSerializer;
12 | import org.junit.jupiter.api.Assertions;
13 | import org.junit.jupiter.api.Test;
14 |
15 | import java.util.ArrayList;
16 | import java.util.Collections;
17 | import java.util.List;
18 |
19 | public class TestMyKafkaProducer
20 | {
21 | @Test
22 | void createOrder_verifyOrderCreated()
23 | {
24 | try (var mockProducer = new MockProducer<>(true, new LongSerializer(), new StringSerializer()); var producer = new MessageProducer(mockProducer))
25 | {
26 | OrderService orderService = new OrderService(producer);
27 |
28 | orderService.createOrder(1, "Product 1");
29 |
30 | Assertions.assertEquals(1, mockProducer.history().size());
31 | }
32 | catch (Exception e)
33 | {
34 | e.printStackTrace();
35 | }
36 | }
37 |
38 | @Test
39 | void createOrder_verifyTheTopic()
40 | {
41 | try (var mockProducer = new MockProducer<>(true, new LongSerializer(), new StringSerializer()); var producer = new MessageProducer(mockProducer))
42 | {
43 | OrderService orderService = new OrderService(producer);
44 |
45 | var metadata = orderService.createOrder(1, "Product 1");
46 |
47 | Assertions.assertEquals(1, mockProducer.history().size());
48 |
49 | Assertions.assertEquals(metadata.get().topic(), mockProducer.history().get(0).topic());
50 | }
51 | catch (Exception e)
52 | {
53 | e.printStackTrace();
54 | }
55 | }
56 |
57 | @Test
58 | void createOrderWithDifferentOrderID_verifySentToDifferentTopicPartition()
59 | {
60 | PartitionInfo partitionInfo0 = new PartitionInfo(KafkaConstants.TOPIC_CREATE_ORDER, 0, null, null, null);
61 | PartitionInfo partitionInfo1 = new PartitionInfo(KafkaConstants.TOPIC_CREATE_ORDER, 1, null, null, null);
62 |
63 | List<PartitionInfo> list = new ArrayList<>();
64 |
65 | list.add(partitionInfo0);
66 | list.add(partitionInfo1);
67 |
68 | Cluster kafkaCluster = new Cluster("id1", new ArrayList<>(), list, Collections.emptySet(), Collections.emptySet());
69 |
70 | try (var mockProducer = new MockProducer<>(kafkaCluster, true, new LongSerializer(), new StringSerializer()); var producer = new MessageProducer(mockProducer))
71 | {
72 | OrderService orderService = new OrderService(producer);
73 |
74 | var metadata1 = orderService.createOrder(1, "Product 1");
75 |
76 | var metadata2 = orderService.createOrder(3, "Product 11");
77 |
78 | Assertions.assertNotEquals(metadata1.get().partition(), metadata2.get().partition());
79 | }
80 | catch (Exception e)
81 | {
82 | e.printStackTrace();
83 | }
84 | }
85 |
86 | @Test
87 | void createOrder_raiseException_verifyException()
88 | {
89 | try (var mockProducer = new MockProducer<>(false, new LongSerializer(), new StringSerializer()); var producer = new MessageProducer(mockProducer))
90 | {
91 | OrderService orderService = new OrderService(producer);
92 |
93 | var metadata = orderService.createOrder(1, "Product 1");
94 |
95 | InvalidTopicException e = new InvalidTopicException();
96 | mockProducer.errorNext(e);
97 |
98 | try
99 | {
100 | metadata.get();
101 | }
102 | catch (Exception exception)
103 | {
104 | Assertions.assertEquals(e, exception.getCause());
105 | }
106 | }
107 | catch (Exception e)
108 | {
109 | e.printStackTrace();
110 | }
111 | }
112 |
113 | @Test
114 | void createOrder_commitTransaction_verifyOrderCreated()
115 | {
116 |
117 | try (var mockProducer = new MockProducer<>(false, new LongSerializer(), new StringSerializer()); var producer = new MessageProducer(mockProducer))
118 | {
119 | OrderService orderService = new OrderService(producer);
120 |
121 | mockProducer.initTransactions();
122 |
123 | mockProducer.beginTransaction();
124 |
125 | var metadata = orderService.createOrder(1, "Product 1");
126 |
127 | Assertions.assertFalse(metadata.isDone());
128 |
129 | Assertions.assertEquals(mockProducer.history().size(), 0);
130 |
131 | mockProducer.commitTransaction();
132 |
133 | Assertions.assertEquals(mockProducer.history().size(), 1);
134 |
135 | Assertions.assertTrue(metadata.isDone());
136 | }
137 | catch (Exception e)
138 | {
139 | e.printStackTrace();
140 | }
141 | }
142 | }
143 |
--------------------------------------------------------------------------------
/kafka-using-docker-compose/docker-compose-single-node.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '3'
3 | services:
4 | zookeeper:
5 | image: confluentinc/cp-zookeeper:latest
6 | container_name: zookeeper
7 | environment:
8 | ZOOKEEPER_SERVER_ID: 1
9 | ZOOKEEPER_CLIENT_PORT: 2181
10 | ZOOKEEPER_TICK_TIME: 2000
11 |
12 | broker:
13 | image: confluentinc/cp-kafka:latest
14 | container_name: broker
15 | ports:
16 | - "19092:9092"
17 | depends_on:
18 | - zookeeper
19 | environment:
20 | KAFKA_BROKER_ID: 1
21 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
22 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
23 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
24 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:19092
25 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
26 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
27 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
--------------------------------------------------------------------------------
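A minimal smoke-test sketch (not in the repository sources) for the single-node stack above, assuming it has been started with `docker compose -f docker-compose-single-node.yaml up -d`. The producer connects through the advertised PLAINTEXT_HOST listener on localhost:19092; the topic name "demo-topic" is an arbitrary example and is created on first use because KAFKA_AUTO_CREATE_TOPICS_ENABLE is "true".

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

public class SingleNodeSmokeTest {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // The broker advertises PLAINTEXT_HOST://localhost:19092 for clients running on the host machine
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Blocks until the broker acknowledges the write
            RecordMetadata metadata = producer
                    .send(new ProducerRecord<>("demo-topic", "key-1", "hello kafka"))
                    .get();
            System.out.println("Written to partition " + metadata.partition() + " at offset " + metadata.offset());
        }
    }
}
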
/kafka-using-docker-compose/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '3'
3 | services:
4 | zookeeper-1:
5 | image: confluentinc/cp-zookeeper:latest
6 | container_name: zookeeper-1
7 | environment:
8 | ZOOKEEPER_SERVER_ID: 1
9 | ZOOKEEPER_CLIENT_PORT: 2181
10 | ZOOKEEPER_TICK_TIME: 2000
11 | ports:
12 | - "22181:2181"
13 |
14 | zookeeper-2:
15 | image: confluentinc/cp-zookeeper:latest
16 | container_name: zookeeper-2
17 | environment:
18 | ZOOKEEPER_SERVER_ID: 2
19 | ZOOKEEPER_CLIENT_PORT: 2181
20 | ZOOKEEPER_TICK_TIME: 2000
21 | ports:
22 | - "32181:2181"
23 |
24 | broker-1:
25 | image: confluentinc/cp-kafka:latest
26 | container_name: broker-1
27 | ports:
28 | - "19092:9092"
29 | depends_on:
30 | - zookeeper-1
31 | - zookeeper-2
32 | environment:
33 | KAFKA_BROKER_ID: 1
34 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
35 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper-1:2181,zookeeper-2:2181'
36 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
37 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker-1:9092,PLAINTEXT_HOST://localhost:19092
38 | KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
39 |
40 | broker-2:
41 | image: confluentinc/cp-kafka:latest
42 | container_name: broker-2
43 | ports:
44 | - "19093:9092"
45 | depends_on:
46 | - zookeeper-1
47 | - zookeeper-2
48 | environment:
49 | KAFKA_BROKER_ID: 2
50 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
51 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper-1:2181,zookeeper-2:2181'
52 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
53 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker-2:9093,PLAINTEXT_HOST://localhost:19093
54 | KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
55 |
56 | broker-3:
57 | image: confluentinc/cp-kafka:latest
58 | container_name: broker-3
59 | ports:
60 | - "19094:9092"
61 | depends_on:
62 | - zookeeper-1
63 | - zookeeper-2
64 | environment:
65 | KAFKA_BROKER_ID: 3
66 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
67 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper-1:2181,zookeeper-2:2181'
68 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
69 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker-3:9094,PLAINTEXT_HOST://localhost:19094
70 | KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
--------------------------------------------------------------------------------
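A minimal AdminClient sketch (not in the repository sources) against the three-broker cluster above, assuming the stack is up and the advertised listeners are reachable from the host. The topic name "orders" and its sizing are arbitrary examples; replication factor 3 places one replica of each partition on every broker.

import java.util.Properties;
import java.util.Set;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class ClusterTopicSetup {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Any one advertised host listener is enough as the bootstrap entry point
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");

        try (AdminClient admin = AdminClient.create(props)) {
            // 3 partitions, replication factor 3
            admin.createTopics(Set.of(new NewTopic("orders", 3, (short) 3))).all().get();

            Set<String> topics = admin.listTopics().names().get();
            System.out.println("Topics in the cluster: " + topics);
        }
    }
}
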
/spring-boot-kafka-app/.gitignore:
--------------------------------------------------------------------------------
1 | HELP.md
2 | target/
3 | !.mvn/wrapper/maven-wrapper.jar
4 | !**/src/main/**
5 | !**/src/test/**
6 |
7 | ### STS ###
8 | .apt_generated
9 | .classpath
10 | .factorypath
11 | .project
12 | .settings
13 | .springBeans
14 | .sts4-cache
15 |
16 | ### IntelliJ IDEA ###
17 | .idea
18 | *.iws
19 | *.iml
20 | *.ipr
21 |
22 | ### NetBeans ###
23 | /nbproject/private/
24 | /nbbuild/
25 | /dist/
26 | /nbdist/
27 | /.nb-gradle/
28 | build/
29 |
30 | ### VS Code ###
31 | .vscode/
32 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/.mvn/wrapper/MavenWrapperDownloader.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2007-present the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | import java.net.*;
17 | import java.io.*;
18 | import java.nio.channels.*;
19 | import java.util.Properties;
20 |
21 | public class MavenWrapperDownloader {
22 |
23 | private static final String WRAPPER_VERSION = "0.5.6";
24 | /**
25 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
26 | */
27 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
28 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
29 |
30 | /**
31 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
32 | * use instead of the default one.
33 | */
34 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
35 | ".mvn/wrapper/maven-wrapper.properties";
36 |
37 | /**
38 | * Path where the maven-wrapper.jar will be saved to.
39 | */
40 | private static final String MAVEN_WRAPPER_JAR_PATH =
41 | ".mvn/wrapper/maven-wrapper.jar";
42 |
43 | /**
44 | * Name of the property which should be used to override the default download url for the wrapper.
45 | */
46 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
47 |
48 | public static void main(String args[]) {
49 | System.out.println("- Downloader started");
50 | File baseDirectory = new File(args[0]);
51 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
52 |
53 | // If the maven-wrapper.properties exists, read it and check if it contains a custom
54 | // wrapperUrl parameter.
55 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
56 | String url = DEFAULT_DOWNLOAD_URL;
57 | if(mavenWrapperPropertyFile.exists()) {
58 | FileInputStream mavenWrapperPropertyFileInputStream = null;
59 | try {
60 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
61 | Properties mavenWrapperProperties = new Properties();
62 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
63 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
64 | } catch (IOException e) {
65 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
66 | } finally {
67 | try {
68 | if(mavenWrapperPropertyFileInputStream != null) {
69 | mavenWrapperPropertyFileInputStream.close();
70 | }
71 | } catch (IOException e) {
72 | // Ignore ...
73 | }
74 | }
75 | }
76 | System.out.println("- Downloading from: " + url);
77 |
78 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
79 | if(!outputFile.getParentFile().exists()) {
80 | if(!outputFile.getParentFile().mkdirs()) {
81 | System.out.println(
82 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
83 | }
84 | }
85 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
86 | try {
87 | downloadFileFromURL(url, outputFile);
88 | System.out.println("Done");
89 | System.exit(0);
90 | } catch (Throwable e) {
91 | System.out.println("- Error downloading");
92 | e.printStackTrace();
93 | System.exit(1);
94 | }
95 | }
96 |
97 | private static void downloadFileFromURL(String urlString, File destination) throws Exception {
98 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
99 | String username = System.getenv("MVNW_USERNAME");
100 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
101 | Authenticator.setDefault(new Authenticator() {
102 | @Override
103 | protected PasswordAuthentication getPasswordAuthentication() {
104 | return new PasswordAuthentication(username, password);
105 | }
106 | });
107 | }
108 | URL website = new URL(urlString);
109 | ReadableByteChannel rbc;
110 | rbc = Channels.newChannel(website.openStream());
111 | FileOutputStream fos = new FileOutputStream(destination);
112 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
113 | fos.close();
114 | rbc.close();
115 | }
116 |
117 | }
118 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/.mvn/wrapper/maven-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lokeshgupta1981/Kafka-Tutorials/13e49ce71a991305580dd1b506b4e905fa5de24e/spring-boot-kafka-app/.mvn/wrapper/maven-wrapper.jar
--------------------------------------------------------------------------------
/spring-boot-kafka-app/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
3 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/mvnw:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # ----------------------------------------------------------------------------
3 | # Licensed to the Apache Software Foundation (ASF) under one
4 | # or more contributor license agreements. See the NOTICE file
5 | # distributed with this work for additional information
6 | # regarding copyright ownership. The ASF licenses this file
7 | # to you under the Apache License, Version 2.0 (the
8 | # "License"); you may not use this file except in compliance
9 | # with the License. You may obtain a copy of the License at
10 | #
11 | # https://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing,
14 | # software distributed under the License is distributed on an
15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 | # KIND, either express or implied. See the License for the
17 | # specific language governing permissions and limitations
18 | # under the License.
19 | # ----------------------------------------------------------------------------
20 |
21 | # ----------------------------------------------------------------------------
22 | # Maven Start Up Batch script
23 | #
24 | # Required ENV vars:
25 | # ------------------
26 | # JAVA_HOME - location of a JDK home dir
27 | #
28 | # Optional ENV vars
29 | # -----------------
30 | # M2_HOME - location of maven2's installed home dir
31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven
32 | # e.g. to debug Maven itself, use
33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files
35 | # ----------------------------------------------------------------------------
36 |
37 | if [ -z "$MAVEN_SKIP_RC" ] ; then
38 |
39 | if [ -f /etc/mavenrc ] ; then
40 | . /etc/mavenrc
41 | fi
42 |
43 | if [ -f "$HOME/.mavenrc" ] ; then
44 | . "$HOME/.mavenrc"
45 | fi
46 |
47 | fi
48 |
49 | # OS specific support. $var _must_ be set to either true or false.
50 | cygwin=false;
51 | darwin=false;
52 | mingw=false
53 | case "`uname`" in
54 | CYGWIN*) cygwin=true ;;
55 | MINGW*) mingw=true;;
56 | Darwin*) darwin=true
57 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
58 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
59 | if [ -z "$JAVA_HOME" ]; then
60 | if [ -x "/usr/libexec/java_home" ]; then
61 | export JAVA_HOME="`/usr/libexec/java_home`"
62 | else
63 | export JAVA_HOME="/Library/Java/Home"
64 | fi
65 | fi
66 | ;;
67 | esac
68 |
69 | if [ -z "$JAVA_HOME" ] ; then
70 | if [ -r /etc/gentoo-release ] ; then
71 | JAVA_HOME=`java-config --jre-home`
72 | fi
73 | fi
74 |
75 | if [ -z "$M2_HOME" ] ; then
76 | ## resolve links - $0 may be a link to maven's home
77 | PRG="$0"
78 |
79 | # need this for relative symlinks
80 | while [ -h "$PRG" ] ; do
81 | ls=`ls -ld "$PRG"`
82 | link=`expr "$ls" : '.*-> \(.*\)$'`
83 | if expr "$link" : '/.*' > /dev/null; then
84 | PRG="$link"
85 | else
86 | PRG="`dirname "$PRG"`/$link"
87 | fi
88 | done
89 |
90 | saveddir=`pwd`
91 |
92 | M2_HOME=`dirname "$PRG"`/..
93 |
94 | # make it fully qualified
95 | M2_HOME=`cd "$M2_HOME" && pwd`
96 |
97 | cd "$saveddir"
98 | # echo Using m2 at $M2_HOME
99 | fi
100 |
101 | # For Cygwin, ensure paths are in UNIX format before anything is touched
102 | if $cygwin ; then
103 | [ -n "$M2_HOME" ] &&
104 | M2_HOME=`cygpath --unix "$M2_HOME"`
105 | [ -n "$JAVA_HOME" ] &&
106 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
107 | [ -n "$CLASSPATH" ] &&
108 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
109 | fi
110 |
111 | # For Mingw, ensure paths are in UNIX format before anything is touched
112 | if $mingw ; then
113 | [ -n "$M2_HOME" ] &&
114 | M2_HOME="`(cd "$M2_HOME"; pwd)`"
115 | [ -n "$JAVA_HOME" ] &&
116 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
117 | fi
118 |
119 | if [ -z "$JAVA_HOME" ]; then
120 | javaExecutable="`which javac`"
121 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
122 | # readlink(1) is not available as standard on Solaris 10.
123 | readLink=`which readlink`
124 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
125 | if $darwin ; then
126 | javaHome="`dirname \"$javaExecutable\"`"
127 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
128 | else
129 | javaExecutable="`readlink -f \"$javaExecutable\"`"
130 | fi
131 | javaHome="`dirname \"$javaExecutable\"`"
132 | javaHome=`expr "$javaHome" : '\(.*\)/bin'`
133 | JAVA_HOME="$javaHome"
134 | export JAVA_HOME
135 | fi
136 | fi
137 | fi
138 |
139 | if [ -z "$JAVACMD" ] ; then
140 | if [ -n "$JAVA_HOME" ] ; then
141 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
142 | # IBM's JDK on AIX uses strange locations for the executables
143 | JAVACMD="$JAVA_HOME/jre/sh/java"
144 | else
145 | JAVACMD="$JAVA_HOME/bin/java"
146 | fi
147 | else
148 | JAVACMD="`which java`"
149 | fi
150 | fi
151 |
152 | if [ ! -x "$JAVACMD" ] ; then
153 | echo "Error: JAVA_HOME is not defined correctly." >&2
154 | echo " We cannot execute $JAVACMD" >&2
155 | exit 1
156 | fi
157 |
158 | if [ -z "$JAVA_HOME" ] ; then
159 | echo "Warning: JAVA_HOME environment variable is not set."
160 | fi
161 |
162 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
163 |
164 | # traverses directory structure from process work directory to filesystem root
165 | # first directory with .mvn subdirectory is considered project base directory
166 | find_maven_basedir() {
167 |
168 | if [ -z "$1" ]
169 | then
170 | echo "Path not specified to find_maven_basedir"
171 | return 1
172 | fi
173 |
174 | basedir="$1"
175 | wdir="$1"
176 | while [ "$wdir" != '/' ] ; do
177 | if [ -d "$wdir"/.mvn ] ; then
178 | basedir=$wdir
179 | break
180 | fi
181 | # workaround for JBEAP-8937 (on Solaris 10/Sparc)
182 | if [ -d "${wdir}" ]; then
183 | wdir=`cd "$wdir/.."; pwd`
184 | fi
185 | # end of workaround
186 | done
187 | echo "${basedir}"
188 | }
189 |
190 | # concatenates all lines of a file
191 | concat_lines() {
192 | if [ -f "$1" ]; then
193 | echo "$(tr -s '\n' ' ' < "$1")"
194 | fi
195 | }
196 |
197 | BASE_DIR=`find_maven_basedir "$(pwd)"`
198 | if [ -z "$BASE_DIR" ]; then
199 | exit 1;
200 | fi
201 |
202 | ##########################################################################################
203 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
204 | # This allows using the maven wrapper in projects that prohibit checking in binary data.
205 | ##########################################################################################
206 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
207 | if [ "$MVNW_VERBOSE" = true ]; then
208 | echo "Found .mvn/wrapper/maven-wrapper.jar"
209 | fi
210 | else
211 | if [ "$MVNW_VERBOSE" = true ]; then
212 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
213 | fi
214 | if [ -n "$MVNW_REPOURL" ]; then
215 | jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
216 | else
217 | jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
218 | fi
219 | while IFS="=" read key value; do
220 | case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
221 | esac
222 | done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
223 | if [ "$MVNW_VERBOSE" = true ]; then
224 | echo "Downloading from: $jarUrl"
225 | fi
226 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
227 | if $cygwin; then
228 | wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
229 | fi
230 |
231 | if command -v wget > /dev/null; then
232 | if [ "$MVNW_VERBOSE" = true ]; then
233 | echo "Found wget ... using wget"
234 | fi
235 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
236 | wget "$jarUrl" -O "$wrapperJarPath"
237 | else
238 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
239 | fi
240 | elif command -v curl > /dev/null; then
241 | if [ "$MVNW_VERBOSE" = true ]; then
242 | echo "Found curl ... using curl"
243 | fi
244 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
245 | curl -o "$wrapperJarPath" "$jarUrl" -f
246 | else
247 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
248 | fi
249 |
250 | else
251 | if [ "$MVNW_VERBOSE" = true ]; then
252 | echo "Falling back to using Java to download"
253 | fi
254 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
255 | # For Cygwin, switch paths to Windows format before running javac
256 | if $cygwin; then
257 | javaClass=`cygpath --path --windows "$javaClass"`
258 | fi
259 | if [ -e "$javaClass" ]; then
260 | if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
261 | if [ "$MVNW_VERBOSE" = true ]; then
262 | echo " - Compiling MavenWrapperDownloader.java ..."
263 | fi
264 | # Compiling the Java class
265 | ("$JAVA_HOME/bin/javac" "$javaClass")
266 | fi
267 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
268 | # Running the downloader
269 | if [ "$MVNW_VERBOSE" = true ]; then
270 | echo " - Running MavenWrapperDownloader.java ..."
271 | fi
272 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
273 | fi
274 | fi
275 | fi
276 | fi
277 | ##########################################################################################
278 | # End of extension
279 | ##########################################################################################
280 |
281 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
282 | if [ "$MVNW_VERBOSE" = true ]; then
283 | echo $MAVEN_PROJECTBASEDIR
284 | fi
285 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
286 |
287 | # For Cygwin, switch paths to Windows format before running java
288 | if $cygwin; then
289 | [ -n "$M2_HOME" ] &&
290 | M2_HOME=`cygpath --path --windows "$M2_HOME"`
291 | [ -n "$JAVA_HOME" ] &&
292 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
293 | [ -n "$CLASSPATH" ] &&
294 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
295 | [ -n "$MAVEN_PROJECTBASEDIR" ] &&
296 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
297 | fi
298 |
299 | # Provide a "standardized" way to retrieve the CLI args that will
300 | # work with both Windows and non-Windows executions.
301 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
302 | export MAVEN_CMD_LINE_ARGS
303 |
304 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
305 |
306 | exec "$JAVACMD" \
307 | $MAVEN_OPTS \
308 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
309 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
310 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
311 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/mvnw.cmd:
--------------------------------------------------------------------------------
1 | @REM ----------------------------------------------------------------------------
2 | @REM Licensed to the Apache Software Foundation (ASF) under one
3 | @REM or more contributor license agreements. See the NOTICE file
4 | @REM distributed with this work for additional information
5 | @REM regarding copyright ownership. The ASF licenses this file
6 | @REM to you under the Apache License, Version 2.0 (the
7 | @REM "License"); you may not use this file except in compliance
8 | @REM with the License. You may obtain a copy of the License at
9 | @REM
10 | @REM https://www.apache.org/licenses/LICENSE-2.0
11 | @REM
12 | @REM Unless required by applicable law or agreed to in writing,
13 | @REM software distributed under the License is distributed on an
14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | @REM KIND, either express or implied. See the License for the
16 | @REM specific language governing permissions and limitations
17 | @REM under the License.
18 | @REM ----------------------------------------------------------------------------
19 |
20 | @REM ----------------------------------------------------------------------------
21 | @REM Maven Start Up Batch script
22 | @REM
23 | @REM Required ENV vars:
24 | @REM JAVA_HOME - location of a JDK home dir
25 | @REM
26 | @REM Optional ENV vars
27 | @REM M2_HOME - location of maven2's installed home dir
28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
31 | @REM e.g. to debug Maven itself, use
32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
34 | @REM ----------------------------------------------------------------------------
35 |
36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
37 | @echo off
38 | @REM set title of command window
39 | title %0
40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
42 |
43 | @REM set %HOME% to equivalent of $HOME
44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
45 |
46 | @REM Execute a user defined script before this one
47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending
49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
51 | :skipRcPre
52 |
53 | @setlocal
54 |
55 | set ERROR_CODE=0
56 |
57 | @REM To isolate internal variables from possible post scripts, we use another setlocal
58 | @setlocal
59 |
60 | @REM ==== START VALIDATION ====
61 | if not "%JAVA_HOME%" == "" goto OkJHome
62 |
63 | echo.
64 | echo Error: JAVA_HOME not found in your environment. >&2
65 | echo Please set the JAVA_HOME variable in your environment to match the >&2
66 | echo location of your Java installation. >&2
67 | echo.
68 | goto error
69 |
70 | :OkJHome
71 | if exist "%JAVA_HOME%\bin\java.exe" goto init
72 |
73 | echo.
74 | echo Error: JAVA_HOME is set to an invalid directory. >&2
75 | echo JAVA_HOME = "%JAVA_HOME%" >&2
76 | echo Please set the JAVA_HOME variable in your environment to match the >&2
77 | echo location of your Java installation. >&2
78 | echo.
79 | goto error
80 |
81 | @REM ==== END VALIDATION ====
82 |
83 | :init
84 |
85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
86 | @REM Fallback to current working directory if not found.
87 |
88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
90 |
91 | set EXEC_DIR=%CD%
92 | set WDIR=%EXEC_DIR%
93 | :findBaseDir
94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound
95 | cd ..
96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound
97 | set WDIR=%CD%
98 | goto findBaseDir
99 |
100 | :baseDirFound
101 | set MAVEN_PROJECTBASEDIR=%WDIR%
102 | cd "%EXEC_DIR%"
103 | goto endDetectBaseDir
104 |
105 | :baseDirNotFound
106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
107 | cd "%EXEC_DIR%"
108 |
109 | :endDetectBaseDir
110 |
111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
112 |
113 | @setlocal EnableExtensions EnableDelayedExpansion
114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
116 |
117 | :endReadAdditionalConfig
118 |
119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
122 |
123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
124 |
125 | FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
127 | )
128 |
129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data.
131 | if exist %WRAPPER_JAR% (
132 | if "%MVNW_VERBOSE%" == "true" (
133 | echo Found %WRAPPER_JAR%
134 | )
135 | ) else (
136 | if not "%MVNW_REPOURL%" == "" (
137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
138 | )
139 | if "%MVNW_VERBOSE%" == "true" (
140 | echo Couldn't find %WRAPPER_JAR%, downloading it ...
141 | echo Downloading from: %DOWNLOAD_URL%
142 | )
143 |
144 | powershell -Command "&{"^
145 | "$webclient = new-object System.Net.WebClient;"^
146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
148 | "}"^
149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
150 | "}"
151 | if "%MVNW_VERBOSE%" == "true" (
152 | echo Finished downloading %WRAPPER_JAR%
153 | )
154 | )
155 | @REM End of extension
156 |
157 | @REM Provide a "standardized" way to retrieve the CLI args that will
158 | @REM work with both Windows and non-Windows executions.
159 | set MAVEN_CMD_LINE_ARGS=%*
160 |
161 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
162 | if ERRORLEVEL 1 goto error
163 | goto end
164 |
165 | :error
166 | set ERROR_CODE=1
167 |
168 | :end
169 | @endlocal & set ERROR_CODE=%ERROR_CODE%
170 |
171 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
172 | @REM check for post script, once with legacy .bat ending and once with .cmd ending
173 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
174 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
175 | :skipRcPost
176 |
177 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
178 | if "%MAVEN_BATCH_PAUSE%" == "on" pause
179 |
180 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
181 |
182 | exit /B %ERROR_CODE%
183 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <parent>
6 |         <groupId>org.springframework.boot</groupId>
7 |         <artifactId>spring-boot-starter-parent</artifactId>
8 |         <version>3.3.1</version>
9 |     </parent>
10 |
11 |     <groupId>com.howtodoinjava</groupId>
12 |     <artifactId>spring-boot-kafka-app</artifactId>
13 |     <version>0.0.1-SNAPSHOT</version>
14 |     <name>spring-boot-kafka-app</name>
15 |     <description>Demo project for Spring Boot</description>
16 |
17 |     <properties>
18 |         <java.version>21</java.version>
19 |     </properties>
20 |
21 |     <dependencies>
22 |         <dependency>
23 |             <groupId>org.springframework.boot</groupId>
24 |             <artifactId>spring-boot-starter-web</artifactId>
25 |         </dependency>
26 |         <dependency>
27 |             <groupId>org.springframework.kafka</groupId>
28 |             <artifactId>spring-kafka</artifactId>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>org.springframework.boot</groupId>
32 |             <artifactId>spring-boot-starter-test</artifactId>
33 |             <scope>test</scope>
34 |             <exclusions>
35 |                 <exclusion>
36 |                     <groupId>org.junit.vintage</groupId>
37 |                     <artifactId>junit-vintage-engine</artifactId>
38 |                 </exclusion>
39 |             </exclusions>
40 |         </dependency>
41 |         <dependency>
42 |             <groupId>org.springframework.kafka</groupId>
43 |             <artifactId>spring-kafka-test</artifactId>
44 |             <scope>test</scope>
45 |         </dependency>
46 |     </dependencies>
47 |
48 |     <build>
49 |         <plugins>
50 |             <plugin>
51 |                 <groupId>org.springframework.boot</groupId>
52 |                 <artifactId>spring-boot-maven-plugin</artifactId>
53 |             </plugin>
54 |         </plugins>
55 |     </build>
56 |
57 | </project>
--------------------------------------------------------------------------------
/spring-boot-kafka-app/src/main/java/com/howtodoinjava/kafka/demo/SpringBootKafkaAppApplication.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 |
6 | @SpringBootApplication
7 | public class SpringBootKafkaAppApplication {
8 |
9 | public static void main(String[] args) {
10 | SpringApplication.run(SpringBootKafkaAppApplication.class, args);
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/src/main/java/com/howtodoinjava/kafka/demo/common/AppConstants.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.common;
2 |
3 | public class AppConstants
4 | {
5 | public static final String TOPIC_NAME_TEST = "test";
6 | public static final String TOPIC_NAME_USER_LOG = "users-log";
7 | public static final String GROUP_ID = "group_id";
8 | }
9 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/src/main/java/com/howtodoinjava/kafka/demo/controller/KafkaProducerController.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.controller;
2 |
3 | import org.springframework.beans.factory.annotation.Autowired;
4 | import org.springframework.web.bind.annotation.PostMapping;
5 | import org.springframework.web.bind.annotation.RequestMapping;
6 | import org.springframework.web.bind.annotation.RequestParam;
7 | import org.springframework.web.bind.annotation.RestController;
8 |
9 | import com.howtodoinjava.kafka.demo.model.User;
10 | import com.howtodoinjava.kafka.demo.service.KafKaProducerService;
11 |
12 | @RestController
13 | @RequestMapping(value = "/kafka")
14 | public class KafkaProducerController {
15 | private final KafKaProducerService producerService;
16 |
17 | @Autowired
18 | public KafkaProducerController(KafKaProducerService producerService) {
19 | this.producerService = producerService;
20 | }
21 |
22 | @PostMapping(value = "/publish")
23 | public void sendMessageToKafkaTopic(@RequestParam("message") String message) {
24 | this.producerService.sendMessage(message);
25 | }
26 |
27 | @PostMapping(value = "/createUser")
28 | public void sendMessageToKafkaTopic(
29 | @RequestParam("userId") long userId,
30 | @RequestParam("firstName") String firstName,
31 | @RequestParam("lastName") String lastName) {
32 |
33 | User user = new User();
34 | user.setUserId(userId);
35 | user.setFirstName(firstName);
36 | user.setLastName(lastName);
37 |
38 | this.producerService.saveCreateUserLog(user);
39 | }
40 | }
--------------------------------------------------------------------------------
/spring-boot-kafka-app/src/main/java/com/howtodoinjava/kafka/demo/model/User.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.model;
2 |
3 | public class User
4 | {
5 | private long userId;
6 | private String firstName;
7 | private String lastName;
8 |
9 | public long getUserId() {
10 | return userId;
11 | }
12 | public void setUserId(long userId) {
13 | this.userId = userId;
14 | }
15 | public String getFirstName() {
16 | return firstName;
17 | }
18 | public void setFirstName(String firstName) {
19 | this.firstName = firstName;
20 | }
21 | public String getLastName() {
22 | return lastName;
23 | }
24 | public void setLastName(String lastName) {
25 | this.lastName = lastName;
26 | }
27 |
28 | @Override
29 | public String toString() {
30 | return "User [userId=" + userId + ", firstName=" + firstName + ", lastName=" + lastName + "]";
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/src/main/java/com/howtodoinjava/kafka/demo/service/KafKaConsumerService.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.service;
2 |
3 | import org.slf4j.Logger;
4 | import org.slf4j.LoggerFactory;
5 | import org.springframework.kafka.annotation.KafkaListener;
6 | import org.springframework.stereotype.Service;
7 |
8 | import com.howtodoinjava.kafka.demo.common.AppConstants;
9 | import com.howtodoinjava.kafka.demo.model.User;
10 |
11 | @Service
12 | public class KafKaConsumerService
13 | {
14 | private final Logger logger
15 | = LoggerFactory.getLogger(KafKaConsumerService.class);
16 |
17 | @KafkaListener(topics = AppConstants.TOPIC_NAME_TEST, groupId = AppConstants.GROUP_ID)
18 | public void consume(String message) {
19 | logger.info(String.format("Message recieved -> %s", message));
20 | }
21 |
22 | @KafkaListener(topics = AppConstants.TOPIC_NAME_USER_LOG, groupId = AppConstants.GROUP_ID)
23 | public void consume(User user) {
24 | logger.info(String.format("User created -> %s", user));
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/src/main/java/com/howtodoinjava/kafka/demo/service/KafKaProducerService.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.service;
2 |
3 | import org.slf4j.Logger;
4 | import org.slf4j.LoggerFactory;
5 | import org.springframework.beans.factory.annotation.Autowired;
6 | import org.springframework.kafka.core.KafkaTemplate;
7 | import org.springframework.stereotype.Service;
8 |
9 | import com.howtodoinjava.kafka.demo.common.AppConstants;
10 | import com.howtodoinjava.kafka.demo.model.User;
11 |
12 | @Service
13 | public class KafKaProducerService
14 | {
15 | private static final Logger logger =
16 | LoggerFactory.getLogger(KafKaProducerService.class);
17 |
18 | @Autowired
19 | private KafkaTemplate<String, Object> kafkaTemplate;
20 |
21 | public void sendMessage(String message)
22 | {
23 | logger.info(String.format("Message sent -> %s", message));
24 | this.kafkaTemplate.send(AppConstants.TOPIC_NAME_TEST, message);
25 | }
26 |
27 | public void saveCreateUserLog(User user)
28 | {
29 | logger.info(String.format("User created -> %s", user));
30 | this.kafkaTemplate.send(AppConstants.TOPIC_NAME_USER_LOG, user);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/spring-boot-kafka-app/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | server.port=9000
2 |
3 | spring.kafka.consumer.bootstrap-servers: localhost:9092
4 | spring.kafka.consumer.group-id: group-id
5 | spring.kafka.consumer.auto-offset-reset: earliest
6 | spring.kafka.consumer.key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
7 | spring.kafka.consumer.value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
8 | spring.kafka.consumer.properties.spring.json.trusted.packages=*
9 |
10 | spring.kafka.producer.bootstrap-servers: localhost:9092
11 | spring.kafka.producer.key-serializer: org.apache.kafka.common.serialization.StringSerializer
12 | spring.kafka.producer.value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
--------------------------------------------------------------------------------
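A small client-side sketch (not in the repository sources) for exercising the application above once it is running on port 9000 with a broker available at localhost:9092. It calls the /kafka/publish endpoint of KafkaProducerController; the message text is an arbitrary example, and the listener in KafKaConsumerService should log it shortly afterwards.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PublishMessageClient {

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // KafkaProducerController reads the payload from the "message" request parameter
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:9000/kafka/publish?message=hello-spring-kafka"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();

        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println("HTTP status: " + response.statusCode());
    }
}
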
/spring-boot-kafka-app/src/test/java/com/howtodoinjava/kafka/demo/SpringBootKafkaAppApplicationTests.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo;
2 |
3 | import org.junit.jupiter.api.Test;
4 | import org.springframework.boot.test.context.SpringBootTest;
5 |
6 | @SpringBootTest
7 | class SpringBootKafkaAppApplicationTests {
8 |
9 | @Test
10 | void contextLoads() {
11 | }
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <groupId>com.howtodoinjava</groupId>
6 |     <artifactId>spring-kafka-java-config</artifactId>
7 |     <version>0.0.1-SNAPSHOT</version>
8 |     <parent>
9 |         <groupId>org.springframework.boot</groupId>
10 |         <artifactId>spring-boot-starter-parent</artifactId>
11 |         <version>3.3.1</version>
12 |     </parent>
13 |
14 |     <properties>
15 |         <java.version>21</java.version>
16 |     </properties>
17 |
18 |     <dependencies>
19 |         <dependency>
20 |             <groupId>org.springframework.boot</groupId>
21 |             <artifactId>spring-boot-starter-web</artifactId>
22 |         </dependency>
23 |         <dependency>
24 |             <groupId>org.springframework.kafka</groupId>
25 |             <artifactId>spring-kafka</artifactId>
26 |         </dependency>
27 |         <dependency>
28 |             <groupId>org.springframework.boot</groupId>
29 |             <artifactId>spring-boot-starter-test</artifactId>
30 |             <scope>test</scope>
31 |             <exclusions>
32 |                 <exclusion>
33 |                     <groupId>org.junit.vintage</groupId>
34 |                     <artifactId>junit-vintage-engine</artifactId>
35 |                 </exclusion>
36 |             </exclusions>
37 |         </dependency>
38 |         <dependency>
39 |             <groupId>org.springframework.kafka</groupId>
40 |             <artifactId>spring-kafka-test</artifactId>
41 |             <scope>test</scope>
42 |         </dependency>
43 |     </dependencies>
44 |
45 |     <build>
46 |         <plugins>
47 |             <plugin>
48 |                 <groupId>org.springframework.boot</groupId>
49 |                 <artifactId>spring-boot-maven-plugin</artifactId>
50 |             </plugin>
51 |             <plugin>
52 |                 <groupId>org.apache.maven.plugins</groupId>
53 |                 <artifactId>maven-compiler-plugin</artifactId>
54 |                 <configuration>
55 |                     <source>21</source>
56 |                     <target>21</target>
57 |                 </configuration>
58 |             </plugin>
59 |         </plugins>
60 |     </build>
61 |
62 | </project>
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/SpringBootKafkaAppApplication.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 | import org.springframework.kafka.annotation.EnableKafka;
6 |
7 | @EnableKafka
8 | @SpringBootApplication
9 | public class SpringBootKafkaAppApplication {
10 |
11 | public static void main(String[] args)
12 | {
13 | SpringApplication.run(SpringBootKafkaAppApplication.class, args);
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/config/KafkaConsumerConfig.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.config;
2 |
3 | import java.util.HashMap;
4 | import java.util.Map;
5 |
6 | import org.apache.kafka.clients.consumer.ConsumerConfig;
7 | import org.apache.kafka.common.serialization.StringDeserializer;
8 | import org.springframework.beans.factory.annotation.Value;
9 | import org.springframework.context.annotation.Bean;
10 | import org.springframework.context.annotation.Configuration;
11 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
12 | import org.springframework.kafka.core.ConsumerFactory;
13 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
14 | import org.springframework.kafka.support.serializer.JsonDeserializer;
15 |
16 | import com.howtodoinjava.kafka.demo.model.User;
17 |
18 | @Configuration
19 | public class KafkaConsumerConfig
20 | {
21 | @Value(value = "${kafka.bootstrapAddress}")
22 | private String bootstrapAddress;
23 |
24 | @Value(value = "${general.topic.group.id}")
25 | private String groupId;
26 |
27 | @Value(value = "${user.topic.group.id}")
28 | private String userGroupId;
29 |
30 | // 1. Consume string data from Kafka
31 |
32 | @Bean
33 | public ConsumerFactory<String, String> consumerFactory() {
34 | Map<String, Object> props = new HashMap<>();
35 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
36 | props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
37 | props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
38 | StringDeserializer.class);
39 | props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
40 | StringDeserializer.class);
41 | props.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
42 | return new DefaultKafkaConsumerFactory<>(props);
43 | }
44 |
45 | @Bean
46 | public ConcurrentKafkaListenerContainerFactory<String, String>
47 | kafkaListenerContainerFactory() {
48 | ConcurrentKafkaListenerContainerFactory<String, String> factory
49 | = new ConcurrentKafkaListenerContainerFactory<>();
50 | factory.setConsumerFactory(consumerFactory());
51 | return factory;
52 | }
53 |
54 | // 2. Consume user objects from Kafka
55 |
56 | public ConsumerFactory<String, User> userConsumerFactory() {
57 | Map<String, Object> props = new HashMap<>();
58 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
59 | props.put(ConsumerConfig.GROUP_ID_CONFIG, userGroupId);
60 | props.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
61 | return new DefaultKafkaConsumerFactory<>(props,
62 | new StringDeserializer(),
63 | new JsonDeserializer<>(User.class));
64 | }
65 |
66 | @Bean
67 | public ConcurrentKafkaListenerContainerFactory<String, User>
68 | userKafkaListenerContainerFactory() {
69 | ConcurrentKafkaListenerContainerFactory<String, User> factory
70 | = new ConcurrentKafkaListenerContainerFactory<>();
71 | factory.setConsumerFactory(userConsumerFactory());
72 | return factory;
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/config/KafkaProducerConfig.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.config;
2 |
3 | import java.util.HashMap;
4 | import java.util.Map;
5 |
6 | import org.apache.kafka.clients.producer.ProducerConfig;
7 | import org.apache.kafka.common.serialization.StringSerializer;
8 | import org.springframework.beans.factory.annotation.Value;
9 | import org.springframework.context.annotation.Bean;
10 | import org.springframework.context.annotation.Configuration;
11 | import org.springframework.kafka.core.DefaultKafkaProducerFactory;
12 | import org.springframework.kafka.core.KafkaTemplate;
13 | import org.springframework.kafka.core.ProducerFactory;
14 | import org.springframework.kafka.support.serializer.JsonSerializer;
15 |
16 | import com.howtodoinjava.kafka.demo.model.User;
17 |
18 | @Configuration
19 | public class KafkaProducerConfig
20 | {
21 | @Value(value = "${kafka.bootstrapAddress}")
22 | private String bootstrapAddress;
23 |
24 | //1. Send string to Kafka
25 |
26 | @Bean
27 | public ProducerFactory<String, String> producerFactory() {
28 | Map<String, Object> props = new HashMap<>();
29 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
30 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
31 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
32 | return new DefaultKafkaProducerFactory<>(props);
33 | }
34 |
35 | @Bean
36 | public KafkaTemplate<String, String> kafkaTemplate() {
37 | return new KafkaTemplate<>(producerFactory());
38 | }
39 |
40 | //2. Send User objects to Kafka
41 | @Bean
42 | public ProducerFactory<String, User> userProducerFactory() {
43 | Map<String, Object> configProps = new HashMap<>();
44 | configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
45 | configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
46 | configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
47 | return new DefaultKafkaProducerFactory<>(configProps);
48 | }
49 |
50 | @Bean
51 | public KafkaTemplate<String, User> userKafkaTemplate() {
52 | return new KafkaTemplate<>(userProducerFactory());
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/config/TopicConfig.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.config;
2 |
3 | import java.util.HashMap;
4 | import java.util.Map;
5 |
6 | import org.apache.kafka.clients.admin.AdminClientConfig;
7 | import org.apache.kafka.clients.admin.NewTopic;
8 | import org.springframework.beans.factory.annotation.Value;
9 | import org.springframework.context.annotation.Bean;
10 | import org.springframework.context.annotation.Configuration;
11 | import org.springframework.kafka.config.TopicBuilder;
12 | import org.springframework.kafka.core.KafkaAdmin;
13 |
14 | @Configuration
15 | public class TopicConfig
16 | {
17 | @Value(value = "${kafka.bootstrapAddress}")
18 | private String bootstrapAddress;
19 |
20 | @Value(value = "${general.topic.name}")
21 | private String topicName;
22 |
23 | @Value(value = "${user.topic.name}")
24 | private String userTopicName;
25 |
26 | @Bean
27 | public NewTopic generalTopic() {
28 | return TopicBuilder.name(topicName)
29 | .partitions(1)
30 | .replicas(1)
31 | .build();
32 | }
33 |
34 | @Bean
35 | public NewTopic userTopic() {
36 | return TopicBuilder.name(userTopicName)
37 | .partitions(1)
38 | .replicas(1)
39 | .build();
40 | }
41 |
42 | // Spring Boot auto-configures a KafkaAdmin bean; declare it explicitly only when not using Spring Boot
43 |
44 | @Bean
45 | public KafkaAdmin kafkaAdmin()
46 | {
47 | Map<String, Object> configs = new HashMap<>();
48 | configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
49 | return new KafkaAdmin(configs);
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/controller/KafkaProducerController.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.controller;
2 |
3 | import org.springframework.beans.factory.annotation.Autowired;
4 | import org.springframework.web.bind.annotation.PostMapping;
5 | import org.springframework.web.bind.annotation.RequestMapping;
6 | import org.springframework.web.bind.annotation.RequestParam;
7 | import org.springframework.web.bind.annotation.RestController;
8 |
9 | import com.howtodoinjava.kafka.demo.model.User;
10 | import com.howtodoinjava.kafka.demo.service.KafKaProducerService;
11 |
12 | @RestController
13 | @RequestMapping(value = "/kafka")
14 | public class KafkaProducerController {
15 | private final KafKaProducerService producerService;
16 |
17 | @Autowired
18 | public KafkaProducerController(KafKaProducerService producerService) {
19 | this.producerService = producerService;
20 | }
21 |
22 | @PostMapping(value = "/publish")
23 | public void sendMessageToKafkaTopic(@RequestParam("message") String message) {
24 | this.producerService.sendMessage(message);
25 | }
26 |
27 | @PostMapping(value = "/createUser")
28 | public void sendMessageToKafkaTopic(
29 | @RequestParam("userId") long userId,
30 | @RequestParam("firstName") String firstName,
31 | @RequestParam("lastName") String lastName) {
32 |
33 | User user = new User();
34 | user.setUserId(userId);
35 | user.setFirstName(firstName);
36 | user.setLastName(lastName);
37 |
38 | this.producerService.saveCreateUserLog(user);
39 | }
40 | }
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/model/User.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.model;
2 |
3 | public class User
4 | {
5 | private long userId;
6 | private String firstName;
7 | private String lastName;
8 |
9 | public long getUserId() {
10 | return userId;
11 | }
12 | public void setUserId(long userId) {
13 | this.userId = userId;
14 | }
15 | public String getFirstName() {
16 | return firstName;
17 | }
18 | public void setFirstName(String firstName) {
19 | this.firstName = firstName;
20 | }
21 | public String getLastName() {
22 | return lastName;
23 | }
24 | public void setLastName(String lastName) {
25 | this.lastName = lastName;
26 | }
27 |
28 | @Override
29 | public String toString() {
30 | return "User [userId=" + userId + ", firstName=" + firstName + ", lastName=" + lastName + "]";
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/service/KafKaConsumerService.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.service;
2 |
3 | import org.slf4j.Logger;
4 | import org.slf4j.LoggerFactory;
5 | import org.springframework.kafka.annotation.KafkaListener;
6 | import org.springframework.stereotype.Service;
7 |
8 | import com.howtodoinjava.kafka.demo.model.User;
9 |
10 | @Service
11 | public class KafKaConsumerService
12 | {
13 | private final Logger logger
14 | = LoggerFactory.getLogger(KafKaConsumerService.class);
15 |
16 | @KafkaListener(topics = "${general.topic.name}",
17 | groupId = "${general.topic.group.id}")
18 | public void consume(String message) {
19 | logger.info(String.format("Message recieved -> %s", message));
20 | }
21 |
22 | @KafkaListener(topics = "${user.topic.name}",
23 | groupId = "${user.topic.group.id}",
24 | containerFactory = "userKafkaListenerContainerFactory")
25 | public void consume(User user) {
26 | logger.info(String.format("User created -> %s", user));
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/java/com/howtodoinjava/kafka/demo/service/KafKaProducerService.java:
--------------------------------------------------------------------------------
1 | package com.howtodoinjava.kafka.demo.service;
2 |
3 | import com.howtodoinjava.kafka.demo.model.User;
4 | import org.slf4j.Logger;
5 | import org.slf4j.LoggerFactory;
6 | import org.springframework.beans.factory.annotation.Autowired;
7 | import org.springframework.beans.factory.annotation.Value;
8 | import org.springframework.kafka.core.KafkaTemplate;
9 | import org.springframework.kafka.support.SendResult;
10 | import org.springframework.stereotype.Service;
11 |
12 | import java.util.concurrent.CompletableFuture;
13 |
14 | @Service
15 | public class KafKaProducerService {
16 | private static final Logger logger =
17 | LoggerFactory.getLogger(KafKaProducerService.class);
18 |
19 | //1. General topic with string payload
20 |
21 | @Value(value = "${general.topic.name}")
22 | private String topicName;
23 |
24 | @Autowired
25 | private KafkaTemplate<String, String> kafkaTemplate;
26 |
27 | //2. Topic with user object payload
28 |
29 | @Value(value = "${user.topic.name}")
30 | private String userTopicName;
31 |
32 | @Autowired
33 | private KafkaTemplate<String, User> userKafkaTemplate;
34 |
35 | public void sendMessage(String message) {
36 | CompletableFuture<SendResult<String, String>> future
37 | = this.kafkaTemplate.send(topicName, message);
38 |
39 | future.whenComplete((result, throwable) -> {
40 |
41 | if (throwable != null) {
42 | // handle failure
43 | logger.error("Unable to send message : " + message, throwable);
44 | } else {
45 | // handle success
46 | logger.info("Sent message: " + message + " with offset: " + result.getRecordMetadata().offset());
47 | }
48 | });
49 | }
50 |
51 | public void saveCreateUserLog(User user) {
52 | CompletableFuture<SendResult<String, User>> future
53 | = this.userKafkaTemplate.send(userTopicName, user);
54 |
55 | future.whenComplete((result, throwable) -> {
56 | if (throwable != null) {
57 | // handle failure
58 | logger.error("User created : " + user, throwable);
59 | } else {
60 | // handle success
61 | logger.info("User created: " + user + " with offset: " + result.getRecordMetadata().offset());
62 | }
63 | });
64 | }
65 |
66 | /*public void sendMessage(String message)
67 | {
68 | ListenableFuture<SendResult<String, String>> future
69 | = this.kafkaTemplate.send(topicName, message);
70 |
71 | future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
72 | @Override
73 | public void onSuccess(SendResult<String, String> result) {
74 | logger.info("Sent message: " + message
75 | + " with offset: " + result.getRecordMetadata().offset());
76 | }
77 |
78 | @Override
79 | public void onFailure(Throwable ex) {
80 | logger.error("Unable to send message : " + message, ex);
81 | }
82 | });
83 | }*/
84 |
85 | /*public void saveCreateUserLog(User user)
86 | {
87 | ListenableFuture<SendResult<String, User>> future
88 | = this.userKafkaTemplate.send(userTopicName, user);
89 |
90 | future.addCallback(new ListenableFutureCallback<SendResult<String, User>>() {
91 | @Override
92 | public void onSuccess(SendResult<String, User> result) {
93 | logger.info("User created: "
94 | + user + " with offset: " + result.getRecordMetadata().offset());
95 | }
96 |
97 | @Override
98 | public void onFailure(Throwable ex) {
99 | logger.error("User created : " + user, ex);
100 | }
101 | });
102 | }*/
103 | }
104 |
--------------------------------------------------------------------------------
/spring-kafka-java-config/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | server.port=9000
2 | kafka.bootstrapAddress=localhost:9092
3 |
4 | general.topic.name=test-log
5 | general.topic.group.id=group_id
6 |
7 | user.topic.name=user-log
8 | user.topic.group.id=group_id
--------------------------------------------------------------------------------
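A similar client-side sketch (not in the repository sources) for the Java-config variant, assuming it runs on port 9000 against a broker at localhost:9092. It hits the /kafka/createUser endpoint, which publishes a User object to the user-log topic via userKafkaTemplate; the sample user values are arbitrary.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CreateUserClient {

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // userId, firstName and lastName map to the @RequestParam arguments of KafkaProducerController
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:9000/kafka/createUser?userId=1&firstName=John&lastName=Doe"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();

        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println("HTTP status: " + response.statusCode());
    }
}
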
/spring-kafka-java-config/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="info">
10 |         <appender-ref ref="STDOUT" />
11 |     </root>
12 |
13 | </configuration>
--------------------------------------------------------------------------------