├── .gitignore
├── README.md
├── docker-compose.yaml
├── kafka-command.md
├── kafka-plain-old-communication
│   ├── pom.xml
│   └── src
│       └── main
│           └── java
│               └── com
│                   └── amrut
│                       └── kafka
│                           ├── consumer
│                           │   ├── Consumer.java
│                           │   ├── ConsumerAssignAndSeek.java
│                           │   ├── ConsumerGroupWithThreads.java
│                           │   └── ConsumerWithManualCommit.java
│                           └── producer
│                               ├── Producer.java
│                               ├── ProducerWithCallbackAndKeys.java
│                               ├── ProducerWithHighThroughput.java
│                               └── ProducerWithSafety.java
├── spring-cloud-stream-kafka-communication
│   ├── pom.xml
│   └── src
│       ├── main
│       │   ├── java
│       │   │   └── com
│       │   │       └── amrut
│       │   │           └── prabhu
│       │   │               ├── KafkaProducer.java
│       │   │               ├── SpringCloudStreamKafkaApplication.java
│       │   │               └── dto
│       │   │                   ├── Message.java
│       │   │                   └── coverters
│       │   │                       ├── MessageDeSerializer.java
│       │   │                       └── MessageSerializer.java
│       │   └── resources
│       │       └── application.yml
│       └── test
│           └── java
│               └── com
│                   └── amrut
│                       └── prabhu
│                           └── SpringCloudStreamKafkaApplicationTests.java
├── spring-cloud-stream-kafka-streams-with-kstreams
│   ├── pom.xml
│   └── src
│       └── main
│           ├── java
│           │   └── com
│           │       └── amrut
│           │           └── prabhu
│           │               ├── SpringCloudStreamKafkaApplication.java
│           │               └── dto
│           │                   ├── Department.java
│           │                   ├── JoinedValue.java
│           │                   ├── MyEvent.java
│           │                   └── coverters
│           │                       ├── JoinedValueSerDes.java
│           │                       └── MyEventSerDes.java
│           └── resources
│               └── application.yml
└── spring-kafka-communication-service
    ├── pom.xml
    └── src
        ├── main
        │   ├── java
        │   │   └── com
        │   │       └── amrut
        │   │           └── prabhu
        │   │               └── kafkacommunicationservice
        │   │                   ├── KafkaCommunicationServiceApplication.java
        │   │                   ├── KafkaConsumer.java
        │   │                   ├── KafkaProducer.java
        │   │                   └── dto
        │   │                       ├── Message.java
        │   │                       └── converters
        │   │                           ├── MessageDeSerializer.java
        │   │                           └── MessageSerializer.java
        │   └── resources
        │       └── application.yml
        └── test
            └── java
                └── com
                    └── amrut
                        └── prabhu
                            └── kafkacommunicationservice
                                └── KafkaCommunicationServiceApplicationTests.java
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/*
2 | *.iml
3 |
4 | **/target/*
5 | .mvn/*
6 | **/.mvn
7 | .vscode/*
8 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kafka Communication with Java and Spring
2 |
3 | This repository collects the various ways of communicating with a Kafka cluster using Java, including the approaches based on the Spring framework. A [docker-compose.yaml](/docker-compose.yaml) is provided to spin up a local Kafka cluster together with a few UI tools.
4 |
5 | Kafka communication using the Apache Kafka clients library: [here](/kafka-plain-old-communication).
6 |
7 | Kafka communication using Spring Kafka: [here](/spring-kafka-communication-service).
8 |
9 | Kafka communication using Spring Cloud Stream: [here](/spring-cloud-stream-kafka-communication).
10 |
11 | Kafka Streams processing with KStreams using Spring Cloud Stream: [here](/spring-cloud-stream-kafka-streams-with-kstreams).
12 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | zookeeper:
3 | image: confluentinc/cp-zookeeper:latest
4 | environment:
5 | ZOOKEEPER_CLIENT_PORT: 2181
6 | ZOOKEEPER_TICK_TIME: 2000
7 | ports:
8 | - 22181:2181
9 |
10 | kafka:
11 | image: confluentinc/cp-kafka:latest
12 | depends_on:
13 | - zookeeper
14 | ports:
15 | - 29092:29092
16 | environment:
17 | KAFKA_BROKER_ID: 1
18 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
19 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092
20 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
21 | KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
22 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
23 |
24 | # kafka UI tools
25 | kafka-ui:
26 | image: obsidiandynamics/kafdrop
27 | depends_on:
28 | - kafka
29 | ports:
30 | - 9091:9000
31 | environment:
32 | SERVER_SERVLET_CONTEXTPATH: "/"
33 | JVM_OPTS: "-Xms32M -Xmx64M"
34 | KAFKA_BROKERCONNECT: kafka:9092
35 |
36 | #https://github.com/redpanda-data/console
37 | redpanda:
38 | image: docker.redpanda.com/vectorized/console:latest
39 | depends_on:
40 | - kafka
41 | ports:
42 | - "8080:8080"
43 | environment:
44 | KAFKA_BROKERS: kafka:9092
45 |
46 | #https://github.com/consdata/kouncil
47 | kouncil:
48 | image: consdata/kouncil:latest
49 | depends_on:
50 | - kafka
51 | ports:
52 | - 9090:8080
53 | environment:
54 | bootstrapServers: kafka:9092
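55 |
56 | # Usage (standard Docker Compose CLI): `docker compose up -d` starts the whole stack.
57 | # With the advertised listeners above, Kafka is reachable at localhost:29092 from the
58 | # host and at kafka:9092 from inside the Docker network.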
--------------------------------------------------------------------------------
/kafka-command.md:
--------------------------------------------------------------------------------
1 | # Spring Boot Kafka Communication
2 |
3 | ## Commands
4 |
5 | - Start Zookeeper
6 | ```shell
7 | bin/zookeeper-server-start.sh config/zookeeper.properties
8 | ```
9 | - Start Kafka
10 | ```shell
11 | bin/kafka-server-start.sh config/server.properties
12 | ```
13 | - Create Kafka topic
14 | ```shell
15 | bin/kafka-topics.sh --create \
16 | --topic first-topic \
17 | --partitions=4 \
18 | --replication-factor=1 \
19 | --bootstrap-server localhost:9092
20 | ```
21 |
22 | - List Kafka Topics
23 | ```shell
24 | bin/kafka-topics.sh --list --bootstrap-server localhost:9092
25 | ```
26 |
27 | - Console consumer
28 | ```shell
29 | bin/kafka-console-consumer.sh \
30 | --bootstrap-server localhost:9092 \
31 | --property key.separator=: \
32 | --property print.key=true \
33 | --property print.offset=true \
34 | --property print.partition=true \
35 | --property print.value=true \
36 | --property print.headers=true \
37 | --topic first-topic \
38 | --from-beginning \
39 | --group group1
40 | ```
41 |
42 | - Console Producer
43 | ```shell
44 | bin/kafka-console-producer.sh \
45 | --bootstrap-server localhost:9092 \
46 | --property parse.key=true \
47 | --property key.separator=: \
48 | --topic first-topic
49 | ```
50 |
51 | - Payload (sample input for the console producer above; the key and value are separated by `:`)
52 | ```text
53 | mykey:"{\"name\": \"Jack\"}"
54 | ```
55 |
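56 | - Describe a consumer group (standard Kafka CLI; shows partition assignments and lag)
57 | ```shell
58 | bin/kafka-consumer-groups.sh \
59 | --bootstrap-server localhost:9092 \
60 | --describe \
61 | --group group1
62 | ```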
--------------------------------------------------------------------------------
/kafka-plain-old-communication/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |
6 |     <groupId>com.amrut.prabhu</groupId>
7 |     <artifactId>kafka-workouts</artifactId>
8 |     <version>1.0-SNAPSHOT</version>
9 |
10 |     <properties>
11 |         <maven.compiler.source>1.8</maven.compiler.source>
12 |         <maven.compiler.target>1.8</maven.compiler.target>
13 |     </properties>
14 |
15 |     <dependencies>
16 |         <dependency>
17 |             <groupId>org.apache.kafka</groupId>
18 |             <artifactId>kafka-clients</artifactId>
19 |             <version>2.1.0</version>
20 |         </dependency>
21 |         <dependency>
22 |             <groupId>org.slf4j</groupId>
23 |             <artifactId>slf4j-simple</artifactId>
24 |             <version>1.7.25</version>
25 |         </dependency>
26 |     </dependencies>
27 | </project>
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/consumer/Consumer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.consumer;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.apache.kafka.clients.consumer.ConsumerRecords;
6 | import org.apache.kafka.clients.consumer.KafkaConsumer;
7 | import org.apache.kafka.common.serialization.StringDeserializer;
8 | import org.slf4j.Logger;
9 | import org.slf4j.LoggerFactory;
10 |
11 | import java.time.Duration;
12 | import java.util.Collections;
13 | import java.util.Properties;
14 |
15 | public class Consumer {
16 |
17 | public static Logger logger = LoggerFactory.getLogger(Consumer.class);
18 |
19 | public static void main(String[] args) {
20 | String MY_GROUP_ID = "My_App";
21 | String BOOT_STRAP_SERVER = "127.0.0.1:9092";
22 |
23 | Properties properties = new Properties();
24 |
25 | properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOT_STRAP_SERVER);
26 | properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
27 | properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
28 | properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, MY_GROUP_ID);
29 | properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
30 |
31 |
32 | //create kafka consumer
33 |         KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
34 |
35 |
36 |         //subscribe to the topic
37 | consumer.subscribe(Collections.singleton("amrut"));
38 |
39 |
40 | //read records
41 | while(true) {
42 |
43 |             ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
44 |             for (ConsumerRecord<String, String> record : consumerRecords) {
45 |
46 | logger.info("Record ---------------------------------------------------");
47 | logger.info("Partition:-"+record.partition());
48 | logger.info("Offset:-"+record.offset());
49 | logger.info("Key:-"+record.key());
50 | logger.info("Value:-"+record.value());
51 | }
52 | }
53 |
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/consumer/ConsumerAssignAndSeek.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.consumer;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.apache.kafka.clients.consumer.ConsumerRecords;
6 | import org.apache.kafka.clients.consumer.KafkaConsumer;
7 | import org.apache.kafka.common.TopicPartition;
8 | import org.apache.kafka.common.serialization.StringDeserializer;
9 | import org.slf4j.Logger;
10 | import org.slf4j.LoggerFactory;
11 |
12 | import java.time.Duration;
13 | import java.util.Arrays;
14 | import java.util.Properties;
15 |
16 | public class ConsumerAssignAndSeek {
17 |
18 | public static Logger logger = LoggerFactory.getLogger(ConsumerAssignAndSeek.class);
19 |
20 | public static void main(String[] args) {
21 | String BOOT_STRAP_SERVER = "127.0.0.1:9092";
22 |
23 | Properties properties = new Properties();
24 |
25 | properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOT_STRAP_SERVER);
26 | properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
27 | properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
28 | properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
29 |
30 |
31 | //create kafka consumer
32 |         KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
33 |
34 |
35 |         //assign a partition of the topic
36 | TopicPartition topicPartition = new TopicPartition("amrut", 0);
37 | consumer.assign(Arrays.asList(topicPartition));
38 |
39 |
40 | //seek to an offset
41 |
42 |         long offset = 5L;
43 |
44 | consumer.seek(topicPartition, offset);
45 | //read 4 records
46 | for (int recordsRead = 0; recordsRead < 4; recordsRead++) {
47 |
48 |             ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
49 |             for (ConsumerRecord<String, String> record : consumerRecords) {
50 |
51 | logger.info("Record ---------------------------------------------------");
52 | logger.info("Partition:-" + record.partition());
53 | logger.info("Offset:-" + record.offset());
54 | logger.info("Key:-" + record.key());
55 | logger.info("Value:-" + record.value());
56 |
57 |
58 | }
59 | }
60 |
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/consumer/ConsumerGroupWithThreads.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.consumer;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.apache.kafka.clients.consumer.ConsumerRecords;
6 | import org.apache.kafka.clients.consumer.KafkaConsumer;
7 | import org.apache.kafka.common.errors.WakeupException;
8 | import org.apache.kafka.common.serialization.StringDeserializer;
9 | import org.slf4j.Logger;
10 | import org.slf4j.LoggerFactory;
11 |
12 | import java.time.Duration;
13 | import java.util.Collections;
14 | import java.util.Properties;
15 |
16 | public class ConsumerGroupWithThreads {
17 |
18 | public static Logger logger = LoggerFactory.getLogger(ConsumerGroupWithThreads.class);
19 |
20 |
21 | public static void main(String[] args) {
22 |
23 | new ConsumerGroupWithThreads().run();
24 | }
25 |
26 | public void run() {
27 | String MY_GROUP_ID = "My_App_2";
28 | String BOOT_STRAP_SERVER = "127.0.0.1:9092";
29 | ConsumerThread consumerThread = new ConsumerThread(BOOT_STRAP_SERVER, MY_GROUP_ID);
30 |
31 | Thread thread = new Thread(consumerThread);
32 | thread.start();
33 |
34 | Runtime.getRuntime().addShutdownHook(new Thread(() -> {
35 | consumerThread.shutdown();
36 | try {
37 | thread.join();
38 | } catch (InterruptedException e) {
39 | e.printStackTrace();
40 |
41 | }
42 | }));
43 |
44 | try {
45 | thread.join();
46 | } catch (InterruptedException e) {
47 | e.printStackTrace();
48 | logger.error("thread is interrupted");
49 | }
50 |
51 | logger.info("Main is exiting");
52 | }
53 |
54 | class ConsumerThread implements Runnable {
55 |
56 | private Logger logger = LoggerFactory.getLogger(ConsumerThread.class);
57 | private final String MY_GROUP_ID;
58 | private final String BOOT_STRAP_SERVER;
59 |         private KafkaConsumer<String, String> consumer;
60 |
61 | public ConsumerThread(String BOOT_STRAP_SERVER,
62 | String MY_GROUP_ID) {
63 | this.BOOT_STRAP_SERVER = BOOT_STRAP_SERVER;
64 | this.MY_GROUP_ID = MY_GROUP_ID;
65 | }
66 |
67 | @Override
68 | public void run() {
69 |
70 | Properties properties = new Properties();
71 |
72 | properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOT_STRAP_SERVER);
73 | properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
74 | properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
75 | properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, MY_GROUP_ID);
76 | properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
77 |
78 |
79 | //create kafka consumer
80 |             consumer = new KafkaConsumer<>(properties);
81 |
82 |
83 |             //subscribe to the topic
84 | consumer.subscribe(Collections.singleton("amrut"));
85 |
86 |
87 | try {
88 | //read records
89 | while (true) {
90 |
91 |                     ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
92 |                     for (ConsumerRecord<String, String> record : consumerRecords) {
93 |
94 | logger.info("Record ---------------------------------------------------");
95 | logger.info("Partition:-" + record.partition());
96 | logger.info("Offset:-" + record.offset());
97 | logger.info("Key:-" + record.key());
98 | logger.info("Value:-" + record.value());
99 | }
100 | }
101 |
102 | } catch (WakeupException e) {
103 | logger.error("Wakeup exception for shutdown");
104 | } finally {
105 | consumer.close();
106 | logger.info("consumer closing");
107 | }
108 | }
109 |
110 | public void shutdown() {
111 |             // makes the blocking poll() throw a WakeupException so the consumer loop can exit cleanly
112 | consumer.wakeup();
113 | }
114 | }
115 | }
116 |
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/consumer/ConsumerWithManualCommit.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.consumer;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.apache.kafka.clients.consumer.ConsumerRecords;
6 | import org.apache.kafka.clients.consumer.KafkaConsumer;
7 | import org.apache.kafka.common.serialization.StringDeserializer;
8 | import org.slf4j.Logger;
9 | import org.slf4j.LoggerFactory;
10 |
11 | import java.time.Duration;
12 | import java.util.Collections;
13 | import java.util.Properties;
14 |
15 | public class ConsumerWithManualCommit {
16 |
17 | public static Logger logger = LoggerFactory.getLogger(ConsumerWithManualCommit.class);
18 |
19 | public static void main(String[] args) {
20 | String MY_GROUP_ID = "My_App";
21 | String BOOT_STRAP_SERVER = "127.0.0.1:9092";
22 |
23 | Properties properties = new Properties();
24 |
25 | properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOT_STRAP_SERVER);
26 | properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
27 | properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
28 | properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, MY_GROUP_ID);
29 | properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
30 |
31 |
32 | // properties for manual commit of offsets
33 |
34 |         properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); // commit offsets manually
35 |         properties.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "5"); // at most 5 records per poll
36 |
37 |
38 | //create kafka consumer
39 |         KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
40 |
41 |
42 |         //subscribe to the topic
43 | consumer.subscribe(Collections.singleton("amrut"));
44 |
45 |
46 | //read records
47 | while(true) {
48 |
49 |             ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
50 |             for (ConsumerRecord<String, String> record : consumerRecords) {
51 |
52 | logger.info("Record ---------------------------------------------------");
53 | logger.info("Partition:-"+record.partition());
54 | logger.info("Offset:-"+record.offset());
55 | logger.info("Key:-"+record.key());
56 | logger.info("Value:-"+record.value());
57 | }
58 |
59 |             //commit offsets only after the polled batch has been processed (at-least-once)
60 |             logger.info("Committing offsets synchronously");
61 |             consumer.commitSync();
62 | }
63 |
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/producer/Producer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.producer;
2 |
3 | import org.apache.kafka.clients.producer.KafkaProducer;
4 | import org.apache.kafka.clients.producer.ProducerConfig;
5 | import org.apache.kafka.clients.producer.ProducerRecord;
6 | import org.apache.kafka.common.serialization.StringSerializer;
7 |
8 | import java.util.Properties;
9 |
10 | public class Producer {
11 |
12 | public static void main(String[] args) {
13 | //Kafka properties
14 | Properties properties = new Properties();
15 | properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"127.0.0.1:9092");
16 | properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
17 | properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
18 |
19 | // kafka producer
20 |         KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
21 |
22 | // kafka record
23 |         ProducerRecord<String, String> record = new ProducerRecord<>("amrut", "message");
24 |
25 | // send record -- async
26 | producer.send(record);
27 |
28 | //flush data
29 | producer.flush();
30 |
31 | //close producer
32 | producer.close();
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/producer/ProducerWithCallbackAndKeys.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.producer;
2 |
3 | import org.apache.kafka.clients.producer.KafkaProducer;
4 | import org.apache.kafka.clients.producer.ProducerConfig;
5 | import org.apache.kafka.clients.producer.ProducerRecord;
6 | import org.apache.kafka.common.serialization.StringSerializer;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 |
10 | import java.util.Properties;
11 |
12 | public class ProducerWithCallbackAndKeys {
13 |
14 | public static void main(String[] args) {
15 | //Kafka properties
16 | Logger log = LoggerFactory.getLogger(ProducerWithCallbackAndKeys.class);
17 | Properties properties = new Properties();
18 | properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
19 | properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
20 | properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
21 |
22 | // kafka producer
23 |         KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
24 |
25 | // kafka record
26 |         ProducerRecord<String, String> record = new ProducerRecord<>("amrut", "key", "value");
27 |
28 | // send record -- async
29 |         producer.send(record, (rcdMetaData, exp) -> {
30 |             // called on every successful send or when sending fails with an exception
31 |
32 |             if (exp == null) {
33 |                 log.info("Offset:- " + rcdMetaData.offset());
34 |                 log.info("Partition:- " + rcdMetaData.partition());
35 |                 log.info("Timestamp:- " + rcdMetaData.timestamp());
36 |             } else {
37 |                 log.error("error", exp);
38 |             }
39 |         });
40 |
41 | //flush data
42 | producer.flush();
43 |
44 | //close producer
45 | producer.close();
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/producer/ProducerWithHighThroughput.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.producer;
2 |
3 | import org.apache.kafka.clients.producer.KafkaProducer;
4 | import org.apache.kafka.clients.producer.ProducerConfig;
5 | import org.apache.kafka.clients.producer.ProducerRecord;
6 | import org.apache.kafka.common.serialization.StringSerializer;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 |
10 | import java.util.Properties;
11 |
12 | public class ProducerWithHighThroughput {
13 |
14 | public static void main(String[] args) {
15 | //Kafka properties
16 | Logger log = LoggerFactory.getLogger(ProducerWithHighThroughput.class);
17 | Properties properties = new Properties();
18 | properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
19 | properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
20 | properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
21 |
22 | // Safe producer properties
23 |         properties.setProperty(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "5"); // safe with idempotence on Kafka 2.0+; otherwise use 1
24 | properties.setProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
25 | properties.setProperty(ProducerConfig.ACKS_CONFIG, "all");
26 | properties.setProperty(ProducerConfig.RETRIES_CONFIG, Integer.toString(Integer.MAX_VALUE));
27 |
28 |         // high throughput properties
29 |         properties.setProperty(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
30 |         properties.setProperty(ProducerConfig.BATCH_SIZE_CONFIG, Integer.toString(32 * 1024)); // 32 KB batches
31 |         properties.setProperty(ProducerConfig.LINGER_MS_CONFIG, "10"); // wait up to 10 ms to fill a batch
32 |
33 | // kafka producer
34 |         KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
35 |
36 | // kafka record
37 |         ProducerRecord<String, String> record = new ProducerRecord<>("amrut", "key", "value");
38 |
39 | // send record -- async
40 | producer.send(record, (rcdMetaData, exp) -> {
41 |             // called on every successful send or when sending fails with an exception
42 |
43 | if (exp == null) {
44 | log.info("Offset:- " + rcdMetaData.offset());
45 | log.info("Partition:- " + rcdMetaData.partition());
46 | log.info("Timestamp:- " + rcdMetaData.timestamp());
47 | } else {
48 | log.error("error", exp);
49 | }
50 | });
51 |
52 | //flush data
53 | producer.flush();
54 |
55 | //close producer
56 | producer.close();
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/kafka-plain-old-communication/src/main/java/com/amrut/kafka/producer/ProducerWithSafety.java:
--------------------------------------------------------------------------------
1 | package com.amrut.kafka.producer;
2 |
3 | import org.apache.kafka.clients.producer.KafkaProducer;
4 | import org.apache.kafka.clients.producer.ProducerConfig;
5 | import org.apache.kafka.clients.producer.ProducerRecord;
6 | import org.apache.kafka.common.serialization.StringSerializer;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 |
10 | import java.util.Properties;
11 |
12 | public class ProducerWithSafety {
13 |
14 | public static void main(String[] args) {
15 | //Kafka properties
16 | Logger log = LoggerFactory.getLogger(ProducerWithSafety.class);
17 | Properties properties = new Properties();
18 | properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
19 | properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
20 | properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
21 |
22 | // Safe producer properties
23 |         properties.setProperty(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "5"); // safe with idempotence on Kafka 2.0+; otherwise use 1
24 | properties.setProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
25 | properties.setProperty(ProducerConfig.ACKS_CONFIG, "all");
26 | properties.setProperty(ProducerConfig.RETRIES_CONFIG, Integer.toString(Integer.MAX_VALUE));
27 |
28 |
29 | // kafka producer
30 |         KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
31 |
32 | // kafka record
33 |         ProducerRecord<String, String> record = new ProducerRecord<>("amrut", "key", "value");
34 |
35 | // send record -- async
36 | producer.send(record, (rcdMetaData, exp) -> {
37 |             // called on every successful send or when sending fails with an exception
38 |
39 | if (exp == null) {
40 | log.info("Offset:- " + rcdMetaData.offset());
41 | log.info("Partition:- " + rcdMetaData.partition());
42 | log.info("Timestamp:- " + rcdMetaData.timestamp());
43 | } else {
44 | log.error("error", exp);
45 | }
46 | });
47 |
48 | //flush data
49 | producer.flush();
50 |
51 | //close producer
52 | producer.close();
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <parent>
6 |         <groupId>org.springframework.boot</groupId>
7 |         <artifactId>spring-boot-starter-parent</artifactId>
8 |         <version>2.6.1</version>
9 |         <relativePath/> <!-- lookup parent from repository -->
10 |     </parent>
11 |
12 |     <groupId>com.amrut.prabhu</groupId>
13 |     <artifactId>spring-cloud-stream-kafka-communication</artifactId>
14 |     <version>0.0.1-SNAPSHOT</version>
15 |     <name>spring-cloud-stream-kafka-communication</name>
16 |     <description>Kafka communication with spring cloud Streams</description>
17 |     <properties>
18 |         <java.version>17</java.version>
19 |         <spring-cloud.version>2021.0.0</spring-cloud.version>
20 |     </properties>
21 |     <dependencies>
22 |         <dependency>
23 |             <groupId>org.apache.kafka</groupId>
24 |             <artifactId>kafka-streams</artifactId>
25 |         </dependency>
26 |         <dependency>
27 |             <groupId>org.springframework.cloud</groupId>
28 |             <artifactId>spring-cloud-stream-binder-kafka</artifactId>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>org.springframework.boot</groupId>
32 |             <artifactId>spring-boot-starter-test</artifactId>
33 |             <scope>test</scope>
34 |         </dependency>
35 |         <dependency>
36 |             <groupId>org.springframework.cloud</groupId>
37 |             <artifactId>spring-cloud-stream</artifactId>
38 |             <scope>test</scope>
39 |             <classifier>test-binder</classifier>
40 |             <type>test-jar</type>
41 |         </dependency>
42 |     </dependencies>
43 |
44 |     <dependencyManagement>
45 |         <dependencies>
46 |             <dependency>
47 |                 <groupId>org.springframework.cloud</groupId>
48 |                 <artifactId>spring-cloud-dependencies</artifactId>
49 |                 <version>${spring-cloud.version}</version>
50 |                 <type>pom</type>
51 |                 <scope>import</scope>
52 |             </dependency>
53 |         </dependencies>
54 |     </dependencyManagement>
55 |
56 |     <build>
57 |         <plugins>
58 |             <plugin>
59 |                 <groupId>org.springframework.boot</groupId>
60 |                 <artifactId>spring-boot-maven-plugin</artifactId>
61 |             </plugin>
62 |         </plugins>
63 |     </build>
64 | </project>
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/src/main/java/com/amrut/prabhu/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu;
2 |
3 | import com.amrut.prabhu.dto.Message;
4 | import org.springframework.beans.factory.annotation.Autowired;
5 | import org.springframework.cloud.stream.function.StreamBridge;
6 | import org.springframework.scheduling.annotation.Scheduled;
7 | import org.springframework.stereotype.Component;
8 |
9 | @Component
10 | public class KafkaProducer {
11 |
12 | @Autowired
13 | private StreamBridge streamBridge;
14 |
15 |     @Scheduled(cron = "*/2 * * * * *") // every 2 seconds
16 |     public void sendMessage() {
17 |         streamBridge.send("producer-out-0", new Message(" jack from Stream bridge"));
18 |     }
19 | }
20 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/src/main/java/com/amrut/prabhu/SpringCloudStreamKafkaApplication.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu;
2 |
3 | import com.amrut.prabhu.dto.Message;
4 | import org.springframework.boot.SpringApplication;
5 | import org.springframework.boot.autoconfigure.SpringBootApplication;
6 | import org.springframework.context.annotation.Bean;
7 | import org.springframework.scheduling.annotation.EnableScheduling;
8 |
9 | import java.util.function.Consumer;
10 | import java.util.function.Supplier;
11 |
12 | @SpringBootApplication
13 | @EnableScheduling
14 | public class SpringCloudStreamKafkaApplication {
15 |
16 | public static void main(String[] args) {
17 | SpringApplication.run(SpringCloudStreamKafkaApplication.class, args);
18 | }
19 |
20 | @Bean
21 |     public Consumer<Message> consumer() {
22 | return message -> System.out.println("received " + message);
23 | }
24 |
25 | @Bean
26 |     public Supplier<Message> producer() {
27 | return () -> new Message(" jack from Streams");
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/src/main/java/com/amrut/prabhu/dto/Message.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto;
2 |
3 | public record Message(String name) {
4 | }
5 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/src/main/java/com/amrut/prabhu/dto/coverters/MessageDeSerializer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto.coverters;
2 |
3 | import com.amrut.prabhu.dto.Message;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import org.apache.kafka.common.errors.SerializationException;
6 | import org.apache.kafka.common.serialization.Deserializer;
7 |
8 | import java.io.IOException;
9 |
10 | public class MessageDeSerializer implements Deserializer<Message> {
11 |
12 | private final ObjectMapper objectMapper = new ObjectMapper();
13 |
14 | @Override
15 | public Message deserialize(String topic, byte[] data) {
16 | try {
17 |             return objectMapper.readValue(data, Message.class);
18 | } catch (IOException e) {
19 | throw new SerializationException(e);
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/src/main/java/com/amrut/prabhu/dto/coverters/MessageSerializer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto.coverters;
2 |
3 | import com.amrut.prabhu.dto.Message;
4 | import com.fasterxml.jackson.core.JsonProcessingException;
5 | import com.fasterxml.jackson.databind.ObjectMapper;
6 | import org.apache.kafka.common.errors.SerializationException;
7 | import org.apache.kafka.common.serialization.Serializer;
8 |
9 | public class MessageSerializer implements Serializer<Message> {
10 |
11 | private final ObjectMapper objectMapper = new ObjectMapper();
12 |
13 | @Override
14 | public byte[] serialize(String topic, Message data) {
15 | try {
16 | return objectMapper.writeValueAsBytes(data);
17 | } catch (JsonProcessingException e) {
18 | throw new SerializationException(e);
19 | }
20 |
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/src/main/resources/application.yml:
--------------------------------------------------------------------------------
1 | spring:
2 | cloud:
3 | function:
4 | definition: consumer;producer
5 | stream:
6 | kafka:
7 | bindings:
8 | producer-out-0:
9 | producer:
10 | configuration:
11 | value.serializer: com.amrut.prabhu.dto.coverters.MessageSerializer
12 | consumer-in-0:
13 | consumer:
14 | configuration:
15 | value.deserializer: com.amrut.prabhu.dto.coverters.MessageDeSerializer
16 | binder:
17 | brokers: localhost:9092
18 |
19 | bindings:
20 | producer-out-0:
21 |           destination: first-topic
22 | producer:
23 | useNativeEncoding: true # This enables using the custom serializer
24 | consumer-in-0:
25 |           destination: first-topic
26 | consumer:
27 | use-native-decoding: true # This enables using the custom deserializer
28 |
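29 | # Note: the binding names above follow Spring Cloud Stream's functional convention
30 | # <functionName>-<in|out>-<index>, so producer-out-0 and consumer-in-0 map to the
31 | # producer() and consumer() beans declared in SpringCloudStreamKafkaApplication.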
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-communication/src/test/java/com/amrut/prabhu/SpringCloudStreamKafkaApplicationTests.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu;
2 |
3 | import org.junit.jupiter.api.Test;
4 | import org.springframework.boot.test.context.SpringBootTest;
5 |
6 | @SpringBootTest
7 | class SpringCloudStreamKafkaApplicationTests {
8 |
9 | @Test
10 | void contextLoads() {
11 | }
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <parent>
6 |         <groupId>org.springframework.boot</groupId>
7 |         <artifactId>spring-boot-starter-parent</artifactId>
8 |         <version>3.1.2</version>
9 |         <relativePath/> <!-- lookup parent from repository -->
10 |     </parent>
11 |
12 |     <groupId>com.amrut.prabhu</groupId>
13 |     <artifactId>spring-cloud-kstream-with-kafka-streams</artifactId>
14 |     <version>0.0.1-SNAPSHOT</version>
15 |     <name>spring-cloud-stream-kafka-communication</name>
16 |     <description>Spring cloud Streams With Kstreams and Ktables</description>
17 |     <properties>
18 |         <java.version>17</java.version>
19 |         <spring-cloud.version>2022.0.3</spring-cloud.version>
20 |     </properties>
21 |     <dependencies>
22 |         <dependency>
23 |             <groupId>org.springframework.cloud</groupId>
24 |             <artifactId>spring-cloud-stream-binder-kafka</artifactId>
25 |         </dependency>
26 |         <dependency>
27 |             <groupId>org.springframework.cloud</groupId>
28 |             <artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>org.springframework.boot</groupId>
32 |             <artifactId>spring-boot-starter-test</artifactId>
33 |             <scope>test</scope>
34 |         </dependency>
35 |     </dependencies>
36 |
37 |     <dependencyManagement>
38 |         <dependencies>
39 |             <dependency>
40 |                 <groupId>org.springframework.cloud</groupId>
41 |                 <artifactId>spring-cloud-dependencies</artifactId>
42 |                 <version>${spring-cloud.version}</version>
43 |                 <type>pom</type>
44 |                 <scope>import</scope>
45 |             </dependency>
46 |         </dependencies>
47 |     </dependencyManagement>
48 |
49 |     <build>
50 |         <plugins>
51 |             <plugin>
52 |                 <groupId>org.springframework.boot</groupId>
53 |                 <artifactId>spring-boot-maven-plugin</artifactId>
54 |             </plugin>
55 |         </plugins>
56 |     </build>
57 | </project>
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/src/main/java/com/amrut/prabhu/SpringCloudStreamKafkaApplication.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu;
2 |
3 | import com.amrut.prabhu.dto.Department;
4 | import com.amrut.prabhu.dto.JoinedValue;
5 | import com.amrut.prabhu.dto.MyEvent;
6 | import org.apache.kafka.common.serialization.Serdes;
7 | import org.apache.kafka.streams.KeyValue;
8 | import org.apache.kafka.streams.kstream.*;
9 | import org.springframework.boot.SpringApplication;
10 | import org.springframework.boot.autoconfigure.SpringBootApplication;
11 | import org.springframework.context.annotation.Bean;
12 | import org.springframework.kafka.support.KafkaHeaders;
13 | import org.springframework.messaging.Message;
14 | import org.springframework.messaging.support.MessageBuilder;
15 | import java.time.Duration;
16 | import java.time.temporal.ChronoUnit;
17 | import java.util.Random;
18 | import java.util.function.BiFunction;
19 | import java.util.function.Function;
20 | import java.util.function.Supplier;
21 |
22 | @SpringBootApplication
23 | public class SpringCloudStreamKafkaApplication {
24 |
25 | public static void main(String[] args) {
26 | SpringApplication.run(SpringCloudStreamKafkaApplication.class, args);
27 | }
28 |
29 | @Bean
30 |     public Supplier<Message<MyEvent>> producer() {
31 | return () -> {
32 | Department department = Department.values()[new Random().nextInt(Department.values().length)];
33 | MyEvent myEvent = new MyEvent("Jack", department);
34 | return MessageBuilder.withPayload(myEvent)
35 | .setHeader(KafkaHeaders.KEY, department.name())
36 | .build();
37 | };
38 |
39 | }
40 |
41 | @Bean
42 |     public Function<KStream<String, MyEvent>, KStream<String, String>> enhancer() {
43 | return input -> input
44 | // .peek((k, v) -> System.out.println("Enhancer " + k + " " + v))
45 | .mapValues(value -> value.name());
46 | }
47 |
48 |
49 | @Bean
50 |     public Function<KStream<String, MyEvent>, KStream<String, String>> aggregate() {
51 | return input -> input
52 | //.peek((key, value) -> System.out.println("Aggregate->" + key + " " + value))
53 | .groupByKey()
54 | .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofSeconds(10)))
55 |                 .aggregate(() -> 0L,
56 | (key, value, aggregate) -> aggregate + 1,
57 | Materialized.with(Serdes.String(), Serdes.Long()))
58 |
59 | .suppress(Suppressed.untilWindowCloses(Suppressed.BufferConfig.unbounded()))
60 | .toStream()
61 | .map((w, v) -> new KeyValue<>(w.key(), v.toString()));
62 | // .peek((k, v) -> System.out.println("Enhancer " + k + " " + v));
63 | }
64 |
65 | @Bean
66 |     public BiFunction<KStream<String, String>, KStream<String, String>, KStream<String, JoinedValue>> join() {
67 |         return (input1, input2) -> input1
68 |                 .join(input2,
69 |                         (value1, value2) -> new JoinedValue(value1, value2),
70 |                         JoinWindows.ofTimeDifferenceWithNoGrace(Duration.of(10, ChronoUnit.SECONDS)),
71 |                         StreamJoined.with(Serdes.String(), Serdes.String(), Serdes.String()))
72 |                 .peek((key, value) -> System.out.println("joined ->" + key + " " + value));
73 |
74 |
75 | }
76 |
77 | }
78 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/src/main/java/com/amrut/prabhu/dto/Department.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto;
2 |
3 | public enum Department {
4 | TECH, BUSINESS, FINANCE, PRODUCT
5 | }
6 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/src/main/java/com/amrut/prabhu/dto/JoinedValue.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto;
2 |
3 | public record JoinedValue(String value1, String value2) {
4 | }
5 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/src/main/java/com/amrut/prabhu/dto/MyEvent.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto;
2 |
3 | public record MyEvent(String name, Department department) {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/src/main/java/com/amrut/prabhu/dto/coverters/JoinedValueSerDes.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto.coverters;
2 |
3 | import com.amrut.prabhu.dto.JoinedValue;
4 | import org.springframework.kafka.support.serializer.JsonSerde;
5 |
6 | public class JoinedValueSerDes extends JsonSerde<JoinedValue> {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/src/main/java/com/amrut/prabhu/dto/coverters/MyEventSerDes.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.dto.coverters;
2 |
3 | import com.amrut.prabhu.dto.MyEvent;
4 | import org.springframework.kafka.support.serializer.JsonSerde;
5 |
6 | public class MyEventSerDes extends JsonSerde<MyEvent> {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/spring-cloud-stream-kafka-streams-with-kstreams/src/main/resources/application.yml:
--------------------------------------------------------------------------------
1 | spring:
2 | cloud:
3 | function:
4 | definition: producer;enhancer;aggregate;join
5 | stream:
6 | kafka:
7 | streams:
8 | bindings:
9 | enhancer-in-0: # only required if you need to provide some configurations.
10 | consumer:
11 | keySerde: org.apache.kafka.common.serialization.Serdes$StringSerde
12 | valueSerde: com.amrut.prabhu.dto.coverters.MyEventSerDes #custom
13 | # enhancer-out-0: # only required if you need to provide some configurations.
14 | # producer:
15 | # configuration:
16 | # spring.json.add.type.headers: false
17 | aggregate-in-0:
18 | consumer:
19 | keySerde: org.apache.kafka.common.serialization.Serdes$StringSerde
20 | valueSerde: com.amrut.prabhu.dto.coverters.MyEventSerDes #custom
21 | # configuration:
22 | ## spring.json.add.type.headers: false
23 | # commit.interval.ms: 2000
24 | # join-in-0:
25 | # consumer:
26 | # keySerde: org.apache.kafka.common.serialization.Serdes$StringSerde
27 | # valueSerde: org.apache.kafka.common.serialization.Serdes$StringSerde
28 | ##
29 | join-out-0:
30 | producer:
31 | keySerde: org.apache.kafka.common.serialization.Serdes$StringSerde
32 | valueSerde: com.amrut.prabhu.dto.coverters.JoinedValueSerDes
33 | binder:
34 | configuration:
35 |               commit.interval.ms: 2000 # commit and forward results every 2s instead of the default 30s
36 |
37 | # binder:
38 | # brokers: localhost:9092
39 | bindings:
40 | producer-out-0:
41 | producer:
42 | configuration:
43 | key.serializer: org.apache.kafka.common.serialization.StringSerializer
44 | bindings:
45 | producer-out-0:
46 | destination: first-topic
47 |
48 | enhancer-in-0:
49 | destination: first-topic
50 | enhancer-out-0:
51 | destination: second-topic
52 | #
53 | aggregate-in-0:
54 | destination: first-topic
55 | aggregate-out-0:
56 | destination: third-topic
57 | #
58 | join-in-0:
59 | destination: second-topic
60 | join-in-1:
61 | destination: third-topic
62 | join-out-0:
63 | destination: fourth-topic
64 |
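65 | # Note: since join() is a BiFunction, Spring Cloud Stream derives two input bindings
66 | # (join-in-0, join-in-1) and one output binding (join-out-0) for it.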
--------------------------------------------------------------------------------
/spring-kafka-communication-service/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <parent>
6 |         <groupId>org.springframework.boot</groupId>
7 |         <artifactId>spring-boot-starter-parent</artifactId>
8 |         <version>2.6.1</version>
9 |         <relativePath/> <!-- lookup parent from repository -->
10 |     </parent>
11 |     <groupId>com.amrut.prabhu</groupId>
12 |     <artifactId>spring-kafka-communication-service</artifactId>
13 |     <version>0.0.1-SNAPSHOT</version>
14 |     <name>Spring kafka Communication Service</name>
15 |     <properties>
16 |         <java.version>17</java.version>
17 |     </properties>
18 |     <dependencies>
19 |         <dependency>
20 |             <groupId>org.springframework.boot</groupId>
21 |             <artifactId>spring-boot-starter</artifactId>
22 |         </dependency>
23 |         <dependency>
24 |             <groupId>org.springframework.boot</groupId>
25 |             <artifactId>spring-boot-starter-web</artifactId>
26 |         </dependency>
27 |         <dependency>
28 |             <groupId>org.springframework.kafka</groupId>
29 |             <artifactId>spring-kafka</artifactId>
30 |         </dependency>
31 |         <dependency>
32 |             <groupId>org.projectlombok</groupId>
33 |             <artifactId>lombok</artifactId>
34 |         </dependency>
35 |         <dependency>
36 |             <groupId>org.springframework.boot</groupId>
37 |             <artifactId>spring-boot-starter-test</artifactId>
38 |             <scope>test</scope>
39 |         </dependency>
40 |         <dependency>
41 |             <groupId>org.springframework.kafka</groupId>
42 |             <artifactId>spring-kafka-test</artifactId>
43 |             <scope>test</scope>
44 |         </dependency>
45 |     </dependencies>
46 |
47 |     <build>
48 |         <plugins>
49 |             <plugin>
50 |                 <groupId>org.springframework.boot</groupId>
51 |                 <artifactId>spring-boot-maven-plugin</artifactId>
52 |             </plugin>
53 |         </plugins>
54 |     </build>
55 | </project>
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/main/java/com/amrut/prabhu/kafkacommunicationservice/KafkaCommunicationServiceApplication.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.kafkacommunicationservice;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 | import org.springframework.scheduling.annotation.EnableScheduling;
6 |
7 | @SpringBootApplication
8 | @EnableScheduling
9 | public class KafkaCommunicationServiceApplication {
10 |
11 | public static void main(String[] args) {
12 | SpringApplication.run(KafkaCommunicationServiceApplication.class, args);
13 | }
14 |
15 | }
16 |
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/main/java/com/amrut/prabhu/kafkacommunicationservice/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.kafkacommunicationservice;
2 |
3 | import com.amrut.prabhu.kafkacommunicationservice.dto.Message;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.springframework.kafka.annotation.KafkaListener;
6 | import org.springframework.stereotype.Component;
7 |
8 | @Component
9 | public class KafkaConsumer {
10 |
11 | @KafkaListener(id = "my-client-application", topics = "${topic.name}")
12 |     public void consumer(ConsumerRecord<String, Message> consumerRecord) {
13 | System.out.println("Consumed Record Details: " + consumerRecord);
14 | Message message = consumerRecord.value();
15 |         System.out.println("Consumed Message: " + message);
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/main/java/com/amrut/prabhu/kafkacommunicationservice/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.kafkacommunicationservice;
2 |
3 | import com.amrut.prabhu.kafkacommunicationservice.dto.Message;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.springframework.beans.factory.annotation.Value;
6 | import org.springframework.kafka.core.KafkaTemplate;
7 | import org.springframework.scheduling.annotation.Scheduled;
8 | import org.springframework.stereotype.Component;
9 |
10 | import java.util.UUID;
11 |
12 | @Component
13 | public class KafkaProducer {
14 |
15 | @Value("${topic.name}")
16 | private String topicName;
17 |
18 |     private final KafkaTemplate<String, Message> kafkaTemplate;
19 |
20 |     public KafkaProducer(KafkaTemplate<String, Message> kafkaTemplate) {
21 | this.kafkaTemplate = kafkaTemplate;
22 | }
23 |
24 |     @Scheduled(cron = "*/2 * * * * *") // every 2 seconds
25 | public void sendMessage() {
26 | UUID key = UUID.randomUUID();
27 | Message payload = new Message("jack");
28 | System.out.println("Sending Data " + payload);
29 |
30 |         ProducerRecord<String, Message> record = new ProducerRecord<>(topicName,
31 | key.toString(),
32 | payload);
33 | record.headers()
34 | .add("message-id", UUID.randomUUID()
35 | .toString()
36 | .getBytes());
37 | kafkaTemplate.send(record);
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/main/java/com/amrut/prabhu/kafkacommunicationservice/dto/Message.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.kafkacommunicationservice.dto;
2 |
3 | public record Message(String name) {
4 | }
5 |
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/main/java/com/amrut/prabhu/kafkacommunicationservice/dto/converters/MessageDeSerializer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.kafkacommunicationservice.dto.converters;
2 |
3 | import com.amrut.prabhu.kafkacommunicationservice.dto.Message;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import org.apache.kafka.common.errors.SerializationException;
6 | import org.apache.kafka.common.serialization.Deserializer;
7 |
8 | import java.io.IOException;
9 |
10 | public class MessageDeSerializer implements Deserializer<Message> {
11 |
12 | private final ObjectMapper objectMapper = new ObjectMapper();
13 |
14 | @Override
15 | public Message deserialize(String topic, byte[] data) {
16 | try {
17 | return objectMapper.readValue(data, Message.class);
18 | } catch (IOException e) {
19 | throw new SerializationException(e);
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/main/java/com/amrut/prabhu/kafkacommunicationservice/dto/converters/MessageSerializer.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.kafkacommunicationservice.dto.converters;
2 |
3 | import com.amrut.prabhu.kafkacommunicationservice.dto.Message;
4 | import com.fasterxml.jackson.core.JsonProcessingException;
5 | import com.fasterxml.jackson.databind.ObjectMapper;
6 | import org.apache.kafka.common.errors.SerializationException;
7 | import org.apache.kafka.common.serialization.Serializer;
8 |
9 | public class MessageSerializer implements Serializer<Message> {
10 |
11 | private final ObjectMapper objectMapper = new ObjectMapper();
12 |
13 | @Override
14 | public byte[] serialize(String topic, Message data) {
15 | try {
16 | return objectMapper.writeValueAsBytes(data);
17 | } catch (JsonProcessingException e) {
18 | throw new SerializationException(e);
19 | }
20 |
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/main/resources/application.yml:
--------------------------------------------------------------------------------
1 | spring:
2 | kafka:
3 | bootstrap-servers:
4 | - localhost:9092
5 | consumer:
6 | client-id: my-client-consumer
7 | group-id: spring-application-group
8 | key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
9 | value-deserializer: com.amrut.prabhu.kafkacommunicationservice.dto.converters.MessageDeSerializer
10 | producer:
11 | client-id: my-client-application
12 | key-serializer: org.apache.kafka.common.serialization.StringSerializer
13 | value-serializer: com.amrut.prabhu.kafkacommunicationservice.dto.converters.MessageSerializer
14 |
15 | topic:
16 | name: "transaction.000"
17 | server:
18 | port: 9191
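19 |
20 | # Note: instead of the hand-written converters above, Spring Kafka's built-in
21 | # org.springframework.kafka.support.serializer.JsonSerializer and JsonDeserializer
22 | # could be configured here as an alternative.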
--------------------------------------------------------------------------------
/spring-kafka-communication-service/src/test/java/com/amrut/prabhu/kafkacommunicationservice/KafkaCommunicationServiceApplicationTests.java:
--------------------------------------------------------------------------------
1 | package com.amrut.prabhu.kafkacommunicationservice;
2 |
3 | import org.junit.jupiter.api.Test;
4 | import org.springframework.boot.test.context.SpringBootTest;
5 |
6 | @SpringBootTest
7 | class KafkaCommunicationServiceApplicationTests {
8 |
9 | @Test
10 | void contextLoads() {
11 | }
12 |
13 | }
14 |
--------------------------------------------------------------------------------