├── .github └── workflows │ └── gradle.yml ├── .gitignore ├── Kafka_Security.md ├── README.md ├── SetUpKafka.md ├── SetUpKafka3.md ├── library-events-consumer ├── .gitignore ├── build.gradle ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── learnkafka │ │ │ ├── LibraryEventsConsumerApplication.java │ │ │ ├── config │ │ │ ├── LibraryEventsConsumerConfig.java │ │ │ └── LibraryEventsConsumerConfigLegacy.java │ │ │ ├── consumer │ │ │ ├── LibraryEventsConsumer.java │ │ │ ├── LibraryEventsConsumerManualOffset.java │ │ │ └── LibraryEventsRetryConsumer.java │ │ │ ├── entity │ │ │ ├── Book.java │ │ │ ├── FailureRecord.java │ │ │ ├── LibraryEvent.java │ │ │ └── LibraryEventType.java │ │ │ ├── jpa │ │ │ ├── FailureRecordRepository.java │ │ │ └── LibraryEventsRepository.java │ │ │ ├── scheduler │ │ │ └── RetryScheduler.java │ │ │ └── service │ │ │ ├── FailureService.java │ │ │ └── LibraryEventsService.java │ └── resources │ │ └── application.yml │ └── test │ └── java │ └── intg │ └── com │ └── learnkafka │ ├── consumer │ └── LibraryEventsConsumerIntegrationTest.java │ ├── jpa │ └── FailureRecordRepositoryTest.java │ └── scheduler │ └── RetrySchedulerIntegrationTest.java └── library-events-producer ├── .gitignore ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src ├── main ├── java │ └── com │ │ └── learnkafka │ │ ├── LibraryEventsProducerApplication.java │ │ ├── config │ │ └── AutoCreateConfig.java │ │ ├── controller │ │ ├── LibraryEventControllerAdvice.java │ │ └── LibraryEventsController.java │ │ ├── domain │ │ ├── Book.java │ │ ├── LibraryEvent.java │ │ └── LibraryEventType.java │ │ └── producer │ │ └── LibraryEventProducer.java └── resources │ ├── application.yml │ └── curl-commands.txt └── test └── java ├── intg └── com │ └── learnkafka │ 
└── controller │ └── LibraryEventsControllerIntegrationTest.java └── unit └── com └── learnkafka ├── controller └── LibraryEventControllerUnitTest.java └── producer └── LibraryEventProducerUnitTest.java /.github/workflows/gradle.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a Java project with Gradle 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-gradle 3 | 4 | name: kafka-for-developers-using-spring-boot 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up JDK 11 18 | uses: actions/setup-java@v1 19 | with: 20 | java-version: 11 21 | - name: Build Library Events Consumer 22 | run: | 23 | cd library-events-consumer 24 | chmod +x gradlew 25 | ./gradlew build 26 | - name: Build Library Events Producer 27 | run: | 28 | cd library-events-producer 29 | chmod +x gradlew 30 | ./gradlew build 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.nar 17 | *.ear 18 | *.zip 19 | *.tar.gz 20 | *.rar 21 | 22 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 23 | hs_err_pid* 24 | 25 | /live-coding 26 | .DS_Store 27 | /code-zips 28 | /ssl 29 | .idea/ 30 | -------------------------------------------------------------------------------- /Kafka_Security.md: -------------------------------------------------------------------------------- 1 | # Enabling SSL in Kafka 2 | 3 | - Follow the below steps for enabling 
SSL in your local environment 4 | 5 | ## Generating the KeyStore 6 | 7 | - The below command is to generate the **keyStore**. 8 | - KeyStore in general has information about the server and the organization 9 | 10 | ``` 11 | keytool -keystore server.keystore.jks -alias localhost -validity 365 -genkey -keyalg RSA 12 | ``` 13 | 14 | **Example** 15 | - After entering all the details the final value will look like below. 16 | 17 | ``` 18 | CN=localhost, OU=localhost, O=localhost, L=Chennai, ST=TN, C=IN 19 | ``` 20 | 21 | ## Generating CA 22 | 23 | - The below command will generate the ca cert(SSL cert) and private key. This is normally needed if we are self signing the request. 24 | 25 | ``` 26 | openssl req -new -x509 -keyout ca-key -out ca-cert -days 365 -subj "/CN=local-security-CA" 27 | ``` 28 | 29 | ## Certificate Signing Request(CSR) 30 | 31 | - The below command will create a **cert-file** as a result of executing the command. 32 | 33 | ``` 34 | keytool -keystore server.keystore.jks -alias localhost -certreq -file cert-file 35 | ``` 36 | 37 | ## Signing the certificate 38 | 39 | - The below command takes care of signing the CSR and then it spits out a file **cert-signed** 40 | 41 | ``` 42 | openssl x509 -req -CA ca-cert -CAkey ca-key -in cert-file -out cert-signed -days 365 -CAcreateserial -passin pass:password 43 | ``` 44 | 45 | - To view the content inside the file **cert-signed**, run the below command. 46 | 47 | ``` 48 | keytool -printcert -v -file cert-signed 49 | ``` 50 | 51 | 52 | ## Adding the Signed Cert in to the KeyStore file 53 | 54 | ``` 55 | keytool -keystore server.keystore.jks -alias CARoot -import -file ca-cert 56 | keytool -keystore server.keystore.jks -alias localhost -import -file cert-signed 57 | ``` 58 | 59 | ## Generate the TrustStore 60 | 61 | - The below command takes care of generating the truststore for us and adds the **CA-Cert** in to it. 62 | - This is to make sure the client is going to trust all the certs issued by CA. 
63 | 64 | ``` 65 | keytool -keystore client.truststore.jks -alias CARoot -import -file ca-cert 66 | ``` 67 | 68 | ## Broker SSL Settings 69 | 70 | ``` 71 | ssl.keystore.location=/server.keystore.jks 72 | ssl.keystore.password=password 73 | ssl.key.password=password 74 | ssl.endpoint.identification.algorithm= 75 | ``` 76 | # Accessing SSL Enabled Topics using Console Producers/Consumers 77 | 78 | - Create a topic 79 | 80 | ``` 81 | ./kafka-topics.sh --create --topic test-topic -zookeeper localhost:2181 --replication-factor 1 --partitions 3 82 | ``` 83 | 84 | - Create a file named **client-ssl.properties** and have the below properties configured in there. 85 | 86 | ``` 87 | security.protocol=SSL 88 | ssl.truststore.location=/client.truststore.jks 89 | ssl.truststore.password=password 90 | ssl.truststore.type=JKS 91 | ``` 92 | 93 | ## Producing Messages to Secured Topic 94 | 95 | - Command to Produce Messages to the secured topic 96 | 97 | ``` 98 | ./kafka-console-producer.sh --broker-list localhost:9095,localhost:9096,localhost:9097 --topic test-topic --producer.config client-ssl.properties 99 | ``` 100 | 101 | ## Consuming Messages from a Secured Topic 102 | 103 | - Command to Produce Messages to the secured topic 104 | 105 | ``` 106 | ./kafka-console-consumer.sh --bootstrap-server localhost:9095,localhost:9096,localhost:9097 --topic test-topic --consumer.config client-ssl.properties 107 | ``` 108 | 109 | 110 | ## Producing Messages to Non-Secured Topic 111 | 112 | ``` 113 | ./kafka-console-producer.sh --broker-list localhost:9092,localhost:9093,localhost:9094 --topic test-topic 114 | ``` 115 | 116 | 117 | ## Consuming Messages from a Non-Secured Topic 118 | 119 | ``` 120 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092,localhost:9093,localhost:9094 --topic test-topic 121 | ``` 122 | 123 | ## 2 Way Authentication 124 | 125 | - This config is to enable the client authentication at the cluster end. 
126 | 127 | ``` 128 | keytool -keystore server.truststore.jks -alias CARoot -import -file ca-cert 129 | ``` 130 | 131 | - Add the **ssl.client.auth** property in the **server.properties**. 132 | 133 | ``` 134 | ssl.truststore.location=/server.truststore.jks 135 | ssl.truststore.password=password 136 | ssl.client.auth=required 137 | ``` 138 | - Kafka Client should have the following the config in the **client-ssl.properties** file 139 | 140 | ``` 141 | ssl.keystore.type=JKS 142 | ssl.keystore.location=/client.keystore.jks 143 | ssl.keystore.password=password 144 | ssl.key.password=password 145 | ``` 146 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![kafka-for-developers-using-spring-boot](https://github.com/dilipsundarraj1/kafka-for-developers-using-spring-boot/workflows/kafka-for-developers-using-spring-boot/badge.svg) 2 | # kafka-for-developers-using-spring-boot 3 | 4 | This repository has the complete code related to kafka producers/consumers using spring boot. 5 | 6 | 7 | 8 | - [Setup-Kafka](https://github.com/dilipsundarraj1/kafka-for-developers-using-spring-boot/blob/master/SetUpKafka.md) 9 | 10 | - [Setup-Kafka (Version 3)](SetUpKafka3.md) 11 | 12 | ## Securing your Kafka Cluster using SSL 13 | 14 | - [Kafka SSL SetUp](https://github.com/dilipsundarraj1/kafka-for-developers-using-spring-boot/blob/master/Kafka_Security.md) 15 | 16 | ## H2 Database 17 | 18 | - Access the h2 database in the following link - http://localhost:8081/h2-console 19 | -------------------------------------------------------------------------------- /SetUpKafka.md: -------------------------------------------------------------------------------- 1 | # Setting Up Kafka 2 | 3 |
Mac 4 |

5 | 6 | - Make sure you are navigated inside the bin directory. 7 | 8 | ## Start Zookeeper and Kafka Broker 9 | 10 | - Start up the Zookeeper. 11 | 12 | ``` 13 | ./zookeeper-server-start.sh ../config/zookeeper.properties 14 | ``` 15 | 16 | - Add the below properties in the server.properties 17 | 18 | ``` 19 | listeners=PLAINTEXT://localhost:9092 20 | auto.create.topics.enable=false 21 | ``` 22 | 23 | - Start up the Kafka Broker 24 | 25 | ``` 26 | ./kafka-server-start.sh ../config/server.properties 27 | ``` 28 | 29 | ## How to create a topic ? 30 | 31 | ``` 32 | ./kafka-topics.sh --create --topic test-topic -zookeeper localhost:2181 --replication-factor 1 --partitions 4 33 | ``` 34 | 35 | ## How to instantiate a Console Producer? 36 | 37 | ### Without Key 38 | 39 | ``` 40 | ./kafka-console-producer.sh --broker-list localhost:9092 --topic test-topic 41 | ``` 42 | 43 | ### With Key 44 | 45 | ``` 46 | ./kafka-console-producer.sh --broker-list localhost:9092 --topic test-topic --property "key.separator=-" --property "parse.key=true" 47 | ``` 48 | 49 | ## How to instantiate a Console Consumer? 50 | 51 | ### Without Key 52 | 53 | ``` 54 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test-topic --from-beginning 55 | ``` 56 | 57 | ### With Key 58 | 59 | ``` 60 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test-topic --from-beginning -property "key.separator= - " --property "print.key=true" 61 | ``` 62 | 63 | ### With Consumer Group 64 | 65 | ``` 66 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test-topic --group 67 | ``` 68 |

69 | 70 |
71 | 72 |
Windows 73 |

74 | 75 | - Make sure you are inside the **bin/windows** directory. 76 | 77 | ## Start Zookeeper and Kafka Broker 78 | 79 | - Start up the Zookeeper. 80 | 81 | ``` 82 | zookeeper-server-start.bat ..\..\config\zookeeper.properties 83 | ``` 84 | 85 | - Start up the Kafka Broker. 86 | 87 | ``` 88 | kafka-server-start.bat ..\..\config\server.properties 89 | ``` 90 | 91 | ## How to create a topic ? 92 | 93 | ``` 94 | kafka-topics.bat --create --topic test-topic -zookeeper localhost:2181 --replication-factor 1 --partitions 4 95 | ``` 96 | 97 | ## How to instantiate a Console Producer? 98 | 99 | ### Without Key 100 | 101 | ``` 102 | kafka-console-producer.bat --broker-list localhost:9092 --topic test-topic 103 | ``` 104 | 105 | ### With Key 106 | 107 | ``` 108 | kafka-console-producer.bat --broker-list localhost:9092 --topic test-topic --property "key.separator=-" --property "parse.key=true" 109 | ``` 110 | 111 | ## How to instantiate a Console Consumer? 112 | 113 | ### Without Key 114 | 115 | ``` 116 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic --from-beginning 117 | ``` 118 | 119 | ### With Key 120 | 121 | ``` 122 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic --from-beginning -property "key.separator= - " --property "print.key=true" 123 | ``` 124 | 125 | ### With Consumer Group 126 | 127 | ``` 128 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic --group 129 | ``` 130 |

131 | 132 |
133 | 134 | ## Setting Up Multiple Kafka Brokers 135 | 136 | - The first step is to add a new **server.properties**. 137 | 138 | - We need to modify three properties to start up a multi broker set up. 139 | 140 | ``` 141 | broker.id= 142 | listeners=PLAINTEXT://localhost: 143 | log.dirs=/tmp/ 144 | auto.create.topics.enable=false 145 | ``` 146 | 147 | - Example config will be like below. 148 | 149 | ``` 150 | broker.id=1 151 | listeners=PLAINTEXT://localhost:9093 152 | log.dirs=/tmp/kafka-logs-1 153 | auto.create.topics.enable=false 154 | ``` 155 | 156 | ### Starting up the new Broker 157 | 158 | - Provide the new **server.properties** thats added. 159 | 160 | ``` 161 | ./kafka-server-start.sh ../config/server-1.properties 162 | ``` 163 | 164 | ``` 165 | ./kafka-server-start.sh ../config/server-2.properties 166 | ``` 167 | 168 | # Advanced Kafka CLI operations: 169 | 170 |
Mac 171 |

172 | 173 | ## List the topics in a cluster 174 | 175 | ``` 176 | ./kafka-topics.sh --zookeeper localhost:2181 --list 177 | ``` 178 | 179 | ## Describe topic 180 | 181 | - The below command can be used to describe all the topics. 182 | 183 | ``` 184 | ./kafka-topics.sh --zookeeper localhost:2181 --describe 185 | ``` 186 | 187 | - The below command can be used to describe a specific topic. 188 | 189 | ``` 190 | ./kafka-topics.sh --zookeeper localhost:2181 --describe --topic 191 | ``` 192 | 193 | ## Alter the min insync replica 194 | ``` 195 | ./kafka-topics.sh --alter --zookeeper localhost:2181 --topic library-events --config min.insync.replicas=2 196 | ``` 197 | 198 | ## Delete a topic 199 | 200 | ``` 201 | ./kafka-topics.sh --zookeeper localhost:2181 --delete --topic test-topic 202 | ``` 203 | ## How to view consumer groups 204 | 205 | ``` 206 | ./kafka-consumer-groups.sh --bootstrap-server localhost:9092 --list 207 | ``` 208 | 209 | ### Consumer Groups and their Offset 210 | 211 | ``` 212 | ./kafka-consumer-groups.sh --bootstrap-server localhost:9092 --describe --group console-consumer-27773 213 | ``` 214 | 215 | ## Viewing the Commit Log 216 | 217 | ``` 218 | ./kafka-run-class.sh kafka.tools.DumpLogSegments --deep-iteration --files /tmp/kafka-logs/test-topic-0/00000000000000000000.log 219 | ``` 220 | 221 | ## Setting the Minimum Insync Replica 222 | 223 | ``` 224 | ./kafka-configs.sh --alter --zookeeper localhost:2181 --entity-type topics --entity-name test-topic --add-config min.insync.replicas=2 225 | ``` 226 |

227 |
228 | 229 | 230 |
Windows 231 |

232 | 233 | - Make sure you are inside the **bin/windows** directory. 234 | 235 | ## List the topics in a cluster 236 | 237 | ``` 238 | kafka-topics.bat --zookeeper localhost:2181 --list 239 | ``` 240 | 241 | ## Describe topic 242 | 243 | - The below command can be used to describe all the topics. 244 | 245 | ``` 246 | kafka-topics.bat --zookeeper localhost:2181 --describe 247 | ``` 248 | 249 | - The below command can be used to describe a specific topic. 250 | 251 | ``` 252 | kafka-topics.bat --zookeeper localhost:2181 --describe --topic 253 | ``` 254 | 255 | ## Alter the min insync replica 256 | ``` 257 | kafka-topics.bat --alter --zookeeper localhost:2181 --topic library-events --config min.insync.replicas=2 258 | ``` 259 | 260 | 261 | ## Delete a topic 262 | 263 | ``` 264 | kafka-topics.bat --zookeeper localhost:2181 --delete --topic 265 | ``` 266 | 267 | 268 | ## How to view consumer groups 269 | 270 | ``` 271 | kafka-consumer-groups.bat --bootstrap-server localhost:9092 --list 272 | ``` 273 | 274 | ### Consumer Groups and their Offset 275 | 276 | ``` 277 | kafka-consumer-groups.bat --bootstrap-server localhost:9092 --describe --group console-consumer-27773 278 | ``` 279 | 280 | ## Viewing the Commit Log 281 | 282 | ``` 283 | kafka-run-class.bat kafka.tools.DumpLogSegments --deep-iteration --files /tmp/kafka-logs/test-topic-0/00000000000000000000.log 284 | ``` 285 |

286 |
287 | -------------------------------------------------------------------------------- /SetUpKafka3.md: -------------------------------------------------------------------------------- 1 | # Setting Up Kafka 3.0.0 2 | 3 |
Mac 4 |

5 | 6 | - Make sure you are navigated inside the bin directory. 7 | 8 | ## Start Zookeeper and Kafka Broker 9 | 10 | - Start up the Zookeeper. 11 | 12 | ``` 13 | ./zookeeper-server-start.sh ../config/zookeeper.properties 14 | ``` 15 | 16 | - Add the below properties in the server.properties 17 | 18 | ``` 19 | listeners=PLAINTEXT://localhost:9092 20 | auto.create.topics.enable=false 21 | ``` 22 | 23 | - Start up the Kafka Broker 24 | 25 | ``` 26 | ./kafka-server-start.sh ../config/server.properties 27 | ``` 28 | 29 | ## How to create a topic ? 30 | 31 | ``` 32 | ./kafka-topics.sh --create --topic test-topic --replication-factor 1 --partitions 4 --bootstrap-server localhost:9092 33 | ``` 34 | 35 | ## How to instantiate a Console Producer? 36 | 37 | ### Without Key 38 | 39 | ``` 40 | ./kafka-console-producer.sh --broker-list localhost:9092 --topic test-topic 41 | ``` 42 | 43 | ### With Key 44 | 45 | ``` 46 | ./kafka-console-producer.sh --broker-list localhost:9092 --topic test-topic --property "key.separator=-" --property "parse.key=true" 47 | ``` 48 | 49 | ## How to instantiate a Console Consumer? 50 | 51 | ### Without Key 52 | 53 | ``` 54 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test-topic --from-beginning 55 | ``` 56 | 57 | ### With Key 58 | 59 | ``` 60 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test-topic --from-beginning -property "key.separator= - " --property "print.key=true" 61 | ``` 62 | 63 | ### With Consumer Group 64 | 65 | ``` 66 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test-topic --group 67 | ``` 68 | 69 | ### Consume messages With Kafka Headers 70 | 71 | ``` 72 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic library-events.DLT --from-beginning --property print.headers=true --property print.timestamp=true 73 | ``` 74 | 75 |

76 | 77 |
78 | 79 |
Windows 80 |

81 | 82 | - Make sure you are inside the **bin/windows** directory. 83 | 84 | ## Start Zookeeper and Kafka Broker 85 | 86 | - Start up the Zookeeper. 87 | 88 | ``` 89 | zookeeper-server-start.bat ..\..\config\zookeeper.properties 90 | ``` 91 | 92 | - Start up the Kafka Broker. 93 | 94 | ``` 95 | kafka-server-start.bat ..\..\config\server.properties 96 | ``` 97 | 98 | ## How to create a topic ? 99 | 100 | ``` 101 | kafka-topics.bat --create --topic test-topic --replication-factor 1 --partitions 4 --bootstrap-server localhost:9092 102 | ``` 103 | 104 | ## How to instantiate a Console Producer? 105 | 106 | ### Without Key 107 | 108 | ``` 109 | kafka-console-producer.bat --broker-list localhost:9092 --topic test-topic 110 | ``` 111 | 112 | ### With Key 113 | 114 | ``` 115 | kafka-console-producer.bat --broker-list localhost:9092 --topic test-topic --property "key.separator=-" --property "parse.key=true" 116 | ``` 117 | 118 | ## How to instantiate a Console Consumer? 119 | 120 | ### Without Key 121 | 122 | ``` 123 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic --from-beginning 124 | ``` 125 | 126 | ### With Key 127 | 128 | ``` 129 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic --from-beginning -property "key.separator= - " --property "print.key=true" 130 | ``` 131 | 132 | ### With Consumer Group 133 | 134 | ``` 135 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic --group 136 | ``` 137 | 138 | ### Consume messages With Kafka Headers 139 | 140 | ``` 141 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic library-events.DLT --from-beginning --property print.headers=true --property print.timestamp=true 142 | ``` 143 | 144 |

145 | 146 |
147 | 148 | ## Setting Up Multiple Kafka Brokers 149 | 150 | - The first step is to add a new **server.properties**. 151 | 152 | - We need to modify three properties to start up a multi broker set up. 153 | 154 | ``` 155 | broker.id= 156 | listeners=PLAINTEXT://localhost: 157 | log.dirs=/tmp/ 158 | auto.create.topics.enable=false 159 | ``` 160 | 161 | - Example config will be like below. 162 | 163 | ``` 164 | broker.id=1 165 | listeners=PLAINTEXT://localhost:9093 166 | log.dirs=/tmp/kafka-logs-1 167 | auto.create.topics.enable=false 168 | ``` 169 | 170 | ### Starting up the new Broker 171 | 172 | - Provide the new **server.properties** thats added. 173 | 174 | ``` 175 | ./kafka-server-start.sh ../config/server-1.properties 176 | ``` 177 | 178 | ``` 179 | ./kafka-server-start.sh ../config/server-2.properties 180 | ``` 181 | 182 | # Advanced Kafka CLI operations: 183 | 184 |
Mac 185 |

186 | 187 | ## List the topics in a cluster 188 | 189 | ``` 190 | ./kafka-topics.sh --bootstrap-server localhost:9092 --list 191 | ``` 192 | 193 | ## Describe topic 194 | 195 | - The below command can be used to describe all the topics. 196 | 197 | ``` 198 | ./kafka-topics.sh --bootstrap-server localhost:9092 --describe 199 | ``` 200 | 201 | - The below command can be used to describe a specific topic. 202 | 203 | ``` 204 | ./kafka-topics.sh --bootstrap-server localhost:9092 --describe --topic 205 | ``` 206 | 207 | ## Alter the min insync replica 208 | ``` 209 | ./kafka-configs.sh --bootstrap-server localhost:9092 --entity-type topics --entity-name library-events --alter --add-config min.insync.replicas=2 210 | ``` 211 | 212 | ## Alter the partitions of a topic 213 | ``` 214 | ./kafka-topics.sh --bootstrap-server localhost:9092 --alter --topic test-topic --partitions 40 215 | ``` 216 | 217 | ## Delete a topic 218 | 219 | ``` 220 | ./kafka-topics.sh --bootstrap-server localhost:9092 --delete --topic test-topic 221 | ``` 222 | ## How to view consumer groups 223 | 224 | ``` 225 | ./kafka-consumer-groups.sh --bootstrap-server localhost:9092 --list 226 | ``` 227 | 228 | ### Consumer Groups and their Offset 229 | 230 | ``` 231 | ./kafka-consumer-groups.sh --bootstrap-server localhost:9092 --describe --group console-consumer-27773 232 | ``` 233 | 234 | ## Viewing the Commit Log 235 | 236 | ``` 237 | ./kafka-run-class.sh kafka.tools.DumpLogSegments --deep-iteration --files /tmp/kafka-logs/test-topic-0/00000000000000000000.log 238 | ``` 239 | 240 | ## Setting the Minimum Insync Replica 241 | 242 | ``` 243 | ./kafka-configs.sh --alter --bootstrap-server localhost:9092 --entity-type topics --entity-name test-topic --add-config min.insync.replicas=2 244 | ``` 245 |

246 |
247 | 248 | 249 |
Windows 250 |

251 | 252 | - Make sure you are inside the **bin/windows** directory. 253 | 254 | ## List the topics in a cluster 255 | 256 | ``` 257 | kafka-topics.bat --bootstrap-server localhost:9092 --list 258 | ``` 259 | 260 | ## Describe topic 261 | 262 | - The below command can be used to describe all the topics. 263 | 264 | ``` 265 | kafka-topics.bat --bootstrap-server localhost:9092 --describe 266 | ``` 267 | 268 | - The below command can be used to describe a specific topic. 269 | 270 | ``` 271 | kafka-topics.bat --bootstrap-server localhost:9092 --describe --topic 272 | ``` 273 | 274 | ## Alter the min insync replica 275 | ``` 276 | kafka-configs.bat --bootstrap-server localhost:9092 --entity-type topics --entity-name library-events --alter --add-config min.insync.replicas=2 277 | ``` 278 | ## Alter the partitions of a topic 279 | ``` 280 | kafka-configs.bat --bootstrap-server localhost:9092 --alter --topic test-topic --partitions 40 281 | ``` 282 | 283 | ## Delete a topic 284 | 285 | ``` 286 | kafka-topics.bat --bootstrap-server localhost:9092 --delete --topic 287 | ``` 288 | 289 | 290 | ## How to view consumer groups 291 | 292 | ``` 293 | kafka-consumer-groups.bat --bootstrap-server localhost:9092 --list 294 | ``` 295 | 296 | ### Consumer Groups and their Offset 297 | 298 | ``` 299 | kafka-consumer-groups.bat --bootstrap-server localhost:9092 --describe --group console-consumer-27773 300 | ``` 301 | 302 | ## Viewing the Commit Log 303 | 304 | ``` 305 | kafka-run-class.bat kafka.tools.DumpLogSegments --deep-iteration --files /tmp/kafka-logs/test-topic-0/00000000000000000000.log 306 | ``` 307 |

308 |
309 | -------------------------------------------------------------------------------- /library-events-consumer/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/** 6 | !**/src/test/** 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | .DS_Store 17 | 18 | ### IntelliJ IDEA ### 19 | .idea 20 | *.iws 21 | *.iml 22 | *.ipr 23 | out/ 24 | 25 | ### NetBeans ### 26 | /nbproject/private/ 27 | /nbbuild/ 28 | /dist/ 29 | /nbdist/ 30 | /.nb-gradle/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /library-events-consumer/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'org.springframework.boot' version '2.6.5' 3 | id 'io.spring.dependency-management' version '1.0.11.RELEASE' 4 | id 'java' 5 | } 6 | 7 | group = 'com.learnkafka' 8 | version = '0.0.1-SNAPSHOT' 9 | sourceCompatibility = '11' 10 | 11 | configurations { 12 | compileOnly { 13 | extendsFrom annotationProcessor 14 | } 15 | } 16 | 17 | repositories { 18 | mavenCentral() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-data-jpa' 23 | implementation 'org.springframework.boot:spring-boot-starter-web' 24 | implementation 'org.springframework.kafka:spring-kafka' 25 | implementation 'org.springframework.boot:spring-boot-starter-validation' 26 | 27 | compileOnly 'org.projectlombok:lombok' 28 | runtimeOnly 'com.h2database:h2' 29 | annotationProcessor 'org.projectlombok:lombok' 30 | testImplementation('org.springframework.boot:spring-boot-starter-test') { 31 | exclude group: 'org.junit.vintage', module: 'junit-vintage-engine' 32 | } 33 | testImplementation 'org.springframework.kafka:spring-kafka-test' 34 | } 35 | 36 
| sourceSets{ 37 | test { 38 | java.srcDirs = ['src/test/java/unit', 'src/test/java/intg'] 39 | } 40 | } 41 | 42 | 43 | test { 44 | useJUnitPlatform() 45 | } 46 | -------------------------------------------------------------------------------- /library-events-consumer/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/kafka-for-developers-using-spring-boot/c617a03d377f72fa63f191be2517dee22099c141/library-events-consumer/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /library-events-consumer/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Wed Jan 08 05:18:41 CST 2020 2 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.0-all.zip 3 | distributionBase=GRADLE_USER_HOME 4 | distributionPath=wrapper/dists 5 | zipStorePath=wrapper/dists 6 | zipStoreBase=GRADLE_USER_HOME 7 | -------------------------------------------------------------------------------- /library-events-consumer/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Attempt to set APP_HOME 10 | # Resolve links: $0 may be a link 11 | PRG="$0" 12 | # Need this for relative symlinks. 
13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS="" 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 34 | MAX_FD="maximum" 35 | 36 | warn () { 37 | echo "$*" 38 | } 39 | 40 | die () { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 
89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? -ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) 
set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Escape application args 158 | save () { 159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 160 | echo " " 161 | } 162 | APP_ARGS=$(save "$@") 163 | 164 | # Collect all arguments for the java command, following the shell quoting and substitution rules 165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 166 | 167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 169 | cd "$(dirname "$0")" 170 | fi 171 | 172 | exec "$JAVACMD" "$@" 173 | -------------------------------------------------------------------------------- /library-events-consumer/gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set DIRNAME=%~dp0 12 | if "%DIRNAME%" == "" set DIRNAME=. 
13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS= 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | 53 | :win9xME_args 54 | @rem Slurp the command line arguments. 55 | set CMD_LINE_ARGS= 56 | set _SKIP=2 57 | 58 | :win9xME_args_slurp 59 | if "x%~1" == "x" goto execute 60 | 61 | set CMD_LINE_ARGS=%* 62 | 63 | :execute 64 | @rem Setup the command line 65 | 66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 67 | 68 | @rem Execute Gradle 69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 70 | 71 | :end 72 | @rem End local scope for the variables with windows NT shell 73 | if "%ERRORLEVEL%"=="0" goto mainEnd 74 | 75 | :fail 76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 77 | rem the _cmd.exe /c_ return code! 
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 79 | exit /b 1 80 | 81 | :mainEnd 82 | if "%OS%"=="Windows_NT" endlocal 83 | 84 | :omega 85 | -------------------------------------------------------------------------------- /library-events-consumer/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'library-events-consumer' 2 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/LibraryEventsConsumerApplication.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | import org.springframework.scheduling.annotation.EnableScheduling; 6 | 7 | @SpringBootApplication 8 | @EnableScheduling 9 | public class LibraryEventsConsumerApplication { 10 | 11 | public static void main(String[] args) { 12 | SpringApplication.run(LibraryEventsConsumerApplication.class, args); 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/config/LibraryEventsConsumerConfig.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.config; 2 | 3 | import com.learnkafka.service.FailureService; 4 | import com.learnkafka.service.LibraryEventsService; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.common.TopicPartition; 8 | import org.springframework.beans.factory.ObjectProvider; 9 | import org.springframework.beans.factory.annotation.Autowired; 10 | import org.springframework.beans.factory.annotation.Value; 11 | import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; 12 | import 
import org.springframework.boot.autoconfigure.kafka.ConcurrentKafkaListenerContainerFactoryConfigurer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.dao.RecoverableDataAccessException;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.*;
import org.springframework.kafka.support.ExponentialBackOffWithMaxRetries;
import org.springframework.util.backoff.FixedBackOff;

import java.util.List;

/**
 * Kafka consumer configuration.
 *
 * Wires a concurrent listener container factory with a {@link DefaultErrorHandler}
 * that retries transient failures with a back-off and routes exhausted records to
 * either a RETRY topic (recoverable failures) or a dead-letter topic (everything else).
 */
@Configuration
@EnableKafka
@Slf4j
public class LibraryEventsConsumerConfig {

    // Status values persisted with failed records (see FailureService / RetryScheduler).
    public static final String RETRY = "RETRY";
    public static final String SUCCESS = "SUCCESS";
    public static final String DEAD = "DEAD";

    @Autowired
    LibraryEventsService libraryEventsService;

    @Autowired
    KafkaProperties kafkaProperties;

    @Autowired
    KafkaTemplate<Integer, String> kafkaTemplate;

    @Autowired
    FailureService failureService;

    @Value("${topics.retry:library-events.RETRY}")
    private String retryTopic;

    @Value("${topics.dlt:library-events.DLT}")
    private String deadLetterTopic;

    /**
     * Recoverer that republishes a failed record: a RecoverableDataAccessException
     * cause (transient DB issue) goes to the retry topic, anything else to the DLT.
     * The original record's partition is preserved.
     */
    public DeadLetterPublishingRecoverer publishingRecoverer() {
        return new DeadLetterPublishingRecoverer(kafkaTemplate,
                (failedRecord, exception) -> {
                    log.error("Exception in publishingRecoverer : {} ", exception.getMessage(), exception);
                    String targetTopic = exception.getCause() instanceof RecoverableDataAccessException
                            ? retryTopic
                            : deadLetterTopic;
                    return new TopicPartition(targetTopic, failedRecord.partition());
                });
    }

    /**
     * Alternative in-process recoverer (persist-and-retry via FailureService).
     * Kept for reference; not currently wired into the error handler.
     */
    ConsumerRecordRecoverer consumerRecordRecoverer = (failedRecord, exception) -> {
        log.error("Exception is : {} Failed Record : {} ", exception, failedRecord);
        if (exception.getCause() instanceof RecoverableDataAccessException) {
            log.info("Inside the recoverable logic");
            //Add any Recovery Code here.
            //failureService.saveFailedRecord((ConsumerRecord) record, exception, RETRY);
        } else {
            log.info("Inside the non recoverable logic and skipping the record : {}", failedRecord);
        }
    };

    /**
     * Error handler: fixed back-off (2 retries, 1s apart), IllegalArgumentException
     * is never retried, and each retry attempt is logged via a RetryListener.
     */
    public DefaultErrorHandler errorHandler() {

        // Fixed local-name typo: was "exceptiopnToIgnorelist".
        var exceptionsToIgnoreList = List.of(
                IllegalArgumentException.class
        );

        // Exponential alternative to the fixed back-off below (not currently used).
        ExponentialBackOffWithMaxRetries expBackOff = new ExponentialBackOffWithMaxRetries(2);
        expBackOff.setInitialInterval(1_000L);
        expBackOff.setMultiplier(2.0);
        expBackOff.setMaxInterval(2_000L);

        var fixedBackOff = new FixedBackOff(1000L, 2L);

        var defaultErrorHandler = new DefaultErrorHandler(
                //consumerRecordRecoverer
                publishingRecoverer(),
                fixedBackOff
                //expBackOff
        );

        // These exception types fail fast without any retry attempt.
        exceptionsToIgnoreList.forEach(defaultErrorHandler::addNotRetryableExceptions);

        defaultErrorHandler.setRetryListeners(
                (failedRecord, ex, deliveryAttempt) ->
                        log.info("Failed Record in Retry Listener exception : {} , deliveryAttempt : {} ",
                                ex.getMessage(), deliveryAttempt));

        return defaultErrorHandler;
    }

    /**
     * Listener container factory with 3 concurrent consumers per listener,
     * Boot-configured consumer properties, and the custom error handler above.
     * Restored the generic parameters that were stripped in this copy.
     */
    @Bean
    @ConditionalOnMissingBean(name = "kafkaListenerContainerFactory")
    ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ObjectProvider<ConsumerFactory<Object, Object>> kafkaConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
        configurer.configure(factory, kafkaConsumerFactory
                .getIfAvailable(() -> new DefaultKafkaConsumerFactory<>(this.kafkaProperties.buildConsumerProperties())));
        factory.setConcurrency(3);
        factory.setCommonErrorHandler(errorHandler());
        return factory;
    }
}

// ---------------------------------------------------------------------------
// library-events-consumer/src/main/java/com/learnkafka/config/LibraryEventsConsumerConfigLegacy.java
// ---------------------------------------------------------------------------
package com.learnkafka.config;

import com.learnkafka.service.LibraryEventsService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.kafka.ConcurrentKafkaListenerContainerFactoryConfigurer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.dao.RecoverableDataAccessException;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.retry.RetryPolicy;
import org.springframework.retry.backoff.FixedBackOffPolicy;
import
org.springframework.retry.policy.SimpleRetryPolicy; 21 | import org.springframework.retry.support.RetryTemplate; 22 | 23 | import java.util.Arrays; 24 | import java.util.HashMap; 25 | import java.util.Map; 26 | 27 | //@Configuration 28 | //@EnableKafka 29 | @Slf4j 30 | public class LibraryEventsConsumerConfigLegacy { 31 | 32 | @Autowired 33 | LibraryEventsService libraryEventsService; 34 | 35 | @Autowired 36 | KafkaProperties kafkaProperties; 37 | 38 | @Bean 39 | @ConditionalOnMissingBean(name = "kafkaListenerContainerFactory") 40 | ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory( 41 | ConcurrentKafkaListenerContainerFactoryConfigurer configurer, 42 | ObjectProvider> kafkaConsumerFactory) { 43 | ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); 44 | configurer.configure(factory, kafkaConsumerFactory 45 | .getIfAvailable(() -> new DefaultKafkaConsumerFactory<>(this.kafkaProperties.buildConsumerProperties()))); 46 | factory.setConcurrency(3); 47 | // factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL); 48 | factory.setErrorHandler(((thrownException, data) -> { 49 | log.info("Exception in consumerConfig is {} and the record is {}", thrownException.getMessage(), data); 50 | //persist 51 | })); 52 | factory.setRetryTemplate(retryTemplate()); 53 | factory.setRecoveryCallback((context -> { 54 | if(context.getLastThrowable().getCause() instanceof RecoverableDataAccessException){ 55 | //invoke recovery logic 56 | log.info("Inside the recoverable logic"); 57 | Arrays.asList(context.attributeNames()) 58 | .forEach(attributeName -> { 59 | log.info("Attribute name is : {} ", attributeName); 60 | log.info("Attribute Value is : {} ", context.getAttribute(attributeName)); 61 | }); 62 | 63 | ConsumerRecord consumerRecord = (ConsumerRecord) context.getAttribute("record"); 64 | libraryEventsService.handleRecovery(consumerRecord); 65 | }else{ 66 | log.info("Inside the non recoverable 
logic"); 67 | throw new RuntimeException(context.getLastThrowable().getMessage()); 68 | } 69 | 70 | 71 | return null; 72 | })); 73 | return factory; 74 | } 75 | 76 | /* @Bean 77 | ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory( 78 | ConcurrentKafkaListenerContainerFactoryConfigurer configurer, 79 | ConsumerFactory kafkaConsumerFactory) { 80 | ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); 81 | configurer.configure(factory, kafkaConsumerFactory); 82 | factory.setConcurrency(3); 83 | // factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL); 84 | factory.setErrorHandler(((thrownException, data) -> { 85 | log.info("Exception in consumerConfig is {} and the record is {}", thrownException.getMessage(), data); 86 | //persist 87 | })); 88 | factory.setRetryTemplate(retryTemplate()); 89 | factory.setRecoveryCallback((context -> { 90 | if(context.getLastThrowable().getCause() instanceof RecoverableDataAccessException){ 91 | //invoke recovery logic 92 | log.info("Inside the recoverable logic"); 93 | Arrays.asList(context.attributeNames()) 94 | .forEach(attributeName -> { 95 | log.info("Attribute name is : {} ", attributeName); 96 | log.info("Attribute Value is : {} ", context.getAttribute(attributeName)); 97 | }); 98 | 99 | ConsumerRecord consumerRecord = (ConsumerRecord) context.getAttribute("record"); 100 | libraryEventsService.handleRecovery(consumerRecord); 101 | }else{ 102 | log.info("Inside the non recoverable logic"); 103 | throw new RuntimeException(context.getLastThrowable().getMessage()); 104 | } 105 | 106 | 107 | return null; 108 | })); 109 | return factory; 110 | }*/ 111 | 112 | private RetryTemplate retryTemplate() { 113 | 114 | FixedBackOffPolicy fixedBackOffPolicy = new FixedBackOffPolicy(); 115 | fixedBackOffPolicy.setBackOffPeriod(1000); 116 | RetryTemplate retryTemplate = new RetryTemplate(); 117 | retryTemplate.setRetryPolicy(simpleRetryPolicy()); 118 | 
retryTemplate.setBackOffPolicy(fixedBackOffPolicy); 119 | return retryTemplate; 120 | } 121 | 122 | private RetryPolicy simpleRetryPolicy() { 123 | 124 | /*SimpleRetryPolicy simpleRetryPolicy = new SimpleRetryPolicy(); 125 | simpleRetryPolicy.setMaxAttempts(3);*/ 126 | Map, Boolean> exceptionsMap = new HashMap<>(); 127 | exceptionsMap.put(IllegalArgumentException.class, false); 128 | exceptionsMap.put(RecoverableDataAccessException.class, true); 129 | SimpleRetryPolicy simpleRetryPolicy = new SimpleRetryPolicy(3,exceptionsMap,true); 130 | return simpleRetryPolicy; 131 | } 132 | } -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/consumer/LibraryEventsConsumer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.learnkafka.service.LibraryEventsService; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.kafka.annotation.KafkaHandler; 9 | import org.springframework.kafka.annotation.KafkaListener; 10 | import org.springframework.stereotype.Component; 11 | 12 | @Component 13 | @Slf4j 14 | //@KafkaListener 15 | public class LibraryEventsConsumer { 16 | 17 | @Autowired 18 | private LibraryEventsService libraryEventsService; 19 | 20 | @KafkaListener( 21 | topics = {"library-events"} 22 | , autoStartup = "${libraryListener.startup:true}" 23 | , groupId = "library-events-listener-group") 24 | public void onMessage(ConsumerRecord consumerRecord) throws JsonProcessingException { 25 | 26 | log.info("ConsumerRecord : {} ", consumerRecord); 27 | libraryEventsService.processLibraryEvent(consumerRecord); 28 | 29 | } 30 | } 31 | 
-------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/consumer/LibraryEventsConsumerManualOffset.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.springframework.kafka.annotation.KafkaListener; 6 | import org.springframework.kafka.listener.AcknowledgingMessageListener; 7 | import org.springframework.kafka.support.Acknowledgment; 8 | import org.springframework.stereotype.Component; 9 | 10 | //@Component 11 | @Slf4j 12 | public class LibraryEventsConsumerManualOffset implements AcknowledgingMessageListener { 13 | 14 | @Override 15 | @KafkaListener(topics = {"library-event s"}) 16 | public void onMessage(ConsumerRecord consumerRecord, Acknowledgment acknowledgment) { 17 | log.info("ConsumerRecord in Manual Offset Consumer: {} ", consumerRecord ); 18 | acknowledgment.acknowledge(); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/consumer/LibraryEventsRetryConsumer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.learnkafka.service.LibraryEventsService; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.kafka.annotation.KafkaListener; 9 | import org.springframework.stereotype.Component; 10 | 11 | @Component 12 | @Slf4j 13 | public class LibraryEventsRetryConsumer { 14 | 15 | @Autowired 16 | private LibraryEventsService libraryEventsService; 17 | 18 | @KafkaListener(topics = {"${topics.retry}"} 19 | , 
autoStartup = "${retryListener.startup:true}" 20 | , groupId = "retry-listener-group") 21 | public void onMessage(ConsumerRecord consumerRecord) throws JsonProcessingException { 22 | 23 | log.info("ConsumerRecord in Retry Consumer: {} ", consumerRecord ); 24 | libraryEventsService.processLibraryEvent(consumerRecord); 25 | 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/entity/Book.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.entity; 2 | 3 | 4 | import lombok.AllArgsConstructor; 5 | import lombok.Builder; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | import javax.persistence.Entity; 10 | import javax.persistence.Id; 11 | import javax.persistence.JoinColumn; 12 | import javax.persistence.OneToOne; 13 | import javax.validation.constraints.NotBlank; 14 | import javax.validation.constraints.NotNull; 15 | 16 | @AllArgsConstructor 17 | @NoArgsConstructor 18 | @Data 19 | @Builder 20 | @Entity 21 | public class Book { 22 | @Id 23 | private Integer bookId; 24 | private String bookName; 25 | private String bookAuthor; 26 | @OneToOne 27 | @JoinColumn(name = "libraryEventId") 28 | private LibraryEvent libraryEvent; 29 | } 30 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/entity/FailureRecord.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.entity; 2 | 3 | 4 | import lombok.AllArgsConstructor; 5 | import lombok.Builder; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | import javax.persistence.*; 10 | 11 | @AllArgsConstructor 12 | @NoArgsConstructor 13 | @Data 14 | @Builder 15 | @Entity 16 | public class FailureRecord { 17 | @Id 18 | @GeneratedValue 19 | private Integer bookId; 20 | private String topic; 21 | 
private Integer key; 22 | private String errorRecord; 23 | private Integer partition; 24 | private Long offset_value; 25 | private String exception; 26 | private String status; 27 | 28 | } 29 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/entity/LibraryEvent.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.entity; 2 | 3 | 4 | import lombok.*; 5 | 6 | import javax.persistence.*; 7 | import javax.validation.Valid; 8 | import javax.validation.constraints.NotNull; 9 | 10 | @AllArgsConstructor 11 | @NoArgsConstructor 12 | @Data 13 | @Builder 14 | @Entity 15 | public class LibraryEvent { 16 | 17 | @Id 18 | @GeneratedValue 19 | private Integer libraryEventId; 20 | @Enumerated(EnumType.STRING) 21 | private LibraryEventType libraryEventType; 22 | @OneToOne(mappedBy = "libraryEvent", cascade = {CascadeType.ALL}) 23 | @ToString.Exclude 24 | private Book book; 25 | 26 | } 27 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/entity/LibraryEventType.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.entity; 2 | 3 | public enum LibraryEventType { 4 | NEW, 5 | UPDATE 6 | } 7 | -------------------------------------------------------------------------------- /library-events-consumer/src/main/java/com/learnkafka/jpa/FailureRecordRepository.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.jpa; 2 | 3 | import com.learnkafka.entity.FailureRecord; 4 | import org.springframework.data.repository.CrudRepository; 5 | 6 | import java.util.List; 7 | 8 | public interface FailureRecordRepository extends CrudRepository { 9 | 10 | List findAllByStatus(String status); 11 | } 12 | 
// ---------------------------------------------------------------------------
// library-events-consumer/src/main/java/com/learnkafka/jpa/LibraryEventsRepository.java
// ---------------------------------------------------------------------------
package com.learnkafka.jpa;

import com.learnkafka.entity.LibraryEvent;
import org.springframework.data.repository.CrudRepository;

/**
 * Repository for library events. Restored the generic parameters that were
 * stripped in this copy.
 */
public interface LibraryEventsRepository extends CrudRepository<LibraryEvent, Integer> {
}

// ---------------------------------------------------------------------------
// library-events-consumer/src/main/java/com/learnkafka/scheduler/RetryScheduler.java
// ---------------------------------------------------------------------------
package com.learnkafka.scheduler;

import com.learnkafka.config.LibraryEventsConsumerConfig;
import com.learnkafka.entity.FailureRecord;
import com.learnkafka.jpa.FailureRecordRepository;
import com.learnkafka.service.LibraryEventsService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

/**
 * Periodically re-processes records persisted with RETRY status.
 * Disabled by default (the @Component annotation is commented out).
 * Removed unused imports (ObjectMapper, JsonProcessingException, etc.).
 */
//@Component
@Slf4j
public class RetryScheduler {

    @Autowired
    LibraryEventsService libraryEventsService;

    @Autowired
    FailureRecordRepository failureRecordRepository;

    /** Every 10s: rebuild each RETRY record as a ConsumerRecord and re-process it. */
    @Scheduled(fixedRate = 10000)
    public void retryFailedRecords() {

        log.info("Retrying Failed Records Started!");
        var status = LibraryEventsConsumerConfig.RETRY;
        failureRecordRepository.findAllByStatus(status)
                .forEach(failureRecord -> {
                    try {
                        var consumerRecord = buildConsumerRecord(failureRecord);
                        libraryEventsService.processLibraryEvent(consumerRecord);
                        // libraryEventsConsumer.onMessage(consumerRecord); // bypasses the recovery code in the consumerConfig
                        failureRecord.setStatus(LibraryEventsConsumerConfig.SUCCESS);
                        // BUG FIX: the status change was never persisted, so the same
                        // record would be retried forever on every scheduler tick.
                        failureRecordRepository.save(failureRecord);
                    } catch (Exception e) {
                        log.error("Exception in retryFailedRecords : ", e);
                    }
                });
    }

    /** Reconstructs the original Kafka record from the persisted failure row. */
    private ConsumerRecord<Integer, String> buildConsumerRecord(FailureRecord failureRecord) {
        return new ConsumerRecord<>(failureRecord.getTopic(),
                failureRecord.getPartition(), failureRecord.getOffset_value(), failureRecord.getKey(),
                failureRecord.getErrorRecord());
    }
}

// ---------------------------------------------------------------------------
// library-events-consumer/src/main/java/com/learnkafka/service/FailureService.java
// ---------------------------------------------------------------------------
package com.learnkafka.service;

import com.learnkafka.entity.FailureRecord;
import com.learnkafka.jpa.FailureRecordRepository;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.stereotype.Service;

/** Persists failed consumer records so the RetryScheduler can replay them. */
@Service
public class FailureService {

    private final FailureRecordRepository failureRecordRepository;

    public FailureService(FailureRecordRepository failureRecordRepository) {
        this.failureRecordRepository = failureRecordRepository;
    }

    /**
     * Saves topic/partition/offset/key/value of the failed record together with
     * the failure reason and the given status (RETRY / DEAD).
     */
    public void saveFailedRecord(ConsumerRecord<Integer, String> consumerRecord, Exception exception, String recordStatus) {
        // BUG FIX: exception.getCause() can be null (e.g. for exceptions thrown
        // directly, without a wrapped cause), which previously threw an NPE here.
        var cause = exception.getCause();
        var reason = cause != null ? cause.getMessage() : exception.getMessage();
        var failureRecord = new FailureRecord(null, consumerRecord.topic(), consumerRecord.key(),
                consumerRecord.value(), consumerRecord.partition(), consumerRecord.offset(),
                reason,
                recordStatus);

        failureRecordRepository.save(failureRecord);
    }
}
// ---------------------------------------------------------------------------
// library-events-consumer/src/main/java/com/learnkafka/service/LibraryEventsService.java
// ---------------------------------------------------------------------------
package com.learnkafka.service;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.learnkafka.entity.LibraryEvent;
import com.learnkafka.jpa.LibraryEventsRepository;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.RecoverableDataAccessException;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

import java.util.Optional;

/**
 * Deserializes library events from Kafka records, validates them and persists
 * them via JPA. Restored the generic parameters stripped in this copy.
 */
@Service
@Slf4j
public class LibraryEventsService {

    @Autowired
    ObjectMapper objectMapper;

    @Autowired
    KafkaTemplate<Integer, String> kafkaTemplate;

    @Autowired
    private LibraryEventsRepository libraryEventsRepository;

    /**
     * Parses the record value as a LibraryEvent and saves it (NEW) or validates
     * then saves it (UPDATE).
     *
     * @throws JsonProcessingException if the record value is not valid JSON
     * @throws RecoverableDataAccessException for the sentinel id 999, used to
     *         simulate a transient failure that the error handler should retry
     * @throws IllegalArgumentException for UPDATE events with a missing/unknown id
     */
    public void processLibraryEvent(ConsumerRecord<Integer, String> consumerRecord) throws JsonProcessingException {
        LibraryEvent libraryEvent = objectMapper.readValue(consumerRecord.value(), LibraryEvent.class);
        log.info("libraryEvent : {} ", libraryEvent);

        // Test hook: id 999 deliberately triggers a retryable failure.
        if (libraryEvent.getLibraryEventId() != null && (libraryEvent.getLibraryEventId() == 999)) {
            throw new RecoverableDataAccessException("Temporary Network Issue");
        }

        switch (libraryEvent.getLibraryEventType()) {
            case NEW:
                save(libraryEvent);
                break;
            case UPDATE:
                //validate the libraryevent
                validate(libraryEvent);
                save(libraryEvent);
                break;
            default:
                log.info("Invalid Library Event Type");
        }
    }

    /** Ensures an UPDATE event carries an id that exists in the database. */
    private void validate(LibraryEvent libraryEvent) {
        if (libraryEvent.getLibraryEventId() == null) {
            throw new IllegalArgumentException("Library Event Id is missing");
        }

        Optional<LibraryEvent> libraryEventOptional = libraryEventsRepository.findById(libraryEvent.getLibraryEventId());
        if (!libraryEventOptional.isPresent()) {
            throw new IllegalArgumentException("Not a valid library Event");
        }
        log.info("Validation is successful for the library Event : {} ", libraryEventOptional.get());
    }

    /** Links the book back to its event (owning side) and persists the graph. */
    private void save(LibraryEvent libraryEvent) {
        libraryEvent.getBook().setLibraryEvent(libraryEvent);
        libraryEventsRepository.save(libraryEvent);
        // Fixed log-message typo: "libary" -> "library".
        log.info("Successfully Persisted the library Event {} ", libraryEvent);
    }

    /** Republishes a record to the template's default topic as a recovery step. */
    public void handleRecovery(ConsumerRecord<Integer, String> consumerRecord) {

        Integer key = consumerRecord.key();
        String message = consumerRecord.value();

        ListenableFuture<SendResult<Integer, String>> listenableFuture = kafkaTemplate.sendDefault(key, message);
        listenableFuture.addCallback(new ListenableFutureCallback<SendResult<Integer, String>>() {
            @Override
            public void onFailure(Throwable ex) {
                handleFailure(key, message, ex);
            }

            @Override
            public void onSuccess(SendResult<Integer, String> result) {
                handleSuccess(key, message, result);
            }
        });
    }

    private void handleFailure(Integer key, String value, Throwable ex) {
        log.error("Error Sending the Message and the exception is {}", ex.getMessage());
        // Simplified: the original threw ex only to catch it on the next line,
        // which is equivalent to logging the message directly.
        log.error("Error in OnFailure: {}", ex.getMessage());
    }

    private void handleSuccess(Integer key, String value, SendResult<Integer, String> result) {
        log.info("Message Sent SuccessFully for the key : {} and the value is {} , partition is {}",
                key, value, result.getRecordMetadata().partition());
    }
}
-------------------------------------------------------------------------------- /library-events-consumer/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | profiles: 3 | active: local 4 | server: 5 | port: 8081 6 | topics: 7 | retry: 'library-events.RETRY' 8 | dlt: 'library-events.DLT' 9 | 10 | --- 11 | spring: 12 | config: 13 | activate: 14 | on-profile: local 15 | kafka: 16 | template: 17 | default-topic: library-events 18 | consumer: 19 | bootstrap-servers: localhost:9092,localhost:9093,localhost:9094 20 | key-deserializer: org.apache.kafka.common.serialization.IntegerDeserializer 21 | value-deserializer: org.apache.kafka.common.serialization.StringDeserializer 22 | group-id: library-events-listener-group 23 | auto-offset-reset: latest 24 | producer: 25 | bootstrap-servers: 26 | # - localhost:9092,localhost:9093,localhost:9094 27 | - localhost:9092 28 | key-serializer: org.apache.kafka.common.serialization.IntegerSerializer 29 | value-serializer: org.apache.kafka.common.serialization.StringSerializer 30 | datasource: 31 | url: jdbc:h2:mem:testdb 32 | driver-class-name: org.h2.Driver 33 | username: postgres 34 | password: password 35 | jpa: 36 | database: h2 37 | database-platform: org.hibernate.dialect.H2Dialect 38 | generate-ddl: true 39 | h2: 40 | console: 41 | enabled: true 42 | 43 | --- 44 | 45 | spring: 46 | config: 47 | activate: 48 | on-profile: nonprod 49 | kafka: 50 | template: 51 | default-topic: library-events 52 | consumer: 53 | bootstrap-servers: localhost:9095,localhost:9096,localhost:9097 54 | key-deserializer: org.apache.kafka.common.serialization.IntegerDeserializer 55 | value-deserializer: org.apache.kafka.common.serialization.StringDeserializer 56 | group-id: library-events-listener-group 57 | ssl: 58 | trust-store-location: file:/Dilip/udemy/kafka-for-developers-using-spring-boot/ssl/client.truststore.jks 59 | trust-store-password: password 60 | 
key-store-location: file:/Dilip/udemy/kafka-for-developers-using-spring-boot/ssl/client.keystore.jks 61 | key-store-password: password 62 | producer: 63 | bootstrap-servers: 64 | - localhost:9095,localhost:9096,localhost:9097 65 | key-serializer: org.apache.kafka.common.serialization.IntegerSerializer 66 | value-serializer: org.apache.kafka.common.serialization.StringSerializer 67 | properties: 68 | security: 69 | protocol: SSL 70 | ssl.endpoint.identification.algorithm: 71 | datasource: 72 | url: jdbc:h2:mem:testdb 73 | driver-class-name: org.h2.Driver 74 | jpa: 75 | database: h2 76 | database-platform: org.hibernate.dialect.H2Dialect 77 | generate-ddl: true 78 | h2: 79 | console: 80 | enabled: true 81 | 82 | --- 83 | 84 | spring: 85 | config: 86 | activate: 87 | on-profile: prod 88 | kafka: 89 | consumer: 90 | bootstrap-servers: localhost:9092,localhost:9093,localhost:9094 91 | key-deserializer: org.apache.kafka.common.serialization.IntegerDeserializer 92 | value-deserializer: org.apache.kafka.common.serialization.StringDeserializer 93 | group-id: library-events-listener-group 94 | 95 | 96 | -------------------------------------------------------------------------------- /library-events-consumer/src/test/java/intg/com/learnkafka/consumer/LibraryEventsConsumerIntegrationTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.learnkafka.entity.Book; 6 | import com.learnkafka.entity.LibraryEvent; 7 | import com.learnkafka.entity.LibraryEventType; 8 | import com.learnkafka.jpa.FailureRecordRepository; 9 | import com.learnkafka.jpa.LibraryEventsRepository; 10 | import com.learnkafka.service.LibraryEventsService; 11 | import org.apache.kafka.clients.consumer.Consumer; 12 | import org.apache.kafka.clients.consumer.ConsumerConfig; 13 | import 
org.apache.kafka.clients.consumer.ConsumerRecord; 14 | import org.apache.kafka.clients.consumer.ConsumerRecords; 15 | import org.apache.kafka.common.serialization.IntegerDeserializer; 16 | import org.apache.kafka.common.serialization.StringDeserializer; 17 | import org.junit.jupiter.api.*; 18 | import org.mockito.Mockito; 19 | import org.springframework.beans.factory.annotation.Autowired; 20 | import org.springframework.beans.factory.annotation.Value; 21 | import org.springframework.boot.test.context.SpringBootTest; 22 | import org.springframework.boot.test.mock.mockito.SpyBean; 23 | import org.springframework.kafka.config.KafkaListenerEndpointRegistry; 24 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 25 | import org.springframework.kafka.core.KafkaTemplate; 26 | import org.springframework.kafka.listener.MessageListenerContainer; 27 | import org.springframework.kafka.test.EmbeddedKafkaBroker; 28 | import org.springframework.kafka.test.context.EmbeddedKafka; 29 | import org.springframework.kafka.test.utils.ContainerTestUtils; 30 | import org.springframework.kafka.test.utils.KafkaTestUtils; 31 | import org.springframework.test.annotation.DirtiesContext; 32 | import org.springframework.test.context.TestPropertySource; 33 | 34 | import java.util.*; 35 | import java.util.concurrent.CountDownLatch; 36 | import java.util.concurrent.ExecutionException; 37 | import java.util.concurrent.TimeUnit; 38 | import java.util.stream.Collectors; 39 | import java.util.stream.Stream; 40 | 41 | import static org.junit.jupiter.api.Assertions.assertEquals; 42 | import static org.junit.jupiter.api.Assertions.assertFalse; 43 | import static org.mockito.ArgumentMatchers.isA; 44 | import static org.mockito.Mockito.*; 45 | 46 | 47 | @SpringBootTest 48 | @EmbeddedKafka(topics = {"library-events" 49 | , "library-events.RETRY" 50 | , "library-events.DLT" 51 | } 52 | , partitions = 3) 53 | @TestPropertySource(properties = 
{"spring.kafka.producer.bootstrap-servers=${spring.embedded.kafka.brokers}" 54 | , "spring.kafka.consumer.bootstrap-servers=${spring.embedded.kafka.brokers}" 55 | , "retryListener.startup=false"}) 56 | public class LibraryEventsConsumerIntegrationTest { 57 | 58 | 59 | @Value("${topics.retry}") 60 | private String retryTopic; 61 | 62 | @Value("${topics.dlt}") 63 | private String deadLetterTopic; 64 | 65 | @Autowired 66 | EmbeddedKafkaBroker embeddedKafkaBroker; 67 | 68 | @Autowired 69 | KafkaTemplate kafkaTemplate; 70 | 71 | @Autowired 72 | KafkaListenerEndpointRegistry endpointRegistry; 73 | 74 | @SpyBean 75 | LibraryEventsConsumer libraryEventsConsumerSpy; 76 | 77 | @SpyBean 78 | LibraryEventsService libraryEventsServiceSpy; 79 | 80 | @Autowired 81 | LibraryEventsRepository libraryEventsRepository; 82 | 83 | 84 | @Autowired 85 | FailureRecordRepository failureRecordRepository; 86 | 87 | @Autowired 88 | ObjectMapper objectMapper; 89 | 90 | private Consumer consumer; 91 | 92 | @BeforeEach 93 | void setUp() { 94 | 95 | var container = endpointRegistry.getListenerContainers() 96 | .stream().filter(messageListenerContainer -> 97 | Objects.equals(messageListenerContainer.getGroupId(), "library-events-listener-group")) 98 | .collect(Collectors.toList()).get(0); 99 | ContainerTestUtils.waitForAssignment(container, embeddedKafkaBroker.getPartitionsPerTopic()); 100 | // for (MessageListenerContainer messageListenerContainer : endpointRegistry.getListenerContainers()) { 101 | // System.out.println("Group Id : "+ messageListenerContainer.getGroupId()); 102 | // if(Objects.equals(messageListenerContainer.getGroupId(), "library-events-listener-group")){ 103 | // System.out.println("Waiting for assignment"); 104 | // ContainerTestUtils.waitForAssignment(messageListenerContainer, embeddedKafkaBroker.getPartitionsPerTopic()); 105 | // } 106 | // } 107 | 108 | } 109 | 110 | @AfterEach 111 | void tearDown() { 112 | 113 | libraryEventsRepository.deleteAll(); 114 | 
failureRecordRepository.deleteAll(); 115 | 116 | } 117 | 118 | @Test 119 | void publishNewLibraryEvent() throws ExecutionException, InterruptedException, JsonProcessingException { 120 | //given 121 | String json = " {\"libraryEventId\":null,\"libraryEventType\":\"NEW\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 122 | kafkaTemplate.sendDefault(json).get(); 123 | 124 | //when 125 | CountDownLatch latch = new CountDownLatch(1); 126 | latch.await(3, TimeUnit.SECONDS); 127 | 128 | //then 129 | verify(libraryEventsConsumerSpy, times(1)).onMessage(isA(ConsumerRecord.class)); 130 | verify(libraryEventsServiceSpy, times(1)).processLibraryEvent(isA(ConsumerRecord.class)); 131 | 132 | List libraryEventList = (List) libraryEventsRepository.findAll(); 133 | assert libraryEventList.size() == 1; 134 | libraryEventList.forEach(libraryEvent -> { 135 | assert libraryEvent.getLibraryEventId() != null; 136 | assertEquals(456, libraryEvent.getBook().getBookId()); 137 | }); 138 | 139 | } 140 | 141 | @Test 142 | void publishUpdateLibraryEvent() throws JsonProcessingException, ExecutionException, InterruptedException { 143 | //given 144 | String json = "{\"libraryEventId\":null,\"libraryEventType\":\"NEW\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 145 | LibraryEvent libraryEvent = objectMapper.readValue(json, LibraryEvent.class); 146 | libraryEvent.getBook().setLibraryEvent(libraryEvent); 147 | libraryEventsRepository.save(libraryEvent); 148 | //publish the update LibraryEvent 149 | 150 | Book updatedBook = Book.builder(). 
151 | bookId(456).bookName("Kafka Using Spring Boot 2.x").bookAuthor("Dilip").build(); 152 | libraryEvent.setLibraryEventType(LibraryEventType.UPDATE); 153 | libraryEvent.setBook(updatedBook); 154 | String updatedJson = objectMapper.writeValueAsString(libraryEvent); 155 | kafkaTemplate.sendDefault(libraryEvent.getLibraryEventId(), updatedJson).get(); 156 | 157 | //when 158 | CountDownLatch latch = new CountDownLatch(1); 159 | latch.await(3, TimeUnit.SECONDS); 160 | 161 | //then 162 | //verify(libraryEventsConsumerSpy, times(1)).onMessage(isA(ConsumerRecord.class)); 163 | //verify(libraryEventsServiceSpy, times(1)).processLibraryEvent(isA(ConsumerRecord.class)); 164 | LibraryEvent persistedLibraryEvent = libraryEventsRepository.findById(libraryEvent.getLibraryEventId()).get(); 165 | assertEquals("Kafka Using Spring Boot 2.x", persistedLibraryEvent.getBook().getBookName()); 166 | } 167 | 168 | @Test 169 | void publishModifyLibraryEvent_Not_A_Valid_LibraryEventId() throws JsonProcessingException, InterruptedException, ExecutionException { 170 | //given 171 | Integer libraryEventId = 123; 172 | String json = "{\"libraryEventId\":" + libraryEventId + ",\"libraryEventType\":\"UPDATE\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 173 | System.out.println(json); 174 | kafkaTemplate.sendDefault(libraryEventId, json).get(); 175 | //when 176 | CountDownLatch latch = new CountDownLatch(1); 177 | latch.await(5, TimeUnit.SECONDS); 178 | 179 | 180 | verify(libraryEventsConsumerSpy, times(1)).onMessage(isA(ConsumerRecord.class)); 181 | verify(libraryEventsServiceSpy, times(1)).processLibraryEvent(isA(ConsumerRecord.class)); 182 | 183 | Optional libraryEventOptional = libraryEventsRepository.findById(libraryEventId); 184 | assertFalse(libraryEventOptional.isPresent()); 185 | 186 | Map configs = new HashMap<>(KafkaTestUtils.consumerProps("group2", "true", embeddedKafkaBroker)); 187 | consumer = new 
DefaultKafkaConsumerFactory<>(configs, new IntegerDeserializer(), new StringDeserializer()).createConsumer(); 188 | embeddedKafkaBroker.consumeFromAnEmbeddedTopic(consumer, deadLetterTopic); 189 | 190 | ConsumerRecord consumerRecord = KafkaTestUtils.getSingleRecord(consumer, deadLetterTopic); 191 | 192 | System.out.println("consumer Record in deadletter topic : " + consumerRecord.value()); 193 | 194 | assertEquals(json, consumerRecord.value()); 195 | consumerRecord.headers() 196 | .forEach(header -> { 197 | System.out.println("Header Key : " + header.key() + ", Header Value : " + new String(header.value())); 198 | }); 199 | 200 | 201 | } 202 | 203 | @Test 204 | void publishModifyLibraryEvent_Null_LibraryEventId() throws JsonProcessingException, InterruptedException, ExecutionException { 205 | //given 206 | Integer libraryEventId = null; 207 | String json = "{\"libraryEventId\":" + libraryEventId + ",\"libraryEventType\":\"UPDATE\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 208 | kafkaTemplate.sendDefault(libraryEventId, json).get(); 209 | //when 210 | CountDownLatch latch = new CountDownLatch(1); 211 | latch.await(3, TimeUnit.SECONDS); 212 | 213 | 214 | verify(libraryEventsConsumerSpy, times(1)).onMessage(isA(ConsumerRecord.class)); 215 | verify(libraryEventsServiceSpy, times(1)).processLibraryEvent(isA(ConsumerRecord.class)); 216 | 217 | Map configs = new HashMap<>(KafkaTestUtils.consumerProps("group3", "true", embeddedKafkaBroker)); 218 | consumer = new DefaultKafkaConsumerFactory<>(configs, new IntegerDeserializer(), new StringDeserializer()).createConsumer(); 219 | embeddedKafkaBroker.consumeFromAnEmbeddedTopic(consumer, deadLetterTopic); 220 | 221 | ConsumerRecords consumerRecords = KafkaTestUtils.getRecords(consumer); 222 | 223 | var deadletterList = new ArrayList>(); 224 | consumerRecords.forEach((record) -> { 225 | if (record.topic().equals(deadLetterTopic)) { 226 | deadletterList.add(record); 227 | } 228 
| }); 229 | 230 | var finalList = deadletterList.stream() 231 | .filter(record -> record.value().equals(json)) 232 | .collect(Collectors.toList()); 233 | 234 | assert finalList.size() == 1; 235 | } 236 | 237 | @Test 238 | void publishModifyLibraryEvent_999_LibraryEventId() throws JsonProcessingException, InterruptedException, ExecutionException { 239 | //given 240 | Integer libraryEventId = 999; 241 | String json = "{\"libraryEventId\":" + libraryEventId + ",\"libraryEventType\":\"UPDATE\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 242 | kafkaTemplate.sendDefault(libraryEventId, json).get(); 243 | //when 244 | CountDownLatch latch = new CountDownLatch(1); 245 | latch.await(3, TimeUnit.SECONDS); 246 | 247 | 248 | verify(libraryEventsConsumerSpy, times(3)).onMessage(isA(ConsumerRecord.class)); 249 | verify(libraryEventsServiceSpy, times(3)).processLibraryEvent(isA(ConsumerRecord.class)); 250 | } 251 | 252 | @Test 253 | //@Disabled 254 | void publishModifyLibraryEvent_999_LibraryEventId_deadletterTopic() throws JsonProcessingException, InterruptedException, ExecutionException { 255 | //given 256 | Integer libraryEventId = 999; 257 | String json = "{\"libraryEventId\":" + libraryEventId + ",\"libraryEventType\":\"UPDATE\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 258 | kafkaTemplate.sendDefault(libraryEventId, json).get(); 259 | //when 260 | CountDownLatch latch = new CountDownLatch(1); 261 | latch.await(3, TimeUnit.SECONDS); 262 | 263 | // Without Retry Listener 264 | // verify(libraryEventsConsumerSpy, times(3)).onMessage(isA(ConsumerRecord.class)); 265 | // verify(libraryEventsServiceSpy, times(3)).processLibraryEvent(isA(ConsumerRecord.class)); 266 | 267 | //with Retry listener 268 | verify(libraryEventsConsumerSpy, atLeast(3)).onMessage(isA(ConsumerRecord.class)); 269 | verify(libraryEventsServiceSpy, atLeast(3)).processLibraryEvent(isA(ConsumerRecord.class)); 
270 | 271 | 272 | Map configs = new HashMap<>(KafkaTestUtils.consumerProps("group1", "true", embeddedKafkaBroker)); 273 | consumer = new DefaultKafkaConsumerFactory<>(configs, new IntegerDeserializer(), new StringDeserializer()).createConsumer(); 274 | embeddedKafkaBroker.consumeFromAnEmbeddedTopic(consumer, retryTopic); 275 | 276 | ConsumerRecord consumerRecord = KafkaTestUtils.getSingleRecord(consumer, retryTopic); 277 | 278 | System.out.println("consumer Record in deadletter topic : " + consumerRecord.value()); 279 | 280 | assertEquals(json, consumerRecord.value()); 281 | consumerRecord.headers() 282 | .forEach(header -> { 283 | System.out.println("Header Key : " + header.key() + ", Header Value : " + new String(header.value())); 284 | }); 285 | } 286 | 287 | @Test 288 | @Disabled 289 | void publishModifyLibraryEvent_999_LibraryEventId_failureRecord() throws JsonProcessingException, InterruptedException, ExecutionException { 290 | //given 291 | Integer libraryEventId = 999; 292 | String json = "{\"libraryEventId\":" + libraryEventId + ",\"libraryEventType\":\"UPDATE\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 293 | kafkaTemplate.sendDefault(libraryEventId, json).get(); 294 | //when 295 | CountDownLatch latch = new CountDownLatch(1); 296 | latch.await(5, TimeUnit.SECONDS); 297 | 298 | 299 | verify(libraryEventsConsumerSpy, times(3)).onMessage(isA(ConsumerRecord.class)); 300 | verify(libraryEventsServiceSpy, times(3)).processLibraryEvent(isA(ConsumerRecord.class)); 301 | 302 | 303 | var failureCount = failureRecordRepository.count(); 304 | assertEquals(1, failureCount); 305 | failureRecordRepository.findAll().forEach(failureRecord -> { 306 | System.out.println("failureRecord : " + failureRecord); 307 | }); 308 | 309 | } 310 | 311 | } 312 | -------------------------------------------------------------------------------- 
/library-events-consumer/src/test/java/intg/com/learnkafka/jpa/FailureRecordRepositoryTest.java:
--------------------------------------------------------------------------------
package com.learnkafka.jpa;

import com.learnkafka.config.LibraryEventsConsumerConfig;
import com.learnkafka.entity.FailureRecord;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.test.context.ActiveProfiles;

import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * JPA slice test for {@link FailureRecordRepository}.
 *
 * <p>Uses {@code @DataJpaTest} so only the persistence layer is bootstrapped
 * (in-memory database, transactional rollback per test). Verifies that the
 * derived query {@code findAllByStatus} filters records by their status column.
 */
@ActiveProfiles("test")
@DataJpaTest
class FailureRecordRepositoryTest {

    @Autowired
    FailureRecordRepository failureRecordRepository;

    @BeforeEach
    public void setUp(){
        // Seed one record in RETRY status and one in DEAD status so the
        // status-filtered query below has both a matching and a non-matching row.
        var record = "{\"libraryEventId\":1,\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot 2.X\",\"bookAuthor\":\"Dilip\"}}";

        var failureRecord = new FailureRecord(null,"library-events", 123, record,1,0L, "exception occurred", LibraryEventsConsumerConfig.RETRY);
        var failureRecord1= new FailureRecord(null,"library-events", 123, record,1,1L, "exception occurred",LibraryEventsConsumerConfig.DEAD);

        failureRecordRepository.saveAll(List.of(failureRecord, failureRecord1));
    }

    @Test
    void findAllByStatus() {

        //when: query only for records still eligible for retry
        var failRecordList = failureRecordRepository.findAllByStatus(LibraryEventsConsumerConfig.RETRY);

        //then: the DEAD record must be filtered out
        assertEquals(1, failRecordList.size());
    }
}
--------------------------------------------------------------------------------
/library-events-consumer/src/test/java/intg/com/learnkafka/scheduler/RetrySchedulerIntegrationTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.scheduler; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.learnkafka.config.LibraryEventsConsumerConfig; 5 | import com.learnkafka.entity.FailureRecord; 6 | import com.learnkafka.jpa.FailureRecordRepository; 7 | import com.learnkafka.jpa.LibraryEventsRepository; 8 | import com.learnkafka.service.LibraryEventsService; 9 | import org.apache.kafka.clients.consumer.ConsumerRecord; 10 | import org.junit.jupiter.api.BeforeEach; 11 | import org.junit.jupiter.api.Disabled; 12 | import org.junit.jupiter.api.Test; 13 | import org.mockito.Mockito; 14 | import org.springframework.beans.factory.annotation.Autowired; 15 | import org.springframework.boot.test.context.SpringBootTest; 16 | import org.springframework.boot.test.mock.mockito.SpyBean; 17 | import org.springframework.test.context.ActiveProfiles; 18 | import java.util.List; 19 | import static org.mockito.Mockito.times; 20 | 21 | @SpringBootTest 22 | @ActiveProfiles("test") 23 | @Disabled 24 | public class RetrySchedulerIntegrationTest { 25 | 26 | @SpyBean 27 | LibraryEventsService libraryEventsServiceSpy; 28 | 29 | @Autowired 30 | RetryScheduler retryScheduler; 31 | 32 | @Autowired 33 | FailureRecordRepository failureRecordRepository; 34 | 35 | @BeforeEach 36 | public void setUp(){ 37 | 38 | failureRecordRepository.deleteAll(); 39 | 40 | var record = "{\"libraryEventId\":1,\"book\":{\"bookId\":456,\"bookName\":\"Kafka Using Spring Boot 2.X\",\"bookAuthor\":\"Dilip\"}}"; 41 | 42 | var failureRecord = new FailureRecord(null,"library-events", 123, record,1,0L, "exception occurred", LibraryEventsConsumerConfig.RETRY); 43 | var failureRecord1= new FailureRecord(null,"library-events", 123, record,1,1L, "exception occurred",LibraryEventsConsumerConfig.DEAD); 44 | 45 | 
failureRecordRepository.saveAll(List.of(failureRecord, failureRecord1)); 46 | } 47 | 48 | 49 | 50 | @Test 51 | @Disabled 52 | public void retryFailedRecords() throws JsonProcessingException { 53 | 54 | retryScheduler.retryFailedRecords(); 55 | 56 | Mockito.verify(libraryEventsServiceSpy, times(1) ).processLibraryEvent(Mockito.isA(ConsumerRecord.class)); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /library-events-producer/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/** 6 | !**/src/test/** 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | 17 | ### IntelliJ IDEA ### 18 | .idea 19 | *.iws 20 | *.iml 21 | *.ipr 22 | out/ 23 | 24 | ### NetBeans ### 25 | /nbproject/private/ 26 | /nbbuild/ 27 | /dist/ 28 | /nbdist/ 29 | /.nb-gradle/ 30 | 31 | ### VS Code ### 32 | .vscode/ 33 | -------------------------------------------------------------------------------- /library-events-producer/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'org.springframework.boot' version '2.6.5' 3 | id 'io.spring.dependency-management' version '1.0.11.RELEASE' 4 | id 'java' 5 | } 6 | 7 | group = 'com.learnkafka' 8 | version = '0.0.1-SNAPSHOT' 9 | sourceCompatibility = '11' 10 | 11 | configurations { 12 | compileOnly { 13 | extendsFrom annotationProcessor 14 | } 15 | } 16 | 17 | repositories { 18 | mavenCentral() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | implementation 'org.springframework.kafka:spring-kafka' 24 | implementation 'org.springframework.boot:spring-boot-starter-validation' 25 | compileOnly 'org.projectlombok:lombok' 26 | annotationProcessor 'org.projectlombok:lombok' 27 | 
testImplementation('org.springframework.boot:spring-boot-starter-test') { 28 | exclude group: 'org.junit.vintage', module: 'junit-vintage-engine' 29 | } 30 | testImplementation 'org.springframework.kafka:spring-kafka-test' 31 | } 32 | 33 | sourceSets{ 34 | test { 35 | java.srcDirs = ['src/test/java/unit', 'src/test/java/intg'] 36 | } 37 | } 38 | 39 | test { 40 | useJUnitPlatform() 41 | } 42 | -------------------------------------------------------------------------------- /library-events-producer/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/kafka-for-developers-using-spring-boot/c617a03d377f72fa63f191be2517dee22099c141/library-events-producer/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /library-events-producer/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Fri Dec 27 05:51:26 CST 2019 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.0-all.zip 7 | -------------------------------------------------------------------------------- /library-events-producer/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Attempt to set APP_HOME 10 | # Resolve links: $0 may be a link 11 | PRG="$0" 12 | # Need this for relative symlinks. 
13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS="" 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 34 | MAX_FD="maximum" 35 | 36 | warn () { 37 | echo "$*" 38 | } 39 | 40 | die () { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 
89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? -ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) 
set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Escape application args 158 | save () { 159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 160 | echo " " 161 | } 162 | APP_ARGS=$(save "$@") 163 | 164 | # Collect all arguments for the java command, following the shell quoting and substitution rules 165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 166 | 167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 169 | cd "$(dirname "$0")" 170 | fi 171 | 172 | exec "$JAVACMD" "$@" 173 | -------------------------------------------------------------------------------- /library-events-producer/gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set DIRNAME=%~dp0 12 | if "%DIRNAME%" == "" set DIRNAME=. 
13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS= 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | 53 | :win9xME_args 54 | @rem Slurp the command line arguments. 55 | set CMD_LINE_ARGS= 56 | set _SKIP=2 57 | 58 | :win9xME_args_slurp 59 | if "x%~1" == "x" goto execute 60 | 61 | set CMD_LINE_ARGS=%* 62 | 63 | :execute 64 | @rem Setup the command line 65 | 66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 67 | 68 | @rem Execute Gradle 69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 70 | 71 | :end 72 | @rem End local scope for the variables with windows NT shell 73 | if "%ERRORLEVEL%"=="0" goto mainEnd 74 | 75 | :fail 76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 77 | rem the _cmd.exe /c_ return code! 
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 79 | exit /b 1 80 | 81 | :mainEnd 82 | if "%OS%"=="Windows_NT" endlocal 83 | 84 | :omega 85 | -------------------------------------------------------------------------------- /library-events-producer/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'library-events-producer' 2 | -------------------------------------------------------------------------------- /library-events-producer/src/main/java/com/learnkafka/LibraryEventsProducerApplication.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class LibraryEventsProducerApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(LibraryEventsProducerApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /library-events-producer/src/main/java/com/learnkafka/config/AutoCreateConfig.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.config; 2 | 3 | import org.apache.kafka.clients.admin.AdminClientConfig; 4 | import org.apache.kafka.clients.admin.NewTopic; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.context.annotation.Configuration; 7 | import org.springframework.context.annotation.Profile; 8 | import org.springframework.kafka.config.TopicBuilder; 9 | import org.springframework.kafka.core.KafkaAdmin; 10 | 11 | import java.util.HashMap; 12 | import java.util.Map; 13 | 14 | @Configuration 15 | @Profile("local") 16 | public class AutoCreateConfig { 17 | 18 | @Bean 19 | public NewTopic libraryEvents(){ 20 | return TopicBuilder.name("library-events") 21 | 
package com.learnkafka.controller;


import lombok.extern.slf4j.Slf4j;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Global exception handler for the library-event REST controllers.
 *
 * <p>Translates bean-validation failures ({@code @Valid} on request bodies) into a
 * 400 response whose body is a deterministic, comma-joined list of
 * "field - message" entries (sorted so the output is stable for tests).
 */
@ControllerAdvice
@Slf4j
public class LibraryEventControllerAdvice {

    /**
     * Handles request-body validation failures.
     *
     * @param ex the validation exception raised by Spring MVC argument binding
     * @return 400 BAD_REQUEST whose body lists each failed field and its message,
     *         sorted alphabetically and joined with ", "
     */
    @ExceptionHandler(MethodArgumentNotValidException.class)
    public ResponseEntity<String> handleRequestBody(MethodArgumentNotValidException ex) {

        List<FieldError> errorList = ex.getBindingResult().getFieldErrors();
        String errorMessage = errorList.stream()
                .map(fieldError -> fieldError.getField() + " - " + fieldError.getDefaultMessage())
                .sorted()
                .collect(Collectors.joining(", "));
        log.info("errorMessage : {} ", errorMessage);
        return new ResponseEntity<>(errorMessage, HttpStatus.BAD_REQUEST);
    }
}
com.learnkafka.producer.LibraryEventProducer; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.springframework.beans.factory.annotation.Autowired; 9 | import org.springframework.http.HttpStatus; 10 | import org.springframework.http.ResponseEntity; 11 | import org.springframework.kafka.support.SendResult; 12 | import org.springframework.web.bind.annotation.PostMapping; 13 | import org.springframework.web.bind.annotation.PutMapping; 14 | import org.springframework.web.bind.annotation.RequestBody; 15 | import org.springframework.web.bind.annotation.RestController; 16 | 17 | import javax.validation.Valid; 18 | import java.util.concurrent.ExecutionException; 19 | 20 | @RestController 21 | @Slf4j 22 | public class LibraryEventsController { 23 | 24 | @Autowired 25 | LibraryEventProducer libraryEventProducer; 26 | 27 | @PostMapping("/v1/libraryevent") 28 | public ResponseEntity postLibraryEvent(@RequestBody @Valid LibraryEvent libraryEvent) throws JsonProcessingException, ExecutionException, InterruptedException { 29 | 30 | //invoke kafka producer 31 | libraryEvent.setLibraryEventType(LibraryEventType.NEW); 32 | libraryEventProducer.sendLibraryEvent_Approach2(libraryEvent); 33 | //libraryEventProducer.sendLibraryEvent(libraryEvent); 34 | return ResponseEntity.status(HttpStatus.CREATED).body(libraryEvent); 35 | } 36 | 37 | //PUT 38 | @PutMapping("/v1/libraryevent") 39 | public ResponseEntity putLibraryEvent(@RequestBody @Valid LibraryEvent libraryEvent) throws JsonProcessingException, ExecutionException, InterruptedException { 40 | 41 | log.info("LibraryEvent : {} ",libraryEvent ); 42 | if(libraryEvent.getLibraryEventId()==null){ 43 | return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("Please pass the LibraryEventId"); 44 | } 45 | 46 | libraryEvent.setLibraryEventType(LibraryEventType.UPDATE); 47 | libraryEventProducer.sendLibraryEvent_Approach2(libraryEvent); 48 | return ResponseEntity.status(HttpStatus.OK).body(libraryEvent); 49 | } 50 | } 51 | 
// --- com/learnkafka/domain/Book.java ---
package com.learnkafka.domain;


import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;

/**
 * Book payload carried inside a {@link LibraryEvent}.
 * All fields are validated when the enclosing event is received with {@code @Valid}.
 */
@AllArgsConstructor
@NoArgsConstructor
@Data
@Builder
public class Book {
    @NotNull
    private Integer bookId;
    @NotBlank
    private String bookName;
    @NotBlank
    private String bookAuthor;
}

// --- com/learnkafka/domain/LibraryEvent.java ---
package com.learnkafka.domain;


import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;

/**
 * Event published to the "library-events" topic.
 *
 * <p>{@code libraryEventId} is null for NEW events and required for UPDATE
 * events (enforced by the controller, not by validation annotations here).
 * The nested {@link Book} is mandatory and validated recursively.
 */
@AllArgsConstructor
@NoArgsConstructor
@Data
@Builder
public class LibraryEvent {

    private Integer libraryEventId;
    private LibraryEventType libraryEventType;
    @NotNull
    @Valid
    private Book book;

}

// --- com/learnkafka/domain/LibraryEventType.java ---
package com.learnkafka.domain;

/** Discriminator for how a {@link LibraryEvent} should be processed downstream. */
public enum LibraryEventType {
    NEW,
    UPDATE
}
package com.learnkafka.producer;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.learnkafka.domain.LibraryEvent;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Publishes {@link LibraryEvent}s to Kafka as JSON, keyed by libraryEventId.
 *
 * <p>Three send styles are offered: fire-and-forget to the template's default
 * topic ({@link #sendLibraryEvent}), asynchronous send of an explicit
 * {@link ProducerRecord} with headers ({@link #sendLibraryEvent_Approach2}),
 * and a blocking send with a 1-second timeout
 * ({@link #sendLibraryEventSynchronous}).
 */
@Component
@Slf4j
public class LibraryEventProducer {

    @Autowired
    KafkaTemplate<Integer, String> kafkaTemplate;

    // Destination topic for the explicit-record send path (Approach2).
    String topic = "library-events";

    @Autowired
    ObjectMapper objectMapper;

    /**
     * Sends the event asynchronously to the template's configured default topic.
     * Success/failure are reported via the logging callbacks only.
     *
     * @throws JsonProcessingException if the event cannot be serialized to JSON
     */
    public void sendLibraryEvent(LibraryEvent libraryEvent) throws JsonProcessingException {

        Integer key = libraryEvent.getLibraryEventId();
        String value = objectMapper.writeValueAsString(libraryEvent);

        ListenableFuture<SendResult<Integer, String>> listenableFuture = kafkaTemplate.sendDefault(key, value);
        listenableFuture.addCallback(new ListenableFutureCallback<SendResult<Integer, String>>() {
            @Override
            public void onFailure(Throwable ex) {
                handleFailure(key, value, ex);
            }

            @Override
            public void onSuccess(SendResult<Integer, String> result) {
                handleSuccess(key, value, result);
            }
        });
    }

    /**
     * Sends the event asynchronously as an explicit {@link ProducerRecord}
     * (topic {@link #topic}, "event-source" header) and returns the future so
     * callers/tests can await or inspect the outcome.
     *
     * @throws JsonProcessingException if the event cannot be serialized to JSON
     */
    public ListenableFuture<SendResult<Integer, String>> sendLibraryEvent_Approach2(LibraryEvent libraryEvent) throws JsonProcessingException {

        Integer key = libraryEvent.getLibraryEventId();
        String value = objectMapper.writeValueAsString(libraryEvent);

        ProducerRecord<Integer, String> producerRecord = buildProducerRecord(key, value, topic);

        ListenableFuture<SendResult<Integer, String>> listenableFuture = kafkaTemplate.send(producerRecord);

        listenableFuture.addCallback(new ListenableFutureCallback<SendResult<Integer, String>>() {
            @Override
            public void onFailure(Throwable ex) {
                handleFailure(key, value, ex);
            }

            @Override
            public void onSuccess(SendResult<Integer, String> result) {
                handleSuccess(key, value, result);
            }
        });

        return listenableFuture;
    }

    /**
     * Builds the record with a null partition (broker/partitioner decides) and
     * an "event-source" header identifying the producing system.
     */
    private ProducerRecord<Integer, String> buildProducerRecord(Integer key, String value, String topic) {

        List<Header> recordHeaders = List.of(new RecordHeader("event-source", "scanner".getBytes()));

        return new ProducerRecord<>(topic, null, key, value, recordHeaders);
    }

    /**
     * Blocking send to the default topic; waits at most 1 second for the broker ack.
     *
     * @return the broker's {@link SendResult}
     * @throws ExecutionException   if the send itself failed
     * @throws InterruptedException if interrupted while waiting
     * @throws TimeoutException     if no ack arrives within 1 second
     * @throws JsonProcessingException if the event cannot be serialized to JSON
     */
    public SendResult<Integer, String> sendLibraryEventSynchronous(LibraryEvent libraryEvent) throws JsonProcessingException, ExecutionException, InterruptedException, TimeoutException {

        Integer key = libraryEvent.getLibraryEventId();
        String value = objectMapper.writeValueAsString(libraryEvent);
        SendResult<Integer, String> sendResult = null;
        try {
            sendResult = kafkaTemplate.sendDefault(key, value).get(1, TimeUnit.SECONDS);
        } catch (ExecutionException | InterruptedException e) {
            log.error("ExecutionException/InterruptedException Sending the Message and the exception is {}", e.getMessage());
            throw e;
        } catch (Exception e) {
            log.error("Exception Sending the Message and the exception is {}", e.getMessage());
            throw e;
        }

        return sendResult;

    }

    /**
     * Failure callback: logs the error with the full stack trace.
     * NOTE(review): the previous version re-threw the Throwable only to catch
     * it again and log {@code getMessage()} — that discarded the stack trace.
     * Passing the Throwable as the last SLF4J argument preserves it.
     */
    private void handleFailure(Integer key, String value, Throwable ex) {
        log.error("Error Sending the Message for the key : {} and the exception is {}", key, ex.getMessage(), ex);
    }

    /** Success callback: logs key, value and the partition the record landed on. */
    private void handleSuccess(Integer key, String value, SendResult<Integer, String> result) {
        log.info("Message Sent SuccessFully for the key : {} and the value is {} , partition is {}", key, value, result.getRecordMetadata().partition());
    }
}
bootstrap-servers: localhost:9092,localhost:9093,localhost:9094 14 | key-serializer: org.apache.kafka.common.serialization.IntegerSerializer 15 | value-serializer: org.apache.kafka.common.serialization.StringSerializer 16 | properties: 17 | acks: all 18 | retries: 10 19 | retry.backoff.ms: 1000 20 | admin: 21 | properties: 22 | bootstrap.servers: localhost:9092,localhost:9093,localhost:9094 23 | --- 24 | spring: 25 | config: 26 | activate: 27 | on-profile: nonprod 28 | kafka: 29 | template: 30 | default-topic: library-events 31 | producer: 32 | bootstrap-servers: localhost:9095,localhost:9096,localhost:9097 33 | key-serializer: org.apache.kafka.common.serialization.IntegerSerializer 34 | value-serializer: org.apache.kafka.common.serialization.StringSerializer 35 | ssl: 36 | trust-store-location: file:/Dilip/udemy/kafka-for-developers-using-spring-boot/ssl/client.truststore.jks 37 | trust-store-password: password 38 | key-store-location: file:/Dilip/udemy/kafka-for-developers-using-spring-boot/ssl/client.keystore.jks 39 | key-store-password: password 40 | properties: 41 | acks: all 42 | retries: 10 43 | retry.backoff.ms: 1000 44 | security: 45 | protocol: SSL 46 | ssl.endpoint.identification.algorithm: 47 | 48 | --- 49 | spring: 50 | config: 51 | activate: 52 | on-profile: prod 53 | kafka: 54 | producer: 55 | bootstrap-servers: prod:9092 56 | key-serializer: org.apache.kafka.common.serialization.IntegerSerializer 57 | value-serializer: org.apache.kafka.common.serialization.StringSerializer 58 | -------------------------------------------------------------------------------- /library-events-producer/src/main/resources/curl-commands.txt: -------------------------------------------------------------------------------- 1 | POST WITH-NULL-LIBRARY-EVENT-ID 2 | --------------------- 3 | curl -i \ 4 | -d '{"libraryEventId":null,"book":{"bookId":456,"bookName":"Kafka Using Spring Boot","bookAuthor":"Dilip"}}' \ 5 | -H "Content-Type: application/json" \ 6 | -X POST 
http://localhost:8080/v1/libraryevent 7 | 8 | PUT WITH ID - 1 9 | -------------- 10 | curl -i \ 11 | -d '{"libraryEventId":1,"book":{"bookId":456,"bookName":"Kafka Using Spring Boot 2.X","bookAuthor":"Dilip"}}' \ 12 | -H "Content-Type: application/json" \ 13 | -X PUT http://localhost:8080/v1/libraryevent 14 | 15 | curl -i \ 16 | -d '{"libraryEventId":2,"book":{"bookId":456,"bookName":"Kafka Using Spring Boot 2.X","bookAuthor":"Dilip"}}' \ 17 | -H "Content-Type: application/json" \ 18 | -X PUT http://localhost:8080/v1/libraryevent 19 | 20 | 21 | 22 | PUT WITH ID 23 | --------------------- 24 | curl -i \ 25 | -d '{"libraryEventId":123,"book":{"bookId":456,"bookName":"Kafka Using Spring Boot","bookAuthor":"Dilip"}}' \ 26 | -H "Content-Type: application/json" \ 27 | -X PUT http://localhost:8080/v1/libraryevent 28 | 29 | curl -i \ 30 | -d '{"libraryEventId":999,"book":{"bookId":456,"bookName":"Kafka Using Spring Boot","bookAuthor":"Dilip"}}' \ 31 | -H "Content-Type: application/json" \ 32 | -X PUT http://localhost:8080/v1/libraryevent 33 | 34 | curl -i \ 35 | -d '{"libraryEventId":2,"book":{"bookId":456,"bookName":"Kafka Using Spring Boot","bookAuthor":"Dilip"}}' \ 36 | -H "Content-Type: application/json" \ 37 | -X PUT http://localhost:8080/v1/libraryevent 38 | 39 | 40 | PUT WITHOUT ID 41 | --------------------- 42 | curl -i \ 43 | -d '{"libraryEventId":null,"book":{"bookId":456,"bookName":"Kafka Using Spring Boot","bookAuthor":"Dilip"}}' \ 44 | -H "Content-Type: application/json" \ 45 | -X PUT http://localhost:8080/v1/libraryevent 46 | 47 | 48 | ./kafka-topics.sh --create --topic library-events.DLT --replication-factor 1 --partitions 4 --bootstrap-server localhost:9092 49 | -------------------------------------------------------------------------------- /library-events-producer/src/test/java/intg/com/learnkafka/controller/LibraryEventsControllerIntegrationTest.java: -------------------------------------------------------------------------------- 1 | package 
com.learnkafka.controller; 2 | 3 | import com.learnkafka.domain.Book; 4 | import com.learnkafka.domain.LibraryEvent; 5 | import org.apache.kafka.clients.consumer.Consumer; 6 | import org.apache.kafka.clients.consumer.ConsumerConfig; 7 | import org.apache.kafka.clients.consumer.ConsumerRecord; 8 | import org.apache.kafka.clients.consumer.ConsumerRecords; 9 | import org.apache.kafka.common.serialization.IntegerDeserializer; 10 | import org.apache.kafka.common.serialization.StringDeserializer; 11 | import org.junit.jupiter.api.AfterEach; 12 | import org.junit.jupiter.api.BeforeEach; 13 | import org.junit.jupiter.api.Test; 14 | import org.junit.jupiter.api.Timeout; 15 | import org.springframework.beans.factory.annotation.Autowired; 16 | import org.springframework.boot.test.context.SpringBootTest; 17 | import org.springframework.boot.test.web.client.TestRestTemplate; 18 | import org.springframework.http.*; 19 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 20 | import org.springframework.kafka.test.EmbeddedKafkaBroker; 21 | import org.springframework.kafka.test.context.EmbeddedKafka; 22 | import org.springframework.kafka.test.utils.KafkaTestUtils; 23 | import org.springframework.test.context.TestPropertySource; 24 | 25 | import java.util.HashMap; 26 | import java.util.Map; 27 | 28 | import static org.junit.jupiter.api.Assertions.assertEquals; 29 | 30 | @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) 31 | @EmbeddedKafka(topics = {"library-events"}, partitions = 3) 32 | @TestPropertySource(properties = {"spring.kafka.producer.bootstrap-servers=${spring.embedded.kafka.brokers}", 33 | "spring.kafka.admin.properties.bootstrap.servers=${spring.embedded.kafka.brokers}"}) 34 | public class LibraryEventsControllerIntegrationTest { 35 | 36 | @Autowired 37 | TestRestTemplate restTemplate; 38 | 39 | @Autowired 40 | EmbeddedKafkaBroker embeddedKafkaBroker; 41 | 42 | private Consumer consumer; 43 | 44 | @BeforeEach 45 | void setUp() { 
46 | Map configs = new HashMap<>(KafkaTestUtils.consumerProps("group1", "true", embeddedKafkaBroker)); 47 | configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 48 | consumer = new DefaultKafkaConsumerFactory<>(configs, new IntegerDeserializer(), new StringDeserializer()).createConsumer(); 49 | embeddedKafkaBroker.consumeFromAllEmbeddedTopics(consumer); 50 | } 51 | 52 | @AfterEach 53 | void tearDown() { 54 | consumer.close(); 55 | } 56 | 57 | @Test 58 | @Timeout(5) 59 | void postLibraryEvent() throws InterruptedException { 60 | //given 61 | Book book = Book.builder() 62 | .bookId(123) 63 | .bookAuthor("Dilip") 64 | .bookName("Kafka using Spring Boot") 65 | .build(); 66 | 67 | LibraryEvent libraryEvent = LibraryEvent.builder() 68 | .libraryEventId(null) 69 | .book(book) 70 | .build(); 71 | HttpHeaders headers = new HttpHeaders(); 72 | headers.set("content-type", MediaType.APPLICATION_JSON.toString()); 73 | HttpEntity request = new HttpEntity<>(libraryEvent, headers); 74 | 75 | //when 76 | ResponseEntity responseEntity = restTemplate.exchange("/v1/libraryevent", HttpMethod.POST, request, LibraryEvent.class); 77 | 78 | //then 79 | assertEquals(HttpStatus.CREATED, responseEntity.getStatusCode()); 80 | 81 | 82 | 83 | ConsumerRecords consumerRecords = KafkaTestUtils.getRecords(consumer); 84 | //Thread.sleep(3000); 85 | assert consumerRecords.count() == 1; 86 | consumerRecords.forEach(record-> { 87 | String expectedRecord = "{\"libraryEventId\":null,\"libraryEventType\":\"NEW\",\"book\":{\"bookId\":123,\"bookName\":\"Kafka using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 88 | String value = record.value(); 89 | assertEquals(expectedRecord, value); 90 | }); 91 | 92 | 93 | } 94 | 95 | @Test 96 | @Timeout(5) 97 | void putLibraryEvent() throws InterruptedException { 98 | //given 99 | Book book = Book.builder() 100 | .bookId(456) 101 | .bookAuthor("Dilip") 102 | .bookName("Kafka using Spring Boot") 103 | .build(); 104 | 105 | LibraryEvent libraryEvent = 
LibraryEvent.builder() 106 | .libraryEventId(123) 107 | .book(book) 108 | .build(); 109 | HttpHeaders headers = new HttpHeaders(); 110 | headers.set("content-type", MediaType.APPLICATION_JSON.toString()); 111 | HttpEntity request = new HttpEntity<>(libraryEvent, headers); 112 | 113 | 114 | //when 115 | ResponseEntity responseEntity = restTemplate.exchange("/v1/libraryevent", HttpMethod.PUT, request, LibraryEvent.class); 116 | 117 | //then 118 | assertEquals(HttpStatus.OK, responseEntity.getStatusCode()); 119 | 120 | 121 | ConsumerRecords consumerRecords = KafkaTestUtils.getRecords(consumer); 122 | //Thread.sleep(3000); 123 | assert consumerRecords.count() == 2; 124 | consumerRecords.forEach(record-> { 125 | if(record.key()!=null){ 126 | String expectedRecord = "{\"libraryEventId\":123,\"libraryEventType\":\"UPDATE\",\"book\":{\"bookId\":456,\"bookName\":\"Kafka using Spring Boot\",\"bookAuthor\":\"Dilip\"}}"; 127 | String value = record.value(); 128 | assertEquals(expectedRecord, value); 129 | } 130 | }); 131 | 132 | 133 | } 134 | } -------------------------------------------------------------------------------- /library-events-producer/src/test/java/unit/com/learnkafka/controller/LibraryEventControllerUnitTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.controller; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.learnkafka.domain.Book; 6 | import com.learnkafka.domain.LibraryEvent; 7 | import com.learnkafka.producer.LibraryEventProducer; 8 | import org.junit.jupiter.api.Test; 9 | import org.springframework.beans.factory.annotation.Autowired; 10 | import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; 11 | import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest; 12 | import org.springframework.boot.test.mock.mockito.MockBean; 13 | import 
org.springframework.http.MediaType; 14 | import org.springframework.test.web.servlet.MockMvc; 15 | 16 | import static org.mockito.ArgumentMatchers.eq; 17 | import static org.mockito.ArgumentMatchers.isA; 18 | import static org.mockito.Mockito.doNothing; 19 | import static org.mockito.Mockito.when; 20 | import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; 21 | import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; 22 | import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; 23 | import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; 24 | 25 | @WebMvcTest(LibraryEventsController.class) 26 | @AutoConfigureMockMvc 27 | public class LibraryEventControllerUnitTest { 28 | 29 | @Autowired 30 | MockMvc mockMvc; 31 | 32 | ObjectMapper objectMapper = new ObjectMapper(); 33 | 34 | @MockBean 35 | LibraryEventProducer libraryEventProducer; 36 | 37 | @Test 38 | void postLibraryEvent() throws Exception { 39 | //given 40 | Book book = Book.builder() 41 | .bookId(123) 42 | .bookAuthor("Dilip") 43 | .bookName("Kafka using Spring Boot") 44 | .build(); 45 | 46 | LibraryEvent libraryEvent = LibraryEvent.builder() 47 | .libraryEventId(null) 48 | .book(book) 49 | .build(); 50 | 51 | String json = objectMapper.writeValueAsString(libraryEvent); 52 | when(libraryEventProducer.sendLibraryEvent_Approach2(isA(LibraryEvent.class))).thenReturn(null); 53 | 54 | //expect 55 | mockMvc.perform(post("/v1/libraryevent") 56 | .content(json) 57 | .contentType(MediaType.APPLICATION_JSON)) 58 | .andExpect(status().isCreated()); 59 | 60 | } 61 | 62 | @Test 63 | void postLibraryEvent_4xx() throws Exception { 64 | //given 65 | 66 | Book book = Book.builder() 67 | .bookId(null) 68 | .bookAuthor(null) 69 | .bookName("Kafka using Spring Boot") 70 | .build(); 71 | 72 | LibraryEvent libraryEvent = LibraryEvent.builder() 73 | .libraryEventId(null) 74 | .book(book) 75 | .build(); 76 | 
77 | String json = objectMapper.writeValueAsString(libraryEvent); 78 | when(libraryEventProducer.sendLibraryEvent_Approach2(isA(LibraryEvent.class))).thenReturn(null); 79 | //expect 80 | String expectedErrorMessage = "book.bookAuthor - must not be blank, book.bookId - must not be null"; 81 | mockMvc.perform(post("/v1/libraryevent") 82 | .content(json) 83 | .contentType(MediaType.APPLICATION_JSON)) 84 | .andExpect(status().is4xxClientError()) 85 | .andExpect(content().string(expectedErrorMessage)); 86 | 87 | } 88 | 89 | @Test 90 | void updateLibraryEvent() throws Exception { 91 | 92 | //given 93 | Book book = new Book().builder() 94 | .bookId(123) 95 | .bookAuthor("Dilip") 96 | .bookName("Kafka Using Spring Boot") 97 | .build(); 98 | 99 | LibraryEvent libraryEvent = LibraryEvent.builder() 100 | .libraryEventId(123) 101 | .book(book) 102 | .build(); 103 | String json = objectMapper.writeValueAsString(libraryEvent); 104 | when(libraryEventProducer.sendLibraryEvent_Approach2(isA(LibraryEvent.class))).thenReturn(null); 105 | 106 | //expect 107 | mockMvc.perform( 108 | put("/v1/libraryevent") 109 | .content(json) 110 | .contentType(MediaType.APPLICATION_JSON)) 111 | .andExpect(status().isOk()); 112 | 113 | } 114 | 115 | @Test 116 | void updateLibraryEvent_withNullLibraryEventId() throws Exception { 117 | 118 | //given 119 | Book book = new Book().builder() 120 | .bookId(123) 121 | .bookAuthor("Dilip") 122 | .bookName("Kafka Using Spring Boot") 123 | .build(); 124 | 125 | LibraryEvent libraryEvent = LibraryEvent.builder() 126 | .libraryEventId(null) 127 | .book(book) 128 | .build(); 129 | String json = objectMapper.writeValueAsString(libraryEvent); 130 | when(libraryEventProducer.sendLibraryEvent_Approach2(isA(LibraryEvent.class))).thenReturn(null); 131 | 132 | //expect 133 | mockMvc.perform( 134 | put("/v1/libraryevent") 135 | .content(json) 136 | .contentType(MediaType.APPLICATION_JSON)) 137 | .andExpect(status().is4xxClientError()) 138 | 
.andExpect(content().string("Please pass the LibraryEventId")); 139 | 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /library-events-producer/src/test/java/unit/com/learnkafka/producer/LibraryEventProducerUnitTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.producer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.learnkafka.domain.Book; 6 | import com.learnkafka.domain.LibraryEvent; 7 | import org.apache.kafka.clients.producer.ProducerRecord; 8 | import org.apache.kafka.clients.producer.RecordMetadata; 9 | import org.apache.kafka.common.TopicPartition; 10 | import org.apache.kafka.common.protocol.types.Field; 11 | import org.junit.jupiter.api.Test; 12 | import org.junit.jupiter.api.extension.ExtendWith; 13 | import org.mockito.InjectMocks; 14 | import org.mockito.Mock; 15 | import org.mockito.Spy; 16 | import org.mockito.junit.jupiter.MockitoExtension; 17 | import org.springframework.kafka.core.KafkaTemplate; 18 | import org.springframework.kafka.support.SendResult; 19 | import org.springframework.util.concurrent.ListenableFuture; 20 | import org.springframework.util.concurrent.SettableListenableFuture; 21 | import scala.Int; 22 | 23 | import java.util.concurrent.ExecutionException; 24 | 25 | import static org.junit.jupiter.api.Assertions.assertThrows; 26 | import static org.mockito.ArgumentMatchers.isA; 27 | import static org.mockito.Mockito.when; 28 | 29 | @ExtendWith(MockitoExtension.class) 30 | public class LibraryEventProducerUnitTest { 31 | 32 | @Mock 33 | KafkaTemplate kafkaTemplate; 34 | 35 | @Spy 36 | ObjectMapper objectMapper = new ObjectMapper(); 37 | 38 | @InjectMocks 39 | LibraryEventProducer eventProducer; 40 | 41 | @Test 42 | void sendLibraryEvent_Approach2_failure() throws JsonProcessingException, ExecutionException, 
InterruptedException { 43 | //given 44 | Book book = Book.builder() 45 | .bookId(123) 46 | .bookAuthor("Dilip") 47 | .bookName("Kafka using Spring Boot") 48 | .build(); 49 | 50 | LibraryEvent libraryEvent = LibraryEvent.builder() 51 | .libraryEventId(null) 52 | .book(book) 53 | .build(); 54 | SettableListenableFuture future = new SettableListenableFuture(); 55 | 56 | future.setException(new RuntimeException("Exception Calling Kafka")); 57 | when(kafkaTemplate.send(isA(ProducerRecord.class))).thenReturn(future); 58 | //when 59 | 60 | assertThrows(Exception.class, ()->eventProducer.sendLibraryEvent_Approach2(libraryEvent).get()); 61 | 62 | } 63 | 64 | @Test 65 | void sendLibraryEvent_Approach2_success() throws JsonProcessingException, ExecutionException, InterruptedException { 66 | //given 67 | Book book = Book.builder() 68 | .bookId(123) 69 | .bookAuthor("Dilip") 70 | .bookName("Kafka using Spring Boot") 71 | .build(); 72 | 73 | LibraryEvent libraryEvent = LibraryEvent.builder() 74 | .libraryEventId(null) 75 | .book(book) 76 | .build(); 77 | String record = objectMapper.writeValueAsString(libraryEvent); 78 | SettableListenableFuture future = new SettableListenableFuture(); 79 | 80 | ProducerRecord producerRecord = new ProducerRecord("library-events", libraryEvent.getLibraryEventId(),record ); 81 | RecordMetadata recordMetadata = new RecordMetadata(new TopicPartition("library-events", 1), 82 | 1,1,System.currentTimeMillis(), 1, 2); 83 | SendResult sendResult = new SendResult(producerRecord,recordMetadata); 84 | 85 | future.set(sendResult); 86 | when(kafkaTemplate.send(isA(ProducerRecord.class))).thenReturn(future); 87 | //when 88 | 89 | ListenableFuture> listenableFuture = eventProducer.sendLibraryEvent_Approach2(libraryEvent); 90 | 91 | //then 92 | SendResult sendResult1 = listenableFuture.get(); 93 | assert sendResult1.getRecordMetadata().partition()==1; 94 | 95 | } 96 | } 97 | --------------------------------------------------------------------------------