├── .gitignore ├── Docker-Commands.md ├── Kafka-Docker-Commands .md ├── Kafka_Commands.md ├── README.md ├── client-ssl.properties ├── kafka ├── .classpath ├── .gitignore ├── .project ├── .settings │ ├── org.eclipse.jdt.core.prefs │ └── org.eclipse.m2e.core.prefs ├── pom.xml └── src │ └── main │ └── java │ └── com │ └── learnkafka │ ├── consumer │ ├── ConsumerKafka.java │ ├── ConsumerKafkaManualOffset.java │ ├── ConsumerKafkaResetOffset.java │ └── ConsumerKafkaSSL.java │ ├── consumergroup │ ├── ConsumerKafka1.java │ ├── ConsumerKafka2.java │ └── ConsumerKafka3.java │ ├── paritioner │ └── ProducerKafkaPartitioner.java │ └── producer │ ├── ProducerKafka.java │ └── ProducerKafkaSSL.java ├── learn-kafka-spring-boot-retry ├── .gitignore ├── build.gradle ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── learnkafka │ │ │ ├── LearnKafkaSpringBootRetryApplication.java │ │ │ ├── config │ │ │ └── ConsumerConfig.java │ │ │ └── consumer │ │ │ ├── ConsumerRetryListener.java │ │ │ └── TopicConsumer.java │ └── resources │ │ └── application.yml │ └── test │ └── java │ └── com │ └── learnkafka │ └── LearnKafkaSpringBootRetryApplicationTests.java ├── learnbootkafka-manual-offset-docker ├── .gitignore ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ └── maven-wrapper.properties ├── README.md ├── mvnw ├── mvnw.cmd ├── pom.xml └── src │ ├── main │ ├── docker │ │ ├── Dockerfile │ │ └── docker-entrypoint.sh │ ├── java │ │ └── com │ │ │ └── learnbootkafka │ │ │ ├── LearnbootkafkaApplication.java │ │ │ ├── config │ │ │ └── KafkaConfig.java │ │ │ ├── consumer │ │ │ └── ConsumerKafka.java │ │ │ ├── controller │ │ │ └── HelloController.java │ │ │ └── producer │ │ │ └── ProducerKafka.java │ └── resources │ │ ├── application.yml │ │ └── application1.properties │ └── test │ └── java │ └── com │ └── learnbootkafka │ └── LearnbootkafkaApplicationTests.java ├── learnbootkafka-manual-offset ├── .gitignore ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ └── maven-wrapper.properties ├── README.md ├── mvnw ├── mvnw.cmd ├── pom.xml └── src │ ├── main │ ├── docker │ │ ├── Dockerfile │ │ └── docker-entrypoint.sh │ ├── java │ │ └── com │ │ │ └── learnbootkafka │ │ │ ├── LearnbootkafkaApplication.java │ │ │ ├── config │ │ │ └── KafkaConfig.java │ │ │ ├── consumer │ │ │ └── ConsumerKafka.java │ │ │ ├── controller │ │ │ └── HelloController.java │ │ │ └── producer │ │ │ └── ProducerKafka.java │ └── resources │ │ ├── application.yml │ │ └── application1.properties │ └── test │ └── java │ └── com │ └── learnbootkafka │ └── LearnbootkafkaApplicationTests.java ├── learnbootkafka ├── .gitignore ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ └── maven-wrapper.properties ├── README.md ├── mvnw ├── mvnw.cmd ├── pom.xml └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── learnbootkafka │ │ │ ├── LearnbootkafkaApplication.java │ │ │ ├── config │ │ │ └── KafkaConfig.java │ │ │ ├── consumer │ │ │ └── ConsumerKafka.java │ │ │ ├── controller │ │ │ └── HelloController.java │ │ │ └── producer │ │ │ └── ProducerKafka.java │ └── resources │ │ ├── application.yml │ │ └── application1.properties │ └── test │ └── java │ └── com │ └── learnbootkafka │ └── LearnbootkafkaApplicationTests.java ├── learncamel-simple ├── .classpath ├── .project ├── .settings │ ├── org.eclipse.jdt.core.prefs │ └── org.eclipse.m2e.core.prefs ├── data │ ├── input │ │ └── file1.txt │ └── output │ │ └── file1.txt ├── pom.xml └── src │ └── main │ └── java │ └── com │ └── learncamel │ └── file │ └── CopyFilesCamel.java ├── learncamelkafka ├── .classpath ├── .project ├── .settings │ ├── org.eclipse.jdt.core.prefs │ └── org.eclipse.m2e.core.prefs ├── pom.xml └── src │ └── main │ ├── java │ └── com │ │ └── learncamel │ │ └── kafka │ │ ├── CamelKafkaClient.java │ │ └── CamelKafkaProducer.java │ └── resources │ └── log4j2.properties └── learnspark-kafka ├── .classpath ├── .project ├── .settings ├── org.eclipse.jdt.core.prefs └── org.eclipse.m2e.core.prefs ├── pom.xml └── src └── main ├── java └── com │ └── learnspark │ └── consumer │ └── SparkKafkaConsumer.java └── resources └── log4j.properties /.gitignore: -------------------------------------------------------------------------------- 1 | ### JetBrains template 2 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio 3 | 4 | *.iml 5 | *.eml 6 | 7 | #Ignore classes 8 | build/ 9 | 10 | ## Directory-based project format: 11 | #.idea/ 12 | # if you remove the above rule, at least ignore the following: 13 | 14 | # User-specific stuff: 15 | .idea/ 16 | .idea/workspace.xml 17 | .idea/tasks.xml 18 | .idea/dictionaries 19 | .idea/shelf 20 | 21 | RemoteSystemsTempFiles/ 22 | 23 | # Sensitive or high-churn files: 24 | .idea/dataSources.ids 25 | .idea/dataSources.xml 26 | .idea/sqlDataSources.xml 27 | .idea/dynamic.xml 28 | .idea/uiDesigner.xml 29 | 30 | # Gradle: 31 | .idea/gradle.xml 32 | .idea/libraries 33 | 34 | # Mongo Explorer plugin: 35 | .idea/mongoSettings.xml 36 | 37 | ## File-based project format: 38 | *.ipr 39 | *.iws 40 | 41 | ## Plugin-specific files: 42 | 43 | # IntelliJ 44 | /out/ 45 | 46 | # mpeltonen/sbt-idea plugin 47 | .idea_modules/ 48 | 49 | # JIRA plugin 50 | atlassian-ide-plugin.xml 51 | 52 | # Crashlytics plugin (for Android Studio and IntelliJ) 53 | com_crashlytics_export_strings.xml 54 | crashlytics.properties 55 | crashlytics-build.properties 56 | 57 | ### Maven template 58 | target/ 59 | pom.xml.tag 60 | pom.xml.releaseBackup 61 | pom.xml.versionsBackup 62 | pom.xml.next 63 | release.properties 64 | dependency-reduced-pom.xml 65 | buildNumber.properties 66 | .mvn/timing.properties 67 | 68 | ### Windows template 69 | # Windows image file caches 70 | Thumbs.db 71 | ehthumbs.db 72 | 73 | # Folder config file 74 | Desktop.ini 75 | 76 | # Recycle Bin used on file shares 77 | $RECYCLE.BIN/ 78 | 79 | # Windows Installer files 80 | *.cab 81 | *.msi 82 | *.msm 83 | *.msp 84 | 85 | # Windows shortcuts 86 | *.lnk 87 | 88 | ### MicrosoftOffice template 89 | *.tmp 90 | 91 | # Word temporary 92 | ~$*.doc* 93 | 94 | # Excel temporary 95 | ~$*.xls* 96 | 97 | # Excel Backup File 98 | *.xlk 99 | 100 | ### OSX template 101 | .DS_Store 102 | .AppleDouble 103 | .LSOverride 104 | 105 | # Icon must end with two \r 106 | Icon 107 | 108 | # Thumbnails 109 | ._* 110 | 111 | # Files that might appear in the root of a volume 112 | .DocumentRevisions-V100 113 | .fseventsd 114 | .Spotlight-V100 115 | .TemporaryItems 116 | .Trashes 117 | .VolumeIcon.icns 118 | 119 | # Directories potentially created on remote AFP share 120 | .AppleDB 121 | .AppleDesktop 122 | Network Trash Folder 123 | Temporary Items 124 | .apdisk 125 | 126 | ### Eclipse template 127 | *.pydevproject 128 | .metadata 129 | .gradle 130 | bin/ 131 | tmp/ 132 | # *.tmp 133 | *.bak 134 | *.swp 135 | *~.nib 136 | local.properties 137 | # .settings/ 138 | .loadpath 139 | 140 | # Eclipse Core 141 | # .project 142 | 143 | # External tool builders 144 | .externalToolBuilders/ 145 | 146 | # Locally stored "Eclipse launch configurations" 147 | # *.launch 148 | 149 | # CDT-specific 150 | .cproject 151 | 152 | # JDT-specific (Eclipse Java Development Tools) 153 | # .classpath 154 | 155 | # Java annotation processor (APT) 156 | # .factorypath 157 | 158 | # PDT-specific 159 | .buildpath 160 | 161 | # sbteclipse plugin 162 | .target 163 | 164 | # TeXlipse plugin 165 | .texlipse 166 | 167 | ### Java template 168 | *.class 169 | 170 | # Mobile Tools for Java (J2ME) 171 | .mtj.tmp/ 172 | 173 | # Package Files # 174 | #*.jar 175 | *.war 176 | *.ear 177 | 178 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 179 | hs_err_pid* 180 | 181 | # Do not upload .car 182 | .car 183 | -------------------------------------------------------------------------------- /Docker-Commands.md: -------------------------------------------------------------------------------- 1 | ## How to build the docker image ? 2 | 3 | Maven command : 4 | 5 | ``` 6 | clean package docker:build 7 | ``` 8 | 9 | ## How to check the docker images ? 10 | 11 | ``` 12 | docker images 13 | ``` 14 | 15 | ## How to run the Docker Image? 16 | 17 | ``` 18 | docker run -it -p 8080:8080 --name bootkafka -e ENVIRONMENT=stage -e KAFKABROKER=localhost:9092 dilipthelip/learnbootkafka-manual-offset-docker 19 | ``` 20 | 21 | ## How to check the containers ? 22 | 23 | ``` 24 | docker ps -a 25 | ``` 26 | 27 | ## How to stop a container in Docker ? 28 | 29 | ``` 30 | docker stop 31 | ``` 32 | 33 | ## How to remove a container in Docker ? 34 | 35 | ``` 36 | docker rm 37 | ``` 38 | 39 | ## How to delete an image in Docker ? 40 | 41 | ``` 42 | docker rmi 43 | ``` 44 | 45 | ## How to push a docker Image: 46 | 47 | Step 1: 48 | 49 | - Create a docker hub account using the following link https://hub.docker.com/. 50 | 51 | Step 2: 52 | 53 | Login in to docker and push the image. Follow the below commands. 54 | 55 | ``` 56 | docker login 57 | 58 | docker push 59 | ``` 60 | 61 | 62 | ## Command to run the Docker Image to connect to Kafka running in docker 63 | ``` 64 | docker run --name dockerboot -p 8080:8080 -e ENVIRONMENT=stage -e KAFKABROKER=172.17.0.3:9092 dilipthelip/learnbootkafka-manual-offset-docker 65 | ``` 66 | 67 | ## Url to connect to the spring boot app: 68 | 69 | ``` 70 | http://localhost:8080/home?input=Hello 71 | ``` 72 | 73 | ## Setting needs to be done in VirtualBox 74 | 75 | Please have this settings done in your VirtualBox to connect to the docker container using the local host. 76 | 77 | ``` 78 | http://stackoverflow.com/questions/27471688/how-to-access-tomcat-running-in-docker-container-from-browser 79 | ``` 80 | 81 | 82 | ## How to kill a java process running on a particular port? 83 | 84 | ``` 85 | sudo lsof -i :8080 86 | 87 | kill -9 [PID] 88 | 89 | ``` 90 | -------------------------------------------------------------------------------- /Kafka-Docker-Commands .md: -------------------------------------------------------------------------------- 1 | ## Download Docker Images from github: 2 | 3 | Clone the following repo to your local. 4 | 5 | ``` 6 | git clone https://github.com/ches/docker-kafka.git 7 | ``` 8 | ## Run the Zookeeper Docker Image: 9 | 10 | ``` 11 | docker run -d --name zookeeper jplock/zookeeper:3.4.6 12 | ``` 13 | 14 | Check the docker zookeeper image is up by running the following command. 15 | 16 | ``` 17 | docker ps -a 18 | ``` 19 | ## Run the Kafka Broker Docker Image: 20 | 21 | ``` 22 | docker run -d --name kafka --link zookeeper:zookeeper ches/kafka 23 | ``` 24 | 25 | Check the docker kafka image is up by running the following command. 26 | 27 | ``` 28 | docker ps -a 29 | ``` 30 | 31 | ## Create a topic to the docker Kafka 32 | 33 | ### Check the port of zookeeper Docker instance 34 | 35 | ``` 36 | docker inspect --format '{{ .NetworkSettings.IPAddress }}' zookeeper 37 | ``` 38 | 39 | ### Check the port of kafka Docker instance 40 | 41 | ``` 42 | docker inspect --format '{{ .NetworkSettings.IPAddress }}' kafka 43 | ``` 44 | 45 | ### Set the env variables: 46 | 47 | ``` 48 | ZK_IP=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' zookeeper) 49 | KAFKA_IP=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' kafka) 50 | ``` 51 | 52 | ``` 53 | docker run --rm ches/kafka kafka-topics.sh --create --topic my-topic --replication-factor 1 --partitions 1 --zookeeper $ZK_IP:2181 54 | ``` 55 | -------------------------------------------------------------------------------- /Kafka_Commands.md: -------------------------------------------------------------------------------- 1 | # Kafka Commands 2 | 3 | ## How to download Kafka ? 4 | 5 | ``` 6 | curl "http://mirror.metrocast.net/apache/kafka/0.10.2.0/kafka_2.12-0.10.2.0.tgz" | tar xz 7 | ``` 8 | 9 | ## How to start a zookeeper ? 10 | 11 | **Windows:** 12 | 13 | ``` 14 | zookeeper-server-start.bat ..\..\config\zookeeper.properties 15 | ``` 16 | 17 | **MAC/Unix:** 18 | 19 | ``` 20 | ./zookeeper-server-start.sh ../config/zookeeper.properties 21 | ``` 22 | ## How to start a Kafka Broker ? 23 | 24 | **Windows:** 25 | 26 | ``` 27 | kafka-server-start.bat ..\..\config\server.properties 28 | ``` 29 | 30 | **MAC/Unix :** 31 | 32 | ``` 33 | ./kafka-server-start.sh ../config/server.properties 34 | ``` 35 | 36 | ## How to check the configuration of all the topics in a broker ? 37 | **Windows** 38 | 39 | ``` 40 | kafka-topics.bat --describe --zookeeper localhost:2181 41 | ``` 42 | 43 | **MAC:** 44 | ``` 45 | ./kafka-topics.sh --describe --zookeeper localhost:2181 46 | ``` 47 | 48 | ## How to check the configuration of a particular topic? 49 | **Windows** 50 | 51 | ``` 52 | kafka-topics.bat --describe --topic replicate_topic --zookeeper localhost:2181 53 | ``` 54 | **MAC:** 55 | ``` 56 | ./kafka-topics.sh --describe --topic replicate_topic --zookeeper localhost:2181 57 | ``` 58 | 59 | ## How to create a topic ? 60 | **Windows** 61 | ``` 62 | kafka-topics.bat --create --topic -zookeeper localhost:2181 --replication-factor 1 --partitions 1. 63 | ``` 64 | Example: 65 | 66 | ``` 67 | kafka-topics.bat --create --topic my-first-topic -zookeeper localhost:2181 --replication-factor 1 --partitions 1. 68 | ``` 69 | 70 | **MAC:** 71 | ``` 72 | ./kafka-topics.sh --create --topic -zookeeper localhost:2181 --replication-factor 1 --partitions 1 73 | 74 | ``` 75 | 76 | Example: 77 | The below command creates a topic called **my-first-topic**. 78 | ``` 79 | ./kafka-topics.sh --create --topic my-first-topic -zookeeper localhost:2181 --replication-factor 1 --partitions 1 80 | ``` 81 | 82 | ## How to instantiate a Console Producer? 83 | 84 | **Windows:** 85 | ``` 86 | kafka-console-producer.bat --broker-list localhost:9092 --topic 87 | ``` 88 | 89 | Example: 90 | ``` 91 | kafka-console-producer.bat --broker-list localhost:9092 --topic my-first-topic 92 | ``` 93 | 94 | **MAC:** 95 | ``` 96 | ./kafka-console-producer.sh --broker-list localhost:9092 --topic 97 | ``` 98 | 99 | Example : 100 | 101 | ``` 102 | ./kafka-console-producer.sh --broker-list localhost:9092 --topic my-first-topic 103 | ``` 104 | 105 | ## How to instantiate a Console Consumer? 106 | 107 | **Windows:** 108 | ``` 109 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic --from-beginning 110 | ``` 111 | 112 | Example: 113 | ``` 114 | kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic my-first-topic --from-beginning. 115 | 116 | ``` 117 | 118 | **MAC** 119 | ``` 120 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic --from-beginning 121 | ``` 122 | 123 | Example: 124 | ``` 125 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic my-first-topic --from-beginning 126 | ``` 127 | 128 | ## How to delete a topic? 129 | 130 | **Windows** 131 | 132 | ``` 133 | kafka-topics.bat --delete --zookeeper localhost:2181 --topic your_topic_name 134 | ``` 135 | 136 | **MAC:** 137 | 138 | ``` 139 | ./kafka-topics.sh --delete --zookeeper localhost:2181 --topic your_topic_name 140 | ``` 141 | 142 | ## How to alter the configuration of a topic ? 143 | 144 | **Windows:** 145 | 146 | ``` 147 | kafka-topics.bat --zookeeper localhost:2181 --alter --topic --partitions 4 148 | ``` 149 | 150 | ``` 151 | kafka-topics.bat --zookeeper localhost:2181 --alter --topic demo-topic --partitions 4 152 | ``` 153 | 154 | **MAC** 155 | 156 | ``` 157 | ./kafka-topics.sh --zookeeper localhost:2181 --alter --topic --partitions 4 158 | ``` 159 | 160 | ``` 161 | ./kafka-topics.sh --zookeeper localhost:2181 --alter --topic demo-topic --partitions 4 162 | ``` 163 | 164 | ## How to enable security in Kafka using SSL ? 165 | 166 | ### SSL Set up in KAFKA BROKER: 167 | 168 | Step 1: 169 | **Generate SSL key and Certificate for broker:** 170 | 171 | Keystore -> which stores each machine’s own identity. 172 | 173 | Here we are creating the keystore file **server.keystore.jks** that stores the Certificate. 174 | The validity of Certificate is given as 365 days below. 175 | 176 | ``` 177 | keytool -keystore server.keystore.jks -alias localhost -validity 365 -genkey 178 | ``` 179 | 180 | Step 2: 181 | 182 | **Creating your own CA:** 183 | Here we are creating a Certificate Authority which is responsible for signing certificates. 184 | We will add these certs to the **server.keystore.jks** file and **client.truststore.jks** that we will be creating in a while. 185 | 186 | ``` 187 | openssl req -new -x509 -keyout ca-key -out ca-cert -days 365 188 | ``` 189 | 190 | Step 3: 191 | 192 | **Here we will generate the truststore** 193 | 194 | ``` 195 | keytool -keystore server.truststore.jks -alias CARoot -import -file ca-cert 196 | 197 | ``` 198 | 199 | Step 4: 200 | 201 | **Sign all certificates in the keystore with the CA we generated.** 202 | 203 | Export the certificate in to the keystore. 204 | 205 | ``` 206 | keytool -keystore server.keystore.jks -alias localhost -certreq -file cert-file 207 | ``` 208 | Then sign it with the CA: 209 | 210 | ``` 211 | openssl x509 -req -CA ca-cert -CAkey ca-key -in cert-file -out cert-signed -days 365 -CAcreateserial -passin pass:kafka123 212 | 213 | keytool -keystore server.keystore.jks -alias CARoot -import -file ca-cert 214 | keytool -keystore server.keystore.jks -alias localhost -import -file cert-signed 215 | 216 | ``` 217 | Step 5: 218 | 219 | **Add the SSL in server.properties file of Kafka distribution** 220 | 221 | The below setting will make sure that the broker will authenticate the clients (Kafka Consumers) who are trying to access the broker. 222 | 223 | ``` 224 | listeners=SSL://localhost:9092 225 | 226 | advertised.listeners=SSL://localhost:9092 227 | 228 | 229 | security.inter.broker.protocol = SSL 230 | ssl.client.auth=required 231 | 232 | ssl.keystore.location=/server.keystore.jks 233 | ssl.keystore.password=changeit 234 | ssl.key.password=changeit 235 | ssl.truststore.location=/server.truststore.jks 236 | ssl.truststore.password=changeit 237 | ssl.keystore.type = JKS 238 | ssl.truststore.type = JKS 239 | ``` 240 | 241 | Step 6: 242 | 243 | Run the below command to check servers keystore and truststore are set up correctly. 244 | 245 | ``` 246 | openssl s_client -debug -connect localhost:9093 -tls1 247 | ``` 248 | 249 | 250 | 251 | With this we came to the ends of Setting up the SSL in **Kafka Broker**. 252 | 253 | ### Kafka Console Producer and Consumer using SSL: 254 | 255 | **Console Producer:** 256 | ``` 257 | ./kafka-console-producer.sh --broker-list localhost:9092 --topic test --producer.config ../client-ssl.properties 258 | ``` 259 | **Console Consumer:** 260 | ``` 261 | ./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test --from-beginning --new-consumer --consumer.config ../client-ssl.properties 262 | 263 | ``` 264 | 265 | 266 | 267 | ## How to kill the Broker Process? 268 | 269 | Step 1: 270 | ``` 271 | ps ax | grep -i 'kafka\.Kafka' 272 | ``` 273 | 274 | Step 2: 275 | 276 | ``` 277 | kill -9 278 | ``` 279 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TeachApacheKafka -------------------------------------------------------------------------------- /client-ssl.properties: -------------------------------------------------------------------------------- 1 | security.protocol=SSL 2 | ssl.truststore.location=/server.truststore.jks 3 | ssl.truststore.password=kafka123 4 | ssl.keystore.location=/server.keystore.jks 5 | ssl.keystore.password=kafka123 6 | ssl.key.password=kafka123 7 | -------------------------------------------------------------------------------- /kafka/.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /kafka/.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | -------------------------------------------------------------------------------- /kafka/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | kafka 4 | 5 | 6 | 7 | 8 | 9 | org.eclipse.jdt.core.javabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.m2e.core.maven2Builder 15 | 16 | 17 | 18 | 19 | 20 | org.eclipse.jdt.core.javanature 21 | org.eclipse.m2e.core.maven2Nature 22 | 23 | 24 | -------------------------------------------------------------------------------- /kafka/.settings/org.eclipse.jdt.core.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5 3 | org.eclipse.jdt.core.compiler.compliance=1.5 4 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning 5 | org.eclipse.jdt.core.compiler.source=1.5 6 | -------------------------------------------------------------------------------- /kafka/.settings/org.eclipse.m2e.core.prefs: -------------------------------------------------------------------------------- 1 | activeProfiles= 2 | eclipse.preferences.version=1 3 | resolveWorkspaceProjects=true 4 | version=1 5 | -------------------------------------------------------------------------------- /kafka/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | com.learnkafka 4 | kafka 5 | 0.0.1-SNAPSHOT 6 | 7 | 8 | 9 | org.apache.kafka 10 | kafka-clients 11 | 0.10.0.1 12 | 13 | 14 | 15 | 16 | 42 | 43 | 44 | io.fabric8 45 | docker-maven-plugin 46 | 0.16.7 47 | 48 | bootkafka 49 | java 50 | 51 | 52 | arquillian/age-checker:${project.version} 53 | 54 | ${project.basedir} 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/consumer/ConsumerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | 10 | public class ConsumerKafka { 11 | 12 | public static void main(String[] args) { 13 | 14 | Properties properties=new Properties(); 15 | properties.put("bootstrap.servers", "localhost:9092,localhost:9093"); 16 | properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 17 | properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 18 | properties.put("group.id","test1"); 19 | 20 | KafkaConsumer< String, String> consumer=new KafkaConsumer(properties); 21 | 22 | 23 | ArrayList topics=new ArrayList(); 24 | topics.add("my-fifth-topic"); 25 | 26 | consumer.subscribe(topics); // You can subscribe to any number of topics. 27 | 28 | try { 29 | 30 | while(true){ 31 | 32 | ConsumerRecords records = consumer.poll(1000); 33 | 34 | for(ConsumerRecord record : records){ 35 | 36 | System.out.println("Record read in KafkaConsumerApp : " + record.toString()); 37 | 38 | } 39 | } 40 | 41 | } catch (Exception e) { 42 | // TODO: handle exception 43 | System.out.println("Inside exception loop : "); 44 | e.printStackTrace(); 45 | }finally{ 46 | consumer.close(); 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/consumer/ConsumerKafkaManualOffset.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | 10 | public class ConsumerKafkaManualOffset { 11 | 12 | public static void main(String[] args) { 13 | 14 | Properties properties=new Properties(); 15 | properties.put("bootstrap.servers", "localhost:9092"); 16 | properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 17 | properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 18 | properties.put("enable.auto.commit", false); 19 | properties.put("group.id","test1"); 20 | 21 | KafkaConsumer< String, String> consumer=new KafkaConsumer(properties); 22 | 23 | 24 | ArrayList topics=new ArrayList(); 25 | topics.add("my-fifth-topic"); 26 | 27 | consumer.subscribe(topics); // You can subscribe to any number of topics. 28 | 29 | try { 30 | 31 | while(true){ 32 | 33 | ConsumerRecords records = consumer.poll(1000); 34 | 35 | for(ConsumerRecord record : records){ 36 | 37 | System.out.println("Record read in KafkaConsumerApp : " + record.toString()); 38 | 39 | consumer.commitSync(); 40 | } 41 | } 42 | 43 | } catch (Exception e) { 44 | // TODO: handle exception 45 | System.out.println("Inside exception loop : "); 46 | e.printStackTrace(); 47 | }finally{ 48 | consumer.close(); 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/consumer/ConsumerKafkaResetOffset.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | import org.apache.kafka.common.TopicPartition; 10 | 11 | public class ConsumerKafkaResetOffset { 12 | 13 | public static void main(String[] args) { 14 | 15 | Properties properties=new Properties(); 16 | properties.put("bootstrap.servers", "localhost:9092,localhost:9093"); 17 | properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 18 | properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 19 | properties.put("group.id","test1"); 20 | 21 | KafkaConsumer< String, String> consumer=new KafkaConsumer(properties); 22 | 23 | String topic = "demo-reset-topic"; 24 | 25 | 26 | ArrayList topics=new ArrayList(); 27 | topics.add(topic); 28 | 29 | consumer.subscribe(topics); // You can subscribe to any number of topics. 30 | 31 | // TopicPartition parition = new TopicPartition(topic, 0); 32 | // boolean flag=false; 33 | try { 34 | 35 | while(true){ 36 | 37 | ConsumerRecords records = consumer.poll(1000); 38 | 39 | // if(!flag){ 40 | // consumer.seek(parition, 0); 41 | // flag = true; 42 | // } 43 | 44 | for(ConsumerRecord record : records){ 45 | 46 | System.out.println("Record read in KafkaConsumerApp : " + record.toString()); 47 | 48 | } 49 | } 50 | 51 | } catch (Exception e) { 52 | // TODO: handle exception 53 | System.out.println("Inside exception loop : "); 54 | e.printStackTrace(); 55 | }finally{ 56 | consumer.close(); 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/consumer/ConsumerKafkaSSL.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | 10 | public class ConsumerKafkaSSL { 11 | 12 | public static void main(String[] args) { 13 | 14 | Properties properties=new Properties(); 15 | properties.put("bootstrap.servers", "localhost:9092"); 16 | properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 17 | properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 18 | properties.put("group.id","test2"); 19 | properties.put("security.protocol","SSL"); 20 | properties.put("ssl.truststore.location","/server.truststore.jks"); 21 | properties.put("ssl.truststore.password","kafka123"); 22 | properties.put("ssl.keystore.location","/server.keystore.jks"); 23 | properties.put("ssl.keystore.password","kafka123"); 24 | properties.put("ssl.key.password","kafka123"); 25 | 26 | KafkaConsumer< String, String> consumer=null; 27 | 28 | try { 29 | ArrayList topics=new ArrayList(); 30 | topics.add("my-ssl-topic"); 31 | consumer = new KafkaConsumer(properties); 32 | consumer.subscribe(topics); // You can subscribe to any number of topics. 33 | 34 | 35 | 36 | while(true){ 37 | 38 | ConsumerRecords records = consumer.poll(1000); 39 | 40 | for(ConsumerRecord record : records){ 41 | 42 | System.out.println("Record read in KafkaConsumerApp : " + record.toString()); 43 | 44 | } 45 | } 46 | 47 | } catch (Exception e) { 48 | // TODO: handle exception 49 | System.out.println("Inside exception loop : "); 50 | e.printStackTrace(); 51 | }finally{ 52 | consumer.close(); 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/consumergroup/ConsumerKafka1.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumergroup; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | 10 | public class ConsumerKafka1 { 11 | 12 | public static void main(String[] args) { 13 | 14 | Properties properties=new Properties(); 15 | properties.put("bootstrap.servers", "localhost:9092,localhost:9093"); 16 | properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 17 | properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 18 | properties.put("group.id","group1"); 19 | 20 | KafkaConsumer< String, String> consumer=new KafkaConsumer(properties); 21 | 22 | 23 | ArrayList topics=new ArrayList(); 24 | topics.add("my-fifth-topic"); 25 | 26 | consumer.subscribe(topics); // You can subscribe to any number of topics. 27 | 28 | try { 29 | 30 | while(true){ 31 | 32 | ConsumerRecords records = consumer.poll(10); 33 | 34 | for(ConsumerRecord record : records){ 35 | 36 | System.out.println("Record read in KafkaConsumerApp : " + record.toString()); 37 | 38 | } 39 | } 40 | 41 | } catch (Exception e) { 42 | // TODO: handle exception 43 | System.out.println("Inside exception loop : "); 44 | e.printStackTrace(); 45 | }finally{ 46 | consumer.close(); 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/consumergroup/ConsumerKafka2.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumergroup; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | 10 | public class ConsumerKafka2 { 11 | 12 | public static void main(String[] args) { 13 | 14 | Properties properties=new Properties(); 15 | properties.put("bootstrap.servers", "localhost:9092,localhost:9093"); 16 | properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 17 | properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 18 | properties.put("group.id","group1"); 19 | 20 | KafkaConsumer< String, String> consumer=new KafkaConsumer(properties); 21 | 22 | 23 | ArrayList topics=new ArrayList(); 24 | topics.add("my-fifth-topic"); 25 | 26 | consumer.subscribe(topics); // You can subscribe to any number of topics. 27 | 28 | try { 29 | 30 | while(true){ 31 | 32 | ConsumerRecords records = consumer.poll(10); 33 | 34 | for(ConsumerRecord record : records){ 35 | 36 | System.out.println("Record read in KafkaConsumerApp : " + record.toString()); 37 | 38 | } 39 | } 40 | 41 | } catch (Exception e) { 42 | // TODO: handle exception 43 | System.out.println("Inside exception loop : "); 44 | e.printStackTrace(); 45 | }finally{ 46 | consumer.close(); 47 | } 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/consumergroup/ConsumerKafka3.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumergroup; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | 10 | public class ConsumerKafka3 { 11 | public static void main(String[] args) { 12 | 13 | Properties properties=new Properties(); 14 | properties.put("bootstrap.servers", "localhost:9092,localhost:9093"); 15 | properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 16 | properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); 17 | properties.put("group.id","group1"); 18 | 19 | KafkaConsumer< String, String> consumer=new KafkaConsumer(properties); 20 | 21 | 22 | ArrayList topics=new ArrayList(); 23 | topics.add("my-fifth-topic"); 24 | 25 | consumer.subscribe(topics); // You can subscribe to any number of topics. 26 | 27 | try { 28 | 29 | while(true){ 30 | 31 | ConsumerRecords records = consumer.poll(10); 32 | 33 | for(ConsumerRecord record : records){ 34 | 35 | System.out.println("Record read in KafkaConsumerApp : " + record.toString()); 36 | 37 | } 38 | } 39 | 40 | } catch (Exception e) { 41 | // TODO: handle exception 42 | System.out.println("Inside exception loop : "); 43 | e.printStackTrace(); 44 | }finally{ 45 | consumer.close(); 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/paritioner/ProducerKafkaPartitioner.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.paritioner; 2 | 3 | import java.util.Map; 4 | 5 | import org.apache.kafka.clients.producer.Partitioner; 6 | import org.apache.kafka.common.Cluster; 7 | 8 | public class ProducerKafkaPartitioner implements Partitioner { 9 | 10 | public void configure(Map configs) { 11 | // TODO Auto-generated method stub 12 | 13 | } 14 | 15 | public int partition(String topic, Object key, byte[] keyBytes, 16 | Object value, byte[] valueBytes, Cluster cluster) { 17 | // TODO Auto-generated method stub 18 | return 0; 19 | } 20 | 21 | public void close() { 22 | // TODO Auto-generated method stub 23 | 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/producer/ProducerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.producer; 2 | 3 | import java.util.Properties; 4 | 5 | import org.apache.kafka.clients.producer.KafkaProducer; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | 8 | public class ProducerKafka { 9 | 10 | public static void main(String[] args) { 11 | Properties properties=new Properties(); 12 | properties.put("bootstrap.servers", "localhost:9092"); 13 | properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 14 | properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 15 | 16 | KafkaProducer myProducer= new KafkaProducer(properties); 17 | 18 | try { 19 | 20 | for(int i=1;i<250;i++){ 21 | myProducer.send(new ProducerRecord("my-fifth-topic", "Message Value : " + Integer.toString(i))); 22 | myProducer.send(new ProducerRecord("my-fifth-topic", "url:/file")); 23 | 24 | 25 | } 26 | } catch (Exception e) { 27 | e.printStackTrace(); 28 | }finally{ 29 | myProducer.close(); 30 | } 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /kafka/src/main/java/com/learnkafka/producer/ProducerKafkaSSL.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.producer; 2 | 3 | import java.util.Properties; 4 | 5 | import org.apache.kafka.clients.producer.KafkaProducer; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | 8 | public class ProducerKafkaSSL { 9 | public static void main(String[] args) { 10 | Properties properties=new Properties(); 11 | properties.put("bootstrap.servers", "localhost:9092"); 12 | properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 13 | properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 14 | properties.put("security.protocol","SSL"); 15 | properties.put("ssl.truststore.location","/server.truststore.jks"); 16 | properties.put("ssl.truststore.password","kafka123"); 17 | properties.put("ssl.keystore.location","/server.keystore.jks"); 18 | properties.put("ssl.keystore.password","kafka123"); 19 | properties.put("ssl.key.password","kafka123"); 20 | 21 | KafkaProducer myProducer= new KafkaProducer(properties); 22 | 23 | try { 24 | 25 | for(int i=1;i<2;i++){ 26 | myProducer.send(new ProducerRecord("my-ssl-topic", "Message Value : " + Integer.toString(i))); 27 | 28 | } 29 | } catch (Exception e) { 30 | e.printStackTrace(); 31 | }finally{ 32 | myProducer.close(); 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | /build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | 6 | ### STS ### 7 | .apt_generated 8 | .classpath 9 | .factorypath 10 | .project 11 | .settings 12 | .springBeans 13 | .sts4-cache 14 | 15 | ### IntelliJ IDEA ### 16 | .idea 17 | *.iws 18 | *.iml 19 | *.ipr 20 | /out/ 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | 29 | ### VS Code ### 30 | .vscode/ 31 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'org.springframework.boot' version '2.1.4.RELEASE' 3 | id 'java' 4 | } 5 | 6 | apply plugin: 'io.spring.dependency-management' 7 | 8 | group = 'com.learnkafka' 9 | version = '0.0.1-SNAPSHOT' 10 | sourceCompatibility = '1.8' 11 | 12 | configurations { 13 | compileOnly { 14 | extendsFrom annotationProcessor 15 | } 16 | } 17 | 18 | repositories { 19 | mavenCentral() 20 | } 21 | 22 | dependencies { 23 | implementation 'org.springframework.boot:spring-boot-starter' 24 | compile 'org.springframework.boot:spring-boot-starter-web' 25 | implementation 'org.springframework.kafka:spring-kafka' 26 | implementation 'org.springframework.retry:spring-retry' 27 | compileOnly 'org.projectlombok:lombok' 28 | annotationProcessor 'org.projectlombok:lombok' 29 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 30 | testImplementation 'org.springframework.kafka:spring-kafka-test' 31 | } 32 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/TeachApacheKafka/f1951861aacaf600544e098f3e4dfeb41de75029/learn-kafka-spring-boot-retry/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Attempt to set APP_HOME 10 | # Resolve links: $0 may be a link 11 | PRG="$0" 12 | # Need this for relative symlinks. 13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS='"-Xmx64m"' 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 34 | MAX_FD="maximum" 35 | 36 | warn () { 37 | echo "$*" 38 | } 39 | 40 | die () { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? -ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Escape application args 158 | save () { 159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 160 | echo " " 161 | } 162 | APP_ARGS=$(save "$@") 163 | 164 | # Collect all arguments for the java command, following the shell quoting and substitution rules 165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 166 | 167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 169 | cd "$(dirname "$0")" 170 | fi 171 | 172 | exec "$JAVACMD" "$@" 173 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set DIRNAME=%~dp0 12 | if "%DIRNAME%" == "" set DIRNAME=. 13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS="-Xmx64m" 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | 53 | :win9xME_args 54 | @rem Slurp the command line arguments. 55 | set CMD_LINE_ARGS= 56 | set _SKIP=2 57 | 58 | :win9xME_args_slurp 59 | if "x%~1" == "x" goto execute 60 | 61 | set CMD_LINE_ARGS=%* 62 | 63 | :execute 64 | @rem Setup the command line 65 | 66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 67 | 68 | @rem Execute Gradle 69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 70 | 71 | :end 72 | @rem End local scope for the variables with windows NT shell 73 | if "%ERRORLEVEL%"=="0" goto mainEnd 74 | 75 | :fail 76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 77 | rem the _cmd.exe /c_ return code! 78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 79 | exit /b 1 80 | 81 | :mainEnd 82 | if "%OS%"=="Windows_NT" endlocal 83 | 84 | :omega 85 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/settings.gradle: -------------------------------------------------------------------------------- 1 | pluginManagement { 2 | repositories { 3 | gradlePluginPortal() 4 | } 5 | } 6 | rootProject.name = 'learn-kafka-spring-boot-retry' 7 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/src/main/java/com/learnkafka/LearnKafkaSpringBootRetryApplication.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class LearnKafkaSpringBootRetryApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(LearnKafkaSpringBootRetryApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/src/main/java/com/learnkafka/config/ConsumerConfig.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.config; 2 | 3 | import com.learnkafka.consumer.ConsumerRetryListener; 4 | import org.springframework.beans.factory.annotation.Autowired; 5 | import org.springframework.beans.factory.annotation.Value; 6 | import org.springframework.boot.autoconfigure.kafka.ConcurrentKafkaListenerContainerFactoryConfigurer; 7 | import org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 10 | import org.springframework.kafka.config.KafkaListenerContainerFactory; 11 | import org.springframework.kafka.core.ConsumerFactory; 12 | import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; 13 | import org.springframework.retry.backoff.ExponentialBackOffPolicy; 14 | import org.springframework.retry.backoff.FixedBackOffPolicy; 15 | import org.springframework.retry.policy.AlwaysRetryPolicy; 16 | import org.springframework.retry.policy.SimpleRetryPolicy; 17 | import org.springframework.retry.support.RetryTemplate; 18 | 19 | @Configuration 20 | public class ConsumerConfig { 21 | 22 | @Autowired 23 | ConsumerRetryListener consumerRetryListener; 24 | 25 | @Value("${spring.kafka.retry.backoff.initial-interval}") 26 | private Long initialBackoffInterval; 27 | 28 | @Value("${spring.kafka.retry.backoff.max-interval}") 29 | private Long backOffMaxInterval; 30 | 31 | @Value("${spring.kafka.retry.generate-alert-retry-threshold}") 32 | private Integer maxRetries; 33 | 34 | 35 | @Bean 36 | ConcurrentKafkaListenerContainerFactory 37 | kafkaListenerContainerFactory(ConcurrentKafkaListenerContainerFactoryConfigurer configurer, ConsumerFactory consumerFactory) { 38 | ConcurrentKafkaListenerContainerFactory containerFactory = new ConcurrentKafkaListenerContainerFactory<>(); 39 | configurer.configure(containerFactory, consumerFactory); 40 | containerFactory.setRetryTemplate(retryTemplate()); // This enables the retry. 41 | return containerFactory; 42 | } 43 | 44 | @Bean 45 | public RetryTemplate retryTemplate() { 46 | SimpleRetryPolicy simpleRetryPolicy = getRetryPolicy(); 47 | FixedBackOffPolicy fixedBackOffPolicy = getBackOffPolicy(); 48 | RetryTemplate retryTemplate = new RetryTemplate(); 49 | retryTemplate.setRetryPolicy(simpleRetryPolicy); 50 | retryTemplate.setBackOffPolicy(fixedBackOffPolicy); 51 | retryTemplate.registerListener(consumerRetryListener);// This listener takes care of listening to the activity and logs the necessary events. 52 | return retryTemplate; 53 | } 54 | 55 | /** 56 | * SimpleRetryPolicy sets the number of times the retry will happen. 57 | * @return 58 | */ 59 | public SimpleRetryPolicy getRetryPolicy(){ 60 | SimpleRetryPolicy simpleRetryPolicy = new SimpleRetryPolicy(); 61 | simpleRetryPolicy.setMaxAttempts(maxRetries); 62 | return simpleRetryPolicy; 63 | } 64 | 65 | /** 66 | * FixedBackOffPolicy sets the interval between the retry. 67 | * @return 68 | */ 69 | public FixedBackOffPolicy getBackOffPolicy() { 70 | FixedBackOffPolicy backOffPolicy = new FixedBackOffPolicy(); 71 | backOffPolicy.setBackOffPeriod(initialBackoffInterval); 72 | return backOffPolicy; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/src/main/java/com/learnkafka/consumer/ConsumerRetryListener.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.springframework.beans.factory.annotation.Value; 5 | import org.springframework.retry.RetryCallback; 6 | import org.springframework.retry.RetryContext; 7 | import org.springframework.retry.listener.RetryListenerSupport; 8 | import org.springframework.stereotype.Component; 9 | 10 | @Component 11 | @Slf4j 12 | public class ConsumerRetryListener extends RetryListenerSupport { 13 | 14 | @Value("${spring.kafka.retry.generate-alert-retry-threshold}") 15 | private Integer maxRetries; 16 | 17 | @Override 18 | public boolean open(RetryContext context, RetryCallback callback) { 19 | log.info("Retry context opened"); 20 | return true; 21 | } 22 | 23 | @Override 24 | public void close(RetryContext context, RetryCallback callback, Throwable throwable) { 25 | if (context.getRetryCount() == Integer.valueOf(maxRetries)) { 26 | log.error("Retry Threshold reached"); 27 | } 28 | } 29 | @Override 30 | public void onError(RetryContext context, RetryCallback callback, Throwable throwable) { 31 | log.info("Retry in onError"); 32 | 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/src/main/java/com/learnkafka/consumer/TopicConsumer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka.consumer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.omg.SendingContext.RunTime; 6 | import org.springframework.kafka.annotation.KafkaListener; 7 | import org.springframework.kafka.listener.AcknowledgingMessageListener; 8 | import org.springframework.kafka.support.Acknowledgment; 9 | import org.springframework.stereotype.Component; 10 | 11 | @Component 12 | @Slf4j 13 | public class TopicConsumer { 14 | 15 | @KafkaListener(id = "load-event-processor", topics = {"${spring.kafka.consumer.topic}"}) 16 | public void onMessage(ConsumerRecord data, Acknowledgment acknowledgment) { 17 | 18 | String errorRecord = "10"; 19 | try { 20 | log.info("Consumer Record read is : " + data); 21 | String readMessage = data.value(); 22 | if (readMessage.equals(errorRecord)) { 23 | throw new RuntimeException("Poisonous Message"); 24 | } 25 | log.info("Message is : " + readMessage); 26 | } catch (RuntimeException e) { 27 | acknowledgment.acknowledge(); // commit even when in error so that the poisonous record wont be processed again. 28 | log.error("RuntimeException is : " + e); 29 | throw e; 30 | } catch (Exception e) { 31 | log.error("Exception is : " + e); 32 | } 33 | acknowledgment.acknowledge(); // commits the offset to Kafka 34 | log.info("Offset Commited"); 35 | 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | profiles: 3 | active: local 4 | kafka: 5 | bootstrap-servers: localhost:9092 6 | consumer: 7 | topic: my-first-topic2 8 | group-id: my-first-topic-group 9 | auto-offset-reset: latest 10 | enable-auto-commit: false 11 | properties: 12 | session.timeout.ms: 15000 13 | ssl: 14 | endpoint: 15 | identification: 16 | algorithm: 17 | value.deserializer: org.apache.kafka.common.serialization.StringDeserializer 18 | listener: 19 | poll-timeout: 3000 20 | concurrency: 1 21 | ack-mode: manual 22 | retry: 23 | generate-alert-retry-threshold: 3 24 | backoff: 25 | initial-interval: 10000 26 | max-interval: 60000 27 | -------------------------------------------------------------------------------- /learn-kafka-spring-boot-retry/src/test/java/com/learnkafka/LearnKafkaSpringBootRetryApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.learnkafka; 2 | 3 | import org.junit.Test; 4 | import org.junit.runner.RunWith; 5 | import org.springframework.boot.test.context.SpringBootTest; 6 | import org.springframework.test.context.junit4.SpringRunner; 7 | 8 | @RunWith(SpringRunner.class) 9 | @SpringBootTest 10 | public class LearnKafkaSpringBootRetryApplicationTests { 11 | 12 | @Test 13 | public void contextLoads() { 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | !.mvn/wrapper/maven-wrapper.jar 3 | 4 | ### STS ### 5 | .apt_generated 6 | .classpath 7 | .factorypath 8 | .project 9 | .settings 10 | .springBeans 11 | 12 | ### IntelliJ IDEA ### 13 | .idea 14 | *.iws 15 | *.iml 16 | *.ipr 17 | 18 | ### NetBeans ### 19 | nbproject/private/ 20 | build/ 21 | nbbuild/ 22 | dist/ 23 | nbdist/ 24 | .nb-gradle/ -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/TeachApacheKafka/f1951861aacaf600544e098f3e4dfeb41de75029/learnbootkafka-manual-offset-docker/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip 2 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/README.md: -------------------------------------------------------------------------------- 1 | # Learn Spring Boot Kafka 2 | 3 | ## Java Command to launch the jar file 4 | 5 | The below command will take the stage profile configuration and launch the job. 6 | 7 | ``` 8 | java -jar -Dspring.profiles.active=stage .jar 9 | ``` 10 | 11 | ## Launching multiple instances: 12 | 13 | ``` 14 | java -jar -Dspring.profiles.active=stage -Dserver.port=8081 .jar 15 | ``` 16 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven2 Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ] ; then 38 | 39 | if [ -f /etc/mavenrc ] ; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ] ; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 50 | cygwin=false; 51 | darwin=false; 52 | mingw=false 53 | case "`uname`" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true;; 56 | Darwin*) darwin=true 57 | # 58 | # Look for the Apple JDKs first to preserve the existing behaviour, and then look 59 | # for the new JDKs provided by Oracle. 60 | # 61 | if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then 62 | # 63 | # Apple JDKs 64 | # 65 | export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home 66 | fi 67 | 68 | if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then 69 | # 70 | # Apple JDKs 71 | # 72 | export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home 73 | fi 74 | 75 | if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then 76 | # 77 | # Oracle JDKs 78 | # 79 | export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home 80 | fi 81 | 82 | if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then 83 | # 84 | # Apple JDKs 85 | # 86 | export JAVA_HOME=`/usr/libexec/java_home` 87 | fi 88 | ;; 89 | esac 90 | 91 | if [ -z "$JAVA_HOME" ] ; then 92 | if [ -r /etc/gentoo-release ] ; then 93 | JAVA_HOME=`java-config --jre-home` 94 | fi 95 | fi 96 | 97 | if [ -z "$M2_HOME" ] ; then 98 | ## resolve links - $0 may be a link to maven's home 99 | PRG="$0" 100 | 101 | # need this for relative symlinks 102 | while [ -h "$PRG" ] ; do 103 | ls=`ls -ld "$PRG"` 104 | link=`expr "$ls" : '.*-> \(.*\)$'` 105 | if expr "$link" : '/.*' > /dev/null; then 106 | PRG="$link" 107 | else 108 | PRG="`dirname "$PRG"`/$link" 109 | fi 110 | done 111 | 112 | saveddir=`pwd` 113 | 114 | M2_HOME=`dirname "$PRG"`/.. 115 | 116 | # make it fully qualified 117 | M2_HOME=`cd "$M2_HOME" && pwd` 118 | 119 | cd "$saveddir" 120 | # echo Using m2 at $M2_HOME 121 | fi 122 | 123 | # For Cygwin, ensure paths are in UNIX format before anything is touched 124 | if $cygwin ; then 125 | [ -n "$M2_HOME" ] && 126 | M2_HOME=`cygpath --unix "$M2_HOME"` 127 | [ -n "$JAVA_HOME" ] && 128 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 129 | [ -n "$CLASSPATH" ] && 130 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"` 131 | fi 132 | 133 | # For Migwn, ensure paths are in UNIX format before anything is touched 134 | if $mingw ; then 135 | [ -n "$M2_HOME" ] && 136 | M2_HOME="`(cd "$M2_HOME"; pwd)`" 137 | [ -n "$JAVA_HOME" ] && 138 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" 139 | # TODO classpath? 140 | fi 141 | 142 | if [ -z "$JAVA_HOME" ]; then 143 | javaExecutable="`which javac`" 144 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then 145 | # readlink(1) is not available as standard on Solaris 10. 146 | readLink=`which readlink` 147 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then 148 | if $darwin ; then 149 | javaHome="`dirname \"$javaExecutable\"`" 150 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" 151 | else 152 | javaExecutable="`readlink -f \"$javaExecutable\"`" 153 | fi 154 | javaHome="`dirname \"$javaExecutable\"`" 155 | javaHome=`expr "$javaHome" : '\(.*\)/bin'` 156 | JAVA_HOME="$javaHome" 157 | export JAVA_HOME 158 | fi 159 | fi 160 | fi 161 | 162 | if [ -z "$JAVACMD" ] ; then 163 | if [ -n "$JAVA_HOME" ] ; then 164 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 165 | # IBM's JDK on AIX uses strange locations for the executables 166 | JAVACMD="$JAVA_HOME/jre/sh/java" 167 | else 168 | JAVACMD="$JAVA_HOME/bin/java" 169 | fi 170 | else 171 | JAVACMD="`which java`" 172 | fi 173 | fi 174 | 175 | if [ ! -x "$JAVACMD" ] ; then 176 | echo "Error: JAVA_HOME is not defined correctly." >&2 177 | echo " We cannot execute $JAVACMD" >&2 178 | exit 1 179 | fi 180 | 181 | if [ -z "$JAVA_HOME" ] ; then 182 | echo "Warning: JAVA_HOME environment variable is not set." 183 | fi 184 | 185 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 186 | 187 | # For Cygwin, switch paths to Windows format before running java 188 | if $cygwin; then 189 | [ -n "$M2_HOME" ] && 190 | M2_HOME=`cygpath --path --windows "$M2_HOME"` 191 | [ -n "$JAVA_HOME" ] && 192 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` 193 | [ -n "$CLASSPATH" ] && 194 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"` 195 | fi 196 | 197 | # traverses directory structure from process work directory to filesystem root 198 | # first directory with .mvn subdirectory is considered project base directory 199 | find_maven_basedir() { 200 | local basedir=$(pwd) 201 | local wdir=$(pwd) 202 | while [ "$wdir" != '/' ] ; do 203 | if [ -d "$wdir"/.mvn ] ; then 204 | basedir=$wdir 205 | break 206 | fi 207 | wdir=$(cd "$wdir/.."; pwd) 208 | done 209 | echo "${basedir}" 210 | } 211 | 212 | # concatenates all lines of a file 213 | concat_lines() { 214 | if [ -f "$1" ]; then 215 | echo "$(tr -s '\n' ' ' < "$1")" 216 | fi 217 | } 218 | 219 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} 220 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 221 | 222 | # Provide a "standardized" way to retrieve the CLI args that will 223 | # work with both Windows and non-Windows executions. 224 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 225 | export MAVEN_CMD_LINE_ARGS 226 | 227 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 228 | 229 | exec "$JAVACMD" \ 230 | $MAVEN_OPTS \ 231 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 232 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 233 | ${WRAPPER_LAUNCHER} "$@" 234 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM http://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven2 Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' 39 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 40 | 41 | @REM set %HOME% to equivalent of $HOME 42 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 43 | 44 | @REM Execute a user defined script before this one 45 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 46 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 47 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 48 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 49 | :skipRcPre 50 | 51 | @setlocal 52 | 53 | set ERROR_CODE=0 54 | 55 | @REM To isolate internal variables from possible post scripts, we use another setlocal 56 | @setlocal 57 | 58 | @REM ==== START VALIDATION ==== 59 | if not "%JAVA_HOME%" == "" goto OkJHome 60 | 61 | echo. 62 | echo Error: JAVA_HOME not found in your environment. >&2 63 | echo Please set the JAVA_HOME variable in your environment to match the >&2 64 | echo location of your Java installation. >&2 65 | echo. 66 | goto error 67 | 68 | :OkJHome 69 | if exist "%JAVA_HOME%\bin\java.exe" goto init 70 | 71 | echo. 72 | echo Error: JAVA_HOME is set to an invalid directory. >&2 73 | echo JAVA_HOME = "%JAVA_HOME%" >&2 74 | echo Please set the JAVA_HOME variable in your environment to match the >&2 75 | echo location of your Java installation. >&2 76 | echo. 77 | goto error 78 | 79 | @REM ==== END VALIDATION ==== 80 | 81 | :init 82 | 83 | set MAVEN_CMD_LINE_ARGS=%* 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | 121 | set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" 122 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 123 | 124 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% 125 | if ERRORLEVEL 1 goto error 126 | goto end 127 | 128 | :error 129 | set ERROR_CODE=1 130 | 131 | :end 132 | @endlocal & set ERROR_CODE=%ERROR_CODE% 133 | 134 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 135 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 136 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 137 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 138 | :skipRcPost 139 | 140 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 141 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 142 | 143 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 144 | 145 | exit /B %ERROR_CODE% -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | com.learnbootkafka-manual-offset 7 | learnbootkafka-manual-offset-docker 8 | 0.0.1-SNAPSHOT 9 | jar 10 | 11 | learnbootkafka 12 | Demo project for Spring Boot 13 | 14 | 15 | 16 | org.springframework.boot 17 | spring-boot-starter-parent 18 | 1.5.2.RELEASE 19 | 20 | 21 | 22 | 23 | UTF-8 24 | UTF-8 25 | 1.8 26 | dilipthelip 27 | 28 | 29 | 30 | 31 | org.springframework.boot 32 | spring-boot-starter-jersey 33 | 34 | 35 | org.springframework.kafka 36 | spring-kafka 37 | 38 | 39 | org.springframework.boot 40 | spring-boot-starter-web 41 | 42 | 43 | 44 | 45 | org.springframework.kafka 46 | spring-kafka 47 | 1.1.1.RELEASE 48 | 49 | 50 | 51 | 52 | org.springframework.boot 53 | spring-boot-starter-test 54 | test 55 | 56 | 57 | 58 | 59 | 60 | 61 | org.springframework.boot 62 | spring-boot-maven-plugin 63 | 64 | 65 | 66 | com.spotify 67 | docker-maven-plugin 68 | 0.4.11 69 | 70 | ${docker.image.prefix}/${project.artifactId} 71 | src/main/docker 72 | 73 | 74 | / 75 | ${project.build.directory} 76 | ${project.build.finalName}.jar 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM frolvlad/alpine-oraclejdk8:slim 2 | VOLUME /tmp 3 | ADD learnbootkafka-manual-offset-docker-0.0.1-SNAPSHOT.jar app.jar 4 | EXPOSE 8080 8443 8000 5 | COPY ./docker-entrypoint.sh /docker-entrypoint.sh 6 | ENTRYPOINT ["/docker-entrypoint.sh"] 7 | RUN chmod +x /docker-entrypoint.sh 8 | CMD ["start"] -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/docker/docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | if [ "$1" = 'start' ]; then 6 | 7 | # Check to see if the environment variable ENVIRONMENT is set. If it is, we can set our Spring Boot active profile 8 | # based on it. If it isn't, the default Spring Profile will be used. 9 | [ -z "$ENVIRONMENT" ] && printf "WARN:: No environment specified, relying on default Spring profile. To change the \ 10 | environment, please supply a Docker environment variable. Currently available environments are:\n\n\ 11 | stage (default)\n\ 12 | prod\n\n\ 13 | Example: docker run -e ENVIRONMENT=prod bootkafka bootkafka/learnbootkafka-manual-offset\n"; 14 | 15 | java_opts="-Djava.security.egd=file:/dev/./urandom" 16 | 17 | # If an environment variable is set to provide a different Spring Profile, use it. 18 | if [ $ENVIRONMENT ]; then 19 | java_opts="$java_opts -Dspring.profiles.active=$ENVIRONMENT" 20 | fi 21 | 22 | java_opts="$java_opts -Dkafka.broker=$KAFKABROKER" 23 | 24 | printf "java options are : $java_opts" 25 | # Launch the app with whatever Java options are required to execute. 26 | exec java -jar ${java_opts} /app.jar 27 | fi 28 | 29 | exec "$@" -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/java/com/learnbootkafka/LearnbootkafkaApplication.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class LearnbootkafkaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(LearnbootkafkaApplication.class, args); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/java/com/learnbootkafka/config/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.config; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerConfig; 7 | import org.apache.kafka.clients.producer.ProducerConfig; 8 | import org.apache.kafka.common.serialization.StringDeserializer; 9 | import org.apache.kafka.common.serialization.StringSerializer; 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.context.annotation.Bean; 12 | import org.springframework.context.annotation.Configuration; 13 | import org.springframework.core.env.Environment; 14 | import org.springframework.kafka.annotation.EnableKafka; 15 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 16 | import org.springframework.kafka.config.KafkaListenerContainerFactory; 17 | import org.springframework.kafka.core.ConsumerFactory; 18 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 19 | import org.springframework.kafka.core.DefaultKafkaProducerFactory; 20 | import org.springframework.kafka.core.KafkaTemplate; 21 | import org.springframework.kafka.core.ProducerFactory; 22 | import org.springframework.kafka.listener.AbstractMessageListenerContainer; 23 | import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; 24 | import org.springframework.retry.backoff.FixedBackOffPolicy; 25 | import org.springframework.retry.policy.AlwaysRetryPolicy; 26 | import org.springframework.retry.support.RetryTemplate; 27 | 28 | import com.learnbootkafka.consumer.ConsumerKafka; 29 | 30 | @Configuration 31 | @EnableKafka 32 | public class KafkaConfig { 33 | 34 | @Autowired 35 | Environment env; 36 | 37 | /** 38 | * Consumer Config Starts 39 | */ 40 | @Bean 41 | KafkaListenerContainerFactory> kafkaListenerContainerFactory() { 42 | ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); 43 | factory.setConsumerFactory(consumerFactory()); 44 | factory.getContainerProperties().setPollTimeout(3000); 45 | factory.getContainerProperties().setAckMode(AbstractMessageListenerContainer.AckMode.MANUAL); 46 | return factory; 47 | } 48 | 49 | @Bean 50 | public ConsumerFactory consumerFactory() { 51 | 52 | return new DefaultKafkaConsumerFactory<>(consumerConfigs()); 53 | } 54 | 55 | @Bean 56 | public Map consumerConfigs() { 57 | Map propsMap = new HashMap<>(); 58 | propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, env.getProperty("kafka.broker")); 59 | propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, env.getProperty("enable.auto.commit")); 60 | propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, env.getProperty("auto.commit.interval.ms")); 61 | propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 62 | propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 63 | propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, env.getProperty("group.id")); 64 | propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, env.getProperty("kafka.auto.offset.reset")); 65 | return propsMap; 66 | 67 | } 68 | 69 | @Bean 70 | public ConsumerKafka listener() { 71 | return new ConsumerKafka(); 72 | } 73 | 74 | /** 75 | * Consumer Config Ends 76 | */ 77 | 78 | /** 79 | * Producer Config Starts 80 | * @return 81 | */ 82 | 83 | @SuppressWarnings("rawtypes") 84 | @Bean 85 | public ProducerFactory producerFactory() { 86 | return new DefaultKafkaProducerFactory<>(producerConfigs()); 87 | } 88 | 89 | @Bean 90 | public KafkaTemplate kafkaTemplate() { 91 | return new KafkaTemplate(producerFactory()); 92 | } 93 | /** 94 | * Producer Config Ends 95 | * @return 96 | */ 97 | 98 | @Bean 99 | public Map producerConfigs() { 100 | Map props = new HashMap<>(); 101 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, env.getProperty("kafka.broker")); 102 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 103 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 104 | props.put(ProducerConfig.LINGER_MS_CONFIG, 1); 105 | 106 | 107 | return props; 108 | } 109 | 110 | } 111 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/java/com/learnbootkafka/consumer/ConsumerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.consumer; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecord; 4 | import org.springframework.kafka.annotation.KafkaListener; 5 | import org.springframework.kafka.listener.AcknowledgingMessageListener; 6 | import org.springframework.kafka.support.Acknowledgment; 7 | 8 | public class ConsumerKafka implements AcknowledgingMessageListener{ 9 | 10 | @Override 11 | @KafkaListener(id = "consumer", topics = {"${kafka.topic}"} ) 12 | public void onMessage(ConsumerRecord data, 13 | Acknowledgment acknowledgment) { 14 | // TODO Auto-generated method stub 15 | try{ 16 | System.out.println("Read Record is : " + data.value()); 17 | System.out.println("Offset is : " + data.offset()); 18 | System.out.println("Topic is : " + data.topic()); 19 | System.out.println("Partition is : " + data.partition()); 20 | 21 | }catch (Exception e ){ 22 | System.out.println("Push the messaged to Error Stream : " + e); 23 | }finally{ 24 | acknowledgment.acknowledge(); 25 | } 26 | 27 | 28 | 29 | 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/java/com/learnbootkafka/controller/HelloController.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.controller; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.core.env.Environment; 5 | import org.springframework.web.bind.annotation.RequestMapping; 6 | import org.springframework.web.bind.annotation.RequestParam; 7 | import org.springframework.web.bind.annotation.RestController; 8 | 9 | import com.learnbootkafka.producer.ProducerKafka; 10 | 11 | @RestController 12 | public class HelloController { 13 | 14 | @Autowired 15 | Environment env; 16 | 17 | @Autowired 18 | ProducerKafka producer; 19 | 20 | @RequestMapping(value="/home") 21 | public String getResult(@RequestParam("input") String value){ 22 | 23 | try{ 24 | producer.sendMessage(value); 25 | }catch(Exception e){ 26 | System.out.println("Inside Excotion"); 27 | } 28 | 29 | return env.getProperty("message.response"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/java/com/learnbootkafka/producer/ProducerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.producer; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.core.env.Environment; 5 | import org.springframework.kafka.core.KafkaTemplate; 6 | import org.springframework.kafka.support.SendResult; 7 | import org.springframework.stereotype.Component; 8 | import org.springframework.util.concurrent.ListenableFuture; 9 | import org.springframework.util.concurrent.ListenableFutureCallback; 10 | 11 | @Component 12 | public class ProducerKafka { 13 | 14 | @Autowired 15 | private KafkaTemplate kafkaTemplate; 16 | 17 | @Autowired 18 | Environment env; 19 | 20 | public void sendMessage(String message){ 21 | ListenableFuture> future = kafkaTemplate 22 | .send(env.getProperty("kafka.topic"),message,message); 23 | 24 | future.addCallback( 25 | new ListenableFutureCallback>() { 26 | 27 | @Override 28 | public void onFailure(Throwable ex) { 29 | System.out.println("Inside Exception"); 30 | 31 | } 32 | 33 | @Override 34 | public void onSuccess(SendResult result) { 35 | // TODO Auto-generated method stub 36 | System.out.println("Inside Success"); 37 | 38 | } 39 | }); 40 | 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | profiles: 3 | active: dev 4 | 5 | --- 6 | 7 | spring: 8 | profiles: dev 9 | 10 | message.response: Hello from DEV spring boot properties 11 | 12 | kafka.broker: localhost:9092,localhost:9093,localhost:9094 13 | enable.auto.commit: false 14 | auto.commit.interval.ms: 100 15 | group.id: group1 16 | kafka.auto.offset.reset: earliest 17 | kafka.topic: my-topic 18 | 19 | 20 | --- 21 | 22 | spring: 23 | profiles: stage 24 | 25 | message.response: Hello from STAGE spring boot properties 26 | kafka.broker: localhost:9092,localhost:9093,localhost:9094 27 | enable.auto.commit: false 28 | auto.commit.interval.ms: 100 29 | group.id: group1 30 | kafka.auto.offset.reset: earliest 31 | kafka.topic: my-topic 32 | 33 | --- 34 | 35 | spring: 36 | profiles: prod 37 | 38 | message.response: Hello from PROD spring boot properties 39 | kafka.broker: 10.113.0.137:9092,10.113.0.137:9093,10.113.0.137:9094 40 | enable.auto.commit: false 41 | auto.commit.interval.ms: 100 42 | group.id: group1 43 | kafka.auto.offset.reset: earliest 44 | kafka.topic: my-topic 45 | 46 | 47 | --- -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/main/resources/application1.properties: -------------------------------------------------------------------------------- 1 | message.response=Hello from spring boot properties -------------------------------------------------------------------------------- /learnbootkafka-manual-offset-docker/src/test/java/com/learnbootkafka/LearnbootkafkaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka; 2 | 3 | import org.junit.Test; 4 | import org.junit.runner.RunWith; 5 | import org.springframework.boot.test.context.SpringBootTest; 6 | import org.springframework.test.context.junit4.SpringRunner; 7 | 8 | @RunWith(SpringRunner.class) 9 | //@SpringBootTest 10 | public class LearnbootkafkaApplicationTests { 11 | 12 | @Test 13 | public void contextLoads() { 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | !.mvn/wrapper/maven-wrapper.jar 3 | 4 | ### STS ### 5 | .apt_generated 6 | .classpath 7 | .factorypath 8 | .project 9 | .settings 10 | .springBeans 11 | 12 | ### IntelliJ IDEA ### 13 | .idea 14 | *.iws 15 | *.iml 16 | *.ipr 17 | 18 | ### NetBeans ### 19 | nbproject/private/ 20 | build/ 21 | nbbuild/ 22 | dist/ 23 | nbdist/ 24 | .nb-gradle/ -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/TeachApacheKafka/f1951861aacaf600544e098f3e4dfeb41de75029/learnbootkafka-manual-offset/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip 2 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/README.md: -------------------------------------------------------------------------------- 1 | # Learn Spring Boot Kafka 2 | 3 | ## Java Command to launch the jar file 4 | 5 | The below command will take the stage profile configuration and launch the job. 6 | 7 | ``` 8 | java -jar -Dspring.profiles.active=stage .jar 9 | ``` 10 | 11 | ## Launching multiple instances: 12 | 13 | ``` 14 | java -jar -Dspring.profiles.active=stage -Dserver.port=8081 .jar 15 | ``` 16 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven2 Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ] ; then 38 | 39 | if [ -f /etc/mavenrc ] ; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ] ; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 50 | cygwin=false; 51 | darwin=false; 52 | mingw=false 53 | case "`uname`" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true;; 56 | Darwin*) darwin=true 57 | # 58 | # Look for the Apple JDKs first to preserve the existing behaviour, and then look 59 | # for the new JDKs provided by Oracle. 60 | # 61 | if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then 62 | # 63 | # Apple JDKs 64 | # 65 | export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home 66 | fi 67 | 68 | if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then 69 | # 70 | # Apple JDKs 71 | # 72 | export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home 73 | fi 74 | 75 | if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then 76 | # 77 | # Oracle JDKs 78 | # 79 | export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home 80 | fi 81 | 82 | if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then 83 | # 84 | # Apple JDKs 85 | # 86 | export JAVA_HOME=`/usr/libexec/java_home` 87 | fi 88 | ;; 89 | esac 90 | 91 | if [ -z "$JAVA_HOME" ] ; then 92 | if [ -r /etc/gentoo-release ] ; then 93 | JAVA_HOME=`java-config --jre-home` 94 | fi 95 | fi 96 | 97 | if [ -z "$M2_HOME" ] ; then 98 | ## resolve links - $0 may be a link to maven's home 99 | PRG="$0" 100 | 101 | # need this for relative symlinks 102 | while [ -h "$PRG" ] ; do 103 | ls=`ls -ld "$PRG"` 104 | link=`expr "$ls" : '.*-> \(.*\)$'` 105 | if expr "$link" : '/.*' > /dev/null; then 106 | PRG="$link" 107 | else 108 | PRG="`dirname "$PRG"`/$link" 109 | fi 110 | done 111 | 112 | saveddir=`pwd` 113 | 114 | M2_HOME=`dirname "$PRG"`/.. 115 | 116 | # make it fully qualified 117 | M2_HOME=`cd "$M2_HOME" && pwd` 118 | 119 | cd "$saveddir" 120 | # echo Using m2 at $M2_HOME 121 | fi 122 | 123 | # For Cygwin, ensure paths are in UNIX format before anything is touched 124 | if $cygwin ; then 125 | [ -n "$M2_HOME" ] && 126 | M2_HOME=`cygpath --unix "$M2_HOME"` 127 | [ -n "$JAVA_HOME" ] && 128 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 129 | [ -n "$CLASSPATH" ] && 130 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"` 131 | fi 132 | 133 | # For Migwn, ensure paths are in UNIX format before anything is touched 134 | if $mingw ; then 135 | [ -n "$M2_HOME" ] && 136 | M2_HOME="`(cd "$M2_HOME"; pwd)`" 137 | [ -n "$JAVA_HOME" ] && 138 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" 139 | # TODO classpath? 140 | fi 141 | 142 | if [ -z "$JAVA_HOME" ]; then 143 | javaExecutable="`which javac`" 144 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then 145 | # readlink(1) is not available as standard on Solaris 10. 146 | readLink=`which readlink` 147 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then 148 | if $darwin ; then 149 | javaHome="`dirname \"$javaExecutable\"`" 150 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" 151 | else 152 | javaExecutable="`readlink -f \"$javaExecutable\"`" 153 | fi 154 | javaHome="`dirname \"$javaExecutable\"`" 155 | javaHome=`expr "$javaHome" : '\(.*\)/bin'` 156 | JAVA_HOME="$javaHome" 157 | export JAVA_HOME 158 | fi 159 | fi 160 | fi 161 | 162 | if [ -z "$JAVACMD" ] ; then 163 | if [ -n "$JAVA_HOME" ] ; then 164 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 165 | # IBM's JDK on AIX uses strange locations for the executables 166 | JAVACMD="$JAVA_HOME/jre/sh/java" 167 | else 168 | JAVACMD="$JAVA_HOME/bin/java" 169 | fi 170 | else 171 | JAVACMD="`which java`" 172 | fi 173 | fi 174 | 175 | if [ ! -x "$JAVACMD" ] ; then 176 | echo "Error: JAVA_HOME is not defined correctly." >&2 177 | echo " We cannot execute $JAVACMD" >&2 178 | exit 1 179 | fi 180 | 181 | if [ -z "$JAVA_HOME" ] ; then 182 | echo "Warning: JAVA_HOME environment variable is not set." 183 | fi 184 | 185 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 186 | 187 | # For Cygwin, switch paths to Windows format before running java 188 | if $cygwin; then 189 | [ -n "$M2_HOME" ] && 190 | M2_HOME=`cygpath --path --windows "$M2_HOME"` 191 | [ -n "$JAVA_HOME" ] && 192 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` 193 | [ -n "$CLASSPATH" ] && 194 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"` 195 | fi 196 | 197 | # traverses directory structure from process work directory to filesystem root 198 | # first directory with .mvn subdirectory is considered project base directory 199 | find_maven_basedir() { 200 | local basedir=$(pwd) 201 | local wdir=$(pwd) 202 | while [ "$wdir" != '/' ] ; do 203 | if [ -d "$wdir"/.mvn ] ; then 204 | basedir=$wdir 205 | break 206 | fi 207 | wdir=$(cd "$wdir/.."; pwd) 208 | done 209 | echo "${basedir}" 210 | } 211 | 212 | # concatenates all lines of a file 213 | concat_lines() { 214 | if [ -f "$1" ]; then 215 | echo "$(tr -s '\n' ' ' < "$1")" 216 | fi 217 | } 218 | 219 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} 220 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 221 | 222 | # Provide a "standardized" way to retrieve the CLI args that will 223 | # work with both Windows and non-Windows executions. 224 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 225 | export MAVEN_CMD_LINE_ARGS 226 | 227 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 228 | 229 | exec "$JAVACMD" \ 230 | $MAVEN_OPTS \ 231 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 232 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 233 | ${WRAPPER_LAUNCHER} "$@" 234 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM http://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven2 Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' 39 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 40 | 41 | @REM set %HOME% to equivalent of $HOME 42 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 43 | 44 | @REM Execute a user defined script before this one 45 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 46 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 47 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 48 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 49 | :skipRcPre 50 | 51 | @setlocal 52 | 53 | set ERROR_CODE=0 54 | 55 | @REM To isolate internal variables from possible post scripts, we use another setlocal 56 | @setlocal 57 | 58 | @REM ==== START VALIDATION ==== 59 | if not "%JAVA_HOME%" == "" goto OkJHome 60 | 61 | echo. 62 | echo Error: JAVA_HOME not found in your environment. >&2 63 | echo Please set the JAVA_HOME variable in your environment to match the >&2 64 | echo location of your Java installation. >&2 65 | echo. 66 | goto error 67 | 68 | :OkJHome 69 | if exist "%JAVA_HOME%\bin\java.exe" goto init 70 | 71 | echo. 72 | echo Error: JAVA_HOME is set to an invalid directory. >&2 73 | echo JAVA_HOME = "%JAVA_HOME%" >&2 74 | echo Please set the JAVA_HOME variable in your environment to match the >&2 75 | echo location of your Java installation. >&2 76 | echo. 77 | goto error 78 | 79 | @REM ==== END VALIDATION ==== 80 | 81 | :init 82 | 83 | set MAVEN_CMD_LINE_ARGS=%* 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | 121 | set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" 122 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 123 | 124 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% 125 | if ERRORLEVEL 1 goto error 126 | goto end 127 | 128 | :error 129 | set ERROR_CODE=1 130 | 131 | :end 132 | @endlocal & set ERROR_CODE=%ERROR_CODE% 133 | 134 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 135 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 136 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 137 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 138 | :skipRcPost 139 | 140 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 141 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 142 | 143 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 144 | 145 | exit /B %ERROR_CODE% -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | com.learnbootkafka-manual-offset 7 | learnbootkafka-manual-offset 8 | 0.0.1-SNAPSHOT 9 | jar 10 | 11 | learnbootkafka 12 | Demo project for Spring Boot 13 | 14 | 15 | 16 | org.springframework.boot 17 | spring-boot-starter-parent 18 | 1.5.2.RELEASE 19 | 20 | 21 | 22 | 23 | UTF-8 24 | UTF-8 25 | 1.8 26 | dilipthelip 27 | 28 | 29 | 30 | 31 | org.springframework.boot 32 | spring-boot-starter-jersey 33 | 34 | 35 | org.springframework.kafka 36 | spring-kafka 37 | 38 | 39 | org.springframework.boot 40 | spring-boot-starter-web 41 | 42 | 43 | 44 | 45 | org.springframework.kafka 46 | spring-kafka 47 | 1.1.1.RELEASE 48 | 49 | 50 | 51 | 52 | org.springframework.boot 53 | spring-boot-starter-test 54 | test 55 | 56 | 57 | 58 | 59 | 60 | 61 | org.springframework.boot 62 | spring-boot-maven-plugin 63 | 64 | 65 | 66 | com.spotify 67 | docker-maven-plugin 68 | 0.4.11 69 | 70 | ${docker.image.prefix}/${project.artifactId} 71 | src/main/docker 72 | 73 | 74 | / 75 | ${project.build.directory} 76 | ${project.build.finalName}.jar 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM frolvlad/alpine-oraclejdk8:slim 2 | VOLUME /tmp 3 | ADD learnbootkafka-manual-offset-0.0.1-SNAPSHOT.jar app.jar 4 | EXPOSE 8080 8443 8000 5 | COPY ./docker-entrypoint.sh /docker-entrypoint.sh 6 | ENTRYPOINT ["/docker-entrypoint.sh"] 7 | RUN chmod +x /docker-entrypoint.sh 8 | CMD ["start"] -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/docker/docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | if [ "$1" = 'start' ]; then 6 | 7 | # Check to see if the environment variable ENVIRONMENT is set. If it is, we can set our Spring Boot active profile 8 | # based on it. If it isn't, the default Spring Profile will be used. 9 | [ -z "$ENVIRONMENT" ] && printf "WARN:: No environment specified, relying on default Spring profile. To change the \ 10 | environment, please supply a Docker environment variable. Currently available environments are:\n\n\ 11 | stage (default)\n\ 12 | prod\n\n\ 13 | Example: docker run -e ENVIRONMENT=prod bootkafka bootkafka/learnbootkafka-manual-offset\n"; 14 | 15 | java_opts="-Djava.security.egd=file:/dev/./urandom" 16 | 17 | # If an environment variable is set to provide a different Spring Profile, use it. 18 | if [ $ENVIRONMENT ]; then 19 | java_opts="$java_opts -Dspring.profiles.active=$ENVIRONMENT" 20 | fi 21 | 22 | java_opts="$java_opts -Dkafka.broker=$KAFKABROKER" 23 | 24 | printf "java options are : $java_opts" 25 | # Launch the app with whatever Java options are required to execute. 26 | exec java -jar ${java_opts} /app.jar 27 | fi 28 | 29 | exec "$@" -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/java/com/learnbootkafka/LearnbootkafkaApplication.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class LearnbootkafkaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(LearnbootkafkaApplication.class, args); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/java/com/learnbootkafka/config/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.config; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerConfig; 7 | import org.apache.kafka.clients.producer.ProducerConfig; 8 | import org.apache.kafka.common.serialization.StringDeserializer; 9 | import org.apache.kafka.common.serialization.StringSerializer; 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.context.annotation.Bean; 12 | import org.springframework.context.annotation.Configuration; 13 | import org.springframework.core.env.Environment; 14 | import org.springframework.kafka.annotation.EnableKafka; 15 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 16 | import org.springframework.kafka.config.KafkaListenerContainerFactory; 17 | import org.springframework.kafka.core.ConsumerFactory; 18 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 19 | import org.springframework.kafka.core.DefaultKafkaProducerFactory; 20 | import org.springframework.kafka.core.KafkaTemplate; 21 | import org.springframework.kafka.core.ProducerFactory; 22 | import org.springframework.kafka.listener.AbstractMessageListenerContainer; 23 | import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; 24 | import org.springframework.retry.backoff.FixedBackOffPolicy; 25 | import org.springframework.retry.policy.AlwaysRetryPolicy; 26 | import org.springframework.retry.support.RetryTemplate; 27 | 28 | import com.learnbootkafka.consumer.ConsumerKafka; 29 | 30 | @Configuration 31 | @EnableKafka 32 | public class KafkaConfig { 33 | 34 | @Autowired 35 | Environment env; 36 | 37 | /** 38 | * Consumer Config Starts 39 | */ 40 | @Bean 41 | KafkaListenerContainerFactory> kafkaListenerContainerFactory() { 42 | ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); 43 | factory.setConsumerFactory(consumerFactory()); 44 | factory.getContainerProperties().setPollTimeout(3000); 45 | factory.getContainerProperties().setAckMode(AbstractMessageListenerContainer.AckMode.MANUAL); 46 | return factory; 47 | } 48 | 49 | @Bean 50 | public ConsumerFactory consumerFactory() { 51 | 52 | return new DefaultKafkaConsumerFactory<>(consumerConfigs()); 53 | } 54 | 55 | @Bean 56 | public Map consumerConfigs() { 57 | Map propsMap = new HashMap<>(); 58 | propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, env.getProperty("kafka.broker")); 59 | propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, env.getProperty("enable.auto.commit")); 60 | propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, env.getProperty("auto.commit.interval.ms")); 61 | propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 62 | propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 63 | propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, env.getProperty("group.id")); 64 | propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, env.getProperty("kafka.auto.offset.reset")); 65 | return propsMap; 66 | 67 | } 68 | 69 | @Bean 70 | public ConsumerKafka listener() { 71 | return new ConsumerKafka(); 72 | } 73 | 74 | /** 75 | * Consumer Config Ends 76 | */ 77 | 78 | /** 79 | * Producer Config Starts 80 | * @return 81 | */ 82 | 83 | @SuppressWarnings("rawtypes") 84 | @Bean 85 | public ProducerFactory producerFactory() { 86 | return new DefaultKafkaProducerFactory<>(producerConfigs()); 87 | } 88 | 89 | @Bean 90 | public KafkaTemplate kafkaTemplate() { 91 | return new KafkaTemplate(producerFactory()); 92 | } 93 | /** 94 | * Producer Config Ends 95 | * @return 96 | */ 97 | 98 | @Bean 99 | public Map producerConfigs() { 100 | Map props = new HashMap<>(); 101 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, env.getProperty("kafka.broker")); 102 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 103 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 104 | props.put(ProducerConfig.LINGER_MS_CONFIG, 1); 105 | 106 | 107 | return props; 108 | } 109 | 110 | } 111 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/java/com/learnbootkafka/consumer/ConsumerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.consumer; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecord; 4 | import org.springframework.kafka.annotation.KafkaListener; 5 | import org.springframework.kafka.listener.AcknowledgingMessageListener; 6 | import org.springframework.kafka.support.Acknowledgment; 7 | 8 | public class ConsumerKafka implements AcknowledgingMessageListener{ 9 | 10 | @Override 11 | @KafkaListener(id = "consumer", topics = {"${kafka.topic}"} ) 12 | public void onMessage(ConsumerRecord data, 13 | Acknowledgment acknowledgment) { 14 | // TODO Auto-generated method stub 15 | try{ 16 | System.out.println("Read Record is : " + data.value()); 17 | System.out.println("Offset is : " + data.offset()); 18 | System.out.println("Topic is : " + data.topic()); 19 | System.out.println("Partition is : " + data.partition()); 20 | 21 | }catch (Exception e ){ 22 | System.out.println("Push the messaged to Error Stream : " + e); 23 | }finally{ 24 | acknowledgment.acknowledge(); 25 | } 26 | 27 | 28 | 29 | 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/java/com/learnbootkafka/controller/HelloController.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.controller; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.core.env.Environment; 5 | import org.springframework.web.bind.annotation.RequestMapping; 6 | import org.springframework.web.bind.annotation.RequestParam; 7 | import org.springframework.web.bind.annotation.RestController; 8 | 9 | import com.learnbootkafka.producer.ProducerKafka; 10 | 11 | @RestController 12 | public class HelloController { 13 | 14 | @Autowired 15 | Environment env; 16 | 17 | @Autowired 18 | ProducerKafka producer; 19 | 20 | @RequestMapping(value="/home") 21 | public String getResult(@RequestParam("input") String value){ 22 | 23 | try{ 24 | producer.sendMessage(value); 25 | }catch(Exception e){ 26 | System.out.println("Inside Excotion"); 27 | } 28 | 29 | return env.getProperty("message.response"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/java/com/learnbootkafka/producer/ProducerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.producer; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.core.env.Environment; 5 | import org.springframework.kafka.core.KafkaTemplate; 6 | import org.springframework.kafka.support.SendResult; 7 | import org.springframework.stereotype.Component; 8 | import org.springframework.util.concurrent.ListenableFuture; 9 | import org.springframework.util.concurrent.ListenableFutureCallback; 10 | 11 | @Component 12 | public class ProducerKafka { 13 | 14 | @Autowired 15 | private KafkaTemplate kafkaTemplate; 16 | 17 | @Autowired 18 | Environment env; 19 | 20 | public void sendMessage(String message){ 21 | ListenableFuture> future = kafkaTemplate 22 | .send(env.getProperty("kafka.topic"),message,message); 23 | 24 | future.addCallback( 25 | new ListenableFutureCallback>() { 26 | 27 | @Override 28 | public void onFailure(Throwable ex) { 29 | System.out.println("Inside Exception"); 30 | 31 | } 32 | 33 | @Override 34 | public void onSuccess(SendResult result) { 35 | // TODO Auto-generated method stub 36 | System.out.println("Inside Success"); 37 | 38 | } 39 | }); 40 | 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | profiles: 3 | active: dev 4 | 5 | --- 6 | 7 | spring: 8 | profiles: dev 9 | 10 | message.response: Hello from DEV spring boot properties 11 | 12 | kafka.broker: localhost:9092,localhost:9093,localhost:9094 13 | enable.auto.commit: false 14 | auto.commit.interval.ms: 100 15 | group.id: group1 16 | kafka.auto.offset.reset: earliest 17 | kafka.topic: my-topic 18 | 19 | 20 | --- 21 | 22 | spring: 23 | profiles: stage 24 | 25 | message.response: Hello from STAGE spring boot properties 26 | kafka.broker: localhost:9092,localhost:9093,localhost:9094 27 | enable.auto.commit: false 28 | auto.commit.interval.ms: 100 29 | group.id: group1 30 | kafka.auto.offset.reset: earliest 31 | kafka.topic: my-topic 32 | 33 | --- 34 | 35 | spring: 36 | profiles: prod 37 | 38 | message.response: Hello from PROD spring boot properties 39 | kafka.broker: 10.113.0.137:9092,10.113.0.137:9093,10.113.0.137:9094 40 | enable.auto.commit: false 41 | auto.commit.interval.ms: 100 42 | group.id: group1 43 | kafka.auto.offset.reset: earliest 44 | kafka.topic: my-topic 45 | 46 | 47 | --- -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/main/resources/application1.properties: -------------------------------------------------------------------------------- 1 | message.response=Hello from spring boot properties -------------------------------------------------------------------------------- /learnbootkafka-manual-offset/src/test/java/com/learnbootkafka/LearnbootkafkaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka; 2 | 3 | import org.junit.Test; 4 | import org.junit.runner.RunWith; 5 | import org.springframework.boot.test.context.SpringBootTest; 6 | import org.springframework.test.context.junit4.SpringRunner; 7 | 8 | @RunWith(SpringRunner.class) 9 | //@SpringBootTest 10 | public class LearnbootkafkaApplicationTests { 11 | 12 | @Test 13 | public void contextLoads() { 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /learnbootkafka/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | !.mvn/wrapper/maven-wrapper.jar 3 | 4 | ### STS ### 5 | .apt_generated 6 | .classpath 7 | .factorypath 8 | .project 9 | .settings 10 | .springBeans 11 | 12 | ### IntelliJ IDEA ### 13 | .idea 14 | *.iws 15 | *.iml 16 | *.ipr 17 | 18 | ### NetBeans ### 19 | nbproject/private/ 20 | build/ 21 | nbbuild/ 22 | dist/ 23 | nbdist/ 24 | .nb-gradle/ -------------------------------------------------------------------------------- /learnbootkafka/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/TeachApacheKafka/f1951861aacaf600544e098f3e4dfeb41de75029/learnbootkafka/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /learnbootkafka/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip 2 | -------------------------------------------------------------------------------- /learnbootkafka/README.md: -------------------------------------------------------------------------------- 1 | # Learn Spring Boot Kafka 2 | 3 | ## Java Command to launch the jar file 4 | 5 | The below command will take the stage profile configuration and launch the job. 6 | 7 | ``` 8 | java -jar -Dspring.profiles.active=stage .jar 9 | ``` 10 | -------------------------------------------------------------------------------- /learnbootkafka/mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven2 Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ] ; then 38 | 39 | if [ -f /etc/mavenrc ] ; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ] ; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 50 | cygwin=false; 51 | darwin=false; 52 | mingw=false 53 | case "`uname`" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true;; 56 | Darwin*) darwin=true 57 | # 58 | # Look for the Apple JDKs first to preserve the existing behaviour, and then look 59 | # for the new JDKs provided by Oracle. 60 | # 61 | if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then 62 | # 63 | # Apple JDKs 64 | # 65 | export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home 66 | fi 67 | 68 | if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then 69 | # 70 | # Apple JDKs 71 | # 72 | export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home 73 | fi 74 | 75 | if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then 76 | # 77 | # Oracle JDKs 78 | # 79 | export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home 80 | fi 81 | 82 | if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then 83 | # 84 | # Apple JDKs 85 | # 86 | export JAVA_HOME=`/usr/libexec/java_home` 87 | fi 88 | ;; 89 | esac 90 | 91 | if [ -z "$JAVA_HOME" ] ; then 92 | if [ -r /etc/gentoo-release ] ; then 93 | JAVA_HOME=`java-config --jre-home` 94 | fi 95 | fi 96 | 97 | if [ -z "$M2_HOME" ] ; then 98 | ## resolve links - $0 may be a link to maven's home 99 | PRG="$0" 100 | 101 | # need this for relative symlinks 102 | while [ -h "$PRG" ] ; do 103 | ls=`ls -ld "$PRG"` 104 | link=`expr "$ls" : '.*-> \(.*\)$'` 105 | if expr "$link" : '/.*' > /dev/null; then 106 | PRG="$link" 107 | else 108 | PRG="`dirname "$PRG"`/$link" 109 | fi 110 | done 111 | 112 | saveddir=`pwd` 113 | 114 | M2_HOME=`dirname "$PRG"`/.. 115 | 116 | # make it fully qualified 117 | M2_HOME=`cd "$M2_HOME" && pwd` 118 | 119 | cd "$saveddir" 120 | # echo Using m2 at $M2_HOME 121 | fi 122 | 123 | # For Cygwin, ensure paths are in UNIX format before anything is touched 124 | if $cygwin ; then 125 | [ -n "$M2_HOME" ] && 126 | M2_HOME=`cygpath --unix "$M2_HOME"` 127 | [ -n "$JAVA_HOME" ] && 128 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 129 | [ -n "$CLASSPATH" ] && 130 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"` 131 | fi 132 | 133 | # For Migwn, ensure paths are in UNIX format before anything is touched 134 | if $mingw ; then 135 | [ -n "$M2_HOME" ] && 136 | M2_HOME="`(cd "$M2_HOME"; pwd)`" 137 | [ -n "$JAVA_HOME" ] && 138 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" 139 | # TODO classpath? 140 | fi 141 | 142 | if [ -z "$JAVA_HOME" ]; then 143 | javaExecutable="`which javac`" 144 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then 145 | # readlink(1) is not available as standard on Solaris 10. 146 | readLink=`which readlink` 147 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then 148 | if $darwin ; then 149 | javaHome="`dirname \"$javaExecutable\"`" 150 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" 151 | else 152 | javaExecutable="`readlink -f \"$javaExecutable\"`" 153 | fi 154 | javaHome="`dirname \"$javaExecutable\"`" 155 | javaHome=`expr "$javaHome" : '\(.*\)/bin'` 156 | JAVA_HOME="$javaHome" 157 | export JAVA_HOME 158 | fi 159 | fi 160 | fi 161 | 162 | if [ -z "$JAVACMD" ] ; then 163 | if [ -n "$JAVA_HOME" ] ; then 164 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 165 | # IBM's JDK on AIX uses strange locations for the executables 166 | JAVACMD="$JAVA_HOME/jre/sh/java" 167 | else 168 | JAVACMD="$JAVA_HOME/bin/java" 169 | fi 170 | else 171 | JAVACMD="`which java`" 172 | fi 173 | fi 174 | 175 | if [ ! -x "$JAVACMD" ] ; then 176 | echo "Error: JAVA_HOME is not defined correctly." >&2 177 | echo " We cannot execute $JAVACMD" >&2 178 | exit 1 179 | fi 180 | 181 | if [ -z "$JAVA_HOME" ] ; then 182 | echo "Warning: JAVA_HOME environment variable is not set." 183 | fi 184 | 185 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 186 | 187 | # For Cygwin, switch paths to Windows format before running java 188 | if $cygwin; then 189 | [ -n "$M2_HOME" ] && 190 | M2_HOME=`cygpath --path --windows "$M2_HOME"` 191 | [ -n "$JAVA_HOME" ] && 192 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` 193 | [ -n "$CLASSPATH" ] && 194 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"` 195 | fi 196 | 197 | # traverses directory structure from process work directory to filesystem root 198 | # first directory with .mvn subdirectory is considered project base directory 199 | find_maven_basedir() { 200 | local basedir=$(pwd) 201 | local wdir=$(pwd) 202 | while [ "$wdir" != '/' ] ; do 203 | if [ -d "$wdir"/.mvn ] ; then 204 | basedir=$wdir 205 | break 206 | fi 207 | wdir=$(cd "$wdir/.."; pwd) 208 | done 209 | echo "${basedir}" 210 | } 211 | 212 | # concatenates all lines of a file 213 | concat_lines() { 214 | if [ -f "$1" ]; then 215 | echo "$(tr -s '\n' ' ' < "$1")" 216 | fi 217 | } 218 | 219 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} 220 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 221 | 222 | # Provide a "standardized" way to retrieve the CLI args that will 223 | # work with both Windows and non-Windows executions. 224 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 225 | export MAVEN_CMD_LINE_ARGS 226 | 227 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 228 | 229 | exec "$JAVACMD" \ 230 | $MAVEN_OPTS \ 231 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 232 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 233 | ${WRAPPER_LAUNCHER} "$@" 234 | -------------------------------------------------------------------------------- /learnbootkafka/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM http://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven2 Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' 39 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 40 | 41 | @REM set %HOME% to equivalent of $HOME 42 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 43 | 44 | @REM Execute a user defined script before this one 45 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 46 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 47 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 48 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 49 | :skipRcPre 50 | 51 | @setlocal 52 | 53 | set ERROR_CODE=0 54 | 55 | @REM To isolate internal variables from possible post scripts, we use another setlocal 56 | @setlocal 57 | 58 | @REM ==== START VALIDATION ==== 59 | if not "%JAVA_HOME%" == "" goto OkJHome 60 | 61 | echo. 62 | echo Error: JAVA_HOME not found in your environment. >&2 63 | echo Please set the JAVA_HOME variable in your environment to match the >&2 64 | echo location of your Java installation. >&2 65 | echo. 66 | goto error 67 | 68 | :OkJHome 69 | if exist "%JAVA_HOME%\bin\java.exe" goto init 70 | 71 | echo. 72 | echo Error: JAVA_HOME is set to an invalid directory. >&2 73 | echo JAVA_HOME = "%JAVA_HOME%" >&2 74 | echo Please set the JAVA_HOME variable in your environment to match the >&2 75 | echo location of your Java installation. >&2 76 | echo. 77 | goto error 78 | 79 | @REM ==== END VALIDATION ==== 80 | 81 | :init 82 | 83 | set MAVEN_CMD_LINE_ARGS=%* 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | 121 | set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" 122 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 123 | 124 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% 125 | if ERRORLEVEL 1 goto error 126 | goto end 127 | 128 | :error 129 | set ERROR_CODE=1 130 | 131 | :end 132 | @endlocal & set ERROR_CODE=%ERROR_CODE% 133 | 134 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 135 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 136 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 137 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 138 | :skipRcPost 139 | 140 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 141 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 142 | 143 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 144 | 145 | exit /B %ERROR_CODE% -------------------------------------------------------------------------------- /learnbootkafka/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | com.learnbootkafka 7 | learnbootkafka 8 | 0.0.1-SNAPSHOT 9 | jar 10 | 11 | learnbootkafka 12 | Demo project for Spring Boot 13 | 14 | 15 | org.springframework.boot 16 | spring-boot-starter-parent 17 | 1.5.2.RELEASE 18 | 19 | 20 | 21 | 22 | UTF-8 23 | UTF-8 24 | 1.8 25 | 26 | 27 | 28 | 29 | org.springframework.boot 30 | spring-boot-starter-jersey 31 | 32 | 33 | org.springframework.kafka 34 | spring-kafka 35 | 36 | 37 | org.springframework.boot 38 | spring-boot-starter-web 39 | 40 | 41 | 42 | 43 | org.springframework.kafka 44 | spring-kafka 45 | 1.1.1.RELEASE 46 | 47 | 48 | 49 | 50 | org.springframework.boot 51 | spring-boot-starter-test 52 | test 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | org.springframework.boot 62 | spring-boot-maven-plugin 63 | 64 | 65 | 66 | 67 | 68 | 69 | -------------------------------------------------------------------------------- /learnbootkafka/src/main/java/com/learnbootkafka/LearnbootkafkaApplication.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class LearnbootkafkaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(LearnbootkafkaApplication.class, args); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /learnbootkafka/src/main/java/com/learnbootkafka/config/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.config; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | import org.apache.kafka.clients.consumer.ConsumerConfig; 7 | import org.apache.kafka.clients.producer.ProducerConfig; 8 | import org.apache.kafka.common.serialization.StringDeserializer; 9 | import org.apache.kafka.common.serialization.StringSerializer; 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.context.annotation.Bean; 12 | import org.springframework.context.annotation.Configuration; 13 | import org.springframework.core.env.Environment; 14 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 15 | import org.springframework.kafka.config.KafkaListenerContainerFactory; 16 | import org.springframework.kafka.core.ConsumerFactory; 17 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 18 | import org.springframework.kafka.core.DefaultKafkaProducerFactory; 19 | import org.springframework.kafka.core.KafkaTemplate; 20 | import org.springframework.kafka.core.ProducerFactory; 21 | import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; 22 | 23 | import com.learnbootkafka.consumer.ConsumerKafka; 24 | 25 | @Configuration 26 | public class KafkaConfig { 27 | 28 | @Autowired 29 | Environment env; 30 | 31 | /** 32 | * Consumer Config Starts 33 | */ 34 | @Bean 35 | KafkaListenerContainerFactory> kafkaListenerContainerFactory() { 36 | ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); 37 | factory.setConsumerFactory(consumerFactory()); 38 | return factory; 39 | } 40 | 41 | @Bean 42 | public ConsumerFactory consumerFactory() { 43 | 44 | // DefaultKafkaConsumerFactory factory=new DefaultKafkaConsumerFactory<>(consumerConfigs()); 45 | return new DefaultKafkaConsumerFactory<>(consumerConfigs()); 46 | } 47 | 48 | @Bean 49 | public Map consumerConfigs() { 50 | Map propsMap = new HashMap<>(); 51 | propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, env.getProperty("kafka.broker")); 52 | propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, env.getProperty("enable.auto.commit")); 53 | propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, env.getProperty("auto.commit.interval.ms")); 54 | propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 55 | propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 56 | propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, env.getProperty("group.id")); 57 | propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, env.getProperty("kafka.auto.offset.reset")); 58 | return propsMap; 59 | 60 | } 61 | 62 | @Bean 63 | public ConsumerKafka listener() { 64 | return new ConsumerKafka(); 65 | } 66 | 67 | /** 68 | * Consumer Config Ends 69 | */ 70 | 71 | /** 72 | * Producer Config Starts 73 | * @return 74 | */ 75 | 76 | @SuppressWarnings("rawtypes") 77 | @Bean 78 | public ProducerFactory producerFactory() { 79 | return new DefaultKafkaProducerFactory<>(producerConfigs()); 80 | } 81 | 82 | @Bean 83 | public KafkaTemplate kafkaTemplate() { 84 | return new KafkaTemplate(producerFactory()); 85 | } 86 | /** 87 | * Producer Config Ends 88 | * @return 89 | */ 90 | 91 | @Bean 92 | public Map producerConfigs() { 93 | Map props = new HashMap<>(); 94 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, env.getProperty("kafka.broker")); 95 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 96 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 97 | props.put(ProducerConfig.LINGER_MS_CONFIG, 1); 98 | 99 | 100 | return props; 101 | } 102 | 103 | } 104 | -------------------------------------------------------------------------------- /learnbootkafka/src/main/java/com/learnbootkafka/consumer/ConsumerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.consumer; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecord; 4 | import org.springframework.kafka.annotation.KafkaListener; 5 | 6 | public class ConsumerKafka { 7 | 8 | 9 | @KafkaListener(id = "consumer", topics = {"${kafka.topic}"} ) 10 | public void onMessage(ConsumerRecord record) { 11 | 12 | System.out.println("Read Record is : " + record.value()); 13 | 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /learnbootkafka/src/main/java/com/learnbootkafka/controller/HelloController.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.controller; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.core.env.Environment; 5 | import org.springframework.web.bind.annotation.RequestMapping; 6 | import org.springframework.web.bind.annotation.RequestParam; 7 | import org.springframework.web.bind.annotation.RestController; 8 | 9 | import com.learnbootkafka.producer.ProducerKafka; 10 | 11 | @RestController 12 | public class HelloController { 13 | 14 | @Autowired 15 | Environment env; 16 | 17 | @Autowired 18 | ProducerKafka producer; 19 | 20 | @RequestMapping(value="/home") 21 | public String getResult(@RequestParam("input") String value){ 22 | 23 | producer.sendMessage(value); 24 | return env.getProperty("message.response"); 25 | } 26 | 27 | } 28 | -------------------------------------------------------------------------------- /learnbootkafka/src/main/java/com/learnbootkafka/producer/ProducerKafka.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka.producer; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.core.env.Environment; 5 | import org.springframework.kafka.core.KafkaTemplate; 6 | import org.springframework.kafka.support.SendResult; 7 | import org.springframework.stereotype.Component; 8 | import org.springframework.util.concurrent.ListenableFuture; 9 | import org.springframework.util.concurrent.ListenableFutureCallback; 10 | 11 | @Component 12 | public class ProducerKafka { 13 | 14 | @Autowired 15 | private KafkaTemplate kafkaTemplate; 16 | 17 | @Autowired 18 | Environment env; 19 | 20 | public void sendMessage(String message){ 21 | ListenableFuture> future = kafkaTemplate 22 | .send(env.getProperty("kafka.topic"),message,message); 23 | 24 | future.addCallback( 25 | new ListenableFutureCallback>() { 26 | 27 | @Override 28 | public void onFailure(Throwable ex) { 29 | System.out.println("Inside Exception"); 30 | 31 | } 32 | 33 | @Override 34 | public void onSuccess(SendResult result) { 35 | // TODO Auto-generated method stub 36 | System.out.println("Inside Success"); 37 | 38 | } 39 | }); 40 | 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /learnbootkafka/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | profiles: 3 | active: dev 4 | 5 | --- 6 | 7 | spring: 8 | profiles: dev 9 | 10 | message.response: Hello from DEV spring boot properties 11 | 12 | kafka.broker: localhost:9092,localhost:9093,localhost:9094 13 | enable.auto.commit: true 14 | auto.commit.interval.ms: 100 15 | group.id: group1 16 | kafka.auto.offset.reset: earliest 17 | kafka.topic: my-topic 18 | 19 | 20 | --- 21 | 22 | spring: 23 | profiles: stage 24 | 25 | message.response: Hello from STAGE spring boot properties 26 | 27 | --- 28 | 29 | spring: 30 | profiles: prod 31 | 32 | message.response: Hello from PROD spring boot properties 33 | 34 | 35 | --- -------------------------------------------------------------------------------- /learnbootkafka/src/main/resources/application1.properties: -------------------------------------------------------------------------------- 1 | message.response=Hello from spring boot properties -------------------------------------------------------------------------------- /learnbootkafka/src/test/java/com/learnbootkafka/LearnbootkafkaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.learnbootkafka; 2 | 3 | import org.junit.Test; 4 | import org.junit.runner.RunWith; 5 | import org.springframework.boot.test.context.SpringBootTest; 6 | import org.springframework.test.context.junit4.SpringRunner; 7 | 8 | @RunWith(SpringRunner.class) 9 | @SpringBootTest 10 | public class LearnbootkafkaApplicationTests { 11 | 12 | @Test 13 | public void contextLoads() { 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /learncamel-simple/.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /learncamel-simple/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | learncamel-simple 4 | 5 | 6 | 7 | 8 | 9 | org.eclipse.jdt.core.javabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.m2e.core.maven2Builder 15 | 16 | 17 | 18 | 19 | 20 | org.eclipse.jdt.core.javanature 21 | org.eclipse.m2e.core.maven2Nature 22 | 23 | 24 | -------------------------------------------------------------------------------- /learncamel-simple/.settings/org.eclipse.jdt.core.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5 3 | org.eclipse.jdt.core.compiler.compliance=1.5 4 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning 5 | org.eclipse.jdt.core.compiler.source=1.5 6 | -------------------------------------------------------------------------------- /learncamel-simple/.settings/org.eclipse.m2e.core.prefs: -------------------------------------------------------------------------------- 1 | activeProfiles= 2 | eclipse.preferences.version=1 3 | resolveWorkspaceProjects=true 4 | version=1 5 | -------------------------------------------------------------------------------- /learncamel-simple/data/input/file1.txt: -------------------------------------------------------------------------------- 1 | Hi 2 | Hello -------------------------------------------------------------------------------- /learncamel-simple/data/output/file1.txt: -------------------------------------------------------------------------------- 1 | Hi 2 | Hello -------------------------------------------------------------------------------- /learncamel-simple/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | com.learncamel 4 | learncamel-simple 5 | 0.0.1-SNAPSHOT 6 | learncamel-simple 7 | Camel project to copy a file from one location to another. 8 | 9 | 10 | 11 | org.apache.camel 12 | camel-core 13 | 2.18.3 14 | 15 | 16 | -------------------------------------------------------------------------------- /learncamel-simple/src/main/java/com/learncamel/file/CopyFilesCamel.java: -------------------------------------------------------------------------------- 1 | package com.learncamel.file; 2 | 3 | import org.apache.camel.CamelContext; 4 | import org.apache.camel.builder.RouteBuilder; 5 | import org.apache.camel.impl.DefaultCamelContext; 6 | 7 | public class CopyFilesCamel { 8 | 9 | public static void main(String[] args) { 10 | 11 | CamelContext context= new DefaultCamelContext(); 12 | try{ 13 | context.addRoutes(new RouteBuilder() { 14 | @Override 15 | public void configure() throws Exception { 16 | from("file:data/input?noop=true") 17 | .to("file:data/output"); 18 | } 19 | }); 20 | context.start(); 21 | Thread.sleep(5000); 22 | context.stop(); 23 | 24 | }catch(Exception e){ 25 | System.out.println("Inside Exception : " + e); 26 | } 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /learncamelkafka/.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /learncamelkafka/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | learncamelkafka 4 | 5 | 6 | 7 | 8 | 9 | org.eclipse.jdt.core.javabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.m2e.core.maven2Builder 15 | 16 | 17 | 18 | 19 | 20 | org.eclipse.jdt.core.javanature 21 | org.eclipse.m2e.core.maven2Nature 22 | 23 | 24 | -------------------------------------------------------------------------------- /learncamelkafka/.settings/org.eclipse.jdt.core.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5 3 | org.eclipse.jdt.core.compiler.compliance=1.5 4 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning 5 | org.eclipse.jdt.core.compiler.source=1.5 6 | -------------------------------------------------------------------------------- /learncamelkafka/.settings/org.eclipse.m2e.core.prefs: -------------------------------------------------------------------------------- 1 | activeProfiles= 2 | eclipse.preferences.version=1 3 | resolveWorkspaceProjects=true 4 | version=1 5 | -------------------------------------------------------------------------------- /learncamelkafka/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | learncamel-kafka 4 | learncamelkafka 5 | 0.0.1-SNAPSHOT 6 | 7 | 8 | 9 | org.apache.camel 10 | camel-core 11 | 2.20.0-SNAPSHOT 12 | 13 | 14 | org.apache.camel 15 | camel-kafka 16 | 2.20.0-SNAPSHOT 17 | 18 | 19 | 20 | 21 | org.apache.logging.log4j 22 | log4j-api 23 | 2.7 24 | 25 | 26 | org.apache.logging.log4j 27 | log4j-core 28 | 2.7 29 | 30 | 31 | org.apache.logging.log4j 32 | log4j-slf4j-impl 33 | 2.7 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /learncamelkafka/src/main/java/com/learncamel/kafka/CamelKafkaClient.java: -------------------------------------------------------------------------------- 1 | package com.learncamel.kafka; 2 | 3 | import org.apache.camel.CamelContext; 4 | import org.apache.camel.builder.RouteBuilder; 5 | import org.apache.camel.impl.DefaultCamelContext; 6 | 7 | public class CamelKafkaClient { 8 | 9 | public static void main(String[] args) throws Exception { 10 | CamelContext camelContext = new DefaultCamelContext(); 11 | 12 | try { 13 | camelContext.addRoutes(new RouteBuilder() { 14 | public void configure() { 15 | // log.info("About to start route: Kafka Server -> Log "); 16 | 17 | from("kafka:my-topic?brokers=localhost:9092" 18 | + "&consumersCount=1" 19 | + "&seekTo=beginning" 20 | + "&groupId=group1") 21 | .routeId("FromKafka") 22 | .log("${body}"); 23 | } 24 | }); 25 | } catch (Exception e) { 26 | // TODO Auto-generated catch block 27 | e.printStackTrace(); 28 | } 29 | 30 | camelContext.start(); 31 | 32 | // Run fror 5 mins 33 | Thread.sleep(5 * 60 * 1000); 34 | 35 | camelContext.stop(); 36 | 37 | } 38 | 39 | } 40 | -------------------------------------------------------------------------------- /learncamelkafka/src/main/java/com/learncamel/kafka/CamelKafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.learncamel.kafka; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | import org.apache.camel.CamelContext; 7 | import org.apache.camel.ProducerTemplate; 8 | import org.apache.camel.builder.RouteBuilder; 9 | import org.apache.camel.component.kafka.KafkaComponent; 10 | import org.apache.camel.component.kafka.KafkaConstants; 11 | import org.apache.camel.impl.DefaultCamelContext; 12 | 13 | public class CamelKafkaProducer { 14 | 15 | public static void main(String[] args) throws Exception { 16 | 17 | final CamelContext context = new DefaultCamelContext(); 18 | 19 | try { 20 | context.addRoutes(new RouteBuilder() { 21 | public void configure() { 22 | 23 | KafkaComponent kafka = new KafkaComponent(); 24 | kafka.setBrokers("localhost:9092"); 25 | context.addComponent("kafka", kafka); 26 | 27 | from("direct:pushtoTopic").routeId("DirectToKafka") 28 | .to("kafka:my-topic").log("${headers}"); 29 | 30 | } 31 | 32 | }); 33 | 34 | 35 | } catch (Exception e) { 36 | // TODO Auto-generated catch block 37 | e.printStackTrace(); 38 | } 39 | 40 | ProducerTemplate producerTemplate = context.createProducerTemplate(); 41 | context.start(); 42 | 43 | Map headers = new HashMap(); 44 | 45 | headers.put(KafkaConstants.PARTITION_KEY, 0); 46 | headers.put(KafkaConstants.KEY, "1"); 47 | for(int i=0;i<=5;i++){ 48 | producerTemplate.sendBodyAndHeaders("direct:pushtoTopic"," Hi Hello " + i, headers); 49 | } 50 | 51 | 52 | Thread.sleep(5 * 60 * 1000); 53 | 54 | context.stop(); 55 | 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /learncamelkafka/src/main/resources/log4j2.properties: -------------------------------------------------------------------------------- 1 | 2 | appender.out.type = Console 3 | appender.out.name = out 4 | appender.out.layout.type = PatternLayout 5 | appender.out.layout.pattern = %d [%-15.15t] %-5p %-30.30c{1} - %m%n 6 | rootLogger.level = INFO 7 | rootLogger.appenderRef.out.ref = out -------------------------------------------------------------------------------- /learnspark-kafka/.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /learnspark-kafka/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | learnspark-kafka 4 | 5 | 6 | 7 | 8 | 9 | org.eclipse.jdt.core.javabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.m2e.core.maven2Builder 15 | 16 | 17 | 18 | 19 | 20 | org.eclipse.jdt.core.javanature 21 | org.eclipse.m2e.core.maven2Nature 22 | 23 | 24 | -------------------------------------------------------------------------------- /learnspark-kafka/.settings/org.eclipse.jdt.core.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled 3 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 4 | org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve 5 | org.eclipse.jdt.core.compiler.compliance=1.8 6 | org.eclipse.jdt.core.compiler.debug.lineNumber=generate 7 | org.eclipse.jdt.core.compiler.debug.localVariable=generate 8 | org.eclipse.jdt.core.compiler.debug.sourceFile=generate 9 | org.eclipse.jdt.core.compiler.problem.assertIdentifier=error 10 | org.eclipse.jdt.core.compiler.problem.enumIdentifier=error 11 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning 12 | org.eclipse.jdt.core.compiler.source=1.8 13 | -------------------------------------------------------------------------------- /learnspark-kafka/.settings/org.eclipse.m2e.core.prefs: -------------------------------------------------------------------------------- 1 | activeProfiles= 2 | eclipse.preferences.version=1 3 | resolveWorkspaceProjects=true 4 | version=1 5 | -------------------------------------------------------------------------------- /learnspark-kafka/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | com.learnsparkkafka 5 | learnspark-kafka 6 | 0.0.1-SNAPSHOT 7 | 8 | 9 | 2.1.0 10 | 0.10.1.1 11 | 12 | 13 | 14 | 15 | 16 | org.apache.spark 17 | spark-streaming_2.11 18 | 2.1.0 19 | 20 | 21 | 22 | org.apache.spark 23 | spark-streaming-kafka-0-10_2.11 24 | 2.1.0 25 | 26 | 27 | 28 | org.apache.kafka 29 | kafka-clients 30 | 0.10.0.1 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /learnspark-kafka/src/main/java/com/learnspark/consumer/SparkKafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.learnspark.consumer; 2 | 3 | import java.util.Arrays; 4 | import java.util.Collection; 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | 8 | import org.apache.kafka.clients.consumer.ConsumerRecord; 9 | import org.apache.kafka.common.serialization.StringDeserializer; 10 | import org.apache.spark.SparkConf; 11 | import org.apache.spark.api.java.JavaSparkContext; 12 | import org.apache.spark.api.java.function.PairFunction; 13 | import org.apache.spark.streaming.Duration; 14 | import org.apache.spark.streaming.Durations; 15 | import org.apache.spark.streaming.api.java.JavaInputDStream; 16 | import org.apache.spark.streaming.api.java.JavaStreamingContext; 17 | import org.apache.spark.streaming.kafka010.ConsumerStrategies; 18 | import org.apache.spark.streaming.kafka010.KafkaUtils; 19 | import org.apache.spark.streaming.kafka010.LocationStrategies; 20 | 21 | import scala.Tuple2; 22 | 23 | public class SparkKafkaConsumer { 24 | 25 | public static void main(String[] args) { 26 | 27 | SparkConf sparkConf = new SparkConf().setMaster("local[2]").setAppName("KafkaConsumer"); // Initialize the Spark Context 28 | 29 | 30 | JavaStreamingContext javaSC = new JavaStreamingContext(sparkConf, Durations.seconds(10)); // Interval to stream the value from topic. Poll for records every 10 seconds. 31 | 32 | Map kafkaParams = new HashMap(); 33 | kafkaParams.put("bootstrap.servers", "localhost:9092"); 34 | kafkaParams.put("key.deserializer", StringDeserializer.class); 35 | kafkaParams.put("value.deserializer", StringDeserializer.class); 36 | kafkaParams.put("group.id", "group1"); 37 | kafkaParams.put("auto.offset.reset", "earliest"); 38 | kafkaParams.put("enable.auto.commit", true); 39 | 40 | Collection topics = Arrays.asList("my-topic"); 41 | 42 | final JavaInputDStream> stream = 43 | KafkaUtils.createDirectStream( 44 | javaSC, 45 | LocationStrategies.PreferConsistent(), 46 | ConsumerStrategies.Subscribe(topics, kafkaParams) 47 | ); 48 | 49 | stream.foreachRDD(rdd -> { 50 | System.out.println("---RDD with " + rdd.partitions().size() 51 | + " partitions and " + rdd.count() + " records"); 52 | rdd.foreach(record -> System.out.println(record.value())); 53 | }); 54 | 55 | System.out.println("Before Start"); 56 | javaSC.start(); 57 | System.out.println("After Start"); 58 | try { 59 | javaSC.awaitTermination(); 60 | } catch (InterruptedException e) { 61 | // TODO Auto-generated catch block 62 | e.printStackTrace(); 63 | } 64 | 65 | 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /learnspark-kafka/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Define the root logger with appender file 2 | log4j.rootCategory=ERROR, console 3 | log4j.appender.console=org.apache.log4j.ConsoleAppender 4 | log4j.appender.console.target=System.err 5 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 7 | 8 | # Settings to quiet third party logs that are too verbose 9 | log4j.logger.org.eclipse.jetty=WARN 10 | log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR 11 | log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=ERROR 12 | log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=ERROR --------------------------------------------------------------------------------