├── .doc └── kafka.png ├── 01-workspace ├── 01-kafka-setup │ ├── compose │ │ ├── docker-compose.yaml │ │ └── props │ │ │ └── server.properties │ └── image │ │ ├── Dockerfile │ │ └── runner.sh ├── 02-kafka-101 │ ├── 01-topic.sh │ ├── 02-producer.sh │ ├── 03-consumer.sh │ ├── 04-print-offset.sh │ ├── 05-consumer-group.sh │ ├── 06-reset-offset.sh │ └── 07-transaction.sh ├── 03-kafka-cluster │ ├── docker-compose.yaml │ └── props │ │ ├── s1.properties │ │ ├── s2.properties │ │ └── s3.properties ├── 04-assignment-resources │ ├── 01-product-service-resources │ │ ├── application.yaml │ │ ├── data.sql │ │ ├── product.csv │ │ └── static │ │ │ └── index.html │ └── 02-analytics-service-resources │ │ ├── application.properties │ │ ├── application.yaml │ │ ├── data.sql │ │ └── static │ │ └── index.html ├── 05-kafka-security-sasl-plain │ ├── 01-kafka-topics.sh │ ├── docker-compose.yaml │ └── props │ │ ├── consumer.properties │ │ ├── jaas.conf │ │ └── security.properties └── 06-kafka-security-sasl-ssl │ ├── 01-kafka-topics.sh │ ├── certs │ ├── kafka-signed.crt │ ├── kafka-signing-request.crt │ ├── kafka.keystore.jks │ ├── kafka.truststore.jks │ ├── root.crt │ └── root.key │ ├── docker-compose.yaml │ ├── generate-certs │ └── create-certificates.sh │ └── props │ ├── consumer.properties │ ├── jaas.conf │ └── security.properties ├── 02-reactive-kafka-playground ├── .gitignore ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ └── maven-wrapper.properties ├── mvnw ├── mvnw.cmd ├── pom.xml └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── vinsguru │ │ │ └── reactivekafkaplayground │ │ │ ├── ReactiveKafkaPlaygroundApplication.java │ │ │ ├── sec01 │ │ │ ├── Lec01KafkaConsumer.java │ │ │ └── Lec02KafkaConsumer.java │ │ │ ├── sec02 │ │ │ └── KafkaProducer.java │ │ │ ├── sec03 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec04 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec05 │ │ │ ├── KafkaConsumer.java │ │ │ ├── KafkaConsumerGroup.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec06 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaConsumerGroup.java │ │ │ ├── sec07 │ │ │ └── KafkaConsumer.java │ │ │ ├── sec08 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec09 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec10 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec11 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec12 │ │ │ ├── KafkaConsumerV1.java │ │ │ ├── KafkaConsumerV2.java │ │ │ ├── KafkaConsumerV3.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec13 │ │ │ ├── KafkaConsumer.java │ │ │ ├── KafkaProducer.java │ │ │ ├── OrderEventProcessor.java │ │ │ ├── ReactiveDeadLetterTopicProducer.java │ │ │ └── RecordProcessingException.java │ │ │ ├── sec14 │ │ │ ├── KafkaConsumer.java │ │ │ └── KafkaProducer.java │ │ │ ├── sec15 │ │ │ ├── TransferDemo.java │ │ │ ├── TransferEvent.java │ │ │ ├── TransferEventConsumer.java │ │ │ └── TransferEventProcessor.java │ │ │ ├── sec16 │ │ │ ├── ConsumerRunner.java │ │ │ ├── DummyOrder.java │ │ │ ├── KafkaConsumerConfig.java │ │ │ ├── KafkaProducerConfig.java │ │ │ ├── OrderEvent.java │ │ │ └── ProducerRunner.java │ │ │ ├── sec17 │ │ │ ├── consumer │ │ │ │ ├── ConsumerRunner.java │ │ │ │ ├── DummyOrder.java │ │ │ │ └── KafkaConsumerConfig.java │ │ │ └── producer │ │ │ │ ├── KafkaProducerConfig.java │ │ │ │ ├── OrderEvent.java │ │ │ │ └── ProducerRunner.java │ │ │ └── sec18 │ │ │ └── KafkaProducer.java │ └── resources │ │ ├── application.properties │ │ ├── 
application.yaml │ │ ├── kafka.truststore.jks │ │ └── logback.xml │ └── test │ └── java │ └── com │ └── vinsguru │ └── reactivekafkaplayground │ ├── AbstractIT.java │ ├── EmbeddedKafkaPlaygroundTests.java │ ├── OrderEventConsumerTest.java │ └── OrderEventProducerTest.java ├── 03-assignment ├── analytics-service │ ├── .gitignore │ ├── .mvn │ │ └── wrapper │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ ├── mvnw │ ├── mvnw.cmd │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── vinsguru │ │ │ │ └── analyticsservice │ │ │ │ ├── AnalyticsServiceApplication.java │ │ │ │ ├── config │ │ │ │ └── KafkaConsumerConfig.java │ │ │ │ ├── controller │ │ │ │ └── TrendingController.java │ │ │ │ ├── dto │ │ │ │ └── ProductTrendingDto.java │ │ │ │ ├── entity │ │ │ │ └── ProductViewCount.java │ │ │ │ ├── event │ │ │ │ └── ProductViewEvent.java │ │ │ │ ├── repository │ │ │ │ └── ProductViewRepository.java │ │ │ │ └── service │ │ │ │ ├── ProductTrendingBroadcastService.java │ │ │ │ └── ProductViewEventConsumer.java │ │ └── resources │ │ │ ├── application.properties │ │ │ ├── application.yaml │ │ │ ├── data.sql │ │ │ └── static │ │ │ └── index.html │ │ └── test │ │ └── java │ │ └── com │ │ └── vinsguru │ │ └── analyticsservice │ │ ├── AbstractIntegrationTest.java │ │ └── AnalyticsServiceApplicationTests.java └── product-service │ ├── .gitignore │ ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ └── maven-wrapper.properties │ ├── mvnw │ ├── mvnw.cmd │ ├── pom.xml │ └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── vinsguru │ │ │ └── productservice │ │ │ ├── ProductServiceApplication.java │ │ │ ├── config │ │ │ └── KafkaProducerConfig.java │ │ │ ├── controller │ │ │ └── ProductController.java │ │ │ ├── dto │ │ │ └── ProductDto.java │ │ │ ├── entity │ │ │ └── Product.java │ │ │ ├── event │ │ │ └── ProductViewEvent.java │ │ │ ├── repository │ │ │ └── ProductRepository.java │ │ │ ├── service │ │ │ ├── ProductService.java │ │ │ └── ProductViewEventProducer.java │ │ │ └── util │ │ │ └── EntityDtoUtil.java │ └── resources │ │ ├── application.properties │ │ ├── application.yaml │ │ ├── data.sql │ │ ├── product.csv │ │ └── static │ │ └── index.html │ └── test │ └── java │ └── com │ └── vinsguru │ └── productservice │ ├── AbstractIntegrationTest.java │ └── ProductServiceApplicationTests.java └── README.md /.doc/kafka.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/.doc/kafka.png -------------------------------------------------------------------------------- /01-workspace/01-kafka-setup/compose/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | kafka1: 4 | image: vinsdocker/kafka 5 | container_name: kafka 6 | ports: 7 | - "9092:9092" 8 | environment: 9 | KAFKA_CLUSTER_ID: OTMwNzFhYTY1ODNiNGE5OT 10 | volumes: 11 | - ./props/server.properties:/kafka/config/kraft/server.properties 12 | - ./data:/tmp/kafka-logs -------------------------------------------------------------------------------- /01-workspace/01-kafka-setup/compose/props/server.properties: -------------------------------------------------------------------------------- 1 | process.roles=broker,controller 2 | node.id=1 3 | listeners=PLAINTEXT://:9092,CONTROLLER://:9093 4 | controller.listener.names=CONTROLLER 5 | advertised.listeners=PLAINTEXT://localhost:9092 6 | 
inter.broker.listener.name=PLAINTEXT 7 | controller.quorum.voters=1@kafka:9093 8 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL 9 | auto.create.topics.enable=true 10 | offsets.topic.replication.factor=1 11 | transaction.state.log.replication.factor=1 12 | transaction.state.log.min.isr=1 13 | log.dirs=/tmp/kafka-logs -------------------------------------------------------------------------------- /01-workspace/01-kafka-setup/image/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM eclipse-temurin:17-jre-focal 2 | 3 | ADD https://downloads.apache.org/kafka/3.4.0/kafka_2.13-3.4.0.tgz kafka.tgz 4 | 5 | RUN tar -xvzf kafka.tgz 6 | 7 | RUN rm kafka.tgz 8 | 9 | RUN mv /kafka_2.13-3.4.0 /kafka 10 | 11 | ENV PATH=${PATH}:/kafka/bin 12 | 13 | WORKDIR learning 14 | 15 | ADD runner.sh runner.sh 16 | 17 | CMD [ "sh", "runner.sh" ] -------------------------------------------------------------------------------- /01-workspace/01-kafka-setup/image/runner.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # If env variable not set, generate random one 4 | # Large organizations might have multiple kafka clusters. Each cluster is expected to have an ID 5 | clusterId=${KAFKA_CLUSTER_ID:-$(kafka-storage.sh random-uuid)} 6 | echo "Kafka Cluster ID : ${clusterId}" 7 | 8 | # For the first time, format the storage. It would create couple of files (meta.properties, checkpoint file). 9 | # If it is already formatted, it would be ignored. 10 | 11 | echo "Formatting storage" 12 | kafka-storage.sh format -t $clusterId -c /kafka/config/kraft/server.properties 13 | 14 | # Finally start the kafka server!! 
15 | 16 | echo "Starting Kafka" 17 | exec kafka-server-start.sh /kafka/config/kraft/server.properties -------------------------------------------------------------------------------- /01-workspace/02-kafka-101/01-topic.sh: -------------------------------------------------------------------------------- 1 | # create a kafka topic called hello-world 2 | # we assume that the directory which contains 'kafka-topics.sh' is included in the PATH 3 | kafka-topics.sh --bootstrap-server localhost:9092 --topic hello-world --create 4 | 5 | # list all topics 6 | kafka-topics.sh --bootstrap-server localhost:9092 --list 7 | 8 | # describe a topic 9 | kafka-topics.sh --bootstrap-server localhost:9092 --topic hello-world --describe 10 | 11 | # delete a topic 12 | kafka-topics.sh --bootstrap-server localhost:9092 --topic hello-world --delete 13 | 14 | # topic with partitions 15 | kafka-topics.sh --bootstrap-server localhost:9092 --topic order-events --create --partitions 2 16 | 17 | # topic with replication factor 18 | kafka-topics.sh --bootstrap-server localhost:9092 --topic order-events --create --replication-factor 3 -------------------------------------------------------------------------------- /01-workspace/02-kafka-101/02-producer.sh: -------------------------------------------------------------------------------- 1 | # to produce messages 2 | kafka-console-producer.sh --bootstrap-server localhost:9092 --topic hello-world 3 | 4 | # linger.ms 5 | kafka-console-producer.sh --bootstrap-server localhost:9092 --topic hello-world --timeout 100 -------------------------------------------------------------------------------- /01-workspace/02-kafka-101/03-consumer.sh: -------------------------------------------------------------------------------- 1 | # to consume messages 2 | kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic hello-world 3 | 4 | # to consume from beginning 5 | kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic hello-world --from-beginning -------------------------------------------------------------------------------- /01-workspace/02-kafka-101/04-print-offset.sh: -------------------------------------------------------------------------------- 1 | 2 | # to print offset, timestamp, etc. 3 | kafka-console-consumer.sh \ 4 | --bootstrap-server localhost:9092 \ 5 | --topic hello-world \ 6 | --property print.offset=true \ 7 | --property print.timestamp=true -------------------------------------------------------------------------------- /01-workspace/02-kafka-101/05-consumer-group.sh: -------------------------------------------------------------------------------- 1 | 2 | # create console producer 3 | kafka-console-producer.sh \ 4 | --bootstrap-server localhost:9092 \ 5 | --topic hello-world \ 6 | --property key.separator=: \ 7 | --property parse.key=true 8 | 9 | # create console consumer with a group 10 | kafka-console-consumer.sh \ 11 | --bootstrap-server localhost:9092 \ 12 | --topic hello-world \ 13 | --property print.offset=true \ 14 | --property print.key=true \ 15 | --group name 16 | 17 | # list all the consumer groups 18 | kafka-consumer-groups.sh --bootstrap-server localhost:9092 --list 19 | 20 | # describe a consumer group 21 | kafka-consumer-groups.sh \ 22 | --bootstrap-server localhost:9092 \ 23 | --group cg \ 24 | --describe 25 | 26 | -------------------------------------------------------------------------------- /01-workspace/02-kafka-101/06-reset-offset.sh: -------------------------------------------------------------------------------- 1 | # stop the consumers
before you enter this command 2 | 3 | # dry-run 4 | kafka-consumer-groups.sh \ 5 | --bootstrap-server localhost:9092 \ 6 | --group cg \ 7 | --topic hello-world \ 8 | --reset-offsets \ 9 | --shift-by -3 \ 10 | --dry-run 11 | 12 | # reset offset by shifting the offset 13 | kafka-consumer-groups.sh \ 14 | --bootstrap-server localhost:9092 \ 15 | --group cg \ 16 | --topic hello-world \ 17 | --reset-offsets \ 18 | --shift-by -3 \ 19 | --execute 20 | 21 | # reset by duration 22 | kafka-consumer-groups.sh \ 23 | --bootstrap-server localhost:9092 \ 24 | --topic hello-world \ 25 | --group cg \ 26 | --reset-offsets \ 27 | --by-duration PT5M \ 28 | --execute 29 | 30 | # -- to the beginning 31 | kafka-consumer-groups.sh \ 32 | --bootstrap-server localhost:9092 \ 33 | --topic hello-world \ 34 | --group cg \ 35 | --reset-offsets \ 36 | --to-earliest \ 37 | --execute 38 | 39 | # -- to the end 40 | kafka-consumer-groups.sh \ 41 | --bootstrap-server localhost:9092 \ 42 | --topic hello-world \ 43 | --group cg \ 44 | --reset-offsets \ 45 | --to-latest \ 46 | --execute 47 | 48 | # -- to date-time 49 | kafka-consumer-groups.sh \ 50 | --bootstrap-server localhost:9092 \ 51 | --topic hello-world \ 52 | --group cg \ 53 | --reset-offsets \ 54 | --to-datetime 2023-01-01T01:00:00.000 \ 55 | --execute -------------------------------------------------------------------------------- /01-workspace/02-kafka-101/07-transaction.sh: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | kafka-topics.sh --bootstrap-server localhost:9092 --topic transfer-requests --create 5 | 6 | kafka-topics.sh --bootstrap-server localhost:9092 --topic transaction-events --create 7 | 8 | kafka-console-producer.sh \ 9 | --bootstrap-server localhost:9092 \ 10 | --topic transfer-requests \ 11 | --property key.separator=: \ 12 | --property parse.key=true 13 | 14 | kafka-console-consumer.sh \ 15 | --bootstrap-server localhost:9092 \ 16 | --topic transaction-events \ 17 | --property print.key=true \ 18 | --isolation-level=read_committed \ 19 | --from-beginning 20 | 21 | kafka-console-consumer.sh \ 22 | --bootstrap-server localhost:9092 \ 23 | --topic transaction-events \ 24 | --property print.key=true \ 25 | --from-beginning -------------------------------------------------------------------------------- /01-workspace/03-kafka-cluster/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | kafka1: 4 | image: vinsdocker/kafka 5 | container_name: kafka1 6 | ports: 7 | - "8081:8081" 8 | environment: 9 | - KAFKA_CLUSTER_ID=OTMwNzFhYTY1ODNiNGE5OT 10 | volumes: 11 | - ./props/s1.properties:/kafka/config/kraft/server.properties 12 | - ./data/b1:/tmp/kafka-logs 13 | kafka2: 14 | image: vinsdocker/kafka 15 | container_name: kafka2 16 | ports: 17 | - "8082:8082" 18 | environment: 19 | - KAFKA_CLUSTER_ID=OTMwNzFhYTY1ODNiNGE5OT 20 | volumes: 21 | - ./props/s2.properties:/kafka/config/kraft/server.properties 22 | - ./data/b2:/tmp/kafka-logs 23 | kafka3: 24 | image: vinsdocker/kafka 25 | container_name: kafka3 26 | ports: 27 | - "8083:8083" 28 | environment: 29 | - KAFKA_CLUSTER_ID=OTMwNzFhYTY1ODNiNGE5OT 30 | volumes: 31 | - ./props/s3.properties:/kafka/config/kraft/server.properties 32 | - ./data/b3:/tmp/kafka-logs -------------------------------------------------------------------------------- /01-workspace/03-kafka-cluster/props/s1.properties: -------------------------------------------------------------------------------- 1 | 
process.roles=broker,controller 2 | node.id=1 3 | listeners=INTERNAL://:9092,CONTROLLER://:9093,EXTERNAL://:8081 4 | controller.listener.names=CONTROLLER 5 | inter.broker.listener.name=INTERNAL 6 | advertised.listeners=INTERNAL://kafka1:9092,EXTERNAL://localhost:8081 7 | controller.quorum.voters=1@kafka1:9093,2@kafka2:9093,3@kafka3:9093 8 | 9 | # PLAINTEXT 10 | # SSL 11 | # SASL_PLAINTEXT 12 | # SASL_SSL 13 | 14 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL 15 | auto.create.topics.enable=false 16 | 17 | # __consumer_offsets 18 | offsets.topic.replication.factor=3 19 | log.dirs=/tmp/kafka-logs -------------------------------------------------------------------------------- /01-workspace/03-kafka-cluster/props/s2.properties: -------------------------------------------------------------------------------- 1 | process.roles=broker,controller 2 | node.id=2 3 | listeners=INTERNAL://:9092,CONTROLLER://:9093,EXTERNAL://:8082 4 | controller.listener.names=CONTROLLER 5 | inter.broker.listener.name=INTERNAL 6 | advertised.listeners=INTERNAL://kafka2:9092,EXTERNAL://localhost:8082 7 | controller.quorum.voters=1@kafka1:9093,2@kafka2:9093,3@kafka3:9093 8 | 9 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL 10 | auto.create.topics.enable=false 11 | 12 | # __consumer_offsets 13 | offsets.topic.replication.factor=3 14 | log.dirs=/tmp/kafka-logs -------------------------------------------------------------------------------- /01-workspace/03-kafka-cluster/props/s3.properties: -------------------------------------------------------------------------------- 1 | process.roles=broker,controller 2 | node.id=3 3 | listeners=INTERNAL://:9092,CONTROLLER://:9093,EXTERNAL://:8083 4 | controller.listener.names=CONTROLLER 5 | inter.broker.listener.name=INTERNAL 6 | advertised.listeners=INTERNAL://kafka3:9092,EXTERNAL://localhost:8083 7 | controller.quorum.voters=1@kafka1:9093,2@kafka2:9093,3@kafka3:9093 8 | 9 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL 10 | auto.create.topics.enable=false 11 | 12 | # __consumer_offsets 13 | offsets.topic.replication.factor=3 14 | log.dirs=/tmp/kafka-logs -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/01-product-service-resources/application.yaml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | bootstrap-servers: 4 | - localhost:9092 5 | producer: 6 | key-serializer: org.apache.kafka.common.serialization.StringSerializer 7 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/01-product-service-resources/data.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS product; 2 | CREATE TABLE product AS SELECT * FROM CSVREAD('classpath:product.csv'); -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/01-product-service-resources/product.csv: -------------------------------------------------------------------------------- 1 | 
id,description,price 2 | 1,product-1,1 3 | 2,product-2,2 4 | 3,product-3,3 5 | 4,product-4,4 6 | 5,product-5,5 7 | 6,product-6,6 8 | 7,product-7,7 9 | 8,product-8,8 10 | 9,product-9,9 11 | 10,product-10,10 12 | 11,product-11,11 13 | 12,product-12,12 14 | 13,product-13,13 15 | 14,product-14,14 16 | 15,product-15,15 17 | 16,product-16,16 18 | 17,product-17,17 19 | 18,product-18,18 20 | 19,product-19,19 21 | 20,product-20,20 22 | 21,product-21,21 23 | 22,product-22,22 24 | 23,product-23,23 25 | 24,product-24,24 26 | 25,product-25,25 27 | 26,product-26,26 28 | 27,product-27,27 29 | 28,product-28,28 30 | 29,product-29,29 31 | 30,product-30,30 32 | 31,product-31,31 33 | 32,product-32,32 34 | 33,product-33,33 35 | 34,product-34,34 36 | 35,product-35,35 37 | 36,product-36,36 38 | 37,product-37,37 39 | 38,product-38,38 40 | 39,product-39,39 41 | 40,product-40,40 42 | 41,product-41,41 43 | 42,product-42,42 44 | 43,product-43,43 45 | 44,product-44,44 46 | 45,product-45,45 47 | 46,product-46,46 48 | 47,product-47,47 49 | 48,product-48,48 50 | 49,product-49,49 51 | 50,product-50,50 52 | 51,product-51,51 53 | 52,product-52,52 54 | 53,product-53,53 55 | 54,product-54,54 56 | 55,product-55,55 57 | 56,product-56,56 58 | 57,product-57,57 59 | 58,product-58,58 60 | 59,product-59,59 61 | 60,product-60,60 62 | 61,product-61,61 63 | 62,product-62,62 64 | 63,product-63,63 65 | 64,product-64,64 66 | 65,product-65,65 67 | 66,product-66,66 68 | 67,product-67,67 69 | 68,product-68,68 70 | 69,product-69,69 71 | 70,product-70,70 72 | 71,product-71,71 73 | 72,product-72,72 74 | 73,product-73,73 75 | 74,product-74,74 76 | 75,product-75,75 77 | 76,product-76,76 78 | 77,product-77,77 79 | 78,product-78,78 80 | 79,product-79,79 81 | 80,product-80,80 82 | 81,product-81,81 83 | 82,product-82,82 84 | 83,product-83,83 85 | 84,product-84,84 86 | 85,product-85,85 87 | 86,product-86,86 88 | 87,product-87,87 89 | 88,product-88,88 90 | 89,product-89,89 91 | 90,product-90,90 92 | 91,product-91,91 93 | 92,product-92,92 94 | 93,product-93,93 95 | 94,product-94,94 96 | 95,product-95,95 97 | 96,product-96,96 98 | 97,product-97,97 99 | 98,product-98,98 100 | 99,product-99,99 101 | 100,product-100,100 -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/01-product-service-resources/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Products Service 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 |
[markup of index.html not recoverable; visible text: "Product Service" heading and the note "You can click on these buttons to simulate product view", followed by the product buttons and a script block]
23 | 59 | 60 | -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/02-analytics-service-resources/application.properties: -------------------------------------------------------------------------------- 1 | product.trending.events.topic=product-trending-events 2 | product.view.events.topic=product-view-events 3 | logging.level.root=INFO 4 | server.port=7070 5 | -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/02-analytics-service-resources/application.yaml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | bootstrap-servers: 4 | - localhost:9092 5 | consumer: 6 | group-id: trending-service-group 7 | auto-offset-reset: earliest 8 | key-deserializer: org.apache.kafka.common.serialization.StringDeserializer 9 | value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer 10 | properties: 11 | "group.instance.id": "1" 12 | "[spring.json.trusted.packages]": "*" -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/02-analytics-service-resources/data.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS product_view_count; 2 | CREATE TABLE product_view_count ( 3 | id INT NOT NULL, 4 | count INT 5 | ); -------------------------------------------------------------------------------- /01-workspace/04-assignment-resources/02-analytics-service-resources/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Analytics Service 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 |
[markup of index.html not recoverable; visible text: "Product Analytics" heading, "This chart shows the top 5 products based on the view count.", and "If you can see the chart, then great job. You did the assignment.", followed by the chart area and a script block]
23 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /01-workspace/05-kafka-security-sasl-plain/01-kafka-topics.sh: -------------------------------------------------------------------------------- 1 | kafka-topics.sh \ 2 | --bootstrap-server localhost:9092 \ 3 | --topic order-events --create 4 | 5 | kafka-topics.sh \ 6 | --bootstrap-server localhost:9092 \ 7 | --command-config consumer.properties \ 8 | --topic order-events --create -------------------------------------------------------------------------------- /01-workspace/05-kafka-security-sasl-plain/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | kafka: 4 | image: vinsdocker/kafka 5 | container_name: kafka 6 | ports: 7 | - "9092:9092" 8 | environment: 9 | KAFKA_CLUSTER_ID: OTMwNzFhYTY1ODNiNGE5OT 10 | KAFKA_OPTS: "-Djava.security.auth.login.config=/kafka/config/jaas.conf" 11 | volumes: 12 | - ./props/security.properties:/kafka/config/kraft/server.properties 13 | - ./props/jaas.conf:/kafka/config/jaas.conf 14 | - ./data:/tmp/kafka-logs 15 | - ./props/consumer.properties:/learning/consumer.properties -------------------------------------------------------------------------------- /01-workspace/05-kafka-security-sasl-plain/props/consumer.properties: -------------------------------------------------------------------------------- 1 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required serviceName="Kafka" username="client" password="client-secret"; 2 | security.protocol=SASL_PLAINTEXT 3 | sasl.mechanism=PLAIN 4 | -------------------------------------------------------------------------------- /01-workspace/05-kafka-security-sasl-plain/props/jaas.conf: -------------------------------------------------------------------------------- 1 | KafkaServer { 2 | org.apache.kafka.common.security.plain.PlainLoginModule required 3 | username="admin" 4 | password="secret" 5 | user_admin="secret" 6 | user_client="client-secret"; 7 | }; -------------------------------------------------------------------------------- /01-workspace/05-kafka-security-sasl-plain/props/security.properties: -------------------------------------------------------------------------------- 1 | process.roles=broker,controller 2 | node.id=1 3 | listeners=SASL_PLAINTEXT://:9092,CONTROLLER://:9093 4 | controller.listener.names=CONTROLLER 5 | advertised.listeners=SASL_PLAINTEXT://localhost:9092 6 | inter.broker.listener.name=SASL_PLAINTEXT 7 | controller.quorum.voters=1@kafka:9093 8 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL 9 | auto.create.topics.enable=false 10 | offsets.topic.replication.factor=1 11 | transaction.state.log.replication.factor=1 12 | transaction.state.log.min.isr=1 13 | 14 | # This PLAIN is different from PLAINTEXT above 15 | sasl.enabled.mechanisms=PLAIN 16 | sasl.mechanism.controller.protocol=PLAIN 17 | sasl.mechanism.inter.broker.protocol=PLAIN -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/01-kafka-topics.sh: -------------------------------------------------------------------------------- 1 | kafka-topics.sh \ 2 | --bootstrap-server localhost:9092 \ 3 | --command-config consumer.properties \ 4 | --topic order-events --create 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- 
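Not part of the repository: the sketch below shows how the SASL client settings from the consumer.properties files above could be supplied to a reactive consumer, assuming the reactor-kafka API that 02-reactive-kafka-playground already depends on. The class name and the group id demo-group are invented for illustration; the bootstrap server, topic name, PLAIN credentials, and truststore path/password are taken from the properties files in this workspace (the truststore path is the location as mounted inside the container).

import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;

// Hypothetical illustration only -- not a file in this repository.
public class SaslSslConsumerSketch {

    public static void main(String[] args) {
        var config = Map.<String, Object>of(
                ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
                ConsumerConfig.GROUP_ID_CONFIG, "demo-group",                 // assumed group id
                ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
                ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
                CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL",     // use SASL_PLAINTEXT for the 05- setup
                SaslConfigs.SASL_MECHANISM, "PLAIN",
                SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"client\" password=\"client-secret\";",
                SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/certs/kafka.truststore.jks",
                SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "changeit"
        );
        var options = ReceiverOptions.<String, String>create(config)
                .subscription(List.of("order-events"));   // topic created by 01-kafka-topics.sh
        KafkaReceiver.create(options)
                .receive()
                .doOnNext(r -> {
                    System.out.println(r.key() + " : " + r.value());
                    r.receiverOffset().acknowledge();     // mark the record as processed
                })
                .blockLast();                             // keep the demo consumer running
    }
}

The SASL_PLAINTEXT variant simply drops the two truststore entries, mirroring the difference between the 05- and 06- consumer.properties files.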
/01-workspace/06-kafka-security-sasl-ssl/certs/kafka-signed.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICpDCCAYwCCQDaofz5dAOYFzANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAls 3 | b2NhbGhvc3QwHhcNMjMwMzI2MjAzMjA4WhcNMzMwMzIzMjAzMjA4WjAUMRIwEAYD 4 | VQQDEwlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCn 5 | RAfOB6pQALi8RUcbdzmZrHHusmiedIp3NbQ09QHeld7DHPg6DR63pyUHkURMFGEx 6 | x9OFezjIBiSDQHrTUM4JxNc1OcWzwCrSsNG4T+ASYzJYFMAFX5OPIYCJDFFuKG/F 7 | ds3dbB9TaWjTLHK+VV75EJrmn5w6cJ93OgyonJk74XCuygcbjaPTQaSs+pPF2PUP 8 | AZDP3DfTMC9uZREhWAMGC0ptb0BrcR1xFajBCzno80sw+Zmhbl8KumTTP070YDsb 9 | FguXvtWO3azrAzIT4mcqWDJpl4zB2k5duOy2IZeCHXqMRS+7Ay3LEvHrIAKQefkw 10 | 0aHyve0/mJpxIOF3EHpzAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFU0gG3SGX5y 11 | /uRu1IGypi2tVRlZ685mOkkUWWbfXbbs/6Igm8RpLBNw/MQ2nnxrx07e+ASd+3Nu 12 | /2xnCZgO9i/JvZnQXiDxXF3z/yzIW/P7PzIMPe0/+VM3ULLmYDEZ8S131clLtHWe 13 | bJXmNDZeLpxi8a/nxyXXnIGCE4+lYKkk22DunM6mAUWKeGljB4LHSzDZTExHsVhs 14 | PYiNp3kdQ1L8++S9SLbsIvmiMnJxoxkApewQpDAE0cqoPh0NGKtl6HbU2WoZP4Sn 15 | 3izBE+sej6aNMgMZ5wSTR6LuATaHAQ+lKXaGSjvddFtZ145u0Dombzt1ACGxlnG0 16 | jxU8K6J3awQ= 17 | -----END CERTIFICATE----- 18 | -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/certs/kafka-signing-request.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN NEW CERTIFICATE REQUEST----- 2 | MIICiTCCAXECAQAwFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0B 3 | AQEFAAOCAQ8AMIIBCgKCAQEAp0QHzgeqUAC4vEVHG3c5maxx7rJonnSKdzW0NPUB 4 | 3pXewxz4Og0et6clB5FETBRhMcfThXs4yAYkg0B601DOCcTXNTnFs8Aq0rDRuE/g 5 | EmMyWBTABV+TjyGAiQxRbihvxXbN3WwfU2lo0yxyvlVe+RCa5p+cOnCfdzoMqJyZ 6 | O+FwrsoHG42j00GkrPqTxdj1DwGQz9w30zAvbmURIVgDBgtKbW9Aa3EdcRWowQs5 7 | 6PNLMPmZoW5fCrpk0z9O9GA7GxYLl77Vjt2s6wMyE+JnKlgyaZeMwdpOXbjstiGX 8 | gh16jEUvuwMtyxLx6yACkHn5MNGh8r3tP5iacSDhdxB6cwIDAQABoDAwLgYJKoZI 9 | hvcNAQkOMSEwHzAdBgNVHQ4EFgQUI7w/91OfgoRI22ttzryRpHdqn/MwDQYJKoZI 10 | hvcNAQELBQADggEBAIEBcyMtjoF8lGRUTcfA3OI+mf+FnOuaQj8wqUUh5TxPuuJu 11 | +OMEWywd7a6dKtwxYw1CL6rOTKkM5JeoGDl2s4EivrGwEj3kPkP+gDEkZ7LcL9IX 12 | fBfrqeD89mkGspsqB0DtcgBemDuwQV/jcyBq36fEbOVbt/io1kPv+6Ff7+bi9YyE 13 | 3ZRJO/0wyip2rGCZIDoKx8FQMFk15pBxMd6uKUSO1ByVPiiusswNPriJo5M+ouw8 14 | le3JJlHOAkFChb4q7boP5aZWr5Eb6UrNP63oz9hxvY8xvfEp6woOEiP+o2RjjQBx 15 | OG1isI6LuXbjlAlcuHwApDjDl00Gy/eeLax53Mk= 16 | -----END NEW CERTIFICATE REQUEST----- 17 | -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/certs/kafka.keystore.jks: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/01-workspace/06-kafka-security-sasl-ssl/certs/kafka.keystore.jks -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/certs/kafka.truststore.jks: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/01-workspace/06-kafka-security-sasl-ssl/certs/kafka.truststore.jks -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/certs/root.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | 
MIICpDCCAYwCCQCR3w8B2Drn7jANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAls 3 | b2NhbGhvc3QwHhcNMjMwMzI2MjAzMjA4WhcNMjMwNDI1MjAzMjA4WjAUMRIwEAYD 4 | VQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD5 5 | NaH+xN21C8hJmhHF3d1sD0JbvvhQuROBq7aFyE03YCoMCReB1PP6TPTJ836toaKP 6 | HWPgPIGjyWHa6J0ZB/W1YVhsPOtquqyBbkOmcgG8OrR8skSrUaAbISJRRQnU34xZ 7 | MjUUBVN7T2HYi23jG5htn1Q0TU5jcvFGayEh75nyrRL4qC+68rXh7tHOzIfyLV00 8 | 0aaKHLzG53oEUMU/cqvDYKp+eraszqfkjSqiAqSSunl3Eqg6niuGfssW9nix7+M3 9 | hSupO0KMwYWly++0q3C8qzMPXMOlr0giAtm1xwMfaNnWfsPznbqBndpna7/yZxjj 10 | 7h5KMk9eJv9tfxE9lfSZAgMBAAEwDQYJKoZIhvcNAQELBQADggEBALFVnLXr80hf 11 | wrAaA3EWUHU6CFBBJyIqAL90pOzrtiT1go/3lt7jWZAYjX62gXK8P8kbbz2legY1 12 | 1GXgmXdcc6ERDxhvIdhGo761djsNay/lOsUuU5A7Qa7mk3d5fgBhpsW/tCzAv5jR 13 | wB1iVQcvniY4lJ9ANJSDMzHSdSPHUfImvatrQg7jxdKaBwtv6NMq4pmWv7d97BeG 14 | YLDzT5D6PXZHfTlJGdeQfMPqyFthFGffpDTJLzVrjDTJdoLcNFZHeXtZm3YIDXlH 15 | J1jFdhqFTuySLDniZp+B80jeOxW5Ecy4jln0Ydydncr40Az0C8EcO3WDA4nFqU/1 16 | WRjvqDCLFbs= 17 | -----END CERTIFICATE----- 18 | -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/certs/root.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEA+TWh/sTdtQvISZoRxd3dbA9CW774ULkTgau2hchNN2AqDAkX 3 | gdTz+kz0yfN+raGijx1j4DyBo8lh2uidGQf1tWFYbDzrarqsgW5DpnIBvDq0fLJE 4 | q1GgGyEiUUUJ1N+MWTI1FAVTe09h2Itt4xuYbZ9UNE1OY3LxRmshIe+Z8q0S+Kgv 5 | uvK14e7RzsyH8i1dNNGmihy8xud6BFDFP3Krw2Cqfnq2rM6n5I0qogKkkrp5dxKo 6 | Op4rhn7LFvZ4se/jN4UrqTtCjMGFpcvvtKtwvKszD1zDpa9IIgLZtccDH2jZ1n7D 7 | 8526gZ3aZ2u/8mcY4+4eSjJPXib/bX8RPZX0mQIDAQABAoIBAQDMnd6+woGPT3dK 8 | B3ikUACn4veHQQu9q9h6VbTtr4LG/uxHKYflkCYq09kBoC3Y/qfsfiMtUEDZbFYu 9 | WWPz03dJT/48EODypwdQ1moxVdZa6JCc+for+D9PRytYMylZYZto+TJfl+ftjUY3 10 | eHPnTxcemv2WXAsK2KuGOBviM0rh1t6/SgzKOEEAVX8HP78p15MgR1JbgoZXM6Lu 11 | 3goga9PwYSSbqhe1CiNDcHYm+jY3yF6OqDph4W6/7oeKkyCYkJWRQT5yUn30WJEv 12 | dFr3NySyiOMcOARcTWz1cQaRI9ppvJQBKevGE9jMh8VWyYX4nxtEK0pz9418YfsT 13 | fe1lmw+JAoGBAP1QnU0UTTvSY5KBcxGqxkLS5gNNNN5JznoboCciHehLyKGb9P0h 14 | p982IBTEyVt0lbh5EdnOGB9RLGMiDDxJLEEVlwXqyVa1AnM1eocvdoYEjGsWa/nH 15 | hCaD+S3mdRMizdvOa9no/RbUVi7HWIj6nq33YOwi2/HXJTg7GfgMcIfjAoGBAPvZ 16 | 4MpxkSWkJW5DspYxrTRlqF+VJK4PDqf/cfAX4RalXX51FmSqK8cfdxP7nOHtk5BA 17 | btA4w1vtelVXFlAuyx74NntJFUD1qK+AiJ8g9OGw6O2bvHaIyEFfyA5RQ42Oci8S 18 | p6XKbB9D0D0/xSGgClGeIGiN4Uu+hQ7/zwFUp2JTAoGAH0MLpq+SuIdGcpY3NT72 19 | KNkOOeoVVDmXmco/AZVcS4bOw+MTYQn+RbXQT7/ZecfxkkqewVqF75d8+t8A04/U 20 | PlQDx18qaeD85Gr9LCHXZsK48Z5axQCydm5jGh8O48Wv0iivdr0iPucFm9BORNlp 21 | K98TvxCuRDJhDYd9tPcUDDECgYEAgZMJWl6vpMQdFevyl9+bE1gyFmsvYfYRPlHo 22 | lvRSNEqEHam0Cbqbp2kC5N8RShPnVdIwsSRjLoqx4/JYYoyQqLBfp3JkYvWBAjqJ 23 | EAXut6dDT+xOPFYHYcsbrHIW5V+iU6Gva1cH5dYH71s92xzx8if+2IwWm9e7sYnf 24 | QPoC3E0CgYBa+fHpt4st40Y48EAKkZmuGfZtmjAnx8auN/L8UCt1xQFpSCQ5adi/ 25 | 8xSUf0BB1no78PMqTw/iB+SEewvgt2rMESBHAKzl48W6kz+oESQBlmFO7gyG1/5a 26 | cmoiItavkDOKIEbUTGS2gkhFiyXRoXH4Ex81ImVgzalFAcOgsgD4rA== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | kafka: 4 | image: vinsdocker/kafka 5 | container_name: kafka 6 | ports: 7 | - "9092:9092" 8 | environment: 9 | KAFKA_CLUSTER_ID: OTMwNzFhYTY1ODNiNGE5OT 10 | KAFKA_OPTS: "-Djava.security.auth.login.config=/kafka/config/jaas.conf" 11 | volumes: 
12 | - ./props/security.properties:/kafka/config/kraft/server.properties 13 | - ./props/jaas.conf:/kafka/config/jaas.conf 14 | - ./data:/tmp/kafka-logs 15 | - ./props/consumer.properties:/learning/consumer.properties 16 | - ./certs:/certs -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/generate-certs/create-certificates.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # CA create private key and root CA certificate 4 | openssl genrsa -out root.key 5 | openssl req -new -x509 -key root.key -out root.crt -subj "/CN=localhost" -nodes 6 | 7 | # keystore 8 | keytool -keystore kafka.keystore.jks -storepass changeit -alias localhost -validity 3650 -genkey -keyalg RSA -dname "CN=localhost" 9 | 10 | # create CSR (certificate signing request) 11 | keytool -keystore kafka.keystore.jks -storepass changeit -alias localhost -certreq -file kafka-signing-request.crt 12 | 13 | # CA signs the cerificate 14 | openssl x509 -req -CA root.crt -CAkey root.key -in kafka-signing-request.crt -out kafka-signed.crt -days 3650 -CAcreateserial 15 | 16 | # We can import root CA cert & our signed certificate 17 | # This should be private and owned by the server 18 | keytool -keystore kafka.keystore.jks -storepass changeit -alias CARoot -import -file root.crt -noprompt 19 | keytool -keystore kafka.keystore.jks -storepass changeit -alias localhost -import -file kafka-signed.crt -noprompt 20 | 21 | # This is for clients 22 | keytool -keystore kafka.truststore.jks -storepass changeit -noprompt -alias CARoot -import -file root.crt 23 | 24 | # move all these files to certs directory 25 | mkdir -p ../certs 26 | mv *.crt ../certs 27 | mv *.jks ../certs 28 | mv *.key ../certs -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/props/consumer.properties: -------------------------------------------------------------------------------- 1 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required serviceName="Kafka" username="client" password="client-secret"; 2 | security.protocol=SASL_SSL 3 | sasl.mechanism=PLAIN 4 | ssl.truststore.location=/certs/kafka.truststore.jks 5 | ssl.truststore.password=changeit -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/props/jaas.conf: -------------------------------------------------------------------------------- 1 | KafkaServer { 2 | org.apache.kafka.common.security.plain.PlainLoginModule required 3 | username="admin" 4 | password="secret" 5 | user_admin="secret" 6 | user_client="client-secret"; 7 | }; -------------------------------------------------------------------------------- /01-workspace/06-kafka-security-sasl-ssl/props/security.properties: -------------------------------------------------------------------------------- 1 | process.roles=broker,controller 2 | node.id=1 3 | listeners=SASL_SSL://:9092,CONTROLLER://:9093 4 | controller.listener.names=CONTROLLER 5 | advertised.listeners=SASL_SSL://localhost:9092 6 | inter.broker.listener.name=SASL_SSL 7 | controller.quorum.voters=1@kafka:9093 8 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL 9 | auto.create.topics.enable=false 10 | offsets.topic.replication.factor=1 11 | transaction.state.log.replication.factor=1 12 | transaction.state.log.min.isr=1 13 | 14 | 
sasl.enabled.mechanisms=PLAIN 15 | sasl.mechanism.controller.protocol=PLAIN 16 | sasl.mechanism.inter.broker.protocol=PLAIN 17 | 18 | ssl.keystore.location=/certs/kafka.keystore.jks 19 | ssl.keystore.password=changeit 20 | ssl.truststore.location=/certs/kafka.truststore.jks 21 | ssl.truststore.password=changeit -------------------------------------------------------------------------------- /02-reactive-kafka-playground/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/02-reactive-kafka-playground/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /02-reactive-kafka-playground/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.7/apache-maven-3.8.7-bin.zip 18 | wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar 19 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. 
You may obtain a copy of the License at 10 | # 11 | # https://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ] ; then 38 | 39 | if [ -f /usr/local/etc/mavenrc ] ; then 40 | . /usr/local/etc/mavenrc 41 | fi 42 | 43 | if [ -f /etc/mavenrc ] ; then 44 | . /etc/mavenrc 45 | fi 46 | 47 | if [ -f "$HOME/.mavenrc" ] ; then 48 | . "$HOME/.mavenrc" 49 | fi 50 | 51 | fi 52 | 53 | # OS specific support. $var _must_ be set to either true or false. 54 | cygwin=false; 55 | darwin=false; 56 | mingw=false 57 | case "`uname`" in 58 | CYGWIN*) cygwin=true ;; 59 | MINGW*) mingw=true;; 60 | Darwin*) darwin=true 61 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 62 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 63 | if [ -z "$JAVA_HOME" ]; then 64 | if [ -x "/usr/libexec/java_home" ]; then 65 | export JAVA_HOME="`/usr/libexec/java_home`" 66 | else 67 | export JAVA_HOME="/Library/Java/Home" 68 | fi 69 | fi 70 | ;; 71 | esac 72 | 73 | if [ -z "$JAVA_HOME" ] ; then 74 | if [ -r /etc/gentoo-release ] ; then 75 | JAVA_HOME=`java-config --jre-home` 76 | fi 77 | fi 78 | 79 | if [ -z "$M2_HOME" ] ; then 80 | ## resolve links - $0 may be a link to maven's home 81 | PRG="$0" 82 | 83 | # need this for relative symlinks 84 | while [ -h "$PRG" ] ; do 85 | ls=`ls -ld "$PRG"` 86 | link=`expr "$ls" : '.*-> \(.*\)$'` 87 | if expr "$link" : '/.*' > /dev/null; then 88 | PRG="$link" 89 | else 90 | PRG="`dirname "$PRG"`/$link" 91 | fi 92 | done 93 | 94 | saveddir=`pwd` 95 | 96 | M2_HOME=`dirname "$PRG"`/.. 
97 | 98 | # make it fully qualified 99 | M2_HOME=`cd "$M2_HOME" && pwd` 100 | 101 | cd "$saveddir" 102 | # echo Using m2 at $M2_HOME 103 | fi 104 | 105 | # For Cygwin, ensure paths are in UNIX format before anything is touched 106 | if $cygwin ; then 107 | [ -n "$M2_HOME" ] && 108 | M2_HOME=`cygpath --unix "$M2_HOME"` 109 | [ -n "$JAVA_HOME" ] && 110 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 111 | [ -n "$CLASSPATH" ] && 112 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"` 113 | fi 114 | 115 | # For Mingw, ensure paths are in UNIX format before anything is touched 116 | if $mingw ; then 117 | [ -n "$M2_HOME" ] && 118 | M2_HOME="`(cd "$M2_HOME"; pwd)`" 119 | [ -n "$JAVA_HOME" ] && 120 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" 121 | fi 122 | 123 | if [ -z "$JAVA_HOME" ]; then 124 | javaExecutable="`which javac`" 125 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then 126 | # readlink(1) is not available as standard on Solaris 10. 127 | readLink=`which readlink` 128 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then 129 | if $darwin ; then 130 | javaHome="`dirname \"$javaExecutable\"`" 131 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" 132 | else 133 | javaExecutable="`readlink -f \"$javaExecutable\"`" 134 | fi 135 | javaHome="`dirname \"$javaExecutable\"`" 136 | javaHome=`expr "$javaHome" : '\(.*\)/bin'` 137 | JAVA_HOME="$javaHome" 138 | export JAVA_HOME 139 | fi 140 | fi 141 | fi 142 | 143 | if [ -z "$JAVACMD" ] ; then 144 | if [ -n "$JAVA_HOME" ] ; then 145 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 146 | # IBM's JDK on AIX uses strange locations for the executables 147 | JAVACMD="$JAVA_HOME/jre/sh/java" 148 | else 149 | JAVACMD="$JAVA_HOME/bin/java" 150 | fi 151 | else 152 | JAVACMD="`\\unset -f command; \\command -v java`" 153 | fi 154 | fi 155 | 156 | if [ ! -x "$JAVACMD" ] ; then 157 | echo "Error: JAVA_HOME is not defined correctly." >&2 158 | echo " We cannot execute $JAVACMD" >&2 159 | exit 1 160 | fi 161 | 162 | if [ -z "$JAVA_HOME" ] ; then 163 | echo "Warning: JAVA_HOME environment variable is not set." 164 | fi 165 | 166 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 167 | 168 | # traverses directory structure from process work directory to filesystem root 169 | # first directory with .mvn subdirectory is considered project base directory 170 | find_maven_basedir() { 171 | 172 | if [ -z "$1" ] 173 | then 174 | echo "Path not specified to find_maven_basedir" 175 | return 1 176 | fi 177 | 178 | basedir="$1" 179 | wdir="$1" 180 | while [ "$wdir" != '/' ] ; do 181 | if [ -d "$wdir"/.mvn ] ; then 182 | basedir=$wdir 183 | break 184 | fi 185 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 186 | if [ -d "${wdir}" ]; then 187 | wdir=`cd "$wdir/.."; pwd` 188 | fi 189 | # end of workaround 190 | done 191 | echo "${basedir}" 192 | } 193 | 194 | # concatenates all lines of a file 195 | concat_lines() { 196 | if [ -f "$1" ]; then 197 | echo "$(tr -s '\n' ' ' < "$1")" 198 | fi 199 | } 200 | 201 | BASE_DIR=`find_maven_basedir "$(pwd)"` 202 | if [ -z "$BASE_DIR" ]; then 203 | exit 1; 204 | fi 205 | 206 | ########################################################################################## 207 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 208 | # This allows using the maven wrapper in projects that prohibit checking in binary data. 
209 | ########################################################################################## 210 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then 211 | if [ "$MVNW_VERBOSE" = true ]; then 212 | echo "Found .mvn/wrapper/maven-wrapper.jar" 213 | fi 214 | else 215 | if [ "$MVNW_VERBOSE" = true ]; then 216 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 217 | fi 218 | if [ -n "$MVNW_REPOURL" ]; then 219 | jarUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 220 | else 221 | jarUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 222 | fi 223 | while IFS="=" read key value; do 224 | case "$key" in (wrapperUrl) jarUrl="$value"; break ;; 225 | esac 226 | done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" 227 | if [ "$MVNW_VERBOSE" = true ]; then 228 | echo "Downloading from: $jarUrl" 229 | fi 230 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" 231 | if $cygwin; then 232 | wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` 233 | fi 234 | 235 | if command -v wget > /dev/null; then 236 | if [ "$MVNW_VERBOSE" = true ]; then 237 | echo "Found wget ... using wget" 238 | fi 239 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 240 | wget "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" 241 | else 242 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" 243 | fi 244 | elif command -v curl > /dev/null; then 245 | if [ "$MVNW_VERBOSE" = true ]; then 246 | echo "Found curl ... using curl" 247 | fi 248 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 249 | curl -o "$wrapperJarPath" "$jarUrl" -f 250 | else 251 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f 252 | fi 253 | 254 | else 255 | if [ "$MVNW_VERBOSE" = true ]; then 256 | echo "Falling back to using Java to download" 257 | fi 258 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" 259 | # For Cygwin, switch paths to Windows format before running javac 260 | if $cygwin; then 261 | javaClass=`cygpath --path --windows "$javaClass"` 262 | fi 263 | if [ -e "$javaClass" ]; then 264 | if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 265 | if [ "$MVNW_VERBOSE" = true ]; then 266 | echo " - Compiling MavenWrapperDownloader.java ..." 267 | fi 268 | # Compiling the Java class 269 | ("$JAVA_HOME/bin/javac" "$javaClass") 270 | fi 271 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 272 | # Running the downloader 273 | if [ "$MVNW_VERBOSE" = true ]; then 274 | echo " - Running MavenWrapperDownloader.java ..." 
275 | fi 276 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") 277 | fi 278 | fi 279 | fi 280 | fi 281 | ########################################################################################## 282 | # End of extension 283 | ########################################################################################## 284 | 285 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 286 | if [ "$MVNW_VERBOSE" = true ]; then 287 | echo $MAVEN_PROJECTBASEDIR 288 | fi 289 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 290 | 291 | # For Cygwin, switch paths to Windows format before running java 292 | if $cygwin; then 293 | [ -n "$M2_HOME" ] && 294 | M2_HOME=`cygpath --path --windows "$M2_HOME"` 295 | [ -n "$JAVA_HOME" ] && 296 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` 297 | [ -n "$CLASSPATH" ] && 298 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"` 299 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 300 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` 301 | fi 302 | 303 | # Provide a "standardized" way to retrieve the CLI args that will 304 | # work with both Windows and non-Windows executions. 305 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 306 | export MAVEN_CMD_LINE_ARGS 307 | 308 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 309 | 310 | exec "$JAVACMD" \ 311 | $MAVEN_OPTS \ 312 | $MAVEN_DEBUG_OPTS \ 313 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 314 | "-Dmaven.home=${M2_HOME}" \ 315 | "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 316 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 317 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM https://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. 
to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %* 50 | if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %* 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 124 | 125 | FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 
159 | set MAVEN_CMD_LINE_ARGS=%*
160 |
161 | %MAVEN_JAVA_EXE% ^
162 |   %JVM_CONFIG_MAVEN_PROPS% ^
163 |   %MAVEN_OPTS% ^
164 |   %MAVEN_DEBUG_OPTS% ^
165 |   -classpath %WRAPPER_JAR% ^
166 |   "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^
167 |   %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
168 | if ERRORLEVEL 1 goto error
169 | goto end
170 |
171 | :error
172 | set ERROR_CODE=1
173 |
174 | :end
175 | @endlocal & set ERROR_CODE=%ERROR_CODE%
176 |
177 | if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost
178 | @REM check for post script, once with legacy .bat ending and once with .cmd ending
179 | if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat"
180 | if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd"
181 | :skipRcPost
182 |
183 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
184 | if "%MAVEN_BATCH_PAUSE%"=="on" pause
185 |
186 | if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE%
187 |
188 | cmd /C exit /B %ERROR_CODE%
189 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <parent>
6 |         <groupId>org.springframework.boot</groupId>
7 |         <artifactId>spring-boot-starter-parent</artifactId>
8 |         <version>3.5.0</version>
9 |         <relativePath/> <!-- lookup parent from repository -->
10 |     </parent>
11 |     <groupId>com.vinsguru</groupId>
12 |     <artifactId>reactive-kafka-playground</artifactId>
13 |     <version>0.0.1-SNAPSHOT</version>
14 |     <name>reactive-kafka-playground</name>
15 |     <description>Demo project for Spring Boot</description>
16 |     <properties>
17 |         <java.version>21</java.version>
18 |         <reactor.kafka.version>1.3.23</reactor.kafka.version>
19 |     </properties>
20 |     <dependencies>
21 |         <dependency>
22 |             <groupId>org.springframework.boot</groupId>
23 |             <artifactId>spring-boot-starter-webflux</artifactId>
24 |         </dependency>
25 |         <dependency>
26 |             <groupId>org.springframework.kafka</groupId>
27 |             <artifactId>spring-kafka</artifactId>
28 |         </dependency>
29 |         <dependency>
30 |             <groupId>io.projectreactor.kafka</groupId>
31 |             <artifactId>reactor-kafka</artifactId>
32 |             <version>${reactor.kafka.version}</version>
33 |         </dependency>
34 |         <dependency>
35 |             <groupId>org.projectlombok</groupId>
36 |             <artifactId>lombok</artifactId>
37 |             <optional>true</optional>
38 |         </dependency>
39 |         <dependency>
40 |             <groupId>org.springframework.boot</groupId>
41 |             <artifactId>spring-boot-starter-test</artifactId>
42 |             <scope>test</scope>
43 |         </dependency>
44 |         <dependency>
45 |             <groupId>io.projectreactor</groupId>
46 |             <artifactId>reactor-test</artifactId>
47 |             <scope>test</scope>
48 |         </dependency>
49 |         <dependency>
50 |             <groupId>org.springframework.kafka</groupId>
51 |             <artifactId>spring-kafka-test</artifactId>
52 |             <scope>test</scope>
53 |         </dependency>
54 |     </dependencies>
55 |
56 |     <build>
57 |         <plugins>
58 |             <plugin>
59 |                 <groupId>org.springframework.boot</groupId>
60 |                 <artifactId>spring-boot-maven-plugin</artifactId>
61 |                 <configuration>
62 |                     <excludes>
63 |                         <exclude>
64 |                             <groupId>org.projectlombok</groupId>
65 |                             <artifactId>lombok</artifactId>
66 |                         </exclude>
67 |                     </excludes>
68 |                 </configuration>
69 |             </plugin>
70 |         </plugins>
71 |     </build>
72 |
73 | </project>
74 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/ReactiveKafkaPlaygroundApplication.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 |
6 | @SpringBootApplication(scanBasePackages = "com.vinsguru.reactivekafkaplayground.sec17.${app}")
7 | public class ReactiveKafkaPlaygroundApplication {
8 |
9 |     public static void main(String[] args) {
10 |         SpringApplication.run(ReactiveKafkaPlaygroundApplication.class, args);
11 |     }
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec01/Lec01KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec01;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 | import
reactor.kafka.receiver.ReceiverRecord; 10 | 11 | import java.util.List; 12 | import java.util.Map; 13 | 14 | /* 15 | goal: to demo a simple kafka consumer using reactor kafka 16 | producer ----> kafka broker <----------> consumer 17 | 18 | topic: order-events 19 | partitions: 1 20 | log-end-offset: 15 21 | current-offset: 15 22 | 23 | */ 24 | public class Lec01KafkaConsumer { 25 | 26 | private static final Logger log = LoggerFactory.getLogger(Lec01KafkaConsumer.class); 27 | 28 | public static void main(String[] args) { 29 | 30 | var consumerConfig = Map.of( 31 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 32 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 33 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 34 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123", 35 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 36 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 37 | ); 38 | 39 | var options = ReceiverOptions.create(consumerConfig) 40 | .subscription(List.of("order-events")); 41 | 42 | KafkaReceiver.create(options) 43 | .receive() 44 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 45 | .doOnNext(r -> r.receiverOffset().acknowledge()) 46 | .subscribe(); 47 | 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec01/Lec02KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec01; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.kafka.receiver.KafkaReceiver; 8 | import reactor.kafka.receiver.ReceiverOptions; 9 | 10 | import java.util.List; 11 | import java.util.Map; 12 | import java.util.regex.Pattern; 13 | 14 | /* 15 | goal: to consume from multiple topics 16 | producer ----> kafka broker <----------> consumer 17 | */ 18 | public class Lec02KafkaConsumer { 19 | 20 | private static final Logger log = LoggerFactory.getLogger(Lec02KafkaConsumer.class); 21 | 22 | public static void main(String[] args) { 23 | 24 | var consumerConfig = Map.of( 25 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 26 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 27 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 28 | ConsumerConfig.GROUP_ID_CONFIG, "inventory-service-group", 29 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 30 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 31 | ); 32 | 33 | var options = ReceiverOptions.create(consumerConfig) 34 | .subscription(Pattern.compile("order.*")); 35 | 36 | KafkaReceiver.create(options) 37 | .receive() 38 | .doOnNext(r -> log.info("topic: {}, key: {}, value: {}", r.topic(), r.key(), r.value())) 39 | .doOnNext(r -> r.receiverOffset().acknowledge()) 40 | .subscribe(); 41 | 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec02/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec02; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import 
org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | goal: to demo a simple kafka producer using reactor-kafka 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | 33 | var flux = Flux.interval(Duration.ofMillis(100)) 34 | .take(100) 35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 36 | .map(pr -> SenderRecord.create(pr, pr.key())); 37 | 38 | var sender = KafkaSender.create(options); 39 | sender.send(flux) 40 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 41 | .doOnComplete(sender::close) 42 | .subscribe(); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec03/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec03; 2 | 3 | import org.apache.kafka.clients.CommonClientConfigs; 4 | import org.apache.kafka.clients.consumer.ConsumerConfig; 5 | import org.apache.kafka.common.config.SaslConfigs; 6 | import org.apache.kafka.common.serialization.StringDeserializer; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import reactor.kafka.receiver.KafkaReceiver; 10 | import reactor.kafka.receiver.ReceiverOptions; 11 | 12 | import java.util.List; 13 | import java.util.Map; 14 | 15 | /* 16 | goal: to produce and consume 1 million events 17 | producer ----> kafka broker <----------> consumer 18 | */ 19 | public class KafkaConsumer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var consumerConfig = Map.of( 26 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 28 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 29 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123", 30 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 31 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 32 | ); 33 | 34 | var options = ReceiverOptions.create(consumerConfig) 35 | .subscription(List.of("order-events")); 36 | 37 | KafkaReceiver.create(options) 38 | .receive() 39 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 40 | .doOnNext(r -> r.receiverOffset().acknowledge()) 41 | .subscribe(); 42 | 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec03/KafkaProducer.java: 
-------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec03; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | goal: to demo back pressure using max in flight for a reactive producer. 18 | producer could be a confusing term & it depends on the context. 19 | */ 20 | public class KafkaProducer { 21 | 22 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 23 | 24 | public static void main(String[] args) { 25 | 26 | var producerConfig = Map.of( 27 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 28 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 29 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 30 | ); 31 | 32 | var options = SenderOptions.create(producerConfig).maxInFlight(10_000); 33 | 34 | var flux = Flux.range(1, 1_000_000) 35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 36 | .map(pr -> SenderRecord.create(pr, pr.key())); 37 | 38 | var start = System.currentTimeMillis(); 39 | var sender = KafkaSender.create(options); 40 | sender.send(flux) 41 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 42 | .doOnComplete(() -> { 43 | log.info("Total time taken: {} ms", (System.currentTimeMillis() - start)); 44 | sender.close(); 45 | }) 46 | .subscribe(); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec04/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec04; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.kafka.receiver.KafkaReceiver; 8 | import reactor.kafka.receiver.ReceiverOptions; 9 | 10 | import java.util.List; 11 | import java.util.Map; 12 | 13 | /* 14 | goal: to consume header info 15 | producer ----> kafka broker <----------> consumer 16 | */ 17 | public class KafkaConsumer { 18 | 19 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 20 | 21 | public static void main(String[] args) { 22 | 23 | var consumerConfig = Map.of( 24 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 25 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 26 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 27 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123", 28 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 29 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 30 | ); 31 | 32 | var options = ReceiverOptions.create(consumerConfig) 33 | .subscription(List.of("order-events")); 34 | 35 | KafkaReceiver.create(options) 36 | .receive() 37 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 38 | .doOnNext(r -> r.headers().forEach(h -> 
log.info("header key: {}, value: {}", h.key(), new String(h.value())))) 39 | .doOnNext(r -> r.receiverOffset().acknowledge()) 40 | .subscribe(); 41 | 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec04/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec04; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.header.internals.RecordHeaders; 6 | import org.apache.kafka.common.serialization.StringSerializer; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import reactor.core.publisher.Flux; 10 | import reactor.kafka.sender.KafkaSender; 11 | import reactor.kafka.sender.SenderOptions; 12 | import reactor.kafka.sender.SenderRecord; 13 | 14 | import java.util.Map; 15 | 16 | /* 17 | goal: to produce records along with headers 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | 33 | var flux = Flux.range(1, 10).map(KafkaProducer::createSenderRecord); 34 | 35 | var sender = KafkaSender.create(options); 36 | sender.send(flux) 37 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 38 | .doOnComplete(sender::close) 39 | .subscribe(); 40 | } 41 | 42 | private static SenderRecord createSenderRecord(Integer i){ 43 | var headers = new RecordHeaders(); 44 | headers.add("client-id", "some-client".getBytes()); 45 | headers.add("tracing-id", "123".getBytes()); 46 | var pr = new ProducerRecord<>("order-events", null, i.toString(), "order-"+i, headers); 47 | return SenderRecord.create(pr, pr.key()); 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec05/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec05; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.kafka.receiver.KafkaReceiver; 8 | import reactor.kafka.receiver.ReceiverOptions; 9 | 10 | import java.util.List; 11 | import java.util.Map; 12 | 13 | /* 14 | goal: to demo partition re-balancing. 
Ensure that topic has multiple partitions 15 | */ 16 | public class KafkaConsumer { 17 | 18 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 19 | 20 | public static void start(String instanceId) { 21 | 22 | var consumerConfig = Map.of( 23 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 24 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 25 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 26 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123", 27 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 28 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, instanceId 29 | ); 30 | 31 | var options = ReceiverOptions.create(consumerConfig) 32 | .subscription(List.of("order-events")); 33 | 34 | KafkaReceiver.create(options) 35 | .receive() 36 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 37 | .doOnNext(r -> r.receiverOffset().acknowledge()) 38 | .subscribe(); 39 | 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec05/KafkaConsumerGroup.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec05; 2 | 3 | /* 4 | Ensure that topic has multiple partitions 5 | */ 6 | public class KafkaConsumerGroup { 7 | 8 | private static class Consumer1{ 9 | public static void main(String[] args) { 10 | KafkaConsumer.start("1"); 11 | } 12 | } 13 | 14 | private static class Consumer2{ 15 | public static void main(String[] args) { 16 | KafkaConsumer.start("2"); 17 | } 18 | } 19 | 20 | private static class Consumer3{ 21 | public static void main(String[] args) { 22 | KafkaConsumer.start("3"); 23 | } 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec05/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec05; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | goal: to demo partition re-balancing. 
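       note: the records below are keyed (0, 1, 2, ...), so Kafka's default partitioner
       spreads them across the topic's partitions by hashing the key; with a single-partition
       topic all records would land on one partition, every group member except one would sit
       idle, and no re-balancing behaviour would be visible.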
Ensure that topic has multiple partitions 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | 33 | var flux = Flux.interval(Duration.ofMillis(50)) 34 | .take(10_000) 35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 36 | .map(pr -> SenderRecord.create(pr, pr.key())); 37 | 38 | var sender = KafkaSender.create(options); 39 | sender.send(flux) 40 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 41 | .doOnComplete(sender::close) 42 | .subscribe(); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec06/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec06; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.clients.consumer.CooperativeStickyAssignor; 5 | import org.apache.kafka.clients.consumer.RangeAssignor; 6 | import org.apache.kafka.common.serialization.StringDeserializer; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import reactor.kafka.receiver.KafkaReceiver; 10 | import reactor.kafka.receiver.ReceiverOptions; 11 | 12 | import java.util.List; 13 | import java.util.Map; 14 | 15 | /* 16 | goal: to demo partition re-balancing strategy. Ensure that topic has multiple partitions 17 | */ 18 | public class KafkaConsumer { 19 | 20 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 21 | 22 | public static void start(String instanceId) { 23 | 24 | var consumerConfig = Map.of( 25 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 26 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 27 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 28 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123", 29 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 30 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, instanceId//, 31 | // ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, RangeAssignorCooperativeStickyAssignor.class.getName() 32 | ); 33 | 34 | var options = ReceiverOptions.create(consumerConfig) 35 | .addAssignListener(c -> { 36 | c.forEach(r -> log.info("assigned {}", r.position())); 37 | c.stream() 38 | .filter(r -> r.topicPartition().partition() == 2) 39 | .findFirst() 40 | .ifPresent(r -> r.seek(r.position() - 2)); // seek value can not be -ve. 
ensure before setting 41 | }) 42 | .subscription(List.of("order-events")); 43 | 44 | KafkaReceiver.create(options) 45 | .receive() 46 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 47 | .doOnNext(r -> r.receiverOffset().acknowledge()) 48 | .subscribe(); 49 | 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec06/KafkaConsumerGroup.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec06; 2 | 3 | /* 4 | Ensure that topic has multiple partitions 5 | */ 6 | public class KafkaConsumerGroup { 7 | 8 | /* 9 | RangeAssignor 10 | 0,1,2 11 | 1,2,3 12 | */ 13 | 14 | private static class Consumer1{ 15 | public static void main(String[] args) { 16 | KafkaConsumer.start("1"); 17 | //0 18 | } 19 | } 20 | 21 | private static class Consumer2{ 22 | public static void main(String[] args) { 23 | KafkaConsumer.start("2"); 24 | // 2 25 | } 26 | } 27 | 28 | private static class Consumer3{ 29 | public static void main(String[] args) { 30 | KafkaConsumer.start("3"); 31 | // 1 32 | } 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec07/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec07; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.kafka.receiver.KafkaReceiver; 8 | import reactor.kafka.receiver.ReceiverOptions; 9 | 10 | import java.util.List; 11 | import java.util.Map; 12 | 13 | /* 14 | goal: to seek offset 15 | */ 16 | public class KafkaConsumer { 17 | 18 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 19 | 20 | public static void main(String[] args) { 21 | 22 | var consumerConfig = Map.of( 23 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 24 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 25 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 26 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123", 27 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 28 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 29 | ); 30 | 31 | var options = ReceiverOptions.create(consumerConfig) 32 | .addAssignListener(c -> { 33 | c.forEach(r -> log.info("assigned {}", r.position())); 34 | c.stream() 35 | .filter(r -> r.topicPartition().partition() == 2) 36 | .findFirst() 37 | .ifPresent(r -> r.seek(r.position() - 2)); // seek value can not be -ve. 
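                            // (what the seek above does: r.position() is the next offset that
                            //  would be fetched for the assigned partition, so position() - 2
                            //  rewinds partition 2 by two records each time it is assigned;
                            //  the consumer rejects a negative seek target, so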
ensure before setting 38 | }) 39 | .subscription(List.of("order-events")); 40 | 41 | KafkaReceiver.create(options) 42 | .receive() 43 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 44 | .doOnNext(r -> r.receiverOffset().acknowledge()) 45 | .subscribe(); 46 | 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec08/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec08; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.kafka.receiver.KafkaReceiver; 8 | import reactor.kafka.receiver.ReceiverOptions; 9 | 10 | import java.util.List; 11 | import java.util.Map; 12 | 13 | /* 14 | goal: Cluster demo - to produce and consume events with 3 replicas 15 | */ 16 | public class KafkaConsumer { 17 | 18 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 19 | 20 | public static void main(String[] args) { 21 | 22 | var consumerConfig = Map.of( 23 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8081", 24 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 25 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 26 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 27 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 28 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 29 | ); 30 | 31 | var options = ReceiverOptions.create(consumerConfig) 32 | .subscription(List.of("order-events")); 33 | 34 | KafkaReceiver.create(options) 35 | .receive() 36 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 37 | .doOnNext(r -> r.receiverOffset().acknowledge()) 38 | .subscribe(); 39 | 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec08/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec08; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | goal: Cluster demo - to produce and consume events with 3 replicas 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8081", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | var flux = Flux.interval(Duration.ofMillis(50)) 33 | .take(10_000) 34 | .map(i -> new 
ProducerRecord<>("order-events", i.toString(), "order-"+i)) 35 | .map(pr -> SenderRecord.create(pr, pr.key())); 36 | 37 | var sender = KafkaSender.create(options); 38 | sender.send(flux) 39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 40 | .doOnComplete(sender::close) 41 | .subscribe(); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec09/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec09; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.apache.kafka.common.serialization.StringDeserializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.core.publisher.Mono; 10 | import reactor.kafka.receiver.KafkaReceiver; 11 | import reactor.kafka.receiver.ReceiverOptions; 12 | 13 | import java.time.Duration; 14 | import java.util.List; 15 | import java.util.Map; 16 | 17 | /* 18 | goal: receiveAutoAck with concatMap 19 | */ 20 | public class KafkaConsumer { 21 | 22 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 23 | 24 | public static void main(String[] args) { 25 | 26 | var consumerConfig = Map.of( 27 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 28 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 29 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 30 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 31 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 32 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1", 33 | ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 3 34 | ); 35 | 36 | var options = ReceiverOptions.create(consumerConfig) 37 | .commitInterval(Duration.ofSeconds(1)) 38 | .subscription(List.of("order-events")); 39 | 40 | KafkaReceiver.create(options) 41 | .receiveAutoAck() 42 | .log() 43 | .concatMap(KafkaConsumer::batchProcess) 44 | .subscribe(); 45 | 46 | } 47 | 48 | private static Mono batchProcess(Flux> flux){ 49 | return flux 50 | .doFirst(() -> log.info("----------------")) 51 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 52 | .then(Mono.delay(Duration.ofSeconds(1))) 53 | .then(); 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec09/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec09; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | goal: receiveAutoAck with concatMap 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) 
{ 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | 33 | var flux = Flux.range(1, 100) 34 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 35 | .map(pr -> SenderRecord.create(pr, pr.key())); 36 | 37 | var sender = KafkaSender.create(options); 38 | sender.send(flux) 39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 40 | .doOnComplete(sender::close) 41 | .subscribe(); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec10/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec10; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.apache.kafka.common.serialization.StringDeserializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.core.publisher.Mono; 10 | import reactor.core.scheduler.Schedulers; 11 | import reactor.kafka.receiver.KafkaReceiver; 12 | import reactor.kafka.receiver.ReceiverOptions; 13 | 14 | import java.time.Duration; 15 | import java.util.List; 16 | import java.util.Map; 17 | 18 | /* 19 | goal: receiveAutoAck with flatMap - parallel 20 | */ 21 | public class KafkaConsumer { 22 | 23 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 24 | 25 | public static void main(String[] args) { 26 | 27 | var consumerConfig = Map.of( 28 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 29 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 30 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 31 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 32 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 33 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1", 34 | ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 3 35 | ); 36 | 37 | var options = ReceiverOptions.create(consumerConfig) 38 | .commitInterval(Duration.ofSeconds(1)) 39 | .subscription(List.of("order-events")); 40 | 41 | KafkaReceiver.create(options) 42 | .receiveAutoAck() 43 | .log() 44 | .flatMap(KafkaConsumer::batchProcess) 45 | .subscribe(); 46 | 47 | } 48 | 49 | private static Mono batchProcess(Flux> flux){ 50 | return flux 51 | .publishOn(Schedulers.boundedElastic()) // just for demo 52 | .doFirst(() -> log.info("----------------")) 53 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 54 | .then(Mono.delay(Duration.ofSeconds(1))) 55 | .then(); 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec10/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec10; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import 
org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | goal: receiveAutoAck with flatMap - parallel 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | 33 | var flux = Flux.range(1, 100) 34 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 35 | .map(pr -> SenderRecord.create(pr, pr.key())); 36 | 37 | var sender = KafkaSender.create(options); 38 | sender.send(flux) 39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 40 | .doOnComplete(sender::close) 41 | .subscribe(); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec11/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec11; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.apache.kafka.common.serialization.StringDeserializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.core.publisher.GroupedFlux; 10 | import reactor.core.publisher.Mono; 11 | import reactor.core.scheduler.Schedulers; 12 | import reactor.kafka.receiver.KafkaReceiver; 13 | import reactor.kafka.receiver.ReceiverOptions; 14 | import reactor.kafka.receiver.ReceiverRecord; 15 | 16 | import java.time.Duration; 17 | import java.util.List; 18 | import java.util.Map; 19 | 20 | /* 21 | goal: flatMap - parallel using groupBy 22 | */ 23 | public class KafkaConsumer { 24 | 25 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 26 | 27 | public static void main(String[] args) { 28 | 29 | var consumerConfig = Map.of( 30 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 31 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 32 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 33 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 34 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 35 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 36 | ); 37 | 38 | var options = ReceiverOptions.create(consumerConfig) 39 | .commitInterval(Duration.ofSeconds(1)) 40 | .subscription(List.of("order-events")); 41 | 42 | KafkaReceiver.create(options) 43 | .receive() 44 | .groupBy(r -> Integer.parseInt(r.key()) % 5) // just for demo 45 | // we can also group by r.partition() 46 | // r.key().hashCode() % 5 47 | .flatMap(KafkaConsumer::batchProcess) 48 | .subscribe(); 49 | 50 | } 51 | 52 | private static Mono batchProcess(GroupedFlux> flux){ 53 | return flux 54 | .publishOn(Schedulers.boundedElastic()) // just for demo 55 | .doFirst(() -> log.info("----------------mod: {}", flux.key())) 56 | 
.doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 57 | .doOnNext(r -> r.receiverOffset().acknowledge()) 58 | .then(); 59 | } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec11/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec11; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | goal: flatMap - parallel using groupBy 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | 33 | var flux = Flux.range(1, 100) 34 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 35 | .map(pr -> SenderRecord.create(pr, pr.key())); 36 | 37 | var sender = KafkaSender.create(options); 38 | sender.send(flux) 39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 40 | .doOnComplete(sender::close) 41 | .subscribe(); 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec12/KafkaConsumerV1.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec12; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.kafka.receiver.KafkaReceiver; 8 | import reactor.kafka.receiver.ReceiverOptions; 9 | import reactor.util.retry.Retry; 10 | 11 | import java.time.Duration; 12 | import java.util.List; 13 | import java.util.Map; 14 | 15 | /* 16 | error handling demo: a simple processing issue 17 | */ 18 | public class KafkaConsumerV1 { 19 | 20 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumerV1.class); 21 | 22 | public static void main(String[] args) { 23 | 24 | var consumerConfig = Map.of( 25 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 26 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 27 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 28 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 29 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 30 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 31 | ); 32 | 33 | var options = ReceiverOptions.create(consumerConfig) 34 | .subscription(List.of("order-events")); 35 | 36 | KafkaReceiver.create(options) 37 | .receive() 38 | .log() 
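                // note on the operator below: values look like "order-1", so index 15 does not
                // exist for most records and toCharArray()[15] throws ArrayIndexOutOfBoundsException,
                // simulating a processing failure. The error cancels the whole receive() pipeline;
                // retryWhen further down resubscribes up to 3 times (1s apart) and consumption
                // resumes from the last committed offset rather than retrying just the failed
                // record. KafkaConsumerV2/V3 below handle the error per record instead.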
39 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value().toString().toCharArray()[15])) // just for demo 40 | .doOnError(ex -> log.error(ex.getMessage())) 41 | .doOnNext(r -> r.receiverOffset().acknowledge()) 42 | .retryWhen(Retry.fixedDelay(3, Duration.ofSeconds(1))) 43 | .blockLast(); // just for demo 44 | 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec12/KafkaConsumerV2.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec12; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.core.publisher.Mono; 8 | import reactor.kafka.receiver.KafkaReceiver; 9 | import reactor.kafka.receiver.ReceiverOptions; 10 | import reactor.kafka.receiver.ReceiverRecord; 11 | import reactor.util.retry.Retry; 12 | 13 | import java.time.Duration; 14 | import java.util.List; 15 | import java.util.Map; 16 | import java.util.concurrent.ThreadLocalRandom; 17 | 18 | /* 19 | error handling demo: processing pipeline 20 | */ 21 | public class KafkaConsumerV2 { 22 | 23 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumerV2.class); 24 | 25 | public static void main(String[] args) { 26 | 27 | var consumerConfig = Map.of( 28 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 29 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 30 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 31 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 32 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 33 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 34 | ); 35 | 36 | var options = ReceiverOptions.create(consumerConfig) 37 | .subscription(List.of("order-events")); 38 | 39 | KafkaReceiver.create(options) 40 | .receive() 41 | .log() 42 | .concatMap(KafkaConsumerV2::process) 43 | .subscribe(); 44 | 45 | } 46 | 47 | private static Mono process(ReceiverRecord receiverRecord){ 48 | return Mono.just(receiverRecord) 49 | .doOnNext(r -> { 50 | var index = ThreadLocalRandom.current().nextInt(1, 10); 51 | log.info("key: {}, index: {}, value: {}", r.key(), index, r.value().toString().toCharArray()[index]); 52 | }) 53 | .retryWhen(Retry.fixedDelay(3, Duration.ofSeconds(1)).onRetryExhaustedThrow((spec, signal) -> signal.failure())) 54 | .doOnError(ex -> log.error(ex.getMessage())) 55 | .doFinally(s -> receiverRecord.receiverOffset().acknowledge()) 56 | .onErrorComplete() 57 | .then(); 58 | } 59 | 60 | 61 | 62 | } 63 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec12/KafkaConsumerV3.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec12; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | import reactor.core.publisher.Mono; 8 | import reactor.kafka.receiver.KafkaReceiver; 9 | import reactor.kafka.receiver.ReceiverOptions; 10 | import reactor.kafka.receiver.ReceiverRecord; 11 | import reactor.util.retry.Retry; 12 | 13 | import 
java.time.Duration; 14 | import java.util.List; 15 | import java.util.Map; 16 | import java.util.concurrent.ThreadLocalRandom; 17 | 18 | /* 19 | error handling demo: retry based on error / stop 20 | */ 21 | public class KafkaConsumerV3 { 22 | 23 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumerV3.class); 24 | 25 | public static void main(String[] args) { 26 | 27 | var consumerConfig = Map.of( 28 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 29 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 30 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 31 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 32 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 33 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 34 | ); 35 | 36 | var options = ReceiverOptions.create(consumerConfig) 37 | .subscription(List.of("order-events")); 38 | 39 | KafkaReceiver.create(options) 40 | .receive() 41 | .log() 42 | .concatMap(KafkaConsumerV3::process) 43 | .subscribe(); 44 | 45 | } 46 | 47 | private static Mono process(ReceiverRecord receiverRecord){ 48 | return Mono.just(receiverRecord) 49 | .doOnNext(r -> { 50 | if(r.key().toString().equals("5")) 51 | throw new RuntimeException("DB is down"); 52 | var index = ThreadLocalRandom.current().nextInt(1, 20); 53 | log.info("key: {}, index: {}, value: {}", r.key(), index, r.value().toString().toCharArray()[index]); 54 | r.receiverOffset().acknowledge(); 55 | }) 56 | .retryWhen(retrySpec()) 57 | .doOnError(ex -> log.error(ex.getMessage())) 58 | .onErrorResume(IndexOutOfBoundsException.class, ex -> Mono.fromRunnable(() -> receiverRecord.receiverOffset().acknowledge())) 59 | .then(); 60 | } 61 | 62 | private static Retry retrySpec(){ 63 | return Retry.fixedDelay(3, Duration.ofSeconds(1)) 64 | .filter(IndexOutOfBoundsException.class::isInstance) 65 | .onRetryExhaustedThrow((spec, signal) -> signal.failure()); 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec12/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec12; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.time.Duration; 14 | import java.util.Map; 15 | 16 | /* 17 | error handling demo: a simple processing issue 18 | */ 19 | public class KafkaProducer { 20 | 21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 22 | 23 | public static void main(String[] args) { 24 | 25 | var producerConfig = Map.of( 26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 29 | ); 30 | 31 | var options = SenderOptions.create(producerConfig); 32 | 33 | var flux = Flux.range(1, 2000) 34 | .delayElements(Duration.ofMillis(2)) 35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 36 | .map(pr -> 
SenderRecord.create(pr, pr.key())); 37 | 38 | var sender = KafkaSender.create(options); 39 | sender.send(flux) 40 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 41 | .doOnComplete(sender::close) 42 | .subscribe(); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec13/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec13; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.clients.producer.ProducerConfig; 5 | import org.apache.kafka.common.serialization.StringDeserializer; 6 | import org.apache.kafka.common.serialization.StringSerializer; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import reactor.kafka.receiver.KafkaReceiver; 10 | import reactor.kafka.receiver.ReceiverOptions; 11 | import reactor.kafka.sender.KafkaSender; 12 | import reactor.kafka.sender.SenderOptions; 13 | import reactor.util.retry.Retry; 14 | 15 | import java.time.Duration; 16 | import java.util.List; 17 | import java.util.Map; 18 | 19 | /* 20 | error handling demo: dead letter topic 21 | */ 22 | public class KafkaConsumer { 23 | 24 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 25 | 26 | public static void main(String[] args) { 27 | 28 | var dltProducer = deadLetterTopicProducer(); 29 | var processor = new OrderEventProcessor(dltProducer); 30 | var receiver = kafkaReceiver(); 31 | 32 | receiver.receive() 33 | .concatMap(processor::process) 34 | .subscribe(); 35 | 36 | } 37 | 38 | private static ReactiveDeadLetterTopicProducer deadLetterTopicProducer(){ 39 | var producerConfig = Map.of( 40 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 41 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 42 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 43 | ); 44 | var options = SenderOptions.create(producerConfig); 45 | var sender = KafkaSender.create(options); 46 | return new ReactiveDeadLetterTopicProducer<>( 47 | sender, 48 | Retry.fixedDelay(2, Duration.ofSeconds(1)) 49 | ); 50 | } 51 | 52 | private static KafkaReceiver kafkaReceiver(){ 53 | var consumerConfig = Map.of( 54 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 55 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 56 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 57 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 58 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 59 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 60 | ); 61 | var options = ReceiverOptions.create(consumerConfig) 62 | .subscription(List.of("order-events", "order-events-dlt")); 63 | return KafkaReceiver.create(options); 64 | } 65 | 66 | 67 | } 68 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec13/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec13; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 
| import reactor.core.publisher.Flux; 9 | import reactor.kafka.sender.KafkaSender; 10 | import reactor.kafka.sender.SenderOptions; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | import java.util.Map; 14 | 15 | /* 16 | error handling demo: dead letter topic 17 | */ 18 | public class KafkaProducer { 19 | 20 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 21 | 22 | public static void main(String[] args) { 23 | 24 | var producerConfig = Map.of( 25 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 26 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 27 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 28 | ); 29 | 30 | var options = SenderOptions.create(producerConfig); 31 | 32 | var flux = Flux.range(1, 100) 33 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 34 | .map(pr -> SenderRecord.create(pr, pr.key())); 35 | 36 | var sender = KafkaSender.create(options); 37 | sender.send(flux) 38 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 39 | .doOnComplete(sender::close) 40 | .subscribe(); 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec13/OrderEventProcessor.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec13; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import reactor.core.publisher.Mono; 6 | import reactor.kafka.receiver.ReceiverRecord; 7 | 8 | public class OrderEventProcessor { 9 | 10 | private static final Logger log = LoggerFactory.getLogger(OrderEventProcessor.class); 11 | private final ReactiveDeadLetterTopicProducer deadLetterTopicProducer; 12 | 13 | public OrderEventProcessor(ReactiveDeadLetterTopicProducer deadLetterTopicProducer) { 14 | this.deadLetterTopicProducer = deadLetterTopicProducer; 15 | } 16 | 17 | public Mono process(ReceiverRecord record){ 18 | return Mono.just(record) 19 | .doOnNext(r -> { 20 | // if(r.key().endsWith("5")) 21 | // throw new RuntimeException("processing exception"); 22 | log.info("key: {}, value: {}", r.key(), r.value()); 23 | r.receiverOffset().acknowledge(); 24 | }) 25 | .onErrorMap(ex -> new RecordProcessingException(record, ex)) 26 | .transform(this.deadLetterTopicProducer.recordProcessingErrorHandler()); 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec13/ReactiveDeadLetterTopicProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec13; 2 | 3 | import org.apache.kafka.clients.producer.ProducerRecord; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import reactor.core.publisher.Mono; 7 | import reactor.kafka.receiver.ReceiverRecord; 8 | import reactor.kafka.sender.KafkaSender; 9 | import reactor.kafka.sender.SenderRecord; 10 | import reactor.kafka.sender.SenderResult; 11 | import reactor.util.retry.Retry; 12 | 13 | import java.util.function.Function; 14 | 15 | public class ReactiveDeadLetterTopicProducer { 16 | 17 | private static final Logger log = LoggerFactory.getLogger(ReactiveDeadLetterTopicProducer.class); 18 | private final KafkaSender sender; 19 | private final Retry retrySpec; 20 | 21 | public 
ReactiveDeadLetterTopicProducer(KafkaSender sender, Retry retrySpec) { 22 | this.sender = sender; 23 | this.retrySpec = retrySpec; 24 | } 25 | 26 | public Mono> produce(ReceiverRecord record){ 27 | var sr = toSenderRecord(record); 28 | return this.sender.send(Mono.just(sr)).next(); 29 | } 30 | 31 | private SenderRecord toSenderRecord(ReceiverRecord record){ 32 | var pr = new ProducerRecord<>( 33 | record.topic() + "-dlt", 34 | record.key(), 35 | record.value() 36 | ); 37 | return SenderRecord.create(pr, pr.key()); 38 | } 39 | 40 | public Function>, Mono> recordProcessingErrorHandler(){ 41 | return mono -> mono 42 | .retryWhen(this.retrySpec) 43 | .onErrorMap(ex -> ex.getCause() instanceof RecordProcessingException, Throwable::getCause) 44 | .doOnError(ex -> log.error(ex.getMessage())) 45 | .onErrorResume(RecordProcessingException.class, ex -> this.produce(ex.getRecord()) 46 | .then(Mono.fromRunnable(() -> ex.getRecord().receiverOffset().acknowledge()))) 47 | .then(); 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec13/RecordProcessingException.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec13; 2 | 3 | import reactor.kafka.receiver.ReceiverRecord; 4 | 5 | public class RecordProcessingException extends RuntimeException{ 6 | 7 | private final ReceiverRecord record; 8 | 9 | public RecordProcessingException(ReceiverRecord record, Throwable e) { 10 | super(e); 11 | this.record = record; 12 | } 13 | 14 | @SuppressWarnings("unchecked") 15 | public ReceiverRecord getRecord() { 16 | return (ReceiverRecord) record; 17 | } 18 | 19 | } 20 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec14/KafkaConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec14; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.common.serialization.IntegerDeserializer; 5 | import org.apache.kafka.common.serialization.StringDeserializer; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer; 9 | import reactor.kafka.receiver.KafkaReceiver; 10 | import reactor.kafka.receiver.ReceiverOptions; 11 | 12 | import java.util.List; 13 | import java.util.Map; 14 | 15 | /* 16 | goal: to demo poison pill messages 17 | */ 18 | public class KafkaConsumer { 19 | 20 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class); 21 | 22 | public static void main(String[] args) { 23 | 24 | var consumerConfig = Map.of( 25 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 26 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 27 | // ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class, 28 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 29 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 30 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 31 | ); 32 | 33 | var options = ReceiverOptions.create(consumerConfig) 34 | .withValueDeserializer(errorHandlingDeserializer()) 35 | .subscription(List.of("order-events")); 36 | 37 | KafkaReceiver.create(options) 38 | .receive() 39 | .doOnNext(r -> log.info("key: {}, 
value: {}", r.key(), r.value())) 40 | .doOnNext(r -> r.receiverOffset().acknowledge()) 41 | .subscribe(); 42 | 43 | } 44 | 45 | private static ErrorHandlingDeserializer errorHandlingDeserializer(){ 46 | var deserializer = new ErrorHandlingDeserializer<>(new IntegerDeserializer()); 47 | deserializer.setFailedDeserializationFunction( 48 | info -> { 49 | log.error("failed record: {}", new String(info.getData())); 50 | return -10_000; 51 | } 52 | ); 53 | return deserializer; 54 | } 55 | 56 | 57 | } 58 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec14/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec14; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.common.serialization.IntegerSerializer; 6 | import org.apache.kafka.common.serialization.StringSerializer; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import reactor.core.publisher.Flux; 10 | import reactor.kafka.sender.KafkaSender; 11 | import reactor.kafka.sender.SenderOptions; 12 | import reactor.kafka.sender.SenderRecord; 13 | 14 | import java.time.Duration; 15 | import java.util.Map; 16 | 17 | /* 18 | goal: to demo poison pill messages 19 | */ 20 | public class KafkaProducer { 21 | 22 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 23 | 24 | public static void main(String[] args) { 25 | 26 | var producerConfig = Map.of( 27 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 28 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 29 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class 30 | ); 31 | 32 | var options = SenderOptions.create(producerConfig); 33 | 34 | var flux = Flux.range(1, 1) 35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), i)) 36 | .map(pr -> SenderRecord.create(pr, pr.key())); 37 | 38 | var sender = KafkaSender.create(options); 39 | sender.send(flux) 40 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 41 | .doOnComplete(sender::close) 42 | .subscribe(); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec15/TransferDemo.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec15; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.clients.producer.ProducerConfig; 5 | import org.apache.kafka.common.serialization.StringDeserializer; 6 | import org.apache.kafka.common.serialization.StringSerializer; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | import reactor.kafka.receiver.KafkaReceiver; 10 | import reactor.kafka.receiver.ReceiverOptions; 11 | import reactor.kafka.sender.KafkaSender; 12 | import reactor.kafka.sender.SenderOptions; 13 | 14 | import java.util.List; 15 | import java.util.Map; 16 | 17 | public class TransferDemo { 18 | 19 | private static final Logger log = LoggerFactory.getLogger(TransferDemo.class); 20 | 21 | public static void main(String[] args) { 22 | 23 | var transferEventConsumer = new TransferEventConsumer(kafkaReceiver()); 24 | var transferEventProcessor = new 
TransferEventProcessor(kafkaSender()); 25 | 26 | transferEventConsumer 27 | .receive() 28 | .transform(transferEventProcessor::process) 29 | .doOnNext(r -> log.info("transfer success: {} ", r.correlationMetadata())) 30 | .doOnError(ex -> log.error(ex.getMessage())) 31 | .subscribe(); 32 | 33 | } 34 | 35 | private static KafkaReceiver kafkaReceiver(){ 36 | var consumerConfig = Map.of( 37 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 38 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 39 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 40 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group", 41 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 42 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 43 | ); 44 | var options = ReceiverOptions.create(consumerConfig) 45 | .subscription(List.of("transfer-requests")); 46 | return KafkaReceiver.create(options); 47 | } 48 | 49 | private static KafkaSender kafkaSender(){ 50 | var producerConfig = Map.of( 51 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 52 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 53 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 54 | ProducerConfig.TRANSACTIONAL_ID_CONFIG, "money-transfer" 55 | ); 56 | var options = SenderOptions.create(producerConfig); 57 | return KafkaSender.create(options); 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec15/TransferEvent.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec15; 2 | 3 | public record TransferEvent( 4 | String key, 5 | String from, 6 | String to, 7 | String amount, 8 | Runnable acknowledge 9 | ) { 10 | } 11 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec15/TransferEventConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec15; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import reactor.core.publisher.Flux; 6 | import reactor.kafka.receiver.KafkaReceiver; 7 | import reactor.kafka.receiver.ReceiverRecord; 8 | 9 | public class TransferEventConsumer { 10 | 11 | private static final Logger log = LoggerFactory.getLogger(TransferEventConsumer.class); 12 | private final KafkaReceiver receiver; 13 | 14 | public TransferEventConsumer(KafkaReceiver receiver) { 15 | this.receiver = receiver; 16 | } 17 | 18 | public Flux receive(){ 19 | return this.receiver.receive() 20 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 21 | .map(this::toTransferEvent); 22 | } 23 | 24 | // 1:a,b,10 25 | private TransferEvent toTransferEvent(ReceiverRecord record){ 26 | var arr = record.value().split(","); 27 | var runnable = record.key().equals("6") ? 
fail() : ack(record); 28 | return new TransferEvent( 29 | record.key(), 30 | arr[0], 31 | arr[1], 32 | arr[2], 33 | runnable 34 | ); 35 | } 36 | 37 | private Runnable ack(ReceiverRecord record){ 38 | return () -> record.receiverOffset().acknowledge(); 39 | } 40 | 41 | private Runnable fail(){ 42 | return () -> { throw new RuntimeException("error while ack"); }; 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec15/TransferEventProcessor.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec15; 2 | 3 | import org.apache.kafka.clients.producer.ProducerRecord; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import reactor.core.publisher.Flux; 7 | import reactor.core.publisher.Mono; 8 | import reactor.kafka.sender.KafkaSender; 9 | import reactor.kafka.sender.SenderRecord; 10 | import reactor.kafka.sender.SenderResult; 11 | 12 | import java.time.Duration; 13 | import java.util.function.Predicate; 14 | 15 | public class TransferEventProcessor { 16 | 17 | private static final Logger log = LoggerFactory.getLogger(TransferEventProcessor.class); 18 | private final KafkaSender sender; 19 | 20 | public TransferEventProcessor(KafkaSender sender) { 21 | this.sender = sender; 22 | } 23 | 24 | public Flux> process(Flux flux){ 25 | return flux.concatMap(this::validate) 26 | .concatMap(this::sendTransaction); 27 | } 28 | 29 | private Mono> sendTransaction(TransferEvent event){ 30 | var senderRecords = this.toSenderRecords(event); 31 | var manager = this.sender.transactionManager(); 32 | return manager.begin() 33 | .then(this.sender.send(senderRecords) 34 | // delaying for demo 35 | .concatWith(Mono.delay(Duration.ofSeconds(1)).then(Mono.fromRunnable(event.acknowledge()))) 36 | .concatWith(manager.commit()) 37 | .last()) 38 | .doOnError(ex -> log.error(ex.getMessage())) 39 | .onErrorResume(ex -> manager.abort()); 40 | } 41 | 42 | // 5 does not have money to transfer 43 | private Mono validate(TransferEvent event){ 44 | return Mono.just(event) 45 | .filter(Predicate.not(e -> e.key().equals("5"))) 46 | .switchIfEmpty( 47 | Mono.fromRunnable(event.acknowledge()) 48 | .doFirst(() -> log.info("fails validation: {}", event.key())) 49 | ); 50 | } 51 | 52 | private Flux> toSenderRecords(TransferEvent event){ 53 | var pr1 = new ProducerRecord<>("transaction-events", event.key(), "%s+%s".formatted(event.to(), event.amount())); 54 | var pr2 = new ProducerRecord<>("transaction-events", event.key(), "%s-%s".formatted(event.from(), event.amount())); 55 | var sr1 = SenderRecord.create(pr1, pr1.key()); 56 | var sr2 = SenderRecord.create(pr2, pr2.key()); 57 | return Flux.just(sr1, sr2); 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec16/ConsumerRunner.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec16; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.CommandLineRunner; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaConsumerTemplate; 8 | import org.springframework.stereotype.Service; 9 | 10 | @Service 11 | public class 
ConsumerRunner implements CommandLineRunner { 12 | 13 | private static final Logger log = LoggerFactory.getLogger(ConsumerRunner.class); 14 | 15 | @Autowired 16 | private ReactiveKafkaConsumerTemplate template; 17 | 18 | @Override 19 | public void run(String... args) throws Exception { 20 | this.template.receive() 21 | // .doOnNext(r -> r.headers().forEach(h -> log.info("header key: {}, value: {}", h.key(), new String(h.value())))) 22 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 23 | .subscribe(); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec16/DummyOrder.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec16; 2 | 3 | public record DummyOrder( 4 | String orderId, 5 | String customerId 6 | ) {} 7 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec16/KafkaConsumerConfig.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec16; 2 | 3 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 4 | import org.springframework.boot.ssl.SslBundles; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.context.annotation.Configuration; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaConsumerTemplate; 8 | import org.springframework.kafka.support.serializer.JsonDeserializer; 9 | import reactor.kafka.receiver.ReceiverOptions; 10 | 11 | import java.util.List; 12 | 13 | @Configuration 14 | public class KafkaConsumerConfig { 15 | 16 | @Bean 17 | public ReceiverOptions receiverOptions(KafkaProperties kafkaProperties, SslBundles sslBundles){ 18 | return ReceiverOptions.create(kafkaProperties.buildConsumerProperties(sslBundles)) 19 | .consumerProperty(JsonDeserializer.REMOVE_TYPE_INFO_HEADERS, "false") 20 | .consumerProperty(JsonDeserializer.USE_TYPE_INFO_HEADERS, false) 21 | .consumerProperty(JsonDeserializer.VALUE_DEFAULT_TYPE, DummyOrder.class) 22 | .subscription(List.of("order-events")); 23 | } 24 | 25 | @Bean 26 | public ReactiveKafkaConsumerTemplate consumerTemplate(ReceiverOptions options){ 27 | return new ReactiveKafkaConsumerTemplate<>(options); 28 | } 29 | 30 | } 31 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec16/KafkaProducerConfig.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec16; 2 | 3 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 4 | import org.springframework.boot.ssl.SslBundles; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.context.annotation.Configuration; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate; 8 | import reactor.kafka.sender.SenderOptions; 9 | 10 | @Configuration 11 | public class KafkaProducerConfig { 12 | 13 | @Bean 14 | public SenderOptions senderOptions(KafkaProperties kafkaProperties, SslBundles sslBundles){ 15 | return SenderOptions.create(kafkaProperties.buildProducerProperties(sslBundles)); 16 | } 17 | 18 | @Bean 19 | public ReactiveKafkaProducerTemplate 
producerTemplate(SenderOptions options){ 20 | return new ReactiveKafkaProducerTemplate<>(options); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec16/OrderEvent.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec16; 2 | 3 | import java.time.LocalDateTime; 4 | import java.util.UUID; 5 | 6 | public record OrderEvent( 7 | UUID orderId, 8 | long customerId, 9 | LocalDateTime orderDate 10 | ) {} 11 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec16/ProducerRunner.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec16; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.CommandLineRunner; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate; 8 | import org.springframework.stereotype.Service; 9 | import reactor.core.publisher.Flux; 10 | 11 | import java.time.Duration; 12 | import java.time.LocalDateTime; 13 | import java.util.UUID; 14 | 15 | @Service 16 | public class ProducerRunner implements CommandLineRunner { 17 | 18 | private static final Logger log = LoggerFactory.getLogger(ProducerRunner.class); 19 | 20 | @Autowired 21 | private ReactiveKafkaProducerTemplate template; 22 | 23 | @Override 24 | public void run(String... args) throws Exception { 25 | // this.orderFlux() 26 | // .flatMap(oe -> this.template.send("order-events", oe.orderId().toString(), oe)) 27 | // .doOnNext(r -> log.info("result: {}", r.recordMetadata())) 28 | // .subscribe(); 29 | } 30 | 31 | private Flux orderFlux(){ 32 | return Flux.interval(Duration.ofMillis(500)) 33 | .take(1000) 34 | .map(i -> new OrderEvent( 35 | UUID.randomUUID(), 36 | i, 37 | LocalDateTime.now() 38 | )); 39 | } 40 | 41 | 42 | } 43 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec17/consumer/ConsumerRunner.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec17.consumer; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.CommandLineRunner; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaConsumerTemplate; 8 | import org.springframework.stereotype.Service; 9 | 10 | @Service 11 | public class ConsumerRunner implements CommandLineRunner { 12 | 13 | private static final Logger log = LoggerFactory.getLogger(ConsumerRunner.class); 14 | 15 | @Autowired 16 | private ReactiveKafkaConsumerTemplate template; 17 | 18 | @Override 19 | public void run(String... 
args) throws Exception { 20 | this.template.receive() 21 | // .doOnNext(r -> r.headers().forEach(h -> log.info("header key: {}, value: {}", h.key(), new String(h.value())))) 22 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 23 | .subscribe(); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec17/consumer/DummyOrder.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec17.consumer; 2 | 3 | public record DummyOrder( 4 | String orderId, 5 | String customerId 6 | ) {} 7 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec17/consumer/KafkaConsumerConfig.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec17.consumer; 2 | 3 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 4 | import org.springframework.boot.ssl.SslBundles; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.context.annotation.Configuration; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaConsumerTemplate; 8 | import org.springframework.kafka.support.serializer.JsonDeserializer; 9 | import reactor.kafka.receiver.ReceiverOptions; 10 | 11 | import java.util.List; 12 | 13 | @Configuration 14 | public class KafkaConsumerConfig { 15 | 16 | @Bean 17 | public ReceiverOptions receiverOptions(KafkaProperties kafkaProperties, SslBundles sslBundles){ 18 | return ReceiverOptions.create(kafkaProperties.buildConsumerProperties(sslBundles)) 19 | .consumerProperty(JsonDeserializer.REMOVE_TYPE_INFO_HEADERS, "false") 20 | .consumerProperty(JsonDeserializer.USE_TYPE_INFO_HEADERS, false) 21 | .consumerProperty(JsonDeserializer.VALUE_DEFAULT_TYPE, DummyOrder.class) 22 | .subscription(List.of("order-events")); 23 | } 24 | 25 | @Bean 26 | public ReactiveKafkaConsumerTemplate consumerTemplate(ReceiverOptions options){ 27 | return new ReactiveKafkaConsumerTemplate<>(options); 28 | } 29 | 30 | } 31 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec17/producer/KafkaProducerConfig.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec17.producer; 2 | 3 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 4 | import org.springframework.boot.ssl.SslBundles; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.context.annotation.Configuration; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate; 8 | import reactor.kafka.sender.SenderOptions; 9 | 10 | @Configuration 11 | public class KafkaProducerConfig { 12 | 13 | @Bean 14 | public SenderOptions senderOptions(KafkaProperties kafkaProperties, SslBundles sslBundles){ 15 | return SenderOptions.create(kafkaProperties.buildProducerProperties(sslBundles)); 16 | } 17 | 18 | @Bean 19 | public ReactiveKafkaProducerTemplate producerTemplate(SenderOptions options){ 20 | return new ReactiveKafkaProducerTemplate<>(options); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- 
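Note on the sec16 and sec17 configuration classes above: the listing shows the reactive templates and options as raw types (for example, ReactiveKafkaProducerTemplate producerTemplate(SenderOptions options)) because the generic type parameters did not survive the export. The sketch below restores them for the sec17 producer configuration; the String/OrderEvent type arguments are an assumption inferred from the String key serializer and JSON value serializer declared in application.yaml, not something stated in the listing itself.

package com.vinsguru.reactivekafkaplayground.sec17.producer;

import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.ssl.SslBundles;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate;
import reactor.kafka.sender.SenderOptions;

@Configuration
public class KafkaProducerConfig {

    // Sender options built from the spring.kafka.* properties; the SslBundles
    // argument matches the buildProducerProperties(SslBundles) overload used in the listing.
    @Bean
    public SenderOptions<String, OrderEvent> senderOptions(KafkaProperties kafkaProperties, SslBundles sslBundles) {
        return SenderOptions.create(kafkaProperties.buildProducerProperties(sslBundles));
    }

    // Reactive producer template keyed by String with OrderEvent JSON payloads (assumed types).
    @Bean
    public ReactiveKafkaProducerTemplate<String, OrderEvent> producerTemplate(SenderOptions<String, OrderEvent> options) {
        return new ReactiveKafkaProducerTemplate<>(options);
    }
}

The consumer side (the sec16 and sec17 KafkaConsumerConfig classes) presumably follows the same pattern, with ReceiverOptions and ReactiveKafkaConsumerTemplate typed to String keys and DummyOrder values, given the JsonDeserializer.VALUE_DEFAULT_TYPE setting shown above.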
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec17/producer/OrderEvent.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec17.producer; 2 | 3 | import java.time.LocalDateTime; 4 | import java.util.UUID; 5 | 6 | public record OrderEvent( 7 | UUID orderId, 8 | long customerId, 9 | LocalDateTime orderDate 10 | ) {} 11 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec17/producer/ProducerRunner.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec17.producer; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.CommandLineRunner; 7 | import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate; 8 | import org.springframework.stereotype.Service; 9 | import reactor.core.publisher.Flux; 10 | 11 | import java.time.Duration; 12 | import java.time.LocalDateTime; 13 | import java.util.UUID; 14 | 15 | @Service 16 | public class ProducerRunner implements CommandLineRunner { 17 | 18 | private static final Logger log = LoggerFactory.getLogger(ProducerRunner.class); 19 | 20 | @Autowired 21 | private ReactiveKafkaProducerTemplate template; 22 | 23 | @Override 24 | public void run(String... args) throws Exception { 25 | this.orderFlux() 26 | .flatMap(oe -> this.template.send("order-events", oe.orderId().toString(), oe)) 27 | .doOnNext(r -> log.info("result: {}", r.recordMetadata())) 28 | .subscribe(); 29 | } 30 | 31 | private Flux orderFlux(){ 32 | return Flux.interval(Duration.ofMillis(500)) 33 | .take(1000) 34 | .map(i -> new OrderEvent( 35 | UUID.randomUUID(), 36 | i, 37 | LocalDateTime.now() 38 | )); 39 | } 40 | 41 | 42 | } 43 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec18/KafkaProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground.sec18; 2 | 3 | import org.apache.kafka.clients.CommonClientConfigs; 4 | import org.apache.kafka.clients.producer.ProducerConfig; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | import org.apache.kafka.common.config.SaslConfigs; 7 | import org.apache.kafka.common.config.SslConfigs; 8 | import org.apache.kafka.common.serialization.StringSerializer; 9 | import org.slf4j.Logger; 10 | import org.slf4j.LoggerFactory; 11 | import reactor.core.publisher.Flux; 12 | import reactor.kafka.sender.KafkaSender; 13 | import reactor.kafka.sender.SenderOptions; 14 | import reactor.kafka.sender.SenderRecord; 15 | 16 | import java.nio.file.Paths; 17 | import java.time.Duration; 18 | import java.util.Map; 19 | 20 | /* 21 | goal: to demo a simple kafka producer using SASL PLAINTEXT 22 | */ 23 | public class KafkaProducer { 24 | 25 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class); 26 | 27 | public static void main(String[] args) { 28 | 29 | var producerConfig = Map.of( 30 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092", 31 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 32 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 33 | 
SaslConfigs.SASL_MECHANISM, "PLAIN", 34 | CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT", 35 | SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required serviceName=\"Kafka\" username=\"client\" password=\"client-secret\";" 36 | ); 37 | 38 | var options = SenderOptions.create(producerConfig); 39 | 40 | var flux = Flux.interval(Duration.ofMillis(100)) 41 | .take(100) 42 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 43 | .map(pr -> SenderRecord.create(pr, pr.key())); 44 | 45 | var sender = KafkaSender.create(options); 46 | sender.send(flux) 47 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 48 | .doOnComplete(sender::close) 49 | .subscribe(); 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/resources/application.yaml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | bootstrap-servers: 4 | - localhost:9092 5 | consumer: 6 | group-id: demo-group 7 | auto-offset-reset: earliest 8 | key-deserializer: org.apache.kafka.common.serialization.StringDeserializer 9 | value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer 10 | properties: 11 | "group.instance.id": "1" 12 | "spring.json.trusted.packages": "com.vinsguru.reactivekafkaplayground.sec17.consumer" 13 | producer: 14 | key-serializer: org.apache.kafka.common.serialization.StringSerializer 15 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 16 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/resources/kafka.truststore.jks: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/02-reactive-kafka-playground/src/main/resources/kafka.truststore.jks -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/test/java/com/vinsguru/reactivekafkaplayground/AbstractIT.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground; 2 | 3 | import org.apache.kafka.common.serialization.StringDeserializer; 4 | import org.apache.kafka.common.serialization.StringSerializer; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.test.context.SpringBootTest; 7 | import org.springframework.kafka.support.serializer.JsonDeserializer; 8 | import org.springframework.kafka.support.serializer.JsonSerializer; 9 | import org.springframework.kafka.test.EmbeddedKafkaBroker; 10 | import org.springframework.kafka.test.context.EmbeddedKafka; 11 | import org.springframework.kafka.test.utils.KafkaTestUtils; 12 | import 
reactor.kafka.receiver.KafkaReceiver; 13 | import reactor.kafka.receiver.ReceiverOptions; 14 | import reactor.kafka.sender.KafkaSender; 15 | import reactor.kafka.sender.SenderOptions; 16 | import reactor.kafka.sender.SenderRecord; 17 | 18 | import java.util.List; 19 | import java.util.function.UnaryOperator; 20 | 21 | @SpringBootTest 22 | @EmbeddedKafka( 23 | partitions = 1, 24 | topics = { "order-events" }, 25 | bootstrapServersProperty = "spring.kafka.bootstrapServers" 26 | ) 27 | public abstract class AbstractIT { 28 | 29 | @Autowired 30 | private EmbeddedKafkaBroker broker; 31 | 32 | protected KafkaReceiver createReceiver(String... topics){ 33 | return createReceiver(options -> 34 | options.withKeyDeserializer(new StringDeserializer()) 35 | .withValueDeserializer(new JsonDeserializer().trustedPackages("*")) 36 | .subscription(List.of(topics)) 37 | ); 38 | } 39 | 40 | protected KafkaReceiver createReceiver(UnaryOperator> builder){ 41 | var props = KafkaTestUtils.consumerProps("test-group", "true", broker); 42 | var options = ReceiverOptions.create(props); 43 | options = builder.apply(options); 44 | return KafkaReceiver.create(options); 45 | } 46 | 47 | protected KafkaSender createSender(){ 48 | return createSender(options -> 49 | options.withKeySerializer(new StringSerializer()) 50 | .withValueSerializer(new JsonSerializer()) 51 | ); 52 | } 53 | 54 | protected KafkaSender createSender(UnaryOperator> builder){ 55 | var props = KafkaTestUtils.producerProps(broker); 56 | var options = SenderOptions.create(props); 57 | options = builder.apply(options); 58 | return KafkaSender.create(options); 59 | } 60 | 61 | protected SenderRecord toSenderRecord(String topic, K key, V value){ 62 | return SenderRecord.create(topic, null, null, key, value, key); 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/test/java/com/vinsguru/reactivekafkaplayground/EmbeddedKafkaPlaygroundTests.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig; 4 | import org.apache.kafka.clients.producer.ProducerConfig; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | import org.apache.kafka.common.serialization.StringDeserializer; 7 | import org.apache.kafka.common.serialization.StringSerializer; 8 | import org.junit.jupiter.api.Test; 9 | import org.slf4j.Logger; 10 | import org.slf4j.LoggerFactory; 11 | import org.springframework.kafka.test.condition.EmbeddedKafkaCondition; 12 | import org.springframework.kafka.test.context.EmbeddedKafka; 13 | import org.springframework.kafka.test.utils.KafkaTestUtils; 14 | import reactor.core.publisher.Flux; 15 | import reactor.core.publisher.Mono; 16 | import reactor.kafka.receiver.KafkaReceiver; 17 | import reactor.kafka.receiver.ReceiverOptions; 18 | import reactor.kafka.sender.KafkaSender; 19 | import reactor.kafka.sender.SenderOptions; 20 | import reactor.kafka.sender.SenderRecord; 21 | import reactor.test.StepVerifier; 22 | 23 | import java.time.Duration; 24 | import java.util.List; 25 | import java.util.Map; 26 | 27 | @EmbeddedKafka( 28 | //ports = 9092, 29 | partitions = 1, 30 | brokerProperties = { "auto.create.topics.enable=false" }, 31 | topics = { "order-events" } 32 | ) 33 | class EmbeddedKafkaPlaygroundTests { 34 | 35 | @Test 36 | void embeddedKafkaDemo() { 37 | 38 | var brokers = 
EmbeddedKafkaCondition.getBroker().getBrokersAsString(); 39 | StepVerifier.create(Producer.run(brokers)) 40 | .verifyComplete(); 41 | 42 | StepVerifier.create(Consumer.run(brokers)) 43 | .verifyComplete(); 44 | 45 | } 46 | 47 | private static class Consumer { 48 | 49 | private static final Logger log = LoggerFactory.getLogger(Consumer.class); 50 | 51 | public static Mono run(String brokers) { 52 | 53 | var consumerConfig = Map.of( 54 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers, 55 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 56 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class, 57 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123", 58 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest", 59 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1" 60 | ); 61 | 62 | var options = ReceiverOptions.create(consumerConfig) 63 | .subscription(List.of("order-events")); 64 | 65 | return KafkaReceiver.create(options) 66 | .receive() 67 | .take(10) 68 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())) 69 | .doOnNext(r -> r.receiverOffset().acknowledge()) 70 | .then(); 71 | 72 | } 73 | } 74 | 75 | private static class Producer { 76 | 77 | private static final Logger log = LoggerFactory.getLogger(Producer.class); 78 | 79 | public static Mono run(String brokers) { 80 | 81 | var producerConfig = Map.of( 82 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers, 83 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class, 84 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class 85 | ); 86 | 87 | var options = SenderOptions.create(producerConfig); 88 | 89 | var flux = Flux.range(1, 10) 90 | .delayElements(Duration.ofMillis(10)) 91 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i)) 92 | .map(pr -> SenderRecord.create(pr, pr.key())); 93 | 94 | var sender = KafkaSender.create(options); 95 | 96 | return sender.send(flux) 97 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata())) 98 | .doOnComplete(sender::close) 99 | .then(); 100 | } 101 | } 102 | 103 | 104 | 105 | } 106 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/test/java/com/vinsguru/reactivekafkaplayground/OrderEventConsumerTest.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground; 2 | 3 | import com.vinsguru.reactivekafkaplayground.sec17.consumer.DummyOrder; 4 | import com.vinsguru.reactivekafkaplayground.sec17.producer.OrderEvent; 5 | import org.junit.jupiter.api.Assertions; 6 | import org.junit.jupiter.api.Test; 7 | import org.junit.jupiter.api.extension.ExtendWith; 8 | import org.springframework.boot.test.system.CapturedOutput; 9 | import org.springframework.boot.test.system.OutputCaptureExtension; 10 | import org.springframework.test.context.TestPropertySource; 11 | import reactor.core.publisher.Mono; 12 | import reactor.kafka.sender.KafkaSender; 13 | import reactor.test.StepVerifier; 14 | 15 | import java.time.Duration; 16 | import java.time.LocalDateTime; 17 | import java.util.UUID; 18 | 19 | @ExtendWith(OutputCaptureExtension.class) 20 | @TestPropertySource(properties = "app=consumer") 21 | public class OrderEventConsumerTest extends AbstractIT { 22 | 23 | @Test 24 | public void consumerTest(CapturedOutput output){ 25 | 26 | KafkaSender sender = createSender(); 27 | var uuid = UUID.randomUUID(); 28 | var orderEvent = new OrderEvent(uuid, 1, 
LocalDateTime.now()); 29 | var dummyOrder = new DummyOrder(uuid.toString(), "1"); 30 | var sr = toSenderRecord("order-events", "1", orderEvent); 31 | 32 | var mono = sender.send(Mono.just(sr)) 33 | .then(Mono.delay(Duration.ofMillis(500))) 34 | .then(); 35 | 36 | StepVerifier.create(mono) 37 | .verifyComplete(); 38 | 39 | Assertions.assertTrue(output.getOut().contains(dummyOrder.toString())); 40 | 41 | } 42 | 43 | 44 | } 45 | -------------------------------------------------------------------------------- /02-reactive-kafka-playground/src/test/java/com/vinsguru/reactivekafkaplayground/OrderEventProducerTest.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.reactivekafkaplayground; 2 | 3 | import com.vinsguru.reactivekafkaplayground.sec17.producer.OrderEvent; 4 | import org.junit.jupiter.api.Assertions; 5 | import org.junit.jupiter.api.Test; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import org.springframework.test.annotation.DirtiesContext; 9 | import org.springframework.test.context.TestPropertySource; 10 | import reactor.kafka.receiver.KafkaReceiver; 11 | import reactor.test.StepVerifier; 12 | 13 | import java.time.Duration; 14 | 15 | @TestPropertySource(properties = "app=producer") 16 | public class OrderEventProducerTest extends AbstractIT{ 17 | 18 | private static final Logger log = LoggerFactory.getLogger(OrderEventProducerTest.class); 19 | 20 | @Test 21 | @DirtiesContext(methodMode = DirtiesContext.MethodMode.AFTER_METHOD) 22 | public void producerTest1(){ 23 | KafkaReceiver receiver = createReceiver("order-events"); 24 | var orderEvents = receiver.receive() 25 | .take(10) 26 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())); 27 | 28 | StepVerifier.create(orderEvents) 29 | .consumeNextWith(r -> Assertions.assertNotNull(r.value().orderId())) 30 | .expectNextCount(9) 31 | .expectComplete() 32 | .verify(Duration.ofSeconds(10)); 33 | } 34 | 35 | @Test 36 | @DirtiesContext(methodMode = DirtiesContext.MethodMode.AFTER_METHOD) 37 | public void producerTest2(){ 38 | KafkaReceiver receiver = createReceiver("order-events"); 39 | var orderEvents = receiver.receive() 40 | .take(10) 41 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value())); 42 | 43 | StepVerifier.create(orderEvents) 44 | .consumeNextWith(r -> Assertions.assertNotNull(r.value().orderId())) 45 | .expectNextCount(9) 46 | .expectComplete() 47 | .verify(Duration.ofSeconds(10)); 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/03-assignment/analytics-service/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /03-assignment/analytics-service/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.7/apache-maven-3.8.7-bin.zip 18 | wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar 19 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM https://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. 
to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %* 50 | if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %* 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 124 | 125 | FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 
159 | set MAVEN_CMD_LINE_ARGS=%* 160 | 161 | %MAVEN_JAVA_EXE% ^ 162 | %JVM_CONFIG_MAVEN_PROPS% ^ 163 | %MAVEN_OPTS% ^ 164 | %MAVEN_DEBUG_OPTS% ^ 165 | -classpath %WRAPPER_JAR% ^ 166 | "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^ 167 | %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 168 | if ERRORLEVEL 1 goto error 169 | goto end 170 | 171 | :error 172 | set ERROR_CODE=1 173 | 174 | :end 175 | @endlocal & set ERROR_CODE=%ERROR_CODE% 176 | 177 | if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost 178 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 179 | if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat" 180 | if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd" 181 | :skipRcPost 182 | 183 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 184 | if "%MAVEN_BATCH_PAUSE%"=="on" pause 185 | 186 | if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE% 187 | 188 | cmd /C exit /B %ERROR_CODE% 189 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 3.5.0 9 | 10 | 11 | com.vinsguru 12 | analytics-service 13 | 0.0.1-SNAPSHOT 14 | analytics-service 15 | Demo project for Spring Boot 16 | 17 | 21 18 | 1.3.23 19 | 20 | 21 | 22 | org.springframework.boot 23 | spring-boot-starter-data-r2dbc 24 | 25 | 26 | org.springframework.boot 27 | spring-boot-starter-webflux 28 | 29 | 30 | org.springframework.kafka 31 | spring-kafka 32 | 33 | 34 | io.projectreactor.kafka 35 | reactor-kafka 36 | ${reactor.kafka.version} 37 | 38 | 39 | com.h2database 40 | h2 41 | runtime 42 | 43 | 44 | io.r2dbc 45 | r2dbc-h2 46 | runtime 47 | 48 | 49 | org.projectlombok 50 | lombok 51 | true 52 | 53 | 54 | org.springframework.boot 55 | spring-boot-starter-test 56 | test 57 | 58 | 59 | io.projectreactor 60 | reactor-test 61 | test 62 | 63 | 64 | org.springframework.kafka 65 | spring-kafka-test 66 | test 67 | 68 | 69 | 70 | 71 | 72 | 73 | org.springframework.boot 74 | spring-boot-maven-plugin 75 | 76 | 77 | 78 | org.projectlombok 79 | lombok 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/AnalyticsServiceApplication.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class AnalyticsServiceApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(AnalyticsServiceApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/config/KafkaConsumerConfig.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.config; 2 | 3 | import com.vinsguru.analyticsservice.event.ProductViewEvent; 4 | import org.springframework.beans.factory.annotation.Value; 5 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 6 | import org.springframework.boot.ssl.SslBundles; 
7 | import org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | import org.springframework.kafka.core.reactive.ReactiveKafkaConsumerTemplate; 10 | import org.springframework.kafka.support.serializer.JsonDeserializer; 11 | import reactor.kafka.receiver.ReceiverOptions; 12 | 13 | import java.util.List; 14 | 15 | @Configuration 16 | public class KafkaConsumerConfig { 17 | 18 | @Bean 19 | public ReceiverOptions receiverOptions(KafkaProperties properties, SslBundles sslBundles){ 20 | return ReceiverOptions.create(properties.buildConsumerProperties(sslBundles)) 21 | .consumerProperty(JsonDeserializer.VALUE_DEFAULT_TYPE, ProductViewEvent.class) 22 | .consumerProperty(JsonDeserializer.USE_TYPE_INFO_HEADERS, false) 23 | .subscription(List.of("product-view-events")); 24 | } 25 | 26 | @Bean 27 | public ReactiveKafkaConsumerTemplate kafkaConsumerTemplate(ReceiverOptions receiverOptions){ 28 | return new ReactiveKafkaConsumerTemplate<>(receiverOptions); 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/controller/TrendingController.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.controller; 2 | 3 | import com.vinsguru.analyticsservice.dto.ProductTrendingDto; 4 | import com.vinsguru.analyticsservice.service.ProductTrendingBroadcastService; 5 | import lombok.AllArgsConstructor; 6 | import org.springframework.http.MediaType; 7 | import org.springframework.web.bind.annotation.GetMapping; 8 | import org.springframework.web.bind.annotation.RequestMapping; 9 | import org.springframework.web.bind.annotation.RestController; 10 | import reactor.core.publisher.Flux; 11 | 12 | import java.util.List; 13 | 14 | @RestController 15 | @AllArgsConstructor 16 | @RequestMapping("trending") 17 | public class TrendingController { 18 | 19 | private final ProductTrendingBroadcastService broadcastService; 20 | 21 | @GetMapping(produces = MediaType.TEXT_EVENT_STREAM_VALUE) 22 | public Flux> trending(){ 23 | return this.broadcastService.getTrends(); 24 | } 25 | 26 | 27 | } 28 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/dto/ProductTrendingDto.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.dto; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @AllArgsConstructor 9 | @NoArgsConstructor 10 | public class ProductTrendingDto { 11 | 12 | private Integer productId; 13 | private Long viewCount; 14 | 15 | } 16 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/entity/ProductViewCount.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.entity; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import org.springframework.data.annotation.Id; 7 | import org.springframework.data.annotation.Transient; 8 | import org.springframework.data.domain.Persistable; 9 | 10 | import java.util.Objects; 11 | 12 | @Data 13 | @NoArgsConstructor 14 | @AllArgsConstructor 15 | public class 
ProductViewCount implements Persistable { 16 | 17 | @Id 18 | private Integer id; 19 | private Long count; 20 | 21 | @Transient 22 | private boolean isNew; 23 | 24 | @Override 25 | public boolean isNew() { 26 | return this.isNew || Objects.isNull(id); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/event/ProductViewEvent.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.event; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | @AllArgsConstructor 10 | public class ProductViewEvent { 11 | 12 | private Integer productId; 13 | // browser 14 | // location 15 | 16 | } 17 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/repository/ProductViewRepository.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.repository; 2 | 3 | import com.vinsguru.analyticsservice.entity.ProductViewCount; 4 | import org.springframework.data.repository.reactive.ReactiveCrudRepository; 5 | import org.springframework.stereotype.Repository; 6 | import reactor.core.publisher.Flux; 7 | 8 | @Repository 9 | public interface ProductViewRepository extends ReactiveCrudRepository { 10 | 11 | Flux findTop5ByOrderByCountDesc(); 12 | 13 | } 14 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/service/ProductTrendingBroadcastService.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.service; 2 | 3 | import com.vinsguru.analyticsservice.dto.ProductTrendingDto; 4 | import com.vinsguru.analyticsservice.repository.ProductViewRepository; 5 | import jakarta.annotation.PostConstruct; 6 | import lombok.RequiredArgsConstructor; 7 | import org.springframework.stereotype.Service; 8 | import reactor.core.publisher.Flux; 9 | 10 | import java.time.Duration; 11 | import java.util.List; 12 | import java.util.function.Predicate; 13 | 14 | @Service 15 | @RequiredArgsConstructor 16 | public class ProductTrendingBroadcastService { 17 | 18 | private final ProductViewRepository repository; 19 | private Flux> trends; 20 | 21 | @PostConstruct 22 | private void init(){ 23 | this.trends = this.repository.findTop5ByOrderByCountDesc() 24 | .map(pvc -> new ProductTrendingDto(pvc.getId(), pvc.getCount())) 25 | .collectList() 26 | .filter(Predicate.not(List::isEmpty)) 27 | .repeatWhen(l -> l.delayElements(Duration.ofSeconds(3))) 28 | .distinctUntilChanged() 29 | .cache(1); 30 | } 31 | 32 | public Flux> getTrends(){ 33 | return this.trends; 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/java/com/vinsguru/analyticsservice/service/ProductViewEventConsumer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice.service; 2 | 3 | import com.vinsguru.analyticsservice.entity.ProductViewCount; 4 | import com.vinsguru.analyticsservice.event.ProductViewEvent; 5 | import com.vinsguru.analyticsservice.repository.ProductViewRepository; 6 | 
import jakarta.annotation.PostConstruct; 7 | import lombok.AllArgsConstructor; 8 | import org.slf4j.Logger; 9 | import org.slf4j.LoggerFactory; 10 | import org.springframework.kafka.core.reactive.ReactiveKafkaConsumerTemplate; 11 | import org.springframework.stereotype.Service; 12 | import reactor.core.publisher.Mono; 13 | import reactor.kafka.receiver.ReceiverRecord; 14 | 15 | import java.time.Duration; 16 | import java.util.Collections; 17 | import java.util.List; 18 | import java.util.Map; 19 | import java.util.function.Function; 20 | import java.util.stream.Collectors; 21 | 22 | @Service 23 | @AllArgsConstructor 24 | public class ProductViewEventConsumer { 25 | 26 | private static final Logger log = LoggerFactory.getLogger(ProductViewEventConsumer.class); 27 | private final ReactiveKafkaConsumerTemplate template; 28 | private final ProductViewRepository repository; 29 | 30 | @PostConstruct 31 | public void subscribe(){ 32 | this.template 33 | .receive() 34 | .bufferTimeout(1000, Duration.ofSeconds(1)) 35 | .flatMap(this::process) 36 | .subscribe(); 37 | } 38 | 39 | private Mono process(List> events){ 40 | var eventsMap = events.stream() 41 | .map(r -> r.value().getProductId()) 42 | .collect(Collectors.groupingBy( 43 | Function.identity(), 44 | Collectors.counting() 45 | )); 46 | return this.repository.findAllById(eventsMap.keySet()) // what if there are no records 47 | .collectMap(ProductViewCount::getId) 48 | .defaultIfEmpty(Collections.emptyMap()) 49 | .map(dbMap -> eventsMap.keySet().stream().map(productId -> updateViewCount(dbMap, eventsMap, productId)).collect(Collectors.toList())) 50 | .flatMapMany(this.repository::saveAll) 51 | .doOnComplete(() -> events.get(events.size() - 1).receiverOffset().acknowledge()) 52 | .doOnError(ex -> log.error(ex.getMessage())) 53 | .then(); 54 | } 55 | 56 | private ProductViewCount updateViewCount(Map dbMap, Map eventMap, int productId){ 57 | var pvc = dbMap.getOrDefault(productId, new ProductViewCount(productId, 0L, true)); 58 | pvc.setCount(pvc.getCount() + eventMap.get(productId)); 59 | return pvc; 60 | } 61 | 62 | } 63 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | server.port=7070 2 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/resources/application.yaml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | bootstrap-servers: 4 | - localhost:9092 5 | consumer: 6 | group-id: analytics-service 7 | auto-offset-reset: earliest 8 | key-deserializer: org.apache.kafka.common.serialization.StringDeserializer 9 | value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer 10 | properties: 11 | "group.instance.id": "1" 12 | "spring.json.trusted.packages": "*" -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/resources/data.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS product_view_count; 2 | CREATE TABLE product_view_count ( 3 | id INT NOT NULL, 4 | count INT 5 | ); -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/main/resources/static/index.html: 
--------------------------------------------------------------------------------
[index.html could not be recovered: the HTML tags were stripped during extraction and the inline chart script (roughly original lines 24-67) is missing entirely. Surviving text: page title "Analytics Service"; heading "Product Analytics"; "This chart shows the top 5 products based on the view count."; "If you can see the chart, then great job. You did the assignment."]
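Since the original markup is gone, the snippet below is a minimal, hypothetical sketch of what such a page could look like: it subscribes to the /trending server-sent-events endpoint defined in TrendingController and renders the top-5 view counts. The element ids, the plain table rendering (the real page shows a chart), and the overall layout are assumptions, not the original file.

```html
<!-- Hypothetical sketch only: the original page draws a chart; element ids, layout and
     the table-based rendering below are assumptions, not the course's code. -->
<!DOCTYPE html>
<html>
<head>
    <title>Analytics Service</title>
</head>
<body>
    <h3>Product Analytics</h3>
    <p>This chart shows the top 5 products based on the view count.</p>
    <table id="trends"></table>
    <script>
        // Subscribe to the server-sent-events stream exposed by TrendingController at /trending.
        // Each event's data is a JSON array of { productId, viewCount } objects.
        const source = new EventSource('/trending');
        source.onmessage = (event) => {
            const trends = JSON.parse(event.data);
            document.getElementById('trends').innerHTML =
                '<tr><th>Product</th><th>Views</th></tr>' +
                trends.map(t => `<tr><td>product-${t.productId}</td><td>${t.viewCount}</td></tr>`).join('');
        };
    </script>
</body>
</html>
```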
23 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/test/java/com/vinsguru/analyticsservice/AbstractIntegrationTest.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice; 2 | 3 | import org.apache.kafka.common.serialization.StringDeserializer; 4 | import org.apache.kafka.common.serialization.StringSerializer; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.test.context.SpringBootTest; 7 | import org.springframework.kafka.support.serializer.JsonDeserializer; 8 | import org.springframework.kafka.support.serializer.JsonSerializer; 9 | import org.springframework.kafka.test.EmbeddedKafkaBroker; 10 | import org.springframework.kafka.test.context.EmbeddedKafka; 11 | import org.springframework.kafka.test.utils.KafkaTestUtils; 12 | import reactor.kafka.receiver.KafkaReceiver; 13 | import reactor.kafka.receiver.ReceiverOptions; 14 | import reactor.kafka.sender.KafkaSender; 15 | import reactor.kafka.sender.SenderOptions; 16 | import reactor.kafka.sender.SenderRecord; 17 | 18 | import java.util.List; 19 | import java.util.function.UnaryOperator; 20 | 21 | @SpringBootTest 22 | @EmbeddedKafka( 23 | topics = { AbstractIntegrationTest.PRODUCT_VIEW_EVENTS }, 24 | partitions = 1, 25 | bootstrapServersProperty = "spring.kafka.bootstrap-servers" 26 | ) 27 | public class AbstractIntegrationTest { 28 | 29 | protected static final String PRODUCT_VIEW_EVENTS = "product-view-events"; 30 | 31 | @Autowired 32 | private EmbeddedKafkaBroker broker; 33 | 34 | protected KafkaReceiver createReceiver(String... topics){ 35 | return createReceiver(options -> 36 | options.withKeyDeserializer(new StringDeserializer()) 37 | .withValueDeserializer(new JsonDeserializer().trustedPackages("*")) 38 | .subscription(List.of(topics)) 39 | ); 40 | } 41 | 42 | protected KafkaReceiver createReceiver(UnaryOperator> builder){ 43 | var props = KafkaTestUtils.consumerProps("test-group", "true", broker); 44 | var options = ReceiverOptions.create(props); 45 | options = builder.apply(options); 46 | return KafkaReceiver.create(options); 47 | } 48 | 49 | protected KafkaSender createSender(){ 50 | return createSender(options -> 51 | options.withKeySerializer(new StringSerializer()) 52 | .withValueSerializer(new JsonSerializer()) 53 | ); 54 | } 55 | 56 | protected KafkaSender createSender(UnaryOperator> builder){ 57 | var props = KafkaTestUtils.producerProps(broker); 58 | var options = SenderOptions.create(props); 59 | options = builder.apply(options); 60 | return KafkaSender.create(options); 61 | } 62 | 63 | protected SenderRecord toSenderRecord(String topic, K key, V value){ 64 | return SenderRecord.create(topic, null, null, key, value, key); 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /03-assignment/analytics-service/src/test/java/com/vinsguru/analyticsservice/AnalyticsServiceApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.analyticsservice; 2 | 3 | import com.vinsguru.analyticsservice.dto.ProductTrendingDto; 4 | import com.vinsguru.analyticsservice.event.ProductViewEvent; 5 | import org.junit.jupiter.api.Assertions; 6 | import org.junit.jupiter.api.Test; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import 
org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; 9 | import org.springframework.boot.test.context.SpringBootTest; 10 | import org.springframework.core.ParameterizedTypeReference; 11 | import org.springframework.http.MediaType; 12 | import org.springframework.test.web.reactive.server.WebTestClient; 13 | import reactor.core.publisher.Flux; 14 | import reactor.test.StepVerifier; 15 | 16 | import java.util.List; 17 | import java.util.stream.Collectors; 18 | import java.util.stream.IntStream; 19 | 20 | @AutoConfigureWebTestClient(timeout = "10000") // this timeout is optional 21 | class AnalyticsServiceApplicationTests extends AbstractIntegrationTest { 22 | 23 | @Autowired 24 | private WebTestClient client; 25 | 26 | @Test 27 | void trendingTest() { 28 | 29 | // emit events 30 | var events = Flux.just( 31 | createEvent(2, 2), 32 | createEvent(1, 1), 33 | createEvent(6, 3), 34 | createEvent(4, 2), 35 | createEvent(5, 5), 36 | createEvent(4, 2), 37 | createEvent(6, 3), 38 | createEvent(3, 3) 39 | ).flatMap(Flux::fromIterable) 40 | .map(e -> this.toSenderRecord(PRODUCT_VIEW_EVENTS, e.getProductId().toString(), e)); 41 | 42 | var resultFlux = this.createSender().send(events); 43 | 44 | StepVerifier.create(resultFlux) 45 | .expectNextCount(21) 46 | .verifyComplete(); 47 | 48 | // verify via trending endpoint 49 | var mono = this.client 50 | .get() 51 | .uri("/trending") 52 | .accept(MediaType.TEXT_EVENT_STREAM) 53 | .exchange() 54 | .returnResult(new ParameterizedTypeReference>() {}) 55 | .getResponseBody() 56 | .next(); 57 | 58 | StepVerifier.create(mono) 59 | .consumeNextWith(this::validateResult) 60 | .verifyComplete(); 61 | 62 | } 63 | 64 | // 6,5,4,3,2 1 65 | private void validateResult(List list){ 66 | Assertions.assertEquals(5, list.size()); 67 | Assertions.assertEquals(6, list.get(0).getProductId()); 68 | Assertions.assertEquals(6, list.get(0).getViewCount()); 69 | Assertions.assertEquals(2, list.get(4).getProductId()); 70 | Assertions.assertEquals(2, list.get(4).getViewCount()); 71 | Assertions.assertTrue(list.stream().noneMatch(p -> p.getProductId() == 1)); 72 | } 73 | 74 | private List createEvent(int productId, int count){ 75 | return IntStream.rangeClosed(1, count) 76 | .mapToObj(i -> new ProductViewEvent(productId)) 77 | .collect(Collectors.toList()); 78 | } 79 | 80 | } 81 | -------------------------------------------------------------------------------- /03-assignment/product-service/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /03-assignment/product-service/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/03-assignment/product-service/.mvn/wrapper/maven-wrapper.jar 
-------------------------------------------------------------------------------- /03-assignment/product-service/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.7/apache-maven-3.8.7-bin.zip 18 | wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar 19 | -------------------------------------------------------------------------------- /03-assignment/product-service/mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM https://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. 
to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %* 50 | if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %* 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 124 | 125 | FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 
159 | set MAVEN_CMD_LINE_ARGS=%* 160 | 161 | %MAVEN_JAVA_EXE% ^ 162 | %JVM_CONFIG_MAVEN_PROPS% ^ 163 | %MAVEN_OPTS% ^ 164 | %MAVEN_DEBUG_OPTS% ^ 165 | -classpath %WRAPPER_JAR% ^ 166 | "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^ 167 | %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 168 | if ERRORLEVEL 1 goto error 169 | goto end 170 | 171 | :error 172 | set ERROR_CODE=1 173 | 174 | :end 175 | @endlocal & set ERROR_CODE=%ERROR_CODE% 176 | 177 | if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost 178 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 179 | if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat" 180 | if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd" 181 | :skipRcPost 182 | 183 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 184 | if "%MAVEN_BATCH_PAUSE%"=="on" pause 185 | 186 | if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE% 187 | 188 | cmd /C exit /B %ERROR_CODE% 189 | -------------------------------------------------------------------------------- /03-assignment/product-service/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 3.5.0 9 | 10 | 11 | com.vinsguru 12 | product-service 13 | 0.0.1-SNAPSHOT 14 | product-service 15 | Demo project for Spring Boot 16 | 17 | 21 18 | 1.3.23 19 | 20 | 21 | 22 | org.springframework.boot 23 | spring-boot-starter-data-r2dbc 24 | 25 | 26 | org.springframework.boot 27 | spring-boot-starter-webflux 28 | 29 | 30 | org.springframework.kafka 31 | spring-kafka 32 | 33 | 34 | io.projectreactor.kafka 35 | reactor-kafka 36 | ${reactor.kafka.version} 37 | 38 | 39 | com.h2database 40 | h2 41 | runtime 42 | 43 | 44 | io.r2dbc 45 | r2dbc-h2 46 | runtime 47 | 48 | 49 | org.projectlombok 50 | lombok 51 | true 52 | 53 | 54 | org.springframework.boot 55 | spring-boot-starter-test 56 | test 57 | 58 | 59 | io.projectreactor 60 | reactor-test 61 | test 62 | 63 | 64 | org.springframework.kafka 65 | spring-kafka-test 66 | test 67 | 68 | 69 | 70 | 71 | 72 | 73 | org.springframework.boot 74 | spring-boot-maven-plugin 75 | 76 | 77 | 78 | org.projectlombok 79 | lombok 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/ProductServiceApplication.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class ProductServiceApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(ProductServiceApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/config/KafkaProducerConfig.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.config; 2 | 3 | import com.vinsguru.productservice.event.ProductViewEvent; 4 | import com.vinsguru.productservice.service.ProductViewEventProducer; 5 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 6 | import org.springframework.boot.ssl.SslBundles; 7 | import 
org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate; 10 | import reactor.core.publisher.Sinks; 11 | import reactor.kafka.sender.SenderOptions; 12 | 13 | @Configuration 14 | public class KafkaProducerConfig { 15 | 16 | @Bean 17 | public SenderOptions senderOptions(KafkaProperties properties, SslBundles sslBundles){ 18 | return SenderOptions.create(properties.buildProducerProperties(sslBundles)); 19 | } 20 | 21 | @Bean 22 | public ReactiveKafkaProducerTemplate producerTemplate(SenderOptions senderOptions){ 23 | return new ReactiveKafkaProducerTemplate<>(senderOptions); 24 | } 25 | 26 | @Bean 27 | public ProductViewEventProducer productViewEventProducer(ReactiveKafkaProducerTemplate template){ 28 | var sink = Sinks.many().unicast().onBackpressureBuffer(); 29 | var flux = sink.asFlux(); 30 | var eventProducer = new ProductViewEventProducer(template, sink, flux, "product-view-events"); 31 | eventProducer.subscribe(); 32 | return eventProducer; 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/controller/ProductController.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.controller; 2 | 3 | import com.vinsguru.productservice.dto.ProductDto; 4 | import com.vinsguru.productservice.service.ProductService; 5 | import lombok.AllArgsConstructor; 6 | import org.springframework.http.ResponseEntity; 7 | import org.springframework.web.bind.annotation.GetMapping; 8 | import org.springframework.web.bind.annotation.PathVariable; 9 | import org.springframework.web.bind.annotation.RequestMapping; 10 | import org.springframework.web.bind.annotation.RestController; 11 | import reactor.core.publisher.Mono; 12 | 13 | 14 | @RestController 15 | @AllArgsConstructor 16 | @RequestMapping("product") 17 | public class ProductController { 18 | 19 | private final ProductService productService; 20 | 21 | @GetMapping("{productId}") 22 | public Mono> view(@PathVariable Integer productId){ 23 | return this.productService.getProduct(productId) 24 | .map(ResponseEntity::ok) 25 | .defaultIfEmpty(ResponseEntity.notFound().build()); 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/dto/ProductDto.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.dto; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class ProductDto { 7 | 8 | private Integer id; 9 | private String description; 10 | private Integer price; 11 | 12 | } 13 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/entity/Product.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.entity; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import org.springframework.data.annotation.Id; 7 | 8 | @Data 9 | @AllArgsConstructor 10 | @NoArgsConstructor 11 | public class Product { 12 | 13 | @Id 14 | private Integer id; 15 | private String description; 16 | private Integer price; 17 | 18 | } 19 | 
-------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/event/ProductViewEvent.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.event; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | @AllArgsConstructor 10 | public class ProductViewEvent { 11 | 12 | private Integer productId; 13 | // browser 14 | // location 15 | 16 | } 17 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/repository/ProductRepository.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.repository; 2 | 3 | 4 | import com.vinsguru.productservice.entity.Product; 5 | import org.springframework.data.repository.reactive.ReactiveCrudRepository; 6 | import org.springframework.stereotype.Repository; 7 | 8 | @Repository 9 | public interface ProductRepository extends ReactiveCrudRepository { 10 | } 11 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/service/ProductService.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.service; 2 | 3 | import com.vinsguru.productservice.dto.ProductDto; 4 | import com.vinsguru.productservice.event.ProductViewEvent; 5 | import com.vinsguru.productservice.repository.ProductRepository; 6 | import com.vinsguru.productservice.util.EntityDtoUtil; 7 | import lombok.AllArgsConstructor; 8 | import org.springframework.stereotype.Service; 9 | import reactor.core.publisher.Mono; 10 | 11 | @Service 12 | @AllArgsConstructor 13 | public class ProductService { 14 | 15 | private final ProductRepository repository; 16 | private final ProductViewEventProducer productViewEventProducer; 17 | 18 | public Mono getProduct(int id){ 19 | return this.repository.findById(id) 20 | .doOnNext(e -> this.productViewEventProducer.emitEvent(new ProductViewEvent(e.getId()))) 21 | .map(EntityDtoUtil::toDto); 22 | } 23 | 24 | } 25 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/service/ProductViewEventProducer.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.service; 2 | 3 | import com.vinsguru.productservice.event.ProductViewEvent; 4 | import lombok.AllArgsConstructor; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate; 9 | import reactor.core.publisher.Flux; 10 | import reactor.core.publisher.Sinks; 11 | import reactor.kafka.sender.SenderRecord; 12 | 13 | @AllArgsConstructor 14 | public class ProductViewEventProducer { 15 | 16 | private static final Logger log = LoggerFactory.getLogger(ProductViewEventProducer.class); 17 | 18 | private final ReactiveKafkaProducerTemplate template; 19 | private final Sinks.Many sink; 20 | private final Flux flux; 21 | private final String topic; 22 | 23 | public void subscribe(){ 24 | var srFlux = this.flux 25 | .map(e -> new 
ProducerRecord<>(topic, e.getProductId().toString(), e)) 26 | .map(pr -> SenderRecord.create(pr, pr.key())); 27 | this.template.send(srFlux) 28 | .doOnNext(r -> log.info("emitted event: {}", r.correlationMetadata())) 29 | .subscribe(); 30 | } 31 | 32 | public void emitEvent(ProductViewEvent event){ 33 | this.sink.tryEmitNext(event); 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/java/com/vinsguru/productservice/util/EntityDtoUtil.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice.util; 2 | 3 | import com.vinsguru.productservice.dto.ProductDto; 4 | import com.vinsguru.productservice.entity.Product; 5 | import org.springframework.beans.BeanUtils; 6 | 7 | public class EntityDtoUtil { 8 | 9 | public static ProductDto toDto(Product product){ 10 | var dto = new ProductDto(); 11 | BeanUtils.copyProperties(product, dto); 12 | return dto; 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/resources/application.yaml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | bootstrap-servers: 4 | - localhost:9092 5 | producer: 6 | key-serializer: org.apache.kafka.common.serialization.StringSerializer 7 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/resources/data.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS product; 2 | CREATE TABLE product AS SELECT * FROM CSVREAD('classpath:product.csv'); -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/resources/product.csv: -------------------------------------------------------------------------------- 1 | id,description,price 2 | 1,product-1,1 3 | 2,product-2,2 4 | 3,product-3,3 5 | 4,product-4,4 6 | 5,product-5,5 7 | 6,product-6,6 8 | 7,product-7,7 9 | 8,product-8,8 10 | 9,product-9,9 11 | 10,product-10,10 12 | 11,product-11,11 13 | 12,product-12,12 14 | 13,product-13,13 15 | 14,product-14,14 16 | 15,product-15,15 17 | 16,product-16,16 18 | 17,product-17,17 19 | 18,product-18,18 20 | 19,product-19,19 21 | 20,product-20,20 22 | 21,product-21,21 23 | 22,product-22,22 24 | 23,product-23,23 25 | 24,product-24,24 26 | 25,product-25,25 27 | 26,product-26,26 28 | 27,product-27,27 29 | 28,product-28,28 30 | 29,product-29,29 31 | 30,product-30,30 32 | 31,product-31,31 33 | 32,product-32,32 34 | 33,product-33,33 35 | 34,product-34,34 36 | 35,product-35,35 37 | 36,product-36,36 38 | 37,product-37,37 39 | 38,product-38,38 40 | 39,product-39,39 41 | 40,product-40,40 42 | 41,product-41,41 43 | 42,product-42,42 44 | 43,product-43,43 45 | 44,product-44,44 46 | 45,product-45,45 47 | 46,product-46,46 48 | 47,product-47,47 49 | 48,product-48,48 50 | 49,product-49,49 51 | 50,product-50,50 52 | 51,product-51,51 53 | 52,product-52,52 54 | 53,product-53,53 55 | 54,product-54,54 56 | 55,product-55,55 57 | 56,product-56,56 58 | 57,product-57,57 
59 | 58,product-58,58 60 | 59,product-59,59 61 | 60,product-60,60 62 | 61,product-61,61 63 | 62,product-62,62 64 | 63,product-63,63 65 | 64,product-64,64 66 | 65,product-65,65 67 | 66,product-66,66 68 | 67,product-67,67 69 | 68,product-68,68 70 | 69,product-69,69 71 | 70,product-70,70 72 | 71,product-71,71 73 | 72,product-72,72 74 | 73,product-73,73 75 | 74,product-74,74 76 | 75,product-75,75 77 | 76,product-76,76 78 | 77,product-77,77 79 | 78,product-78,78 80 | 79,product-79,79 81 | 80,product-80,80 82 | 81,product-81,81 83 | 82,product-82,82 84 | 83,product-83,83 85 | 84,product-84,84 86 | 85,product-85,85 87 | 86,product-86,86 88 | 87,product-87,87 89 | 88,product-88,88 90 | 89,product-89,89 91 | 90,product-90,90 92 | 91,product-91,91 93 | 92,product-92,92 94 | 93,product-93,93 95 | 94,product-94,94 96 | 95,product-95,95 97 | 96,product-96,96 98 | 97,product-97,97 99 | 98,product-98,98 100 | 99,product-99,99 101 | 100,product-100,100 -------------------------------------------------------------------------------- /03-assignment/product-service/src/main/resources/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Products Service 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 |
[remainder of index.html not recoverable: the HTML tags were stripped during extraction and the inline script (roughly original lines 24-58) is missing entirely. Surviving text: heading "Product Service"; "You can click on these buttons to simulate product view"]
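As with the analytics page, only the visible text survives, so the snippet below is a hypothetical stand-in rather than the original file: a few buttons that call GET /product/{id} on ProductController, which is what makes ProductViewEventProducer publish a record to the product-view-events topic. The number of buttons, the element ids, and the fetch-based wiring are assumptions.

```html
<!-- Hypothetical sketch only: the number of buttons and the fetch-based wiring are
     assumptions; the original page's markup and script did not survive extraction. -->
<!DOCTYPE html>
<html>
<head>
    <title>Products Service</title>
</head>
<body>
    <h3>Product Service</h3>
    <p>You can click on these buttons to simulate product view.</p>
    <div id="buttons"></div>
    <script>
        // Each click calls GET /product/{id}; ProductService then asks
        // ProductViewEventProducer to emit a ProductViewEvent to the product-view-events topic.
        const container = document.getElementById('buttons');
        for (let id = 1; id <= 10; id++) {
            const button = document.createElement('button');
            button.textContent = 'product-' + id;
            button.onclick = () => fetch('/product/' + id);
            container.appendChild(button);
        }
    </script>
</body>
</html>
```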
23 | 59 | 60 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/test/java/com/vinsguru/productservice/AbstractIntegrationTest.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice; 2 | 3 | import org.apache.kafka.common.serialization.StringDeserializer; 4 | import org.apache.kafka.common.serialization.StringSerializer; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.test.context.SpringBootTest; 7 | import org.springframework.kafka.support.serializer.JsonDeserializer; 8 | import org.springframework.kafka.support.serializer.JsonSerializer; 9 | import org.springframework.kafka.test.EmbeddedKafkaBroker; 10 | import org.springframework.kafka.test.context.EmbeddedKafka; 11 | import org.springframework.kafka.test.utils.KafkaTestUtils; 12 | import reactor.kafka.receiver.KafkaReceiver; 13 | import reactor.kafka.receiver.ReceiverOptions; 14 | import reactor.kafka.sender.KafkaSender; 15 | import reactor.kafka.sender.SenderOptions; 16 | import reactor.kafka.sender.SenderRecord; 17 | 18 | import java.util.List; 19 | import java.util.function.UnaryOperator; 20 | 21 | @SpringBootTest 22 | @EmbeddedKafka( 23 | topics = { AbstractIntegrationTest.PRODUCT_VIEW_EVENTS }, 24 | partitions = 1, 25 | bootstrapServersProperty = "spring.kafka.bootstrap-servers" 26 | ) 27 | public class AbstractIntegrationTest { 28 | 29 | protected static final String PRODUCT_VIEW_EVENTS = "product-view-events"; 30 | 31 | @Autowired 32 | private EmbeddedKafkaBroker broker; 33 | 34 | protected KafkaReceiver createReceiver(String... topics){ 35 | return createReceiver(options -> 36 | options.withKeyDeserializer(new StringDeserializer()) 37 | .withValueDeserializer(new JsonDeserializer().trustedPackages("*")) 38 | .subscription(List.of(topics)) 39 | ); 40 | } 41 | 42 | protected KafkaReceiver createReceiver(UnaryOperator> builder){ 43 | var props = KafkaTestUtils.consumerProps("test-group", "true", broker); 44 | var options = ReceiverOptions.create(props); 45 | options = builder.apply(options); 46 | return KafkaReceiver.create(options); 47 | } 48 | 49 | protected KafkaSender createSender(){ 50 | return createSender(options -> 51 | options.withKeySerializer(new StringSerializer()) 52 | .withValueSerializer(new JsonSerializer()) 53 | ); 54 | } 55 | 56 | protected KafkaSender createSender(UnaryOperator> builder){ 57 | var props = KafkaTestUtils.producerProps(broker); 58 | var options = SenderOptions.create(props); 59 | options = builder.apply(options); 60 | return KafkaSender.create(options); 61 | } 62 | 63 | protected SenderRecord toSenderRecord(String topic, K key, V value){ 64 | return SenderRecord.create(topic, null, null, key, value, key); 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /03-assignment/product-service/src/test/java/com/vinsguru/productservice/ProductServiceApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.vinsguru.productservice; 2 | 3 | import com.vinsguru.productservice.event.ProductViewEvent; 4 | import org.junit.jupiter.api.Assertions; 5 | import org.junit.jupiter.api.Test; 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; 8 | import org.springframework.boot.test.context.SpringBootTest; 
9 | import org.springframework.test.web.reactive.server.WebTestClient; 10 | import reactor.kafka.receiver.KafkaReceiver; 11 | import reactor.test.StepVerifier; 12 | 13 | @AutoConfigureWebTestClient 14 | class ProductServiceApplicationTests extends AbstractIntegrationTest{ 15 | 16 | @Autowired 17 | private WebTestClient client; 18 | 19 | @Test 20 | void productViewAndEventTest() { 21 | 22 | // view products 23 | viewProductSuccess(1); 24 | viewProductSuccess(1); 25 | viewProductError(1000); 26 | viewProductSuccess(5); 27 | 28 | // check if the events are emitted 29 | var flux = this.createReceiver(PRODUCT_VIEW_EVENTS) 30 | .receive() 31 | .take(3); 32 | 33 | StepVerifier.create(flux) 34 | .consumeNextWith(r -> Assertions.assertEquals(1, r.value().getProductId())) 35 | .consumeNextWith(r -> Assertions.assertEquals(1, r.value().getProductId())) 36 | .consumeNextWith(r -> Assertions.assertEquals(5, r.value().getProductId())) 37 | .verifyComplete(); 38 | 39 | } 40 | 41 | private void viewProductSuccess(int id){ 42 | this.client 43 | .get() 44 | .uri("/product/" + id) 45 | .exchange() 46 | .expectStatus().is2xxSuccessful() 47 | .expectBody() 48 | .jsonPath("$.id").isEqualTo(id) 49 | .jsonPath("$.description").isEqualTo("product-" + id); 50 | } 51 | 52 | private void viewProductError(int id){ 53 | this.client 54 | .get() 55 | .uri("/product/" + id) 56 | .exchange() 57 | .expectStatus().is4xxClientError(); 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Reactive Kafka From Scratch 2 | 3 | This repo contains the source code for the kafka course. 4 | 5 | ![Learn Kafka From Scratch](.doc/kafka.png) 6 | 7 | This course is particularly for any senior or staff level engineer who wants to learn Kafka From Scratch. 8 | 9 | --------------------------------------------------------------------------------