├── .doc
└── kafka.png
├── 01-workspace
├── 01-kafka-setup
│ ├── compose
│ │ ├── docker-compose.yaml
│ │ └── props
│ │ │ └── server.properties
│ └── image
│ │ ├── Dockerfile
│ │ └── runner.sh
├── 02-kafka-101
│ ├── 01-topic.sh
│ ├── 02-producer.sh
│ ├── 03-consumer.sh
│ ├── 04-print-offset.sh
│ ├── 05-consumer-group.sh
│ ├── 06-reset-offset.sh
│ └── 07-transaction.sh
├── 03-kafka-cluster
│ ├── docker-compose.yaml
│ └── props
│ │ ├── s1.properties
│ │ ├── s2.properties
│ │ └── s3.properties
├── 04-assignment-resources
│ ├── 01-product-service-resources
│ │ ├── application.yaml
│ │ ├── data.sql
│ │ ├── product.csv
│ │ └── static
│ │ │ └── index.html
│ └── 02-analytics-service-resources
│ │ ├── application.properties
│ │ ├── application.yaml
│ │ ├── data.sql
│ │ └── static
│ │ └── index.html
├── 05-kafka-security-sasl-plain
│ ├── 01-kafka-topics.sh
│ ├── docker-compose.yaml
│ └── props
│ │ ├── consumer.properties
│ │ ├── jaas.conf
│ │ └── security.properties
└── 06-kafka-security-sasl-ssl
│ ├── 01-kafka-topics.sh
│ ├── certs
│ ├── kafka-signed.crt
│ ├── kafka-signing-request.crt
│ ├── kafka.keystore.jks
│ ├── kafka.truststore.jks
│ ├── root.crt
│ └── root.key
│ ├── docker-compose.yaml
│ ├── generate-certs
│ └── create-certificates.sh
│ └── props
│ ├── consumer.properties
│ ├── jaas.conf
│ └── security.properties
├── 02-reactive-kafka-playground
├── .gitignore
├── .mvn
│ └── wrapper
│ │ ├── maven-wrapper.jar
│ │ └── maven-wrapper.properties
├── mvnw
├── mvnw.cmd
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ └── com
│ │ │ └── vinsguru
│ │ │ └── reactivekafkaplayground
│ │ │ ├── ReactiveKafkaPlaygroundApplication.java
│ │ │ ├── sec01
│ │ │ ├── Lec01KafkaConsumer.java
│ │ │ └── Lec02KafkaConsumer.java
│ │ │ ├── sec02
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec03
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec04
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec05
│ │ │ ├── KafkaConsumer.java
│ │ │ ├── KafkaConsumerGroup.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec06
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaConsumerGroup.java
│ │ │ ├── sec07
│ │ │ └── KafkaConsumer.java
│ │ │ ├── sec08
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec09
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec10
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec11
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec12
│ │ │ ├── KafkaConsumerV1.java
│ │ │ ├── KafkaConsumerV2.java
│ │ │ ├── KafkaConsumerV3.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec13
│ │ │ ├── KafkaConsumer.java
│ │ │ ├── KafkaProducer.java
│ │ │ ├── OrderEventProcessor.java
│ │ │ ├── ReactiveDeadLetterTopicProducer.java
│ │ │ └── RecordProcessingException.java
│ │ │ ├── sec14
│ │ │ ├── KafkaConsumer.java
│ │ │ └── KafkaProducer.java
│ │ │ ├── sec15
│ │ │ ├── TransferDemo.java
│ │ │ ├── TransferEvent.java
│ │ │ ├── TransferEventConsumer.java
│ │ │ └── TransferEventProcessor.java
│ │ │ ├── sec16
│ │ │ ├── ConsumerRunner.java
│ │ │ ├── DummyOrder.java
│ │ │ ├── KafkaConsumerConfig.java
│ │ │ ├── KafkaProducerConfig.java
│ │ │ ├── OrderEvent.java
│ │ │ └── ProducerRunner.java
│ │ │ ├── sec17
│ │ │ ├── consumer
│ │ │ │ ├── ConsumerRunner.java
│ │ │ │ ├── DummyOrder.java
│ │ │ │ └── KafkaConsumerConfig.java
│ │ │ └── producer
│ │ │ │ ├── KafkaProducerConfig.java
│ │ │ │ ├── OrderEvent.java
│ │ │ │ └── ProducerRunner.java
│ │ │ └── sec18
│ │ │ └── KafkaProducer.java
│ └── resources
│ │ ├── application.properties
│ │ ├── application.yaml
│ │ ├── kafka.truststore.jks
│ │ └── logback.xml
│ └── test
│ └── java
│ └── com
│ └── vinsguru
│ └── reactivekafkaplayground
│ ├── AbstractIT.java
│ ├── EmbeddedKafkaPlaygroundTests.java
│ ├── OrderEventConsumerTest.java
│ └── OrderEventProducerTest.java
├── 03-assignment
├── analytics-service
│ ├── .gitignore
│ ├── .mvn
│ │ └── wrapper
│ │ │ ├── maven-wrapper.jar
│ │ │ └── maven-wrapper.properties
│ ├── mvnw
│ ├── mvnw.cmd
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ ├── java
│ │ │ └── com
│ │ │ │ └── vinsguru
│ │ │ │ └── analyticsservice
│ │ │ │ ├── AnalyticsServiceApplication.java
│ │ │ │ ├── config
│ │ │ │ └── KafkaConsumerConfig.java
│ │ │ │ ├── controller
│ │ │ │ └── TrendingController.java
│ │ │ │ ├── dto
│ │ │ │ └── ProductTrendingDto.java
│ │ │ │ ├── entity
│ │ │ │ └── ProductViewCount.java
│ │ │ │ ├── event
│ │ │ │ └── ProductViewEvent.java
│ │ │ │ ├── repository
│ │ │ │ └── ProductViewRepository.java
│ │ │ │ └── service
│ │ │ │ ├── ProductTrendingBroadcastService.java
│ │ │ │ └── ProductViewEventConsumer.java
│ │ └── resources
│ │ │ ├── application.properties
│ │ │ ├── application.yaml
│ │ │ ├── data.sql
│ │ │ └── static
│ │ │ └── index.html
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── vinsguru
│ │ └── analyticsservice
│ │ ├── AbstractIntegrationTest.java
│ │ └── AnalyticsServiceApplicationTests.java
└── product-service
│ ├── .gitignore
│ ├── .mvn
│ └── wrapper
│ │ ├── maven-wrapper.jar
│ │ └── maven-wrapper.properties
│ ├── mvnw
│ ├── mvnw.cmd
│ ├── pom.xml
│ └── src
│ ├── main
│ ├── java
│ │ └── com
│ │ │ └── vinsguru
│ │ │ └── productservice
│ │ │ ├── ProductServiceApplication.java
│ │ │ ├── config
│ │ │ └── KafkaProducerConfig.java
│ │ │ ├── controller
│ │ │ └── ProductController.java
│ │ │ ├── dto
│ │ │ └── ProductDto.java
│ │ │ ├── entity
│ │ │ └── Product.java
│ │ │ ├── event
│ │ │ └── ProductViewEvent.java
│ │ │ ├── repository
│ │ │ └── ProductRepository.java
│ │ │ ├── service
│ │ │ ├── ProductService.java
│ │ │ └── ProductViewEventProducer.java
│ │ │ └── util
│ │ │ └── EntityDtoUtil.java
│ └── resources
│ │ ├── application.properties
│ │ ├── application.yaml
│ │ ├── data.sql
│ │ ├── product.csv
│ │ └── static
│ │ └── index.html
│ └── test
│ └── java
│ └── com
│ └── vinsguru
│ └── productservice
│ ├── AbstractIntegrationTest.java
│ └── ProductServiceApplicationTests.java
└── README.md
/.doc/kafka.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/.doc/kafka.png
--------------------------------------------------------------------------------
/01-workspace/01-kafka-setup/compose/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: "3.8"
2 | services:
3 | kafka1:
4 | image: vinsdocker/kafka
5 | container_name: kafka
6 | ports:
7 | - "9092:9092"
8 | environment:
9 | KAFKA_CLUSTER_ID: OTMwNzFhYTY1ODNiNGE5OT
10 | volumes:
11 | - ./props/server.properties:/kafka/config/kraft/server.properties
12 | - ./data:/tmp/kafka-logs
--------------------------------------------------------------------------------
/01-workspace/01-kafka-setup/compose/props/server.properties:
--------------------------------------------------------------------------------
1 | process.roles=broker,controller
2 | node.id=1
3 | listeners=PLAINTEXT://:9092,CONTROLLER://:9093
4 | controller.listener.names=CONTROLLER
5 | advertised.listeners=PLAINTEXT://localhost:9092
6 | inter.broker.listener.name=PLAINTEXT
7 | controller.quorum.voters=1@kafka:9093
8 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL
9 | auto.create.topics.enable=true
10 | offsets.topic.replication.factor=1
11 | transaction.state.log.replication.factor=1
12 | transaction.state.log.min.isr=1
13 | log.dirs=/tmp/kafka-logs
--------------------------------------------------------------------------------
/01-workspace/01-kafka-setup/image/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM eclipse-temurin:17-jre-focal
2 |
3 | ADD https://downloads.apache.org/kafka/3.4.0/kafka_2.13-3.4.0.tgz kafka.tgz
4 |
5 | RUN tar -xvzf kafka.tgz
6 |
7 | RUN rm kafka.tgz
8 |
9 | RUN mv /kafka_2.13-3.4.0 /kafka
10 |
11 | ENV PATH=${PATH}:/kafka/bin
12 |
13 | WORKDIR learning
14 |
15 | ADD runner.sh runner.sh
16 |
17 | CMD [ "sh", "runner.sh" ]
--------------------------------------------------------------------------------
/01-workspace/01-kafka-setup/image/runner.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # If env variable not set, generate random one
4 | # Large organizations might have multiple kafka clusters. Each cluster is expected to have an ID
5 | clusterId=${KAFKA_CLUSTER_ID:-$(kafka-storage.sh random-uuid)}
6 | echo "Kafka Cluster ID : ${clusterId}"
7 |
8 | # For the first time, format the storage. It would create a couple of files (meta.properties, checkpoint file).
9 | # If it is already formatted, it would be ignored.
10 |
11 | echo "Formatting storage"
12 | kafka-storage.sh format -t $clusterId -c /kafka/config/kraft/server.properties
13 |
14 | # Finally start the kafka server!!
15 |
16 | echo "Starting Kafka"
17 | exec kafka-server-start.sh /kafka/config/kraft/server.properties
--------------------------------------------------------------------------------
/01-workspace/02-kafka-101/01-topic.sh:
--------------------------------------------------------------------------------
1 | # create a kafka topic called hello-world
2 | # we assume that directory which contains 'kafka-topics.sh' is included in the PATH
3 | kafka-topics.sh --bootstrap-server localhost:9092 --topic hello-world --create
4 |
5 | # list all topics
6 | kafka-topics.sh --bootstrap-server localhost:9092 --list
7 |
8 | # describe a topic
9 | kafka-topics.sh --bootstrap-server localhost:9092 --topic hello-world --describe
10 |
11 | # delete a topic
12 | kafka-topics.sh --bootstrap-server localhost:9092 --topic hello-world --delete
13 |
14 | # topic with partitions
15 | kafka-topics.sh --bootstrap-server localhost:9092 --topic order-events --create --partitions 2
16 |
17 | # topic with replication factor
18 | kafka-topics.sh --bootstrap-server localhost:9092 --topic order-events --create --replication-factor 3
--------------------------------------------------------------------------------
/01-workspace/02-kafka-101/02-producer.sh:
--------------------------------------------------------------------------------
1 | # to produce messages
2 | kafka-console-producer.sh --bootstrap-server localhost:9092 --topic hello-world
3 |
4 | # linger.ms
5 | kafka-console-producer.sh --bootstrap-server localhost:9092 --topic hello-world --timeout 100
--------------------------------------------------------------------------------
/01-workspace/02-kafka-101/03-consumer.sh:
--------------------------------------------------------------------------------
1 | # to consume messages
2 | kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic hello-world
3 |
4 | # to consume from beginning
5 | kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic hello-world --from-beginning
--------------------------------------------------------------------------------
/01-workspace/02-kafka-101/04-print-offset.sh:
--------------------------------------------------------------------------------
1 |
2 | # to print offset, time etc
3 | kafka-console-consumer.sh \
4 | --bootstrap-server localhost:9092 \
5 | --topic hello-world \
6 | --property print.offset=true \
7 | --property print.timestamp=true
--------------------------------------------------------------------------------
/01-workspace/02-kafka-101/05-consumer-group.sh:
--------------------------------------------------------------------------------
1 |
2 | # create console producer
3 | kafka-console-producer.sh \
4 | --bootstrap-server localhost:9092 \
5 | --topic hello-world \
6 | --property key.separator=: \
7 | --property parse.key=true
8 |
9 | # create console consumer with a group
10 | kafka-console-consumer.sh \
11 | --bootstrap-server localhost:9092 \
12 | --topic hello-world \
13 | --property print.offset=true \
14 | --property print.key=true \
15 | --group name
16 |
17 | # list all the consumer groups
18 | kafka-consumer-groups.sh --bootstrap-server localhost:9092 --list
19 |
20 | # describe a consumer group
21 | kafka-consumer-groups.sh \
22 | --bootstrap-server localhost:9092 \
23 | --group cg \
24 | --describe
25 |
26 |
--------------------------------------------------------------------------------
/01-workspace/02-kafka-101/06-reset-offset.sh:
--------------------------------------------------------------------------------
1 | # stop the consumers before you enter this command
2 |
3 | # dry-run
4 | kafka-consumer-groups.sh \
5 | --bootstrap-server localhost:9092 \
6 | --group cg \
7 | --topic hello-world \
8 | --reset-offsets \
9 | --shift-by -3 \
10 | --dry-run
11 |
12 | # reset offset by shifting the offset
13 | kafka-consumer-groups.sh \
14 | --bootstrap-server localhost:9092 \
15 | --group cg \
16 | --topic hello-world \
17 | --reset-offsets \
18 | --shift-by -3 \
19 | --execute
20 |
21 | # reset by duration
22 | kafka-consumer-groups.sh \
23 | --bootstrap-server localhost:9092 \
24 | --topic hello-world \
25 | --group cg \
26 | --reset-offsets \
27 | --by-duration PT5M \
28 | --execute
29 |
30 | # -- to the beginning
31 | kafka-consumer-groups.sh \
32 | --bootstrap-server localhost:9092 \
33 | --topic hello-world \
34 | --group cg \
35 | --reset-offsets \
36 | --to-earliest \
37 | --execute
38 |
39 | # -- to the end
40 | kafka-consumer-groups.sh \
41 | --bootstrap-server localhost:9092 \
42 | --topic hello-world \
43 | --group cg \
44 | --reset-offsets \
45 | --to-latest \
46 | --execute
47 |
48 | # -- to date-time
49 | kafka-consumer-groups.sh \
50 | --bootstrap-server localhost:9092 \
51 | --topic hello-world \
52 | --group cg \
53 | --reset-offsets \
54 | --to-datetime 2023-01-01T01:00:00.000 \
55 | --execute
--------------------------------------------------------------------------------
/01-workspace/02-kafka-101/07-transaction.sh:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | kafka-topics.sh --bootstrap-server localhost:9092 --topic transfer-requests --create
5 |
6 | kafka-topics.sh --bootstrap-server localhost:9092 --topic transaction-events --create
7 |
8 | kafka-console-producer.sh \
9 | --bootstrap-server localhost:9092 \
10 | --topic transfer-requests \
11 | --property key.separator=: \
12 | --property parse.key=true
13 |
14 | kafka-console-consumer.sh \
15 | --bootstrap-server localhost:9092 \
16 | --topic transaction-events \
17 | --property print.key=true \
18 | --isolation-level=read_committed \
19 | --from-beginning
20 |
21 | kafka-console-consumer.sh \
22 | --bootstrap-server localhost:9092 \
23 | --topic transaction-events \
24 | --property print.key=true \
25 | --from-beginning
--------------------------------------------------------------------------------
/01-workspace/03-kafka-cluster/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: "3.8"
2 | services:
3 | kafka1:
4 | image: vinsdocker/kafka
5 | container_name: kafka1
6 | ports:
7 | - "8081:8081"
8 | environment:
9 | - KAFKA_CLUSTER_ID=OTMwNzFhYTY1ODNiNGE5OT
10 | volumes:
11 | - ./props/s1.properties:/kafka/config/kraft/server.properties
12 | - ./data/b1:/tmp/kafka-logs
13 | kafka2:
14 | image: vinsdocker/kafka
15 | container_name: kafka2
16 | ports:
17 | - "8082:8082"
18 | environment:
19 | - KAFKA_CLUSTER_ID=OTMwNzFhYTY1ODNiNGE5OT
20 | volumes:
21 | - ./props/s2.properties:/kafka/config/kraft/server.properties
22 | - ./data/b2:/tmp/kafka-logs
23 | kafka3:
24 | image: vinsdocker/kafka
25 | container_name: kafka3
26 | ports:
27 | - "8083:8083"
28 | environment:
29 | - KAFKA_CLUSTER_ID=OTMwNzFhYTY1ODNiNGE5OT
30 | volumes:
31 | - ./props/s3.properties:/kafka/config/kraft/server.properties
32 | - ./data/b3:/tmp/kafka-logs
--------------------------------------------------------------------------------
/01-workspace/03-kafka-cluster/props/s1.properties:
--------------------------------------------------------------------------------
1 | process.roles=broker,controller
2 | node.id=1
3 | listeners=INTERNAL://:9092,CONTROLLER://:9093,EXTERNAL://:8081
4 | controller.listener.names=CONTROLLER
5 | inter.broker.listener.name=INTERNAL
6 | advertised.listeners=INTERNAL://kafka1:9092,EXTERNAL://localhost:8081
7 | controller.quorum.voters=1@kafka1:9093,2@kafka2:9093,3@kafka3:9093
8 |
9 | # PLAINTEXT
10 | # SSL
11 | # SASL_PLAINTEXT
12 | # SASL_SSL
13 |
14 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL
15 | auto.create.topics.enable=false
16 |
17 | # __consumer_offsets
18 | offsets.topic.replication.factor=3
19 | log.dirs=/tmp/kafka-logs
--------------------------------------------------------------------------------
/01-workspace/03-kafka-cluster/props/s2.properties:
--------------------------------------------------------------------------------
1 | process.roles=broker,controller
2 | node.id=2
3 | listeners=INTERNAL://:9092,CONTROLLER://:9093,EXTERNAL://:8082
4 | controller.listener.names=CONTROLLER
5 | inter.broker.listener.name=INTERNAL
6 | advertised.listeners=INTERNAL://kafka2:9092,EXTERNAL://localhost:8082
7 | controller.quorum.voters=1@kafka1:9093,2@kafka2:9093,3@kafka3:9093
8 |
9 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL
10 | auto.create.topics.enable=false
11 |
12 | # __consumer_offsets
13 | offsets.topic.replication.factor=3
14 | log.dirs=/tmp/kafka-logs
--------------------------------------------------------------------------------
/01-workspace/03-kafka-cluster/props/s3.properties:
--------------------------------------------------------------------------------
1 | process.roles=broker,controller
2 | node.id=3
3 | listeners=INTERNAL://:9092,CONTROLLER://:9093,EXTERNAL://:8083
4 | controller.listener.names=CONTROLLER
5 | inter.broker.listener.name=INTERNAL
6 | advertised.listeners=INTERNAL://kafka3:9092,EXTERNAL://localhost:8083
7 | controller.quorum.voters=1@kafka1:9093,2@kafka2:9093,3@kafka3:9093
8 |
9 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL
10 | auto.create.topics.enable=false
11 |
12 | # __consumer_offsets
13 | offsets.topic.replication.factor=3
14 | log.dirs=/tmp/kafka-logs
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/01-product-service-resources/application.yaml:
--------------------------------------------------------------------------------
1 | spring:
2 | kafka:
3 | bootstrap-servers:
4 | - localhost:9092
5 | producer:
6 | key-serializer: org.apache.kafka.common.serialization.StringSerializer
7 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/01-product-service-resources/data.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS product;
2 | CREATE TABLE product AS SELECT * FROM CSVREAD('classpath:product.csv');
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/01-product-service-resources/product.csv:
--------------------------------------------------------------------------------
1 | id,description,price
2 | 1,product-1,1
3 | 2,product-2,2
4 | 3,product-3,3
5 | 4,product-4,4
6 | 5,product-5,5
7 | 6,product-6,6
8 | 7,product-7,7
9 | 8,product-8,8
10 | 9,product-9,9
11 | 10,product-10,10
12 | 11,product-11,11
13 | 12,product-12,12
14 | 13,product-13,13
15 | 14,product-14,14
16 | 15,product-15,15
17 | 16,product-16,16
18 | 17,product-17,17
19 | 18,product-18,18
20 | 19,product-19,19
21 | 20,product-20,20
22 | 21,product-21,21
23 | 22,product-22,22
24 | 23,product-23,23
25 | 24,product-24,24
26 | 25,product-25,25
27 | 26,product-26,26
28 | 27,product-27,27
29 | 28,product-28,28
30 | 29,product-29,29
31 | 30,product-30,30
32 | 31,product-31,31
33 | 32,product-32,32
34 | 33,product-33,33
35 | 34,product-34,34
36 | 35,product-35,35
37 | 36,product-36,36
38 | 37,product-37,37
39 | 38,product-38,38
40 | 39,product-39,39
41 | 40,product-40,40
42 | 41,product-41,41
43 | 42,product-42,42
44 | 43,product-43,43
45 | 44,product-44,44
46 | 45,product-45,45
47 | 46,product-46,46
48 | 47,product-47,47
49 | 48,product-48,48
50 | 49,product-49,49
51 | 50,product-50,50
52 | 51,product-51,51
53 | 52,product-52,52
54 | 53,product-53,53
55 | 54,product-54,54
56 | 55,product-55,55
57 | 56,product-56,56
58 | 57,product-57,57
59 | 58,product-58,58
60 | 59,product-59,59
61 | 60,product-60,60
62 | 61,product-61,61
63 | 62,product-62,62
64 | 63,product-63,63
65 | 64,product-64,64
66 | 65,product-65,65
67 | 66,product-66,66
68 | 67,product-67,67
69 | 68,product-68,68
70 | 69,product-69,69
71 | 70,product-70,70
72 | 71,product-71,71
73 | 72,product-72,72
74 | 73,product-73,73
75 | 74,product-74,74
76 | 75,product-75,75
77 | 76,product-76,76
78 | 77,product-77,77
79 | 78,product-78,78
80 | 79,product-79,79
81 | 80,product-80,80
82 | 81,product-81,81
83 | 82,product-82,82
84 | 83,product-83,83
85 | 84,product-84,84
86 | 85,product-85,85
87 | 86,product-86,86
88 | 87,product-87,87
89 | 88,product-88,88
90 | 89,product-89,89
91 | 90,product-90,90
92 | 91,product-91,91
93 | 92,product-92,92
94 | 93,product-93,93
95 | 94,product-94,94
96 | 95,product-95,95
97 | 96,product-96,96
98 | 97,product-97,97
99 | 98,product-98,98
100 | 99,product-99,99
101 | 100,product-100,100
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/01-product-service-resources/static/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Products Service
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
Product Service
17 |
You can click on these buttons to simulate product view
18 |
19 |
20 |
21 |
22 |
23 |
59 |
60 |
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/02-analytics-service-resources/application.properties:
--------------------------------------------------------------------------------
1 | product.trending.events.topic=product-trending-events
2 | product.view.events.topic=product-view-events
3 | logging.level.root=INFO
4 | server.port=7070
5 |
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/02-analytics-service-resources/application.yaml:
--------------------------------------------------------------------------------
1 | spring:
2 | kafka:
3 | bootstrap-servers:
4 | - localhost:9092
5 | consumer:
6 | group-id: trending-service-group
7 | auto-offset-reset: earliest
8 | key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
9 | value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
10 | properties:
11 | "group.instance.id": "1"
12 | "[spring.json.trusted.packages]": "*"
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/02-analytics-service-resources/data.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS product_view_count;
2 | CREATE TABLE product_view_count (
3 | id INT NOT NULL,
4 | count INT
5 | );
--------------------------------------------------------------------------------
/01-workspace/04-assignment-resources/02-analytics-service-resources/static/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Analytics Service
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
Product Analytics
17 |
This chart shows the top 5 products based on the view count.
18 |
If you can see the chart, then great job. You did the assignment.
19 |
20 |
21 |
22 |
23 |
68 |
69 |
70 |
--------------------------------------------------------------------------------
/01-workspace/05-kafka-security-sasl-plain/01-kafka-topics.sh:
--------------------------------------------------------------------------------
1 | kafka-topics.sh \
2 | --bootstrap-server localhost:9092 \
3 | --topic order-events --create
4 |
5 | kafka-topics.sh \
6 | --bootstrap-server localhost:9092 \
7 | --command-config consumer.properties \
8 | --topic order-events --create
--------------------------------------------------------------------------------
/01-workspace/05-kafka-security-sasl-plain/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: "3.8"
2 | services:
3 | kafka:
4 | image: vinsdocker/kafka
5 | container_name: kafka
6 | ports:
7 | - "9092:9092"
8 | environment:
9 | KAFKA_CLUSTER_ID: OTMwNzFhYTY1ODNiNGE5OT
10 | KAFKA_OPTS: "-Djava.security.auth.login.config=/kafka/config/jaas.conf"
11 | volumes:
12 | - ./props/security.properties:/kafka/config/kraft/server.properties
13 | - ./props/jaas.conf:/kafka/config/jaas.conf
14 | - ./data:/tmp/kafka-logs
15 | - ./props/consumer.properties:/learning/consumer.properties
--------------------------------------------------------------------------------
/01-workspace/05-kafka-security-sasl-plain/props/consumer.properties:
--------------------------------------------------------------------------------
1 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required serviceName="Kafka" username="client" password="client-secret";
2 | security.protocol=SASL_PLAINTEXT
3 | sasl.mechanism=PLAIN
4 |
--------------------------------------------------------------------------------
/01-workspace/05-kafka-security-sasl-plain/props/jaas.conf:
--------------------------------------------------------------------------------
1 | KafkaServer {
2 | org.apache.kafka.common.security.plain.PlainLoginModule required
3 | username="admin"
4 | password="secret"
5 | user_admin="secret"
6 | user_client="client-secret";
7 | };
--------------------------------------------------------------------------------
/01-workspace/05-kafka-security-sasl-plain/props/security.properties:
--------------------------------------------------------------------------------
1 | process.roles=broker,controller
2 | node.id=1
3 | listeners=SASL_PLAINTEXT://:9092,CONTROLLER://:9093
4 | controller.listener.names=CONTROLLER
5 | advertised.listeners=SASL_PLAINTEXT://localhost:9092
6 | inter.broker.listener.name=SASL_PLAINTEXT
7 | controller.quorum.voters=1@kafka:9093
8 | listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL
9 | auto.create.topics.enable=false
10 | offsets.topic.replication.factor=1
11 | transaction.state.log.replication.factor=1
12 | transaction.state.log.min.isr=1
13 |
14 | # This PLAIN is different from PLAINTEXT above
15 | sasl.enabled.mechanisms=PLAIN
16 | sasl.mechanism.controller.protocol=PLAIN
17 | sasl.mechanism.inter.broker.protocol=PLAIN
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/01-kafka-topics.sh:
--------------------------------------------------------------------------------
1 | kafka-topics.sh \
2 | --bootstrap-server localhost:9092 \
3 | --command-config consumer.properties \
4 | --topic order-events --create
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/certs/kafka-signed.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIICpDCCAYwCCQDaofz5dAOYFzANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAls
3 | b2NhbGhvc3QwHhcNMjMwMzI2MjAzMjA4WhcNMzMwMzIzMjAzMjA4WjAUMRIwEAYD
4 | VQQDEwlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCn
5 | RAfOB6pQALi8RUcbdzmZrHHusmiedIp3NbQ09QHeld7DHPg6DR63pyUHkURMFGEx
6 | x9OFezjIBiSDQHrTUM4JxNc1OcWzwCrSsNG4T+ASYzJYFMAFX5OPIYCJDFFuKG/F
7 | ds3dbB9TaWjTLHK+VV75EJrmn5w6cJ93OgyonJk74XCuygcbjaPTQaSs+pPF2PUP
8 | AZDP3DfTMC9uZREhWAMGC0ptb0BrcR1xFajBCzno80sw+Zmhbl8KumTTP070YDsb
9 | FguXvtWO3azrAzIT4mcqWDJpl4zB2k5duOy2IZeCHXqMRS+7Ay3LEvHrIAKQefkw
10 | 0aHyve0/mJpxIOF3EHpzAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFU0gG3SGX5y
11 | /uRu1IGypi2tVRlZ685mOkkUWWbfXbbs/6Igm8RpLBNw/MQ2nnxrx07e+ASd+3Nu
12 | /2xnCZgO9i/JvZnQXiDxXF3z/yzIW/P7PzIMPe0/+VM3ULLmYDEZ8S131clLtHWe
13 | bJXmNDZeLpxi8a/nxyXXnIGCE4+lYKkk22DunM6mAUWKeGljB4LHSzDZTExHsVhs
14 | PYiNp3kdQ1L8++S9SLbsIvmiMnJxoxkApewQpDAE0cqoPh0NGKtl6HbU2WoZP4Sn
15 | 3izBE+sej6aNMgMZ5wSTR6LuATaHAQ+lKXaGSjvddFtZ145u0Dombzt1ACGxlnG0
16 | jxU8K6J3awQ=
17 | -----END CERTIFICATE-----
18 |
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/certs/kafka-signing-request.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN NEW CERTIFICATE REQUEST-----
2 | MIICiTCCAXECAQAwFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0B
3 | AQEFAAOCAQ8AMIIBCgKCAQEAp0QHzgeqUAC4vEVHG3c5maxx7rJonnSKdzW0NPUB
4 | 3pXewxz4Og0et6clB5FETBRhMcfThXs4yAYkg0B601DOCcTXNTnFs8Aq0rDRuE/g
5 | EmMyWBTABV+TjyGAiQxRbihvxXbN3WwfU2lo0yxyvlVe+RCa5p+cOnCfdzoMqJyZ
6 | O+FwrsoHG42j00GkrPqTxdj1DwGQz9w30zAvbmURIVgDBgtKbW9Aa3EdcRWowQs5
7 | 6PNLMPmZoW5fCrpk0z9O9GA7GxYLl77Vjt2s6wMyE+JnKlgyaZeMwdpOXbjstiGX
8 | gh16jEUvuwMtyxLx6yACkHn5MNGh8r3tP5iacSDhdxB6cwIDAQABoDAwLgYJKoZI
9 | hvcNAQkOMSEwHzAdBgNVHQ4EFgQUI7w/91OfgoRI22ttzryRpHdqn/MwDQYJKoZI
10 | hvcNAQELBQADggEBAIEBcyMtjoF8lGRUTcfA3OI+mf+FnOuaQj8wqUUh5TxPuuJu
11 | +OMEWywd7a6dKtwxYw1CL6rOTKkM5JeoGDl2s4EivrGwEj3kPkP+gDEkZ7LcL9IX
12 | fBfrqeD89mkGspsqB0DtcgBemDuwQV/jcyBq36fEbOVbt/io1kPv+6Ff7+bi9YyE
13 | 3ZRJO/0wyip2rGCZIDoKx8FQMFk15pBxMd6uKUSO1ByVPiiusswNPriJo5M+ouw8
14 | le3JJlHOAkFChb4q7boP5aZWr5Eb6UrNP63oz9hxvY8xvfEp6woOEiP+o2RjjQBx
15 | OG1isI6LuXbjlAlcuHwApDjDl00Gy/eeLax53Mk=
16 | -----END NEW CERTIFICATE REQUEST-----
17 |
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/certs/kafka.keystore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/01-workspace/06-kafka-security-sasl-ssl/certs/kafka.keystore.jks
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/certs/kafka.truststore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/01-workspace/06-kafka-security-sasl-ssl/certs/kafka.truststore.jks
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/certs/root.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIICpDCCAYwCCQCR3w8B2Drn7jANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAls
3 | b2NhbGhvc3QwHhcNMjMwMzI2MjAzMjA4WhcNMjMwNDI1MjAzMjA4WjAUMRIwEAYD
4 | VQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD5
5 | NaH+xN21C8hJmhHF3d1sD0JbvvhQuROBq7aFyE03YCoMCReB1PP6TPTJ836toaKP
6 | HWPgPIGjyWHa6J0ZB/W1YVhsPOtquqyBbkOmcgG8OrR8skSrUaAbISJRRQnU34xZ
7 | MjUUBVN7T2HYi23jG5htn1Q0TU5jcvFGayEh75nyrRL4qC+68rXh7tHOzIfyLV00
8 | 0aaKHLzG53oEUMU/cqvDYKp+eraszqfkjSqiAqSSunl3Eqg6niuGfssW9nix7+M3
9 | hSupO0KMwYWly++0q3C8qzMPXMOlr0giAtm1xwMfaNnWfsPznbqBndpna7/yZxjj
10 | 7h5KMk9eJv9tfxE9lfSZAgMBAAEwDQYJKoZIhvcNAQELBQADggEBALFVnLXr80hf
11 | wrAaA3EWUHU6CFBBJyIqAL90pOzrtiT1go/3lt7jWZAYjX62gXK8P8kbbz2legY1
12 | 1GXgmXdcc6ERDxhvIdhGo761djsNay/lOsUuU5A7Qa7mk3d5fgBhpsW/tCzAv5jR
13 | wB1iVQcvniY4lJ9ANJSDMzHSdSPHUfImvatrQg7jxdKaBwtv6NMq4pmWv7d97BeG
14 | YLDzT5D6PXZHfTlJGdeQfMPqyFthFGffpDTJLzVrjDTJdoLcNFZHeXtZm3YIDXlH
15 | J1jFdhqFTuySLDniZp+B80jeOxW5Ecy4jln0Ydydncr40Az0C8EcO3WDA4nFqU/1
16 | WRjvqDCLFbs=
17 | -----END CERTIFICATE-----
18 |
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/certs/root.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIIEpAIBAAKCAQEA+TWh/sTdtQvISZoRxd3dbA9CW774ULkTgau2hchNN2AqDAkX
3 | gdTz+kz0yfN+raGijx1j4DyBo8lh2uidGQf1tWFYbDzrarqsgW5DpnIBvDq0fLJE
4 | q1GgGyEiUUUJ1N+MWTI1FAVTe09h2Itt4xuYbZ9UNE1OY3LxRmshIe+Z8q0S+Kgv
5 | uvK14e7RzsyH8i1dNNGmihy8xud6BFDFP3Krw2Cqfnq2rM6n5I0qogKkkrp5dxKo
6 | Op4rhn7LFvZ4se/jN4UrqTtCjMGFpcvvtKtwvKszD1zDpa9IIgLZtccDH2jZ1n7D
7 | 8526gZ3aZ2u/8mcY4+4eSjJPXib/bX8RPZX0mQIDAQABAoIBAQDMnd6+woGPT3dK
8 | B3ikUACn4veHQQu9q9h6VbTtr4LG/uxHKYflkCYq09kBoC3Y/qfsfiMtUEDZbFYu
9 | WWPz03dJT/48EODypwdQ1moxVdZa6JCc+for+D9PRytYMylZYZto+TJfl+ftjUY3
10 | eHPnTxcemv2WXAsK2KuGOBviM0rh1t6/SgzKOEEAVX8HP78p15MgR1JbgoZXM6Lu
11 | 3goga9PwYSSbqhe1CiNDcHYm+jY3yF6OqDph4W6/7oeKkyCYkJWRQT5yUn30WJEv
12 | dFr3NySyiOMcOARcTWz1cQaRI9ppvJQBKevGE9jMh8VWyYX4nxtEK0pz9418YfsT
13 | fe1lmw+JAoGBAP1QnU0UTTvSY5KBcxGqxkLS5gNNNN5JznoboCciHehLyKGb9P0h
14 | p982IBTEyVt0lbh5EdnOGB9RLGMiDDxJLEEVlwXqyVa1AnM1eocvdoYEjGsWa/nH
15 | hCaD+S3mdRMizdvOa9no/RbUVi7HWIj6nq33YOwi2/HXJTg7GfgMcIfjAoGBAPvZ
16 | 4MpxkSWkJW5DspYxrTRlqF+VJK4PDqf/cfAX4RalXX51FmSqK8cfdxP7nOHtk5BA
17 | btA4w1vtelVXFlAuyx74NntJFUD1qK+AiJ8g9OGw6O2bvHaIyEFfyA5RQ42Oci8S
18 | p6XKbB9D0D0/xSGgClGeIGiN4Uu+hQ7/zwFUp2JTAoGAH0MLpq+SuIdGcpY3NT72
19 | KNkOOeoVVDmXmco/AZVcS4bOw+MTYQn+RbXQT7/ZecfxkkqewVqF75d8+t8A04/U
20 | PlQDx18qaeD85Gr9LCHXZsK48Z5axQCydm5jGh8O48Wv0iivdr0iPucFm9BORNlp
21 | K98TvxCuRDJhDYd9tPcUDDECgYEAgZMJWl6vpMQdFevyl9+bE1gyFmsvYfYRPlHo
22 | lvRSNEqEHam0Cbqbp2kC5N8RShPnVdIwsSRjLoqx4/JYYoyQqLBfp3JkYvWBAjqJ
23 | EAXut6dDT+xOPFYHYcsbrHIW5V+iU6Gva1cH5dYH71s92xzx8if+2IwWm9e7sYnf
24 | QPoC3E0CgYBa+fHpt4st40Y48EAKkZmuGfZtmjAnx8auN/L8UCt1xQFpSCQ5adi/
25 | 8xSUf0BB1no78PMqTw/iB+SEewvgt2rMESBHAKzl48W6kz+oESQBlmFO7gyG1/5a
26 | cmoiItavkDOKIEbUTGS2gkhFiyXRoXH4Ex81ImVgzalFAcOgsgD4rA==
27 | -----END RSA PRIVATE KEY-----
28 |
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/docker-compose.yaml:
--------------------------------------------------------------------------------
# Single-node Kafka broker secured with SASL/PLAIN over TLS (SASL_SSL).
version: "3.8"
services:
  kafka:
    image: vinsdocker/kafka
    container_name: kafka
    ports:
      - "9092:9092"   # SASL_SSL client listener (see props/security.properties)
    environment:
      # Pre-generated KRaft cluster id (22-char base64 uuid).
      KAFKA_CLUSTER_ID: OTMwNzFhYTY1ODNiNGE5OT
      # Point the broker JVM at the JAAS file (mounted below) that holds the
      # SASL PLAIN credentials.
      KAFKA_OPTS: "-Djava.security.auth.login.config=/kafka/config/jaas.conf"
    volumes:
      - ./props/security.properties:/kafka/config/kraft/server.properties
      - ./props/jaas.conf:/kafka/config/jaas.conf
      # Persist the log directory on the host across container restarts.
      - ./data:/tmp/kafka-logs
      - ./props/consumer.properties:/learning/consumer.properties
      # TLS material produced by generate-certs/create-certificates.sh.
      - ./certs:/certs
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/generate-certs/create-certificates.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Generates the TLS material used by the SASL_SSL broker setup:
#   root.key / root.crt        - self-signed CA
#   kafka.keystore.jks         - broker private key + CA-signed certificate
#   kafka.truststore.jks       - CA certificate only, for clients
# All artifacts are moved into ../certs when done.
#
# Fail fast: without this, a failed openssl/keytool step would silently
# leave broken or half-populated stores behind.
set -euo pipefail

# CA: create private key and self-signed root CA certificate (CN=localhost).
openssl genrsa -out root.key
openssl req -new -x509 -key root.key -out root.crt -subj "/CN=localhost" -nodes

# Broker keystore: generate the broker's RSA key pair.
keytool -keystore kafka.keystore.jks -storepass changeit -alias localhost -validity 3650 -genkey -keyalg RSA -dname "CN=localhost"

# Create CSR (certificate signing request) for the broker key.
keytool -keystore kafka.keystore.jks -storepass changeit -alias localhost -certreq -file kafka-signing-request.crt

# CA signs the certificate.
openssl x509 -req -CA root.crt -CAkey root.key -in kafka-signing-request.crt -out kafka-signed.crt -days 3650 -CAcreateserial

# Import the root CA cert and our signed certificate into the keystore.
# The keystore should be private and owned by the server.
keytool -keystore kafka.keystore.jks -storepass changeit -alias CARoot -import -file root.crt -noprompt
keytool -keystore kafka.keystore.jks -storepass changeit -alias localhost -import -file kafka-signed.crt -noprompt

# Truststore for clients: contains only the root CA certificate.
keytool -keystore kafka.truststore.jks -storepass changeit -noprompt -alias CARoot -import -file root.crt

# Move all generated files to the certs directory.
mkdir -p ../certs
mv -- *.crt *.jks *.key ../certs
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/props/consumer.properties:
--------------------------------------------------------------------------------
# Client-side settings for connecting to the broker over SASL_SSL with the
# PLAIN mechanism. The username/password must match a user_<name> entry in the
# broker's jaas.conf.
sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required serviceName="Kafka" username="client" password="client-secret";
security.protocol=SASL_SSL
sasl.mechanism=PLAIN
# Trust store holding the CA certificate that signed the broker certificate
# (generated by generate-certs/create-certificates.sh).
ssl.truststore.location=/certs/kafka.truststore.jks
ssl.truststore.password=changeit
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/props/jaas.conf:
--------------------------------------------------------------------------------
// Broker-side SASL/PLAIN credentials.
// username/password: the identity the broker itself authenticates with.
// user_<name>="<password>" entries list the logins the broker accepts.
KafkaServer {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="admin"
    password="secret"
    user_admin="secret"
    user_client="client-secret";
};
--------------------------------------------------------------------------------
/01-workspace/06-kafka-security-sasl-ssl/props/security.properties:
--------------------------------------------------------------------------------
# Single-node KRaft broker (combined broker + controller) secured with
# SASL (PLAIN) over TLS on the client listener.
process.roles=broker,controller
node.id=1
# Client/inter-broker traffic on 9092 (SASL_SSL); controller quorum on 9093.
listeners=SASL_SSL://:9092,CONTROLLER://:9093
controller.listener.names=CONTROLLER
advertised.listeners=SASL_SSL://localhost:9092
inter.broker.listener.name=SASL_SSL
controller.quorum.voters=1@kafka:9093
listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL
auto.create.topics.enable=false
# Replication factor 1 for internal topics -- only valid for a single broker.
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1

# SASL mechanism is PLAIN everywhere; the credentials come from the JAAS file
# supplied via KAFKA_OPTS in docker-compose.yaml.
sasl.enabled.mechanisms=PLAIN
sasl.mechanism.controller.protocol=PLAIN
sasl.mechanism.inter.broker.protocol=PLAIN

# TLS material generated by generate-certs/create-certificates.sh and mounted
# into the container at /certs.
ssl.keystore.location=/certs/kafka.keystore.jks
ssl.keystore.password=changeit
ssl.truststore.location=/certs/kafka.truststore.jks
ssl.truststore.password=changeit
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/.gitignore:
--------------------------------------------------------------------------------
1 | HELP.md
2 | target/
3 | !.mvn/wrapper/maven-wrapper.jar
4 | !**/src/main/**/target/
5 | !**/src/test/**/target/
6 |
7 | ### STS ###
8 | .apt_generated
9 | .classpath
10 | .factorypath
11 | .project
12 | .settings
13 | .springBeans
14 | .sts4-cache
15 |
16 | ### IntelliJ IDEA ###
17 | .idea
18 | *.iws
19 | *.iml
20 | *.ipr
21 |
22 | ### NetBeans ###
23 | /nbproject/private/
24 | /nbbuild/
25 | /dist/
26 | /nbdist/
27 | /.nb-gradle/
28 | build/
29 | !**/src/main/**/build/
30 | !**/src/test/**/build/
31 |
32 | ### VS Code ###
33 | .vscode/
34 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/.mvn/wrapper/maven-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vinsguru/reactive-kafka-course/6e03bbbe52cf58ea3c023f2c20cc0ba41b25a3ba/02-reactive-kafka-playground/.mvn/wrapper/maven-wrapper.jar
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one
2 | # or more contributor license agreements. See the NOTICE file
3 | # distributed with this work for additional information
4 | # regarding copyright ownership. The ASF licenses this file
5 | # to you under the Apache License, Version 2.0 (the
6 | # "License"); you may not use this file except in compliance
7 | # with the License. You may obtain a copy of the License at
8 | #
9 | # https://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing,
12 | # software distributed under the License is distributed on an
13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 | # KIND, either express or implied. See the License for the
15 | # specific language governing permissions and limitations
16 | # under the License.
17 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.7/apache-maven-3.8.7-bin.zip
18 | wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar
19 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/mvnw:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # ----------------------------------------------------------------------------
3 | # Licensed to the Apache Software Foundation (ASF) under one
4 | # or more contributor license agreements. See the NOTICE file
5 | # distributed with this work for additional information
6 | # regarding copyright ownership. The ASF licenses this file
7 | # to you under the Apache License, Version 2.0 (the
8 | # "License"); you may not use this file except in compliance
9 | # with the License. You may obtain a copy of the License at
10 | #
11 | # https://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing,
14 | # software distributed under the License is distributed on an
15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 | # KIND, either express or implied. See the License for the
17 | # specific language governing permissions and limitations
18 | # under the License.
19 | # ----------------------------------------------------------------------------
20 |
21 | # ----------------------------------------------------------------------------
22 | # Maven Start Up Batch script
23 | #
24 | # Required ENV vars:
25 | # ------------------
26 | # JAVA_HOME - location of a JDK home dir
27 | #
28 | # Optional ENV vars
29 | # -----------------
30 | # M2_HOME - location of maven2's installed home dir
31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven
32 | # e.g. to debug Maven itself, use
33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files
35 | # ----------------------------------------------------------------------------
36 |
37 | if [ -z "$MAVEN_SKIP_RC" ] ; then
38 |
39 | if [ -f /usr/local/etc/mavenrc ] ; then
40 | . /usr/local/etc/mavenrc
41 | fi
42 |
43 | if [ -f /etc/mavenrc ] ; then
44 | . /etc/mavenrc
45 | fi
46 |
47 | if [ -f "$HOME/.mavenrc" ] ; then
48 | . "$HOME/.mavenrc"
49 | fi
50 |
51 | fi
52 |
53 | # OS specific support. $var _must_ be set to either true or false.
54 | cygwin=false;
55 | darwin=false;
56 | mingw=false
57 | case "`uname`" in
58 | CYGWIN*) cygwin=true ;;
59 | MINGW*) mingw=true;;
60 | Darwin*) darwin=true
61 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
62 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
63 | if [ -z "$JAVA_HOME" ]; then
64 | if [ -x "/usr/libexec/java_home" ]; then
65 | export JAVA_HOME="`/usr/libexec/java_home`"
66 | else
67 | export JAVA_HOME="/Library/Java/Home"
68 | fi
69 | fi
70 | ;;
71 | esac
72 |
73 | if [ -z "$JAVA_HOME" ] ; then
74 | if [ -r /etc/gentoo-release ] ; then
75 | JAVA_HOME=`java-config --jre-home`
76 | fi
77 | fi
78 |
79 | if [ -z "$M2_HOME" ] ; then
80 | ## resolve links - $0 may be a link to maven's home
81 | PRG="$0"
82 |
83 | # need this for relative symlinks
84 | while [ -h "$PRG" ] ; do
85 | ls=`ls -ld "$PRG"`
86 | link=`expr "$ls" : '.*-> \(.*\)$'`
87 | if expr "$link" : '/.*' > /dev/null; then
88 | PRG="$link"
89 | else
90 | PRG="`dirname "$PRG"`/$link"
91 | fi
92 | done
93 |
94 | saveddir=`pwd`
95 |
96 | M2_HOME=`dirname "$PRG"`/..
97 |
98 | # make it fully qualified
99 | M2_HOME=`cd "$M2_HOME" && pwd`
100 |
101 | cd "$saveddir"
102 | # echo Using m2 at $M2_HOME
103 | fi
104 |
105 | # For Cygwin, ensure paths are in UNIX format before anything is touched
106 | if $cygwin ; then
107 | [ -n "$M2_HOME" ] &&
108 | M2_HOME=`cygpath --unix "$M2_HOME"`
109 | [ -n "$JAVA_HOME" ] &&
110 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
111 | [ -n "$CLASSPATH" ] &&
112 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
113 | fi
114 |
115 | # For Mingw, ensure paths are in UNIX format before anything is touched
116 | if $mingw ; then
117 | [ -n "$M2_HOME" ] &&
118 | M2_HOME="`(cd "$M2_HOME"; pwd)`"
119 | [ -n "$JAVA_HOME" ] &&
120 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
121 | fi
122 |
123 | if [ -z "$JAVA_HOME" ]; then
124 | javaExecutable="`which javac`"
125 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
126 | # readlink(1) is not available as standard on Solaris 10.
127 | readLink=`which readlink`
128 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
129 | if $darwin ; then
130 | javaHome="`dirname \"$javaExecutable\"`"
131 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
132 | else
133 | javaExecutable="`readlink -f \"$javaExecutable\"`"
134 | fi
135 | javaHome="`dirname \"$javaExecutable\"`"
136 | javaHome=`expr "$javaHome" : '\(.*\)/bin'`
137 | JAVA_HOME="$javaHome"
138 | export JAVA_HOME
139 | fi
140 | fi
141 | fi
142 |
143 | if [ -z "$JAVACMD" ] ; then
144 | if [ -n "$JAVA_HOME" ] ; then
145 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
146 | # IBM's JDK on AIX uses strange locations for the executables
147 | JAVACMD="$JAVA_HOME/jre/sh/java"
148 | else
149 | JAVACMD="$JAVA_HOME/bin/java"
150 | fi
151 | else
152 | JAVACMD="`\\unset -f command; \\command -v java`"
153 | fi
154 | fi
155 |
156 | if [ ! -x "$JAVACMD" ] ; then
157 | echo "Error: JAVA_HOME is not defined correctly." >&2
158 | echo " We cannot execute $JAVACMD" >&2
159 | exit 1
160 | fi
161 |
162 | if [ -z "$JAVA_HOME" ] ; then
163 | echo "Warning: JAVA_HOME environment variable is not set."
164 | fi
165 |
166 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
167 |
168 | # traverses directory structure from process work directory to filesystem root
169 | # first directory with .mvn subdirectory is considered project base directory
170 | find_maven_basedir() {
171 |
172 | if [ -z "$1" ]
173 | then
174 | echo "Path not specified to find_maven_basedir"
175 | return 1
176 | fi
177 |
178 | basedir="$1"
179 | wdir="$1"
180 | while [ "$wdir" != '/' ] ; do
181 | if [ -d "$wdir"/.mvn ] ; then
182 | basedir=$wdir
183 | break
184 | fi
185 | # workaround for JBEAP-8937 (on Solaris 10/Sparc)
186 | if [ -d "${wdir}" ]; then
187 | wdir=`cd "$wdir/.."; pwd`
188 | fi
189 | # end of workaround
190 | done
191 | echo "${basedir}"
192 | }
193 |
194 | # concatenates all lines of a file
195 | concat_lines() {
196 | if [ -f "$1" ]; then
197 | echo "$(tr -s '\n' ' ' < "$1")"
198 | fi
199 | }
200 |
201 | BASE_DIR=`find_maven_basedir "$(pwd)"`
202 | if [ -z "$BASE_DIR" ]; then
203 | exit 1;
204 | fi
205 |
206 | ##########################################################################################
207 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
208 | # This allows using the maven wrapper in projects that prohibit checking in binary data.
209 | ##########################################################################################
210 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
211 | if [ "$MVNW_VERBOSE" = true ]; then
212 | echo "Found .mvn/wrapper/maven-wrapper.jar"
213 | fi
214 | else
215 | if [ "$MVNW_VERBOSE" = true ]; then
216 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
217 | fi
218 | if [ -n "$MVNW_REPOURL" ]; then
219 | jarUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
220 | else
221 | jarUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
222 | fi
223 | while IFS="=" read key value; do
224 | case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
225 | esac
226 | done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
227 | if [ "$MVNW_VERBOSE" = true ]; then
228 | echo "Downloading from: $jarUrl"
229 | fi
230 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
231 | if $cygwin; then
232 | wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
233 | fi
234 |
235 | if command -v wget > /dev/null; then
236 | if [ "$MVNW_VERBOSE" = true ]; then
237 | echo "Found wget ... using wget"
238 | fi
239 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
240 | wget "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath"
241 | else
242 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath"
243 | fi
244 | elif command -v curl > /dev/null; then
245 | if [ "$MVNW_VERBOSE" = true ]; then
246 | echo "Found curl ... using curl"
247 | fi
248 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
249 | curl -o "$wrapperJarPath" "$jarUrl" -f
250 | else
251 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
252 | fi
253 |
254 | else
255 | if [ "$MVNW_VERBOSE" = true ]; then
256 | echo "Falling back to using Java to download"
257 | fi
258 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
259 | # For Cygwin, switch paths to Windows format before running javac
260 | if $cygwin; then
261 | javaClass=`cygpath --path --windows "$javaClass"`
262 | fi
263 | if [ -e "$javaClass" ]; then
264 | if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
265 | if [ "$MVNW_VERBOSE" = true ]; then
266 | echo " - Compiling MavenWrapperDownloader.java ..."
267 | fi
268 | # Compiling the Java class
269 | ("$JAVA_HOME/bin/javac" "$javaClass")
270 | fi
271 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
272 | # Running the downloader
273 | if [ "$MVNW_VERBOSE" = true ]; then
274 | echo " - Running MavenWrapperDownloader.java ..."
275 | fi
276 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
277 | fi
278 | fi
279 | fi
280 | fi
281 | ##########################################################################################
282 | # End of extension
283 | ##########################################################################################
284 |
285 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
286 | if [ "$MVNW_VERBOSE" = true ]; then
287 | echo $MAVEN_PROJECTBASEDIR
288 | fi
289 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
290 |
291 | # For Cygwin, switch paths to Windows format before running java
292 | if $cygwin; then
293 | [ -n "$M2_HOME" ] &&
294 | M2_HOME=`cygpath --path --windows "$M2_HOME"`
295 | [ -n "$JAVA_HOME" ] &&
296 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
297 | [ -n "$CLASSPATH" ] &&
298 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
299 | [ -n "$MAVEN_PROJECTBASEDIR" ] &&
300 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
301 | fi
302 |
303 | # Provide a "standardized" way to retrieve the CLI args that will
304 | # work with both Windows and non-Windows executions.
305 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
306 | export MAVEN_CMD_LINE_ARGS
307 |
308 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
309 |
310 | exec "$JAVACMD" \
311 | $MAVEN_OPTS \
312 | $MAVEN_DEBUG_OPTS \
313 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
314 | "-Dmaven.home=${M2_HOME}" \
315 | "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
316 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
317 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/mvnw.cmd:
--------------------------------------------------------------------------------
1 | @REM ----------------------------------------------------------------------------
2 | @REM Licensed to the Apache Software Foundation (ASF) under one
3 | @REM or more contributor license agreements. See the NOTICE file
4 | @REM distributed with this work for additional information
5 | @REM regarding copyright ownership. The ASF licenses this file
6 | @REM to you under the Apache License, Version 2.0 (the
7 | @REM "License"); you may not use this file except in compliance
8 | @REM with the License. You may obtain a copy of the License at
9 | @REM
10 | @REM https://www.apache.org/licenses/LICENSE-2.0
11 | @REM
12 | @REM Unless required by applicable law or agreed to in writing,
13 | @REM software distributed under the License is distributed on an
14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | @REM KIND, either express or implied. See the License for the
16 | @REM specific language governing permissions and limitations
17 | @REM under the License.
18 | @REM ----------------------------------------------------------------------------
19 |
20 | @REM ----------------------------------------------------------------------------
21 | @REM Maven Start Up Batch script
22 | @REM
23 | @REM Required ENV vars:
24 | @REM JAVA_HOME - location of a JDK home dir
25 | @REM
26 | @REM Optional ENV vars
27 | @REM M2_HOME - location of maven2's installed home dir
28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
31 | @REM e.g. to debug Maven itself, use
32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
34 | @REM ----------------------------------------------------------------------------
35 |
36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
37 | @echo off
38 | @REM set title of command window
39 | title %0
40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
42 |
43 | @REM set %HOME% to equivalent of $HOME
44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
45 |
46 | @REM Execute a user defined script before this one
47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending
49 | if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %*
50 | if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %*
51 | :skipRcPre
52 |
53 | @setlocal
54 |
55 | set ERROR_CODE=0
56 |
57 | @REM To isolate internal variables from possible post scripts, we use another setlocal
58 | @setlocal
59 |
60 | @REM ==== START VALIDATION ====
61 | if not "%JAVA_HOME%" == "" goto OkJHome
62 |
63 | echo.
64 | echo Error: JAVA_HOME not found in your environment. >&2
65 | echo Please set the JAVA_HOME variable in your environment to match the >&2
66 | echo location of your Java installation. >&2
67 | echo.
68 | goto error
69 |
70 | :OkJHome
71 | if exist "%JAVA_HOME%\bin\java.exe" goto init
72 |
73 | echo.
74 | echo Error: JAVA_HOME is set to an invalid directory. >&2
75 | echo JAVA_HOME = "%JAVA_HOME%" >&2
76 | echo Please set the JAVA_HOME variable in your environment to match the >&2
77 | echo location of your Java installation. >&2
78 | echo.
79 | goto error
80 |
81 | @REM ==== END VALIDATION ====
82 |
83 | :init
84 |
85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
86 | @REM Fallback to current working directory if not found.
87 |
88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
90 |
91 | set EXEC_DIR=%CD%
92 | set WDIR=%EXEC_DIR%
93 | :findBaseDir
94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound
95 | cd ..
96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound
97 | set WDIR=%CD%
98 | goto findBaseDir
99 |
100 | :baseDirFound
101 | set MAVEN_PROJECTBASEDIR=%WDIR%
102 | cd "%EXEC_DIR%"
103 | goto endDetectBaseDir
104 |
105 | :baseDirNotFound
106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
107 | cd "%EXEC_DIR%"
108 |
109 | :endDetectBaseDir
110 |
111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
112 |
113 | @setlocal EnableExtensions EnableDelayedExpansion
114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
116 |
117 | :endReadAdditionalConfig
118 |
119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
122 |
123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
124 |
125 | FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
127 | )
128 |
129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data.
131 | if exist %WRAPPER_JAR% (
132 | if "%MVNW_VERBOSE%" == "true" (
133 | echo Found %WRAPPER_JAR%
134 | )
135 | ) else (
136 | if not "%MVNW_REPOURL%" == "" (
137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
138 | )
139 | if "%MVNW_VERBOSE%" == "true" (
140 | echo Couldn't find %WRAPPER_JAR%, downloading it ...
141 | echo Downloading from: %DOWNLOAD_URL%
142 | )
143 |
144 | powershell -Command "&{"^
145 | "$webclient = new-object System.Net.WebClient;"^
146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
148 | "}"^
149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
150 | "}"
151 | if "%MVNW_VERBOSE%" == "true" (
152 | echo Finished downloading %WRAPPER_JAR%
153 | )
154 | )
155 | @REM End of extension
156 |
157 | @REM Provide a "standardized" way to retrieve the CLI args that will
158 | @REM work with both Windows and non-Windows executions.
159 | set MAVEN_CMD_LINE_ARGS=%*
160 |
161 | %MAVEN_JAVA_EXE% ^
162 | %JVM_CONFIG_MAVEN_PROPS% ^
163 | %MAVEN_OPTS% ^
164 | %MAVEN_DEBUG_OPTS% ^
165 | -classpath %WRAPPER_JAR% ^
166 | "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^
167 | %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
168 | if ERRORLEVEL 1 goto error
169 | goto end
170 |
171 | :error
172 | set ERROR_CODE=1
173 |
174 | :end
175 | @endlocal & set ERROR_CODE=%ERROR_CODE%
176 |
177 | if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost
178 | @REM check for post script, once with legacy .bat ending and once with .cmd ending
179 | if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat"
180 | if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd"
181 | :skipRcPost
182 |
183 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
184 | if "%MAVEN_BATCH_PAUSE%"=="on" pause
185 |
186 | if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE%
187 |
188 | cmd /C exit /B %ERROR_CODE%
189 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 | 4.0.0
5 |
6 | org.springframework.boot
7 | spring-boot-starter-parent
8 | 3.5.0
9 |
10 |
11 | com.vinsguru
12 | reactive-kafka-playground
13 | 0.0.1-SNAPSHOT
14 | reactive-kafka-playground
15 | Demo project for Spring Boot
16 |
17 | 21
18 | 1.3.23
19 |
20 |
21 |
22 | org.springframework.boot
23 | spring-boot-starter-webflux
24 |
25 |
26 | org.springframework.kafka
27 | spring-kafka
28 |
29 |
30 | io.projectreactor.kafka
31 | reactor-kafka
32 | ${reactor.kafka.version}
33 |
34 |
35 | org.projectlombok
36 | lombok
37 | true
38 |
39 |
40 | org.springframework.boot
41 | spring-boot-starter-test
42 | test
43 |
44 |
45 | io.projectreactor
46 | reactor-test
47 | test
48 |
49 |
50 | org.springframework.kafka
51 | spring-kafka-test
52 | test
53 |
54 |
55 |
56 |
57 |
58 |
59 | org.springframework.boot
60 | spring-boot-maven-plugin
61 |
62 |
63 |
64 | org.projectlombok
65 | lombok
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/ReactiveKafkaPlaygroundApplication.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 |
// Application entry point. Component scanning is restricted via the
// scanBasePackages expression: "${app}" is a Spring property placeholder
// resolved at startup, so only the selected demo package is scanned.
// NOTE(review): the base package hard-codes "sec17." before the ${app}
// placeholder -- presumably left over from working on sec17; confirm the
// intended scan root before relying on this for other sections.
@SpringBootApplication(scanBasePackages = "com.vinsguru.reactivekafkaplayground.sec17.${app}")
public class ReactiveKafkaPlaygroundApplication {

    public static void main(String[] args) {
        SpringApplication.run(ReactiveKafkaPlaygroundApplication.class, args);
    }

}
14 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec01/Lec01KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec01;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 | import reactor.kafka.receiver.ReceiverRecord;
10 |
11 | import java.util.List;
12 | import java.util.Map;
13 |
14 | /*
15 | goal: to demo a simple kafka consumer using reactor kafka
16 | producer ----> kafka broker <----------> consumer
17 |
18 | topic: order-events
19 | partitions: 1
20 | log-end-offset: 15
21 | current-offset: 15
22 |
23 | */
24 | public class Lec01KafkaConsumer {
25 |
26 | private static final Logger log = LoggerFactory.getLogger(Lec01KafkaConsumer.class);
27 |
28 | public static void main(String[] args) {
29 |
30 | var consumerConfig = Map.of(
31 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
32 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
33 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
34 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123",
35 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
36 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
37 | );
38 |
39 | var options = ReceiverOptions.create(consumerConfig)
40 | .subscription(List.of("order-events"));
41 |
42 | KafkaReceiver.create(options)
43 | .receive()
44 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
45 | .doOnNext(r -> r.receiverOffset().acknowledge())
46 | .subscribe();
47 |
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec01/Lec02KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec01;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 |
10 | import java.util.List;
11 | import java.util.Map;
12 | import java.util.regex.Pattern;
13 |
14 | /*
15 | goal: to consume from multiple topics
16 | producer ----> kafka broker <----------> consumer
17 | */
18 | public class Lec02KafkaConsumer {
19 |
20 | private static final Logger log = LoggerFactory.getLogger(Lec02KafkaConsumer.class);
21 |
22 | public static void main(String[] args) {
23 |
24 | var consumerConfig = Map.of(
25 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
26 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
27 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
28 | ConsumerConfig.GROUP_ID_CONFIG, "inventory-service-group",
29 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
30 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
31 | );
32 |
33 | var options = ReceiverOptions.create(consumerConfig)
34 | .subscription(Pattern.compile("order.*"));
35 |
36 | KafkaReceiver.create(options)
37 | .receive()
38 | .doOnNext(r -> log.info("topic: {}, key: {}, value: {}", r.topic(), r.key(), r.value()))
39 | .doOnNext(r -> r.receiverOffset().acknowledge())
40 | .subscribe();
41 |
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec02/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec02;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.kafka.sender.KafkaSender;
10 | import reactor.kafka.sender.SenderOptions;
11 | import reactor.kafka.sender.SenderRecord;
12 |
13 | import java.time.Duration;
14 | import java.util.Map;
15 |
16 | /*
17 | goal: to demo a simple kafka producer using reactor-kafka
18 | */
19 | public class KafkaProducer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var producerConfig = Map.of(
26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
29 | );
30 |
31 | var options = SenderOptions.create(producerConfig);
32 |
33 | var flux = Flux.interval(Duration.ofMillis(100))
34 | .take(100)
35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i))
36 | .map(pr -> SenderRecord.create(pr, pr.key()));
37 |
38 | var sender = KafkaSender.create(options);
39 | sender.send(flux)
40 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
41 | .doOnComplete(sender::close)
42 | .subscribe();
43 | }
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec03/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec03;
2 |
3 | import org.apache.kafka.clients.CommonClientConfigs;
4 | import org.apache.kafka.clients.consumer.ConsumerConfig;
5 | import org.apache.kafka.common.config.SaslConfigs;
6 | import org.apache.kafka.common.serialization.StringDeserializer;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 | import reactor.kafka.receiver.KafkaReceiver;
10 | import reactor.kafka.receiver.ReceiverOptions;
11 |
12 | import java.util.List;
13 | import java.util.Map;
14 |
15 | /*
16 | goal: to produce and consume 1 million events
17 | producer ----> kafka broker <----------> consumer
18 | */
19 | public class KafkaConsumer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var consumerConfig = Map.of(
26 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
27 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
28 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
29 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123",
30 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
31 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
32 | );
33 |
34 | var options = ReceiverOptions.create(consumerConfig)
35 | .subscription(List.of("order-events"));
36 |
37 | KafkaReceiver.create(options)
38 | .receive()
39 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
40 | .doOnNext(r -> r.receiverOffset().acknowledge())
41 | .subscribe();
42 |
43 | }
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec03/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec03;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.kafka.sender.KafkaSender;
10 | import reactor.kafka.sender.SenderOptions;
11 | import reactor.kafka.sender.SenderRecord;
12 |
13 | import java.time.Duration;
14 | import java.util.Map;
15 |
16 | /*
17 | goal: to demo back pressure using max in flight for a reactive producer.
18 | producer could be a confusing term & it depends on the context.
19 | */
20 | public class KafkaProducer {
21 |
22 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
23 |
24 | public static void main(String[] args) {
25 |
26 | var producerConfig = Map.of(
27 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
28 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
29 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
30 | );
31 |
32 | var options = SenderOptions.create(producerConfig).maxInFlight(10_000);
33 |
34 | var flux = Flux.range(1, 1_000_000)
35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i))
36 | .map(pr -> SenderRecord.create(pr, pr.key()));
37 |
38 | var start = System.currentTimeMillis();
39 | var sender = KafkaSender.create(options);
40 | sender.send(flux)
41 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
42 | .doOnComplete(() -> {
43 | log.info("Total time taken: {} ms", (System.currentTimeMillis() - start));
44 | sender.close();
45 | })
46 | .subscribe();
47 | }
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec04/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec04;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 |
10 | import java.util.List;
11 | import java.util.Map;
12 |
13 | /*
14 | goal: to consume header info
15 | producer ----> kafka broker <----------> consumer
16 | */
17 | public class KafkaConsumer {
18 |
19 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
20 |
21 | public static void main(String[] args) {
22 |
23 | var consumerConfig = Map.of(
24 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
25 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
26 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
27 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123",
28 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
29 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
30 | );
31 |
32 | var options = ReceiverOptions.create(consumerConfig)
33 | .subscription(List.of("order-events"));
34 |
35 | KafkaReceiver.create(options)
36 | .receive()
37 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
38 | .doOnNext(r -> r.headers().forEach(h -> log.info("header key: {}, value: {}", h.key(), new String(h.value()))))
39 | .doOnNext(r -> r.receiverOffset().acknowledge())
40 | .subscribe();
41 |
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec04/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec04;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.header.internals.RecordHeaders;
6 | import org.apache.kafka.common.serialization.StringSerializer;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 | import reactor.core.publisher.Flux;
10 | import reactor.kafka.sender.KafkaSender;
11 | import reactor.kafka.sender.SenderOptions;
12 | import reactor.kafka.sender.SenderRecord;
13 |
14 | import java.util.Map;
15 |
16 | /*
17 | goal: to produce records along with headers
18 | */
19 | public class KafkaProducer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var producerConfig = Map.of(
26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
29 | );
30 |
31 | var options = SenderOptions.create(producerConfig);
32 |
33 | var flux = Flux.range(1, 10).map(KafkaProducer::createSenderRecord);
34 |
35 | var sender = KafkaSender.create(options);
36 | sender.send(flux)
37 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
38 | .doOnComplete(sender::close)
39 | .subscribe();
40 | }
41 |
42 | private static SenderRecord createSenderRecord(Integer i){
43 | var headers = new RecordHeaders();
44 | headers.add("client-id", "some-client".getBytes());
45 | headers.add("tracing-id", "123".getBytes());
46 | var pr = new ProducerRecord<>("order-events", null, i.toString(), "order-"+i, headers);
47 | return SenderRecord.create(pr, pr.key());
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec05/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec05;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 |
10 | import java.util.List;
11 | import java.util.Map;
12 |
13 | /*
14 | goal: to demo partition re-balancing. Ensure that topic has multiple partitions
15 | */
16 | public class KafkaConsumer {
17 |
18 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
19 |
20 | public static void start(String instanceId) {
21 |
22 | var consumerConfig = Map.of(
23 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
24 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
25 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
26 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123",
27 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
28 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, instanceId
29 | );
30 |
31 | var options = ReceiverOptions.create(consumerConfig)
32 | .subscription(List.of("order-events"));
33 |
34 | KafkaReceiver.create(options)
35 | .receive()
36 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
37 | .doOnNext(r -> r.receiverOffset().acknowledge())
38 | .subscribe();
39 |
40 | }
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec05/KafkaConsumerGroup.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec05;
2 |
3 | /*
4 | Ensure that topic has multiple partitions
5 | */
public class KafkaConsumerGroup {

    // Each nested class is a separate runnable entry point. Launching all
    // three mains starts three members of the same consumer group, so the
    // topic's partitions get distributed among them (hence the reminder above
    // that the topic must have multiple partitions).

    private static class Consumer1{
        public static void main(String[] args) {
            KafkaConsumer.start("1"); // group.instance.id = "1"
        }
    }

    private static class Consumer2{
        public static void main(String[] args) {
            KafkaConsumer.start("2"); // group.instance.id = "2"
        }
    }

    private static class Consumer3{
        public static void main(String[] args) {
            KafkaConsumer.start("3"); // group.instance.id = "3"
        }
    }

}
27 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec05/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec05;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.kafka.sender.KafkaSender;
10 | import reactor.kafka.sender.SenderOptions;
11 | import reactor.kafka.sender.SenderRecord;
12 |
13 | import java.time.Duration;
14 | import java.util.Map;
15 |
16 | /*
17 | goal: to demo partition re-balancing. Ensure that topic has multiple partitions
18 | */
19 | public class KafkaProducer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var producerConfig = Map.of(
26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
29 | );
30 |
31 | var options = SenderOptions.create(producerConfig);
32 |
33 | var flux = Flux.interval(Duration.ofMillis(50))
34 | .take(10_000)
35 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i))
36 | .map(pr -> SenderRecord.create(pr, pr.key()));
37 |
38 | var sender = KafkaSender.create(options);
39 | sender.send(flux)
40 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
41 | .doOnComplete(sender::close)
42 | .subscribe();
43 | }
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec06/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec06;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.CooperativeStickyAssignor;
5 | import org.apache.kafka.clients.consumer.RangeAssignor;
6 | import org.apache.kafka.common.serialization.StringDeserializer;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 | import reactor.kafka.receiver.KafkaReceiver;
10 | import reactor.kafka.receiver.ReceiverOptions;
11 |
12 | import java.util.List;
13 | import java.util.Map;
14 |
15 | /*
16 | goal: to demo partition re-balancing strategy. Ensure that topic has multiple partitions
17 | */
18 | public class KafkaConsumer {
19 |
20 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
21 |
22 | public static void start(String instanceId) {
23 |
24 | var consumerConfig = Map.of(
25 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
26 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
27 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
28 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123",
29 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
30 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, instanceId//,
31 | // ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, RangeAssignorCooperativeStickyAssignor.class.getName()
32 | );
33 |
34 | var options = ReceiverOptions.create(consumerConfig)
35 | .addAssignListener(c -> {
36 | c.forEach(r -> log.info("assigned {}", r.position()));
37 | c.stream()
38 | .filter(r -> r.topicPartition().partition() == 2)
39 | .findFirst()
40 | .ifPresent(r -> r.seek(r.position() - 2)); // seek value can not be -ve. ensure before setting
41 | })
42 | .subscription(List.of("order-events"));
43 |
44 | KafkaReceiver.create(options)
45 | .receive()
46 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
47 | .doOnNext(r -> r.receiverOffset().acknowledge())
48 | .subscribe();
49 |
50 | }
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec06/KafkaConsumerGroup.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec06;
2 |
3 | /*
4 | Ensure that topic has multiple partitions
5 | */
public class KafkaConsumerGroup {

    /*
        Observed partition distribution with RangeAssignor (3 partitions, 3 members):
        0,1,2
        1,2,3
    */

    // Each nested class is a separate runnable entry point; running all three
    // mains starts three members of the same consumer group. The trailing
    // comments note which partition each instance ended up with in the demo run.

    private static class Consumer1{
        public static void main(String[] args) {
            KafkaConsumer.start("1");
            //0
        }
    }

    private static class Consumer2{
        public static void main(String[] args) {
            KafkaConsumer.start("2");
            // 2
        }
    }

    private static class Consumer3{
        public static void main(String[] args) {
            KafkaConsumer.start("3");
            // 1
        }
    }

}
36 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec07/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec07;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 |
10 | import java.util.List;
11 | import java.util.Map;
12 |
13 | /*
14 | goal: to seek offset
15 | */
16 | public class KafkaConsumer {
17 |
18 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
19 |
20 | public static void main(String[] args) {
21 |
22 | var consumerConfig = Map.of(
23 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
24 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
25 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
26 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group-123",
27 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
28 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
29 | );
30 |
31 | var options = ReceiverOptions.create(consumerConfig)
32 | .addAssignListener(c -> {
33 | c.forEach(r -> log.info("assigned {}", r.position()));
34 | c.stream()
35 | .filter(r -> r.topicPartition().partition() == 2)
36 | .findFirst()
37 | .ifPresent(r -> r.seek(r.position() - 2)); // seek value can not be -ve. ensure before setting
38 | })
39 | .subscription(List.of("order-events"));
40 |
41 | KafkaReceiver.create(options)
42 | .receive()
43 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
44 | .doOnNext(r -> r.receiverOffset().acknowledge())
45 | .subscribe();
46 |
47 | }
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec08/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec08;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 |
10 | import java.util.List;
11 | import java.util.Map;
12 |
13 | /*
14 | goal: Cluster demo - to produce and consume events with 3 replicas
15 | */
16 | public class KafkaConsumer {
17 |
18 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
19 |
20 | public static void main(String[] args) {
21 |
22 | var consumerConfig = Map.of(
23 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8081",
24 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
25 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
26 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group",
27 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
28 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
29 | );
30 |
31 | var options = ReceiverOptions.create(consumerConfig)
32 | .subscription(List.of("order-events"));
33 |
34 | KafkaReceiver.create(options)
35 | .receive()
36 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
37 | .doOnNext(r -> r.receiverOffset().acknowledge())
38 | .subscribe();
39 |
40 | }
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec08/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec08;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.kafka.sender.KafkaSender;
10 | import reactor.kafka.sender.SenderOptions;
11 | import reactor.kafka.sender.SenderRecord;
12 |
13 | import java.time.Duration;
14 | import java.util.Map;
15 |
16 | /*
17 | goal: Cluster demo - to produce and consume events with 3 replicas
18 | */
19 | public class KafkaProducer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var producerConfig = Map.of(
26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8081",
27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
29 | );
30 |
31 | var options = SenderOptions.create(producerConfig);
32 | var flux = Flux.interval(Duration.ofMillis(50))
33 | .take(10_000)
34 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i))
35 | .map(pr -> SenderRecord.create(pr, pr.key()));
36 |
37 | var sender = KafkaSender.create(options);
38 | sender.send(flux)
39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
40 | .doOnComplete(sender::close)
41 | .subscribe();
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec09/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec09;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.apache.kafka.common.serialization.StringDeserializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.core.publisher.Mono;
10 | import reactor.kafka.receiver.KafkaReceiver;
11 | import reactor.kafka.receiver.ReceiverOptions;
12 |
13 | import java.time.Duration;
14 | import java.util.List;
15 | import java.util.Map;
16 |
17 | /*
18 | goal: receiveAutoAck with concatMap
19 | */
20 | public class KafkaConsumer {
21 |
22 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
23 |
24 | public static void main(String[] args) {
25 |
26 | var consumerConfig = Map.of(
27 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
28 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
29 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
30 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group",
31 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
32 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1",
33 | ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 3
34 | );
35 |
36 | var options = ReceiverOptions.create(consumerConfig)
37 | .commitInterval(Duration.ofSeconds(1))
38 | .subscription(List.of("order-events"));
39 |
40 | KafkaReceiver.create(options)
41 | .receiveAutoAck()
42 | .log()
43 | .concatMap(KafkaConsumer::batchProcess)
44 | .subscribe();
45 |
46 | }
47 |
48 | private static Mono batchProcess(Flux> flux){
49 | return flux
50 | .doFirst(() -> log.info("----------------"))
51 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
52 | .then(Mono.delay(Duration.ofSeconds(1)))
53 | .then();
54 | }
55 |
56 | }
57 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec09/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec09;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.kafka.sender.KafkaSender;
10 | import reactor.kafka.sender.SenderOptions;
11 | import reactor.kafka.sender.SenderRecord;
12 |
13 | import java.time.Duration;
14 | import java.util.Map;
15 |
16 | /*
17 | goal: receiveAutoAck with concatMap
18 | */
19 | public class KafkaProducer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var producerConfig = Map.of(
26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
29 | );
30 |
31 | var options = SenderOptions.create(producerConfig);
32 |
33 | var flux = Flux.range(1, 100)
34 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i))
35 | .map(pr -> SenderRecord.create(pr, pr.key()));
36 |
37 | var sender = KafkaSender.create(options);
38 | sender.send(flux)
39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
40 | .doOnComplete(sender::close)
41 | .subscribe();
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec10/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec10;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.apache.kafka.common.serialization.StringDeserializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.core.publisher.Mono;
10 | import reactor.core.scheduler.Schedulers;
11 | import reactor.kafka.receiver.KafkaReceiver;
12 | import reactor.kafka.receiver.ReceiverOptions;
13 |
14 | import java.time.Duration;
15 | import java.util.List;
16 | import java.util.Map;
17 |
18 | /*
19 | goal: receiveAutoAck with flatMap - parallel
20 | */
21 | public class KafkaConsumer {
22 |
23 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
24 |
25 | public static void main(String[] args) {
26 |
27 | var consumerConfig = Map.of(
28 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
29 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
30 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
31 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group",
32 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
33 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1",
34 | ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 3
35 | );
36 |
37 | var options = ReceiverOptions.create(consumerConfig)
38 | .commitInterval(Duration.ofSeconds(1))
39 | .subscription(List.of("order-events"));
40 |
41 | KafkaReceiver.create(options)
42 | .receiveAutoAck()
43 | .log()
44 | .flatMap(KafkaConsumer::batchProcess)
45 | .subscribe();
46 |
47 | }
48 |
49 | private static Mono batchProcess(Flux> flux){
50 | return flux
51 | .publishOn(Schedulers.boundedElastic()) // just for demo
52 | .doFirst(() -> log.info("----------------"))
53 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
54 | .then(Mono.delay(Duration.ofSeconds(1)))
55 | .then();
56 | }
57 |
58 | }
59 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec10/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec10;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.kafka.sender.KafkaSender;
10 | import reactor.kafka.sender.SenderOptions;
11 | import reactor.kafka.sender.SenderRecord;
12 |
13 | import java.time.Duration;
14 | import java.util.Map;
15 |
16 | /*
17 | goal: receiveAutoAck with flatMap - parallel
18 | */
19 | public class KafkaProducer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var producerConfig = Map.of(
26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
29 | );
30 |
31 | var options = SenderOptions.create(producerConfig);
32 |
33 | var flux = Flux.range(1, 100)
34 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i))
35 | .map(pr -> SenderRecord.create(pr, pr.key()));
36 |
37 | var sender = KafkaSender.create(options);
38 | sender.send(flux)
39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
40 | .doOnComplete(sender::close)
41 | .subscribe();
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec11/KafkaConsumer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec11;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.clients.consumer.ConsumerRecord;
5 | import org.apache.kafka.common.serialization.StringDeserializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.core.publisher.GroupedFlux;
10 | import reactor.core.publisher.Mono;
11 | import reactor.core.scheduler.Schedulers;
12 | import reactor.kafka.receiver.KafkaReceiver;
13 | import reactor.kafka.receiver.ReceiverOptions;
14 | import reactor.kafka.receiver.ReceiverRecord;
15 |
16 | import java.time.Duration;
17 | import java.util.List;
18 | import java.util.Map;
19 |
20 | /*
21 | goal: flatMap - parallel using groupBy
22 | */
23 | public class KafkaConsumer {
24 |
25 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
26 |
27 | public static void main(String[] args) {
28 |
29 | var consumerConfig = Map.of(
30 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
31 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
32 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
33 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group",
34 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
35 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
36 | );
37 |
38 | var options = ReceiverOptions.create(consumerConfig)
39 | .commitInterval(Duration.ofSeconds(1))
40 | .subscription(List.of("order-events"));
41 |
42 | KafkaReceiver.create(options)
43 | .receive()
44 | .groupBy(r -> Integer.parseInt(r.key()) % 5) // just for demo
45 | // we can also group by r.partition()
46 | // r.key().hashCode() % 5
47 | .flatMap(KafkaConsumer::batchProcess)
48 | .subscribe();
49 |
50 | }
51 |
52 | private static Mono batchProcess(GroupedFlux> flux){
53 | return flux
54 | .publishOn(Schedulers.boundedElastic()) // just for demo
55 | .doFirst(() -> log.info("----------------mod: {}", flux.key()))
56 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value()))
57 | .doOnNext(r -> r.receiverOffset().acknowledge())
58 | .then();
59 | }
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec11/KafkaProducer.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec11;
2 |
3 | import org.apache.kafka.clients.producer.ProducerConfig;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.slf4j.Logger;
7 | import org.slf4j.LoggerFactory;
8 | import reactor.core.publisher.Flux;
9 | import reactor.kafka.sender.KafkaSender;
10 | import reactor.kafka.sender.SenderOptions;
11 | import reactor.kafka.sender.SenderRecord;
12 |
13 | import java.time.Duration;
14 | import java.util.Map;
15 |
16 | /*
17 | goal: flatMap - parallel using groupBy
18 | */
19 | public class KafkaProducer {
20 |
21 | private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
22 |
23 | public static void main(String[] args) {
24 |
25 | var producerConfig = Map.of(
26 | ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
27 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
28 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
29 | );
30 |
31 | var options = SenderOptions.create(producerConfig);
32 |
33 | var flux = Flux.range(1, 100)
34 | .map(i -> new ProducerRecord<>("order-events", i.toString(), "order-"+i))
35 | .map(pr -> SenderRecord.create(pr, pr.key()));
36 |
37 | var sender = KafkaSender.create(options);
38 | sender.send(flux)
39 | .doOnNext(r -> log.info("correlation id: {}", r.correlationMetadata()))
40 | .doOnComplete(sender::close)
41 | .subscribe();
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec12/KafkaConsumerV1.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec12;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.kafka.receiver.KafkaReceiver;
8 | import reactor.kafka.receiver.ReceiverOptions;
9 | import reactor.util.retry.Retry;
10 |
11 | import java.time.Duration;
12 | import java.util.List;
13 | import java.util.Map;
14 |
15 | /*
16 | error handling demo: a simple processing issue
17 | */
18 | public class KafkaConsumerV1 {
19 |
20 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumerV1.class);
21 |
22 | public static void main(String[] args) {
23 |
24 | var consumerConfig = Map.of(
25 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
26 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
27 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
28 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group",
29 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
30 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
31 | );
32 |
33 | var options = ReceiverOptions.create(consumerConfig)
34 | .subscription(List.of("order-events"));
35 |
36 | KafkaReceiver.create(options)
37 | .receive()
38 | .log()
39 | .doOnNext(r -> log.info("key: {}, value: {}", r.key(), r.value().toString().toCharArray()[15])) // just for demo
40 | .doOnError(ex -> log.error(ex.getMessage()))
41 | .doOnNext(r -> r.receiverOffset().acknowledge())
42 | .retryWhen(Retry.fixedDelay(3, Duration.ofSeconds(1)))
43 | .blockLast(); // just for demo
44 |
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/02-reactive-kafka-playground/src/main/java/com/vinsguru/reactivekafkaplayground/sec12/KafkaConsumerV2.java:
--------------------------------------------------------------------------------
1 | package com.vinsguru.reactivekafkaplayground.sec12;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerConfig;
4 | import org.apache.kafka.common.serialization.StringDeserializer;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 | import reactor.core.publisher.Mono;
8 | import reactor.kafka.receiver.KafkaReceiver;
9 | import reactor.kafka.receiver.ReceiverOptions;
10 | import reactor.kafka.receiver.ReceiverRecord;
11 | import reactor.util.retry.Retry;
12 |
13 | import java.time.Duration;
14 | import java.util.List;
15 | import java.util.Map;
16 | import java.util.concurrent.ThreadLocalRandom;
17 |
18 | /*
19 | error handling demo: processing pipeline
20 | */
21 | public class KafkaConsumerV2 {
22 |
23 | private static final Logger log = LoggerFactory.getLogger(KafkaConsumerV2.class);
24 |
25 | public static void main(String[] args) {
26 |
27 | var consumerConfig = Map.of(
28 | ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
29 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
30 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
31 | ConsumerConfig.GROUP_ID_CONFIG, "demo-group",
32 | ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest",
33 | ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, "1"
34 | );
35 |
36 | var options = ReceiverOptions.create(consumerConfig)
37 | .subscription(List.of("order-events"));
38 |
39 | KafkaReceiver.create(options)
40 | .receive()
41 | .log()
42 | .concatMap(KafkaConsumerV2::process)
43 | .subscribe();
44 |
45 | }
46 |
47 | private static Mono process(ReceiverRecord