├── .gitignore ├── README.md ├── docker-compose.yml ├── explore-kafka-streams-start.zip ├── explore-kafka-streams-using-spring-boot-start.zip ├── explore-kafka-streams-using-spring-boot └── orders-management-streams │ ├── .gitignore │ ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties │ ├── gradlew │ ├── gradlew.bat │ ├── orders-domain │ ├── build.gradle │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── learnkafkastreams │ │ └── domain │ │ ├── Address.java │ │ ├── AllOrdersCountPerStore.java │ │ ├── AllOrdersCountPerStoreDTO.java │ │ ├── HostInfoDTO.java │ │ ├── Order.java │ │ ├── OrderCountPerStore.java │ │ ├── OrderCountPerStoreDTO.java │ │ ├── OrderLineItem.java │ │ ├── OrderRevenueDTO.java │ │ ├── OrderType.java │ │ ├── OrdersCountPerStoreByWindowsDTO.java │ │ ├── OrdersRevenuePerStoreByWindowsDTO.java │ │ ├── Revenue.java │ │ ├── Store.java │ │ ├── TotalRevenue.java │ │ └── TotalRevenueWithAddress.java │ ├── orders-streams-app │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── learnkafkastreams │ │ │ │ ├── OrdersManagementStreamsApplication.java │ │ │ │ ├── config │ │ │ │ └── OrdersStreamsConfiguration.java │ │ │ │ ├── exceptionhandler │ │ │ │ ├── StreamsDeserializationErrorHandler.java │ │ │ │ ├── StreamsProcessorCustomErrorHandler.java │ │ │ │ └── StreamsSerializationExceptionHandler.java │ │ │ │ ├── producer │ │ │ │ ├── OrdersMockDataProducer.java │ │ │ │ ├── ProducerUtil.java │ │ │ │ └── StoresMockDataProducer.java │ │ │ │ ├── service │ │ │ │ ├── OrderService.java │ │ │ │ └── OrderStoreService.java │ │ │ │ ├── topology │ │ │ │ └── OrdersTopology.java │ │ │ │ └── util │ │ │ │ └── OrderTimeStampExtractor.java │ │ └── resources │ │ │ ├── application.yml │ │ │ ├── curl_commands.txt │ │ │ ├── data │ │ │ ├── 01-orders-count.json │ │ │ ├── 02-orders-count-by-locationid.json │ │ │ ├── 03-count-with-all-orders-types.json │ │ │ ├── 04-orders-revenue.json │ │ │ ├── 05-orders-count-windows.json │ │ │ └── 06-orders-revenue-windows.json │ │ │ ├── logback.xml │ │ │ ├── orders_count_by_all_types.json │ │ │ └── orders_count_by_type.json │ │ └── test │ │ └── java │ │ └── com │ │ └── learnkafkastreams │ │ ├── controller │ │ └── OrdersControllerTest.java │ │ └── topology │ │ ├── OrdersTopologyIntegrationTest.java │ │ └── OrdersTopologyTest.java │ └── settings.gradle └── explore-kafka-streams ├── .gitignore ├── advanced-streams ├── build.gradle └── src │ └── main │ ├── java │ └── com │ │ └── learnkafkastreams │ │ ├── domain │ │ ├── Alphabet.java │ │ └── AlphabetWordAggregate.java │ │ ├── launcher │ │ ├── AggregatingStreamPlayGroundApp.java │ │ ├── JoiningStreamPlayGroundApp.java │ │ └── WindowsStreamPlaygroundApp.java │ │ ├── producer │ │ ├── AggregateProducer.java │ │ ├── JoinsMockDataProducer.java │ │ ├── ProducerUtil.java │ │ └── WindowsMockDataProduer.java │ │ ├── serdes │ │ ├── JsonDeserializer.java │ │ ├── JsonSerializer.java │ │ └── SerdesFactory.java │ │ └── topology │ │ ├── ExploreAggregateOperatorsTopology.java │ │ ├── ExploreJoinsOperatorsTopology.java │ │ └── ExploreWindowTopology.java │ └── resources │ └── logback.xml ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── greeting-streams ├── build.gradle └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── learnkafkastreams │ │ │ ├── domain │ │ │ └── Greeting.java │ │ │ ├── launcher │ │ │ ├── GreetingsStreamApp.java │ │ │ └── KTableStreamApp.java │ │ │ └── producer │ │ │ ├── GreetingMockDataProducer.java │ │ │ 
├── ProducerUtil.java │ │ │ └── WordsProducer.java │ └── resources │ │ └── logback.xml │ └── test │ └── java │ └── com │ └── learnkafkastreams │ ├── domain │ └── GreetingTest.java │ └── topology │ ├── ExploreKTableTopologyTest.java │ └── GreetingsTopologyTest.java ├── orders-kafka-streams-app ├── build.gradle └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── learnkafkastreams │ │ │ ├── OrdersKafkaStreamApp.java │ │ │ ├── domain │ │ │ ├── Address.java │ │ │ ├── Order.java │ │ │ ├── OrderLineItem.java │ │ │ ├── OrderType.java │ │ │ ├── Revenue.java │ │ │ ├── Store.java │ │ │ ├── TotalRevenue.java │ │ │ └── TotalRevenueWithAddress.java │ │ │ ├── producer │ │ │ ├── OrdersMockDataProducer.java │ │ │ ├── ProducerUtil.java │ │ │ └── StoresMockDataProducer.java │ │ │ └── topology │ │ │ └── OrdersTopology.java │ └── resources │ │ └── logback.xml │ └── test │ ├── java │ └── com │ │ └── learnkafkastreams │ │ ├── domain │ │ └── OrderTest.java │ │ └── topology │ │ └── OrdersTopologyTest.java │ └── resources │ ├── order-general.json │ └── order-restaurant.json └── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | HELP.md
2 | .gradle
3 | build/
4 | !gradle/wrapper/gradle-wrapper.jar
5 | !**/src/main/**
6 | !**/src/test/**
7 |
8 | ### STS ###
9 | .apt_generated
10 | .classpath
11 | .factorypath
12 | .project
13 | .settings
14 | .springBeans
15 | .sts4-cache
16 |
17 | ### IntelliJ IDEA ###
18 | .idea
19 | *.iws
20 | *.iml
21 | *.ipr
22 | out/
23 |
24 | ### NetBeans ###
25 | /nbproject/private/
26 | /nbbuild/
27 | /dist/
28 | /nbdist/
29 | /.nb-gradle/
30 |
31 | ### VS Code ###
32 | .vscode/
33 |
34 | .DS_Store
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kafka Streams API for Developers
2 |
3 |
4 | ## Set up Kafka Environment using Docker
5 |
6 | - This should set up the Zookeeper and Kafka Broker in your local environment
7 |
8 | ```
9 | docker-compose up
10 | ```
11 |
12 | ### Verify the Local Kafka Environment
13 |
14 | - Run the below command
15 |
16 | ```
17 | docker ps
18 | ```
19 |
20 | - You should see the below containers up and running locally
21 |
22 | ```
23 | CONTAINER ID   IMAGE                             COMMAND                  CREATED          STATUS          PORTS                                            NAMES
24 | fb28f7f91b0e   confluentinc/cp-server:7.1.0      "/etc/confluent/dock…"   50 seconds ago   Up 49 seconds   0.0.0.0:9092->9092/tcp, 0.0.0.0:9101->9101/tcp   broker
25 | d00a0f845a45   confluentinc/cp-zookeeper:7.1.0   "/etc/confluent/dock…"   50 seconds ago   Up 49 seconds   2888/tcp, 0.0.0.0:2181->2181/tcp, 3888/tcp       zookeeper
26 | ```
27 |
28 | ### Interacting with Kafka
29 |
30 | #### Produce Messages
31 |
32 | - This command should take care of logging in to the Kafka container.
33 |
34 | ```
35 | docker exec -it broker bash
36 | ```
37 |
38 | - Command to produce messages into the Kafka topic.
39 |
40 | ```
41 | kafka-console-producer --broker-list localhost:9092 --topic greetings
42 | ```
43 |
44 | - Publish to **greetings** topic with key and value
45 |
46 | ```
47 | kafka-console-producer --broker-list localhost:9092 --topic greetings --property "key.separator=-" --property "parse.key=true"
48 |
49 | ```
50 |
51 | - Publish to **greetings_spanish** topic with key and value
52 |
53 | ```
54 | kafka-console-producer --broker-list localhost:9092 --topic greetings_spanish --property "key.separator=-" --property "parse.key=true"
55 | ```
56 |
57 | #### Consume Messages
58 |
59 | - This command should take care of logging in to the Kafka container.
60 |
61 | ```
62 | docker exec -it broker bash
63 | ```
64 | - Command to consume messages from the Kafka topic.
65 |
66 | ```
67 | kafka-console-consumer --bootstrap-server localhost:9092 --topic greetings_uppercase
68 | ```
69 |
70 | - Command to consume with Key
71 |
72 | ```
73 | kafka-console-consumer --bootstrap-server localhost:9092 --topic greetings_uppercase --from-beginning --property "key.separator= - " --property "print.key=true"
74 | ```
75 |
76 | - Other Helpful Kafka Consumer commands
77 |
78 | ```
79 | kafka-console-consumer --bootstrap-server localhost:9092 --topic general_orders
80 | ```
81 |
82 | ```
83 | kafka-console-consumer --bootstrap-server localhost:9092 --topic restaurant_orders
84 | ```
85 |
86 | ```
87 | kafka-console-consumer --bootstrap-server localhost:9092 --topic ktable-words-store-changelog --from-beginning
88 | ```
89 |
90 | - Command to read from the Internal Aggregate topic
91 |
92 | ```
93 | kafka-console-consumer --bootstrap-server localhost:9092 --topic aggregate-KSTREAM-AGGREGATE-STATE-STORE-0000000003-changelog --from-beginning --property "key.separator= - " --property "print.key=true"
94 | ```
95 |
96 |
97 | ### List Topics
98 |
99 | - This command should take care of logging in to the Kafka container.
100 |
101 | ```
102 | docker exec -it broker bash
103 | ```
104 |
105 | - Command to list the topics.
106 |
107 | ```
108 | kafka-topics --bootstrap-server localhost:9092 --list
109 | ```
110 |
111 |
112 | ## KafkaStreams using SpringBoot
113 |
114 | ### How does AutoConfiguration work?
115 |
116 | - Adding the **@EnableKafkaStreams** annotation imports the **KafkaStreamsDefaultConfiguration** class.
117 | - **KafkaStreamsAnnotationDrivenConfiguration** supplies the **KafkaStreamsConfiguration** bean.
118 | - This class takes care of building the **StreamsBuilderFactoryBean**, which is responsible for supplying the **StreamsBuilder** instance.
119 | - The **StreamsBuilderFactoryBean** also takes care of managing the lifecycle of the **KafkaStreams** app, as the sketch below illustrates.
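
- As a minimal sketch of how these pieces fit together (illustrative only, not a class from this repo), a topology bean just accepts the auto-configured **StreamsBuilder**; the `greetings`/`greetings_uppercase` topic names reuse the console commands above:

```
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class GreetingsTopology {

    // The StreamsBuilder is supplied by the StreamsBuilderFactoryBean that the
    // auto-configuration creates; the KafkaStreams lifecycle (start/close) is
    // managed for us, so the component only has to describe the topology.
    @Autowired
    public void process(StreamsBuilder streamsBuilder) {
        streamsBuilder
                .stream("greetings", Consumed.with(Serdes.String(), Serdes.String()))
                .mapValues(value -> value.toUpperCase())
                .to("greetings_uppercase", Produced.with(Serdes.String(), Serdes.String()));
    }
}
```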
120 |
121 |
122 | ## Interactive Queries with Multiple Instances of Kafka Streams using SpringBoot
123 |
124 | ### Start up two instances of the application
125 |
126 | - **Instance 1** with the default port **8080**.
127 |
128 | ```
129 | java -jar orders-streams-app/build/libs/orders-streams-app-0.0.1-SNAPSHOT.jar
130 | ```
131 | - **Instance 2** with port **8081**.
132 |
133 | ```
134 | java -jar -Dserver.port=8081 orders-streams-app/build/libs/orders-streams-app-0.0.1-SNAPSHOT.jar
135 | ```
136 |
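- With two instances, each one holds only the state-store partitions assigned to it, so a REST call may land on the instance that does not own the requested key. A minimal sketch of the metadata lookup used to route such calls (illustrative only, not code from this repo; it assumes each instance sets the `application.server` streams property to its own `host:port`, e.g. `localhost:8080` and `localhost:8081`):

```
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;

public class KeyHostLocator {

    private final StreamsBuilderFactoryBean streamsBuilderFactoryBean;

    public KeyHostLocator(StreamsBuilderFactoryBean streamsBuilderFactoryBean) {
        this.streamsBuilderFactoryBean = streamsBuilderFactoryBean;
    }

    // Returns the host:port of the instance whose local store owns the key, so
    // the caller can either query the local store or forward the request.
    // (getKafkaStreams() is non-null once the factory bean has started the app.)
    public String findHostFor(String storeName, String locationId) {
        KafkaStreams kafkaStreams = streamsBuilderFactoryBean.getKafkaStreams();
        KeyQueryMetadata metadata = kafkaStreams
                .queryMetadataForKey(storeName, locationId, Serdes.String().serializer());
        return metadata.activeHost().host() + ":" + metadata.activeHost().port();
    }
}
```

- If the returned host matches the local instance, the store can be queried directly; otherwise the request is forwarded over HTTP to the other instance. The `HostInfoDTO` record in `orders-domain` carries exactly this host/port pair.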
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '3.6'
3 | services:
4 |   zookeeper:
5 |     image: confluentinc/cp-zookeeper:7.1.0
6 |     platform: linux/amd64
7 |     hostname: zookeeper
8 |     container_name: zookeeper
9 |     ports:
10 |       - "2181:2181"
11 |     environment:
12 |       ZOOKEEPER_CLIENT_PORT: 2181
13 |       ZOOKEEPER_TICK_TIME: 2000
14 |
15 |   broker:
16 |     image: confluentinc/cp-server:7.1.0
17 |     platform: linux/amd64
18 |     hostname: broker
19 |     container_name: broker
20 |     depends_on:
21 |       - zookeeper
22 |     ports:
23 |       - "9092:9092"
24 |       - "9101:9101"
25 |     environment:
26 |       KAFKA_BROKER_ID: 1
27 |       KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
28 |       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
29 |       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
30 |       KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
31 |       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
32 |       KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
33 |       KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
34 |       KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
35 |       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
36 |       KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
37 |       KAFKA_JMX_PORT: 9101
38 |       KAFKA_JMX_HOSTNAME: localhost
39 |       CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:29092
40 |       CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
41 |       CONFLUENT_METRICS_ENABLE: 'true'
42 |       CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
43 |
--------------------------------------------------------------------------------
/explore-kafka-streams-start.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dilipsundarraj1/kafka-streams-api-for-developers/e1c1ecdcce46f0956f3f1e8bff1e8934ee7a0b4d/explore-kafka-streams-start.zip
--------------------------------------------------------------------------------
/explore-kafka-streams-using-spring-boot-start.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dilipsundarraj1/kafka-streams-api-for-developers/e1c1ecdcce46f0956f3f1e8bff1e8934ee7a0b4d/explore-kafka-streams-using-spring-boot-start.zip
--------------------------------------------------------------------------------
/explore-kafka-streams-using-spring-boot/orders-management-streams/.gitignore:
--------------------------------------------------------------------------------
1 | HELP.md
2 | .gradle
3 | build/
4 | !gradle/wrapper/gradle-wrapper.jar
5 | !**/src/main/**/build/
6 | !**/src/test/**/build/
7 |
8 | ### STS ###
9 | .apt_generated
10 | .classpath
11 | .factorypath
12 | .project
13 | .settings
14 | .springBeans
15 | .sts4-cache
16 | bin/
17 | !**/src/main/**/bin/
18 | !**/src/test/**/bin/
19 |
20 | ### IntelliJ IDEA ###
21 | .idea
22 | *.iws
23 | *.iml
24 | *.ipr
25 | out/
26 | !**/src/main/**/out/
27 | !**/src/test/**/out/
28 |
29 | ### NetBeans ###
30 | /nbproject/private/
31 | /nbbuild/
32 | /dist/
33 | /nbdist/
34 | /.nb-gradle/
35 |
36 | ### VS Code ###
37 | .vscode/
38 |
--------------------------------------------------------------------------------
/explore-kafka-streams-using-spring-boot/orders-management-streams/gradle/wrapper/gradle-wrapper.jar:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/kafka-streams-api-for-developers/e1c1ecdcce46f0956f3f1e8bff1e8934ee7a0b4d/explore-kafka-streams-using-spring-boot/orders-management-streams/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 
56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit 84 | 85 | APP_NAME="Gradle" 86 | APP_BASE_NAME=${0##*/} 87 | 88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 90 | 91 | # Use the maximum available, or set MAX_FD != -1 to use that value. 92 | MAX_FD=maximum 93 | 94 | warn () { 95 | echo "$*" 96 | } >&2 97 | 98 | die () { 99 | echo 100 | echo "$*" 101 | echo 102 | exit 1 103 | } >&2 104 | 105 | # OS specific support (must be 'true' or 'false'). 106 | cygwin=false 107 | msys=false 108 | darwin=false 109 | nonstop=false 110 | case "$( uname )" in #( 111 | CYGWIN* ) cygwin=true ;; #( 112 | Darwin* ) darwin=true ;; #( 113 | MSYS* | MINGW* ) msys=true ;; #( 114 | NONSTOP* ) nonstop=true ;; 115 | esac 116 | 117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 118 | 119 | 120 | # Determine the Java command to use to start the JVM. 121 | if [ -n "$JAVA_HOME" ] ; then 122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 123 | # IBM's JDK on AIX uses strange locations for the executables 124 | JAVACMD=$JAVA_HOME/jre/sh/java 125 | else 126 | JAVACMD=$JAVA_HOME/bin/java 127 | fi 128 | if [ ! -x "$JAVACMD" ] ; then 129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 130 | 131 | Please set the JAVA_HOME variable in your environment to match the 132 | location of your Java installation." 133 | fi 134 | else 135 | JAVACMD=java 136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | 142 | # Increase the maximum file descriptors if we can. 143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 144 | case $MAX_FD in #( 145 | max*) 146 | MAX_FD=$( ulimit -H -n ) || 147 | warn "Could not query maximum file descriptor limit" 148 | esac 149 | case $MAX_FD in #( 150 | '' | soft) :;; #( 151 | *) 152 | ulimit -n "$MAX_FD" || 153 | warn "Could not set maximum file descriptor limit to $MAX_FD" 154 | esac 155 | fi 156 | 157 | # Collect all arguments for the java command, stacking in reverse order: 158 | # * args from the command line 159 | # * the main class name 160 | # * -classpath 161 | # * -D...appname settings 162 | # * --module-path (only if needed) 163 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
164 | 165 | # For Cygwin or MSYS, switch paths to Windows format before running java 166 | if "$cygwin" || "$msys" ; then 167 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 168 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 169 | 170 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 171 | 172 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 173 | for arg do 174 | if 175 | case $arg in #( 176 | -*) false ;; # don't mess with options #( 177 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 178 | [ -e "$t" ] ;; #( 179 | *) false ;; 180 | esac 181 | then 182 | arg=$( cygpath --path --ignore --mixed "$arg" ) 183 | fi 184 | # Roll the args list around exactly as many times as the number of 185 | # args, so each arg winds up back in the position where it started, but 186 | # possibly modified. 187 | # 188 | # NB: a `for` loop captures its iteration list before it begins, so 189 | # changing the positional parameters here affects neither the number of 190 | # iterations, nor the values presented in `arg`. 191 | shift # remove old arg 192 | set -- "$@" "$arg" # push replacement arg 193 | done 194 | fi 195 | 196 | # Collect all arguments for the java command; 197 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of 198 | # shell script including quotes and variable substitutions, so put them in 199 | # double quotes to make sure that they get re-expanded; and 200 | # * put everything else in single quotes, so that it's not re-expanded. 201 | 202 | set -- \ 203 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 204 | -classpath "$CLASSPATH" \ 205 | org.gradle.wrapper.GradleWrapperMain \ 206 | "$@" 207 | 208 | # Stop when "xargs" is not available. 209 | if ! command -v xargs >/dev/null 2>&1 210 | then 211 | die "xargs is not available" 212 | fi 213 | 214 | # Use "xargs" to parse quoted args. 215 | # 216 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 217 | # 218 | # In Bash we could simply go: 219 | # 220 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 221 | # set -- "${ARGS[@]}" "$@" 222 | # 223 | # but POSIX shell has neither arrays nor command substitution, so instead we 224 | # post-process each arg (as a line of input to sed) to backslash-escape any 225 | # character that might be a shell metacharacter, then use eval to reverse 226 | # that process (while maintaining the separation between arguments), and wrap 227 | # the whole thing up as a single "set" statement. 228 | # 229 | # This will of course break if any of these variables contains a newline or 230 | # an unmatched quote. 231 | # 232 | 233 | eval "set -- $( 234 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 235 | xargs -n1 | 236 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 237 | tr '\n' ' ' 238 | )" '"$@"' 239 | 240 | exec "$JAVACMD" "$@" 241 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if %ERRORLEVEL% equ 0 goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if %ERRORLEVEL% equ 0 goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 
83 | set EXIT_CODE=%ERRORLEVEL% 84 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 85 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 86 | exit /b %EXIT_CODE% 87 | 88 | :mainEnd 89 | if "%OS%"=="Windows_NT" endlocal 90 | 91 | :omega 92 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | } 4 | 5 | group 'com.learnkafkastreams' 6 | version '0.0.1-SNAPSHOT' 7 | 8 | repositories { 9 | mavenCentral() 10 | } 11 | 12 | dependencies { 13 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1' 14 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1' 15 | } 16 | 17 | test { 18 | useJUnitPlatform() 19 | } -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/Address.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record Address(String addressLine1, 4 | String addressLine2, 5 | String city, 6 | String state, 7 | String zip) { 8 | } 9 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/AllOrdersCountPerStore.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record AllOrdersCountPerStore(String locationId, 4 | Long orderCount, 5 | OrderType orderType) { 6 | public static record Address(String addressLine1, 7 | String addressLine2, 8 | String city, 9 | String state, 10 | String zip) { 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/AllOrdersCountPerStoreDTO.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import com.learnkafkastreams.domain.OrderType; 4 | 5 | public record AllOrdersCountPerStoreDTO(String locationId, 6 | Long orderCount, 7 | OrderType orderType) { 8 | } 9 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/HostInfoDTO.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record HostInfoDTO(String host, int port) { 4 | } 5 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/Order.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | import java.time.LocalDateTime; 5 | import java.util.List; 6 | 7 | public record Order(Integer orderId, 8 | String locationId, 9 | BigDecimal finalAmount, 10 | OrderType orderType, 11 | List orderLineItems, 12 | LocalDateTime orderedDateTime) { 13 | 14 | } 
15 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/OrderCountPerStore.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record OrderCountPerStore(String locationId, 4 | Long orderCount) { 5 | } 6 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/OrderCountPerStoreDTO.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record OrderCountPerStoreDTO(String locationId, 4 | Long orderCount) { 5 | } 6 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/OrderLineItem.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | 5 | public record OrderLineItem( 6 | String item, 7 | Integer count, 8 | BigDecimal amount) { 9 | } 10 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/OrderRevenueDTO.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import com.learnkafkastreams.domain.OrderType; 4 | import com.learnkafkastreams.domain.TotalRevenue; 5 | 6 | public record OrderRevenueDTO( 7 | String locationId, 8 | 9 | OrderType orderType, 10 | TotalRevenue totalRevenue 11 | ) { 12 | } 13 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/OrderType.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public enum OrderType { 4 | GENERAL, 5 | RESTAURANT 6 | } 7 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/OrdersCountPerStoreByWindowsDTO.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import com.learnkafkastreams.domain.OrderType; 4 | 5 | import java.time.LocalDateTime; 6 | 7 | public record OrdersCountPerStoreByWindowsDTO(String locationId, 8 | Long orderCount, 9 | OrderType orderType, 10 | LocalDateTime startWindow, 11 | LocalDateTime endWindow) { 12 | } 13 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/OrdersRevenuePerStoreByWindowsDTO.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import com.learnkafkastreams.domain.OrderType; 4 | import com.learnkafkastreams.domain.TotalRevenue; 5 | 6 | 
import java.time.LocalDateTime; 7 | 8 | public record OrdersRevenuePerStoreByWindowsDTO(String locationId, 9 | TotalRevenue totalRevenue, 10 | OrderType orderType, 11 | LocalDateTime startWindow, 12 | LocalDateTime endWindow) { 13 | } 14 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/Revenue.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | public record Revenue(String locationId, 5 | BigDecimal finalAmount) { 6 | } 7 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/Store.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record Store(String locationId, 4 | Address address, 5 | String contactNum) { 6 | } 7 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/TotalRevenue.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | 5 | public record TotalRevenue(String locationId, 6 | Integer runnuingOrderCount, 7 | BigDecimal runningRevenue) { 8 | 9 | public TotalRevenue() { 10 | this("", 0, BigDecimal.valueOf(0.0)); 11 | } 12 | 13 | public TotalRevenue updateRunningRevenue(String key, Order order) { 14 | 15 | var newOrdersCount = this.runnuingOrderCount+1; 16 | var newRevenue = this.runningRevenue.add( order.finalAmount()); 17 | return new TotalRevenue(key, newOrdersCount, newRevenue); 18 | 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-domain/src/main/java/com/learnkafkastreams/domain/TotalRevenueWithAddress.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record TotalRevenueWithAddress(TotalRevenue totalRevenue, 4 | Store store) { 5 | } 6 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' version '3.0.1' 4 | id 'io.spring.dependency-management' version '1.1.0' 5 | } 6 | 7 | group = 'com.learnkafkastreams' 8 | version = '0.0.1-SNAPSHOT' 9 | sourceCompatibility = '17' 10 | 11 | repositories { 12 | mavenCentral() 13 | } 14 | 15 | configurations { 16 | compileOnly { 17 | extendsFrom annotationProcessor 18 | } 19 | } 20 | 21 | dependencies { 22 | implementation project(':orders-domain') 23 | 24 | implementation 'org.springframework.boot:spring-boot-starter-validation' 25 | implementation 'org.springframework.boot:spring-boot-starter-web' 26 | 27 | //lombok 28 | compileOnly 'org.projectlombok:lombok' 29 | annotationProcessor 'org.projectlombok:lombok' 30 | 31 | //kafka 32 | implementation 'org.apache.kafka:kafka-streams' 33 | 
implementation 'org.springframework.kafka:spring-kafka' 34 | 35 | //test 36 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 37 | testImplementation 'org.springframework.kafka:spring-kafka-test' 38 | testImplementation 'org.apache.kafka:kafka-streams-test-utils:3.3.1' 39 | testImplementation 'org.awaitility:awaitility:4.2.0' 40 | 41 | } 42 | 43 | tasks.named('test') { 44 | useJUnitPlatform() 45 | } 46 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/OrdersManagementStreamsApplication.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | import org.springframework.kafka.annotation.EnableKafkaStreams; 6 | 7 | @SpringBootApplication 8 | @EnableKafkaStreams 9 | public class OrdersManagementStreamsApplication { 10 | 11 | public static void main(String[] args) { 12 | SpringApplication.run(OrdersManagementStreamsApplication.class, args); 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/config/OrdersStreamsConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.config; 2 | 3 | import com.learnkafkastreams.exceptionhandler.StreamsProcessorCustomErrorHandler; 4 | import com.learnkafkastreams.topology.OrdersTopology; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.apache.kafka.clients.admin.NewTopic; 7 | import org.apache.kafka.common.TopicPartition; 8 | import org.apache.kafka.streams.StreamsConfig; 9 | import org.springframework.beans.factory.annotation.Autowired; 10 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties; 11 | import org.springframework.context.annotation.Bean; 12 | import org.springframework.context.annotation.Configuration; 13 | import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration; 14 | import org.springframework.kafka.config.KafkaStreamsConfiguration; 15 | import org.springframework.kafka.config.StreamsBuilderFactoryBeanConfigurer; 16 | import org.springframework.kafka.config.TopicBuilder; 17 | import org.springframework.kafka.core.KafkaTemplate; 18 | import org.springframework.kafka.listener.ConsumerRecordRecoverer; 19 | import org.springframework.kafka.listener.DeadLetterPublishingRecoverer; 20 | import org.springframework.kafka.streams.RecoveringDeserializationExceptionHandler; 21 | 22 | 23 | @Configuration 24 | @Slf4j 25 | public class OrdersStreamsConfiguration { 26 | //KafkaStreamsDefaultConfiguration -> Class Responsible for configuring the KafkaStreams in SpringBoot 27 | @Autowired 28 | KafkaProperties kafkaProperties; 29 | 30 | @Autowired 31 | KafkaTemplate kafkaTemplate; 32 | 33 | @Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME) 34 | public KafkaStreamsConfiguration kStreamConfig() { 35 | 36 | var streamProperties = kafkaProperties.buildStreamsProperties(); 37 | 38 | streamProperties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, RecoveringDeserializationExceptionHandler.class); 39 | 
streamProperties.put(RecoveringDeserializationExceptionHandler.KSTREAM_DESERIALIZATION_RECOVERER, consumerRecordRecoverer); 40 | 41 | return new KafkaStreamsConfiguration(streamProperties); 42 | } 43 | 44 | @Bean 45 | public StreamsBuilderFactoryBeanConfigurer streamsBuilderFactoryBeanConfigurer(){ 46 | log.info("Inside streamsBuilderFactoryBeanConfigurer"); 47 | return factoryBean -> { 48 | factoryBean.setStreamsUncaughtExceptionHandler(new StreamsProcessorCustomErrorHandler()); 49 | }; 50 | } 51 | 52 | 53 | public DeadLetterPublishingRecoverer recoverer() { 54 | return new DeadLetterPublishingRecoverer(kafkaTemplate, 55 | (record, ex) -> { 56 | log.error("Exception in Deserializing the message : {} and the record is : {}", ex.getMessage(),record, ex); 57 | return new TopicPartition("recovererDLQ", record.partition()); 58 | }); 59 | } 60 | 61 | 62 | ConsumerRecordRecoverer consumerRecordRecoverer = (record, exception) -> { 63 | log.error("Exception is : {} Failed Record : {} ", exception, record); 64 | }; 65 | 66 | @Bean 67 | public NewTopic topicBuilder() { 68 | return TopicBuilder.name(OrdersTopology.ORDERS) 69 | .partitions(2) 70 | .replicas(1) 71 | .build(); 72 | 73 | } 74 | 75 | @Bean 76 | public NewTopic storeTopicBuilder() { 77 | return TopicBuilder.name(OrdersTopology.STORES) 78 | .partitions(2) 79 | .replicas(1) 80 | .build(); 81 | 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/exceptionhandler/StreamsDeserializationErrorHandler.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.exceptionhandler; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.apache.kafka.streams.errors.DeserializationExceptionHandler; 6 | import org.apache.kafka.streams.processor.ProcessorContext; 7 | 8 | import java.util.Map; 9 | 10 | @Slf4j 11 | public class StreamsDeserializationErrorHandler implements DeserializationExceptionHandler { 12 | int errorCounter = 0; 13 | 14 | @Override 15 | public DeserializationHandlerResponse handle(ProcessorContext context, ConsumerRecord record, Exception exception) { 16 | 17 | log.error("Exception is : {} and the Kafka Record is : {} " , exception.getMessage(), record, exception); 18 | log.error("errorCounter is : {} " , errorCounter); 19 | if(errorCounter < 10){ 20 | errorCounter++; 21 | return DeserializationHandlerResponse.CONTINUE; 22 | } 23 | return DeserializationHandlerResponse.FAIL; 24 | } 25 | 26 | @Override 27 | public void configure(Map configs) { 28 | 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/exceptionhandler/StreamsProcessorCustomErrorHandler.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.exceptionhandler; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.streams.errors.StreamsException; 5 | import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; 6 | 7 | @Slf4j 8 | public class StreamsProcessorCustomErrorHandler implements StreamsUncaughtExceptionHandler { 9 | @Override 10 | public StreamThreadExceptionResponse handle(Throwable exception) { 11 | log.error("Exception 
in the Application : {} ",exception.getMessage(), exception); 12 | if(exception instanceof StreamsException){ 13 | var cause = exception.getCause(); 14 | if(cause.getMessage().equals("Transient Error")){ 15 | //return StreamThreadExceptionResponse.REPLACE_THREAD; 16 | return StreamThreadExceptionResponse.SHUTDOWN_CLIENT; 17 | } 18 | } 19 | log.error("Shutdown the client"); 20 | //return StreamThreadExceptionResponse.SHUTDOWN_CLIENT; 21 | return StreamThreadExceptionResponse.SHUTDOWN_APPLICATION; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/exceptionhandler/StreamsSerializationExceptionHandler.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.exceptionhandler; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import org.apache.kafka.streams.errors.ProductionExceptionHandler; 6 | 7 | import java.util.Map; 8 | 9 | @Slf4j 10 | public class StreamsSerializationExceptionHandler implements ProductionExceptionHandler { 11 | @Override 12 | public ProductionExceptionHandlerResponse handle(ProducerRecord record, Exception exception) { 13 | log.error("Exception in handle : {} and the record is : {} ", exception.getMessage(), record, exception); 14 | return ProductionExceptionHandlerResponse.CONTINUE; 15 | } 16 | 17 | @Override 18 | public void configure(Map configs) { 19 | 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/producer/OrdersMockDataProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import com.learnkafkastreams.domain.Order; 8 | import com.learnkafkastreams.domain.OrderLineItem; 9 | import com.learnkafkastreams.domain.OrderType; 10 | import com.learnkafkastreams.topology.OrdersTopology; 11 | import lombok.extern.slf4j.Slf4j; 12 | 13 | import java.math.BigDecimal; 14 | import java.time.LocalDateTime; 15 | import java.time.LocalTime; 16 | import java.util.List; 17 | 18 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 19 | import static java.lang.Thread.sleep; 20 | 21 | @Slf4j 22 | public class OrdersMockDataProducer { 23 | 24 | public static void main(String[] args) throws InterruptedException { 25 | ObjectMapper objectMapper = new ObjectMapper() 26 | .registerModule(new JavaTimeModule()) 27 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 28 | 29 | 30 | publishOrders(objectMapper, buildOrders()); 31 | //publishBulkOrders(objectMapper); 32 | 33 | /** 34 | * To test grace period. 35 | * 1. Run the {@link #publishOrders(ObjectMapper, List)} function during the start of the minute. 36 | * 2. Wait until the next minute and run the {@link #publishOrders(ObjectMapper, List)} 37 | * and then the {@link #publishOrdersToTestGrace(ObjectMapper, List)} function before the 15th second. 
38 |          * - This should allow the aggregation to be added to the window before the grace period expires.
39 |          *
40 |          */
41 |         //publishOrdersToTestGrace(objectMapper, buildOrdersToTestGrace());
42 |
43 |
44 |         //Future and Old Records
45 | //        publishFutureRecords(objectMapper);
46 | //        publishExpiredRecords(objectMapper);
47 |
48 |
49 |     }
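    // NOTE: assumed windowing, not taken from this repo's topology code. The
    // grace-period steps above line up with a tumbling window of roughly
    //     TimeWindows.ofSizeAndGrace(Duration.ofSeconds(60), Duration.ofSeconds(15))
    // i.e. a one-minute window that keeps accepting late-arriving records for
    // another 15 seconds, so an order published before the 15th second of the
    // next minute can still be folded into the previous window's aggregate.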
50 |
51 |     private static void publishFutureRecords(ObjectMapper objectMapper) {
52 |         var localDateTime = LocalDateTime.now().plusDays(1);
53 |
54 |         var newOrders = buildOrders()
55 |                 .stream()
56 |                 .map(order ->
57 |                         new Order(order.orderId(),
58 |                                 order.locationId(),
59 |                                 order.finalAmount(),
60 |                                 order.orderType(),
61 |                                 order.orderLineItems(),
62 |                                 localDateTime))
63 |                 .toList();
64 |         publishOrders(objectMapper, newOrders);
65 |     }
66 |
67 |     private static void publishExpiredRecords(ObjectMapper objectMapper) {
68 |
69 |         var localDateTime = LocalDateTime.now().minusDays(1);
70 |
71 |         var newOrders = buildOrders()
72 |                 .stream()
73 |                 .map(order ->
74 |                         new Order(order.orderId(),
75 |                                 order.locationId(),
76 |                                 order.finalAmount(),
77 |                                 order.orderType(),
78 |                                 order.orderLineItems(),
79 |                                 localDateTime))
80 |                 .toList();
81 |         publishOrders(objectMapper, newOrders);
82 |
83 |     }
84 |
85 |     private static void publishOrdersForGracePeriod(ObjectMapper objectMapper, List<Order> orders) {
86 |
87 |         var localTime = LocalDateTime.now().toLocalTime();
88 |         var modifiedTime = LocalTime.of(localTime.getHour(), localTime.getMinute(), 18);
89 |         var localDateTime = LocalDateTime.now().with(modifiedTime);
90 |
91 |         //With Grace Period
92 |         //[general_orders_revenue_window]: , TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00]
93 |         //[general_orders_revenue_window]: TotalRevenue[locationId=store_1234, runnuingOrderCount=1, runningRevenue=27.00]
94 |         //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00]
95 |         //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00]
96 |
97 |         //Without Grace Period
98 |         //[general_orders_revenue_window]: , TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00]
99 |         //[general_orders_revenue_window]: TotalRevenue[locationId=store_1234, runnuingOrderCount=1, runningRevenue=27.00]
100 |         //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00]
101 |         //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00]
102 |
103 |
104 |
105 |         var generalOrdersWithCustomTime = orders
106 |                 .stream()
107 |                 .filter(order -> order.orderType().equals(OrderType.GENERAL))
108 |                 .map(order ->
109 |                         new Order(order.orderId(),
110 |                                 order.locationId(),
111 |                                 order.finalAmount(),
112 |                                 order.orderType(),
113 |                                 order.orderLineItems(),
114 |                                 localDateTime))
115 |                 .toList();
116 |
117 |         var generalOrders = orders
118 |                 .stream()
119 |                 .filter(order -> order.orderType().equals(OrderType.GENERAL))
120 |                 .toList();
121 |
122 |         publishOrders(objectMapper, generalOrders);
123 |
124 |         //orders with the timestamp as 18th second
125 |         publishRecordsWithDelay(generalOrdersWithCustomTime, localDateTime, objectMapper, 18);
126 |
127 |     }
128 |
129 |     private static void publishRecordsWithDelay(List<Order> newOrders, LocalDateTime localDateTime, ObjectMapper objectMapper) {
130 |
131 |         publishOrders(objectMapper, newOrders);
132 |     }
133 |
134 |     private static void publishRecordsWithDelay(List<Order> newOrders, LocalDateTime localDateTime, ObjectMapper objectMapper, int timeToPublish) {
135 |
136 |         var flag = true;
137 |         while (flag) {
138 |             var dateTime = LocalDateTime.now();
139 |             if (dateTime.toLocalTime().getMinute() == localDateTime.getMinute()
140 |                     && dateTime.toLocalTime().getSecond() == timeToPublish) {
141 |                 System.out.println("Publishing the record with delay");
142 |                 publishOrders(objectMapper, newOrders);
143 |                 flag = false;
144 |             } else {
145 |                 System.out.println("Current time is " + dateTime + " and the record will be published at second " + timeToPublish);
146 |                 System.out.println("Record Date Time : " + localDateTime);
147 |             }
148 |         }
149 |     }
150 |
151 |     private static List<Order> buildOrdersForGracePeriod() {
152 |
153 |         var orderItems = List.of(
154 |                 new OrderLineItem("Bananas", 2, new BigDecimal("2.00")),
155 |                 new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00"))
156 |         );
157 |
158 |         var orderItemsRestaurant = List.of(
159 |                 new OrderLineItem("Pizza", 2, new BigDecimal("12.00")),
160 |                 new OrderLineItem("Coffee", 1, new BigDecimal("3.00"))
161 |         );
162 |
163 |         var order1 = new Order(12345, "store_999",
164 |                 new BigDecimal("27.00"),
165 |                 OrderType.RESTAURANT,
166 |                 orderItems,
167 |                 LocalDateTime.parse("2023-01-06T18:50:21")
168 |         );
169 |
170 |         var order2 = new Order(54321, "store_999",
171 |                 new BigDecimal("15.00"),
172 |                 OrderType.RESTAURANT,
173 |                 orderItemsRestaurant,
174 |                 LocalDateTime.parse("2023-01-06T18:50:21")
175 |         );
176 |
177 |         var order3 = new Order(54321, "store_999",
178 |                 new BigDecimal("15.00"),
179 |                 OrderType.RESTAURANT,
180 |                 orderItemsRestaurant,
181 |                 LocalDateTime.parse("2023-01-06T18:50:22")
182 |         );
183 |
184 |         return List.of(
185 |                 order1,
186 |                 order2,
187 |                 order3
188 |         );
189 |
190 |     }
191 |
192 |     private static List<Order> buildOrders() {
193 |         var orderItems = List.of(
194 |                 new OrderLineItem("Bananas", 2, new BigDecimal("2.00")),
195 |                 new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00"))
196 |         );
197 |
198 |         var orderItemsRestaurant = List.of(
199 |                 new OrderLineItem("Pizza", 2, new BigDecimal("12.00")),
200 |                 new OrderLineItem("Coffee", 1, new BigDecimal("3.00"))
201 |         );
202 |
203 |         var order1 = new Order(12345, "store_1234",
204 |                 new BigDecimal("27.00"),
205 |                 OrderType.GENERAL,
206 |                 orderItems,
207 |                 LocalDateTime.now()
208 |                 //LocalDateTime.now(ZoneId.of("UTC"))
209 |         );
210 |
211 |         var order2 = new Order(54321, "store_1234",
212 |                 new BigDecimal("15.00"),
213 |                 OrderType.RESTAURANT,
214 |                 orderItemsRestaurant,
215 |                 LocalDateTime.now()
216 |                 //LocalDateTime.now(ZoneId.of("UTC"))
217 |         );
218 |
219 |         var order3 = new Order(12345, "store_4567",
220 |                 new BigDecimal("27.00"),
221 |                 OrderType.GENERAL,
222 |                 orderItems,
223 |                 LocalDateTime.now()
224 |                 //LocalDateTime.parse("2023-02-25T05:02:01")
225 |                 //LocalDateTime.now(ZoneId.of("UTC"))
226 |         );
227 |
228 |         var order4 = new Order(12345, "store_4567",
229 |                 new BigDecimal("27.00"),
230 |                 OrderType.RESTAURANT,
231 |                 orderItems,
232 |                 LocalDateTime.now()
233 |                 //LocalDateTime.parse("2023-02-25T05:02:01")
234 |                 //LocalDateTime.now(ZoneId.of("UTC"))
235 |         );
236 |
237 |         return List.of(
238 |                 order1,
239 |                 order2,
240 |                 order3,
241 |                 order4
242 |         );
243 |     }
244 |
245 |     private static List<Order> buildOrdersToTestGrace() {
246 |         var orderItems = List.of(
247 |                 new OrderLineItem("Bananas", 2, new BigDecimal("2.00")),
248 |                 new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00"))
249 |         );
250 |
251 |         var orderItemsRestaurant = List.of(
252 |                 new OrderLineItem("Pizza", 2, new BigDecimal("12.00")),
253 |                 new OrderLineItem("Coffee", 1, new
BigDecimal("3.00")) 254 | ); 255 | 256 | var order1 = new Order(12345, "store_1234", 257 | new BigDecimal("27.00"), 258 | OrderType.GENERAL, 259 | orderItems, 260 | LocalDateTime.parse("2023-02-27T08:45:58") 261 | //LocalDateTime.now(ZoneId.of("UTC")) 262 | ); 263 | 264 | var order2 = new Order(54321, "store_1234", 265 | new BigDecimal("15.00"), 266 | OrderType.RESTAURANT, 267 | orderItemsRestaurant, 268 | LocalDateTime.parse("2023-02-27T08:45:58") 269 | //LocalDateTime.now(ZoneId.of("UTC")) 270 | ); 271 | 272 | var order3 = new Order(12345, "store_4567", 273 | new BigDecimal("27.00"), 274 | OrderType.GENERAL, 275 | orderItems, 276 | //LocalDateTime.now() 277 | LocalDateTime.parse("2023-02-27T08:45:58") 278 | //LocalDateTime.now(ZoneId.of("UTC")) 279 | ); 280 | 281 | var order4 = new Order(12345, "store_4567", 282 | new BigDecimal("27.00"), 283 | OrderType.RESTAURANT, 284 | orderItems, 285 | //LocalDateTime.now() 286 | LocalDateTime.parse("2023-02-27T08:45:58") 287 | //LocalDateTime.now(ZoneId.of("UTC")) 288 | ); 289 | 290 | return List.of( 291 | order1, 292 | order2, 293 | order3, 294 | order4 295 | ); 296 | } 297 | 298 | private static void publishBulkOrders(ObjectMapper objectMapper) throws InterruptedException { 299 | 300 | int count = 0; 301 | while (count < 100) { 302 | var orders = buildOrders(); 303 | publishOrders(objectMapper, orders); 304 | sleep(1000); 305 | count++; 306 | } 307 | } 308 | 309 | private static void publishOrdersToTestGrace(ObjectMapper objectMapper, List orders) { 310 | 311 | orders 312 | .forEach(order -> { 313 | try { 314 | var ordersJSON = objectMapper.writeValueAsString(order); 315 | var recordMetaData = publishMessageSync(OrdersTopology.ORDERS, order.orderId() + "", ordersJSON); 316 | log.info("Published the order message : {} ", recordMetaData); 317 | } catch (JsonProcessingException e) { 318 | log.error("JsonProcessingException : {} ", e.getMessage(), e); 319 | throw new RuntimeException(e); 320 | } catch (Exception e) { 321 | log.error("Exception : {} ", e.getMessage(), e); 322 | throw new RuntimeException(e); 323 | } 324 | }); 325 | } 326 | 327 | private static void publishOrders(ObjectMapper objectMapper, List orders) { 328 | 329 | orders 330 | .forEach(order -> { 331 | try { 332 | var ordersJSON = objectMapper.writeValueAsString(order); 333 | var recordMetaData = publishMessageSync(OrdersTopology.ORDERS, order.orderId() + "", ordersJSON); 334 | log.info("Published the order message : {} ", recordMetaData); 335 | } catch (JsonProcessingException e) { 336 | log.error("JsonProcessingException : {} ", e.getMessage(), e); 337 | throw new RuntimeException(e); 338 | } catch (Exception e) { 339 | log.error("Exception : {} ", e.getMessage(), e); 340 | throw new RuntimeException(e); 341 | } 342 | }); 343 | } 344 | 345 | 346 | } 347 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/producer/ProducerUtil.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.producer.KafkaProducer; 5 | import org.apache.kafka.clients.producer.ProducerConfig; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | import org.apache.kafka.clients.producer.RecordMetadata; 8 | import org.apache.kafka.common.serialization.StringSerializer; 9 | 10 | import 
java.util.HashMap; 11 | import java.util.Map; 12 | import java.util.concurrent.ExecutionException; 13 | 14 | @Slf4j 15 | public class ProducerUtil { 16 | 17 | static KafkaProducer producer = new KafkaProducer(producerProps()); 18 | 19 | public static Map producerProps(){ 20 | 21 | Map propsMap = new HashMap<>(); 22 | propsMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 23 | propsMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 24 | propsMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 25 | return propsMap; 26 | 27 | } 28 | 29 | 30 | public static RecordMetadata publishMessageSync(String topicName, String key, String message ){ 31 | 32 | ProducerRecord producerRecord = new ProducerRecord<>(topicName, key, message); 33 | RecordMetadata recordMetadata=null; 34 | 35 | try { 36 | log.info("producerRecord : " + producerRecord); 37 | recordMetadata = producer.send(producerRecord).get(); 38 | } catch (InterruptedException e) { 39 | log.error("InterruptedException in publishMessageSync : {} ", e.getMessage(), e); 40 | } catch (ExecutionException e) { 41 | log.error("ExecutionException in publishMessageSync : {} ", e.getMessage(), e); 42 | }catch(Exception e){ 43 | log.error("Exception in publishMessageSync : {} ", e.getMessage(), e); 44 | } 45 | return recordMetadata; 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/producer/StoresMockDataProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import com.learnkafkastreams.domain.Address; 8 | import com.learnkafkastreams.domain.Store; 9 | import com.learnkafkastreams.topology.OrdersTopology; 10 | import lombok.extern.slf4j.Slf4j; 11 | 12 | import java.util.List; 13 | 14 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 15 | 16 | @Slf4j 17 | public class StoresMockDataProducer { 18 | 19 | public static void main(String[] args) { 20 | ObjectMapper objectMapper = new ObjectMapper() 21 | .registerModule(new JavaTimeModule()) 22 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 23 | 24 | 25 | var address1 = new Address("1234 Street 1 ", "", "City1", "State1", "12345"); 26 | var store1 = new Store("store_1234", 27 | address1, 28 | "1234567890" 29 | ); 30 | 31 | var address2 = new Address("1234 Street 2 ", "", "City2", "State2", "541321"); 32 | var store2 = new Store("store_4567", 33 | address2, 34 | "0987654321" 35 | ); 36 | 37 | 38 | var stores = List.of(store1, store2); 39 | stores 40 | .forEach(store -> { 41 | try { 42 | var storeJSON = objectMapper.writeValueAsString(store); 43 | var recordMetaData = publishMessageSync(OrdersTopology.STORES, store.locationId(), storeJSON); 44 | log.info("Published the store message : {} ", recordMetaData); 45 | } catch (JsonProcessingException e) { 46 | log.error("JsonProcessingException : {} ", e.getMessage(), e); 47 | throw new RuntimeException(e); 48 | } 49 | catch (Exception e) { 50 | log.error("Exception : {} ", e.getMessage(), e); 51 | throw new 
RuntimeException(e); 52 | } 53 | }); 54 | 55 | } 56 | 57 | } 58 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/service/OrderService.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.service; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.springframework.stereotype.Service; 5 | 6 | @Service 7 | @Slf4j 8 | public class OrderService { 9 | 10 | } 11 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/service/OrderStoreService.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.service; 2 | 3 | import org.apache.kafka.streams.StoreQueryParameters; 4 | import org.apache.kafka.streams.state.QueryableStoreTypes; 5 | import org.apache.kafka.streams.state.ReadOnlyKeyValueStore; 6 | import org.apache.kafka.streams.state.ReadOnlyWindowStore; 7 | import org.springframework.kafka.config.StreamsBuilderFactoryBean; 8 | import org.springframework.stereotype.Service; 9 | 10 | @Service 11 | public class OrderStoreService { 12 | 13 | 14 | } 15 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/topology/OrdersTopology.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import com.learnkafkastreams.domain.Order; 4 | import com.learnkafkastreams.domain.Store; 5 | import com.learnkafkastreams.util.OrderTimeStampExtractor; 6 | import lombok.extern.slf4j.Slf4j; 7 | import org.apache.kafka.common.serialization.Serdes; 8 | import org.apache.kafka.streams.StreamsBuilder; 9 | import org.apache.kafka.streams.kstream.Consumed; 10 | import org.apache.kafka.streams.kstream.Printed; 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.kafka.support.serializer.JsonSerde; 13 | import org.springframework.stereotype.Component; 14 | 15 | @Component 16 | @Slf4j 17 | public class OrdersTopology { 18 | 19 | public static final String ORDERS = "orders"; 20 | public static final String GENERAL_ORDERS = "general_orders"; 21 | public static final String GENERAL_ORDERS_COUNT = "general_orders_count"; 22 | public static final String GENERAL_ORDERS_COUNT_WINDOWS = "general_orders_count_window"; 23 | public static final String GENERAL_ORDERS_REVENUE = "general_orders_revenue"; 24 | public static final String GENERAL_ORDERS_REVENUE_WINDOWS = "general_orders_revenue_window"; 25 | 26 | public static final String RESTAURANT_ORDERS = "restaurant_orders"; 27 | public static final String RESTAURANT_ORDERS_COUNT = "restaurant_orders_count"; 28 | public static final String RESTAURANT_ORDERS_REVENUE = "restaurant_orders_revenue"; 29 | public static final String RESTAURANT_ORDERS_COUNT_WINDOWS = "restaurant_orders_count_window"; 30 | public static final String RESTAURANT_ORDERS_REVENUE_WINDOWS = "restaurant_orders_revenue_window"; 31 | public static final String STORES = "stores"; 32 | 33 | 34 | 35 | @Autowired 36 | public void process(StreamsBuilder streamsBuilder) { 37 | 38 | orderRopology(streamsBuilder); 39 | 40 
| } 41 | 42 | private static void orderRopology(StreamsBuilder streamsBuilder) { 43 | var orderStreams = streamsBuilder 44 | .stream(ORDERS, 45 | Consumed.with(Serdes.String(), new JsonSerde<>(Order.class)) 46 | .withTimestampExtractor(new OrderTimeStampExtractor()) 47 | ) 48 | .selectKey((key, value) -> value.locationId()); 49 | 50 | var storesTable = streamsBuilder 51 | .table(STORES, 52 | Consumed.with(Serdes.String(), new JsonSerde<>(Store.class))); 53 | 54 | storesTable 55 | .toStream() 56 | .print(Printed.toSysOut().withLabel("stores")); 57 | 58 | orderStreams 59 | .print(Printed.toSysOut().withLabel("orders")); 60 | 61 | 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/java/com/learnkafkastreams/util/OrderTimeStampExtractor.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.util; 2 | 3 | import com.learnkafkastreams.domain.Order; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.clients.consumer.ConsumerRecord; 6 | import org.apache.kafka.streams.processor.TimestampExtractor; 7 | 8 | import java.time.ZoneOffset; 9 | 10 | @Slf4j 11 | public class OrderTimeStampExtractor implements TimestampExtractor { 12 | @Override 13 | public long extract(ConsumerRecord record, long partitionTime) { 14 | var orderRecord = (Order) record.value(); 15 | if(orderRecord!=null && orderRecord.orderedDateTime()!=null){ 16 | var timeStamp = orderRecord.orderedDateTime(); 17 | log.info("TimeStamp in extractor : {} ", timeStamp); 18 | var instant = timeStamp.toInstant(ZoneOffset.ofHours(-6)).toEpochMilli();; 19 | // var instant = timeStamp.toInstant(ZoneOffset.UTC).toEpochMilli(); 20 | log.info("instant in extractor : {} ", instant); 21 | return instant; 22 | } 23 | //fallback to stream time 24 | return partitionTime; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | main: 3 | allow-bean-definition-overriding: true 4 | application: 5 | name: orders-kafka-streams 6 | profiles: 7 | active: local # this activates the profile 8 | --- 9 | spring: 10 | config: 11 | activate: 12 | on-profile: local 13 | kafka: 14 | streams: 15 | bootstrap-servers: localhost:9092 16 | application-id: orders-kafka-streams 17 | properties: 18 | default: 19 | key: 20 | serde: 'org.apache.kafka.common.serialization.Serdes$StringSerde' 21 | value: 22 | serde: 'org.apache.kafka.common.serialization.Serdes$StringSerde' 23 | deserialization: 24 | exception: 25 | handler: 'org.apache.kafka.streams.errors.LogAndContinueExceptionHandler' 26 | serialization: 27 | exception: 28 | handler: 'com.learnkafkastreams.exceptionhandler.StreamsSerializationExceptionHandler' -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/curl_commands.txt: -------------------------------------------------------------------------------- 1 | =============================== 2 | Retrieve Orders Count By Order Type: | 3 | =============================== 4 | 5 | curl -i http://localhost:8080/v1/orders/count/general_orders 6 | 7 | curl -i 
http://localhost:8080/v1/orders/count/restaurant_orders 8 | 9 | Sample JSON: 01-orders-count.json 10 | DTO:OrderCountPerStoreDTO 11 | 12 | =================================================== 13 | Retrieve Orders Count By Order Type & Location Id: | 14 | =================================================== 15 | 16 | curl -i http://localhost:8080/v1/orders/count/general_orders?location_id=store_1234 17 | 18 | Sample JSON: 02-orders-count-by-locationid.json 19 | DTO: OrderCountPerStoreDTO 20 | 21 | No Matching Key: 22 | ================ 23 | curl -i http://localhost:8080/v1/orders/count/general_orders?location_id=store_9999 24 | 25 | 26 | ========================================= 27 | Retrieve All Orders Count for All Types: | 28 | ========================================= 29 | 30 | curl -i http://localhost:8080/v1/orders/count 31 | 32 | Sample JSON : 03-count-with-all-orders-types.json 33 | DTO: AllOrdersCountPerStoreDTO 34 | 35 | ========================= 36 | Retrieve Revenue By Order Type:| 37 | ========================= 38 | curl -i http://localhost:8080/v1/orders/revenue/general_orders 39 | 40 | 41 | curl -i http://localhost:8080/v1/orders/revenue/restaurant_orders 42 | 43 | Sample JSON : 04-orders-revenue.json 44 | DTO: OrderRevenueDTO 45 | 46 | ================ 47 | Error Scenarios :| 48 | ================ 49 | curl -i http://localhost:8080/v1/orders/revenue/restaurant_orders 50 | 51 | curl -i -X POST http://localhost:8080/v1/orders/revenue/restaurant_orders 52 | 53 | =============================== 54 | Retrieve Revenue for All Types:| 55 | =============================== 56 | 57 | curl -i http://localhost:8080/v1/orders/revenue 58 | 59 | Sample JSON : 04-orders-revenue.json 60 | DTO: OrderRevenueDTO 61 | 62 | ========= 63 | WINDOWS:| 64 | ========= 65 | 66 | =================================================== 67 | Retrieve All Orders Count for Windows by Order Type:| 68 | =================================================== 69 | 70 | curl -i http://localhost:8080/v1/orders/windows/count/restaurant_orders 71 | 72 | curl -i http://localhost:8080/v1/orders/windows/count/general_orders 73 | 74 | Sample JSON: 05-orders-count-windows.json 75 | DTO : OrdersCountPerStoreByWindows 76 | 77 | =================================================== 78 | Retrieve All Orders Count for All Types By Windows:| 79 | =================================================== 80 | 81 | curl -i http://localhost:8080/v1/orders/windows/count 82 | 83 | Sample JSON: 05-orders-count-windows.json 84 | DTO : OrdersCountPerStoreByWindows 85 | 86 | ========================================================================== 87 | Retrieve All Orders Count for Windows by passing custom from and to time:| 88 | ========================================================================== 89 | 90 | // start_time and to_times are inclusive in the result: 91 | 92 | Example: to_time=2023-02-14T11:33:30Z, If there is a start window with this value then it gets included in the fetched result. 
93 | curl -i http://localhost:8080/v1/orders/windows/count?from_time=2023-02-16T11:27:00&to_time=2023-02-16T11:27:00 94 | 95 | curl -i http://localhost:8081/v1/orders/windows/count?from_time=2023-02-16T11:27:00&to_time=2023-02-16T11:27:00 96 | 97 | curl -i http://localhost:8081/v1/orders/windows/count?from_time=2023-02-25T11:29:00&to_time=2023-02-16T11:27:00 98 | 99 | Sample JSON: 05-orders-count-windows.json 100 | DTO : OrdersCountPerStoreByWindows 101 | 102 | 103 | =================================================== 104 | Retrieve All Orders Revenue for Windows by Order Type:| 105 | =================================================== 106 | 107 | curl -i http://localhost:8080/v1/orders/windows/revenue/restaurant_orders 108 | 109 | curl -i http://localhost:8080/v1/orders/windows/revenue/general_orders 110 | 111 | Sample JSON: 06-orders-revenue-windows.json 112 | DTO : OrdersRevenuePerStoreByWindows 113 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/data/01-orders-count.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "locationId": "store_1234", 4 | "orderCount": 726 5 | }, 6 | { 7 | "locationId": "store_4567", 8 | "orderCount": 726 9 | } 10 | ] -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/data/02-orders-count-by-locationid.json: -------------------------------------------------------------------------------- 1 | { 2 | "locationId": "store_1234", 3 | "orderCount": 726 4 | } -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/data/03-count-with-all-orders-types.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "locationId": "store_1234", 4 | "orderCount": 726, 5 | "orderType": "GENERAL" 6 | }, 7 | { 8 | "locationId": "store_4567", 9 | "orderCount": 726, 10 | "orderType": "GENERAL" 11 | }, 12 | { 13 | "locationId": "store_1234", 14 | "orderCount": 726, 15 | "orderType": "RESTAURANT" 16 | }, 17 | { 18 | "locationId": "store_4567", 19 | "orderCount": 726, 20 | "orderType": "RESTAURANT" 21 | } 22 | ] -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/data/04-orders-revenue.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "locationId": "store_1234", 4 | "orderType": "GENERAL", 5 | "totalRevenue": { 6 | "locationId": "store_1234", 7 | "runnuingOrderCount": 200, 8 | "runningRevenue": 5400.00 9 | } 10 | }, 11 | { 12 | "locationId": "store_4567", 13 | "orderType": "GENERAL", 14 | "totalRevenue": { 15 | "locationId": "store_4567", 16 | "runnuingOrderCount": 200, 17 | "runningRevenue": 5400.00 18 | } 19 | } 20 | ] -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/data/05-orders-count-windows.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "locationId": "store_1234", 4 | "orderCount": 27, 5 | "orderType": 
"RESTAURANT", 6 | "startWindow": "2023-02-16T11:27:00", 7 | "endWindow": "2023-02-16T11:27:30" 8 | }, 9 | { 10 | "locationId": "store_1234", 11 | "orderCount": 29, 12 | "orderType": "RESTAURANT", 13 | "startWindow": "2023-02-16T11:27:30", 14 | "endWindow": "2023-02-16T11:28:00" 15 | } 16 | ] -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/data/06-orders-revenue-windows.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "locationId": "store_1234", 4 | "totalRevenue": { 5 | "locationId": "store_1234", 6 | "runnuingOrderCount": 27, 7 | "runningRevenue": 405.00 8 | }, 9 | "orderType": "RESTAURANT", 10 | "startWindow": "2023-02-16T11:27:00", 11 | "endWindow": "2023-02-16T11:27:30" 12 | }, 13 | { 14 | "locationId": "store_1234", 15 | "totalRevenue": { 16 | "locationId": "store_1234", 17 | "runnuingOrderCount": 29, 18 | "runningRevenue": 435.00 19 | } 20 | } 21 | ] -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | %d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/orders_count_by_all_types.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "locationId": "store_1234", 4 | "orderCount": 100, 5 | "orderType": "GENERAL" 6 | }, 7 | { 8 | "locationId": "store_4567", 9 | "orderCount": 100, 10 | "orderType": "GENERAL" 11 | }, 12 | { 13 | "locationId": "store_1234", 14 | "orderCount": 100, 15 | "orderType": "RESTAURANT" 16 | }, 17 | { 18 | "locationId": "store_4567", 19 | "orderCount": 100, 20 | "orderType": "RESTAURANT" 21 | } 22 | ] -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/main/resources/orders_count_by_type.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "locationId": "store_1234", 4 | "orderCount": 100 5 | }, 6 | { 7 | "locationId": "store_4567", 8 | "orderCount": 100 9 | } 10 | ] -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/test/java/com/learnkafkastreams/controller/OrdersControllerTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.controller; 2 | 3 | public class OrdersControllerTest { 4 | } 5 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/test/java/com/learnkafkastreams/topology/OrdersTopologyIntegrationTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import 
com.learnkafkastreams.domain.Order; 6 | import com.learnkafkastreams.domain.OrderLineItem; 7 | import com.learnkafkastreams.domain.OrderType; 8 | import com.learnkafkastreams.service.OrderService; 9 | import org.apache.kafka.streams.KafkaStreams; 10 | import org.apache.kafka.streams.KeyValue; 11 | import org.junit.jupiter.api.AfterEach; 12 | import org.junit.jupiter.api.BeforeEach; 13 | import org.junit.jupiter.api.Test; 14 | import org.springframework.beans.factory.annotation.Autowired; 15 | import org.springframework.boot.test.context.SpringBootTest; 16 | import org.springframework.kafka.config.StreamsBuilderFactoryBean; 17 | import org.springframework.kafka.core.KafkaTemplate; 18 | import org.springframework.kafka.test.EmbeddedKafkaBroker; 19 | import org.springframework.kafka.test.context.EmbeddedKafka; 20 | import org.springframework.test.annotation.DirtiesContext; 21 | import org.springframework.test.context.TestPropertySource; 22 | 23 | import java.math.BigDecimal; 24 | import java.time.Duration; 25 | import java.time.LocalDateTime; 26 | import java.time.ZoneId; 27 | import java.util.List; 28 | import java.util.Objects; 29 | 30 | import static com.learnkafkastreams.topology.OrdersTopology.*; 31 | import static java.util.concurrent.TimeUnit.SECONDS; 32 | import static org.hamcrest.Matchers.equalTo; 33 | import static org.junit.jupiter.api.Assertions.assertEquals; 34 | 35 | 36 | @SpringBootTest 37 | public class OrdersTopologyIntegrationTest { 38 | 39 | @Autowired 40 | KafkaTemplate kafkaTemplate; 41 | 42 | @Autowired 43 | StreamsBuilderFactoryBean streamsBuilderFactoryBean; 44 | @Autowired 45 | ObjectMapper objectMapper; 46 | 47 | 48 | private void publishOrders() { 49 | orders() 50 | .forEach(order -> { 51 | String orderJSON = null; 52 | try { 53 | orderJSON = objectMapper.writeValueAsString(order.value); 54 | } catch (JsonProcessingException e) { 55 | throw new RuntimeException(e); 56 | } 57 | kafkaTemplate.send(ORDERS, order.key, orderJSON); 58 | }); 59 | 60 | 61 | } 62 | 63 | 64 | static List> orders() { 65 | 66 | var orderItems = List.of( 67 | new OrderLineItem("Bananas", 2, new BigDecimal("2.00")), 68 | new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00")) 69 | ); 70 | 71 | var orderItemsRestaurant = List.of( 72 | new OrderLineItem("Pizza", 2, new BigDecimal("12.00")), 73 | new OrderLineItem("Coffee", 1, new BigDecimal("3.00")) 74 | ); 75 | 76 | var order1 = new Order(12345, "store_1234", 77 | new BigDecimal("27.00"), 78 | OrderType.GENERAL, 79 | orderItems, 80 | //LocalDateTime.now() 81 | LocalDateTime.parse("2023-02-21T21:25:01") 82 | ); 83 | 84 | var order2 = new Order(54321, "store_1234", 85 | new BigDecimal("15.00"), 86 | OrderType.RESTAURANT, 87 | orderItemsRestaurant, 88 | //LocalDateTime.now() 89 | LocalDateTime.parse("2023-02-21T21:25:01") 90 | ); 91 | var keyValue1 = KeyValue.pair(order1.orderId().toString() 92 | , order1); 93 | 94 | var keyValue2 = KeyValue.pair(order2.orderId().toString() 95 | , order2); 96 | 97 | 98 | return List.of(keyValue1, keyValue2); 99 | 100 | } 101 | 102 | } 103 | -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/orders-streams-app/src/test/java/com/learnkafkastreams/topology/OrdersTopologyTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import com.learnkafkastreams.domain.Order; 4 | import com.learnkafkastreams.domain.OrderLineItem; 5 | 
import com.learnkafkastreams.domain.OrderType; 6 | import org.apache.kafka.streams.KeyValue; 7 | import org.apache.kafka.streams.TestInputTopic; 8 | import org.apache.kafka.streams.TopologyTestDriver; 9 | 10 | import java.math.BigDecimal; 11 | import java.time.LocalDateTime; 12 | import java.util.List; 13 | 14 | import static com.learnkafkastreams.topology.OrdersTopology.ORDERS; 15 | 16 | class OrdersTopologyTest { 17 | 18 | TopologyTestDriver topologyTestDriver = null; 19 | TestInputTopic ordersInputTopic = null; 20 | 21 | static String INPUT_TOPIC = ORDERS; 22 | 23 | static List> orders(){ 24 | 25 | var orderItems = List.of( 26 | new OrderLineItem("Bananas", 2, new BigDecimal("2.00")), 27 | new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00")) 28 | ); 29 | 30 | var orderItemsRestaurant = List.of( 31 | new OrderLineItem("Pizza", 2, new BigDecimal("12.00")), 32 | new OrderLineItem("Coffee", 1, new BigDecimal("3.00")) 33 | ); 34 | 35 | var order1 = new Order(12345, "store_1234", 36 | new BigDecimal("27.00"), 37 | OrderType.GENERAL, 38 | orderItems, 39 | LocalDateTime.now() 40 | //LocalDateTime.now(ZoneId.of("UTC")) 41 | ); 42 | 43 | var order2 = new Order(54321, "store_1234", 44 | new BigDecimal("15.00"), 45 | OrderType.RESTAURANT, 46 | orderItemsRestaurant, 47 | LocalDateTime.now() 48 | //LocalDateTime.now(ZoneId.of("UTC")) 49 | ); 50 | var keyValue1 = KeyValue.pair( order1.orderId().toString() 51 | , order1); 52 | 53 | var keyValue2 = KeyValue.pair( order2.orderId().toString() 54 | , order2); 55 | 56 | 57 | return List.of(keyValue1, keyValue2); 58 | 59 | } 60 | } -------------------------------------------------------------------------------- /explore-kafka-streams-using-spring-boot/orders-management-streams/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'orders-management-streams' 2 | include 'orders-domain' 3 | include 'orders-streams-app' 4 | 5 | -------------------------------------------------------------------------------- /explore-kafka-streams/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/** 6 | !**/src/test/** 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | 17 | ### IntelliJ IDEA ### 18 | .idea 19 | *.iws 20 | *.iml 21 | *.ipr 22 | out/ 23 | 24 | ### NetBeans ### 25 | /nbproject/private/ 26 | /nbbuild/ 27 | /dist/ 28 | /nbdist/ 29 | /.nb-gradle/ 30 | 31 | ### VS Code ### 32 | .vscode/ 33 | 34 | .DS_Store -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id "io.freefair.lombok" version "6.5.1" 4 | } 5 | 6 | group 'com.learnkafkastreams' 7 | version 'unspecified' 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | dependencies { 14 | 15 | implementation 'org.apache.kafka:kafka-streams:3.3.1' 16 | 17 | //jackson for custom serdes 18 | implementation 'com.fasterxml.jackson.core:jackson-databind:2.14.1' 19 | implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.14.1' 20 | 21 | 22 | //log 23 | implementation 'ch.qos.logback:logback-core:1.2.11' 24 | implementation 'ch.qos.logback:logback-classic:1.2.11' 25 | implementation 'org.slf4j:slf4j-api:1.7.36' 26 
| 27 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1' 28 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1' 29 | } 30 | 31 | test { 32 | useJUnitPlatform() 33 | } -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/domain/Alphabet.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record Alphabet( 4 | String abbreviation, 5 | String description 6 | ) { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/domain/AlphabetWordAggregate.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | import java.util.HashSet; 6 | import java.util.Set; 7 | 8 | @Slf4j 9 | public record AlphabetWordAggregate(String key, 10 | Set valueList, 11 | int runningCount) { 12 | 13 | 14 | public AlphabetWordAggregate() { 15 | this("", new HashSet<>(), 0); 16 | } 17 | 18 | 19 | public AlphabetWordAggregate updateNewEvents(String key, String neVwalue){ 20 | 21 | return null; 22 | } 23 | 24 | 25 | public static void main(String[] args) { 26 | 27 | 28 | var al =new AlphabetWordAggregate(); 29 | 30 | } 31 | 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/launcher/AggregatingStreamPlayGroundApp.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.launcher; 2 | 3 | import com.learnkafkastreams.topology.ExploreAggregateOperatorsTopology; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.clients.admin.AdminClient; 6 | import org.apache.kafka.clients.admin.NewTopic; 7 | import org.apache.kafka.clients.consumer.ConsumerConfig; 8 | import org.apache.kafka.common.serialization.Serdes; 9 | import org.apache.kafka.streams.KafkaStreams; 10 | import org.apache.kafka.streams.StreamsConfig; 11 | 12 | import java.util.List; 13 | import java.util.Properties; 14 | import java.util.stream.Collectors; 15 | 16 | import static com.learnkafkastreams.topology.ExploreAggregateOperatorsTopology.AGGREGATE; 17 | import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG; 18 | 19 | @Slf4j 20 | public class AggregatingStreamPlayGroundApp { 21 | 22 | 23 | public static void main(String[] args) { 24 | 25 | var kTableTopology = ExploreAggregateOperatorsTopology.build(); 26 | 27 | Properties config = new Properties(); 28 | config.put(StreamsConfig.APPLICATION_ID_CONFIG, "stateful-operation"); // consumer group 29 | config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 30 | config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 31 | config.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass()); 32 | 33 | 34 | createTopics(config, List.of(AGGREGATE)); 35 | var kafkaStreams = new KafkaStreams(kTableTopology, config); 36 | 37 | // Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close)); 38 | 39 | log.info("Starting Greeting streams"); 40 | kafkaStreams.start(); 41 | } 42 | 43 | private static void createTopics(Properties config, List greetings) { 44 | 45 | AdminClient admin = AdminClient.create(config); 46 | 
var partitions = 1; 47 | short replication = 1; 48 | 49 | var newTopics = greetings 50 | .stream() 51 | .map(topic ->{ 52 | return new NewTopic(topic, partitions, replication); 53 | }) 54 | .collect(Collectors.toList()); 55 | 56 | var createTopicResult = admin.createTopics(newTopics); 57 | try { 58 | createTopicResult 59 | .all().get(); 60 | log.info("topics are created successfully"); 61 | } catch (Exception e) { 62 | log.error("Exception creating topics : {} ",e.getMessage(), e); 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/launcher/JoiningStreamPlayGroundApp.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.launcher; 2 | 3 | import com.learnkafkastreams.topology.ExploreJoinsOperatorsTopology; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.clients.admin.AdminClient; 6 | import org.apache.kafka.clients.admin.NewTopic; 7 | import org.apache.kafka.clients.consumer.ConsumerConfig; 8 | import org.apache.kafka.streams.KafkaStreams; 9 | import org.apache.kafka.streams.StreamsConfig; 10 | 11 | import java.util.List; 12 | import java.util.Properties; 13 | import java.util.stream.Collectors; 14 | 15 | import static com.learnkafkastreams.topology.ExploreJoinsOperatorsTopology.ALPHABETS; 16 | import static com.learnkafkastreams.topology.ExploreJoinsOperatorsTopology.ALPHABETS_ABBREVATIONS; 17 | 18 | @Slf4j 19 | public class JoiningStreamPlayGroundApp { 20 | 21 | 22 | public static void main(String[] args) { 23 | 24 | var kTableTopology = ExploreJoinsOperatorsTopology.build(); 25 | 26 | Properties config = new Properties(); 27 | config.put(StreamsConfig.APPLICATION_ID_CONFIG, "joins1"); // consumer group 28 | config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 29 | config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 30 | config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "5000"); 31 | 32 | createTopics(config, List.of(ALPHABETS,ALPHABETS_ABBREVATIONS )); 33 | 34 | //createTopicsCopartitioningDemo(config, List.of(ALPHABETS,ALPHABETS_ABBREVATIONS )); 35 | var kafkaStreams = new KafkaStreams(kTableTopology, config); 36 | 37 | Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close)); 38 | 39 | log.info("Starting Greeting streams"); 40 | kafkaStreams.start(); 41 | } 42 | 43 | private static void createTopicsCopartitioningDemo(Properties config, List alphabets) { 44 | 45 | AdminClient admin = AdminClient.create(config); 46 | var partitions = 1; 47 | short replication = 1; 48 | 49 | var newTopics = alphabets 50 | .stream() 51 | .map(topic ->{ 52 | if(topic.equals(ALPHABETS_ABBREVATIONS)){ 53 | return new NewTopic(topic, 3, replication); 54 | } 55 | return new NewTopic(topic, partitions, replication); 56 | }) 57 | .collect(Collectors.toList()); 58 | 59 | var createTopicResult = admin.createTopics(newTopics); 60 | try { 61 | createTopicResult 62 | .all().get(); 63 | log.info("topics are created successfully"); 64 | } catch (Exception e) { 65 | log.error("Exception creating topics : {} ",e.getMessage(), e); 66 | } 67 | 68 | 69 | } 70 | 71 | private static void createTopics(Properties config, List greetings) { 72 | 73 | AdminClient admin = AdminClient.create(config); 74 | var partitions = 1; 75 | short replication = 1; 76 | 77 | var newTopics = greetings 78 | .stream() 79 | .map(topic ->{ 80 | return new NewTopic(topic, partitions, replication); 
81 | }) 82 | .collect(Collectors.toList()); 83 | 84 | var createTopicResult = admin.createTopics(newTopics); 85 | try { 86 | createTopicResult 87 | .all().get(); 88 | log.info("topics are created successfully"); 89 | } catch (Exception e) { 90 | log.error("Exception creating topics : {} ",e.getMessage(), e); 91 | } 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/launcher/WindowsStreamPlaygroundApp.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.launcher; 2 | 3 | import com.learnkafkastreams.topology.ExploreWindowTopology; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.clients.admin.AdminClient; 6 | import org.apache.kafka.clients.admin.NewTopic; 7 | import org.apache.kafka.clients.consumer.ConsumerConfig; 8 | import org.apache.kafka.streams.KafkaStreams; 9 | import org.apache.kafka.streams.StreamsConfig; 10 | import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler; 11 | 12 | import java.util.List; 13 | import java.util.Properties; 14 | import java.util.stream.Collectors; 15 | 16 | import static com.learnkafkastreams.topology.ExploreJoinsOperatorsTopology.ALPHABETS; 17 | import static com.learnkafkastreams.topology.ExploreJoinsOperatorsTopology.ALPHABETS_ABBREVATIONS; 18 | import static com.learnkafkastreams.topology.ExploreWindowTopology.WINDOW_WORDS; 19 | 20 | @Slf4j 21 | public class WindowsStreamPlaygroundApp { 22 | 23 | 24 | public static void main(String[] args) { 25 | 26 | var joinTopology = ExploreWindowTopology.build(); 27 | 28 | Properties config = new Properties(); 29 | config.put(StreamsConfig.APPLICATION_ID_CONFIG, "windows-2"); // consumer group 30 | config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 31 | config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 32 | config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "10000"); 33 | 34 | createTopics(config, List.of(WINDOW_WORDS )); 35 | var kafkaStreams = new KafkaStreams(joinTopology, config); 36 | 37 | Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close)); 38 | 39 | log.info("Starting Windowed streams"); 40 | kafkaStreams.start(); 41 | } 42 | 43 | private static void createTopics(Properties config, List greetings) { 44 | 45 | AdminClient admin = AdminClient.create(config); 46 | var partitions = 2; 47 | short replication = 1; 48 | 49 | var newTopics = greetings 50 | .stream() 51 | .map(topic ->{ 52 | return new NewTopic(topic, partitions, replication); 53 | }) 54 | .collect(Collectors.toList()); 55 | 56 | var createTopicResult = admin.createTopics(newTopics); 57 | try { 58 | createTopicResult 59 | .all().get(); 60 | log.info("topics are created successfully"); 61 | } catch (Exception e) { 62 | log.error("Exception creating topics : {} ",e.getMessage(), e); 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/producer/AggregateProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 6 | import static com.learnkafkastreams.topology.ExploreAggregateOperatorsTopology.AGGREGATE; 7 | 8 | @Slf4j 9 | public class 
AggregateProducer { 10 | 11 | 12 | public static void main(String[] args) throws InterruptedException { 13 | 14 | var key = "A"; 15 | //String key = null; 16 | 17 | var word = "Apple"; 18 | var word1 = "Alligator"; 19 | var word2 = "Ambulance"; 20 | 21 | var recordMetaData = publishMessageSync(AGGREGATE, key,word); 22 | log.info("Published the alphabet message : {} ", recordMetaData); 23 | 24 | var recordMetaData1 = publishMessageSync(AGGREGATE, key,word1); 25 | log.info("Published the alphabet message : {} ", recordMetaData1); 26 | 27 | var recordMetaData2 = publishMessageSync(AGGREGATE, key,word2); 28 | log.info("Published the alphabet message : {} ", recordMetaData2); 29 | 30 | var bKey = "B"; 31 | //String bKey = null; 32 | 33 | var bWord1 = "Bus"; 34 | var bWord2 = "Baby"; 35 | var recordMetaData3 = publishMessageSync(AGGREGATE, bKey,bWord1); 36 | log.info("Published the alphabet message : {} ", recordMetaData2); 37 | 38 | var recordMetaData4 = publishMessageSync(AGGREGATE, bKey,bWord2); 39 | log.info("Published the alphabet message : {} ", recordMetaData2); 40 | 41 | } 42 | 43 | 44 | 45 | } 46 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/producer/JoinsMockDataProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | 6 | import java.util.ArrayList; 7 | import java.util.Map; 8 | 9 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 10 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSyncWithDelay; 11 | import static com.learnkafkastreams.topology.ExploreJoinsOperatorsTopology.ALPHABETS; 12 | import static com.learnkafkastreams.topology.ExploreJoinsOperatorsTopology.ALPHABETS_ABBREVATIONS; 13 | import static java.time.Instant.now; 14 | 15 | @Slf4j 16 | public class JoinsMockDataProducer { 17 | 18 | 19 | public static void main(String[] args) throws InterruptedException { 20 | 21 | 22 | var alphabetMap = Map.of( 23 | "A", "A is the first letter in English Alphabets.", 24 | "B", "B is the second letter in English Alphabets." 25 | // ,"E", "E is the fifth letter in English Alphabets." 26 | // , 27 | // "A", "A is the First letter in English Alphabets.", 28 | // "B", "B is the Second letter in English Alphabets." 29 | ); 30 | //publishMessages(alphabetMap, ALPHABETS); 31 | 32 | //JoinWindows 33 | //-4 & 4 will trigger the join 34 | //-6 -5 & 5, 6 wont trigger the join 35 | //publishMessagesWithDelay(alphabetMap, ALPHABETS, 4); 36 | 37 | var alphabetAbbrevationMap = Map.of( 38 | "A", "Apple", 39 | "B", "Bus" 40 | , "C", "Cat" 41 | 42 | ); 43 | publishMessages(alphabetAbbrevationMap, ALPHABETS_ABBREVATIONS); 44 | 45 | alphabetAbbrevationMap = Map.of( 46 | "A", "Airplane", 47 | "B", "Baby." 
48 | 49 | ); 50 | // publishMessages(alphabetAbbrevationMap, ALPHABETS_ABBREVATIONS); 51 | 52 | } 53 | 54 | private static void publishMessagesToSimulateGrace(Map alphabetMap, String topicName, int delaySeconds) throws InterruptedException { 55 | var producerRecords = new ArrayList>(); 56 | alphabetMap 57 | .forEach((key, value) 58 | -> producerRecords.add(new ProducerRecord<>(topicName, 0, now().toEpochMilli(), key, value))); 59 | 60 | Thread.sleep(delaySeconds* 1000L); 61 | ProducerUtil.publishMessageSync(producerRecords); 62 | } 63 | 64 | private static void publishMessagesWithDelay(Map alphabetMap, String topic, int delaySeconds) { 65 | alphabetMap 66 | .forEach((key, value) -> { 67 | var recordMetaData = publishMessageSyncWithDelay(topic, key, value, delaySeconds); 68 | log.info("Published the alphabet message : {} ", recordMetaData); 69 | }); 70 | } 71 | 72 | 73 | private static void publishMessages(Map alphabetMap, String topic) { 74 | 75 | alphabetMap 76 | .forEach((key, value) -> { 77 | var recordMetaData = publishMessageSync(topic, key, value); 78 | log.info("Published the alphabet message : {} ", recordMetaData); 79 | }); 80 | } 81 | 82 | 83 | } 84 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/producer/ProducerUtil.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.producer.KafkaProducer; 5 | import org.apache.kafka.clients.producer.ProducerConfig; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | import org.apache.kafka.clients.producer.RecordMetadata; 8 | import org.apache.kafka.common.serialization.StringSerializer; 9 | 10 | import java.util.ArrayList; 11 | import java.util.HashMap; 12 | import java.util.Map; 13 | import java.util.concurrent.ExecutionException; 14 | 15 | import static java.time.Instant.now; 16 | 17 | @Slf4j 18 | public class ProducerUtil { 19 | 20 | 21 | static KafkaProducer producer = new KafkaProducer(producerProps()); 22 | 23 | public static Map producerProps(){ 24 | 25 | Map propsMap = new HashMap<>(); 26 | propsMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 27 | propsMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 28 | propsMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 29 | return propsMap; 30 | 31 | } 32 | 33 | 34 | public static RecordMetadata publishMessageSync(String topicName, String key, String message ){ 35 | 36 | ProducerRecord producerRecord = new ProducerRecord<>(topicName, key, message); 37 | return getRecordMetadata(producerRecord); 38 | } 39 | 40 | public static RecordMetadata publishMessageSyncWithDelay(String topicName, String key, String message , long delay){ 41 | 42 | ProducerRecord producerRecord = new ProducerRecord<>(topicName, 0, now().plusSeconds(delay).toEpochMilli(), key, message); 43 | return getRecordMetadata(producerRecord); 44 | } 45 | 46 | private static RecordMetadata getRecordMetadata(ProducerRecord producerRecord) { 47 | RecordMetadata recordMetadata=null; 48 | try { 49 | log.info("producerRecord : " + producerRecord); 50 | recordMetadata = producer.send(producerRecord).get(); 51 | } catch (InterruptedException e) { 52 | log.error("InterruptedException in publishMessageSync : {} ", e.getMessage(), e); 53 | } catch (ExecutionException e) { 
54 | log.error("ExecutionException in publishMessageSync : {} ", e.getMessage(), e); 55 | }catch(Exception e){ 56 | log.error("Exception in publishMessageSync : {} ", e.getMessage(), e); 57 | } 58 | return recordMetadata; 59 | } 60 | 61 | public static void publishMessageSync(ArrayList> producerRecords) { 62 | 63 | producerRecords.forEach(producerRecord -> getRecordMetadata(producerRecord)); 64 | 65 | 66 | } 67 | } -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/producer/WindowsMockDataProduer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | import static com.learnkafkastreams.topology.ExploreWindowTopology.WINDOW_WORDS; 6 | import static java.lang.Thread.sleep; 7 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 8 | 9 | @Slf4j 10 | public class WindowsMockDataProduer { 11 | 12 | 13 | 14 | public static void main(String[] args) throws InterruptedException { 15 | 16 | bulkMockDataProducer(); 17 | //bulkMockDataProducer_SlidingWindows(); 18 | 19 | } 20 | 21 | private static void bulkMockDataProducer() throws InterruptedException { 22 | var key = "A"; 23 | var word = "Apple"; 24 | int count = 0; 25 | while(count<100){ 26 | var recordMetaData = publishMessageSync(WINDOW_WORDS, key,word); 27 | log.info("Published the alphabet message : {} ", recordMetaData); 28 | sleep(1000); 29 | count++; 30 | } 31 | } 32 | 33 | private static void bulkMockDataProducer_SlidingWindows() throws InterruptedException { 34 | var key = "A"; 35 | var word = "Apple"; 36 | int count = 0; 37 | while(count<10){ 38 | var recordMetaData = publishMessageSync(WINDOW_WORDS, key,word); 39 | log.info("Published the alphabet message : {} ", recordMetaData); 40 | sleep(1000); 41 | count++; 42 | } 43 | } 44 | 45 | 46 | } 47 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/serdes/JsonDeserializer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.serdes; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.apache.kafka.common.header.Headers; 9 | import org.apache.kafka.common.serialization.Deserializer; 10 | 11 | import java.nio.charset.StandardCharsets; 12 | import java.util.Map; 13 | 14 | @Slf4j 15 | public class JsonDeserializer implements Deserializer { 16 | 17 | private final ObjectMapper objectMapper = new ObjectMapper() 18 | .registerModule(new JavaTimeModule()) 19 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 20 | 21 | private Class destinationClass; 22 | 23 | public JsonDeserializer(Class destinationClass) { 24 | this.destinationClass = destinationClass; 25 | } 26 | 27 | @Override 28 | public void configure(Map configs, boolean isKey) { 29 | Deserializer.super.configure(configs, isKey); 30 | } 31 | 32 | @Override 33 | public T deserialize(String topic, byte[] data) { 34 | if (data == null) { 35 | return null; 36 | } 37 | try { 38 | return objectMapper.readValue(new String(data, StandardCharsets.UTF_8), 
destinationClass); 39 | } catch (JsonProcessingException e) { 40 | log.error("JsonProcessingException Deserializing to {} : {} ", destinationClass, e.getMessage(), e); 41 | throw new RuntimeException(e); 42 | }catch (Exception e){ 43 | log.error("Exception Deserializing to {} : {} ", destinationClass, e.getMessage(), e); 44 | throw e; 45 | } 46 | } 47 | 48 | @Override 49 | public T deserialize(String topic, Headers headers, byte[] data) { 50 | return Deserializer.super.deserialize(topic, headers, data); 51 | } 52 | 53 | @Override 54 | public void close() { 55 | Deserializer.super.close(); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/serdes/JsonSerializer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.serdes; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.apache.kafka.common.serialization.Serializer; 9 | 10 | import java.nio.charset.StandardCharsets; 11 | 12 | @Slf4j 13 | public class JsonSerializer implements Serializer { 14 | 15 | private final ObjectMapper objectMapper = new ObjectMapper() 16 | .registerModule(new JavaTimeModule()) 17 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 18 | 19 | @Override 20 | public byte[] serialize(String topic, T type) { 21 | try { 22 | return objectMapper.writeValueAsString(type).getBytes(StandardCharsets.UTF_8); 23 | } catch (JsonProcessingException e) { 24 | log.error("JsonProcessingException Serializing to JSON : {} ", e.getMessage(), e); 25 | throw new RuntimeException(e); 26 | }catch (Exception e){ 27 | log.error("Exception Serializing, Message is {} ", e.getMessage(), e); 28 | throw e; 29 | } 30 | } 31 | 32 | @Override 33 | public void close() {} 34 | } 35 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/serdes/SerdesFactory.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.serdes; 2 | 3 | import com.learnkafkastreams.domain.Alphabet; 4 | import com.learnkafkastreams.domain.AlphabetWordAggregate; 5 | import org.apache.kafka.common.serialization.Serde; 6 | import org.apache.kafka.common.serialization.Serdes; 7 | 8 | public class SerdesFactory { 9 | 10 | 11 | public static Serde alphabetWordAggregate() { 12 | 13 | JsonSerializer jsonSerializer = new JsonSerializer<>(); 14 | 15 | JsonDeserializer jsonDeSerializer = new JsonDeserializer<>(AlphabetWordAggregate.class); 16 | return Serdes.serdeFrom(jsonSerializer, jsonDeSerializer); 17 | } 18 | 19 | 20 | public static Serde alphabet() { 21 | 22 | JsonSerializer jsonSerializer = new JsonSerializer<>(); 23 | 24 | JsonDeserializer jsonDeSerializer = new JsonDeserializer<>(Alphabet.class); 25 | return Serdes.serdeFrom(jsonSerializer, jsonDeSerializer); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/topology/ExploreAggregateOperatorsTopology.java: -------------------------------------------------------------------------------- 1 | 
package com.learnkafkastreams.topology; 2 | 3 | import com.learnkafkastreams.domain.AlphabetWordAggregate; 4 | import com.learnkafkastreams.serdes.SerdesFactory; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.apache.kafka.common.serialization.Serdes; 7 | import org.apache.kafka.common.utils.Bytes; 8 | import org.apache.kafka.streams.KeyValue; 9 | import org.apache.kafka.streams.StreamsBuilder; 10 | import org.apache.kafka.streams.Topology; 11 | import org.apache.kafka.streams.kstream.*; 12 | import org.apache.kafka.streams.state.KeyValueStore; 13 | 14 | @Slf4j 15 | public class ExploreAggregateOperatorsTopology { 16 | 17 | 18 | public static String AGGREGATE = "aggregate"; 19 | 20 | public static Topology build(){ 21 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 22 | 23 | return streamsBuilder.build(); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/topology/ExploreJoinsOperatorsTopology.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import com.learnkafkastreams.domain.Alphabet; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.common.serialization.Serdes; 6 | import org.apache.kafka.streams.StreamsBuilder; 7 | import org.apache.kafka.streams.Topology; 8 | import org.apache.kafka.streams.kstream.*; 9 | 10 | import java.time.Duration; 11 | 12 | @Slf4j 13 | public class ExploreJoinsOperatorsTopology { 14 | 15 | 16 | public static String ALPHABETS = "alphabets"; // A => First letter in the english alphabet 17 | public static String ALPHABETS_ABBREVATIONS = "alphabets_abbreviations"; // A=> Apple 18 | 19 | 20 | public static Topology build(){ 21 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 22 | 23 | return streamsBuilder.build(); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/java/com/learnkafkastreams/topology/ExploreWindowTopology.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.common.serialization.Serdes; 6 | import org.apache.kafka.streams.StreamsBuilder; 7 | import org.apache.kafka.streams.Topology; 8 | import org.apache.kafka.streams.kstream.*; 9 | 10 | import java.time.*; 11 | import java.time.format.DateTimeFormatter; 12 | 13 | @Slf4j 14 | public class ExploreWindowTopology { 15 | 16 | public static final String WINDOW_WORDS = "windows-words"; 17 | 18 | public static Topology build(){ 19 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 20 | 21 | return streamsBuilder.build(); 22 | } 23 | 24 | 25 | private static void printLocalDateTimes(Windowed key, Long value) { 26 | var startTime = key.window().startTime(); 27 | var endTime = key.window().endTime(); 28 | 29 | LocalDateTime startLDT = LocalDateTime.ofInstant(startTime, ZoneId.of(ZoneId.SHORT_IDS.get("CST"))); 30 | LocalDateTime endLDT = LocalDateTime.ofInstant(endTime, ZoneId.of(ZoneId.SHORT_IDS.get("CST"))); 31 | log.info("startLDT : {} , endLDT : {}, Count : {}", startLDT, endLDT, value); 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /explore-kafka-streams/advanced-streams/src/main/resources/logback.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | %d [%t] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /explore-kafka-streams/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dilipsundarraj1/kafka-streams-api-for-developers/e1c1ecdcce46f0956f3f1e8bff1e8934ee7a0b4d/explore-kafka-streams/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /explore-kafka-streams/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /explore-kafka-streams/gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 
53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit 84 | 85 | APP_NAME="Gradle" 86 | APP_BASE_NAME=${0##*/} 87 | 88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 90 | 91 | # Use the maximum available, or set MAX_FD != -1 to use that value. 92 | MAX_FD=maximum 93 | 94 | warn () { 95 | echo "$*" 96 | } >&2 97 | 98 | die () { 99 | echo 100 | echo "$*" 101 | echo 102 | exit 1 103 | } >&2 104 | 105 | # OS specific support (must be 'true' or 'false'). 106 | cygwin=false 107 | msys=false 108 | darwin=false 109 | nonstop=false 110 | case "$( uname )" in #( 111 | CYGWIN* ) cygwin=true ;; #( 112 | Darwin* ) darwin=true ;; #( 113 | MSYS* | MINGW* ) msys=true ;; #( 114 | NONSTOP* ) nonstop=true ;; 115 | esac 116 | 117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 118 | 119 | 120 | # Determine the Java command to use to start the JVM. 121 | if [ -n "$JAVA_HOME" ] ; then 122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 123 | # IBM's JDK on AIX uses strange locations for the executables 124 | JAVACMD=$JAVA_HOME/jre/sh/java 125 | else 126 | JAVACMD=$JAVA_HOME/bin/java 127 | fi 128 | if [ ! -x "$JAVACMD" ] ; then 129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 130 | 131 | Please set the JAVA_HOME variable in your environment to match the 132 | location of your Java installation." 133 | fi 134 | else 135 | JAVACMD=java 136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | 142 | # Increase the maximum file descriptors if we can. 143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 144 | case $MAX_FD in #( 145 | max*) 146 | MAX_FD=$( ulimit -H -n ) || 147 | warn "Could not query maximum file descriptor limit" 148 | esac 149 | case $MAX_FD in #( 150 | '' | soft) :;; #( 151 | *) 152 | ulimit -n "$MAX_FD" || 153 | warn "Could not set maximum file descriptor limit to $MAX_FD" 154 | esac 155 | fi 156 | 157 | # Collect all arguments for the java command, stacking in reverse order: 158 | # * args from the command line 159 | # * the main class name 160 | # * -classpath 161 | # * -D...appname settings 162 | # * --module-path (only if needed) 163 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
164 | 165 | # For Cygwin or MSYS, switch paths to Windows format before running java 166 | if "$cygwin" || "$msys" ; then 167 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 168 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 169 | 170 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 171 | 172 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 173 | for arg do 174 | if 175 | case $arg in #( 176 | -*) false ;; # don't mess with options #( 177 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 178 | [ -e "$t" ] ;; #( 179 | *) false ;; 180 | esac 181 | then 182 | arg=$( cygpath --path --ignore --mixed "$arg" ) 183 | fi 184 | # Roll the args list around exactly as many times as the number of 185 | # args, so each arg winds up back in the position where it started, but 186 | # possibly modified. 187 | # 188 | # NB: a `for` loop captures its iteration list before it begins, so 189 | # changing the positional parameters here affects neither the number of 190 | # iterations, nor the values presented in `arg`. 191 | shift # remove old arg 192 | set -- "$@" "$arg" # push replacement arg 193 | done 194 | fi 195 | 196 | # Collect all arguments for the java command; 197 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of 198 | # shell script including quotes and variable substitutions, so put them in 199 | # double quotes to make sure that they get re-expanded; and 200 | # * put everything else in single quotes, so that it's not re-expanded. 201 | 202 | set -- \ 203 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 204 | -classpath "$CLASSPATH" \ 205 | org.gradle.wrapper.GradleWrapperMain \ 206 | "$@" 207 | 208 | # Use "xargs" to parse quoted args. 209 | # 210 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 211 | # 212 | # In Bash we could simply go: 213 | # 214 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 215 | # set -- "${ARGS[@]}" "$@" 216 | # 217 | # but POSIX shell has neither arrays nor command substitution, so instead we 218 | # post-process each arg (as a line of input to sed) to backslash-escape any 219 | # character that might be a shell metacharacter, then use eval to reverse 220 | # that process (while maintaining the separation between arguments), and wrap 221 | # the whole thing up as a single "set" statement. 222 | # 223 | # This will of course break if any of these variables contains a newline or 224 | # an unmatched quote. 225 | # 226 | 227 | eval "set -- $( 228 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 229 | xargs -n1 | 230 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 231 | tr '\n' ' ' 232 | )" '"$@"' 233 | 234 | exec "$JAVACMD" "$@" 235 | -------------------------------------------------------------------------------- /explore-kafka-streams/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id "io.freefair.lombok" version "6.5.1" 4 | } 5 | 6 | group 'com.learnkafkastreams' 7 | version 'unspecified' 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | dependencies { 14 | 15 | implementation 'org.apache.kafka:kafka-streams:3.3.1' 16 | 17 | //jackson for custom serdes 18 | implementation 'com.fasterxml.jackson.core:jackson-databind:2.14.1' 19 | implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.14.1' 20 | 21 | 22 | //log 23 | implementation 'ch.qos.logback:logback-core:1.2.11' 24 | implementation 'ch.qos.logback:logback-classic:1.2.11' 25 | implementation 'org.slf4j:slf4j-api:1.7.36' 26 | 27 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1' 28 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1' 29 | 30 | //kafka-streams test 31 | testImplementation 'org.apache.kafka:kafka-streams-test-utils:3.3.1' 32 | 33 | } 34 | 35 | test { 36 | useJUnitPlatform() 37 | } -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/main/java/com/learnkafkastreams/domain/Greeting.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | import java.time.LocalDateTime; 9 | 10 | @Builder 11 | @Data 12 | @AllArgsConstructor 13 | @NoArgsConstructor 14 | public class Greeting { 15 | private String message; 16 | private LocalDateTime timeStamp; 17 | } 18 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/main/java/com/learnkafkastreams/launcher/GreetingsStreamApp.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.launcher; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.admin.AdminClient; 5 | import org.apache.kafka.clients.admin.NewTopic; 6 | 7 | import java.util.List; 8 | import java.util.Properties; 9 | import java.util.stream.Collectors; 10 | 11 | @Slf4j 12 | public class GreetingsStreamApp { 13 | 14 | public static void main(String[] args) { 15 | 16 | 17 | } 18 | 19 | private static void createTopics(Properties config, List greetings) { 20 | 21 | AdminClient admin = AdminClient.create(config); 22 | var partitions = 1; 23 | short replication = 1; 24 | 25 | var newTopics = greetings 26 | .stream() 27 | .map(topic ->{ 28 | return new NewTopic(topic, partitions, replication); 29 | }) 30 | .collect(Collectors.toList()); 31 | 32 | var createTopicResult = admin.createTopics(newTopics); 33 | try { 34 | createTopicResult 35 | .all().get(); 36 | log.info("topics are created successfully"); 37 | } catch (Exception e) { 38 | log.error("Exception creating topics : {} ",e.getMessage(), e); 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/main/java/com/learnkafkastreams/launcher/KTableStreamApp.java: 
-------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.launcher; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.admin.AdminClient; 5 | import org.apache.kafka.clients.admin.NewTopic; 6 | import org.apache.kafka.clients.consumer.ConsumerConfig; 7 | import org.apache.kafka.streams.KafkaStreams; 8 | import org.apache.kafka.streams.StreamsConfig; 9 | 10 | import java.util.List; 11 | import java.util.Properties; 12 | import java.util.stream.Collectors; 13 | 14 | @Slf4j 15 | public class KTableStreamApp { 16 | 17 | 18 | public static void main(String[] args) { 19 | 20 | // var kTableTopology = ExploreKTableTopology.build(); 21 | 22 | Properties config = new Properties(); 23 | config.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable"); // consumer group 24 | config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 25 | config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 26 | 27 | //createTopics(config, List.of(WORDS)); 28 | //var kafkaStreams = new KafkaStreams(kTableTopology, config); 29 | 30 | // Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close)); 31 | 32 | log.info("Starting Greeting streams"); 33 | // kafkaStreams.start(); 34 | } 35 | 36 | private static void createTopics(Properties config, List greetings) { 37 | 38 | AdminClient admin = AdminClient.create(config); 39 | var partitions = 1; 40 | short replication = 1; 41 | 42 | var newTopics = greetings 43 | .stream() 44 | .map(topic ->{ 45 | return new NewTopic(topic, partitions, replication); 46 | }) 47 | .collect(Collectors.toList()); 48 | 49 | var createTopicResult = admin.createTopics(newTopics); 50 | try { 51 | createTopicResult 52 | .all().get(); 53 | log.info("topics are created successfully"); 54 | } catch (Exception e) { 55 | log.error("Exception creating topics : {} ",e.getMessage(), e); 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/main/java/com/learnkafkastreams/producer/GreetingMockDataProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import com.learnkafkastreams.domain.Greeting; 8 | import lombok.extern.slf4j.Slf4j; 9 | 10 | import java.time.LocalDateTime; 11 | import java.util.List; 12 | 13 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 14 | 15 | @Slf4j 16 | public class GreetingMockDataProducer { 17 | 18 | static String GREETINGS = "greetings"; 19 | 20 | public static void main(String[] args) { 21 | ObjectMapper objectMapper = new ObjectMapper() 22 | .registerModule(new JavaTimeModule()) 23 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 24 | 25 | englishGreetings(objectMapper); 26 | spanishGreetings(objectMapper); 27 | 28 | } 29 | 30 | private static void spanishGreetings(ObjectMapper objectMapper) { 31 | var spanishGreetings = List.of( 32 | new Greeting("Hello, Good Morning!", LocalDateTime.now()), 33 | new Greeting("Hello, Good Evening!", LocalDateTime.now()), 34 | new Greeting("Hello, Good Night!", LocalDateTime.now()) 35 | ); 36 | spanishGreetings 37 | .forEach(greeting -> { 38 | try { 39 | var 
greetingJSON = objectMapper.writeValueAsString(greeting); 40 | var recordMetaData = publishMessageSync(GREETINGS, null, greetingJSON); 41 | log.info("Published the alphabet message : {} ", recordMetaData); 42 | } catch (JsonProcessingException e) { 43 | throw new RuntimeException(e); 44 | } 45 | }); 46 | } 47 | 48 | private static void englishGreetings(ObjectMapper objectMapper) { 49 | var spanishGreetings = List.of( 50 | new Greeting("¡Hola buenos dias!", LocalDateTime.now()), 51 | new Greeting("¡Hola buenas tardes!", LocalDateTime.now()), 52 | new Greeting("¡Hola, buenas noches!", LocalDateTime.now()) 53 | ); 54 | spanishGreetings 55 | .forEach(greeting -> { 56 | try { 57 | var greetingJSON = objectMapper.writeValueAsString(greeting); 58 | var recordMetaData = publishMessageSync(GREETINGS, null, greetingJSON); 59 | log.info("Published the alphabet message : {} ", recordMetaData); 60 | } catch (JsonProcessingException e) { 61 | throw new RuntimeException(e); 62 | } 63 | }); 64 | } 65 | 66 | } 67 | 68 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/main/java/com/learnkafkastreams/producer/ProducerUtil.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.producer.KafkaProducer; 5 | import org.apache.kafka.clients.producer.ProducerConfig; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | import org.apache.kafka.clients.producer.RecordMetadata; 8 | import org.apache.kafka.common.serialization.StringSerializer; 9 | 10 | import java.util.HashMap; 11 | import java.util.Map; 12 | import java.util.concurrent.ExecutionException; 13 | 14 | @Slf4j 15 | public class ProducerUtil { 16 | 17 | 18 | static KafkaProducer producer = new KafkaProducer(producerProps()); 19 | 20 | public static Map producerProps(){ 21 | 22 | Map propsMap = new HashMap<>(); 23 | propsMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 24 | propsMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 25 | propsMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 26 | return propsMap; 27 | 28 | } 29 | 30 | 31 | public static RecordMetadata publishMessageSync(String topicName, String key, String message ){ 32 | 33 | ProducerRecord producerRecord = new ProducerRecord<>(topicName, key, message); 34 | RecordMetadata recordMetadata=null; 35 | 36 | try { 37 | log.info("producerRecord : " + producerRecord); 38 | recordMetadata = producer.send(producerRecord).get(); 39 | } catch (InterruptedException e) { 40 | log.error("InterruptedException in publishMessageSync : {} ", e.getMessage(), e); 41 | } catch (ExecutionException e) { 42 | log.error("ExecutionException in publishMessageSync : {} ", e.getMessage(), e); 43 | }catch(Exception e){ 44 | log.error("Exception in publishMessageSync : {} ", e.getMessage(), e); 45 | } 46 | return recordMetadata; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/main/java/com/learnkafkastreams/producer/WordsProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 6 | 7 | @Slf4j 8 
| public class WordsProducer { 9 | 10 | static String WORDS = "words"; 11 | 12 | public static void main(String[] args) throws InterruptedException { 13 | 14 | var key = "A"; 15 | 16 | var word = "Apple"; 17 | var word1 = "Alligator"; 18 | var word2 = "Ambulance"; 19 | 20 | var recordMetaData = publishMessageSync(WORDS, key,word); 21 | log.info("Published the alphabet message : {} ", recordMetaData); 22 | 23 | var recordMetaData1 = publishMessageSync(WORDS, key,word1); 24 | log.info("Published the alphabet message : {} ", recordMetaData1); 25 | 26 | var recordMetaData2 = publishMessageSync(WORDS, key,word2); 27 | log.info("Published the alphabet message : {} ", recordMetaData2); 28 | 29 | var bKey = "B"; 30 | 31 | var bWord1 = "Bus"; 32 | var bWord2 = "Baby"; 33 | var recordMetaData3 = publishMessageSync(WORDS, bKey,bWord1); 34 | log.info("Published the alphabet message : {} ", recordMetaData2); 35 | 36 | var recordMetaData4 = publishMessageSync(WORDS, bKey,bWord2); 37 | log.info("Published the alphabet message : {} ", recordMetaData2); 38 | 39 | } 40 | 41 | 42 | 43 | } 44 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | %d [%t] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/test/java/com/learnkafkastreams/domain/GreetingTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | 4 | import com.fasterxml.jackson.core.JsonProcessingException; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import com.fasterxml.jackson.databind.SerializationFeature; 7 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 8 | import org.junit.jupiter.api.Test; 9 | 10 | import java.time.LocalDateTime; 11 | 12 | public class GreetingTest { 13 | 14 | @Test 15 | void greetingsJson() throws JsonProcessingException { 16 | JavaTimeModule module = new JavaTimeModule(); 17 | var objectMapper = new ObjectMapper() 18 | .registerModule(module) 19 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false) 20 | ; 21 | 22 | var greeting = new Greeting("Good Morning", LocalDateTime.now()); 23 | 24 | var greetingJSON = objectMapper.writeValueAsString(greeting); 25 | 26 | System.out.println(objectMapper.writeValueAsString(greeting)); 27 | 28 | var greetingObj = objectMapper.readValue(greetingJSON,Greeting.class); 29 | 30 | 31 | System.out.println("greetingObj : " + greetingObj); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/test/java/com/learnkafkastreams/topology/ExploreKTableTopologyTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import org.apache.kafka.common.serialization.Serdes; 4 | import org.apache.kafka.streams.TestInputTopic; 5 | import org.apache.kafka.streams.TestOutputTopic; 6 | import org.apache.kafka.streams.TopologyTestDriver; 7 | import org.junit.jupiter.api.AfterEach; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import org.junit.jupiter.api.Test; 10 | 11 | import java.time.LocalDateTime; 12 | 13 | import static 
org.junit.jupiter.api.Assertions.assertEquals; 14 | 15 | public class ExploreKTableTopologyTest { 16 | 17 | TopologyTestDriver topologyTestDriver = null; 18 | TestInputTopic inputTopic = null; 19 | TestOutputTopic outputTopic = null; 20 | 21 | // @BeforeEach 22 | // void setUp() { 23 | // topologyTestDriver = new TopologyTestDriver(ExploreKTableTopology.build()); 24 | // 25 | // inputTopic = 26 | // topologyTestDriver. 27 | // createInputTopic( 28 | // ExploreKTableTopology.WORDS, Serdes.String().serializer(), 29 | // Serdes.String().serializer()); 30 | // 31 | // outputTopic = 32 | // topologyTestDriver 33 | // .createOutputTopic( 34 | // ExploreKTableTopology.WORDS_OUTPUT, 35 | // Serdes.String().deserializer(), 36 | // Serdes.String().deserializer()); 37 | // } 38 | // 39 | // @AfterEach 40 | // void tearDown() { 41 | // topologyTestDriver.close(); 42 | // } 43 | 44 | 45 | 46 | } 47 | -------------------------------------------------------------------------------- /explore-kafka-streams/greeting-streams/src/test/java/com/learnkafkastreams/topology/GreetingsTopologyTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import com.learnkafkastreams.domain.Greeting; 4 | import org.apache.kafka.streams.TestInputTopic; 5 | import org.apache.kafka.streams.TestOutputTopic; 6 | import org.apache.kafka.streams.TopologyTestDriver; 7 | 8 | class GreetingsTopologyTest { 9 | 10 | TopologyTestDriver topologyTestDriver = null; 11 | TestInputTopic inputTopic = null; 12 | TestOutputTopic outputTopic = null; 13 | 14 | 15 | } -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id "io.freefair.lombok" version "6.5.1" 4 | } 5 | 6 | group 'com.learnkafkastreams' 7 | version 'unspecified' 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | 14 | sourceCompatibility = JavaVersion.VERSION_17 15 | targetCompatibility = JavaVersion.VERSION_17 16 | 17 | 18 | 19 | 20 | dependencies { 21 | 22 | implementation 'org.apache.kafka:kafka-streams:3.3.1' 23 | 24 | //jackson for custom serdes 25 | implementation 'com.fasterxml.jackson.core:jackson-databind:2.14.1' 26 | implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.14.1' 27 | 28 | //log 29 | implementation 'ch.qos.logback:logback-core:1.2.11' 30 | implementation 'ch.qos.logback:logback-classic:1.2.11' 31 | implementation 'org.slf4j:slf4j-api:1.7.36' 32 | 33 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1' 34 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1' 35 | 36 | //streams-test 37 | testImplementation 'org.apache.kafka:kafka-streams-test-utils:3.3.1' 38 | 39 | } 40 | 41 | test { 42 | useJUnitPlatform() 43 | } -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/OrdersKafkaStreamApp.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams; 2 | 3 | 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.clients.admin.AdminClient; 6 | import org.apache.kafka.clients.admin.NewTopic; 7 | import org.apache.kafka.clients.consumer.ConsumerConfig; 8 | import org.apache.kafka.streams.StreamsConfig; 9 | 10 | import java.util.List; 11 | import 
java.util.Properties; 12 | import java.util.stream.Collectors; 13 | 14 | @Slf4j 15 | public class OrdersKafkaStreamApp { 16 | 17 | 18 | public static void main(String[] args) { 19 | 20 | // create an instance of the topology 21 | 22 | 23 | Properties config = new Properties(); 24 | config.put(StreamsConfig.APPLICATION_ID_CONFIG, "orders-app"); // consumer group 25 | config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 26 | config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); // read only the new messages 27 | //createTopics(config, List.of(GREETINGS, GREETINGS_UPPERCASE, GREETINGS_SPANISH)); 28 | 29 | //Create an instance of KafkaStreams 30 | //var kafkaStreams = new KafkaStreams(topology, config); 31 | 32 | //This closes the streams anytime the JVM shuts down normally or abruptly. 33 | //Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close)); 34 | try{ 35 | //kafkaStreams.start(); 36 | }catch (Exception e ){ 37 | log.error("Exception in starting the Streams : {}", e.getMessage(), e); 38 | } 39 | 40 | } 41 | 42 | private static void createTopics(Properties config, List greetings) { 43 | 44 | AdminClient admin = AdminClient.create(config); 45 | var partitions = 1; 46 | short replication = 1; 47 | 48 | var newTopics = greetings 49 | .stream() 50 | .map(topic ->{ 51 | return new NewTopic(topic, partitions, replication); 52 | }) 53 | .collect(Collectors.toList()); 54 | 55 | var createTopicResult = admin.createTopics(newTopics); 56 | try { 57 | createTopicResult 58 | .all().get(); 59 | log.info("topics are created successfully"); 60 | } catch (Exception e) { 61 | log.error("Exception creating topics : {} ",e.getMessage(), e); 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/Address.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record Address(String addressLine1, 4 | String addressLine2, 5 | String city, 6 | String state, 7 | String zip) { 8 | } 9 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/Order.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | import java.time.LocalDateTime; 5 | import java.util.List; 6 | 7 | public record Order(Integer orderId, 8 | String locationId, 9 | BigDecimal finalAmount, 10 | OrderType orderType, 11 | List orderLineItems, 12 | LocalDateTime orderedDateTime) { 13 | } 14 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/OrderLineItem.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | 5 | public record OrderLineItem( 6 | String item, 7 | Integer count, 8 | BigDecimal amount) { 9 | } 10 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/OrderType.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | 
public enum OrderType { 4 | GENERAL, 5 | RESTAURANT 6 | } 7 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/Revenue.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | 5 | public record Revenue(String locationId, 6 | BigDecimal finalAmount) { 7 | } 8 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/Store.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record Store(String locationId, 4 | Address address, 5 | String contactNum) { 6 | } 7 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/TotalRevenue.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import java.math.BigDecimal; 4 | 5 | public record TotalRevenue(String locationId, 6 | Integer runnuingOrderCount, 7 | BigDecimal runningRevenue) { 8 | } 9 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/domain/TotalRevenueWithAddress.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | public record TotalRevenueWithAddress(TotalRevenue totalRevenue, 4 | Store store) { 5 | } 6 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/producer/OrdersMockDataProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import com.learnkafkastreams.domain.OrderLineItem; 8 | import com.learnkafkastreams.domain.Order; 9 | import com.learnkafkastreams.domain.OrderType; 10 | import com.learnkafkastreams.topology.OrdersTopology; 11 | import lombok.extern.slf4j.Slf4j; 12 | 13 | import java.math.BigDecimal; 14 | import java.time.LocalDateTime; 15 | import java.time.LocalTime; 16 | import java.util.List; 17 | 18 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 19 | import static java.lang.Thread.sleep; 20 | 21 | @Slf4j 22 | public class OrdersMockDataProducer { 23 | 24 | public static void main(String[] args) throws InterruptedException { 25 | ObjectMapper objectMapper = new ObjectMapper() 26 | .registerModule(new JavaTimeModule()) 27 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 28 | 29 | publishOrders(objectMapper, buildOrders()); 30 | //publishBulkOrders(objectMapper); 31 | 32 | //grace-period 33 | // publishOrdersForGracePeriod(objectMapper, buildOrders()); 34 | 35 | //Future and Old Records 36 | // publishFutureRecords(objectMapper); 37 | // publishExpiredRecords(objectMapper); 38 | 39 | 40 | } 41 | 42 | 
private static void publishFutureRecords(ObjectMapper objectMapper) { 43 | var localDateTime = LocalDateTime.now().plusDays(1); 44 | 45 | var newOrders = buildOrders() 46 | .stream() 47 | .map(order -> 48 | new Order(order.orderId(), 49 | order.locationId(), 50 | order.finalAmount(), 51 | order.orderType(), 52 | order.orderLineItems(), 53 | localDateTime)) 54 | .toList(); 55 | publishOrders(objectMapper, newOrders); 56 | } 57 | 58 | private static void publishExpiredRecords(ObjectMapper objectMapper) { 59 | 60 | var localDateTime = LocalDateTime.now().minusDays(1); 61 | 62 | var newOrders = buildOrders() 63 | .stream() 64 | .map(order -> 65 | new Order(order.orderId(), 66 | order.locationId(), 67 | order.finalAmount(), 68 | order.orderType(), 69 | order.orderLineItems(), 70 | localDateTime)) 71 | .toList(); 72 | publishOrders(objectMapper, newOrders); 73 | 74 | } 75 | 76 | private static void publishOrdersForGracePeriod(ObjectMapper objectMapper, List orders) { 77 | 78 | var localTime = LocalDateTime.now().toLocalTime(); 79 | var modifiedTime = LocalTime.of(localTime.getHour(), localTime.getMinute(), 18); 80 | var localDateTime = LocalDateTime.now().with(modifiedTime); 81 | 82 | //With Grace Period 83 | //[general_orders_revenue_window]: , TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00] 84 | //[general_orders_revenue_window]: TotalRevenue[locationId=store_1234, runnuingOrderCount=1, runningRevenue=27.00] 85 | //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00] 86 | //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00] 87 | 88 | //Without Grace Period 89 | //[general_orders_revenue_window]: , TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00] 90 | //[general_orders_revenue_window]: TotalRevenue[locationId=store_1234, runnuingOrderCount=1, runningRevenue=27.00] 91 | //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00] 92 | //[general_orders_revenue_window]: TotalRevenue[locationId=store_4567, runnuingOrderCount=1, runningRevenue=27.00] 93 | 94 | 95 | 96 | var generalOrdersWithCustomTime = orders 97 | .stream() 98 | .filter(order -> order.orderType().equals(OrderType.GENERAL)) 99 | .map(order -> 100 | new Order(order.orderId(), 101 | order.locationId(), 102 | order.finalAmount(), 103 | order.orderType(), 104 | order.orderLineItems(), 105 | localDateTime)) 106 | .toList(); 107 | 108 | var generalOrders = orders 109 | .stream() 110 | .filter(order -> order.orderType().equals(OrderType.GENERAL)) 111 | .toList(); 112 | 113 | publishOrders(objectMapper, generalOrders); 114 | 115 | //orders with the timestamp as 18th second 116 | publishRecordsWithDelay(generalOrdersWithCustomTime, localDateTime, objectMapper, 18); 117 | 118 | } 119 | 120 | private static void publishRecordsWithDelay(List newOrders, LocalDateTime localDateTime, ObjectMapper objectMapper) { 121 | 122 | publishOrders(objectMapper, newOrders); 123 | } 124 | 125 | private static void publishRecordsWithDelay(List newOrders, LocalDateTime localDateTime, ObjectMapper objectMapper, int timeToPublish) { 126 | 127 | var flag = true; 128 | while (flag) { 129 | var dateTime = LocalDateTime.now(); 130 | if (dateTime.toLocalTime().getMinute() == localDateTime.getMinute() 131 | && dateTime.toLocalTime().getSecond() == timeToPublish) { 132 | System.out.printf("Publishing the record with delay "); 133 | 
publishOrders(objectMapper, newOrders); 134 | flag = false; 135 | } else { 136 | System.out.println(" Current Time is and the record will be published at the 16th second: " + dateTime); 137 | System.out.println("Record Date Time : " + localDateTime); 138 | } 139 | } 140 | } 141 | 142 | private static List buildOrdersForGracePeriod() { 143 | 144 | var orderItems = List.of( 145 | new OrderLineItem("Bananas", 2, new BigDecimal("2.00")), 146 | new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00")) 147 | ); 148 | 149 | var orderItemsRestaurant = List.of( 150 | new OrderLineItem("Pizza", 2, new BigDecimal("12.00")), 151 | new OrderLineItem("Coffee", 1, new BigDecimal("3.00")) 152 | ); 153 | 154 | var order1 = new Order(12345, "store_999", 155 | new BigDecimal("27.00"), 156 | OrderType.RESTAURANT, 157 | orderItems, 158 | LocalDateTime.parse("2023-01-06T18:50:21") 159 | ); 160 | 161 | var order2 = new Order(54321, "store_999", 162 | new BigDecimal("15.00"), 163 | OrderType.RESTAURANT, 164 | orderItemsRestaurant, 165 | LocalDateTime.parse("2023-01-06T18:50:21") 166 | ); 167 | 168 | var order3 = new Order(54321, "store_999", 169 | new BigDecimal("15.00"), 170 | OrderType.RESTAURANT, 171 | orderItemsRestaurant, 172 | LocalDateTime.parse("2023-01-06T18:50:22") 173 | ); 174 | 175 | return List.of( 176 | order1, 177 | order2, 178 | order3 179 | ); 180 | 181 | } 182 | 183 | private static List buildOrders() { 184 | var orderItems = List.of( 185 | new OrderLineItem("Bananas", 2, new BigDecimal("2.00")), 186 | new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00")) 187 | ); 188 | 189 | var orderItemsRestaurant = List.of( 190 | new OrderLineItem("Pizza", 2, new BigDecimal("12.00")), 191 | new OrderLineItem("Coffee", 1, new BigDecimal("3.00")) 192 | ); 193 | 194 | var order1 = new Order(12345, "store_1234", 195 | new BigDecimal("27.00"), 196 | OrderType.GENERAL, 197 | orderItems, 198 | LocalDateTime.now() 199 | //LocalDateTime.now(ZoneId.of("UTC")) 200 | ); 201 | 202 | var order2 = new Order(54321, "store_1234", 203 | new BigDecimal("15.00"), 204 | OrderType.RESTAURANT, 205 | orderItemsRestaurant, 206 | LocalDateTime.now() 207 | //LocalDateTime.now(ZoneId.of("UTC")) 208 | ); 209 | 210 | var order3 = new Order(12345, "store_4567", 211 | new BigDecimal("27.00"), 212 | OrderType.GENERAL, 213 | orderItems, 214 | LocalDateTime.now() 215 | //LocalDateTime.now(ZoneId.of("UTC")) 216 | ); 217 | 218 | var order4 = new Order(12345, "store_4567", 219 | new BigDecimal("27.00"), 220 | OrderType.RESTAURANT, 221 | orderItems, 222 | LocalDateTime.now() 223 | //LocalDateTime.now(ZoneId.of("UTC")) 224 | ); 225 | 226 | return List.of( 227 | order1, 228 | order2, 229 | order3, 230 | order4 231 | ); 232 | } 233 | 234 | private static void publishBulkOrders(ObjectMapper objectMapper) throws InterruptedException { 235 | 236 | int count = 0; 237 | while (count < 100) { 238 | var orders = buildOrders(); 239 | publishOrders(objectMapper, orders); 240 | sleep(1000); 241 | count++; 242 | } 243 | } 244 | 245 | private static void publishOrders(ObjectMapper objectMapper, List orders) { 246 | 247 | orders 248 | .forEach(order -> { 249 | try { 250 | var ordersJSON = objectMapper.writeValueAsString(order); 251 | var recordMetaData = publishMessageSync(OrdersTopology.ORDERS, order.orderId() + "", ordersJSON); 252 | log.info("Published the order message : {} ", recordMetaData); 253 | } catch (JsonProcessingException e) { 254 | log.error("JsonProcessingException : {} ", e.getMessage(), e); 255 | throw new RuntimeException(e); 
256 | } catch (Exception e) { 257 | log.error("Exception : {} ", e.getMessage(), e); 258 | throw new RuntimeException(e); 259 | } 260 | }); 261 | } 262 | 263 | 264 | } 265 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/producer/ProducerUtil.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.kafka.clients.producer.KafkaProducer; 5 | import org.apache.kafka.clients.producer.ProducerConfig; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | import org.apache.kafka.clients.producer.RecordMetadata; 8 | import org.apache.kafka.common.serialization.StringSerializer; 9 | 10 | import java.util.HashMap; 11 | import java.util.Map; 12 | import java.util.concurrent.ExecutionException; 13 | 14 | @Slf4j 15 | public class ProducerUtil { 16 | 17 | static KafkaProducer producer = new KafkaProducer(producerProps()); 18 | 19 | public static Map producerProps(){ 20 | 21 | Map propsMap = new HashMap<>(); 22 | propsMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 23 | propsMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 24 | propsMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 25 | return propsMap; 26 | 27 | } 28 | 29 | 30 | public static RecordMetadata publishMessageSync(String topicName, String key, String message ){ 31 | 32 | ProducerRecord producerRecord = new ProducerRecord<>(topicName, key, message); 33 | RecordMetadata recordMetadata=null; 34 | 35 | try { 36 | log.info("producerRecord : " + producerRecord); 37 | recordMetadata = producer.send(producerRecord).get(); 38 | } catch (InterruptedException e) { 39 | log.error("InterruptedException in publishMessageSync : {} ", e.getMessage(), e); 40 | } catch (ExecutionException e) { 41 | log.error("ExecutionException in publishMessageSync : {} ", e.getMessage(), e); 42 | }catch(Exception e){ 43 | log.error("Exception in publishMessageSync : {} ", e.getMessage(), e); 44 | } 45 | return recordMetadata; 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/producer/StoresMockDataProducer.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.producer; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import com.learnkafkastreams.domain.Address; 8 | import com.learnkafkastreams.domain.Store; 9 | import com.learnkafkastreams.topology.OrdersTopology; 10 | import lombok.extern.slf4j.Slf4j; 11 | 12 | import java.util.List; 13 | 14 | import static com.learnkafkastreams.producer.ProducerUtil.publishMessageSync; 15 | 16 | @Slf4j 17 | public class StoresMockDataProducer { 18 | 19 | public static void main(String[] args) { 20 | ObjectMapper objectMapper = new ObjectMapper() 21 | .registerModule(new JavaTimeModule()) 22 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 23 | 24 | 25 | var address1 = new Address("1234 Street 1 ", "", "City1", "State1", "12345"); 26 | var store1 = new 
Store("store_1234", 27 | address1, 28 | "1234567890" 29 | ); 30 | 31 | var address2 = new Address("1234 Street 2 ", "", "City2", "State2", "541321"); 32 | var store2 = new Store("store_4567", 33 | address2, 34 | "0987654321" 35 | ); 36 | 37 | 38 | var stores = List.of(store1, store2); 39 | stores 40 | .forEach(store -> { 41 | try { 42 | var storeJSON = objectMapper.writeValueAsString(store); 43 | var recordMetaData = publishMessageSync(OrdersTopology.STORES, store.locationId(), storeJSON); 44 | log.info("Published the store message : {} ", recordMetaData); 45 | } catch (JsonProcessingException e) { 46 | log.error("JsonProcessingException : {} ", e.getMessage(), e); 47 | throw new RuntimeException(e); 48 | } 49 | catch (Exception e) { 50 | log.error("Exception : {} ", e.getMessage(), e); 51 | throw new RuntimeException(e); 52 | } 53 | }); 54 | 55 | } 56 | 57 | } -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/java/com/learnkafkastreams/topology/OrdersTopology.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | @Slf4j 6 | public class OrdersTopology { 7 | public static final String ORDERS = "orders"; 8 | public static final String STORES = "stores"; 9 | } 10 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | %d [%t] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/test/java/com/learnkafkastreams/domain/OrderTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.domain; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 7 | import org.junit.jupiter.api.Test; 8 | 9 | import java.math.BigDecimal; 10 | import java.time.LocalDateTime; 11 | import java.util.List; 12 | 13 | import static org.junit.jupiter.api.Assertions.assertEquals; 14 | 15 | public class OrderTest { 16 | 17 | 18 | private final ObjectMapper objectMapper = new ObjectMapper() 19 | .registerModule(new JavaTimeModule()) 20 | .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 21 | 22 | 23 | @Test 24 | void orderDomainTest() throws JsonProcessingException { 25 | 26 | var orderItems = List.of( 27 | new OrderLineItem("Bananas", 2, new BigDecimal("2.00")), 28 | new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00")) 29 | ); 30 | 31 | var order = new Order(12345, "store_1234", 32 | new BigDecimal("27.00"), 33 | OrderType.GENERAL, 34 | orderItems, 35 | LocalDateTime.parse("2022-12-05T08:55:27") 36 | ); 37 | 38 | var orderJSON = objectMapper.writeValueAsString(order); 39 | System.out.println("orderJSON :"+orderJSON); 40 | var expectedJSON = "{\"orderId\":12345,\"locationId\":\"store_1234\",\"finalAmount\":27.00,\"orderType\":\"GENERAL\",\"orderLineItems\":[{\"item\":\"Bananas\",\"count\":2,\"amount\":2.00},{\"item\":\"Iphone 
Charger\",\"count\":1,\"amount\":25.00}],\"orderedDateTime\":\"2022-12-05T08:55:27\"}"; 41 | 42 | assertEquals(expectedJSON, orderJSON); 43 | } 44 | 45 | @Test 46 | void orderRecordDomainTest() throws JsonProcessingException { 47 | 48 | var orderItems = List.of( 49 | new OrderLineItem("Bananas", 2, new BigDecimal("2.00")), 50 | new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00")) 51 | ); 52 | 53 | var order = new Order(12345, "store_1234", 54 | new BigDecimal("27.00"), 55 | OrderType.GENERAL, 56 | orderItems, 57 | LocalDateTime.parse("2022-12-05T08:55:27") 58 | ); 59 | 60 | var orderJSON = objectMapper.writeValueAsString(order); 61 | System.out.println("orderJSON :"+orderJSON); 62 | var expectedJSON = "{\"orderId\":12345,\"locationId\":\"store_1234\",\"finalAmount\":27.00,\"orderType\":\"GENERAL\",\"orderLineItems\":[{\"item\":\"Bananas\",\"count\":2,\"amount\":2.00},{\"item\":\"Iphone Charger\",\"count\":1,\"amount\":25.00}],\"orderedDateTime\":\"2022-12-05T08:55:27\"}"; 63 | 64 | assertEquals(expectedJSON, orderJSON); 65 | } 66 | 67 | @Test 68 | void orderDomainRestaurantTest() throws JsonProcessingException { 69 | 70 | var orderItems = List.of( 71 | new OrderLineItem("Pizza", 2, new BigDecimal("12.00")), 72 | new OrderLineItem("Coffee", 1, new BigDecimal("3.00")) 73 | ); 74 | 75 | var order = new Order(12345, "store_1234", 76 | new BigDecimal("15.00"), 77 | OrderType.RESTAURANT, 78 | orderItems, 79 | LocalDateTime.parse("2022-12-05T08:55:27") 80 | ); 81 | 82 | var orderJSON = objectMapper.writeValueAsString(order); 83 | System.out.println("orderJSON :"+orderJSON); 84 | var expectedJSON = "{\"orderId\":12345,\"locationId\":\"store_1234\",\"finalAmount\":15.00,\"orderType\":\"RESTAURANT\",\"orderLineItems\":[{\"item\":\"Pizza\",\"count\":2,\"amount\":12.00},{\"item\":\"Coffee\",\"count\":1,\"amount\":3.00}],\"orderedDateTime\":\"2022-12-05T08:55:27\"}"; 85 | 86 | assertEquals(expectedJSON, orderJSON); 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/test/java/com/learnkafkastreams/topology/OrdersTopologyTest.java: -------------------------------------------------------------------------------- 1 | package com.learnkafkastreams.topology; 2 | 3 | import com.learnkafkastreams.domain.Order; 4 | import com.learnkafkastreams.domain.OrderLineItem; 5 | import com.learnkafkastreams.domain.OrderType; 6 | import org.apache.kafka.streams.KeyValue; 7 | import org.apache.kafka.streams.TestInputTopic; 8 | import org.apache.kafka.streams.TopologyTestDriver; 9 | 10 | import java.math.BigDecimal; 11 | import java.time.LocalDateTime; 12 | import java.util.List; 13 | 14 | import static com.learnkafkastreams.topology.OrdersTopology.ORDERS; 15 | 16 | class OrdersTopologyTest { 17 | 18 | TopologyTestDriver topologyTestDriver = null; 19 | TestInputTopic ordersInputTopic = null; 20 | 21 | static String INPUT_TOPIC = ORDERS; 22 | 23 | 24 | 25 | static List> orders(){ 26 | 27 | var orderItems = List.of( 28 | new OrderLineItem("Bananas", 2, new BigDecimal("2.00")), 29 | new OrderLineItem("Iphone Charger", 1, new BigDecimal("25.00")) 30 | ); 31 | 32 | var orderItemsRestaurant = List.of( 33 | new OrderLineItem("Pizza", 2, new BigDecimal("12.00")), 34 | new OrderLineItem("Coffee", 1, new BigDecimal("3.00")) 35 | ); 36 | 37 | var order1 = new Order(12345, "store_1234", 38 | new BigDecimal("27.00"), 39 | OrderType.GENERAL, 40 | orderItems, 41 | LocalDateTime.now() 42 | //LocalDateTime.now(ZoneId.of("UTC")) 43 
| ); 44 | 45 | var order2 = new Order(54321, "store_1234", 46 | new BigDecimal("15.00"), 47 | OrderType.RESTAURANT, 48 | orderItemsRestaurant, 49 | LocalDateTime.now() 50 | //LocalDateTime.now(ZoneId.of("UTC")) 51 | ); 52 | var keyValue1 = KeyValue.pair( order1.orderId().toString() 53 | , order1); 54 | 55 | var keyValue2 = KeyValue.pair( order2.orderId().toString() 56 | , order2); 57 | 58 | 59 | return List.of(keyValue1, keyValue2); 60 | 61 | } 62 | } -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/test/resources/order-general.json: -------------------------------------------------------------------------------- 1 | { 2 | "orderId": 12345, 3 | "locationId": "store_1234", 4 | "finalAmount": 27.00, 5 | "orderType": "GENERAL", 6 | "orderLineItems": [ 7 | { 8 | "item": "Bananas", 9 | "count": 2, 10 | "amount": 2.00 11 | }, 12 | { 13 | "item": "Iphone Charger", 14 | "count": 1, 15 | "amount": 25.00 16 | } 17 | ], 18 | "orderedDateTime": "2022-12-05T08:55:27" 19 | } -------------------------------------------------------------------------------- /explore-kafka-streams/orders-kafka-streams-app/src/test/resources/order-restaurant.json: -------------------------------------------------------------------------------- 1 | { 2 | "orderId": 12345, 3 | "locationId": "store_1234", 4 | "finalAmount": 15.00, 5 | "orderType": "RESTAURANT", 6 | "orderLineItems": [ 7 | { 8 | "item": "Pizza", 9 | "count": 2, 10 | "amount": 12.00 11 | }, 12 | { 13 | "item": "Coffee", 14 | "count": 1, 15 | "amount": 3.00 16 | } 17 | ], 18 | "orderedDateTime": "2022-12-05T08:55:27" 19 | } 20 | -------------------------------------------------------------------------------- /explore-kafka-streams/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'explore-kafka-streams' 2 | include 'orders-kafka-streams-app' 3 | include 'advanced-streams' 4 | include 'greeting-streams' 5 | --------------------------------------------------------------------------------