├── .gitignore
├── .mvn
└── wrapper
│ ├── MavenWrapperDownloader.java
│ ├── maven-wrapper.jar
│ └── maven-wrapper.properties
├── LICENSE
├── README.md
├── docker-compose.yml
├── mvnw
├── mvnw.cmd
├── pom.xml
├── springbootapp.iml
└── src
├── main
├── avro
│ ├── countsum.avsc
│ └── rating.avsc
├── java
│ ├── com
│ │ └── example
│ │ │ └── kafkastreams
│ │ │ └── restapi
│ │ │ └── springbootapp
│ │ │ ├── SpringbootappApplication.java
│ │ │ ├── config
│ │ │ └── KafkaStreamsConfig.java
│ │ │ ├── controller
│ │ │ └── v1
│ │ │ │ └── MovieController.java
│ │ │ └── dto
│ │ │ └── MovieAverageRatingResponse.java
│ └── io
│ │ └── confluent
│ │ └── demo
│ │ ├── CountAndSum.java
│ │ └── Rating.java
└── resources
│ └── application.properties
└── test
└── java
└── com
└── example
└── kafkastreams
└── restapi
└── springbootapp
└── SpringbootappApplicationTests.java
/.gitignore:
--------------------------------------------------------------------------------
1 | HELP.md
2 | target/
3 | !.mvn/wrapper/maven-wrapper.jar
4 | !**/src/main/**/target/
5 | !**/src/test/**/target/
6 | /data/
7 |
8 | ### STS ###
9 | .apt_generated
10 | .classpath
11 | .factorypath
12 | .project
13 | .settings
14 | .springBeans
15 | .sts4-cache
16 |
17 | ### IntelliJ IDEA ###
18 | .idea
19 | *.iws
20 | *.iml
21 | *.ipr
22 |
23 | ### NetBeans ###
24 | /nbproject/private/
25 | /nbbuild/
26 | /dist/
27 | /nbdist/
28 | /.nb-gradle/
29 | build/
30 | !**/src/main/**/build/
31 | !**/src/test/**/build/
32 |
33 | ### VS Code ###
34 | .vscode/
--------------------------------------------------------------------------------
/.mvn/wrapper/MavenWrapperDownloader.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2007-present the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | import java.net.*;
17 | import java.io.*;
18 | import java.nio.channels.*;
19 | import java.util.Properties;
20 |
public class MavenWrapperDownloader {

    private static final String WRAPPER_VERSION = "0.5.6";

    /**
     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
     */
    private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
        + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";

    /**
     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
     * use instead of the default one.
     */
    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
            ".mvn/wrapper/maven-wrapper.properties";

    /**
     * Path where the maven-wrapper.jar will be saved to.
     */
    private static final String MAVEN_WRAPPER_JAR_PATH =
            ".mvn/wrapper/maven-wrapper.jar";

    /**
     * Name of the property which should be used to override the default download url for the wrapper.
     */
    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";

    /**
     * Entry point invoked by the mvnw/mvnw.cmd scripts.
     *
     * @param args args[0] is the project base directory (supplied by the wrapper scripts)
     */
    public static void main(String[] args) {
        System.out.println("- Downloader started");
        File baseDirectory = new File(args[0]);
        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());

        // If the maven-wrapper.properties exists, read it and check if it contains a custom
        // wrapperUrl parameter.
        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
        String url = DEFAULT_DOWNLOAD_URL;
        if (mavenWrapperPropertyFile.exists()) {
            // try-with-resources closes the stream on every path (the original used a
            // verbose null-checked finally block for the same effect).
            try (FileInputStream mavenWrapperPropertyFileInputStream =
                         new FileInputStream(mavenWrapperPropertyFile)) {
                Properties mavenWrapperProperties = new Properties();
                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
            } catch (IOException e) {
                // Best-effort: fall back to the default URL if the properties can't be read.
                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
            }
        }
        System.out.println("- Downloading from: " + url);

        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
        if (!outputFile.getParentFile().exists()) {
            if (!outputFile.getParentFile().mkdirs()) {
                System.out.println(
                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
            }
        }
        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
        try {
            downloadFileFromURL(url, outputFile);
            System.out.println("Done");
            System.exit(0);
        } catch (Throwable e) {
            System.out.println("- Error downloading");
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Downloads the file at {@code urlString} to {@code destination}, optionally authenticating
     * with the MVNW_USERNAME / MVNW_PASSWORD environment variables.
     *
     * @param urlString   URL to fetch
     * @param destination local file to write
     * @throws Exception on any network or I/O failure
     */
    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
            String username = System.getenv("MVNW_USERNAME");
            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
            Authenticator.setDefault(new Authenticator() {
                @Override
                protected PasswordAuthentication getPasswordAuthentication() {
                    return new PasswordAuthentication(username, password);
                }
            });
        }
        URL website = new URL(urlString);
        // try-with-resources: the original leaked both the channel and the output stream
        // if transferFrom() threw before the explicit close() calls were reached.
        try (ReadableByteChannel rbc = Channels.newChannel(website.openStream());
             FileOutputStream fos = new FileOutputStream(destination)) {
            fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
        }
    }

}
118 |
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ben-jamin-chen/springboot-kafka-streams-rest-api/28dff294767060e8bad3f2ac2fd75f80a0008eac/.mvn/wrapper/maven-wrapper.jar
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
3 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Ben Chen
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Spring Boot (2.3.3) RESTful API with Kafka Streams (2.6.0)
2 |
3 | While looking through the Kafka Tutorials to see how I could set up a Spring Boot API project with Kafka Streams, I found it strange that there wasn't a complete or more informative example on how this could be achieved. Most use cases demonstrated how to compute aggregations and how to build simple topologies, but it was difficult to find a concrete example on how to build an API service that could query into these materialized state stores. Anyway, I thought I’d create my own using a more recent version of Spring Boot with Java 14.
4 |
5 | ## What You Need
6 |
7 | * Java 14
8 | * Maven 3.6.0+
9 | * Docker 19+
10 |
11 | ## Getting Started
12 | We need to first launch the Confluent services (i.e. Schema Registry, Broker, ZooKeeper) locally by running the `docker-compose up -d` CLI command where the [docker-compose.yml](https://github.com/bchen04/springboot-kafka-streams-rest-api/blob/master/docker-compose.yml) file is. Typically, you can create a stack file (in the form of a YAML file) to define your applications. You can also run `docker-compose ps` to check the status of the stack. Notice that the endpoints used from within the containers differ from those used from your host machine.
13 |
14 | | Name | From within containers | From host machine |
15 | | ------------- | ------------- | ------------- |
16 | | Kafka Broker | broker:9092 | localhost:29092 |
17 | | Schema Registry | http://schema-registry:8081 | http://localhost:8081 |
18 | | ZooKeeper | zookeeper:2181 | localhost:2181 |
19 |
20 | > Note: you can run `docker-compose down` to stop all services and containers.
21 |
22 | As part of this sample, I've retrofitted the average aggregate example from [Confluent's Kafka Tutorials](https://kafka-tutorials.confluent.io/aggregating-average/kstreams.html) into this project. The API will calculate and return a running average rating for a given movie identifier. This should demonstrate how to build a basic API service on top of an aggregation result.
23 |
24 | Notice in the `~/src/main/avro` directory, we have all our Avro schema files for the stream of `ratings` and `countsum`. For your convenience, the classes were already generated under the `~/src/main/java/io/confluent/demo` directory, but feel free to tinker with them and recompile the schemas if needed. The Avro classes can be programmatically generated using `Maven` or by manually invoking the [schema compiler](https://avro.apache.org/docs/1.10.0/gettingstartedjava.html#Compiling+the+schema).
25 |
26 | So before building and running the project, open a new terminal and run the following commands to generate your input and output topics.
27 |
28 | ```zsh
29 | $ docker-compose exec broker kafka-topics --create --bootstrap-server \
30 | localhost:9092 --replication-factor 1 --partitions 1 --topic ratings
31 |
32 | $ docker-compose exec broker kafka-topics --create --bootstrap-server \
33 | localhost:9092 --replication-factor 1 --partitions 1 --topic rating-averages
34 | ```
35 |
36 | Next, we will need to produce some data onto the input topic.
37 |
38 | ```zsh
39 | $ docker exec -i schema-registry /usr/bin/kafka-avro-console-producer --topic ratings --broker-list broker:9092\
40 | --property "parse.key=false"\
41 | --property "key.separator=:"\
42 | --property value.schema="$(< src/main/avro/rating.avsc)"
43 | ```
44 |
45 | Paste in the following `json` data when prompted and be sure to press enter twice to actually submit it.
46 |
47 | ```json
48 | {"movie_id":362,"rating":10}
49 | {"movie_id":362,"rating":8}
50 | ```
51 |
52 | Optionally, you can also see the consumer results on the output topic by running this command on a new terminal window:
53 |
54 | ```zsh
55 | $ docker exec -it broker /usr/bin/kafka-console-consumer --topic rating-averages --bootstrap-server broker:9092 \
56 | --property "print.key=true"\
57 | --property "key.deserializer=org.apache.kafka.common.serialization.LongDeserializer" \
58 | --property "value.deserializer=org.apache.kafka.common.serialization.DoubleDeserializer" \
59 | --from-beginning
60 | ```
61 |
62 | ## Build and Run the Sample
63 |
64 | You can import the code straight into your preferred IDE or run the sample using the following command (in the root project folder).
65 |
66 | ```zsh
67 | $ mvn spring-boot:run
68 | ```
69 | After the application runs, navigate to [http://localhost:7001/swagger-ui/index.html?configUrl=/api-docs/swagger-config](http://localhost:7001/swagger-ui/index.html?configUrl=/api-docs/swagger-config) in your web browser to access the Swagger UI. If you used the same sample data from above, you can enter `362` as the `movieId` and it should return something similar to this below:
70 |
71 | ```json
72 | {
73 | "movieId": 362,
74 | "rating": 9
75 | }
76 | ```
77 |
78 | > Note: keep in mind the various [states](https://kafka.apache.org/25/javadoc/org/apache/kafka/streams/KafkaStreams.State.html) of the client. When a Kafka Streams instance is in `RUNNING` state, it allows for inspection of the stream's metadata using methods like `queryMetadataForKey()`. While it is in `REBALANCING` state, the REST service cannot immediately answer requests until the state stores are fully rebuilt.
79 |
80 | ## Troubleshooting
81 |
82 | * In certain conditions, you may need to do a complete application reset. You can delete the application’s local state directory where the application instance was run. In this project, Kafka Streams persists local states under the `~/data` folder.
83 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '3.5'
3 |
4 | services:
5 | zookeeper:
6 | image: confluentinc/cp-zookeeper:5.5.0
7 | hostname: zookeeper
8 | container_name: zookeeper
9 | ports:
10 | - "2181:2181"
11 | environment:
12 | ZOOKEEPER_CLIENT_PORT: 2181
13 | ZOOKEEPER_TICK_TIME: 2000
14 | networks:
15 | - cp
16 |
17 | broker:
18 | image: confluentinc/cp-enterprise-kafka:5.5.0
19 | hostname: broker
20 | container_name: broker
21 | depends_on:
22 | - zookeeper
23 | ports:
24 | - "29092:29092"
25 | environment:
26 | KAFKA_BROKER_ID: 1
27 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
28 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
29 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:29092
30 | KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
31 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
32 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
33 | KAFKA_TOOLS_LOG4J_LOGLEVEL: ERROR
34 | CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:9092
35 | CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181
36 | CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
37 | CONFLUENT_METRICS_ENABLE: 'true'
38 | CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
39 | networks:
40 | - cp
41 |
42 | schema-registry:
43 | image: confluentinc/cp-schema-registry:5.5.0
44 | hostname: schema-registry
45 | container_name: schema-registry
46 | depends_on:
47 | - zookeeper
48 | - broker
49 | ports:
50 | - "8081:8081"
51 | environment:
52 | SCHEMA_REGISTRY_HOST_NAME: schema-registry
53 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
54 | SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: WARN
55 | networks:
56 | - cp
57 |
58 | networks:
59 | cp:
60 | name: cp_network
61 |
--------------------------------------------------------------------------------
/mvnw:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # ----------------------------------------------------------------------------
3 | # Licensed to the Apache Software Foundation (ASF) under one
4 | # or more contributor license agreements. See the NOTICE file
5 | # distributed with this work for additional information
6 | # regarding copyright ownership. The ASF licenses this file
7 | # to you under the Apache License, Version 2.0 (the
8 | # "License"); you may not use this file except in compliance
9 | # with the License. You may obtain a copy of the License at
10 | #
11 | # https://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing,
14 | # software distributed under the License is distributed on an
15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 | # KIND, either express or implied. See the License for the
17 | # specific language governing permissions and limitations
18 | # under the License.
19 | # ----------------------------------------------------------------------------
20 |
21 | # ----------------------------------------------------------------------------
22 | # Maven Start Up Batch script
23 | #
24 | # Required ENV vars:
25 | # ------------------
26 | # JAVA_HOME - location of a JDK home dir
27 | #
28 | # Optional ENV vars
29 | # -----------------
30 | # M2_HOME - location of maven2's installed home dir
31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven
32 | # e.g. to debug Maven itself, use
33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files
35 | # ----------------------------------------------------------------------------
36 |
37 | if [ -z "$MAVEN_SKIP_RC" ] ; then
38 |
39 | if [ -f /etc/mavenrc ] ; then
40 | . /etc/mavenrc
41 | fi
42 |
43 | if [ -f "$HOME/.mavenrc" ] ; then
44 | . "$HOME/.mavenrc"
45 | fi
46 |
47 | fi
48 |
49 | # OS specific support. $var _must_ be set to either true or false.
50 | cygwin=false;
51 | darwin=false;
52 | mingw=false
53 | case "`uname`" in
54 | CYGWIN*) cygwin=true ;;
55 | MINGW*) mingw=true;;
56 | Darwin*) darwin=true
57 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
58 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
59 | if [ -z "$JAVA_HOME" ]; then
60 | if [ -x "/usr/libexec/java_home" ]; then
61 | export JAVA_HOME="`/usr/libexec/java_home`"
62 | else
63 | export JAVA_HOME="/Library/Java/Home"
64 | fi
65 | fi
66 | ;;
67 | esac
68 |
69 | if [ -z "$JAVA_HOME" ] ; then
70 | if [ -r /etc/gentoo-release ] ; then
71 | JAVA_HOME=`java-config --jre-home`
72 | fi
73 | fi
74 |
75 | if [ -z "$M2_HOME" ] ; then
76 | ## resolve links - $0 may be a link to maven's home
77 | PRG="$0"
78 |
79 | # need this for relative symlinks
80 | while [ -h "$PRG" ] ; do
81 | ls=`ls -ld "$PRG"`
82 | link=`expr "$ls" : '.*-> \(.*\)$'`
83 | if expr "$link" : '/.*' > /dev/null; then
84 | PRG="$link"
85 | else
86 | PRG="`dirname "$PRG"`/$link"
87 | fi
88 | done
89 |
90 | saveddir=`pwd`
91 |
92 | M2_HOME=`dirname "$PRG"`/..
93 |
94 | # make it fully qualified
95 | M2_HOME=`cd "$M2_HOME" && pwd`
96 |
97 | cd "$saveddir"
98 | # echo Using m2 at $M2_HOME
99 | fi
100 |
101 | # For Cygwin, ensure paths are in UNIX format before anything is touched
102 | if $cygwin ; then
103 | [ -n "$M2_HOME" ] &&
104 | M2_HOME=`cygpath --unix "$M2_HOME"`
105 | [ -n "$JAVA_HOME" ] &&
106 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
107 | [ -n "$CLASSPATH" ] &&
108 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
109 | fi
110 |
111 | # For Mingw, ensure paths are in UNIX format before anything is touched
112 | if $mingw ; then
113 | [ -n "$M2_HOME" ] &&
114 | M2_HOME="`(cd "$M2_HOME"; pwd)`"
115 | [ -n "$JAVA_HOME" ] &&
116 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
117 | fi
118 |
119 | if [ -z "$JAVA_HOME" ]; then
120 | javaExecutable="`which javac`"
121 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
122 | # readlink(1) is not available as standard on Solaris 10.
123 | readLink=`which readlink`
124 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
125 | if $darwin ; then
126 | javaHome="`dirname \"$javaExecutable\"`"
127 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
128 | else
129 | javaExecutable="`readlink -f \"$javaExecutable\"`"
130 | fi
131 | javaHome="`dirname \"$javaExecutable\"`"
132 | javaHome=`expr "$javaHome" : '\(.*\)/bin'`
133 | JAVA_HOME="$javaHome"
134 | export JAVA_HOME
135 | fi
136 | fi
137 | fi
138 |
139 | if [ -z "$JAVACMD" ] ; then
140 | if [ -n "$JAVA_HOME" ] ; then
141 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
142 | # IBM's JDK on AIX uses strange locations for the executables
143 | JAVACMD="$JAVA_HOME/jre/sh/java"
144 | else
145 | JAVACMD="$JAVA_HOME/bin/java"
146 | fi
147 | else
148 | JAVACMD="`which java`"
149 | fi
150 | fi
151 |
152 | if [ ! -x "$JAVACMD" ] ; then
153 | echo "Error: JAVA_HOME is not defined correctly." >&2
154 | echo " We cannot execute $JAVACMD" >&2
155 | exit 1
156 | fi
157 |
158 | if [ -z "$JAVA_HOME" ] ; then
159 | echo "Warning: JAVA_HOME environment variable is not set."
160 | fi
161 |
162 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
163 |
# traverses directory structure from process work directory to filesystem root
# first directory with .mvn subdirectory is considered project base directory
find_maven_basedir() {

  # $1 is the starting directory; it is required.
  if [ -z "$1" ]
  then
    echo "Path not specified to find_maven_basedir"
    return 1
  fi

  basedir="$1"
  wdir="$1"
  # Walk upward toward '/' looking for a directory that contains .mvn.
  # If none is found, basedir stays at the starting directory.
  while [ "$wdir" != '/' ] ; do
    if [ -d "$wdir"/.mvn ] ; then
      basedir=$wdir
      break
    fi
    # workaround for JBEAP-8937 (on Solaris 10/Sparc)
    if [ -d "${wdir}" ]; then
      wdir=`cd "$wdir/.."; pwd`
    fi
    # end of workaround
  done
  # Result is written to stdout; callers capture it via command substitution.
  echo "${basedir}"
}
189 |
# concatenates all lines of a file
concat_lines() {
  # Squeeze newline runs into single spaces; the $(...) substitution inside
  # echo also strips the trailing newline. Prints nothing if the file is absent.
  if [ -f "$1" ]; then
    echo "$(tr -s '\n' ' ' < "$1")"
  fi
}
196 |
197 | BASE_DIR=`find_maven_basedir "$(pwd)"`
198 | if [ -z "$BASE_DIR" ]; then
199 | exit 1;
200 | fi
201 |
202 | ##########################################################################################
203 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
204 | # This allows using the maven wrapper in projects that prohibit checking in binary data.
205 | ##########################################################################################
206 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
207 | if [ "$MVNW_VERBOSE" = true ]; then
208 | echo "Found .mvn/wrapper/maven-wrapper.jar"
209 | fi
210 | else
211 | if [ "$MVNW_VERBOSE" = true ]; then
212 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
213 | fi
214 | if [ -n "$MVNW_REPOURL" ]; then
215 | jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
216 | else
217 | jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
218 | fi
219 | while IFS="=" read key value; do
220 | case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
221 | esac
222 | done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
223 | if [ "$MVNW_VERBOSE" = true ]; then
224 | echo "Downloading from: $jarUrl"
225 | fi
226 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
227 | if $cygwin; then
228 | wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
229 | fi
230 |
231 | if command -v wget > /dev/null; then
232 | if [ "$MVNW_VERBOSE" = true ]; then
233 | echo "Found wget ... using wget"
234 | fi
235 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
236 | wget "$jarUrl" -O "$wrapperJarPath"
237 | else
238 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
239 | fi
240 | elif command -v curl > /dev/null; then
241 | if [ "$MVNW_VERBOSE" = true ]; then
242 | echo "Found curl ... using curl"
243 | fi
244 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
245 | curl -o "$wrapperJarPath" "$jarUrl" -f
246 | else
247 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
248 | fi
249 |
250 | else
251 | if [ "$MVNW_VERBOSE" = true ]; then
252 | echo "Falling back to using Java to download"
253 | fi
254 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
255 | # For Cygwin, switch paths to Windows format before running javac
256 | if $cygwin; then
257 | javaClass=`cygpath --path --windows "$javaClass"`
258 | fi
259 | if [ -e "$javaClass" ]; then
260 | if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
261 | if [ "$MVNW_VERBOSE" = true ]; then
262 | echo " - Compiling MavenWrapperDownloader.java ..."
263 | fi
264 | # Compiling the Java class
265 | ("$JAVA_HOME/bin/javac" "$javaClass")
266 | fi
267 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
268 | # Running the downloader
269 | if [ "$MVNW_VERBOSE" = true ]; then
270 | echo " - Running MavenWrapperDownloader.java ..."
271 | fi
272 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
273 | fi
274 | fi
275 | fi
276 | fi
277 | ##########################################################################################
278 | # End of extension
279 | ##########################################################################################
280 |
281 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
282 | if [ "$MVNW_VERBOSE" = true ]; then
283 | echo $MAVEN_PROJECTBASEDIR
284 | fi
285 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
286 |
287 | # For Cygwin, switch paths to Windows format before running java
288 | if $cygwin; then
289 | [ -n "$M2_HOME" ] &&
290 | M2_HOME=`cygpath --path --windows "$M2_HOME"`
291 | [ -n "$JAVA_HOME" ] &&
292 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
293 | [ -n "$CLASSPATH" ] &&
294 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
295 | [ -n "$MAVEN_PROJECTBASEDIR" ] &&
296 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
297 | fi
298 |
299 | # Provide a "standardized" way to retrieve the CLI args that will
300 | # work with both Windows and non-Windows executions.
301 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
302 | export MAVEN_CMD_LINE_ARGS
303 |
304 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
305 |
306 | exec "$JAVACMD" \
307 | $MAVEN_OPTS \
308 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
309 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
310 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
311 |
--------------------------------------------------------------------------------
/mvnw.cmd:
--------------------------------------------------------------------------------
1 | @REM ----------------------------------------------------------------------------
2 | @REM Licensed to the Apache Software Foundation (ASF) under one
3 | @REM or more contributor license agreements. See the NOTICE file
4 | @REM distributed with this work for additional information
5 | @REM regarding copyright ownership. The ASF licenses this file
6 | @REM to you under the Apache License, Version 2.0 (the
7 | @REM "License"); you may not use this file except in compliance
8 | @REM with the License. You may obtain a copy of the License at
9 | @REM
10 | @REM https://www.apache.org/licenses/LICENSE-2.0
11 | @REM
12 | @REM Unless required by applicable law or agreed to in writing,
13 | @REM software distributed under the License is distributed on an
14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | @REM KIND, either express or implied. See the License for the
16 | @REM specific language governing permissions and limitations
17 | @REM under the License.
18 | @REM ----------------------------------------------------------------------------
19 |
20 | @REM ----------------------------------------------------------------------------
21 | @REM Maven Start Up Batch script
22 | @REM
23 | @REM Required ENV vars:
24 | @REM JAVA_HOME - location of a JDK home dir
25 | @REM
26 | @REM Optional ENV vars
27 | @REM M2_HOME - location of maven2's installed home dir
28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
31 | @REM e.g. to debug Maven itself, use
32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
34 | @REM ----------------------------------------------------------------------------
35 |
36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
37 | @echo off
38 | @REM set title of command window
39 | title %0
40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
42 |
43 | @REM set %HOME% to equivalent of $HOME
44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
45 |
46 | @REM Execute a user defined script before this one
47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending
49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
51 | :skipRcPre
52 |
53 | @setlocal
54 |
55 | set ERROR_CODE=0
56 |
57 | @REM To isolate internal variables from possible post scripts, we use another setlocal
58 | @setlocal
59 |
60 | @REM ==== START VALIDATION ====
61 | if not "%JAVA_HOME%" == "" goto OkJHome
62 |
63 | echo.
64 | echo Error: JAVA_HOME not found in your environment. >&2
65 | echo Please set the JAVA_HOME variable in your environment to match the >&2
66 | echo location of your Java installation. >&2
67 | echo.
68 | goto error
69 |
70 | :OkJHome
71 | if exist "%JAVA_HOME%\bin\java.exe" goto init
72 |
73 | echo.
74 | echo Error: JAVA_HOME is set to an invalid directory. >&2
75 | echo JAVA_HOME = "%JAVA_HOME%" >&2
76 | echo Please set the JAVA_HOME variable in your environment to match the >&2
77 | echo location of your Java installation. >&2
78 | echo.
79 | goto error
80 |
81 | @REM ==== END VALIDATION ====
82 |
83 | :init
84 |
85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
86 | @REM Fallback to current working directory if not found.
87 |
88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
90 |
91 | set EXEC_DIR=%CD%
92 | set WDIR=%EXEC_DIR%
93 | :findBaseDir
94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound
95 | cd ..
96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound
97 | set WDIR=%CD%
98 | goto findBaseDir
99 |
100 | :baseDirFound
101 | set MAVEN_PROJECTBASEDIR=%WDIR%
102 | cd "%EXEC_DIR%"
103 | goto endDetectBaseDir
104 |
105 | :baseDirNotFound
106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
107 | cd "%EXEC_DIR%"
108 |
109 | :endDetectBaseDir
110 |
111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
112 |
113 | @setlocal EnableExtensions EnableDelayedExpansion
114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
116 |
117 | :endReadAdditionalConfig
118 |
119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
122 |
123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
124 |
125 | FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
127 | )
128 |
129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data.
131 | if exist %WRAPPER_JAR% (
132 | if "%MVNW_VERBOSE%" == "true" (
133 | echo Found %WRAPPER_JAR%
134 | )
135 | ) else (
136 | if not "%MVNW_REPOURL%" == "" (
137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
138 | )
139 | if "%MVNW_VERBOSE%" == "true" (
140 | echo Couldn't find %WRAPPER_JAR%, downloading it ...
141 | echo Downloading from: %DOWNLOAD_URL%
142 | )
143 |
144 | powershell -Command "&{"^
145 | "$webclient = new-object System.Net.WebClient;"^
146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
148 | "}"^
149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
150 | "}"
151 | if "%MVNW_VERBOSE%" == "true" (
152 | echo Finished downloading %WRAPPER_JAR%
153 | )
154 | )
155 | @REM End of extension
156 |
157 | @REM Provide a "standardized" way to retrieve the CLI args that will
158 | @REM work with both Windows and non-Windows executions.
159 | set MAVEN_CMD_LINE_ARGS=%*
160 |
161 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
162 | if ERRORLEVEL 1 goto error
163 | goto end
164 |
165 | :error
166 | set ERROR_CODE=1
167 |
168 | :end
169 | @endlocal & set ERROR_CODE=%ERROR_CODE%
170 |
171 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
172 | @REM check for post script, once with legacy .bat ending and once with .cmd ending
173 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
174 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
175 | :skipRcPost
176 |
177 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
178 | if "%MAVEN_BATCH_PAUSE%" == "on" pause
179 |
180 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
181 |
182 | exit /B %ERROR_CODE%
183 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 | 4.0.0
5 |
6 | org.springframework.boot
7 | spring-boot-starter-parent
8 | 2.3.3.RELEASE
9 |
10 |
11 | com.example.kafkastreams.restapi
12 | springbootapp
13 | 0.0.1-SNAPSHOT
14 | springbootapp
15 | A demo project using Spring Boot with Kafka Stream
16 |
17 |
18 | confluent
19 | confluent
20 | https://packages.confluent.io/maven/
21 |
22 |
23 |
24 | 14
25 | 1.10.0
26 | 2.6.0
27 | 1.4.3
28 |
29 |
30 |
31 | org.springframework.boot
32 | spring-boot-starter-web
33 |
34 |
35 | org.apache.kafka
36 | kafka-streams
37 | ${kafka.version}
38 |
39 |
40 | org.apache.kafka
41 | kafka-clients
42 | ${kafka.version}
43 |
44 |
45 | io.confluent
46 | kafka-streams-avro-serde
47 | 5.5.1
48 |
49 |
50 | org.springframework.boot
51 | spring-boot-starter-test
52 | test
53 |
54 |
55 | org.junit.vintage
56 | junit-vintage-engine
57 |
58 |
59 |
60 |
61 |
62 | org.springdoc
63 | springdoc-openapi-ui
64 | ${openapi.version}
65 |
66 |
67 |
68 | org.springframework.kafka
69 | spring-kafka
70 | 2.5.4.RELEASE
71 |
72 |
73 |
74 | org.apache.avro
75 | avro
76 | ${avro.version}
77 |
78 |
79 |
80 | org.apache.avro
81 | avro-maven-plugin
82 | ${avro.version}
83 |
84 |
85 | org.apache.avro
86 | avro-compiler
87 | ${avro.version}
88 |
89 |
90 | org.apache.avro
91 | avro-ipc
92 | ${avro.version}
93 |
94 |
95 |
96 |
97 |
98 | org.apache.avro
99 | avro-maven-plugin
100 | ${avro.version}
101 |
102 |
103 | schemas
104 | generate-sources
105 |
106 | schema
107 | protocol
108 | idl-protocol
109 |
110 |
111 | ${project.basedir}/src/main/avro/
112 | ${project.basedir}/src/main/java/
113 |
114 |
115 |
116 |
117 |
118 |
119 |
--------------------------------------------------------------------------------
/springbootapp.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
--------------------------------------------------------------------------------
/src/main/avro/countsum.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "namespace": "io.confluent.demo",
3 | "type": "record",
4 | "name": "CountAndSum",
5 | "fields": [
6 | {
7 | "name": "count",
8 | "type": "long"
9 | },
10 | {
11 | "name": "sum",
12 | "type": "double"
13 | }
14 | ]
15 | }
--------------------------------------------------------------------------------
/src/main/avro/rating.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "namespace": "io.confluent.demo",
3 | "type": "record",
4 | "name": "Rating",
5 | "fields": [
6 | {
7 | "name": "movie_id",
8 | "type": "long"
9 | },
10 | {
11 | "name": "rating",
12 | "type": "double"
13 | }
14 | ]
15 | }
--------------------------------------------------------------------------------
/src/main/java/com/example/kafkastreams/restapi/springbootapp/SpringbootappApplication.java:
--------------------------------------------------------------------------------
1 | package com.example.kafkastreams.restapi.springbootapp;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 |
/**
 * Application entry point.
 *
 * <p>{@code @SpringBootApplication} enables component scanning and
 * auto-configuration; the Kafka Streams topology itself is wired up in
 * {@code config.KafkaStreamsConfig}.</p>
 */
@SpringBootApplication
public class SpringbootappApplication {

    /**
     * Boots the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        SpringApplication.run(SpringbootappApplication.class, args);
    }

}
14 |
--------------------------------------------------------------------------------
/src/main/java/com/example/kafkastreams/restapi/springbootapp/config/KafkaStreamsConfig.java:
--------------------------------------------------------------------------------
1 | package com.example.kafkastreams.restapi.springbootapp.config;
2 |
3 | import com.fasterxml.jackson.databind.JsonNode;
4 | import io.confluent.demo.CountAndSum;
5 | import io.confluent.demo.Rating;
6 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
7 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
8 | import org.apache.kafka.clients.consumer.ConsumerConfig;
9 | import org.apache.kafka.common.serialization.Serdes;
10 | import org.apache.kafka.common.utils.Bytes;
11 | import org.apache.kafka.streams.*;
12 | import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
13 | import org.apache.kafka.streams.kstream.*;
14 | import org.apache.kafka.streams.state.KeyValueStore;
15 | import org.springframework.beans.factory.annotation.Value;
16 | import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
17 | import org.springframework.context.annotation.Bean;
18 | import org.springframework.context.annotation.Configuration;
19 | import org.springframework.context.annotation.Primary;
20 | import org.springframework.kafka.support.serializer.JsonDeserializer;
21 |
22 | import java.util.HashMap;
23 | import java.util.Map;
24 | import java.util.Properties;
25 |
26 | import static java.util.Optional.ofNullable;
27 | import static org.apache.kafka.common.serialization.Serdes.Double;
28 | import static org.apache.kafka.common.serialization.Serdes.Long;
29 | import static org.apache.kafka.streams.StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG;
30 | import static org.apache.kafka.streams.kstream.Grouped.with;
31 |
32 | @Configuration
33 | public class KafkaStreamsConfig {
34 | @Value("${schema.registry.url}")
35 | private String schemaRegistryUrl;
36 |
37 | @Value("${app.server.config}")
38 | private String appServerConfig;
39 |
40 | @Value("${application.name}")
41 | private String appName;
42 |
43 | @Value("${rating.topic.name}")
44 | private String ratingTopicName;
45 |
46 | @Value("${average.rating.topic.name}")
47 | private String avgRatingsTopicName;
48 |
49 | @Value("${state.store.name}")
50 | private String stateStoreName;
51 |
52 | @Bean
53 | @Primary
54 | public KafkaStreams kafkaStreams(KafkaProperties kafkaProperties) {
55 | final Properties props = new Properties();
56 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
57 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, appName);
58 | props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
59 | props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Long().getClass());
60 | props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Double().getClass());
61 | props.put(StreamsConfig.STATE_DIR_CONFIG, "data");
62 | props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, appServerConfig);
63 | props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, JsonNode.class);
64 | props.put(DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, LogAndContinueExceptionHandler.class);
65 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
66 |
67 | Topology topology = this.buildTopology(new StreamsBuilder());
68 |
69 | final KafkaStreams kafkaStreams = new KafkaStreams(topology, props);
70 | kafkaStreams.start();
71 |
72 | return kafkaStreams;
73 | }
74 |
75 | private Topology buildTopology(StreamsBuilder bldr) {
76 | KStream ratingStream = bldr.stream(ratingTopicName,
77 | Consumed.with(Serdes.Long(), getRatingSerde(schemaRegistryUrl)));
78 |
79 | SpecificAvroSerde countAndSumSerde = getCountAndSumSerde(schemaRegistryUrl);
80 |
81 | // Grouping Ratings
82 | KGroupedStream ratingsById = ratingStream
83 | .map((key, rating) -> new KeyValue<>(rating.getMovieId(), rating.getRating()))
84 | .groupByKey(with(Long(), Double()));
85 |
86 | final KTable ratingCountAndSum =
87 | ratingsById.aggregate(() -> new CountAndSum(0L, 0.0),
88 | (key, value, aggregate) -> {
89 | aggregate.setCount(aggregate.getCount() + 1);
90 | aggregate.setSum(aggregate.getSum() + value);
91 | return aggregate;
92 | },
93 | Materialized.with(Long(), countAndSumSerde));
94 |
95 | final KTable ratingAverage =
96 | ratingCountAndSum.mapValues(value -> value.getSum() / value.getCount(),
97 | Materialized.>as(stateStoreName)
98 | .withKeySerde(Long())
99 | .withValueSerde(Double()));
100 |
101 | // persist the result in topic
102 | ratingAverage
103 | .toStream()
104 | .to(avgRatingsTopicName, Produced.with(Long(), Double()));
105 |
106 | // finish the topology
107 | return bldr.build();
108 | }
109 |
110 | private static SpecificAvroSerde getCountAndSumSerde(String schemaRegistryUrl) {
111 | SpecificAvroSerde serde = new SpecificAvroSerde<>();
112 | serde.configure(getSerdeConfig(schemaRegistryUrl), false);
113 | return serde;
114 | }
115 |
116 | private static SpecificAvroSerde getRatingSerde(String schemaRegistryUrl) {
117 | SpecificAvroSerde serde = new SpecificAvroSerde<>();
118 | serde.configure(getSerdeConfig(schemaRegistryUrl), false);
119 | return serde;
120 | }
121 |
122 | private static Map getSerdeConfig(String schemaRegistryUrl) {
123 | final HashMap map = new HashMap<>();
124 | map.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, ofNullable(schemaRegistryUrl).orElse(""));
125 | return map;
126 | }
127 | }
128 |
--------------------------------------------------------------------------------
/src/main/java/com/example/kafkastreams/restapi/springbootapp/controller/v1/MovieController.java:
--------------------------------------------------------------------------------
1 | package com.example.kafkastreams.restapi.springbootapp.controller.v1;
2 |
3 | import com.example.kafkastreams.restapi.springbootapp.dto.MovieAverageRatingResponse;
4 | import io.swagger.v3.oas.annotations.OpenAPIDefinition;
5 | import io.swagger.v3.oas.annotations.Operation;
6 | import io.swagger.v3.oas.annotations.Parameter;
7 | import io.swagger.v3.oas.annotations.info.Contact;
8 | import io.swagger.v3.oas.annotations.info.Info;
9 | import io.swagger.v3.oas.annotations.info.License;
10 | import io.swagger.v3.oas.annotations.media.Content;
11 | import io.swagger.v3.oas.annotations.media.Schema;
12 | import io.swagger.v3.oas.annotations.responses.ApiResponse;
13 | import io.swagger.v3.oas.annotations.responses.ApiResponses;
14 | import io.swagger.v3.oas.annotations.servers.Server;
15 | import org.apache.kafka.common.serialization.Serdes;
16 | import org.apache.kafka.streams.KafkaStreams;
17 | import org.apache.kafka.streams.KeyQueryMetadata;
18 | import org.apache.kafka.streams.StoreQueryParameters;
19 | import org.apache.kafka.streams.state.QueryableStoreType;
20 | import org.apache.kafka.streams.state.QueryableStoreTypes;
21 | import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
22 | import org.slf4j.Logger;
23 | import org.slf4j.LoggerFactory;
24 | import org.springframework.beans.factory.annotation.Autowired;
25 | import org.springframework.beans.factory.annotation.Value;
26 | import org.springframework.http.HttpStatus;
27 | import org.springframework.http.ResponseEntity;
28 | import org.springframework.web.bind.annotation.*;
29 |
30 | @OpenAPIDefinition(servers = { @Server(url = "http://localhost:7001") }, info = @Info(title = "Sample Spring Boot Kafka Stream API", version = "v1", description = "A demo project using Spring Boot with Kafka Streams.", license = @License(name = "MIT License", url = "https://github.com/bchen04/springboot-kafka-streams-rest-api/blob/master/LICENSE"), contact = @Contact(url = "https://www.linkedin.com/in/bchen04/", name = "Ben Chen")))
31 | @RestController
32 | @RequestMapping("v1/movie")
33 | public class MovieController {
34 | private final KafkaStreams streams;
35 | private static final Logger logger = LoggerFactory.getLogger(MovieController.class);
36 |
37 | @Autowired
38 | public MovieController(KafkaStreams streams) {
39 | this.streams = streams;
40 | }
41 |
42 | @Value("${state.store.name}")
43 | private String stateStoreName;
44 |
45 | @Operation(summary = "Returns the average rating for a particular movie")
46 | @ApiResponses(value = {
47 | @ApiResponse(responseCode = "200", description = "successful operation", content = @Content(schema = @Schema(type = "object"))),
48 | @ApiResponse(responseCode = "500", description = "internal server error")})
49 | @GetMapping(value = "{movieId}/rating", produces = { "application/json" })
50 | public ResponseEntity getMovieAverageRating(@Parameter(description = "Movie identifier", required = true, example = "362") @PathVariable Long movieId) {
51 | try {
52 | //find active, standby host list and partition for key
53 | //get the metadata related to the key.
54 | final KeyQueryMetadata keyQueryMetadata = streams.queryMetadataForKey(stateStoreName, movieId, Serdes.Long().serializer());
55 |
56 | //use the above information to redirect the query to the host containing the partition for the key
57 | final int keyPartition = keyQueryMetadata.getPartition();
58 |
59 | //querying local key-value stores
60 | final QueryableStoreType> queryableStoreType = QueryableStoreTypes.keyValueStore();
61 |
62 | //fetch the store for specific partition where the key belongs and look into stale stores as well
63 | ReadOnlyKeyValueStore store = streams
64 | .store(StoreQueryParameters.fromNameAndType(stateStoreName, queryableStoreType)
65 | .enableStaleStores()
66 | .withPartition(keyPartition));
67 |
68 | //get the value by key
69 | Double result = store.get(movieId);
70 |
71 | return ResponseEntity
72 | .ok()
73 | .body(new MovieAverageRatingResponse(movieId, result));
74 | }
75 | catch(Exception ex) {
76 | logger.error("Failed due to exception: {}", ex.getMessage());
77 |
78 | return ResponseEntity
79 | .status(HttpStatus.INTERNAL_SERVER_ERROR)
80 | .build();
81 | }
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/src/main/java/com/example/kafkastreams/restapi/springbootapp/dto/MovieAverageRatingResponse.java:
--------------------------------------------------------------------------------
1 | package com.example.kafkastreams.restapi.springbootapp.dto;
2 |
/**
 * Response body for the movie-average-rating endpoint.
 *
 * <p>Fields are public for JSON field serialization (as in the original) but
 * are now {@code final}: the DTO is write-once, so instances are immutable and
 * safe to share.</p>
 */
public class MovieAverageRatingResponse {
    /** Identifier of the movie the rating refers to. */
    public final Long movieId;
    /** Average rating for the movie. */
    public final Double rating;

    /**
     * @param movieId movie identifier
     * @param rating  average rating value
     */
    public MovieAverageRatingResponse(Long movieId, Double rating)
    {
        this.movieId = movieId;
        this.rating = rating;
    }
}
13 |
--------------------------------------------------------------------------------
/src/main/java/io/confluent/demo/CountAndSum.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Autogenerated by Avro
3 | *
4 | * DO NOT EDIT DIRECTLY
5 | */
6 | package io.confluent.demo;
7 |
8 | import org.apache.avro.generic.GenericArray;
9 | import org.apache.avro.specific.SpecificData;
10 | import org.apache.avro.util.Utf8;
11 | import org.apache.avro.message.BinaryMessageEncoder;
12 | import org.apache.avro.message.BinaryMessageDecoder;
13 | import org.apache.avro.message.SchemaStore;
14 |
15 | @org.apache.avro.specific.AvroGenerated
16 | public class CountAndSum extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
17 | private static final long serialVersionUID = 1108119859414124265L;
18 | public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"CountAndSum\",\"namespace\":\"io.confluent.demo\",\"fields\":[{\"name\":\"count\",\"type\":\"long\"},{\"name\":\"sum\",\"type\":\"double\"}]}");
19 | public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
20 |
21 | private static SpecificData MODEL$ = new SpecificData();
22 |
23 | private static final BinaryMessageEncoder ENCODER =
24 | new BinaryMessageEncoder(MODEL$, SCHEMA$);
25 |
26 | private static final BinaryMessageDecoder DECODER =
27 | new BinaryMessageDecoder(MODEL$, SCHEMA$);
28 |
29 | /**
30 | * Return the BinaryMessageEncoder instance used by this class.
31 | * @return the message encoder used by this class
32 | */
33 | public static BinaryMessageEncoder getEncoder() {
34 | return ENCODER;
35 | }
36 |
37 | /**
38 | * Return the BinaryMessageDecoder instance used by this class.
39 | * @return the message decoder used by this class
40 | */
41 | public static BinaryMessageDecoder getDecoder() {
42 | return DECODER;
43 | }
44 |
45 | /**
46 | * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
47 | * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
48 | * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
49 | */
50 | public static BinaryMessageDecoder createDecoder(SchemaStore resolver) {
51 | return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver);
52 | }
53 |
54 | /**
55 | * Serializes this CountAndSum to a ByteBuffer.
56 | * @return a buffer holding the serialized data for this instance
57 | * @throws java.io.IOException if this instance could not be serialized
58 | */
59 | public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
60 | return ENCODER.encode(this);
61 | }
62 |
63 | /**
64 | * Deserializes a CountAndSum from a ByteBuffer.
65 | * @param b a byte buffer holding serialized data for an instance of this class
66 | * @return a CountAndSum instance decoded from the given buffer
67 | * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
68 | */
69 | public static CountAndSum fromByteBuffer(
70 | java.nio.ByteBuffer b) throws java.io.IOException {
71 | return DECODER.decode(b);
72 | }
73 |
74 | private long count;
75 | private double sum;
76 |
77 | /**
78 | * Default constructor. Note that this does not initialize fields
79 | * to their default values from the schema. If that is desired then
80 | * one should use newBuilder()
.
81 | */
82 | public CountAndSum() {}
83 |
84 | /**
85 | * All-args constructor.
86 | * @param count The new value for count
87 | * @param sum The new value for sum
88 | */
89 | public CountAndSum(java.lang.Long count, java.lang.Double sum) {
90 | this.count = count;
91 | this.sum = sum;
92 | }
93 |
94 | public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
95 | public org.apache.avro.Schema getSchema() { return SCHEMA$; }
96 | // Used by DatumWriter. Applications should not call.
97 | public java.lang.Object get(int field$) {
98 | switch (field$) {
99 | case 0: return count;
100 | case 1: return sum;
101 | default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
102 | }
103 | }
104 |
105 | // Used by DatumReader. Applications should not call.
106 | @SuppressWarnings(value="unchecked")
107 | public void put(int field$, java.lang.Object value$) {
108 | switch (field$) {
109 | case 0: count = (java.lang.Long)value$; break;
110 | case 1: sum = (java.lang.Double)value$; break;
111 | default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
112 | }
113 | }
114 |
115 | /**
116 | * Gets the value of the 'count' field.
117 | * @return The value of the 'count' field.
118 | */
119 | public long getCount() {
120 | return count;
121 | }
122 |
123 |
124 | /**
125 | * Sets the value of the 'count' field.
126 | * @param value the value to set.
127 | */
128 | public void setCount(long value) {
129 | this.count = value;
130 | }
131 |
132 | /**
133 | * Gets the value of the 'sum' field.
134 | * @return The value of the 'sum' field.
135 | */
136 | public double getSum() {
137 | return sum;
138 | }
139 |
140 |
141 | /**
142 | * Sets the value of the 'sum' field.
143 | * @param value the value to set.
144 | */
145 | public void setSum(double value) {
146 | this.sum = value;
147 | }
148 |
149 | /**
150 | * Creates a new CountAndSum RecordBuilder.
151 | * @return A new CountAndSum RecordBuilder
152 | */
153 | public static io.confluent.demo.CountAndSum.Builder newBuilder() {
154 | return new io.confluent.demo.CountAndSum.Builder();
155 | }
156 |
157 | /**
158 | * Creates a new CountAndSum RecordBuilder by copying an existing Builder.
159 | * @param other The existing builder to copy.
160 | * @return A new CountAndSum RecordBuilder
161 | */
162 | public static io.confluent.demo.CountAndSum.Builder newBuilder(io.confluent.demo.CountAndSum.Builder other) {
163 | if (other == null) {
164 | return new io.confluent.demo.CountAndSum.Builder();
165 | } else {
166 | return new io.confluent.demo.CountAndSum.Builder(other);
167 | }
168 | }
169 |
170 | /**
171 | * Creates a new CountAndSum RecordBuilder by copying an existing CountAndSum instance.
172 | * @param other The existing instance to copy.
173 | * @return A new CountAndSum RecordBuilder
174 | */
175 | public static io.confluent.demo.CountAndSum.Builder newBuilder(io.confluent.demo.CountAndSum other) {
176 | if (other == null) {
177 | return new io.confluent.demo.CountAndSum.Builder();
178 | } else {
179 | return new io.confluent.demo.CountAndSum.Builder(other);
180 | }
181 | }
182 |
183 | /**
184 | * RecordBuilder for CountAndSum instances.
185 | */
186 | @org.apache.avro.specific.AvroGenerated
187 | public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
188 | implements org.apache.avro.data.RecordBuilder {
189 |
190 | private long count;
191 | private double sum;
192 |
193 | /** Creates a new Builder */
194 | private Builder() {
195 | super(SCHEMA$);
196 | }
197 |
198 | /**
199 | * Creates a Builder by copying an existing Builder.
200 | * @param other The existing Builder to copy.
201 | */
202 | private Builder(io.confluent.demo.CountAndSum.Builder other) {
203 | super(other);
204 | if (isValidValue(fields()[0], other.count)) {
205 | this.count = data().deepCopy(fields()[0].schema(), other.count);
206 | fieldSetFlags()[0] = other.fieldSetFlags()[0];
207 | }
208 | if (isValidValue(fields()[1], other.sum)) {
209 | this.sum = data().deepCopy(fields()[1].schema(), other.sum);
210 | fieldSetFlags()[1] = other.fieldSetFlags()[1];
211 | }
212 | }
213 |
214 | /**
215 | * Creates a Builder by copying an existing CountAndSum instance
216 | * @param other The existing instance to copy.
217 | */
218 | private Builder(io.confluent.demo.CountAndSum other) {
219 | super(SCHEMA$);
220 | if (isValidValue(fields()[0], other.count)) {
221 | this.count = data().deepCopy(fields()[0].schema(), other.count);
222 | fieldSetFlags()[0] = true;
223 | }
224 | if (isValidValue(fields()[1], other.sum)) {
225 | this.sum = data().deepCopy(fields()[1].schema(), other.sum);
226 | fieldSetFlags()[1] = true;
227 | }
228 | }
229 |
230 | /**
231 | * Gets the value of the 'count' field.
232 | * @return The value.
233 | */
234 | public long getCount() {
235 | return count;
236 | }
237 |
238 |
239 | /**
240 | * Sets the value of the 'count' field.
241 | * @param value The value of 'count'.
242 | * @return This builder.
243 | */
244 | public io.confluent.demo.CountAndSum.Builder setCount(long value) {
245 | validate(fields()[0], value);
246 | this.count = value;
247 | fieldSetFlags()[0] = true;
248 | return this;
249 | }
250 |
251 | /**
252 | * Checks whether the 'count' field has been set.
253 | * @return True if the 'count' field has been set, false otherwise.
254 | */
255 | public boolean hasCount() {
256 | return fieldSetFlags()[0];
257 | }
258 |
259 |
260 | /**
261 | * Clears the value of the 'count' field.
262 | * @return This builder.
263 | */
264 | public io.confluent.demo.CountAndSum.Builder clearCount() {
265 | fieldSetFlags()[0] = false;
266 | return this;
267 | }
268 |
269 | /**
270 | * Gets the value of the 'sum' field.
271 | * @return The value.
272 | */
273 | public double getSum() {
274 | return sum;
275 | }
276 |
277 |
278 | /**
279 | * Sets the value of the 'sum' field.
280 | * @param value The value of 'sum'.
281 | * @return This builder.
282 | */
283 | public io.confluent.demo.CountAndSum.Builder setSum(double value) {
284 | validate(fields()[1], value);
285 | this.sum = value;
286 | fieldSetFlags()[1] = true;
287 | return this;
288 | }
289 |
290 | /**
291 | * Checks whether the 'sum' field has been set.
292 | * @return True if the 'sum' field has been set, false otherwise.
293 | */
294 | public boolean hasSum() {
295 | return fieldSetFlags()[1];
296 | }
297 |
298 |
299 | /**
300 | * Clears the value of the 'sum' field.
301 | * @return This builder.
302 | */
303 | public io.confluent.demo.CountAndSum.Builder clearSum() {
304 | fieldSetFlags()[1] = false;
305 | return this;
306 | }
307 |
308 | @Override
309 | @SuppressWarnings("unchecked")
310 | public CountAndSum build() {
311 | try {
312 | CountAndSum record = new CountAndSum();
313 | record.count = fieldSetFlags()[0] ? this.count : (java.lang.Long) defaultValue(fields()[0]);
314 | record.sum = fieldSetFlags()[1] ? this.sum : (java.lang.Double) defaultValue(fields()[1]);
315 | return record;
316 | } catch (org.apache.avro.AvroMissingFieldException e) {
317 | throw e;
318 | } catch (java.lang.Exception e) {
319 | throw new org.apache.avro.AvroRuntimeException(e);
320 | }
321 | }
322 | }
323 |
324 | @SuppressWarnings("unchecked")
325 | private static final org.apache.avro.io.DatumWriter
326 | WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$);
327 |
328 | @Override public void writeExternal(java.io.ObjectOutput out)
329 | throws java.io.IOException {
330 | WRITER$.write(this, SpecificData.getEncoder(out));
331 | }
332 |
333 | @SuppressWarnings("unchecked")
334 | private static final org.apache.avro.io.DatumReader
335 | READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$);
336 |
337 | @Override public void readExternal(java.io.ObjectInput in)
338 | throws java.io.IOException {
339 | READER$.read(this, SpecificData.getDecoder(in));
340 | }
341 |
342 | @Override protected boolean hasCustomCoders() { return true; }
343 |
344 | @Override public void customEncode(org.apache.avro.io.Encoder out)
345 | throws java.io.IOException
346 | {
347 | out.writeLong(this.count);
348 |
349 | out.writeDouble(this.sum);
350 |
351 | }
352 |
353 | @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
354 | throws java.io.IOException
355 | {
356 | org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
357 | if (fieldOrder == null) {
358 | this.count = in.readLong();
359 |
360 | this.sum = in.readDouble();
361 |
362 | } else {
363 | for (int i = 0; i < 2; i++) {
364 | switch (fieldOrder[i].pos()) {
365 | case 0:
366 | this.count = in.readLong();
367 | break;
368 |
369 | case 1:
370 | this.sum = in.readDouble();
371 | break;
372 |
373 | default:
374 | throw new java.io.IOException("Corrupt ResolvingDecoder.");
375 | }
376 | }
377 | }
378 | }
379 | }
380 |
381 |
382 |
383 |
384 |
385 |
386 |
387 |
388 |
389 |
390 |
--------------------------------------------------------------------------------
/src/main/java/io/confluent/demo/Rating.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Autogenerated by Avro
3 | *
4 | * DO NOT EDIT DIRECTLY
5 | */
6 | package io.confluent.demo;
7 |
8 | import org.apache.avro.generic.GenericArray;
9 | import org.apache.avro.specific.SpecificData;
10 | import org.apache.avro.util.Utf8;
11 | import org.apache.avro.message.BinaryMessageEncoder;
12 | import org.apache.avro.message.BinaryMessageDecoder;
13 | import org.apache.avro.message.SchemaStore;
14 |
15 | @org.apache.avro.specific.AvroGenerated
16 | public class Rating extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
17 | private static final long serialVersionUID = 7100497284749734046L;
18 | public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Rating\",\"namespace\":\"io.confluent.demo\",\"fields\":[{\"name\":\"movie_id\",\"type\":\"long\"},{\"name\":\"rating\",\"type\":\"double\"}]}");
19 | public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
20 |
21 | private static SpecificData MODEL$ = new SpecificData();
22 |
23 | private static final BinaryMessageEncoder ENCODER =
24 | new BinaryMessageEncoder(MODEL$, SCHEMA$);
25 |
26 | private static final BinaryMessageDecoder DECODER =
27 | new BinaryMessageDecoder(MODEL$, SCHEMA$);
28 |
29 | /**
30 | * Return the BinaryMessageEncoder instance used by this class.
31 | * @return the message encoder used by this class
32 | */
33 | public static BinaryMessageEncoder getEncoder() {
34 | return ENCODER;
35 | }
36 |
37 | /**
38 | * Return the BinaryMessageDecoder instance used by this class.
39 | * @return the message decoder used by this class
40 | */
41 | public static BinaryMessageDecoder getDecoder() {
42 | return DECODER;
43 | }
44 |
45 | /**
46 | * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
47 | * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
48 | * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
49 | */
50 | public static BinaryMessageDecoder createDecoder(SchemaStore resolver) {
51 | return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver);
52 | }
53 |
54 | /**
55 | * Serializes this Rating to a ByteBuffer.
56 | * @return a buffer holding the serialized data for this instance
57 | * @throws java.io.IOException if this instance could not be serialized
58 | */
59 | public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
60 | return ENCODER.encode(this);
61 | }
62 |
63 | /**
64 | * Deserializes a Rating from a ByteBuffer.
65 | * @param b a byte buffer holding serialized data for an instance of this class
66 | * @return a Rating instance decoded from the given buffer
67 | * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
68 | */
69 | public static Rating fromByteBuffer(
70 | java.nio.ByteBuffer b) throws java.io.IOException {
71 | return DECODER.decode(b);
72 | }
73 |
74 | private long movie_id;
75 | private double rating;
76 |
77 | /**
78 | * Default constructor. Note that this does not initialize fields
79 | * to their default values from the schema. If that is desired then
80 | * one should use newBuilder()
.
81 | */
82 | public Rating() {}
83 |
84 | /**
85 | * All-args constructor.
86 | * @param movie_id The new value for movie_id
87 | * @param rating The new value for rating
88 | */
89 | public Rating(java.lang.Long movie_id, java.lang.Double rating) {
90 | this.movie_id = movie_id;
91 | this.rating = rating;
92 | }
93 |
94 | public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
95 | public org.apache.avro.Schema getSchema() { return SCHEMA$; }
96 | // Used by DatumWriter. Applications should not call.
97 | public java.lang.Object get(int field$) {
98 | switch (field$) {
99 | case 0: return movie_id;
100 | case 1: return rating;
101 | default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
102 | }
103 | }
104 |
105 | // Used by DatumReader. Applications should not call.
106 | @SuppressWarnings(value="unchecked")
107 | public void put(int field$, java.lang.Object value$) {
108 | switch (field$) {
109 | case 0: movie_id = (java.lang.Long)value$; break;
110 | case 1: rating = (java.lang.Double)value$; break;
111 | default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
112 | }
113 | }
114 |
115 | /**
116 | * Gets the value of the 'movie_id' field.
117 | * @return The value of the 'movie_id' field.
118 | */
119 | public long getMovieId() {
120 | return movie_id;
121 | }
122 |
123 |
124 | /**
125 | * Sets the value of the 'movie_id' field.
126 | * @param value the value to set.
127 | */
128 | public void setMovieId(long value) {
129 | this.movie_id = value;
130 | }
131 |
132 | /**
133 | * Gets the value of the 'rating' field.
134 | * @return The value of the 'rating' field.
135 | */
136 | public double getRating() {
137 | return rating;
138 | }
139 |
140 |
141 | /**
142 | * Sets the value of the 'rating' field.
143 | * @param value the value to set.
144 | */
145 | public void setRating(double value) {
146 | this.rating = value;
147 | }
148 |
149 | /**
150 | * Creates a new Rating RecordBuilder.
151 | * @return A new Rating RecordBuilder
152 | */
153 | public static io.confluent.demo.Rating.Builder newBuilder() {
154 | return new io.confluent.demo.Rating.Builder();
155 | }
156 |
157 | /**
158 | * Creates a new Rating RecordBuilder by copying an existing Builder.
159 | * @param other The existing builder to copy.
160 | * @return A new Rating RecordBuilder
161 | */
162 | public static io.confluent.demo.Rating.Builder newBuilder(io.confluent.demo.Rating.Builder other) {
163 | if (other == null) {
164 | return new io.confluent.demo.Rating.Builder();
165 | } else {
166 | return new io.confluent.demo.Rating.Builder(other);
167 | }
168 | }
169 |
170 | /**
171 | * Creates a new Rating RecordBuilder by copying an existing Rating instance.
172 | * @param other The existing instance to copy.
173 | * @return A new Rating RecordBuilder
174 | */
175 | public static io.confluent.demo.Rating.Builder newBuilder(io.confluent.demo.Rating other) {
176 | if (other == null) {
177 | return new io.confluent.demo.Rating.Builder();
178 | } else {
179 | return new io.confluent.demo.Rating.Builder(other);
180 | }
181 | }
182 |
183 | /**
184 | * RecordBuilder for Rating instances.
185 | */
186 | @org.apache.avro.specific.AvroGenerated
187 | public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
188 | implements org.apache.avro.data.RecordBuilder {
189 |
190 | private long movie_id;
191 | private double rating;
192 |
193 | /** Creates a new Builder */
194 | private Builder() {
195 | super(SCHEMA$);
196 | }
197 |
198 | /**
199 | * Creates a Builder by copying an existing Builder.
200 | * @param other The existing Builder to copy.
201 | */
202 | private Builder(io.confluent.demo.Rating.Builder other) {
203 | super(other);
204 | if (isValidValue(fields()[0], other.movie_id)) {
205 | this.movie_id = data().deepCopy(fields()[0].schema(), other.movie_id);
206 | fieldSetFlags()[0] = other.fieldSetFlags()[0];
207 | }
208 | if (isValidValue(fields()[1], other.rating)) {
209 | this.rating = data().deepCopy(fields()[1].schema(), other.rating);
210 | fieldSetFlags()[1] = other.fieldSetFlags()[1];
211 | }
212 | }
213 |
214 | /**
215 | * Creates a Builder by copying an existing Rating instance
216 | * @param other The existing instance to copy.
217 | */
218 | private Builder(io.confluent.demo.Rating other) {
219 | super(SCHEMA$);
220 | if (isValidValue(fields()[0], other.movie_id)) {
221 | this.movie_id = data().deepCopy(fields()[0].schema(), other.movie_id);
222 | fieldSetFlags()[0] = true;
223 | }
224 | if (isValidValue(fields()[1], other.rating)) {
225 | this.rating = data().deepCopy(fields()[1].schema(), other.rating);
226 | fieldSetFlags()[1] = true;
227 | }
228 | }
229 |
230 | /**
231 | * Gets the value of the 'movie_id' field.
232 | * @return The value.
233 | */
234 | public long getMovieId() {
235 | return movie_id;
236 | }
237 |
238 |
239 | /**
240 | * Sets the value of the 'movie_id' field.
241 | * @param value The value of 'movie_id'.
242 | * @return This builder.
243 | */
244 | public io.confluent.demo.Rating.Builder setMovieId(long value) {
245 | validate(fields()[0], value);
246 | this.movie_id = value;
247 | fieldSetFlags()[0] = true;
248 | return this;
249 | }
250 |
251 | /**
252 | * Checks whether the 'movie_id' field has been set.
253 | * @return True if the 'movie_id' field has been set, false otherwise.
254 | */
255 | public boolean hasMovieId() {
256 | return fieldSetFlags()[0];
257 | }
258 |
259 |
260 | /**
261 | * Clears the value of the 'movie_id' field.
262 | * @return This builder.
263 | */
264 | public io.confluent.demo.Rating.Builder clearMovieId() {
265 | fieldSetFlags()[0] = false;
266 | return this;
267 | }
268 |
269 | /**
270 | * Gets the value of the 'rating' field.
271 | * @return The value.
272 | */
273 | public double getRating() {
274 | return rating;
275 | }
276 |
277 |
278 | /**
279 | * Sets the value of the 'rating' field.
280 | * @param value The value of 'rating'.
281 | * @return This builder.
282 | */
283 | public io.confluent.demo.Rating.Builder setRating(double value) {
284 | validate(fields()[1], value);
285 | this.rating = value;
286 | fieldSetFlags()[1] = true;
287 | return this;
288 | }
289 |
290 | /**
291 | * Checks whether the 'rating' field has been set.
292 | * @return True if the 'rating' field has been set, false otherwise.
293 | */
294 | public boolean hasRating() {
295 | return fieldSetFlags()[1];
296 | }
297 |
298 |
299 | /**
300 | * Clears the value of the 'rating' field.
301 | * @return This builder.
302 | */
303 | public io.confluent.demo.Rating.Builder clearRating() {
304 | fieldSetFlags()[1] = false;
305 | return this;
306 | }
307 |
308 | @Override
309 | @SuppressWarnings("unchecked")
310 | public Rating build() {
311 | try {
312 | Rating record = new Rating();
313 | record.movie_id = fieldSetFlags()[0] ? this.movie_id : (java.lang.Long) defaultValue(fields()[0]);
314 | record.rating = fieldSetFlags()[1] ? this.rating : (java.lang.Double) defaultValue(fields()[1]);
315 | return record;
316 | } catch (org.apache.avro.AvroMissingFieldException e) {
317 | throw e;
318 | } catch (java.lang.Exception e) {
319 | throw new org.apache.avro.AvroRuntimeException(e);
320 | }
321 | }
322 | }
323 |
324 | @SuppressWarnings("unchecked")
325 | private static final org.apache.avro.io.DatumWriter
326 | WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$);
327 |
328 | @Override public void writeExternal(java.io.ObjectOutput out)
329 | throws java.io.IOException {
330 | WRITER$.write(this, SpecificData.getEncoder(out));
331 | }
332 |
333 | @SuppressWarnings("unchecked")
334 | private static final org.apache.avro.io.DatumReader
335 | READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$);
336 |
337 | @Override public void readExternal(java.io.ObjectInput in)
338 | throws java.io.IOException {
339 | READER$.read(this, SpecificData.getDecoder(in));
340 | }
341 |
342 | @Override protected boolean hasCustomCoders() { return true; }
343 |
344 | @Override public void customEncode(org.apache.avro.io.Encoder out)
345 | throws java.io.IOException
346 | {
347 | out.writeLong(this.movie_id);
348 |
349 | out.writeDouble(this.rating);
350 |
351 | }
352 |
353 | @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
354 | throws java.io.IOException
355 | {
356 | org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
357 | if (fieldOrder == null) {
358 | this.movie_id = in.readLong();
359 |
360 | this.rating = in.readDouble();
361 |
362 | } else {
363 | for (int i = 0; i < 2; i++) {
364 | switch (fieldOrder[i].pos()) {
365 | case 0:
366 | this.movie_id = in.readLong();
367 | break;
368 |
369 | case 1:
370 | this.rating = in.readDouble();
371 | break;
372 |
373 | default:
374 | throw new java.io.IOException("Corrupt ResolvingDecoder.");
375 | }
376 | }
377 | }
378 | }
379 | }
380 |
381 |
382 |
383 |
384 |
385 |
386 |
387 |
388 |
389 |
390 |
--------------------------------------------------------------------------------
/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | springdoc.swagger-ui.operationsSorter=method
2 | springdoc.api-docs.path=/api-docs
3 | springdoc.swagger-ui.disable-swagger-default-url=true
4 | server.port=7001
5 | application.name=springboot-kafka-streams-rest-api
6 | app.server.config=http://localhost:7001
7 | spring.kafka.bootstrap-servers=localhost:29092
8 | schema.registry.url=http://localhost:8081
9 | rating.topic.name=ratings
10 | average.rating.topic.name=rating-averages
11 | state.store.name=average-ratings
--------------------------------------------------------------------------------
/src/test/java/com/example/kafkastreams/restapi/springbootapp/SpringbootappApplicationTests.java:
--------------------------------------------------------------------------------
1 | package com.example.kafkastreams.restapi.springbootapp;
2 |
3 | import org.junit.jupiter.api.Test;
4 | import org.springframework.boot.test.context.SpringBootTest;
5 |
// Smoke test: boots the full Spring application context and fails if any bean
// definition, configuration property, or auto-configuration cannot initialize.
// NOTE(review): @SpringBootTest starts the real context — presumably the Kafka
// Streams configuration needs a reachable broker/schema registry unless it is
// mocked or lazily initialized; confirm how this behaves in CI.
@SpringBootTest
class SpringbootappApplicationTests {

	// Intentionally empty: success means the application context loaded
	// without throwing. No further assertions are needed.
	@Test
	void contextLoads() {
	}

}
14 |
--------------------------------------------------------------------------------