├── .gitattributes
├── .gitignore
├── Dockerfile
├── Kafka Avro Primer.pdf
├── LICENSE
├── README.md
├── docker-compose.yml
├── mvnw
├── mvnw.cmd
├── pom.xml
├── spring-boot-with-kafka.iml
└── src
    ├── main
    │   ├── java
    │   │   └── com
    │   │       └── demo
    │   │           ├── SpringBootWithKafkaApplication.java
    │   │           ├── config
    │   │           │   └── SwaggerConfig.java
    │   │           ├── consumer
    │   │           │   ├── Receiver.java
    │   │           │   └── ReceiverConfig.java
    │   │           ├── controllers
    │   │           │   └── KafkaController.java
    │   │           ├── domain
    │   │           │   └── UserRequest.java
    │   │           ├── producer
    │   │           │   ├── Sender.java
    │   │           │   └── SenderConfig.java
    │   │           └── util
    │   │               ├── AvroDeserializer.java
    │   │               └── AvroSerializer.java
    │   └── resources
    │       ├── application.yml
    │       └── avro
    │           └── user.avsc
    └── test
        ├── java
        │   └── com
        │       └── demo
        │           ├── SpringBootWithKafkaApplicationTests.java
        │           ├── producer
        │           │   └── SenderTest.java
        │           └── serializer
        │               ├── AvroDeserializerTest.java
        │               └── AvroSerializerTest.java
        └── resources
            └── application.yml
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
2 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
3 |
4 | # User-specific stuff
5 | .idea
6 | target/
7 | .idea/**/workspace.xml
8 | .idea/**/tasks.xml
9 | .idea/**/usage.statistics.xml
10 | .idea/**/dictionaries
11 | .idea/**/shelf
12 |
13 | # Generated files
14 | .idea/**/contentModel.xml
15 |
16 | # Sensitive or high-churn files
17 | .idea/**/dataSources/
18 | .idea/**/dataSources.ids
19 | .idea/**/dataSources.local.xml
20 | .idea/**/sqlDataSources.xml
21 | .idea/**/dynamic.xml
22 | .idea/**/uiDesigner.xml
23 | .idea/**/dbnavigator.xml
24 |
25 | # Gradle
26 | .idea/**/gradle.xml
27 | .idea/**/libraries
28 |
29 | # Gradle and Maven with auto-import
30 | # When using Gradle or Maven with auto-import, you should exclude module files,
31 | # since they will be recreated, and may cause churn. Uncomment if using
32 | # auto-import.
33 | # .idea/modules.xml
34 | # .idea/*.iml
35 | # .idea/modules
36 |
37 | # CMake
38 | cmake-build-*/
39 |
40 | # Mongo Explorer plugin
41 | .idea/**/mongoSettings.xml
42 |
43 | # File-based project format
44 | *.iws
45 |
46 | # IntelliJ
47 | out/
48 |
49 | # mpeltonen/sbt-idea plugin
50 | .idea_modules/
51 |
52 | # JIRA plugin
53 | atlassian-ide-plugin.xml
54 |
55 | # Cursive Clojure plugin
56 | .idea/replstate.xml
57 |
58 | # Crashlytics plugin (for Android Studio and IntelliJ)
59 | com_crashlytics_export_strings.xml
60 | crashlytics.properties
61 | crashlytics-build.properties
62 | fabric.properties
63 |
64 | # Editor-based Rest Client
65 | .idea/httpRequests
66 |
67 | # Android studio 3.1+ serialized cache file
68 | .idea/caches/build_file_checksums.ser
69 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2018 Confluent Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | FROM confluentinc/cp-kafka-connect:5.2.0
17 |
18 | ENV CONNECT_PLUGIN_PATH="/usr/share/java,/usr/share/confluent-hub-components"
19 |
20 | RUN confluent-hub install --no-prompt confluentinc/kafka-connect-datagen:latest
21 |
--------------------------------------------------------------------------------
/Kafka Avro Primer.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/andreydanil/spring-boot-kafka-avro-primer/37c8134eb11e4c1cefe310a1df418929a4add8b8/Kafka Avro Primer.pdf
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Spring Boot + Kafka + Schema Registry - Primer
2 |
3 | ## What is Schema Registry?
4 |
5 | According to [Confluent.io](https://docs.confluent.io/current/schema-registry/docs/index.html) : The Schema Registry stores a versioned history of all schemas and allows for the evolution of schemas according to the configured compatibility settings and expanded Avro support.
6 |
7 | ## Why do we need a Schema Registry?
8 |
9 | Simply put, we want to avoid garbage-in-garbage-out scenarios. Schema Registry requires message producers to comply with a registered schema and prevents them from pushing malformed messages into topics. This saves a lot of headaches for downstream consumers. Schema Registry is a critical component in enforcing data governance in a messaging platform.
10 |
11 | ## What is Avro?
12 |
13 | According to [Avro.Apache.org](https://avro.apache.org/docs/current/) : Apache Avro™ is a data serialization system.
14 |
15 | Avro provides:
16 |
17 | - Rich data structures.
18 | - A compact, fast, binary data format.
19 | - A container file, to store persistent data.
20 | - Remote procedure call (RPC).
21 | - Simple integration with dynamic languages. Code generation is not required to read or write data files nor to use or implement RPC protocols. Code generation is an optional optimization, only worth implementing for statically typed languages.
22 |
23 | ## What will we build in this tutorial?
24 |
25 | This is a tutorial for creating a simple Spring Boot application with Kafka and Schema Registry.
26 | The following topics are covered in this tutorial:
27 | 1. Working with Confluent.io components
28 | 2. Creating a Kafka Avro Producer using Spring Boot
29 | 3. Creating Avro schema and generating Java classes
30 | 4. A REST interface to send messages to a Kafka topic with Avro schema
31 | 5. View the messages from a Kafka Avro Consumer
32 |
33 | ## Getting Started
34 |
35 | In our sample application we will build a Spring Boot microservice that produces messages and uses Avro to serialize and push them into Kafka.
36 | For this tutorial we will be using the open source components of confluent platform. All of our microservices and infrastructure components will be dockerized and run using docker-compose.
37 |
38 |
39 | ### Get the code and tools
40 |
41 | - Download and install Maven from https://maven.apache.org/download.cgi
42 |
43 | - Download and install JDK 1.8 from http://www.oracle.com/technetwork/java/javase/downloads/index.html
44 |
45 | - Download and install Docker and Docker Compose for your OS.
46 |
47 | Clone this repo to your machine and change directory to spring-boot-kafka-avro. Later we will build the docker image referenced in the compose file.
48 |
49 | ```
50 | git clone https://github.com/andreydanil/spring-boot-kafka-avro.git
51 |
52 | cd spring-boot-kafka-avro
53 | ```
54 | First things first.
55 |
56 | Let's open the pom.xml file and look at the maven dependencies that are particularly important in this sample.
57 |
58 | The two important sections that make the magic happen are the Spring Kafka dependencies and the Avro dependencies, shown below:
59 | ```
60 | <dependency>
61 |     <groupId>org.springframework.kafka</groupId>
62 |     <artifactId>spring-kafka</artifactId>
63 |     <version>${spring-kafka.version}</version>
64 | </dependency>
65 |
66 | <dependency>
67 |     <groupId>org.springframework.kafka</groupId>
68 |     <artifactId>spring-kafka-test</artifactId>
69 |     <version>${spring-kafka.version}</version>
70 |     <scope>test</scope>
71 | </dependency>
72 | ```
73 | And ...
74 |
75 | ```
76 | <dependency>
77 |     <groupId>org.apache.avro</groupId>
78 |     <artifactId>avro</artifactId>
79 |     <version>${avro.version}</version>
80 | </dependency>
81 |
82 | <dependency>
83 |     <groupId>io.confluent</groupId>
84 |     <artifactId>kafka-avro-serializer</artifactId>
85 |     <version>${confluent.version}</version>
86 | </dependency>
87 | ```
88 | We will revisit these components later but first let's look at the Avro schema file in the source code.
89 |
90 | Open the user.avsc file from src\main\resources\avro
91 |
92 | ```
93 | {
94 | "namespace": "example.avro",
95 | "type": "record",
96 | "name": "User",
97 | "fields": [
98 | {"name": "name", "type": "string"},
99 | {"name": "id", "type": ["int", "null"]},
100 | {"name": "favorite_color", "type": ["string", "null"]}
101 | ]
102 | }
103 |
104 | ```
105 | This is a simple Avro schema file that describes the User message structure with various data types.
106 |
107 | Avro supports two kinds of data types:
108 |
109 | Primitive types: primitive types define the data types of the fields in our message schema. All Avro primitive types are supported. In our User example, we are using string and int in the Avro message schema.
110 |
111 | Complex types: we could also use the six complex data types supported in Avro to define our schema: records, enums, arrays, maps, unions, and fixed. In our User example, we are using the 'record' complex type to define the User message, and unions with null to make the id and favorite_color fields optional (see the sketch below).
112 |
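Because `id` and `favorite_color` are unions with `null`, the Java class generated from this schema exposes them as nullable fields. Here is a minimal sketch with the generated builder (the sample values are made up for illustration):

```
example.avro.User user = example.avro.User.newBuilder()
    .setName("Alice")          // "string" -> required CharSequence
    .setId(null)               // ["int", "null"] -> nullable Integer
    .setFavoriteColor("blue")  // ["string", "null"] -> nullable CharSequence
    .build();
```
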
113 | ### Generate classes from Avro schema files
114 |
115 | Once we define the schema, we then generate the Java source code using the maven plugin.
116 |
117 | Let's look at the pom.xml file once again
118 |
119 | ```
120 | <plugin>
121 |     <groupId>org.apache.avro</groupId>
122 |     <artifactId>avro-maven-plugin</artifactId>
123 |     <version>${avro.version}</version>
124 |     <executions>
125 |         <execution>
126 |             <phase>generate-sources</phase>
127 |             <goals>
128 |                 <goal>schema</goal>
129 |             </goals>
130 |             <configuration>
131 |                 <sourceDirectory>${project.basedir}/src/main/resources/avro/</sourceDirectory>
132 |                 <outputDirectory>${project.build.directory}/generated/avro</outputDirectory>
133 |             </configuration>
134 |         </execution>
135 |     </executions>
136 | </plugin>
137 |
138 | ```
139 | Notice the sourceDirectory and outputDirectory locations defined in the configuration section of the avro-maven-plugin.
140 |
141 | Running the following Maven command will do the trick and put the generated classes in our outputDirectory:
142 |
143 | spring-boot-kafka-avro\target\generated\avro\
144 |
145 | ```
146 | mvn generate-sources
147 | ```
148 |
149 | The generated source code comes in very handy to process messages in our application.
150 |
151 | ### Spring Boot Application
152 |
153 | Now let's see how this is done.
154 | Open the main application class defined in the source file SpringBootWithKafkaApplication.java from the following location:
155 |
156 | spring-boot-kafka-avro\src\main\java\com\demo
157 |
158 | Notice the properties that are defined to ensure we are able to interact with the Kafka and Schema Registry instances:
159 | ```
160 | ...
161 | Properties properties = new Properties();
162 | // Kafka Properties
163 | properties.setProperty("bootstrap.servers", bootstrap);
164 | properties.setProperty("acks", "all");
165 | properties.setProperty("retries", "10");
166 | // Avro properties
167 | properties.setProperty("key.serializer", StringSerializer.class.getName());
168 | properties.setProperty("value.serializer", KafkaAvroSerializer.class.getName());
169 | properties.setProperty("schema.registry.url", registry);
170 | ...
171 | ```
172 | In addition to the bootstrap server and the Schema Registry URL, we are also setting the serializer classes for the key and value properties.
173 |
174 | The KafkaAvroSerializer class is responsible for serializing the message into Avro format.
175 |
176 | After setting all the needed properties we then create an instance of the KafkaProducer.
177 |
178 | We then build the User object using the generated class and send it off to the Kafka topic.
179 |
180 | The setter methods in the generated User class come in very handy.
181 |
182 |
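Putting these pieces together, here is a minimal, self-contained sketch of such a producer. It assumes the host listener `localhost:29092` and the Schema Registry at `http://localhost:8081` from docker-compose.yml; the class name and the sample field values are made up:

```
import java.util.Properties;

import example.avro.User;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class AvroProducerDemo {
    public static void main(String[] args) {
        // Same properties as the snippet above, with local addresses filled in.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "localhost:29092");
        properties.setProperty("acks", "all");
        properties.setProperty("retries", "10");
        properties.setProperty("key.serializer", StringSerializer.class.getName());
        properties.setProperty("value.serializer", KafkaAvroSerializer.class.getName());
        properties.setProperty("schema.registry.url", "http://localhost:8081");

        // Build the User with the generated builder and send it off.
        User user = User.newBuilder()
                .setName("Alice")
                .setId(1)
                .setFavoriteColor("blue")
                .build();

        try (KafkaProducer<String, User> producer = new KafkaProducer<>(properties)) {
            producer.send(new ProducerRecord<>("users-demo", user.getName().toString(), user));
            producer.flush();
        }
    }
}
```

Using the user's name as the message key is an arbitrary choice here; any key (or none) works.
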
183 | ### Package the Spring Boot jar and create the docker image
184 |
185 | To compile and package the jar file and create a docker image, run the following commands:
186 |
187 | ```
188 | docker-compose up -d
189 | mvn clean package
190 | ```
191 |
192 |
193 | ### Running all the docker containers together
194 |
195 | To run the sample, make sure you have installed Docker and Docker Compose for your OS.
196 |
197 | Then simply run this command in the source root:
198 |
199 |
200 | ```
201 | docker-compose up -d
202 | ```
203 | You should see all the containers come up.
204 |
205 |
206 | This will produce the User message, serialize it into Avro format, and push it into the Kafka topic as a binary message.
207 |
208 | You should see a confirmation in your browser window, or in the terminal if you use curl, that the message was posted to the Kafka topic.
209 |
210 |
211 | ### Checking the message in Kafka Avro Consumer
212 |
213 | To consume the messages and deserialize the binary message back into a proper User object, we can run the built-in command-line utility.
214 |
215 | Run the following command:
216 |
217 |
218 | ```
219 | docker-compose exec schema-registry bash
220 | ```
221 | This will put you into the Schema Registry container, where you can execute the command-line Avro consumer to see your message.
222 |
223 | Make sure you pass the same topic name that you used in the Kafka Avro Producer above.
224 |
225 | ```
226 | kafka-avro-console-consumer --topic users-demo \
227 | --bootstrap-server broker:9092 \
228 | --property schema.registry.url=http://schema-registry:8081 \
229 | --from-beginning
230 | ```
231 | You should see the consumed message printed in your terminal.
232 |
233 |
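If you prefer to consume from code rather than the CLI, here is a minimal sketch of a Java consumer using Confluent's KafkaAvroDeserializer. It pairs with the KafkaAvroSerializer-based producer described earlier and assumes kafka-clients 2.0+, the host listener `localhost:29092`, and the Schema Registry at `http://localhost:8081`; the class name is made up:

```
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import example.avro.User;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class AvroConsumerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:29092");
        props.setProperty("group.id", "console-demo");
        props.setProperty("auto.offset.reset", "earliest");
        props.setProperty("key.deserializer", StringDeserializer.class.getName());
        props.setProperty("value.deserializer", KafkaAvroDeserializer.class.getName());
        props.setProperty("schema.registry.url", "http://localhost:8081");
        props.setProperty("specific.avro.reader", "true"); // decode into the generated User class

        try (KafkaConsumer<String, User> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("users-demo"));
            ConsumerRecords<String, User> records = consumer.poll(Duration.ofSeconds(10));
            for (ConsumerRecord<String, User> record : records) {
                System.out.printf("Consumed user: %s%n", record.value());
            }
        }
    }
}
```

Setting `specific.avro.reader` to true makes the deserializer return the generated User class instead of a GenericRecord.
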
234 | That concludes our sample application.
235 |
236 | ### Bring down the application and containers
237 | ```
238 | docker-compose down
239 | ```
240 | This stops containers and removes the containers, networks, volumes, and images created by `up`.
241 |
242 | By default, the only things removed are:
243 |
244 | - Containers for services defined in the Compose file
245 | - Networks defined in the `networks` section of the Compose file
246 | - The default network, if one is used
247 |
248 | You should see the containers and networks being stopped and removed.
249 |
250 |
251 |
252 | ## Built With
253 |
254 | * [Spring Boot](https://projects.spring.io/spring-boot/) - The Web Framework for Java-based Microservices
255 | * [Apache Kafka](https://kafka.apache.org/) - Message Broker and Streaming Platform
256 | * [Confluent Schema Registry](https://docs.confluent.io/current/schema-registry/docs/index.html) - Schema Registry Module
257 | * [Maven](https://maven.apache.org/) - Dependency Management
258 | * [Docker and Docker Compose](https://www.docker.com/) - Containerization
259 |
260 | ## Contributing
261 |
262 | Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us.
263 |
264 | ## Versioning
265 |
266 | * 0.0.1
267 |
268 | ## Authors
269 |
270 | * **Andrey Danilkovich** - *Initial work* - [andreydanil](https://github.com/andreydanil)
271 |
272 |
273 | ## License
274 |
275 | This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details
276 |
277 | ## Acknowledgments
278 |
279 | * Hat tip to anyone whose code was used
280 | * Inspiration from open source contributors
281 | * All open source and commercial components used in this sample are maintained and produced by their respective organizations
282 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '2'
3 | services:
4 | zookeeper:
5 | image: confluentinc/cp-zookeeper:5.2.0
6 | hostname: zookeeper
7 | container_name: zookeeper
8 | ports:
9 | - "2181:2181"
10 | environment:
11 | ZOOKEEPER_CLIENT_PORT: 2181
12 | ZOOKEEPER_TICK_TIME: 2000
13 |
14 | broker:
15 | image: confluentinc/cp-enterprise-kafka:5.2.0
16 | hostname: broker
17 | container_name: broker
18 | depends_on:
19 | - zookeeper
20 | ports:
21 | - "9092:9092"
22 | - "29092:29092"
23 | environment:
24 | KAFKA_BROKER_ID: 1
25 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
26 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
27 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:29092
28 | KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
29 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
30 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
31 | CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:9092
32 | CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181
33 | CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
34 | CONFLUENT_METRICS_ENABLE: 'true'
35 | CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
36 |
37 | schema-registry:
38 | image: confluentinc/cp-schema-registry:5.2.0
39 | hostname: schema-registry
40 | container_name: schema-registry
41 | depends_on:
42 | - zookeeper
43 | - broker
44 | ports:
45 | - "8081:8081"
46 | environment:
47 | SCHEMA_REGISTRY_HOST_NAME: schema-registry
48 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
49 |
50 | connect:
51 | image: confluentinc/kafka-connect-datagen:latest
52 | build:
53 | context: .
54 | dockerfile: Dockerfile
55 | hostname: connect
56 | container_name: connect
57 | depends_on:
58 | - zookeeper
59 | - broker
60 | - schema-registry
61 | ports:
62 | - "8083:8083"
63 | environment:
64 | CONNECT_BOOTSTRAP_SERVERS: 'broker:9092'
65 | CONNECT_REST_ADVERTISED_HOST_NAME: connect
66 | CONNECT_REST_PORT: 8083
67 | CONNECT_GROUP_ID: compose-connect-group
68 | CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
69 | CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
70 | CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
71 | CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
72 | CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
73 | CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
74 | CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
75 | CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
76 | CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
77 | CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
78 | CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
79 | CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
80 | CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
81 | # Assumes image is based on confluentinc/kafka-connect-datagen:latest which is pulling 5.1.1 Connect image
82 | CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.2.0.jar
83 | CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
84 | CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
85 | CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
86 | CONNECT_LOG4J_LOGGERS: org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR
87 |
88 | control-center:
89 | image: confluentinc/cp-enterprise-control-center:5.2.0
90 | hostname: control-center
91 | container_name: control-center
92 | depends_on:
93 | - zookeeper
94 | - broker
95 | - schema-registry
96 | - connect
97 | - ksql-server
98 | ports:
99 | - "9021:9021"
100 | environment:
101 | CONTROL_CENTER_BOOTSTRAP_SERVERS: 'broker:9092'
102 | CONTROL_CENTER_ZOOKEEPER_CONNECT: 'zookeeper:2181'
103 | CONTROL_CENTER_CONNECT_CLUSTER: 'connect:8083'
104 | CONTROL_CENTER_KSQL_URL: "http://ksql-server:8088"
105 | CONTROL_CENTER_KSQL_ADVERTISED_URL: "http://localhost:8088"
106 | CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
107 | CONTROL_CENTER_REPLICATION_FACTOR: 1
108 | CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1
109 | CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1
110 | CONFLUENT_METRICS_TOPIC_REPLICATION: 1
111 | PORT: 9021
112 |
113 | ksql-server:
114 | image: confluentinc/cp-ksql-server:5.2.0
115 | hostname: ksql-server
116 | container_name: ksql-server
117 | depends_on:
118 | - broker
119 | - connect
120 | ports:
121 | - "8088:8088"
122 | environment:
123 | KSQL_CONFIG_DIR: "/etc/ksql"
124 | KSQL_LOG4J_OPTS: "-Dlog4j.configuration=file:/etc/ksql/log4j-rolling.properties"
125 | KSQL_BOOTSTRAP_SERVERS: "broker:9092"
126 | KSQL_HOST_NAME: ksql-server
127 | KSQL_APPLICATION_ID: "cp-all-in-one"
128 | KSQL_LISTENERS: "http://0.0.0.0:8088"
129 | KSQL_CACHE_MAX_BYTES_BUFFERING: 0
130 | KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
131 | KSQL_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
132 | KSQL_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
133 |
134 | ksql-cli:
135 | image: confluentinc/cp-ksql-cli:5.2.0
136 | container_name: ksql-cli
137 | depends_on:
138 | - broker
139 | - connect
140 | - ksql-server
141 | entrypoint: /bin/sh
142 | tty: true
143 |
144 | ksql-datagen:
145 | image: confluentinc/ksql-examples:5.2.0
146 | hostname: ksql-datagen
147 | container_name: ksql-datagen
148 | depends_on:
149 | - ksql-server
150 | - broker
151 | - schema-registry
152 | - connect
153 | command: "bash -c 'echo Waiting for Kafka to be ready... && \
154 | cub kafka-ready -b broker:9092 1 40 && \
155 | echo Waiting for Confluent Schema Registry to be ready... && \
156 | cub sr-ready schema-registry 8081 40 && \
157 | echo Waiting a few seconds for topic creation to finish... && \
158 | sleep 11 && \
159 | tail -f /dev/null'"
160 | environment:
161 | KSQL_CONFIG_DIR: "/etc/ksql"
162 | KSQL_LOG4J_OPTS: "-Dlog4j.configuration=file:/etc/ksql/log4j-rolling.properties"
163 | STREAMS_BOOTSTRAP_SERVERS: broker:9092
164 | STREAMS_SCHEMA_REGISTRY_HOST: schema-registry
165 | STREAMS_SCHEMA_REGISTRY_PORT: 8081
166 |
167 | rest-proxy:
168 | image: confluentinc/cp-kafka-rest:5.2.0
169 | depends_on:
170 | - zookeeper
171 | - broker
172 | - schema-registry
173 | ports:
174 | - 8082:8082
175 | hostname: rest-proxy
176 | container_name: rest-proxy
177 | environment:
178 | KAFKA_REST_HOST_NAME: rest-proxy
179 | KAFKA_REST_BOOTSTRAP_SERVERS: 'broker:9092'
180 | KAFKA_REST_LISTENERS: "http://0.0.0.0:8082"
181 | KAFKA_REST_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
182 |
--------------------------------------------------------------------------------
/mvnw:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # ----------------------------------------------------------------------------
3 | # Licensed to the Apache Software Foundation (ASF) under one
4 | # or more contributor license agreements. See the NOTICE file
5 | # distributed with this work for additional information
6 | # regarding copyright ownership. The ASF licenses this file
7 | # to you under the Apache License, Version 2.0 (the
8 | # "License"); you may not use this file except in compliance
9 | # with the License. You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing,
14 | # software distributed under the License is distributed on an
15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 | # KIND, either express or implied. See the License for the
17 | # specific language governing permissions and limitations
18 | # under the License.
19 | # ----------------------------------------------------------------------------
20 |
21 | # ----------------------------------------------------------------------------
22 | # Maven2 Start Up Batch script
23 | #
24 | # Required ENV vars:
25 | # ------------------
26 | # JAVA_HOME - location of a JDK home dir
27 | #
28 | # Optional ENV vars
29 | # -----------------
30 | # M2_HOME - location of maven2's installed home dir
31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven
32 | # e.g. to debug Maven itself, use
33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files
35 | # ----------------------------------------------------------------------------
36 |
37 | if [ -z "$MAVEN_SKIP_RC" ] ; then
38 |
39 | if [ -f /etc/mavenrc ] ; then
40 | . /etc/mavenrc
41 | fi
42 |
43 | if [ -f "$HOME/.mavenrc" ] ; then
44 | . "$HOME/.mavenrc"
45 | fi
46 |
47 | fi
48 |
49 | # OS specific support. $var _must_ be set to either true or false.
50 | cygwin=false;
51 | darwin=false;
52 | mingw=false
53 | case "`uname`" in
54 | CYGWIN*) cygwin=true ;;
55 | MINGW*) mingw=true;;
56 | Darwin*) darwin=true
57 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
58 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
59 | if [ -z "$JAVA_HOME" ]; then
60 | if [ -x "/usr/libexec/java_home" ]; then
61 | export JAVA_HOME="`/usr/libexec/java_home`"
62 | else
63 | export JAVA_HOME="/Library/Java/Home"
64 | fi
65 | fi
66 | ;;
67 | esac
68 |
69 | if [ -z "$JAVA_HOME" ] ; then
70 | if [ -r /etc/gentoo-release ] ; then
71 | JAVA_HOME=`java-config --jre-home`
72 | fi
73 | fi
74 |
75 | if [ -z "$M2_HOME" ] ; then
76 | ## resolve links - $0 may be a link to maven's home
77 | PRG="$0"
78 |
79 | # need this for relative symlinks
80 | while [ -h "$PRG" ] ; do
81 | ls=`ls -ld "$PRG"`
82 | link=`expr "$ls" : '.*-> \(.*\)$'`
83 | if expr "$link" : '/.*' > /dev/null; then
84 | PRG="$link"
85 | else
86 | PRG="`dirname "$PRG"`/$link"
87 | fi
88 | done
89 |
90 | saveddir=`pwd`
91 |
92 | M2_HOME=`dirname "$PRG"`/..
93 |
94 | # make it fully qualified
95 | M2_HOME=`cd "$M2_HOME" && pwd`
96 |
97 | cd "$saveddir"
98 | # echo Using m2 at $M2_HOME
99 | fi
100 |
101 | # For Cygwin, ensure paths are in UNIX format before anything is touched
102 | if $cygwin ; then
103 | [ -n "$M2_HOME" ] &&
104 | M2_HOME=`cygpath --unix "$M2_HOME"`
105 | [ -n "$JAVA_HOME" ] &&
106 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
107 | [ -n "$CLASSPATH" ] &&
108 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
109 | fi
110 |
111 | # For Migwn, ensure paths are in UNIX format before anything is touched
112 | if $mingw ; then
113 | [ -n "$M2_HOME" ] &&
114 | M2_HOME="`(cd "$M2_HOME"; pwd)`"
115 | [ -n "$JAVA_HOME" ] &&
116 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
117 | # TODO classpath?
118 | fi
119 |
120 | if [ -z "$JAVA_HOME" ]; then
121 | javaExecutable="`which javac`"
122 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
123 | # readlink(1) is not available as standard on Solaris 10.
124 | readLink=`which readlink`
125 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
126 | if $darwin ; then
127 | javaHome="`dirname \"$javaExecutable\"`"
128 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
129 | else
130 | javaExecutable="`readlink -f \"$javaExecutable\"`"
131 | fi
132 | javaHome="`dirname \"$javaExecutable\"`"
133 | javaHome=`expr "$javaHome" : '\(.*\)/bin'`
134 | JAVA_HOME="$javaHome"
135 | export JAVA_HOME
136 | fi
137 | fi
138 | fi
139 |
140 | if [ -z "$JAVACMD" ] ; then
141 | if [ -n "$JAVA_HOME" ] ; then
142 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
143 | # IBM's JDK on AIX uses strange locations for the executables
144 | JAVACMD="$JAVA_HOME/jre/sh/java"
145 | else
146 | JAVACMD="$JAVA_HOME/bin/java"
147 | fi
148 | else
149 | JAVACMD="`which java`"
150 | fi
151 | fi
152 |
153 | if [ ! -x "$JAVACMD" ] ; then
154 | echo "Error: JAVA_HOME is not defined correctly." >&2
155 | echo " We cannot execute $JAVACMD" >&2
156 | exit 1
157 | fi
158 |
159 | if [ -z "$JAVA_HOME" ] ; then
160 | echo "Warning: JAVA_HOME environment variable is not set."
161 | fi
162 |
163 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
164 |
165 | # traverses directory structure from process work directory to filesystem root
166 | # first directory with .mvn subdirectory is considered project base directory
167 | find_maven_basedir() {
168 |
169 | if [ -z "$1" ]
170 | then
171 | echo "Path not specified to find_maven_basedir"
172 | return 1
173 | fi
174 |
175 | basedir="$1"
176 | wdir="$1"
177 | while [ "$wdir" != '/' ] ; do
178 | if [ -d "$wdir"/.mvn ] ; then
179 | basedir=$wdir
180 | break
181 | fi
182 | # workaround for JBEAP-8937 (on Solaris 10/Sparc)
183 | if [ -d "${wdir}" ]; then
184 | wdir=`cd "$wdir/.."; pwd`
185 | fi
186 | # end of workaround
187 | done
188 | echo "${basedir}"
189 | }
190 |
191 | # concatenates all lines of a file
192 | concat_lines() {
193 | if [ -f "$1" ]; then
194 | echo "$(tr -s '\n' ' ' < "$1")"
195 | fi
196 | }
197 |
198 | BASE_DIR=`find_maven_basedir "$(pwd)"`
199 | if [ -z "$BASE_DIR" ]; then
200 | exit 1;
201 | fi
202 |
203 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
204 | echo $MAVEN_PROJECTBASEDIR
205 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
206 |
207 | # For Cygwin, switch paths to Windows format before running java
208 | if $cygwin; then
209 | [ -n "$M2_HOME" ] &&
210 | M2_HOME=`cygpath --path --windows "$M2_HOME"`
211 | [ -n "$JAVA_HOME" ] &&
212 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
213 | [ -n "$CLASSPATH" ] &&
214 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
215 | [ -n "$MAVEN_PROJECTBASEDIR" ] &&
216 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
217 | fi
218 |
219 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
220 |
221 | exec "$JAVACMD" \
222 | $MAVEN_OPTS \
223 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
224 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
225 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
226 |
--------------------------------------------------------------------------------
/mvnw.cmd:
--------------------------------------------------------------------------------
1 | @REM ----------------------------------------------------------------------------
2 | @REM Licensed to the Apache Software Foundation (ASF) under one
3 | @REM or more contributor license agreements. See the NOTICE file
4 | @REM distributed with this work for additional information
5 | @REM regarding copyright ownership. The ASF licenses this file
6 | @REM to you under the Apache License, Version 2.0 (the
7 | @REM "License"); you may not use this file except in compliance
8 | @REM with the License. You may obtain a copy of the License at
9 | @REM
10 | @REM http://www.apache.org/licenses/LICENSE-2.0
11 | @REM
12 | @REM Unless required by applicable law or agreed to in writing,
13 | @REM software distributed under the License is distributed on an
14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | @REM KIND, either express or implied. See the License for the
16 | @REM specific language governing permissions and limitations
17 | @REM under the License.
18 | @REM ----------------------------------------------------------------------------
19 |
20 | @REM ----------------------------------------------------------------------------
21 | @REM Maven2 Start Up Batch script
22 | @REM
23 | @REM Required ENV vars:
24 | @REM JAVA_HOME - location of a JDK home dir
25 | @REM
26 | @REM Optional ENV vars
27 | @REM M2_HOME - location of maven2's installed home dir
28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
31 | @REM e.g. to debug Maven itself, use
32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
34 | @REM ----------------------------------------------------------------------------
35 |
36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
37 | @echo off
38 | @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
39 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
40 |
41 | @REM set %HOME% to equivalent of $HOME
42 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
43 |
44 | @REM Execute a user defined script before this one
45 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
46 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending
47 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
48 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
49 | :skipRcPre
50 |
51 | @setlocal
52 |
53 | set ERROR_CODE=0
54 |
55 | @REM To isolate internal variables from possible post scripts, we use another setlocal
56 | @setlocal
57 |
58 | @REM ==== START VALIDATION ====
59 | if not "%JAVA_HOME%" == "" goto OkJHome
60 |
61 | echo.
62 | echo Error: JAVA_HOME not found in your environment. >&2
63 | echo Please set the JAVA_HOME variable in your environment to match the >&2
64 | echo location of your Java installation. >&2
65 | echo.
66 | goto error
67 |
68 | :OkJHome
69 | if exist "%JAVA_HOME%\bin\java.exe" goto init
70 |
71 | echo.
72 | echo Error: JAVA_HOME is set to an invalid directory. >&2
73 | echo JAVA_HOME = "%JAVA_HOME%" >&2
74 | echo Please set the JAVA_HOME variable in your environment to match the >&2
75 | echo location of your Java installation. >&2
76 | echo.
77 | goto error
78 |
79 | @REM ==== END VALIDATION ====
80 |
81 | :init
82 |
83 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
84 | @REM Fallback to current working directory if not found.
85 |
86 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
87 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
88 |
89 | set EXEC_DIR=%CD%
90 | set WDIR=%EXEC_DIR%
91 | :findBaseDir
92 | IF EXIST "%WDIR%"\.mvn goto baseDirFound
93 | cd ..
94 | IF "%WDIR%"=="%CD%" goto baseDirNotFound
95 | set WDIR=%CD%
96 | goto findBaseDir
97 |
98 | :baseDirFound
99 | set MAVEN_PROJECTBASEDIR=%WDIR%
100 | cd "%EXEC_DIR%"
101 | goto endDetectBaseDir
102 |
103 | :baseDirNotFound
104 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
105 | cd "%EXEC_DIR%"
106 |
107 | :endDetectBaseDir
108 |
109 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
110 |
111 | @setlocal EnableExtensions EnableDelayedExpansion
112 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
113 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
114 |
115 | :endReadAdditionalConfig
116 |
117 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
118 |
119 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
120 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
121 |
122 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
123 | if ERRORLEVEL 1 goto error
124 | goto end
125 |
126 | :error
127 | set ERROR_CODE=1
128 |
129 | :end
130 | @endlocal & set ERROR_CODE=%ERROR_CODE%
131 |
132 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
133 | @REM check for post script, once with legacy .bat ending and once with .cmd ending
134 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
135 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
136 | :skipRcPost
137 |
138 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
139 | if "%MAVEN_BATCH_PAUSE%" == "on" pause
140 |
141 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
142 |
143 | exit /B %ERROR_CODE%
144 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <groupId>com.demo.kafka</groupId>
6 |     <artifactId>spring-boot-with-kafka</artifactId>
7 |     <version>0.0.1-SNAPSHOT</version>
8 |     <packaging>jar</packaging>
9 |     <name>spring-boot-with-kafka</name>
10 |     <description>Demo project for Spring Boot</description>
11 |     <parent>
12 |         <groupId>org.springframework.boot</groupId>
13 |         <artifactId>spring-boot-starter-parent</artifactId>
14 |         <version>2.0.5.RELEASE</version>
15 |         <relativePath/>
16 |     </parent>
17 |
18 |     <properties>
19 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
20 |         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
21 |         <java.version>1.8</java.version>
22 |         <avro.version>1.8.2</avro.version>
23 |     </properties>
24 |
25 |     <dependencies>
26 |         <dependency>
27 |             <groupId>org.springframework.boot</groupId>
28 |             <artifactId>spring-boot-starter-web</artifactId>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>org.springframework.kafka</groupId>
32 |             <artifactId>spring-kafka</artifactId>
33 |         </dependency>
34 |         <dependency>
35 |             <groupId>org.springframework.kafka</groupId>
36 |             <artifactId>spring-kafka-test</artifactId>
37 |             <scope>test</scope>
38 |         </dependency>
39 |         <dependency>
40 |             <groupId>org.springframework.boot</groupId>
41 |             <artifactId>spring-boot-starter-test</artifactId>
42 |             <scope>test</scope>
43 |         </dependency>
44 |         <dependency>
45 |             <groupId>org.apache.avro</groupId>
46 |             <artifactId>avro</artifactId>
47 |             <version>${avro.version}</version>
48 |         </dependency>
49 |         <dependency>
50 |             <groupId>javax.xml.bind</groupId>
51 |             <artifactId>jaxb-api</artifactId>
52 |             <version>2.2.11</version>
53 |         </dependency>
54 |         <dependency>
55 |             <groupId>com.sun.xml.bind</groupId>
56 |             <artifactId>jaxb-core</artifactId>
57 |             <version>2.2.11</version>
58 |         </dependency>
59 |         <dependency>
60 |             <groupId>com.sun.xml.bind</groupId>
61 |             <artifactId>jaxb-impl</artifactId>
62 |             <version>2.2.11</version>
63 |         </dependency>
64 |         <dependency>
65 |             <groupId>javax.activation</groupId>
66 |             <artifactId>activation</artifactId>
67 |             <version>1.1.1</version>
68 |         </dependency>
69 |         <dependency>
70 |             <groupId>org.projectlombok</groupId>
71 |             <artifactId>lombok</artifactId>
72 |             <version>1.16.16</version>
73 |             <scope>provided</scope>
74 |         </dependency>
75 |         <dependency>
76 |             <groupId>io.springfox</groupId>
77 |             <artifactId>springfox-swagger2</artifactId>
78 |             <version>2.9.2</version>
79 |         </dependency>
80 |         <dependency>
81 |             <groupId>io.springfox</groupId>
82 |             <artifactId>springfox-swagger-ui</artifactId>
83 |             <version>2.9.2</version>
84 |         </dependency>
85 |     </dependencies>
86 |
87 |     <build>
88 |         <plugins>
89 |             <plugin>
90 |                 <groupId>org.springframework.boot</groupId>
91 |                 <artifactId>spring-boot-maven-plugin</artifactId>
92 |             </plugin>
93 |             <plugin>
94 |                 <groupId>org.apache.avro</groupId>
95 |                 <artifactId>avro-maven-plugin</artifactId>
96 |                 <version>${avro.version}</version>
97 |                 <executions>
98 |                     <execution>
99 |                         <phase>generate-sources</phase>
100 |                         <goals>
101 |                             <goal>schema</goal>
102 |                         </goals>
103 |                         <configuration>
104 |                             <sourceDirectory>${project.basedir}/src/main/resources/avro/</sourceDirectory>
105 |                             <outputDirectory>${project.build.directory}/generated/avro</outputDirectory>
106 |                         </configuration>
107 |                     </execution>
108 |                 </executions>
109 |             </plugin>
110 |         </plugins>
111 |     </build>
112 | </project>
--------------------------------------------------------------------------------
/spring-boot-with-kafka.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/src/main/java/com/demo/SpringBootWithKafkaApplication.java:
--------------------------------------------------------------------------------
1 | package com.demo;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 |
6 | @SpringBootApplication
7 | public class SpringBootWithKafkaApplication {
8 |
9 | public static void main(String[] args) {
10 | SpringApplication.run(SpringBootWithKafkaApplication.class, args);
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/config/SwaggerConfig.java:
--------------------------------------------------------------------------------
1 | package com.demo.config;
2 |
3 | import org.springframework.beans.factory.annotation.Value;
4 | import org.springframework.context.annotation.Bean;
5 | import org.springframework.context.annotation.Configuration;
6 | import springfox.documentation.builders.ApiInfoBuilder;
7 | import springfox.documentation.builders.PathSelectors;
8 | import springfox.documentation.builders.RequestHandlerSelectors;
9 | import springfox.documentation.service.ApiInfo;
10 | import springfox.documentation.spi.DocumentationType;
11 | import springfox.documentation.spring.web.plugins.Docket;
12 | import springfox.documentation.swagger2.annotations.EnableSwagger2;
13 |
14 | @Configuration
15 | @EnableSwagger2
16 | public class SwaggerConfig {
17 |
18 | @Value("${swagger.exposedPackage}")
19 | private String swaggerExposedPackage;
20 |
21 | @Value("${swagger.title}")
22 | private String title;
23 |
24 | @Value("${swagger.description}")
25 | private String description;
26 |
27 | @Value("${swagger.contact.name}")
28 | private String contactName;
29 |
30 | @Value("${swagger.contact.url}")
31 | private String contactUrl;
32 |
33 | @Value("${swagger.contact.email}")
34 | private String contactEmail;
35 |
36 | @Value("${application.version:0.0.0}")
37 | private String version;
38 |
39 |
40 | @Bean
41 | public Docket api() {
42 | return new Docket(DocumentationType.SWAGGER_2)
43 | .select()
44 | .apis(RequestHandlerSelectors.basePackage(swaggerExposedPackage))
45 | .paths(PathSelectors.any())
46 | .build()
47 | .apiInfo(metaData());
48 | }
49 |
50 | private ApiInfo metaData() {
51 | return new ApiInfoBuilder()
52 | .title(title)
53 | .description(description)
54 | .version(version)
55 | .contact(contactName)
56 | .build();
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/consumer/Receiver.java:
--------------------------------------------------------------------------------
1 | package com.demo.consumer;
2 |
3 | import org.slf4j.Logger;
4 | import org.slf4j.LoggerFactory;
5 | import org.springframework.kafka.annotation.KafkaListener;
6 | import org.springframework.stereotype.Service;
7 |
8 | import java.io.IOException;
9 | import java.util.concurrent.CountDownLatch;
10 |
11 | @Service
12 | public class Receiver {
13 |
14 | private static final Logger LOGGER = LoggerFactory.getLogger(Receiver.class);
15 |
16 | private CountDownLatch latch = new CountDownLatch(1);
17 |
18 | public CountDownLatch getLatch() {
19 | return latch;
20 | }
21 |
22 | @KafkaListener(topics = "users-demo", groupId = "group_id")
23 |     public void consume(example.avro.User message) throws IOException {
24 |         LOGGER.info(String.format("#### -> Consumed message -> %s", message));
25 |         latch.countDown(); // release the latch so callers (e.g. tests) can await delivery
26 |     }
27 | }
27 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/consumer/ReceiverConfig.java:
--------------------------------------------------------------------------------
1 | package com.demo.consumer;
2 |
3 | import com.demo.util.AvroDeserializer;
4 | import example.avro.User;
5 | import org.apache.kafka.clients.consumer.ConsumerConfig;
6 | import org.apache.kafka.common.serialization.StringDeserializer;
7 | import org.springframework.beans.factory.annotation.Value;
8 | import org.springframework.context.annotation.Bean;
9 | import org.springframework.context.annotation.Configuration;
10 | import org.springframework.kafka.annotation.EnableKafka;
11 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
12 | import org.springframework.kafka.core.ConsumerFactory;
13 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
14 |
15 | import java.util.HashMap;
16 | import java.util.Map;
17 |
18 | @Configuration
19 | @EnableKafka
20 | public class ReceiverConfig {
21 |
22 | @Value("${spring.kafka.consumer.bootstrap-servers}")
23 | private String bootstrapServers;
24 |
25 | @Bean
26 |     public Map<String, Object> consumerConfigs() {
27 |         Map<String, Object> props = new HashMap<>();
28 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
29 | props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
30 | props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroDeserializer.class);
31 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "avro");
32 |
33 | return props;
34 | }
35 |
36 | @Bean
37 |     public ConsumerFactory<String, User> consumerFactory() {
38 | return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
39 | new AvroDeserializer<>(example.avro.User.class));
40 | }
41 |
42 | @Bean
43 |     public ConcurrentKafkaListenerContainerFactory<String, User> kafkaListenerContainerFactory() {
44 |         ConcurrentKafkaListenerContainerFactory<String, User> factory =
45 |             new ConcurrentKafkaListenerContainerFactory<>();
46 | factory.setConsumerFactory(consumerFactory());
47 |
48 | return factory;
49 | }
50 |
51 | @Bean
52 | public Receiver receiver() {
53 | return new Receiver();
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/controllers/KafkaController.java:
--------------------------------------------------------------------------------
1 | package com.demo.controllers;
2 |
3 | import com.demo.domain.UserRequest;
4 | import com.demo.producer.Sender;
5 | import org.springframework.beans.factory.annotation.Autowired;
6 | import org.springframework.web.bind.annotation.PostMapping;
7 | import org.springframework.web.bind.annotation.RequestBody;
8 | import org.springframework.web.bind.annotation.RequestMapping;
9 | import org.springframework.web.bind.annotation.RestController;
10 |
11 | @RestController
12 | @RequestMapping(value = "/kafka")
13 | public class KafkaController {
14 |
15 | private final Sender sender;
16 |
17 | @Autowired
18 | KafkaController(Sender sender) {
19 | this.sender = sender;
20 | }
21 |
22 | @PostMapping(value = "demo")
23 | public void sendSchemaRegistryToKafkaTopic(@RequestBody UserRequest request) {
24 | example.avro.User user = example.avro.User.newBuilder()
25 | .setName(request.getName())
26 | .setFavoriteColor(request.getFavoriteColor())
27 | .setId(request.getId())
28 | .build();
29 | this.sender.sendMessage(user);
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/domain/UserRequest.java:
--------------------------------------------------------------------------------
1 | package com.demo.domain;
2 |
3 | import lombok.AllArgsConstructor;
4 | import lombok.Builder;
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
9 |
10 | @Data
11 | @NoArgsConstructor
12 | @AllArgsConstructor
13 | @Builder
14 | public class UserRequest {
15 | private String name;
16 | private Integer id;
17 | private String favoriteColor;
18 | }
19 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/producer/Sender.java:
--------------------------------------------------------------------------------
1 | package com.demo.producer;
2 |
3 | import example.avro.User;
4 | import org.slf4j.Logger;
5 | import org.slf4j.LoggerFactory;
6 | import org.springframework.beans.factory.annotation.Autowired;
7 | import org.springframework.beans.factory.annotation.Value;
8 | import org.springframework.kafka.core.KafkaTemplate;
9 | import org.springframework.stereotype.Service;
10 |
11 | @Service
12 | public class Sender {
13 |
14 | private static final Logger LOGGER = LoggerFactory.getLogger(Sender.class);
15 |
16 |     @Value("${spring.kafka.topic.avro}")
17 |     private String avroTopic;
18 |
19 |     @Autowired
20 |     private KafkaTemplate<String, User> kafkaTemplate;
21 |
22 |     public void send(User user) {
23 |         LOGGER.info("sending user='{}'", user);
24 |         kafkaTemplate.send(avroTopic, user);
25 |     }
26 |
27 |     public void sendMessage(User user) {
28 |         LOGGER.info("#### -> Producing message -> {}", user);
29 |         this.kafkaTemplate.send(avroTopic, user);
30 |     }
31 | }
38 |
--------------------------------------------------------------------------------
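
In the spring-kafka generation this project uses, KafkaTemplate.send() is asynchronous and returns
a ListenableFuture<SendResult<K, V>>, so a broker-side failure is only visible through the returned
future. A minimal sketch of how either method above could surface the outcome (not part of the
original Sender):

    // log the partition/offset on success, the exception on failure
    kafkaTemplate.send(avroTopic, user).addCallback(
            result -> LOGGER.info("sent user to partition {} at offset {}",
                    result.getRecordMetadata().partition(),
                    result.getRecordMetadata().offset()),
            ex -> LOGGER.error("failed to send user='{}'", user, ex));
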
/src/main/java/com/demo/producer/SenderConfig.java:
--------------------------------------------------------------------------------
1 | package com.demo.producer;
2 |
3 | import com.demo.util.AvroSerializer;
4 | import org.apache.kafka.clients.producer.ProducerConfig;
5 | import org.apache.kafka.common.serialization.StringSerializer;
6 | import org.springframework.beans.factory.annotation.Value;
7 | import org.springframework.context.annotation.Bean;
8 | import org.springframework.context.annotation.Configuration;
9 | import org.springframework.kafka.core.DefaultKafkaProducerFactory;
10 | import org.springframework.kafka.core.KafkaTemplate;
11 | import org.springframework.kafka.core.ProducerFactory;
12 |
13 | import java.util.HashMap;
14 | import java.util.Map;
15 |
16 | @Configuration
17 | public class SenderConfig {
18 |
19 | @Value("${spring.kafka.producer.bootstrap-servers}")
20 | private String bootstrapServers;
21 |
22 | @Bean
23 |     public Map<String, Object> producerConfigs() {
24 |         Map<String, Object> props = new HashMap<>();
25 |
26 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
27 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
28 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroSerializer.class);
29 |
30 | return props;
31 | }
32 |
33 | @Bean
34 |     public ProducerFactory<String, example.avro.User> producerFactory() {
35 |         return new DefaultKafkaProducerFactory<>(producerConfigs());
36 | }
37 |
38 | @Bean
39 |     public KafkaTemplate<String, example.avro.User> kafkaTemplate() {
40 |         return new KafkaTemplate<>(producerFactory());
41 | }
42 |
43 | @Bean
44 | public Sender sender() {
45 | return new Sender();
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/util/AvroDeserializer.java:
--------------------------------------------------------------------------------
1 | package com.demo.util;
2 |
3 | import org.apache.avro.generic.GenericRecord;
4 | import org.apache.avro.io.DatumReader;
5 | import org.apache.avro.io.Decoder;
6 | import org.apache.avro.io.DecoderFactory;
7 | import org.apache.avro.specific.SpecificDatumReader;
8 | import org.apache.avro.specific.SpecificRecordBase;
9 | import org.apache.kafka.common.errors.SerializationException;
10 | import org.apache.kafka.common.serialization.Deserializer;
11 | import org.slf4j.Logger;
12 | import org.slf4j.LoggerFactory;
13 |
14 | import javax.xml.bind.DatatypeConverter;
15 | import java.util.Arrays;
16 | import java.util.Map;
17 |
18 | public class AvroDeserializer<T extends SpecificRecordBase> implements Deserializer<T> {
19 |
20 | private static final Logger LOGGER = LoggerFactory.getLogger(AvroDeserializer.class);
21 |
22 |     protected final Class<T> targetType;
23 |
24 |     public AvroDeserializer(Class<T> targetType) {
25 |         this.targetType = targetType;
26 |     }
27 |
28 | @Override
29 | public void close() {
30 | // No-op
31 | }
32 |
33 | @Override
34 |     public void configure(Map<String, ?> configs, boolean isKey) {
35 | // No-op
36 | }
37 |
38 | @SuppressWarnings("unchecked")
39 | @Override
40 | public T deserialize(String topic, byte[] data) {
41 | try {
42 | T result = null;
43 |
44 | if (data != null) {
45 | LOGGER.debug("data='{}'", DatatypeConverter.printHexBinary(data));
46 |
47 |                 DatumReader<GenericRecord> datumReader =
48 |                         new SpecificDatumReader<>(targetType.newInstance().getSchema());
49 | Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
50 |
51 | result = (T) datumReader.read(null, decoder);
52 | LOGGER.debug("deserialized data='{}'", result);
53 | }
54 | return result;
55 | } catch (Exception ex) {
56 | throw new SerializationException(
57 | "Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/src/main/java/com/demo/util/AvroSerializer.java:
--------------------------------------------------------------------------------
1 | package com.demo.util;
2 |
3 | import org.apache.avro.generic.GenericDatumWriter;
4 | import org.apache.avro.generic.GenericRecord;
5 | import org.apache.avro.io.BinaryEncoder;
6 | import org.apache.avro.io.DatumWriter;
7 | import org.apache.avro.io.EncoderFactory;
8 | import org.apache.avro.specific.SpecificRecordBase;
9 | import org.apache.kafka.common.errors.SerializationException;
10 | import org.apache.kafka.common.serialization.Serializer;
11 | import org.slf4j.Logger;
12 | import org.slf4j.LoggerFactory;
13 |
14 | import javax.xml.bind.DatatypeConverter;
15 | import java.io.ByteArrayOutputStream;
16 | import java.io.IOException;
17 | import java.util.Map;
18 |
19 | public class AvroSerializer<T extends SpecificRecordBase> implements Serializer<T> {
20 |
21 | private static final Logger LOGGER = LoggerFactory.getLogger(AvroSerializer.class);
22 |
23 | @Override
24 | public void close() {
25 | // No-op
26 | }
27 |
28 | @Override
29 |     public void configure(Map<String, ?> configs, boolean isKey) {
30 | // No-op
31 | }
32 |
33 | @Override
34 | public byte[] serialize(String topic, T data) {
35 | try {
36 | byte[] result = null;
37 |
38 | if (data != null) {
39 | LOGGER.debug("data='{}'", data);
40 |
41 | ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
42 | BinaryEncoder binaryEncoder =
43 | EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
44 |
45 |                 DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(data.getSchema());
46 | datumWriter.write(data, binaryEncoder);
47 |
48 | binaryEncoder.flush();
49 | byteArrayOutputStream.close();
50 |
51 | result = byteArrayOutputStream.toByteArray();
52 | LOGGER.debug("serialized data='{}'", DatatypeConverter.printHexBinary(result));
53 | }
54 | return result;
55 | } catch (IOException ex) {
56 | throw new SerializationException(
57 | "Can't serialize data='" + data + "' for topic='" + topic + "'", ex);
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
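
Taken together, AvroSerializer and AvroDeserializer are inverses for any generated
SpecificRecordBase subclass. A minimal round-trip sketch using the generated example.avro.User
(the topic argument is arbitrary here, since both classes only use it in error messages):

    AvroSerializer<example.avro.User> serializer = new AvroSerializer<>();
    AvroDeserializer<example.avro.User> deserializer =
            new AvroDeserializer<>(example.avro.User.class);

    example.avro.User user = example.avro.User.newBuilder()
            .setName("John Doe").setFavoriteColor("green").setId(null).build();

    byte[] bytes = serializer.serialize("users-demo", user);
    example.avro.User roundTripped = deserializer.deserialize("users-demo", bytes);
    // roundTripped.equals(user) -> true; the tests below pin the exact byte layout
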
/src/main/resources/application.yml:
--------------------------------------------------------------------------------
1 | server:
2 | port: 9000
3 | spring:
4 | kafka:
5 | topic:
6 | avro: users-demo
7 | consumer:
8 | schema:
9 | registry:
10 | url: localhost:8081
11 | bootstrap-servers: localhost:9092
12 | group-id: group_id
13 | auto-offset-reset: earliest
14 | key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
15 | producer:
16 | bootstrap-servers: localhost:9092
17 | key-serializer: org.apache.kafka.common.serialization.StringSerializer
18 |
19 | swagger:
20 | exposedPackage: com.demo.controllers
21 | title: Kafka Demo
22 | description: Rest API for the Kafka Demo
23 | contact:
24 | name: Solstice, LLC
25 | url: https://sites.google.com/solstice.com/solstice/home
26 | email: demo@solstice.com
27 |
28 | #logging:
29 | # level:
30 | # root: DEBUG
31 | # org.springframework.web: INFO
32 |
33 |
--------------------------------------------------------------------------------
/src/main/resources/avro/user.avsc:
--------------------------------------------------------------------------------
1 | {"namespace": "example.avro",
2 | "type": "record",
3 | "name": "User",
4 | "fields": [
5 | {"name": "name", "type": "string"},
6 | {"name": "id", "type": ["int", "null"]},
7 | {"name": "favorite_color", "type": ["string", "null"]}
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/src/test/java/com/demo/SpringBootWithKafkaApplicationTests.java:
--------------------------------------------------------------------------------
1 | package com.demo;
2 |
3 | import com.demo.consumer.Receiver;
4 | import com.demo.producer.Sender;
5 | import example.avro.User;
6 | import org.junit.Before;
7 | import org.junit.ClassRule;
8 | import org.junit.Ignore;
9 | import org.junit.Test;
10 | import org.junit.runner.RunWith;
11 | import org.springframework.beans.factory.annotation.Autowired;
12 | import org.springframework.boot.test.context.SpringBootTest;
13 | import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
14 | import org.springframework.kafka.listener.MessageListenerContainer;
15 | import org.springframework.kafka.test.rule.KafkaEmbedded;
16 | import org.springframework.kafka.test.utils.ContainerTestUtils;
17 | import org.springframework.test.context.junit4.SpringRunner;
18 |
19 | import java.util.concurrent.TimeUnit;
20 |
21 | import static org.assertj.core.api.Assertions.assertThat;
22 |
23 | @RunWith(SpringRunner.class)
24 | @SpringBootTest
25 | public class SpringBootWithKafkaApplicationTests {
26 |
27 | @ClassRule
28 |     public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, "users-demo");
29 | @Autowired
30 | private Sender sender;
31 | @Autowired
32 | private Receiver receiver;
33 | @Autowired
34 | private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;
35 |
36 | @Before
37 | public void setUp() throws Exception {
38 | // wait until the partitions are assigned
39 | for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
40 | .getListenerContainers()) {
41 | ContainerTestUtils.waitForAssignment(messageListenerContainer,
42 | embeddedKafka.getPartitionsPerTopic());
43 | }
44 | }
45 |
46 | @Ignore
47 | @Test
48 | public void testReceiver() throws Exception {
49 | User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
50 | .setId(null).build();
51 | sender.send(user);
52 |
53 | receiver.getLatch().await(10000, TimeUnit.MILLISECONDS);
54 | assertThat(receiver.getLatch().getCount()).isEqualTo(0);
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
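
KafkaEmbedded is deprecated as of spring-kafka-test 2.2. If the project's dependency is new
enough, the rule can be swapped for EmbeddedKafkaRule, with one further change in setUp() where
getPartitionsPerTopic() moves behind getEmbeddedKafka() -- a sketch, assuming that upgrade:

    @ClassRule
    public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(1, true, "users-demo");

    // in setUp():
    // ContainerTestUtils.waitForAssignment(messageListenerContainer,
    //         embeddedKafka.getEmbeddedKafka().getPartitionsPerTopic());
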
/src/test/java/com/demo/producer/SenderTest.java:
--------------------------------------------------------------------------------
1 | package com.demo.producer;
2 |
3 | import example.avro.User;
4 | import org.junit.Test;
5 | import org.springframework.kafka.core.KafkaTemplate;
6 | import org.springframework.test.util.ReflectionTestUtils;
7 |
8 | import static org.mockito.Mockito.mock;
9 | import static org.mockito.Mockito.verify;
10 |
11 | public class SenderTest {
12 |
13 |     @Test
14 |     public void sendPublishesUserToConfiguredTopic() {
15 |         // Mockito ships with spring-boot-starter-test; set the fields Spring would inject
16 |         KafkaTemplate<String, User> kafkaTemplate = mock(KafkaTemplate.class);
17 |         Sender sender = new Sender();
18 |         ReflectionTestUtils.setField(sender, "kafkaTemplate", kafkaTemplate);
19 |         ReflectionTestUtils.setField(sender, "avroTopic", "users-demo");
20 |         User user = User.newBuilder().setName("John Doe").setFavoriteColor("green").setId(null).build();
21 |         sender.send(user);
22 |         verify(kafkaTemplate).send("users-demo", user);
23 |     }
24 | }
--------------------------------------------------------------------------------
/src/test/java/com/demo/serializer/AvroDeserializerTest.java:
--------------------------------------------------------------------------------
1 | package com.demo.serializer;
2 |
3 | import com.demo.util.AvroDeserializer;
4 | import example.avro.User;
5 | import org.junit.Test;
6 |
7 | import javax.xml.bind.DatatypeConverter;
8 |
9 | import static org.assertj.core.api.Assertions.assertThat;
10 |
11 | public class AvroDeserializerTest {
12 |
13 | @Test
14 | public void testDeserialize() {
15 | User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
16 | .setId(null).build();
17 |
18 | byte[] data = DatatypeConverter.parseHexBinary("104A6F686E20446F6502000A677265656E");
19 |
20 |         AvroDeserializer<User> avroDeserializer = new AvroDeserializer<>(User.class);
21 |
22 | assertThat(avroDeserializer.deserialize("avro.t", data)).isEqualTo(user);
23 | avroDeserializer.close();
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
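
The fixed hex string asserted in this test (and in AvroSerializerTest below) is simply the record
in Avro's binary encoding: a string is a zigzag-varint length followed by UTF-8 bytes, and a union
is a zigzag-varint branch index followed by the value:

    10                 zigzag(8)  -> length of "John Doe"
    4A6F686E20446F65              UTF-8 bytes of "John Doe"
    02                 zigzag(1)  -> union branch 1, "null", for id
    00                 zigzag(0)  -> union branch 0, "string", for favorite_color
    0A                 zigzag(5)  -> length of "green"
    677265656E                    UTF-8 bytes of "green"
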
/src/test/java/com/demo/serializer/AvroSerializerTest.java:
--------------------------------------------------------------------------------
1 | package com.demo.serializer;
2 |
3 | import com.demo.util.AvroSerializer;
4 | import example.avro.User;
5 | import org.junit.Test;
6 |
7 | import javax.xml.bind.DatatypeConverter;
8 |
9 | import static org.assertj.core.api.Assertions.assertThat;
10 |
11 | public class AvroSerializerTest {
12 |
13 | @Test
14 | public void testSerialize() {
15 | User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
16 | .setId(null).build();
17 |
18 |         AvroSerializer<User> avroSerializer = new AvroSerializer<>();
19 | assertThat(avroSerializer.serialize("avro.t", user))
20 | .isEqualTo(DatatypeConverter.parseHexBinary("104A6F686E20446F6502000A677265656E"));
21 |
22 | avroSerializer.close();
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/src/test/resources/application.yml:
--------------------------------------------------------------------------------
1 | spring:
2 |   kafka:
3 |     consumer:
4 |       bootstrap-servers: ${spring.embedded.kafka.brokers}
5 |     producer:
6 |       bootstrap-servers: ${spring.embedded.kafka.brokers}
7 |     topic:
8 |       avro: users-demo
6 |
--------------------------------------------------------------------------------