├── .github └── workflows │ ├── build-and-publish.yaml │ └── release.yaml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── build.gradle.kts ├── fluent-kafka-streams-tests-junit4 ├── build.gradle.kts ├── lombok.config └── src │ ├── main │ └── java │ │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ └── junit4 │ │ └── TestTopologyRule.java │ └── test │ ├── java │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ └── junit4 │ │ ├── WordCountTest.java │ │ ├── WordCountWithStaticTopologyTest.java │ │ ├── WordCountWitherTest.java │ │ └── test_applications │ │ └── WordCount.java │ └── resources │ └── log4j2.xml ├── fluent-kafka-streams-tests-junit5 ├── build.gradle.kts ├── lombok.config └── src │ ├── main │ └── java │ │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ └── junit5 │ │ └── TestTopologyExtension.java │ └── test │ ├── java │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ └── junit5 │ │ ├── WordCountTest.java │ │ ├── WordCountWithStaticTopologyTest.java │ │ ├── WordCountWitherTest.java │ │ └── test_applications │ │ └── WordCount.java │ └── resources │ └── log4j2.xml ├── fluent-kafka-streams-tests ├── build.gradle.kts ├── lombok.config └── src │ ├── main │ └── java │ │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ ├── BaseOutput.java │ │ ├── Expectation.java │ │ ├── StreamOutput.java │ │ ├── TableOutput.java │ │ ├── TestInput.java │ │ ├── TestOutput.java │ │ └── TestTopology.java │ └── test │ ├── avro │ ├── City.avsc │ └── Person.avsc │ ├── java │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ ├── CountInhabitantsWithAvroTest.java │ │ ├── CountInhabitantsWithProtoTest.java │ │ ├── DynamicTopicTest.java │ │ ├── ErrorEventsPerMinuteTest.java │ │ ├── ForeignKeyJoinTest.java │ │ ├── HeaderTest.java │ │ ├── MirrorAvroNonDefaultSerdeTest.java │ │ ├── MirrorPatternTest.java │ │ ├── MirrorPatternTopicMixedTest.java │ │ ├── NameJoinTest.java │ │ ├── NameJoinWithIntermediateTopicTest.java │ │ ├── TestInputAndOutputTest.java │ │ ├── TestTopologyTest.java │ │ ├── UserClicksPerMinuteTest.java │ │ ├── WordCountTest.java │ │ ├── WordCountWithDefaultSerdeTest.java │ │ ├── WordCountWithStaticTopologyTest.java │ │ ├── serde │ │ ├── JsonDeserializer.java │ │ ├── JsonSerde.java │ │ └── JsonSerializer.java │ │ ├── test_applications │ │ ├── CountInhabitantsWithAvro.java │ │ ├── CountInhabitantsWithProto.java │ │ ├── ErrorEventsPerMinute.java │ │ ├── ForeignKeyJoin.java │ │ ├── Mirror.java │ │ ├── MirrorAvro.java │ │ ├── MirrorAvroNonDefaultSerde.java │ │ ├── MirrorPattern.java │ │ ├── MirrorPatternTopicMixed.java │ │ ├── NameJoinGlobalKTable.java │ │ ├── TopicExtractorApplication.java │ │ ├── UserClicksPerMinute.java │ │ └── WordCount.java │ │ └── test_types │ │ ├── ClickEvent.java │ │ ├── ClickOutput.java │ │ ├── ErrorOutput.java │ │ └── StatusCode.java │ ├── proto │ ├── city.proto │ └── person.proto │ └── resources │ └── log4j2.xml ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src └── main └── javadoc └── assertj-javadoc.css /.github/workflows/build-and-publish.yaml: -------------------------------------------------------------------------------- 1 | name: Build and Publish 2 | 3 | on: 4 | push: 5 | tags: ["**"] 6 | branches: 7 | - master 8 | pull_request: 9 | 10 | jobs: 11 | build-and-publish: 12 | name: Java Gradle 13 | uses: bakdata/ci-templates/.github/workflows/java-gradle-library.yaml@1.64.0 
14 | with: 15 | java-version: 17 16 | secrets: 17 | sonar-token: ${{ secrets.SONARCLOUD_TOKEN }} 18 | sonar-organization: ${{ secrets.SONARCLOUD_ORGANIZATION }} 19 | signing-secret-key-ring: ${{ secrets.SONATYPE_SIGNING_SECRET_KEY_RING }} 20 | signing-key-id: ${{ secrets.SONATYPE_SIGNING_KEY_ID }} 21 | signing-password: ${{ secrets.SONATYPE_SIGNING_PASSWORD }} 22 | ossrh-username: ${{ secrets.SONATYPE_OSSRH_USERNAME }} 23 | ossrh-password: ${{ secrets.SONATYPE_OSSRH_PASSWORD }} 24 | github-token: ${{ secrets.GH_TOKEN }} 25 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | release-type: 7 | description: "The scope of the release (major, minor or patch)." 8 | type: choice 9 | required: true 10 | default: patch 11 | options: 12 | - patch 13 | - minor 14 | - major 15 | 16 | jobs: 17 | java-gradle-release: 18 | name: Java Gradle 19 | uses: bakdata/ci-templates/.github/workflows/java-gradle-release.yaml@1.62.0 20 | with: 21 | java-version: 17 22 | release-type: "${{ inputs.release-type }}" 23 | secrets: 24 | github-email: "${{ secrets.GH_EMAIL }}" 25 | github-username: "${{ secrets.GH_USERNAME }}" 26 | github-token: "${{ secrets.GH_TOKEN }}" 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .* 2 | !.gitignore 3 | !.github 4 | **/build/ 5 | **/out/ 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 bakdata 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://dev.azure.com/bakdata/public/_apis/build/status/bakdata.fluent-kafka-streams-tests?branchName=master)](https://dev.azure.com/bakdata/public/_build/latest?definitionId=2&branchName=master) 2 | [![Sonarcloud status](https://sonarcloud.io/api/project_badges/measure?project=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests&metric=alert_status)](https://sonarcloud.io/dashboard?id=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests) 3 | [![Code coverage](https://sonarcloud.io/api/project_badges/measure?project=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests&metric=coverage)](https://sonarcloud.io/dashboard?id=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests) 4 | [![Maven](https://img.shields.io/maven-central/v/com.bakdata.fluent-kafka-streams-tests/fluent-kafka-streams-tests-junit5.svg)](https://search.maven.org/search?q=g:com.bakdata.fluent-kafka-streams-tests%20AND%20a:fluent-kafka-streams-tests*&core=gav) 5 | 6 | Fluent Kafka Streams Tests 7 | ========================= 8 | 9 | Write clean and concise tests for your Kafka Streams application. 10 | 11 | You can find a [blog post on Medium](https://medium.com/bakdata/fluent-kafka-streams-tests-e641785171ec) with some examples and detailed explanations of how Fluent Kafka Streams Tests work. 12 | 13 | ## Getting Started 14 | 15 | You can add Fluent Kafka Streams Tests via Maven Central. 16 | 17 | #### Gradle 18 | ```gradle 19 | compile group: 'com.bakdata.fluent-kafka-streams-tests', name: 'fluent-kafka-streams-tests-junit5', version: '2.1.0' 20 | ``` 21 | 22 | #### Maven 23 | ```xml 24 | <dependency> 25 |     <groupId>com.bakdata.fluent-kafka-streams-tests</groupId> 26 |     <artifactId>fluent-kafka-streams-tests-junit5</artifactId> 27 |     <version>2.1.0</version> 28 | </dependency> 29 | ``` 30 | 31 | There is also a junit4 version and one without any dependencies on a specific testing framework. 32 | 33 | For other build tools or versions, refer to the [overview of Sonatype](https://search.maven.org/search?q=g:com.bakdata.fluent-kafka-streams-tests%20AND%20a:fluent-kafka-streams-*&core=gav). 34 | 35 | ## Using it to Write Tests 36 | 37 | Here are two example tests which show you how to use Fluent Kafka Streams Tests. 38 | 39 | #### Word Count Test 40 | Assume you have a Word Count Kafka Streams application, called `WordCount`, and want to test it. 41 | First, start by creating a new test class for your application. 42 | 43 | ```java 44 | class WordCountTest { 45 | private final WordCount app = new WordCount(); 46 | } 47 | ``` 48 | 49 | Then, set up the `TestTopology`. 50 | 51 | ```java 52 | class WordCountTest { 53 | private final WordCount app = new WordCount(); 54 | 55 | @RegisterExtension 56 | final TestTopologyExtension testTopology = 57 | new TestTopologyExtension<>(this.app::getTopology, this.app.getKafkaProperties()); 58 | } 59 | ``` 60 | 61 | The `TestTopology` takes care of all the inputs, processing, and outputs of your application. 62 | For it to do that, you need to register it as an extension (JUnit 5) so that certain setup and teardown methods are called. 63 | The constructor expects a topology factory (for a fresh topology in each test) that creates the topology under test. 64 | 65 | Additionally, the Kafka properties of the application need to be specified. 66 | Broker and application id must be present (a limitation of Kafka's test utilities), but their values are ignored.
67 | Most importantly, if the application expects default serdes for key and value, these must be present in the properties or 68 | explicitly specified with `withDefaultKeySerde(Serde serde)` and/or `withDefaultValueSerde(Serde serde)`. 69 | 70 | To test your application, you can simply write a JUnit test. 71 | ```java 72 | class WordCountTest { 73 | private final WordCount app = new WordCount(); 74 | 75 | @RegisterExtension 76 | final TestTopologyExtension testTopology = 77 | new TestTopologyExtension<>(this.app::getTopology, this.app.getKafkaProperties()); 78 | 79 | @Test 80 | void shouldAggregateSameWordStream() { 81 | this.testTopology.input() 82 | .add("cat") 83 | .add("dog") 84 | .add("cat"); 85 | 86 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 87 | .expectNextRecord().hasKey("cat").hasValue(1L) 88 | .expectNextRecord().hasKey("dog").hasValue(1L) 89 | .expectNextRecord().hasKey("cat").hasValue(2L) 90 | .expectNoMoreRecord(); 91 | } 92 | } 93 | ``` 94 | 95 | See the tests for the [junit4](fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/WordCountTest.java) and [framework agnostic](fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/WordCountTest.java) setup. 96 | 97 | The `TestTopology` has a method `.input()` to retrieve the input topic (or `.input(String topic)` if more than one input topic is present). 98 | You can simply add values to your input stream by calling `.add(V value)` or `.add(K key, V value)`. 99 | 100 | To get the output, `TestTopology` provides two methods: `.streamOutput()` and `.tableOutput()`. 101 | They behave just like the input with regard to the number of output topics. 102 | Using the stream version simulates Kafka's stream semantics, meaning that a key can be present many times in an output stream, whereas the table semantics only output the latest value for each key. 103 | 104 | To check the output records, you can call `.expectNextRecord()` and then chain `.hasKey(K key)`, `.hasKeySatisfying(Consumer requirements)`, `.hasValue(V value)`, or `.hasValueSatisfying(Consumer requirements)` to this call. 105 | Note that calling `.expectNextRecord()` by itself, without chaining at least one of the `.has*` methods, will not check for the existence of a next record! 106 | 107 | Once you expect no further records, call `.expectNoMoreRecord()` to indicate the end of the output stream. 108 | 109 | #### Using Other Test Frameworks to Check Output 110 | We intentionally kept the API for output checking slim, because there are many tools out there which focus on doing exactly that. 111 | The `TestOutput` class implements the `Iterable` interface, so you can use your favorite tool to test iterables. 112 | 113 | Here is an example using [AssertJ](http://joel-costigliola.github.io/assertj/). 114 | 115 | ```java 116 | @Test 117 | void shouldReturnCorrectIteratorTable() { 118 | this.testTopology.input() 119 | .add("cat") 120 | .add("dog") 121 | .add("bird"); 122 | 123 | assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long())) 124 | .extracting(ProducerRecord::key) 125 | .containsAll(List.of("cat", "dog", "bird")); 126 | } 127 | ``` 128 |
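For comparison with the AssertJ example above, the same table output can also be checked with the fluent expectation API, which makes the stream-versus-table semantics described earlier visible. The following sketch is adapted from the `shouldAggregateSameWordTable` test in this repository's JUnit 5 test sources; the concrete words are only assumed here to match the README examples.

```java
@Test
void shouldAggregateSameWordTable() {
    this.testTopology.input()
        .add("cat")
        .add("dog")
        .add("cat");

    // Table semantics: only the latest count per key is emitted.
    this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long())
        .expectNextRecord().hasKey("cat").hasValue(2L)
        .expectNextRecord().hasKey("dog").hasValue(1L)
        .expectNoMoreRecord();
}
```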
129 | There is also an API to consume a record's key or value in order to embed another assertion framework into our API. 130 | 131 | ```java 132 | @Test 133 | void shouldReturnCorrectIteratorTable() { 134 | this.testTopology.input() 135 | .add("cat"); 136 | 137 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 138 | .expectNextRecord() 139 | .hasKeySatisfying(key -> assertThat(key).isEqualTo("cat")) 140 | .hasValueSatisfying(value -> assertThat(value).isEqualTo(1L)) 141 | .expectNoMoreRecord(); 142 | } 143 | ``` 144 | 145 | Alternatively, you can convert the output to a `List` for use with your assertion framework. Here is an example of this with [AssertJ](http://joel-costigliola.github.io/assertj/). 146 | 147 | ```java 148 | @Test 149 | void shouldConvertStreamOutputToList() { 150 | this.testTopology.input() 151 | .add("cat") 152 | .add("dog") 153 | .add("bird"); 154 | 155 | final List<ProducerRecord<String, Long>> outputs = this.testTopology.streamOutput() 156 | .withSerde(Serdes.String(), Serdes.Long()) 157 | .toList(); 158 | 159 | assertThat(outputs) 160 | .extracting(ProducerRecord::key) 161 | .containsExactly("cat", "dog", "bird"); 162 | assertThat(outputs) 163 | .extracting(ProducerRecord::value) 164 | .containsExactly(1L, 1L, 1L); 165 | } 166 | ``` 167 | 168 | #### More Examples 169 | 170 | You can find many more tests 171 | in [this repository's test code](https://github.com/bakdata/fluent-kafka-streams-tests/tree/master/fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests). 172 | 173 | 174 | ## Development 175 | 176 | If you want to contribute to this project, you can simply clone the repository and build it via Gradle. 177 | All dependencies are included in the Gradle files; there are no external prerequisites. 178 | 179 | ```bash 180 | > git clone git@github.com:bakdata/fluent-kafka-streams-tests.git 181 | > cd fluent-kafka-streams-tests && ./gradlew build 182 | ``` 183 | 184 | Please note that we have [code styles](https://github.com/bakdata/bakdata-code-styles) for Java. 185 | They are basically the Google style guide, with some small modifications. 186 | 187 | ## Contributing 188 | 189 | We are happy if you want to contribute to this project. 190 | If you find any bugs or have suggestions for improvements, please open an issue. 191 | We are also happy to accept your PRs. 192 | Just open an issue beforehand and let us know what you want to do and why. 193 | 194 | ## License 195 | This project is licensed under the MIT license. 196 | Have a look at the [LICENSE](https://github.com/bakdata/fluent-kafka-streams-tests/blob/master/LICENSE) for more details.
197 | -------------------------------------------------------------------------------- /build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | // release 3 | id("com.bakdata.release") version "1.7.1" 4 | id("com.bakdata.sonar") version "1.7.1" 5 | id("com.bakdata.sonatype") version "1.9.0" 6 | id("io.freefair.lombok") version "8.12.2.1" apply false 7 | } 8 | 9 | allprojects { 10 | group = "com.bakdata.${rootProject.name}" 11 | 12 | tasks.withType { 13 | maxParallelForks = 4 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | maven(url = "https://packages.confluent.io/maven/") 19 | maven(url = "https://s01.oss.sonatype.org/content/repositories/snapshots") 20 | } 21 | 22 | } 23 | 24 | subprojects { 25 | apply(plugin = "java-library") 26 | apply(plugin = "io.freefair.lombok") 27 | configure { 28 | toolchain { 29 | languageVersion = JavaLanguageVersion.of(11) 30 | } 31 | } 32 | 33 | tasks.withType { 34 | options { 35 | (this as StandardJavadocDocletOptions).apply { 36 | addBooleanOption("html5", true) 37 | stylesheetFile(File("$rootDir/src/main/javadoc/assertj-javadoc.css")) 38 | addBooleanOption("-allow-script-in-comments", true) 39 | header("") 40 | footer("") 41 | } 42 | } 43 | } 44 | 45 | dependencies { 46 | val log4jVersion: String by project 47 | "testImplementation"(group = "org.apache.logging.log4j", name = "log4j-slf4j2-impl", version = log4jVersion) 48 | "testImplementation"(group = "org.assertj", name = "assertj-core", version = "3.27.2") 49 | } 50 | 51 | publication { 52 | developers { 53 | developer { 54 | name.set("Arvid Heise") 55 | id.set("AHeise") 56 | } 57 | developer { 58 | name.set("Lawrence Benson") 59 | id.set("lawben") 60 | } 61 | developer { 62 | name.set("Torben Meyer") 63 | id.set("torbsto") 64 | } 65 | developer { 66 | name.set("Philipp Schirmer") 67 | id.set("philipp98431") 68 | } 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/build.gradle.kts: -------------------------------------------------------------------------------- 1 | description = "Provides the fluent Kafka Streams test framework." 
2 | 3 | dependencies { 4 | api(project(":fluent-kafka-streams-tests")) 5 | 6 | val junit4Version: String by project 7 | api(group = "junit", name = "junit", version = junit4Version) 8 | testImplementation(group = "junit", name = "junit", version = junit4Version) 9 | } 10 | 11 | tasks.test { 12 | useJUnit() 13 | } 14 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/lombok.config: -------------------------------------------------------------------------------- 1 | # This file is generated by the 'io.freefair.lombok' Gradle plugin 2 | config.stopBubbling = true 3 | lombok.addLombokGeneratedAnnotation = true 4 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/main/java/com/bakdata/fluent_kafka_streams_tests/junit4/TestTopologyRule.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.TestTopology; 28 | import java.util.Map; 29 | import java.util.function.Function; 30 | import java.util.function.Supplier; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.streams.Topology; 34 | import org.junit.rules.TestRule; 35 | import org.junit.runner.Description; 36 | import org.junit.runners.model.Statement; 37 | 38 | /** 39 | *

Represents the main interaction with Kafka for testing purposes. Handles all inputs and outputs of the 40 | * {@link Topology} under test. This should be registered as an extension in your JUnit tests, to ensure that certain 41 | * setup and teardown methods are called.

Usage: 42 | *

 43 |  * public class WordCountTest {
 44 |  *     private final WordCount app = new WordCount();
 45 |  *
 46 |  *     {@literal @Rule
 47 |  *     public final TestTopologyRule testTopology =
 48 |  *         new TestTopologyRule<>(this.app::getTopology, this.app.getKafkaProperties());}
 49 |  *
 50 |  *     {@literal @Test}
 51 |  *     public void shouldAggregateSameWordStream() {
 52 |  *         this.testTopology.input()
 53 |  *             .add("cat")
 54 |  *             .add("dog")
 55 |  *             .add("cat");
 56 |  *
 57 |  *         this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
 58 |  *             .expectNextRecord().hasKey("cat").hasValue(1L)
 59 |  *             .expectNextRecord().hasKey("dog").hasValue(1L)
 60 |  *             .expectNextRecord().hasKey("cat").hasValue(2L)
 61 |  *             .expectNoMoreRecord();
 62 |  *     }
 63 |  * }
 64 |  * 
65 | *

With {@code app} being any Kafka Streams application that you want to test.

66 | * @param Default type of keys 67 | * @param Default type of values 68 | */ 69 | @Getter 70 | public class TestTopologyRule extends TestTopology 71 | implements TestRule { 72 | public TestTopologyRule( 73 | final Function, ? extends Topology> topologyFactory, 74 | final Map properties) { 75 | super(topologyFactory, properties); 76 | } 77 | 78 | public TestTopologyRule( 79 | final Supplier topologyFactory, final Map properties) { 80 | super(topologyFactory, properties); 81 | } 82 | 83 | public TestTopologyRule(final Topology topology, final Map properties) { 84 | super(topology, properties); 85 | } 86 | 87 | protected TestTopologyRule( 88 | final Function, ? extends Topology> topologyFactory, 89 | final Map userProperties, 90 | final Serde defaultKeySerde, final Serde defaultValueSerde) { 91 | super(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 92 | } 93 | 94 | @Override 95 | public Statement apply(final Statement base, final Description description) { 96 | return new Statement() { 97 | @Override 98 | public void evaluate() throws Throwable { 99 | TestTopologyRule.this.start(); 100 | try { 101 | base.evaluate(); 102 | } finally { 103 | TestTopologyRule.this.stop(); 104 | } 105 | } 106 | }; 107 | } 108 | 109 | @Override 110 | public TestTopologyRule withDefaultValueSerde(final Serde defaultValueSerde) { 111 | return (TestTopologyRule) super.withDefaultValueSerde(defaultValueSerde); 112 | } 113 | 114 | @Override 115 | public TestTopologyRule withDefaultKeySerde(final Serde defaultKeySerde) { 116 | return (TestTopologyRule) super.withDefaultKeySerde(defaultKeySerde); 117 | } 118 | 119 | @Override 120 | public TestTopologyRule withDefaultSerde(final Serde defaultKeySerde, 121 | final Serde defaultValueSerde) { 122 | return (TestTopologyRule) super.withDefaultSerde(defaultKeySerde, defaultValueSerde); 123 | } 124 | 125 | @Override 126 | protected TestTopologyRule with( 127 | final Function, ? extends Topology> topologyFactory, 128 | final Map userProperties, final Serde defaultKeySerde, 129 | final Serde defaultValueSerde) { 130 | return new TestTopologyRule<>(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 131 | } 132 | 133 | } 134 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/WordCountWithStaticTopologyTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit4.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.Rule; 30 | import org.junit.Test; 31 | 32 | public class WordCountWithStaticTopologyTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @Rule 36 | public final TestTopologyRule testTopology = new TestTopologyRule<>(this.app.getTopology(), 37 | WordCount.getKafkaProperties()); 38 | 39 | @Test 40 | public void shouldAggregateSameWordStream() { 41 | this.testTopology.input().add("bla") 42 | .add("blub") 43 | .add("bla"); 44 | 45 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 46 | .expectNextRecord().hasKey("bla").hasValue(1L) 47 | .expectNextRecord().hasKey("blub").hasValue(1L) 48 | .expectNextRecord().hasKey("bla").hasValue(2L) 49 | .expectNoMoreRecord(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/WordCountWitherTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit4.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.Rule; 30 | import org.junit.Test; 31 | 32 | public class WordCountWitherTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @Rule 36 | public final TestTopologyRule testTopology = 37 | new TestTopologyRule<>(this.app.getTopology(), WordCount.getKafkaProperties()) 38 | .withDefaultValueSerde(Serdes.String()); 39 | 40 | @Test 41 | public void shouldAggregateSameWordStream() { 42 | this.testTopology.input().add("bla") 43 | .add("blub") 44 | .add("bla"); 45 | 46 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 47 | .expectNextRecord().hasKey("bla").hasValue(1L) 48 | .expectNextRecord().hasKey("blub").hasValue(1L) 49 | .expectNextRecord().hasKey("bla").hasValue(2L) 50 | .expectNoMoreRecord(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/test_applications/WordCount.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4.test_applications; 26 | 27 | import java.util.Arrays; 28 | import java.util.HashMap; 29 | import java.util.Map; 30 | import java.util.regex.Pattern; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.StreamsBuilder; 36 | import org.apache.kafka.streams.StreamsConfig; 37 | import org.apache.kafka.streams.Topology; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.KTable; 40 | import org.apache.kafka.streams.kstream.Produced; 41 | 42 | @Getter 43 | public class WordCount { 44 | private final String inputTopic = "wordcount-input"; 45 | 46 | private final String outputTopic = "wordcount-output"; 47 | 48 | public static Map getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 55 | return kafkaConfig; 56 | } 57 | 58 | public Topology getTopology() { 59 | final Serde stringSerde = Serdes.String(); 60 | final Serde longSerde = Serdes.Long(); 61 | 62 | final StreamsBuilder builder = new StreamsBuilder(); 63 | final KStream textLines = builder.stream(this.inputTopic); 64 | 65 | final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); 66 | final KTable wordCounts = textLines 67 | .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) 68 | .groupBy((key, word) -> word) 69 | .count(); 70 | 71 | wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde)); 72 | return builder.build(); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/build.gradle.kts: -------------------------------------------------------------------------------- 1 | description = "Provides the fluent Kafka Streams test framework." 
2 | 3 | dependencies { 4 | api(project(":fluent-kafka-streams-tests")) 5 | 6 | val junit5Version: String by project 7 | testImplementation(group = "org.junit.jupiter", name = "junit-jupiter-api", version = junit5Version) 8 | testRuntimeOnly(group = "org.junit.jupiter", name = "junit-jupiter-engine", version = junit5Version) 9 | api(group = "org.junit.jupiter", name = "junit-jupiter-api", version = junit5Version) 10 | } 11 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/lombok.config: -------------------------------------------------------------------------------- 1 | # This file is generated by the 'io.freefair.lombok' Gradle plugin 2 | config.stopBubbling = true 3 | lombok.addLombokGeneratedAnnotation = true 4 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/main/java/com/bakdata/fluent_kafka_streams_tests/junit5/TestTopologyExtension.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.TestTopology; 28 | import java.util.Map; 29 | import java.util.function.Function; 30 | import java.util.function.Supplier; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.streams.Topology; 34 | import org.junit.jupiter.api.extension.AfterEachCallback; 35 | import org.junit.jupiter.api.extension.BeforeEachCallback; 36 | import org.junit.jupiter.api.extension.ExtensionContext; 37 | 38 | /** 39 | *

Represents the main interaction with Kafka for testing purposes. Handles all inputs and outputs of the 40 | * {@link Topology} under test. This should be registered as an extension in your JUnit tests, to ensure that certain 41 | * setup and teardown methods are called.

Usage: 42 | *

 43 |  * class WordCountTest {
 44 |  *     private final WordCount app = new WordCount();
 45 |  *
 46 |  *     {@literal @RegisterExtension
 47 |  *     final TestTopologyExtension testTopology =
 48 |  *         new TestTopologyExtension<>(this.app::getTopology, this.app.getKafkaProperties());}
 49 |  *
 50 |  *     {@literal @Test}
 51 |  *     void shouldAggregateSameWordStream() {
 52 |  *         this.testTopology.input()
 53 |  *             .add("cat")
 54 |  *             .add("dog")
 55 |  *             .add("cat");
 56 |  *
 57 |  *         this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
 58 |  *             .expectNextRecord().hasKey("cat").hasValue(1L)
 59 |  *             .expectNextRecord().hasKey("dog").hasValue(1L)
 60 |  *             .expectNextRecord().hasKey("cat").hasValue(2L)
 61 |  *             .expectNoMoreRecord();
 62 |  *     }
 63 |  * }
 64 |  * 
65 | *

With {@code app} being any Kafka Streams application that you want to test.

66 | * @param Default type of keys 67 | * @param Default type of values 68 | */ 69 | @Getter 70 | public class TestTopologyExtension extends TestTopology 71 | implements BeforeEachCallback, AfterEachCallback { 72 | 73 | public TestTopologyExtension( 74 | final Function, ? extends Topology> topologyFactory, 75 | final Map properties) { 76 | super(topologyFactory, properties); 77 | } 78 | 79 | public TestTopologyExtension( 80 | final Supplier topologyFactory, final Map properties) { 81 | super(topologyFactory, properties); 82 | } 83 | 84 | public TestTopologyExtension(final Topology topology, final Map properties) { 85 | super(topology, properties); 86 | } 87 | 88 | protected TestTopologyExtension( 89 | final Function, ? extends Topology> topologyFactory, 90 | final Map userProperties, 91 | final Serde defaultKeySerde, final Serde defaultValueSerde) { 92 | super(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 93 | } 94 | 95 | @Override 96 | public void afterEach(final ExtensionContext context) { 97 | this.stop(); 98 | } 99 | 100 | @Override 101 | public void beforeEach(final ExtensionContext context) { 102 | this.start(); 103 | } 104 | 105 | @Override 106 | public TestTopologyExtension withDefaultValueSerde(final Serde defaultValueSerde) { 107 | return (TestTopologyExtension) super.withDefaultValueSerde(defaultValueSerde); 108 | } 109 | 110 | @Override 111 | public TestTopologyExtension withDefaultKeySerde(final Serde defaultKeySerde) { 112 | return (TestTopologyExtension) super.withDefaultKeySerde(defaultKeySerde); 113 | } 114 | 115 | @Override 116 | public TestTopologyExtension withDefaultSerde(final Serde defaultKeySerde, 117 | final Serde defaultValueSerde) { 118 | return (TestTopologyExtension) super.withDefaultSerde(defaultKeySerde, defaultValueSerde); 119 | } 120 | 121 | @Override 122 | protected TestTopology with( 123 | final Function, ? extends Topology> topologyFactory, 124 | final Map userProperties, final Serde defaultKeySerde, 125 | final Serde defaultValueSerde) { 126 | return new TestTopologyExtension<>(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/WordCountTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata GmbH 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import static org.assertj.core.api.Assertions.assertThat; 28 | import static org.junit.jupiter.api.Assertions.assertThrows; 29 | 30 | import com.bakdata.fluent_kafka_streams_tests.junit5.test_applications.WordCount; 31 | import java.util.Iterator; 32 | import java.util.List; 33 | import java.util.NoSuchElementException; 34 | import org.apache.kafka.clients.producer.ProducerRecord; 35 | import org.apache.kafka.common.serialization.Serdes; 36 | import org.junit.jupiter.api.Test; 37 | import org.junit.jupiter.api.extension.RegisterExtension; 38 | 39 | class WordCountTest { 40 | private final WordCount app = new WordCount(); 41 | 42 | @RegisterExtension 43 | final TestTopologyExtension testTopology = new TestTopologyExtension<>(this.app::getTopology, 44 | WordCount.getKafkaProperties()); 45 | 46 | @Test 47 | void shouldAggregateSameWordStream() { 48 | this.testTopology.input().add("bla") 49 | .add("blub") 50 | .add("bla"); 51 | 52 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 53 | .expectNextRecord().hasKey("bla").hasValue(1L) 54 | .expectNextRecord().hasKey("blub").hasValue(1L) 55 | .expectNextRecord().hasKey("bla").hasValue(2L) 56 | .expectNoMoreRecord(); 57 | } 58 | 59 | @Test 60 | void shouldAggregateSameWordTable() { 61 | this.testTopology.input().add("bla") 62 | .add("blub") 63 | .add("bla"); 64 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 65 | .expectNextRecord().hasKey("bla").hasValue(2L) 66 | .expectNextRecord().hasKey("blub").hasValue(1L) 67 | .expectNoMoreRecord(); 68 | } 69 | 70 | @Test 71 | void shouldNotAggregateDifferentWordsStream() { 72 | this.testTopology.input().add("bla") 73 | .add("foo") 74 | .add("blub"); 75 | 76 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 77 | .expectNextRecord().hasKey("bla").hasValue(1L) 78 | .expectNextRecord().hasKey("foo").hasValue(1L) 79 | .expectNextRecord().hasKey("blub").hasValue(1L) 80 | .expectNoMoreRecord(); 81 | } 82 | 83 | @Test 84 | void shouldNotAggregateDifferentWordsTable() { 85 | this.testTopology.input().add("bla") 86 | .add("foo") 87 | .add("blub"); 88 | 89 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 90 | .expectNextRecord().hasKey("bla").hasValue(1L) 91 | .expectNextRecord().hasKey("foo").hasValue(1L) 92 | .expectNextRecord().hasKey("blub").hasValue(1L) 93 | .expectNoMoreRecord(); 94 | } 95 | 96 | @Test 97 | void shouldReturnNoInputAndOutputStream() { 98 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 99 | .expectNoMoreRecord(); 100 | } 101 | 102 | @Test 103 | void shouldReturnNoInputAndOutputTable() { 104 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 105 | .expectNoMoreRecord(); 106 | } 107 | 108 | @Test 109 | void shouldAggregateSameWordOrderTable() { 110 | this.testTopology.input().add("blub") // 1 blub 111 | .add("bla") // 1 bla 112 | .add("blub") // 2 blub 113 | .add("blub") // 3 blub 114 | .add("bla") // 2 bla 115 | .add("blub") // 4 blub 116 | .add("bla") // 3 bla 117 | .add("bla") // 4 bla 118 | .add("blub") // 5 blub 119 | .add("bla") // 5 bla 120 | .add("bla") // 6 bla 121 | 
.add("bla"); // 7 bla 122 | 123 | this.testTopology.tableOutput(this.app.getOutputTopic()).withSerde(Serdes.String(), Serdes.Long()) 124 | .expectNextRecord().hasKey("blub").hasValue(5L) 125 | .expectNextRecord().hasKey("bla").hasValue(7L) 126 | .expectNoMoreRecord(); 127 | } 128 | 129 | @Test 130 | void shouldReturnSingleInputAndOutputStream() { 131 | this.testTopology.input().add("bla"); 132 | 133 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 134 | .expectNextRecord().hasKey("bla").hasValue(1L) 135 | .expectNoMoreRecord(); 136 | } 137 | 138 | @Test 139 | void shouldReturnSingleInputAndOutputTable() { 140 | this.testTopology.input().add("bla"); 141 | 142 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 143 | .expectNextRecord().hasKey("bla").hasValue(1L) 144 | .expectNoMoreRecord(); 145 | } 146 | 147 | @Test 148 | void shouldReturnCorrectIteratorStream() { 149 | this.testTopology.input().add("bla") 150 | .add("blub") 151 | .add("bla") 152 | .add("foo"); 153 | final List expected = List.of("bla", "blub", "bla", "foo"); 154 | 155 | assertThat(this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())) 156 | .extracting(ProducerRecord::key) 157 | .containsAll(expected); 158 | } 159 | 160 | @Test 161 | void shouldReturnCorrectIteratorExplicitStream() { 162 | this.testTopology.input().add("bla") 163 | .add("blub") 164 | .add("bla") 165 | .add("foo"); 166 | final List expected = List.of("bla", "blub", "bla", "foo"); 167 | 168 | assertThat(this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()).iterator()) 169 | .toIterable() 170 | .extracting(ProducerRecord::key) 171 | .containsAll(expected); 172 | } 173 | 174 | @Test 175 | void shouldReturnCorrectIteratorTable() { 176 | this.testTopology.input().add("bla") 177 | .add("blub") 178 | .add("bla") 179 | .add("foo"); 180 | final List expected = List.of("bla", "blub", "foo"); 181 | 182 | assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long())) 183 | .extracting(ProducerRecord::key) 184 | .containsAll(expected); 185 | } 186 | 187 | @Test 188 | void shouldFailCorrectIteratorStreamNoHasNextCheck() { 189 | final Iterator> output = this.testTopology.streamOutput() 190 | .withSerde(Serdes.String(), Serdes.Long()).iterator(); 191 | assertThrows(NoSuchElementException.class, output::next); 192 | } 193 | 194 | @Test 195 | void shouldFailCorrectIteratorTableNoHasNextCheck() { 196 | final Iterator> output = this.testTopology.tableOutput() 197 | .withSerde(Serdes.String(), Serdes.Long()).iterator(); 198 | assertThrows(NoSuchElementException.class, output::next); 199 | } 200 | 201 | @Test 202 | void shouldReturnCorrectIteratorExplicitTable() { 203 | this.testTopology.input().add("bla") 204 | .add("blub") 205 | .add("bla") 206 | .add("foo"); 207 | final List expected = List.of("bla", "blub", "foo"); 208 | 209 | assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()).iterator()) 210 | .toIterable() 211 | .extracting(ProducerRecord::key) 212 | .containsAll(expected); 213 | } 214 | 215 | @Test 216 | void shouldWorkOnTableToStream() { 217 | this.testTopology.input() 218 | .add("bla") 219 | .add("blub") 220 | .add("bla"); 221 | 222 | // Unnecessary conversion between table and stream to check that nothing breaks 223 | this.testTopology.streamOutput().asTable().asStream() 224 | .withSerde(Serdes.String(), Serdes.Long()) 225 | .expectNextRecord().hasKey("bla").hasValue(1L) 226 | 
.expectNextRecord().hasKey("blub").hasValue(1L) 227 | .expectNextRecord().hasKey("bla").hasValue(2L) 228 | .expectNoMoreRecord(); 229 | } 230 | 231 | @Test 232 | void singleWordShouldBePresent() { 233 | this.testTopology.input().add("bla"); 234 | this.testTopology.tableOutput().expectNextRecord().isPresent(); 235 | } 236 | 237 | @Test 238 | void shouldBeDoneAfterSingleWord() { 239 | this.testTopology.input().add("bla"); 240 | this.testTopology.tableOutput().expectNextRecord().isPresent().expectNextRecord().toBeEmpty(); 241 | } 242 | 243 | @Test 244 | void shouldDoNothingOnEmptyInput() { 245 | this.testTopology.streamOutput().expectNoMoreRecord().and().expectNoMoreRecord().toBeEmpty(); 246 | } 247 | 248 | @Test 249 | void shouldConvertStreamOutputToList() { 250 | this.testTopology.input() 251 | .add("bla") 252 | .add("blub") 253 | .add("bla"); 254 | 255 | final List> outputs = this.testTopology.streamOutput() 256 | .withSerde(Serdes.String(), Serdes.Long()) 257 | .toList(); 258 | 259 | assertThat(outputs) 260 | .extracting(ProducerRecord::key) 261 | .containsExactly("bla", "blub", "bla"); 262 | assertThat(outputs) 263 | .extracting(ProducerRecord::value) 264 | .containsExactly(1L, 1L, 2L); 265 | } 266 | 267 | @Test 268 | void shouldConvertTableOutputToList() { 269 | this.testTopology.input() 270 | .add("bla") 271 | .add("blub") 272 | .add("bla"); 273 | 274 | final List> outputs = this.testTopology.tableOutput() 275 | .withSerde(Serdes.String(), Serdes.Long()) 276 | .toList(); 277 | 278 | assertThat(outputs) 279 | .extracting(ProducerRecord::key) 280 | .containsExactly("bla", "blub"); 281 | assertThat(outputs) 282 | .extracting(ProducerRecord::value) 283 | .containsExactly(2L, 1L); 284 | } 285 | 286 | @Test 287 | void shouldConvertEmptyStreamOutputToEmptyList() { 288 | final List> outputs = this.testTopology.streamOutput() 289 | .withSerde(Serdes.String(), Serdes.Long()) 290 | .toList(); 291 | 292 | assertThat(outputs) 293 | .isInstanceOf(List.class) 294 | .isEmpty(); 295 | } 296 | 297 | @Test 298 | void shouldConvertEmptyTableOutputToEmptyList() { 299 | final List> outputs = this.testTopology.tableOutput() 300 | .withSerde(Serdes.String(), Serdes.Long()) 301 | .toList(); 302 | 303 | assertThat(outputs) 304 | .isInstanceOf(List.class) 305 | .isEmpty(); 306 | } 307 | } 308 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/WordCountWithStaticTopologyTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit5.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.Test; 30 | import org.junit.jupiter.api.extension.RegisterExtension; 31 | 32 | class WordCountWithStaticTopologyTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @RegisterExtension 36 | final TestTopologyExtension testTopology = new TestTopologyExtension<>(this.app.getTopology(), 37 | WordCount.getKafkaProperties()); 38 | 39 | @Test 40 | void shouldAggregateSameWordStream() { 41 | this.testTopology.input().add("bla") 42 | .add("blub") 43 | .add("bla"); 44 | 45 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 46 | .expectNextRecord().hasKey("bla").hasValue(1L) 47 | .expectNextRecord().hasKey("blub").hasValue(1L) 48 | .expectNextRecord().hasKey("bla").hasValue(2L) 49 | .expectNoMoreRecord(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/WordCountWitherTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit5.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.Test; 30 | import org.junit.jupiter.api.extension.RegisterExtension; 31 | 32 | class WordCountWitherTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @RegisterExtension 36 | final TestTopologyExtension testTopology = 37 | new TestTopologyExtension<>(this.app::getTopology, WordCount.getKafkaProperties()) 38 | .withDefaultValueSerde(Serdes.String()); 39 | 40 | @Test 41 | void shouldAggregateSameWordStream() { 42 | this.testTopology.input().add("bla") 43 | .add("blub") 44 | .add("bla"); 45 | 46 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 47 | .expectNextRecord().hasKey("bla").hasValue(1L) 48 | .expectNextRecord().hasKey("blub").hasValue(1L) 49 | .expectNextRecord().hasKey("bla").hasValue(2L) 50 | .expectNoMoreRecord(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/test_applications/WordCount.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5.test_applications; 26 | 27 | import java.util.Arrays; 28 | import java.util.HashMap; 29 | import java.util.Map; 30 | import java.util.regex.Pattern; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.StreamsBuilder; 36 | import org.apache.kafka.streams.StreamsConfig; 37 | import org.apache.kafka.streams.Topology; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.KTable; 40 | import org.apache.kafka.streams.kstream.Produced; 41 | 42 | @Getter 43 | public class WordCount { 44 | private final String inputTopic = "wordcount-input"; 45 | 46 | private final String outputTopic = "wordcount-output"; 47 | 48 | public static Map getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 55 | return kafkaConfig; 56 | } 57 | 58 | public Topology getTopology() { 59 | final Serde stringSerde = Serdes.String(); 60 | final Serde longSerde = Serdes.Long(); 61 | 62 | final StreamsBuilder builder = new StreamsBuilder(); 63 | final KStream textLines = builder.stream(this.inputTopic); 64 | 65 | final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); 66 | final KTable wordCounts = textLines 67 | .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) 68 | .groupBy((key, word) -> word) 69 | .count(); 70 | 71 | wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde)); 72 | return builder.build(); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("com.github.davidmc24.gradle.plugin.avro") version "1.9.1" 3 | id("com.google.protobuf") version "0.9.4" 4 | java 5 | idea // required for protobuf support in intellij 6 | } 7 | 8 | description = "Provides the fluent Kafka Streams test framework." 
9 | 10 | 11 | dependencies { 12 | api(group = "com.bakdata.kafka", name = "kafka-streams-utils", version = "1.1.0") 13 | api(group = "org.apache.kafka", name = "kafka-clients") 14 | api(group = "org.apache.kafka", name = "kafka-streams") 15 | api(group = "org.apache.kafka", name = "kafka-streams-test-utils") 16 | implementation(group = "org.jooq", name = "jool", version = "0.9.15") 17 | 18 | val junit5Version: String by project 19 | testRuntimeOnly(group = "org.junit.jupiter", name = "junit-jupiter-engine", version = junit5Version) 20 | testImplementation(group = "org.junit.jupiter", name = "junit-jupiter-api", version = junit5Version) 21 | testImplementation(group = "org.apache.avro", name = "avro", version = "1.12.0") 22 | testImplementation(group = "io.confluent", name = "kafka-streams-avro-serde") 23 | testImplementation(group = "io.confluent", name = "kafka-protobuf-provider") 24 | testImplementation(group = "io.confluent", name = "kafka-streams-protobuf-serde") 25 | testImplementation(group = "com.google.protobuf", name = "protobuf-java", version = "3.25.5") 26 | } 27 | 28 | protobuf { 29 | protoc { 30 | // The artifact spec for the Protobuf Compiler 31 | artifact = "com.google.protobuf:protoc:3.25.5" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/lombok.config: -------------------------------------------------------------------------------- 1 | # This file is generated by the 'io.freefair.lombok' Gradle plugin 2 | config.stopBubbling = true 3 | lombok.addLombokGeneratedAnnotation = true 4 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/BaseOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.kafka.Configurator; 28 | import com.bakdata.kafka.Preconfigured; 29 | import java.util.ArrayList; 30 | import java.util.List; 31 | import org.apache.kafka.clients.producer.ProducerRecord; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.streams.TestOutputTopic; 34 | import org.apache.kafka.streams.TopologyTestDriver; 35 | import org.apache.kafka.streams.test.TestRecord; 36 | 37 | abstract class BaseOutput implements TestOutput { 38 | private final TopologyTestDriver testDriver; 39 | private final TestOutputTopic testOutputTopic; 40 | private final String topic; 41 | private final Serde keySerde; 42 | private final Serde valueSerde; 43 | private final Configurator configurator; 44 | 45 | protected BaseOutput(final TopologyTestDriver testDriver, final String topic, final Serde keySerde, 46 | final Serde valueSerde, final Configurator configurator) { 47 | this.testDriver = testDriver; 48 | this.topic = topic; 49 | this.keySerde = keySerde; 50 | this.valueSerde = valueSerde; 51 | this.configurator = configurator; 52 | 53 | this.testOutputTopic = this.testDriver 54 | .createOutputTopic(this.topic, this.keySerde.deserializer(), this.valueSerde.deserializer()); 55 | } 56 | 57 | /** 58 | * Set new serde for this output.
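For instance, a minimal usage sketch (assuming a started TestTopology named testTopology wired to the word-count topology used by the tests in this repository, so keys are Strings and counts are Longs):

    testTopology.streamOutput()
            .withSerde(Serdes.String(), Serdes.Long())
            .expectNextRecord().hasKey("bla").hasValue(1L)
            .expectNoMoreRecord();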
59 | * 60 | * @param keySerde The serializer/deserializer to be used for the keys in the output. 61 | * @param valueSerde The serializer/deserializer to be used for the values in the output. 62 | */ 63 | @Override 64 | public TestOutput withSerde(final Serde keySerde, final Serde valueSerde) { 65 | return this.create(this.testDriver, this.topic, keySerde, valueSerde, this.configurator); 66 | } 67 | 68 | @Override 69 | public TestOutput configureWithSerde(final Preconfigured> keySerde, 70 | final Preconfigured> valueSerde) { 71 | return this.withSerde(this.configurator.configureForKeys(keySerde), this.configurator.configureForValues(valueSerde)); 72 | } 73 | 74 | @Override 75 | public TestOutput configureWithSerde(final Serde keySerde, final Serde valueSerde) { 76 | return this.configureWithSerde(Preconfigured.create(keySerde), Preconfigured.create(valueSerde)); 77 | } 78 | 79 | /** 80 | * Set new key serde for this output.
81 | */ 82 | @Override 83 | public TestOutput withKeySerde(final Serde keySerde) { 84 | return this.withSerde(keySerde, this.valueSerde); 85 | } 86 | 87 | @Override 88 | public TestOutput configureWithKeySerde(final Preconfigured> keySerde) { 89 | return this.withSerde(this.configurator.configureForKeys(keySerde), this.valueSerde); 90 | } 91 | 92 | @Override 93 | public TestOutput configureWithKeySerde(final Serde keySerde) { 94 | return this.configureWithKeySerde(Preconfigured.create(keySerde)); 95 | } 96 | 97 | /** 98 | * Set new value serde for this output.
99 | */ 100 | @Override 101 | public TestOutput withValueSerde(final Serde valueSerde) { 102 | return this.withSerde(this.keySerde, valueSerde); 103 | } 104 | 105 | @Override 106 | public TestOutput configureWithValueSerde(final Preconfigured> valueSerde) { 107 | return this.withSerde(this.keySerde, this.configurator.configureForValues(valueSerde)); 108 | } 109 | 110 | @Override 111 | public TestOutput configureWithValueSerde(final Serde valueSerde) { 112 | return this.configureWithValueSerde(Preconfigured.create(valueSerde)); 113 | } 114 | 115 | /** 116 | * Reads the next record and creates an {@link Expectation} for it.
117 | * 118 | * Note that calling this method by itself without chaining at least one of the {@code has*()} methods will not 119 | * check for the existence of a next record!
120 | * 121 | * @return An {@link Expectation} containing the next record from the output.
122 | */ 123 | @Override 124 | public Expectation expectNextRecord() { 125 | return new Expectation<>(this.readOneRecord(), this); 126 | } 127 | 128 | /** 129 | * Reads the next record from the output and expects it to be the end of output.
130 | * 131 | * @return An {@link Expectation} containing the next record from the output.
132 | */ 133 | @Override 134 | public Expectation expectNoMoreRecord() { 135 | return this.expectNextRecord().toBeEmpty(); 136 | } 137 | 138 | /** 139 | * Interpret the output with {@link org.apache.kafka.streams.kstream.KTable} semantics (each key only once).
140 | * Note: once the first value of the stream has been read or the iterator has been called, you cannot switch between 141 | * the output types anymore.
142 | */ 143 | @Override 144 | public TestOutput asTable() { 145 | return new TableOutput<>(this.testDriver, this.topic, this.keySerde, this.valueSerde, this.configurator); 146 | } 147 | 148 | /** 149 | * Interpret the output with {@link org.apache.kafka.streams.kstream.KStream} semantics (each key multiple times) 150 | * .
This is the default; there should usually be no need to call this method.
Note: once the first value 151 | of the stream has been read or the iterator has been called, you cannot switch between the output types 152 | anymore.
153 | */ 154 | @Override 155 | public TestOutput asStream() { 156 | return new StreamOutput<>(this.testDriver, this.topic, this.keySerde, this.valueSerde, this.configurator); 157 | } 158 | 159 | /** 160 | * Convert the output to a {@link java.util.List}. In case the current instance of this class is a 161 | * {@link StreamOutput}, the output will be converted to List with {@link org.apache.kafka.streams.kstream.KStream} 162 | * semantics (each key multiple times). In case the current instance of this class is a {@link TableOutput}, the 163 | * output will be converted to List with {@link org.apache.kafka.streams.kstream.KTable} semantics (each key only 164 | * once). 165 | * 166 | * @return A {@link java.util.List} representing the output 167 | */ 168 | @Override 169 | public List> toList() { 170 | final List> list = new ArrayList<>(); 171 | this.iterator().forEachRemaining(list::add); 172 | return list; 173 | } 174 | 175 | // ================== 176 | // Non-public methods 177 | // ================== 178 | protected ProducerRecord readFromTestDriver() { 179 | // the Expectation implementation requires null if the topic is empty but outputTopic.readRecord() throws a 180 | // NoSuchElementException. Thus, we have to check beforehand. 181 | if (this.testOutputTopic.isEmpty()) { 182 | return null; 183 | } 184 | final TestRecord testRecord = this.testOutputTopic.readRecord(); 185 | // partition is always 0, see TopologyTestDriver.PARTITION_ID 186 | return new ProducerRecord<>(this.topic, 0, testRecord.timestamp(), testRecord.key(), testRecord.value(), 187 | testRecord.getHeaders()); 188 | } 189 | 190 | 191 | protected abstract TestOutput create(TopologyTestDriver testDriver, String topic, 192 | Serde keySerde, Serde valueSerde, Configurator configurator); 193 | } 194 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/Expectation.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import java.util.Objects; 28 | import java.util.function.Consumer; 29 | import lombok.RequiredArgsConstructor; 30 | import org.apache.kafka.clients.producer.ProducerRecord; 31 | 32 | /** 33 | * Represents a single output {@link ProducerRecord} from {@link TestOutput} to be tested. 34 | * 35 | * @param the key type of the record under test 36 | * @param the value type of the record under test 37 | */ 38 | @RequiredArgsConstructor 39 | public class Expectation { 40 | private final ProducerRecord lastRecord; 41 | private final TestOutput output; 42 | 43 | /** 44 | * Asserts whether a record exists. 45 | */ 46 | public Expectation isPresent() { 47 | if (this.lastRecord == null) { 48 | throw new AssertionError("No more records found"); 49 | } 50 | return this.and(); 51 | } 52 | 53 | /** 54 | * Checks for the equality of the {@link ProducerRecord#key()} and {@code expectedKey}. 55 | * 56 | * @param expectedKey key to expect 57 | * @return the current {@code Expectation} chain 58 | */ 59 | public Expectation hasKey(final K expectedKey) { 60 | this.isPresent(); 61 | if (!Objects.equals(this.lastRecord.key(), expectedKey)) { 62 | throw new AssertionError(String.format("Record key does not match. Expected '%s' but got '%s'", expectedKey, 63 | this.lastRecord.key())); 64 | } 65 | return this.and(); 66 | } 67 | 68 | /** 69 | * Forwards {@link ProducerRecord#key()} to the provided condition in order make assertions using another 70 | * framework. 71 | * 72 | * @param requirements consumer that accepts the current record's key 73 | * @return the current {@code Expectation} chain 74 | */ 75 | public Expectation hasKeySatisfying(final Consumer requirements) { 76 | this.isPresent(); 77 | requirements.accept(this.lastRecord.key()); 78 | return this.and(); 79 | } 80 | 81 | /** 82 | * Checks for the equality of the {@link ProducerRecord#value()} and {@code expectedValue}. 83 | * @param expectedValue value to expect 84 | * @return the current {@code Expectation} chain 85 | */ 86 | public Expectation hasValue(final V expectedValue) { 87 | this.isPresent(); 88 | if (!Objects.equals(this.lastRecord.value(), expectedValue)) { 89 | throw new AssertionError( 90 | String.format("Record value does not match. Expected '%s' but got '%s'", expectedValue, 91 | this.lastRecord.value())); 92 | } 93 | return this.and(); 94 | } 95 | 96 | /** 97 | * Forwards {@link ProducerRecord#value()} to the provided condition in order make assertions using another 98 | * framework. 99 | * 100 | * @param requirements consumer that accepts the current record's value 101 | * @return the current {@code Expectation} chain 102 | */ 103 | public Expectation hasValueSatisfying(final Consumer requirements) { 104 | this.isPresent(); 105 | requirements.accept(this.lastRecord.value()); 106 | return this.and(); 107 | } 108 | 109 | /** 110 | * Concatenates calls to this Expectation. It is not necessary to call this method, but it can be seen as a more 111 | * readable alternative to simple chaining. 112 | * @return this 113 | */ 114 | public Expectation and() { 115 | return this; 116 | } 117 | 118 | /** 119 | *

Reads the next record and creates an {@code Expectation} for it.

120 | *

This is logically equivalent to {@link TestOutput#expectNextRecord()}.

121 | *

This method's main purpose is to allow chaining:

122 | *
{@code
123 |      * myOutput.expectNextRecord()
124 |      *         .expectNextRecord()
125 |      *         .expectNoMoreRecord();
126 |      * }
127 | * 128 | * @return An {@code Expectation} containing the next record from the output. 129 | */ 130 | public Expectation expectNextRecord() { 131 | return this.output.expectNextRecord(); 132 | } 133 | 134 | /** 135 | *

Reads the next record from the output and expects it to be the end of output.

136 | *

This is logically equivalent to {@link TestOutput#expectNoMoreRecord()}.

137 | *

This method's main purpose is to allow chaining:

138 | *
{@code
139 |      * myOutput.expectNextRecord()
140 |      *         .expectNextRecord()
141 |      *         .expectNoMoreRecord();
142 |      * }
143 | * 144 | * @return An {@code Expectation} containing the next record from the output. 145 | */ 146 | public Expectation expectNoMoreRecord() { 147 | return this.output.expectNoMoreRecord(); 148 | } 149 | 150 | /** 151 | *

Asserts that there are no records present, i.e., the end of the output has been reached.

152 | *

This method should be used when no records at all are expected.
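A minimal sketch (assuming a started TestTopology named testTopology whose topology has produced no output); as implemented in BaseOutput above, expectNoMoreRecord() delegates to expectNextRecord().toBeEmpty():

    testTopology.streamOutput()
            .expectNoMoreRecord();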

153 | * @return the current {@code Expectation} chain 154 | */ 155 | public Expectation toBeEmpty() { 156 | if (this.lastRecord != null) { 157 | throw new AssertionError( 158 | String.format("More records found. {key='%s', value='%s'}", this.lastRecord.key(), this.lastRecord.value())); 159 | } 160 | return this.and(); 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/StreamOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.kafka.Configurator; 28 | import java.util.Iterator; 29 | import java.util.NoSuchElementException; 30 | import lombok.NonNull; 31 | import org.apache.kafka.clients.producer.ProducerRecord; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.streams.TopologyTestDriver; 34 | 35 | /** 36 | *

Represents the {@link TestOutput} with {@link org.apache.kafka.streams.kstream.KStream} semantics.

37 | * 38 | *

Note: The StreamOutput is a one-time iterable. Cache it if you need to iterate several times.
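One way to cache it, sketched under the assumption of a TestTopology with String keys and values named testTopology:

    final List<ProducerRecord<String, String>> records = testTopology.streamOutput().toList();
    // unlike the one-time iterator, the List can be traversed any number of times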

39 | */ 40 | class StreamOutput extends BaseOutput { 41 | StreamOutput(final TopologyTestDriver testDriver, final String topic, final Serde keySerde, 42 | final Serde valueSerde, final Configurator configurator) { 43 | super(testDriver, topic, keySerde, valueSerde, configurator); 44 | } 45 | 46 | /** 47 | * Reads the next value from the output stream.
Usually, you should not need to call this. The recommended way 48 | * should be to use either 49 | *
    50 | *
  • the {@link #expectNextRecord()} and {@link #expectNoMoreRecord()} methods OR
  • 51 | *
  • the iterable interface (via {@link #iterator()}).
  • 52 | *
53 | * 54 | * @return The next value in the output stream. {@code null} if no more values are present.
55 | */ 56 | @Override 57 | public ProducerRecord readOneRecord() { 58 | return this.readFromTestDriver(); 59 | } 60 | 61 | /** 62 | * Creates an iterator of {@link ProducerRecord} for the stream output. Can only be read once.
63 | */ 64 | @Override 65 | public @NonNull Iterator> iterator() { 66 | return new Iterator>() { 67 | private ProducerRecord current = StreamOutput.this.readFromTestDriver(); 68 | 69 | @Override 70 | public boolean hasNext() { 71 | return this.current != null; 72 | } 73 | 74 | @Override 75 | public ProducerRecord next() { 76 | if (!this.hasNext()) { 77 | throw new NoSuchElementException(); 78 | } 79 | final ProducerRecord toReturn = this.current; 80 | this.current = StreamOutput.this.readFromTestDriver(); 81 | return toReturn; 82 | } 83 | }; 84 | } 85 | 86 | // ================== 87 | // Non-public methods 88 | // ================== 89 | @Override 90 | protected TestOutput create(final TopologyTestDriver testDriver, final String topic, 91 | final Serde keySerde, final Serde valueSerde, final Configurator configurator) { 92 | return new StreamOutput<>(testDriver, topic, keySerde, valueSerde, configurator); 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/TableOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.kafka.Configurator; 28 | import java.util.Iterator; 29 | import java.util.LinkedHashMap; 30 | import java.util.Map; 31 | import lombok.NonNull; 32 | import org.apache.kafka.clients.producer.ProducerRecord; 33 | import org.apache.kafka.common.serialization.Serde; 34 | import org.apache.kafka.streams.TopologyTestDriver; 35 | 36 | class TableOutput extends BaseOutput { 37 | private final Map> table = new LinkedHashMap<>(); 38 | private Iterator> tableIterator; 39 | 40 | TableOutput(final TopologyTestDriver testDriver, final String topic, final Serde keySerde, 41 | final Serde valueSerde, final Configurator configurator) { 42 | super(testDriver, topic, keySerde, valueSerde, configurator); 43 | } 44 | 45 | /** 46 | *

Reads the next value from the output stream.

47 | * Usually, you should not need to call this. The recommended way should be to use either 48 | *
    49 | *
  • the {@link #expectNextRecord()} and {@link #expectNoMoreRecord()} methods OR
  • 50 | *
  • the iterable interface (via {@link #iterator()}).
  • 51 | *
52 | * 53 | * @return The next value in the output stream. {@code null} if no more values are present. 54 | */ 55 | @Override 56 | public ProducerRecord readOneRecord() { 57 | if (this.tableIterator == null) { 58 | this.tableIterator = this.iterator(); 59 | } 60 | 61 | // Emulate testDriver, which returns null on last read 62 | return this.tableIterator.hasNext() ? this.tableIterator.next() : null; 63 | } 64 | 65 | /** 66 | * Creates an iterator of {@link ProducerRecord} for the table output. Can only be read once. 67 | */ 68 | @Override 69 | public @NonNull Iterator> iterator() { 70 | ProducerRecord producerRecord = this.readFromTestDriver(); 71 | while (producerRecord != null) { 72 | this.table.put(producerRecord.key(), producerRecord); 73 | producerRecord = this.readFromTestDriver(); 74 | } 75 | return this.table.values().stream().iterator(); 76 | } 77 | 78 | // ================== 79 | // Non-public methods 80 | // ================== 81 | @Override 82 | protected TestOutput create(final TopologyTestDriver testDriver, final String topic, 83 | final Serde keySerde, final Serde valueSerde, final Configurator configurator) { 84 | return new TableOutput<>(testDriver, topic, keySerde, valueSerde, configurator); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/TestOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.kafka.Preconfigured; 28 | import java.util.List; 29 | import org.apache.kafka.clients.producer.ProducerRecord; 30 | import org.apache.kafka.common.serialization.Serde; 31 | 32 | 33 | /** 34 | *

Represents the output stream of the tested app via the {@link TestTopology}.

35 | *

This can be used via the {@link StreamOutput} or the {@link TableOutput}, depending on the desired semantics.
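For example, for a word-count topology that has seen the key "bla" twice, stream semantics emit both updates (1 and then 2), while table semantics keep only the latest count per key. A sketch of the latter, assuming a started TestTopology named testTopology:

    testTopology.tableOutput()
            .withSerde(Serdes.String(), Serdes.Long())
            .expectNextRecord().hasKey("bla").hasValue(2L)
            .expectNoMoreRecord();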

36 | *

For more details see each implementation.

37 | * 38 | *

Note: The StreamOutput is a one-time iterable. Cache it if you need to iterate several times.

39 | * 40 | * @param the key type of the output stream 41 | * @param the value type of the output stream 42 | */ 43 | public interface TestOutput extends Iterable> { 44 | /** 45 | * Set new serde for this output. 46 | * 47 | * @param keySerde The serializer/deserializer to be used for the keys in the output 48 | * @param valueSerde The serializer/deserializer to be used for the values in the output 49 | * @return Copy of current {@code TestOutput} with provided serdes 50 | */ 51 | TestOutput withSerde(Serde keySerde, Serde valueSerde); 52 | 53 | /** 54 | * Set new serde for this output. Serdes are configured using properties of the test topology. 55 | * 56 | * @param keySerde The serializer/deserializer to be used for the keys in the output 57 | * @param valueSerde The serializer/deserializer to be used for the values in the output 58 | * @return Copy of current {@code TestOutput} with provided serdes 59 | */ 60 | TestOutput configureWithSerde(Preconfigured> keySerde, 61 | Preconfigured> valueSerde); 62 | 63 | /** 64 | * Set new serde for this output. Serdes are configured using properties of the test topology. 65 | * 66 | * @param keySerde The serializer/deserializer to be used for the keys in the output 67 | * @param valueSerde The serializer/deserializer to be used for the values in the output 68 | * @return Copy of current {@code TestOutput} with provided serdes 69 | */ 70 | TestOutput configureWithSerde(Serde keySerde, Serde valueSerde); 71 | 72 | /** 73 | * Set new key serde for this output. 74 | * 75 | * @param keySerde The serializer/deserializer to be used for the keys in the output 76 | * @return Copy of current {@code TestOutput} with provided key serde 77 | */ 78 | TestOutput withKeySerde(Serde keySerde); 79 | 80 | /** 81 | * Set new key serde for this output. Serde is configured using properties of the test topology. 82 | * 83 | * @param keySerde The serializer/deserializer to be used for the keys in the output 84 | * @return Copy of current {@code TestOutput} with provided key serde 85 | */ 86 | TestOutput configureWithKeySerde(Preconfigured> keySerde); 87 | 88 | /** 89 | * Set new key serde for this output. Serde is configured using properties of the test topology. 90 | * 91 | * @param keySerde The serializer/deserializer to be used for the keys in the output 92 | * @return Copy of current {@code TestOutput} with provided key serde 93 | */ 94 | TestOutput configureWithKeySerde(Serde keySerde); 95 | 96 | /** 97 | * Set new value serde for this output. 98 | * 99 | * @param valueSerde The serializer/deserializer to be used for the values in the output 100 | * @return Copy of current {@code TestOutput} with provided value serde 101 | */ 102 | TestOutput withValueSerde(Serde valueSerde); 103 | 104 | /** 105 | * Set new value serde for this output. Serde is configured using properties of the test topology. 106 | * 107 | * @param valueSerde The serializer/deserializer to be used for the values in the output 108 | * @return Copy of current {@code TestOutput} with provided value serde 109 | */ 110 | TestOutput configureWithValueSerde(Preconfigured> valueSerde); 111 | 112 | /** 113 | * Set new value serde for this output. Serde is configured using properties of the test topology. 114 | * 115 | * @param valueSerde The serializer/deserializer to be used for the values in the output 116 | * @return Copy of current {@code TestOutput} with provided value serde 117 | */ 118 | TestOutput configureWithValueSerde(Serde valueSerde); 119 | 120 | /** 121 | *

Type-casts the key and value to the given types.

122 | * 123 | * A type-cast is useful if you have general-purpose serde, such as Json or Avro, which is used for different types 124 | * in input and output. Thus, instead of unnecessarily overriding the serde, this method just casts the output. 125 | * 126 | * @param keyType the new key type. 127 | * @param valueType the new value type. 128 | * @return Copy of current {@code TestOutput} with provided types 129 | */ 130 | default TestOutput withTypes(final Class keyType, final Class valueType) { 131 | return (TestOutput) this; 132 | } 133 | 134 | /** 135 | *

Type-casts the key to the given type.

136 | * 137 | * A type-cast is useful if you have general-purpose serde, such as Json or Avro, which is used for different types 138 | * in input and output. Thus, instead of unnecessarily overriding the serde, this method just casts the output. 139 | * 140 | * @param keyType the new key type. 141 | * @return Copy of current {@code TestOutput} with provided key type 142 | */ 143 | default TestOutput withKeyType(final Class keyType) { 144 | return (TestOutput) this; 145 | } 146 | 147 | /** 148 | *

Type-casts the value to the given type.
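A short sketch in the spirit of the Avro test elsewhere in this repository, where the output value is type-cast to the generated City class:

    testTopology.tableOutput().withValueType(City.class)
            .expectNextRecord().hasKey("City1").hasValue(new City("City1", 2))
            .expectNoMoreRecord();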

149 | * 150 | * A type-cast is useful if you have general-purpose serde, such as Json or Avro, which is used for different types 151 | * in input and output. Thus, instead of unnecessarily overriding the serde, this method just casts the output. 152 | * 153 | * @param valueType the new value type. 154 | * @return Copy of current {@code TestOutput} with provided value type 155 | */ 156 | default TestOutput withValueType(final Class valueType) { 157 | return (TestOutput) this; 158 | } 159 | 160 | /** 161 | *

Reads the next value from the output stream.

162 | * Usually, you should not need to call this. The recommended way should be to use either 163 | *
    164 | *
  • the {@link #expectNextRecord()} and {@link #expectNoMoreRecord()} methods OR
  • 165 | *
  • the iterable interface (via {@link #iterator()}).
  • 166 | *
167 | * 168 | * @return The next value in the output stream depending on the output type (stream or table semantics). {@code 169 | * null} if no more values are present. 170 | */ 171 | ProducerRecord readOneRecord(); 172 | 173 | /** 174 | * Reads the next record and creates an {@link Expectation} for it. 175 | * 176 | * @return An {@link Expectation} containing the next record from the output. 177 | */ 178 | Expectation expectNextRecord(); 179 | 180 | /** 181 | * Reads the next record from the output and expects it to be the end of output. 182 | * 183 | * @return An {@link Expectation} containing the next record from the output. 184 | */ 185 | Expectation expectNoMoreRecord(); 186 | 187 | /** 188 | *

Interpret the output with {@link org.apache.kafka.streams.kstream.KTable} semantics (each key only once).

189 | *

Note: once the first value of the stream has been read or the iterator has been called, you cannot switch 190 | * between the output types any more.
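Sketched usage, assuming the same word-count setup as above; an existing stream output can be re-read with table semantics as long as no record has been consumed yet:

    testTopology.streamOutput()
            .asTable()
            .expectNextRecord().hasKey("bla").hasValue(2L)
            .expectNoMoreRecord();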

191 | * @return Current output with {@link org.apache.kafka.streams.kstream.KTable} semantics 192 | */ 193 | TestOutput asTable(); 194 | 195 | /** 196 | *

Interpret the output with {@link org.apache.kafka.streams.kstream.KStream} semantics (each key multiple 197 | * times).

198 | *

This is the default; there should usually be no need to call this method.

199 | *

Note: once the first value of the stream has been read or the iterator has been called, you cannot switch 200 | * between the output types any more.

201 | * 202 | * @return Current output with {@link org.apache.kafka.streams.kstream.KStream} semantics 203 | */ 204 | TestOutput asStream(); 205 | 206 | /** 207 | * Convert the output to a {@link java.util.List}. 208 | * 209 | * @return A {@link java.util.List} representing the output 210 | */ 211 | List> toList(); 212 | } 213 | 214 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/avro/City.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "City", 4 | "namespace": "com.bakdata.fluent_kafka_streams_tests.test_types", 5 | "fields": [ 6 | { 7 | "name": "name", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "inhabitants", 12 | "type": "int" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/avro/Person.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "Person", 4 | "namespace": "com.bakdata.fluent_kafka_streams_tests.test_types", 5 | "fields": [ 6 | { 7 | "name": "name", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "city", 12 | "type": "string" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/CountInhabitantsWithAvroTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.CountInhabitantsWithAvro; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 30 | import org.apache.kafka.common.serialization.Serdes; 31 | import org.junit.jupiter.api.AfterEach; 32 | import org.junit.jupiter.api.BeforeEach; 33 | import org.junit.jupiter.api.Test; 34 | 35 | 36 | class CountInhabitantsWithAvroTest { 37 | 38 | private final TestTopology testTopology = 39 | new TestTopology<>(CountInhabitantsWithAvro::getTopology, CountInhabitantsWithAvro.getKafkaProperties()); 40 | 41 | @BeforeEach 42 | void start() { 43 | this.testTopology.start(); 44 | } 45 | 46 | @AfterEach 47 | void stop() { 48 | this.testTopology.stop(); 49 | } 50 | 51 | @Test 52 | void shouldAggregateInhabitants() { 53 | this.testTopology.input() 54 | .add(new Person("Huey", "City1")) 55 | .add(new Person("Dewey", "City2")) 56 | .add(new Person("Louie", "City1")); 57 | 58 | this.testTopology.tableOutput().withValueType(City.class) 59 | .expectNextRecord().hasKey("City1").hasValue(new City("City1", 2)) 60 | .expectNextRecord().hasKey("City2").hasValue(new City("City2", 1)) 61 | .expectNoMoreRecord(); 62 | } 63 | 64 | @Test 65 | void shouldWorkForEmptyInput() { 66 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 67 | .expectNoMoreRecord(); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/CountInhabitantsWithProtoTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.CityOuterClass.City; 28 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.PersonOuterClass.Person; 29 | 30 | import com.bakdata.fluent_kafka_streams_tests.test_applications.CountInhabitantsWithProto; 31 | import org.apache.kafka.common.serialization.Serdes; 32 | import org.junit.jupiter.api.AfterEach; 33 | import org.junit.jupiter.api.BeforeEach; 34 | import org.junit.jupiter.api.Test; 35 | 36 | class CountInhabitantsWithProtoTest { 37 | 38 | private final TestTopology testTopology = 39 | new TestTopology<>(CountInhabitantsWithProto::getTopology, CountInhabitantsWithProto.getKafkaProperties()); 40 | 41 | static Person newPerson(final String name, final String city) { 42 | return Person.newBuilder().setName(name).setCity(city).build(); 43 | } 44 | 45 | static City newCity(final String name, final int inhabitants) { 46 | return City.newBuilder().setName(name).setInhabitants(inhabitants).build(); 47 | } 48 | 49 | @BeforeEach 50 | void start() { 51 | this.testTopology.start(); 52 | } 53 | 54 | @AfterEach 55 | void stop() { 56 | this.testTopology.stop(); 57 | } 58 | 59 | @Test 60 | void shouldAggregateInhabitants() { 61 | this.testTopology.input() 62 | .withValueSerde(CountInhabitantsWithProto.newPersonSerde()) 63 | .add("test", newPerson("Huey", "City1")) 64 | .add("test", newPerson("Dewey", "City2")) 65 | .add("test", newPerson("Louie", "City1")); 66 | 67 | this.testTopology.tableOutput().withValueSerde(CountInhabitantsWithProto.newCitySerde()) 68 | .expectNextRecord().hasKey("City1").hasValue(newCity("City1", 2)) 69 | .expectNextRecord().hasKey("City2").hasValue(newCity("City2", 1)) 70 | .expectNoMoreRecord(); 71 | } 72 | 73 | @Test 74 | void shouldWorkForEmptyInput() { 75 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 76 | .expectNoMoreRecord(); 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/DynamicTopicTest.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests; 2 | 3 | import static org.assertj.core.api.Assertions.assertThatExceptionOfType; 4 | 5 | import com.bakdata.fluent_kafka_streams_tests.test_applications.TopicExtractorApplication; 6 | import java.util.NoSuchElementException; 7 | import org.junit.jupiter.api.AfterEach; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import org.junit.jupiter.api.Test; 10 | 11 | class DynamicTopicTest { 12 | 13 | private static final String KEY = "key"; 14 | private static final String VALUE = "value"; 15 | private final TestTopology testTopology = 16 | new TestTopology<>(TopicExtractorApplication::getTopology, TopicExtractorApplication.getProperties()); 17 | 18 | @BeforeEach 19 | void start() { 20 | this.testTopology.start(); 21 | this.testTopology.input().add(KEY, VALUE); 22 | this.testTopology.getOutputTopics().add(TopicExtractorApplication.OUTPUT_TOPIC); 23 | } 24 | 25 | @AfterEach 26 | void stop() { 27 | this.testTopology.stop(); 28 | } 29 | 30 | @Test 31 | void shouldHaveOutputForTopicName() { 32 | this.testTopology.streamOutput(TopicExtractorApplication.OUTPUT_TOPIC) 33 | .expectNextRecord() 34 | .hasKey(KEY).and().hasValue(VALUE); 35 | } 36 | 37 | @Test 38 | void shouldHaveOutputWithoutTopicName() { 39 | 
this.testTopology.streamOutput() 40 | .expectNextRecord() 41 | .hasKey(KEY).and().hasValue(VALUE); 42 | } 43 | 44 | @Test 45 | void shouldThrowExceptionForNonExistingStreamOutputTopic() { 46 | assertThatExceptionOfType(NoSuchElementException.class) 47 | .isThrownBy(() -> this.testTopology.streamOutput("non-existing")) 48 | .withMessage("Output topic 'non-existing' not found"); 49 | } 50 | 51 | @Test 52 | void shouldThrowExceptionForNonExistingTableOutputTopic() { 53 | assertThatExceptionOfType(NoSuchElementException.class) 54 | .isThrownBy(() -> this.testTopology.tableOutput("non-existing")) 55 | .withMessage("Output topic 'non-existing' not found"); 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/ForeignKeyJoinTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.ForeignKeyJoin; 28 | import org.junit.jupiter.api.AfterEach; 29 | import org.junit.jupiter.api.BeforeEach; 30 | import org.junit.jupiter.api.Test; 31 | 32 | class ForeignKeyJoinTest { 33 | 34 | private final TestTopology testTopology = 35 | new TestTopology<>(ForeignKeyJoin::getTopology, ForeignKeyJoin.getKafkaProperties()); 36 | 37 | @BeforeEach 38 | void start() { 39 | this.testTopology.start(); 40 | } 41 | 42 | @AfterEach 43 | void stop() { 44 | this.testTopology.stop(); 45 | } 46 | 47 | @Test 48 | void shouldIgnoreForeignKeyTopicsAsOutput() { 49 | this.testTopology.input(ForeignKeyJoin.LEFT_INPUT_TOPIC) 50 | .add("foo", "bar"); 51 | 52 | this.testTopology.input(ForeignKeyJoin.RIGHT_INPUT_TOPIC) 53 | .add("bar", "baz"); 54 | 55 | this.testTopology.streamOutput() 56 | .expectNextRecord().hasKey("foo").hasValue("barbaz") 57 | .expectNoMoreRecord(); 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/HeaderTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static org.assertj.core.api.Assertions.assertThat; 28 | 29 | import com.bakdata.fluent_kafka_streams_tests.test_applications.Mirror; 30 | import com.google.common.collect.Lists; 31 | import java.util.List; 32 | import org.apache.kafka.clients.producer.ProducerRecord; 33 | import org.apache.kafka.common.header.internals.RecordHeaders; 34 | import org.junit.jupiter.api.AfterEach; 35 | import org.junit.jupiter.api.BeforeEach; 36 | import org.junit.jupiter.api.Test; 37 | 38 | class HeaderTest { 39 | private final Mirror app = new Mirror(); 40 | 41 | private final TestTopology testTopology = 42 | new TestTopology<>(this.app::getTopology, Mirror.getKafkaProperties()); 43 | 44 | @BeforeEach 45 | void start() { 46 | this.testTopology.start(); 47 | } 48 | 49 | @AfterEach 50 | void stop() { 51 | this.testTopology.stop(); 52 | } 53 | 54 | @Test 55 | void shouldAddHeaders() { 56 | this.testTopology.input() 57 | .add("key1", "value1", new RecordHeaders() 58 | .add("header1", new byte[]{0})) 59 | .add("key2", "value2", 1L, new RecordHeaders() 60 | .add("header1", new byte[]{1}) 61 | .add("header2", new byte[]{2, 3})); 62 | 63 | final List> records = Lists.newArrayList(this.testTopology.streamOutput()); 64 | assertThat(records) 65 | .hasSize(2) 66 | .anySatisfy(producerRecord -> { 67 | assertThat(producerRecord.key()).isEqualTo("key1"); 68 | assertThat(producerRecord.value()).isEqualTo("value1"); 69 | assertThat(producerRecord.timestamp()).isZero(); 70 | assertThat(producerRecord.headers().toArray()) 71 | .hasSize(1) 72 | .anySatisfy(header -> { 73 | assertThat(header.key()).isEqualTo("header1"); 74 | assertThat(header.value()).isEqualTo(new byte[]{0}); 75 | }); 76 | }) 77 | .anySatisfy(producerRecord -> { 78 | assertThat(producerRecord.key()).isEqualTo("key2"); 79 | assertThat(producerRecord.value()).isEqualTo("value2"); 80 | assertThat(producerRecord.timestamp()).isEqualTo(1L); 81 | assertThat(producerRecord.headers().toArray()) 82 | .hasSize(2) 83 | .anySatisfy(header -> { 84 | assertThat(header.key()).isEqualTo("header1"); 85 | assertThat(header.value()).isEqualTo(new byte[]{1}); 86 | }) 87 | .anySatisfy(header -> { 88 | assertThat(header.key()).isEqualTo("header2"); 89 | assertThat(header.value()).isEqualTo(new byte[]{2, 3}); 90 | }); 91 | }); 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/MirrorAvroNonDefaultSerdeTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 
15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvroNonDefaultSerde.getKeySerde; 28 | import static com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvroNonDefaultSerde.getValueSerde; 29 | 30 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvroNonDefaultSerde; 31 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 32 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 33 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 34 | import org.junit.jupiter.api.AfterEach; 35 | import org.junit.jupiter.api.BeforeEach; 36 | import org.junit.jupiter.api.Test; 37 | 38 | 39 | class MirrorAvroNonDefaultSerdeTest { 40 | 41 | private final TestTopology testTopology = 42 | new TestTopology<>(MirrorAvroNonDefaultSerde::getTopology, MirrorAvroNonDefaultSerde.getKafkaProperties()); 43 | 44 | @BeforeEach 45 | void start() { 46 | this.testTopology.start(); 47 | } 48 | 49 | @AfterEach 50 | void stop() { 51 | this.testTopology.stop(); 52 | } 53 | 54 | @Test 55 | void shouldConfigurePreconfiguredSerdes() { 56 | this.testTopology.input() 57 | .configureWithSerde(getKeySerde(), getValueSerde()) 58 | .add(new City("City1", 2), new Person("Huey", "City1")); 59 | 60 | this.testTopology.streamOutput() 61 | .configureWithSerde(getKeySerde(), getValueSerde()) 62 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 63 | .expectNoMoreRecord(); 64 | } 65 | 66 | @Test 67 | void shouldConfigureSerdes() { 68 | this.testTopology.input() 69 | .configureWithSerde(new SpecificAvroSerde<>(), new SpecificAvroSerde<>()) 70 | .add(new City("City1", 2), new Person("Huey", "City1")); 71 | 72 | this.testTopology.streamOutput() 73 | .configureWithSerde(new SpecificAvroSerde<>(), new SpecificAvroSerde<>()) 74 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 75 | .expectNoMoreRecord(); 76 | } 77 | 78 | @Test 79 | void shouldConfigurePreconfiguredKeyAndValueSerdes() { 80 | this.testTopology.input() 81 | .configureWithKeySerde(getKeySerde()) 82 | .configureWithValueSerde(getValueSerde()) 83 | .add(new City("City1", 2), new Person("Huey", "City1")); 84 | 85 | this.testTopology.streamOutput() 86 | .configureWithKeySerde(getKeySerde()) 87 | .configureWithValueSerde(getValueSerde()) 88 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 89 | .expectNoMoreRecord(); 90 | } 91 | 92 | @Test 93 | void shouldConfigureKeyAndValueSerdes() { 94 | this.testTopology.input() 95 | .configureWithKeySerde(new SpecificAvroSerde<>()) 96 | .configureWithValueSerde(new SpecificAvroSerde<>()) 97 | .add(new City("City1", 2), new Person("Huey", "City1")); 98 | 99 | this.testTopology.streamOutput() 100 | .configureWithKeySerde(new SpecificAvroSerde<>()) 101 | .configureWithValueSerde(new SpecificAvroSerde<>()) 102 | .expectNextRecord().hasKey(new City("City1", 
2)).hasValue(new Person("Huey", "City1")) 103 | .expectNoMoreRecord(); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/MirrorPatternTest.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests; 2 | 3 | import static org.assertj.core.api.Assertions.assertThatExceptionOfType; 4 | 5 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorPattern; 6 | import java.util.NoSuchElementException; 7 | import org.junit.jupiter.api.AfterEach; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import org.junit.jupiter.api.Test; 10 | 11 | class MirrorPatternTest { 12 | private final MirrorPattern app = new MirrorPattern(); 13 | 14 | private final TestTopology testTopology = new TestTopology<>(this.app::getTopology, 15 | MirrorPattern.getKafkaProperties()); 16 | 17 | @BeforeEach 18 | void start() { 19 | this.testTopology.start(); 20 | } 21 | 22 | @AfterEach 23 | void stop() { 24 | this.testTopology.stop(); 25 | } 26 | 27 | @Test 28 | void shouldConsumeFromPattern() { 29 | this.testTopology.input("example-input1") 30 | .add("key1", "value1") 31 | .add("key2", "value2"); 32 | this.testTopology.input("another-input1") 33 | .add("key3", "value3"); 34 | this.testTopology.input("example-input2") 35 | .add("key4", "value4"); 36 | 37 | this.testTopology.streamOutput() 38 | .expectNextRecord().hasKey("key1").hasValue("value1") 39 | .expectNextRecord().hasKey("key2").hasValue("value2") 40 | .expectNextRecord().hasKey("key3").hasValue("value3") 41 | .expectNextRecord().hasKey("key4").hasValue("value4") 42 | .expectNoMoreRecord(); 43 | } 44 | 45 | @Test 46 | void shouldThrowIfInputDoesNotMatchPattern() { 47 | assertThatExceptionOfType(NoSuchElementException.class) 48 | .isThrownBy(() -> this.testTopology.input("not-matching")); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/MirrorPatternTopicMixedTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static org.assertj.core.api.Assertions.assertThatExceptionOfType; 28 | 29 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorPatternTopicMixed; 30 | import java.util.NoSuchElementException; 31 | import org.junit.jupiter.api.AfterEach; 32 | import org.junit.jupiter.api.BeforeEach; 33 | import org.junit.jupiter.api.Test; 34 | 35 | class MirrorPatternTopicMixedTest { 36 | private final MirrorPatternTopicMixed app = new MirrorPatternTopicMixed(); 37 | 38 | private final TestTopology testTopology = new TestTopology<>(this.app::getTopology, 39 | MirrorPatternTopicMixed.getKafkaProperties()); 40 | 41 | @BeforeEach 42 | void start() { 43 | this.testTopology.start(); 44 | } 45 | 46 | @AfterEach 47 | void stop() { 48 | this.testTopology.stop(); 49 | } 50 | 51 | @Test 52 | void shouldConsumeFromPattern() { 53 | this.testTopology.input("example-input1") 54 | .add("key1", "value1") 55 | .add("key2", "value2"); 56 | this.testTopology.input("another-input1") 57 | .add("key3", "value3"); 58 | this.testTopology.input("input2") 59 | .add("key4", "value4"); 60 | 61 | this.testTopology.streamOutput() 62 | .expectNextRecord().hasKey("key1").hasValue("value1") 63 | .expectNextRecord().hasKey("key2").hasValue("value2") 64 | .expectNextRecord().hasKey("key3").hasValue("value3") 65 | .expectNextRecord().hasKey("key4").hasValue("value4") 66 | .expectNoMoreRecord(); 67 | } 68 | 69 | @Test 70 | void shouldThrowIfInputDoesNotMatchPattern() { 71 | assertThatExceptionOfType(NoSuchElementException.class) 72 | .isThrownBy(() -> this.testTopology.input("not-matching")); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/NameJoinTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.NameJoinGlobalKTable; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 29 | import org.apache.kafka.common.serialization.Serdes; 30 | import org.junit.jupiter.api.AfterEach; 31 | import org.junit.jupiter.api.BeforeEach; 32 | import org.junit.jupiter.api.Test; 33 | 34 | class NameJoinTest { 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(NameJoinGlobalKTable::getTopology, NameJoinGlobalKTable.getKafkaProperties()); 38 | 39 | @BeforeEach 40 | void start() { 41 | this.testTopology.start(); 42 | } 43 | 44 | @AfterEach 45 | void stop() { 46 | this.testTopology.stop(); 47 | } 48 | 49 | @Test 50 | void testTopology() { 51 | this.testTopology.input(NameJoinGlobalKTable.NAME_INPUT).withSerde(Serdes.Long(), Serdes.String()) 52 | .add(1L, "Robinson") 53 | .add(2L, "Walker"); 54 | 55 | this.testTopology.input(NameJoinGlobalKTable.INPUT_TOPIC).withSerde(Serdes.Long(), Serdes.Long()) 56 | .add(1L, 1L) 57 | .add(2L, 2L); 58 | 59 | this.testTopology.streamOutput(NameJoinGlobalKTable.OUTPUT_TOPIC).withSerde(Serdes.Long(), Serdes.String()) 60 | .expectNextRecord().hasKey(1L).hasValue("Robinson") 61 | .expectNextRecord().hasKey(2L).hasValue("Walker") 62 | .expectNoMoreRecord(); 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/NameJoinWithIntermediateTopicTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.NameJoinGlobalKTable; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 29 | import org.apache.kafka.common.serialization.Serdes; 30 | import org.junit.jupiter.api.AfterEach; 31 | import org.junit.jupiter.api.BeforeEach; 32 | import org.junit.jupiter.api.Test; 33 | 34 | class NameJoinWithIntermediateTopicTest { 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(NameJoinGlobalKTable::getTopologyWithIntermediateTopic, 38 | NameJoinGlobalKTable.getKafkaProperties()); 39 | 40 | @BeforeEach 41 | void start() { 42 | this.testTopology.start(); 43 | } 44 | 45 | @AfterEach 46 | void stop() { 47 | this.testTopology.stop(); 48 | } 49 | 50 | @Test 51 | void testTopology() { 52 | this.testTopology.input(NameJoinGlobalKTable.NAME_INPUT).withSerde(Serdes.Long(), Serdes.String()) 53 | .add(1L, "Robinson") 54 | .add(2L, "Walker"); 55 | 56 | this.testTopology.input(NameJoinGlobalKTable.INPUT_TOPIC).withSerde(Serdes.Long(), Serdes.Long()) 57 | .add(1L, 1L) 58 | .add(2L, 2L); 59 | 60 | this.testTopology.streamOutput(NameJoinGlobalKTable.OUTPUT_TOPIC).withSerde(Serdes.Long(), Serdes.String()) 61 | .expectNextRecord().hasKey(1L).hasValue("ROBINSON") 62 | .expectNextRecord().hasKey(2L).hasValue("WALKER") 63 | .expectNoMoreRecord(); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/TestInputAndOutputTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2023 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvro; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 30 | import org.junit.jupiter.api.AfterEach; 31 | import org.junit.jupiter.api.BeforeEach; 32 | import org.junit.jupiter.api.Test; 33 | 34 | 35 | class TestInputAndOutputTest { 36 | 37 | private final TestTopology testTopology = 38 | new TestTopology<>(MirrorAvro::getTopology, MirrorAvro.getKafkaProperties()); 39 | 40 | @BeforeEach 41 | void start() { 42 | this.testTopology.start(); 43 | } 44 | 45 | @AfterEach 46 | void stop() { 47 | this.testTopology.stop(); 48 | } 49 | 50 | @Test 51 | void shouldUseTypes() { 52 | this.testTopology.input() 53 | .withTypes(City.class, Person.class) 54 | .add(new City("City1", 2), new Person("Huey", "City1")); 55 | 56 | this.testTopology.streamOutput() 57 | .withTypes(City.class, Person.class) 58 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 59 | .expectNoMoreRecord(); 60 | } 61 | 62 | @Test 63 | void shouldUseValueTypes() { 64 | this.testTopology.input() 65 | .withValueType(Person.class) 66 | .add(new Person("Huey", "City1"), new Person("Huey", "City1")); 67 | 68 | this.testTopology.streamOutput() 69 | .withValueType(Person.class) 70 | .expectNextRecord().hasKey(new Person("Huey", "City1")).hasValue(new Person("Huey", "City1")) 71 | .expectNoMoreRecord(); 72 | } 73 | 74 | @Test 75 | void shouldUseKeyTypes() { 76 | this.testTopology.input() 77 | .withKeyType(City.class) 78 | .add(new City("City1", 2), new City("City1", 2)); 79 | 80 | this.testTopology.streamOutput() 81 | .withKeyType(City.class) 82 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new City("City1", 2)) 83 | .expectNoMoreRecord(); 84 | } 85 | 86 | @Test 87 | void shouldVerifyNullKeys() { 88 | this.testTopology.input() 89 | .add(null, new City("City1", 2)); 90 | 91 | this.testTopology.streamOutput() 92 | .expectNextRecord().hasKey(null).hasValue(new City("City1", 2)) 93 | .expectNoMoreRecord(); 94 | } 95 | 96 | @Test 97 | void shouldVerifyNullValues() { 98 | this.testTopology.input() 99 | .add(new Person("Huey", "City1"), null); 100 | 101 | this.testTopology.streamOutput() 102 | .expectNextRecord().hasKey(new Person("Huey", "City1")).hasValue(null) 103 | .expectNoMoreRecord(); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/TestTopologyTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 
15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static org.assertj.core.api.Assertions.assertThatCode; 28 | 29 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvro; 30 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 31 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 32 | import java.util.Map; 33 | import org.junit.jupiter.api.Test; 34 | 35 | 36 | class TestTopologyTest { 37 | 38 | @Test 39 | void shouldUseImmutableProperties() { 40 | final TestTopology testTopology = 41 | new TestTopology<>(MirrorAvro::getTopology, Map.copyOf(MirrorAvro.getKafkaProperties())); 42 | assertThatCode(testTopology::start).doesNotThrowAnyException(); 43 | testTopology.stop(); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/UserClicksPerMinuteTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.serde.JsonSerde; 28 | import com.bakdata.fluent_kafka_streams_tests.test_applications.UserClicksPerMinute; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickEvent; 30 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickOutput; 31 | import java.util.concurrent.TimeUnit; 32 | import org.apache.kafka.common.serialization.Serdes; 33 | import org.junit.jupiter.api.AfterEach; 34 | import org.junit.jupiter.api.BeforeEach; 35 | import org.junit.jupiter.api.Test; 36 | 37 | class UserClicksPerMinuteTest { 38 | private static final int USER = 1; 39 | private static final int USER1 = 1; 40 | private static final int USER2 = 2; 41 | 42 | private final TestTopology testTopology = 43 | new TestTopology<>(UserClicksPerMinute::getTopology, UserClicksPerMinute.getKafkaProperties()); 44 | 45 | @BeforeEach 46 | void start() { 47 | this.testTopology.start(); 48 | } 49 | 50 | @AfterEach 51 | void stop() { 52 | this.testTopology.stop(); 53 | } 54 | 55 | @Test 56 | void shouldCountSingleUserSingleEventCorrectlyStream() { 57 | final long time = TimeUnit.MINUTES.toMillis(1); 58 | this.testTopology.input().at(time).add(USER, new ClickEvent(USER)); 59 | 60 | this.testTopology.streamOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 61 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time)) 62 | .expectNoMoreRecord(); 63 | } 64 | 65 | @Test 66 | void shouldCountSingleUserSingleEventCorrectlyTable() { 67 | final long time = TimeUnit.MINUTES.toMillis(1); 68 | this.testTopology.input().at(time).add(USER, new ClickEvent(USER)); 69 | 70 | this.testTopology.tableOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 71 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time)) 72 | .expectNoMoreRecord(); 73 | } 74 | 75 | @Test 76 | void shouldCountSingleUserSingleEventCorrectlyExplicitTime() { 77 | this.testTopology.input().at(1, TimeUnit.HOURS).add(USER, new ClickEvent(USER)); 78 | 79 | this.testTopology.tableOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 80 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, TimeUnit.HOURS.toMillis(1))) 81 | .expectNoMoreRecord(); 82 | } 83 | 84 | @Test 85 | void shouldCountSingleUserSingleEventCorrectlyExplicitTimeWithoutAt() { 86 | final long time = TimeUnit.MINUTES.toMillis(1); 87 | this.testTopology.input() 88 | .add(USER, new ClickEvent(USER), time) 89 | .add(USER, new ClickEvent(USER), time); 90 | 91 | this.testTopology.tableOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 92 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 2L, time)) 93 | .expectNoMoreRecord(); 94 | } 95 | 96 | @Test 97 | void shouldCountSingleUserMultipleEventCorrectly() { 98 | // Window timestamps 99 | final long time1 = TimeUnit.MINUTES.toMillis(1); 100 | final long time2 = time1 + TimeUnit.MINUTES.toMillis(1); 101 | 102 | this.testTopology.input() 103 | .at(time1).add(USER, new ClickEvent(USER)) 104 | .at(time1 + 10).add(USER, new ClickEvent(USER)) 105 | .at(time1 + 20).add(USER, new ClickEvent(USER)) 106 | .at(time2).add(USER, new ClickEvent(USER)); 107 | 108 | this.testTopology.streamOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 109 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time1)) 110 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 2L, time1)) 111 | 
.expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 3L, time1)) 112 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time2)) 113 | .expectNoMoreRecord(); 114 | } 115 | 116 | @Test 117 | void shouldCountMultiUserMultipleEventCorrectly() { 118 | // Window timestamps 119 | final long time1 = TimeUnit.MINUTES.toMillis(1); 120 | final long time2 = time1 + TimeUnit.MINUTES.toMillis(1); 121 | 122 | this.testTopology.input() 123 | // First window 124 | .at(time1).add(new ClickEvent(USER1).getUserId(), new ClickEvent(USER1)) 125 | .at(time1 + 10).add(new ClickEvent(USER2).getUserId(), new ClickEvent(USER2)) 126 | .at(time1 + 20).add(new ClickEvent(USER1).getUserId(), new ClickEvent(USER1)) 127 | // Second window 128 | .at(time2).add(new ClickEvent(USER2).getUserId(), new ClickEvent(USER2)) 129 | .at(time2 + 10).add(new ClickEvent(USER1).getUserId(), new ClickEvent(USER1)) 130 | .at(time2 + 20).add(new ClickEvent(USER2).getUserId(), new ClickEvent(USER2)); 131 | 132 | this.testTopology.streamOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 133 | .expectNextRecord().hasKey(USER1).hasValue(new ClickOutput(USER1, 1L, time1)) 134 | .expectNextRecord().hasKey(USER2).hasValue(new ClickOutput(USER2, 1L, time1)) 135 | .expectNextRecord().hasKey(USER1).hasValue(new ClickOutput(USER1, 2L, time1)) 136 | 137 | .expectNextRecord().hasKey(USER2).hasValue(new ClickOutput(USER2, 1L, time2)) 138 | .expectNextRecord().hasKey(USER1).hasValue(new ClickOutput(USER1, 1L, time2)) 139 | .expectNextRecord().hasKey(USER2).hasValue(new ClickOutput(USER2, 2L, time2)) 140 | .expectNoMoreRecord(); 141 | } 142 | 143 | @Test 144 | void shouldWorkWithExplicitKeySerdes() { 145 | final long time = TimeUnit.MINUTES.toMillis(1); 146 | this.testTopology.input().withKeySerde(Serdes.Integer()) 147 | .at(time).add(USER, new ClickEvent(USER)); 148 | 149 | this.testTopology.streamOutput() 150 | .withKeySerde(Serdes.Integer()) 151 | .withValueSerde(new JsonSerde<>(ClickOutput.class)) 152 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1, time)); 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/WordCountWithDefaultSerdeTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.AfterEach; 30 | import org.junit.jupiter.api.BeforeEach; 31 | import org.junit.jupiter.api.Test; 32 | 33 | class WordCountWithDefaultSerdeTest { 34 | private final WordCount app = new WordCount(); 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(this.app.getTopology(), WordCount.getKafkaProperties()) 38 | .withDefaultValueSerde(Serdes.String()); 39 | 40 | @BeforeEach 41 | void start() { 42 | this.testTopology.start(); 43 | } 44 | 45 | @AfterEach 46 | void stop() { 47 | this.testTopology.stop(); 48 | } 49 | 50 | @Test 51 | void shouldAggregateSameWordStream() { 52 | this.testTopology.input().add("bla") 53 | .add("blub") 54 | .add("bla"); 55 | 56 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 57 | .expectNextRecord().hasKey("bla").hasValue(1L) 58 | .expectNextRecord().hasKey("blub").hasValue(1L) 59 | .expectNextRecord().hasKey("bla").hasValue(2L) 60 | .expectNoMoreRecord(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/WordCountWithStaticTopologyTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.AfterEach; 30 | import org.junit.jupiter.api.BeforeEach; 31 | import org.junit.jupiter.api.Test; 32 | 33 | class WordCountWithStaticTopologyTest { 34 | private final WordCount app = new WordCount(); 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(this.app::getTopology, WordCount.getKafkaProperties()); 38 | 39 | @BeforeEach 40 | void start() { 41 | this.testTopology.start(); 42 | } 43 | 44 | @AfterEach 45 | void stop() { 46 | this.testTopology.stop(); 47 | } 48 | 49 | @Test 50 | void shouldAggregateSameWordStream() { 51 | this.testTopology.input().add("bla") 52 | .add("blub") 53 | .add("bla"); 54 | 55 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 56 | .expectNextRecord().hasKey("bla").hasValue(1L) 57 | .expectNextRecord().hasKey("blub").hasValue(1L) 58 | .expectNextRecord().hasKey("bla").hasValue(2L) 59 | .expectNoMoreRecord(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/serde/JsonDeserializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2023 bakdata GmbH 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.serde; 26 | 27 | import com.fasterxml.jackson.databind.ObjectMapper; 28 | import java.io.IOException; 29 | import java.util.Map; 30 | import org.apache.kafka.common.errors.SerializationException; 31 | import org.apache.kafka.common.serialization.Deserializer; 32 | 33 | public class JsonDeserializer<T> implements Deserializer<T> { 34 | private final ObjectMapper objectMapper = new ObjectMapper(); 35 | private final Class<T> clazz; 36 | 37 | public JsonDeserializer(final Class<T> clazz) { 38 | this.clazz = clazz; 39 | } 40 | 41 | @Override 42 | public void configure(final Map<String, ?> props, final boolean isKey) { 43 | // nothing to configure 44 | } 45 | 46 | @Override 47 | public T deserialize(final String topic, final byte[] bytes) { 48 | if (bytes == null) 49 | return null; 50 | 51 | try { 52 | return this.objectMapper.readValue(bytes, this.clazz); 53 | } catch (final IOException e) { 54 | throw new SerializationException(e); 55 | } 56 | } 57 | 58 | @Override 59 | public void close() { 60 | // nothing to close 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/serde/JsonSerde.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.serde; 2 | 3 | import lombok.experimental.Delegate; 4 | import org.apache.kafka.common.serialization.Serde; 5 | import org.apache.kafka.common.serialization.Serdes; 6 | 7 | public class JsonSerde<T> implements Serde<T> { 8 | @Delegate 9 | private final Serde<T> inner; 10 | 11 | public JsonSerde(final Class<T> clazz) { 12 | this.inner = Serdes.serdeFrom(new JsonSerializer<>(), new JsonDeserializer<>(clazz)); 13 | } 14 | 15 | public JsonSerde() { 16 | this((Class<T>) Object.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/serde/JsonSerializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2023 bakdata GmbH 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE.
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.serde; 26 | 27 | import com.fasterxml.jackson.databind.ObjectMapper; 28 | import java.io.IOException; 29 | import java.util.Map; 30 | import lombok.NoArgsConstructor; 31 | import org.apache.kafka.common.errors.SerializationException; 32 | import org.apache.kafka.common.serialization.Serializer; 33 | 34 | @NoArgsConstructor 35 | public class JsonSerializer<T> implements Serializer<T> { 36 | private final ObjectMapper objectMapper = new ObjectMapper(); 37 | 38 | @Override 39 | public void configure(final Map<String, ?> props, final boolean isKey) { 40 | // nothing to configure 41 | } 42 | 43 | @Override 44 | public byte[] serialize(final String topic, final T data) { 45 | if (data == null) 46 | return null; 47 | 48 | try { 49 | return this.objectMapper.writeValueAsBytes(data); 50 | } catch (final IOException e) { 51 | throw new SerializationException("Error serializing JSON message", e); 52 | } 53 | } 54 | 55 | @Override 56 | public void close() { 57 | // nothing to close 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/CountInhabitantsWithAvro.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE.
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 29 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 30 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 31 | import java.util.HashMap; 32 | import java.util.Map; 33 | import lombok.experimental.UtilityClass; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.KeyValue; 36 | import org.apache.kafka.streams.StreamsBuilder; 37 | import org.apache.kafka.streams.StreamsConfig; 38 | import org.apache.kafka.streams.Topology; 39 | import org.apache.kafka.streams.kstream.KStream; 40 | import org.apache.kafka.streams.kstream.KTable; 41 | 42 | @UtilityClass 43 | public class CountInhabitantsWithAvro { 44 | 45 | private static final String INPUT_TOPIC = "person-input"; 46 | private static final String OUTPUT_TOPIC = "city-output"; 47 | private static final String SCHEMA_REGISTRY_URL = "mock://"; 48 | 49 | public static Map<String, Object> getKafkaProperties() { 50 | final String brokers = "localhost:9092"; 51 | final Map<String, Object> kafkaConfig = new HashMap<>(); 52 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "inhabitants-per-city"); 53 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); 56 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 57 | return kafkaConfig; 58 | } 59 | 60 | public static Topology getTopology() { 61 | final StreamsBuilder builder = new StreamsBuilder(); 62 | final KStream<String, Person> persons = builder.stream(INPUT_TOPIC); 63 | 64 | final KTable<String, Long> counts = persons 65 | .groupBy((name, person) -> person.getCity()) 66 | .count(); 67 | 68 | counts.toStream() 69 | .map((cityName, count) -> KeyValue.pair(cityName, new City(cityName, Math.toIntExact(count)))) 70 | .to(OUTPUT_TOPIC); 71 | 72 | return builder.build(); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/CountInhabitantsWithProto.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.CityOuterClass.City; 28 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.PersonOuterClass.Person; 29 | 30 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 31 | import io.confluent.kafka.streams.serdes.protobuf.KafkaProtobufSerde; 32 | import java.util.HashMap; 33 | import java.util.Map; 34 | import lombok.experimental.UtilityClass; 35 | import org.apache.kafka.common.serialization.Serde; 36 | import org.apache.kafka.common.serialization.Serdes; 37 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 38 | import org.apache.kafka.streams.KeyValue; 39 | import org.apache.kafka.streams.StreamsBuilder; 40 | import org.apache.kafka.streams.StreamsConfig; 41 | import org.apache.kafka.streams.Topology; 42 | import org.apache.kafka.streams.kstream.Consumed; 43 | import org.apache.kafka.streams.kstream.Grouped; 44 | import org.apache.kafka.streams.kstream.KStream; 45 | import org.apache.kafka.streams.kstream.KTable; 46 | import org.apache.kafka.streams.kstream.Materialized; 47 | import org.apache.kafka.streams.kstream.Produced; 48 | 49 | @UtilityClass 50 | public class CountInhabitantsWithProto { 51 | private static final String INPUT_TOPIC = "person-input"; 52 | private static final String OUTPUT_TOPIC = "city-output"; 53 | private static final String SCHEMA_REGISTRY_URL = "mock://"; 54 | 55 | public static KafkaProtobufSerde newPersonSerde() { 56 | final KafkaProtobufSerde serde = new KafkaProtobufSerde<>(Person.class); 57 | final Map config = new HashMap<>(); 58 | config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 59 | serde.configure(config, false); 60 | return serde; 61 | } 62 | 63 | public static KafkaProtobufSerde newCitySerde() { 64 | final KafkaProtobufSerde serde = new KafkaProtobufSerde<>(City.class); 65 | final Map config = new HashMap<>(); 66 | config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 67 | serde.configure(config, false); 68 | return serde; 69 | } 70 | 71 | public static Map getKafkaProperties() { 72 | final String brokers = "localhost:9092"; 73 | final Map kafkaConfig = new HashMap<>(); 74 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "inhabitants-per-city"); 75 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 76 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 77 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, KafkaProtobufSerde.class); 78 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 79 | return kafkaConfig; 80 | } 81 | 82 | public static Topology getTopology() { 83 | final KafkaProtobufSerde personSerde = newPersonSerde(); 84 | final KafkaProtobufSerde citySerde = newCitySerde(); 85 | final Serde stringSerde = Serdes.String(); 86 | final Serde longSerde = Serdes.Long(); 87 | 88 | final StreamsBuilder builder = new StreamsBuilder(); 89 | final KStream persons = 90 | builder.stream(INPUT_TOPIC, Consumed.with(stringSerde, personSerde)); 91 | 92 | final KTable 
counts = persons 93 | .groupBy((name, person) -> person.getCity(), Grouped.with(stringSerde, personSerde)) 94 | .count(Materialized.with(stringSerde, longSerde)); 95 | 96 | counts.toStream() 97 | .map((cityName, count) -> KeyValue.pair( 98 | cityName, 99 | City.newBuilder().setName(cityName).setInhabitants(Math.toIntExact(count)).build() 100 | )) 101 | .to(OUTPUT_TOPIC, Produced.with(stringSerde, citySerde)); 102 | 103 | return builder.build(); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/ErrorEventsPerMinute.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.serde.JsonSerde; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickEvent; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.ErrorOutput; 30 | import com.bakdata.fluent_kafka_streams_tests.test_types.StatusCode; 31 | import java.time.Duration; 32 | import java.util.HashMap; 33 | import java.util.Map; 34 | import lombok.Getter; 35 | import org.apache.kafka.common.serialization.Serdes; 36 | import org.apache.kafka.common.serialization.Serdes.IntegerSerde; 37 | import org.apache.kafka.streams.KeyValue; 38 | import org.apache.kafka.streams.StreamsBuilder; 39 | import org.apache.kafka.streams.StreamsConfig; 40 | import org.apache.kafka.streams.Topology; 41 | import org.apache.kafka.streams.kstream.Consumed; 42 | import org.apache.kafka.streams.kstream.Grouped; 43 | import org.apache.kafka.streams.kstream.Joined; 44 | import org.apache.kafka.streams.kstream.KStream; 45 | import org.apache.kafka.streams.kstream.KTable; 46 | import org.apache.kafka.streams.kstream.TimeWindows; 47 | import org.apache.kafka.streams.kstream.Windowed; 48 | 49 | @Getter 50 | public class ErrorEventsPerMinute { 51 | private final String clickInputTopic = "user-click-input"; 52 | 53 | private final String statusInputTopic = "status-input"; 54 | 55 | private final String errorOutputTopic = "user-error-output"; 56 | 57 | private final String alertTopic = "error-alert-output"; 58 | 59 | public static Map<String, Object> getKafkaProperties() { 60 | final String brokers = "localhost:9092"; 61 | final Map<String, Object> kafkaConfig = new HashMap<>(); 62 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "errors-per-minute"); 63 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 64 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, IntegerSerde.class); 65 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class); 66 | return kafkaConfig; 67 | } 68 | 69 | public Topology getTopology() { 70 | final StreamsBuilder builder = new StreamsBuilder(); 71 | 72 | // Click Events 73 | final KStream<Integer, ClickEvent> clickEvents = builder.stream(this.clickInputTopic, 74 | Consumed.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class))); 75 | 76 | final KTable<Windowed<Integer>, Long> counts = clickEvents 77 | .selectKey(((key, value) -> value.getStatus())) 78 | .filter(((key, value) -> key >= 400)) 79 | .groupByKey(Grouped.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class))) 80 | .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofMinutes(1))) // 1 Minute in ms 81 | .count(); 82 | 83 | // Status codes 84 | final KTable<Integer, StatusCode> statusCodes = builder.table(this.statusInputTopic, 85 | Consumed.with(Serdes.Integer(), new JsonSerde<>(StatusCode.class))); 86 | 87 | // Join 88 | final KStream<Integer, ErrorOutput> errors = counts.toStream() 89 | .map((key, value) -> KeyValue.pair( 90 | key.key(), 91 | new ErrorOutput(key.key(), value, key.window().start(), null /*empty definition*/))) 92 | .join(statusCodes, 93 | (countRecord, code) -> new ErrorOutput( 94 | countRecord.getStatusCode(), countRecord.getCount(), countRecord.getTime(), 95 | code.getDefinition()), 96 | Joined.valueSerde(new JsonSerde<>(ErrorOutput.class))); 97 | errors.to(this.errorOutputTopic); 98 | 99 | // Send alert if more than 5x a certain error code per minute 100 | errors.filter((key, errorOutput) -> errorOutput.getCount() > 5L).to(this.alertTopic); 101 | 102 | return builder.build(); 103 | } 104 | } 105 | 106 |
-------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/ForeignKeyJoin.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.experimental.UtilityClass; 30 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 31 | import org.apache.kafka.streams.StreamsBuilder; 32 | import org.apache.kafka.streams.StreamsConfig; 33 | import org.apache.kafka.streams.Topology; 34 | import org.apache.kafka.streams.kstream.KTable; 35 | import org.apache.kafka.streams.kstream.TableJoined; 36 | 37 | @UtilityClass 38 | public class ForeignKeyJoin { 39 | public static final String LEFT_INPUT_TOPIC = "left-input"; 40 | public static final String RIGHT_INPUT_TOPIC = "right-input"; 41 | public static final String OUTPUT_TOPIC = "join-output"; 42 | 43 | public static Map getKafkaProperties() { 44 | final String brokers = "localhost:9092"; 45 | final Map kafkaConfig = new HashMap<>(); 46 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "foreignKeyJoin"); 47 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 48 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 50 | return kafkaConfig; 51 | } 52 | 53 | public static Topology getTopology() { 54 | final StreamsBuilder builder = new StreamsBuilder(); 55 | final KTable leftTable = builder.table(LEFT_INPUT_TOPIC); 56 | final KTable rightTable = builder.table(RIGHT_INPUT_TOPIC); 57 | 58 | leftTable.join(rightTable, 59 | leftValue -> leftValue, 60 | (leftValue, rightValue) -> leftValue + rightValue, 61 | TableJoined.as("join")) 62 | .toStream() 63 | .to(OUTPUT_TOPIC); 64 | 65 | return builder.build(); 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/Mirror.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | 
* 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.Getter; 30 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 31 | import org.apache.kafka.streams.StreamsBuilder; 32 | import org.apache.kafka.streams.StreamsConfig; 33 | import org.apache.kafka.streams.Topology; 34 | import org.apache.kafka.streams.kstream.KStream; 35 | 36 | @Getter 37 | public class Mirror { 38 | private final String inputTopic = "input"; 39 | 40 | private final String outputTopic = "output"; 41 | 42 | public static Map getKafkaProperties() { 43 | final String brokers = "localhost:9092"; 44 | final Map kafkaConfig = new HashMap<>(); 45 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "mirror"); 46 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 47 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 48 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 49 | return kafkaConfig; 50 | } 51 | 52 | public Topology getTopology() { 53 | final StreamsBuilder builder = new StreamsBuilder(); 54 | final KStream input = builder.stream(this.inputTopic); 55 | 56 | input.to(this.outputTopic); 57 | return builder.build(); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorAvro.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 
15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 28 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 29 | import java.util.HashMap; 30 | import java.util.Map; 31 | import lombok.experimental.UtilityClass; 32 | import org.apache.avro.specific.SpecificRecord; 33 | import org.apache.kafka.streams.StreamsBuilder; 34 | import org.apache.kafka.streams.StreamsConfig; 35 | import org.apache.kafka.streams.Topology; 36 | import org.apache.kafka.streams.kstream.KStream; 37 | 38 | @UtilityClass 39 | public class MirrorAvro { 40 | private static final String INPUT_TOPIC = "input"; 41 | private static final String OUTPUT_TOPIC = "output"; 42 | 43 | public static Map getKafkaProperties() { 44 | final String brokers = "localhost:9092"; 45 | final Map kafkaConfig = new HashMap<>(); 46 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "mirror"); 47 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 48 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); 50 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); 51 | return kafkaConfig; 52 | } 53 | 54 | public static Topology getTopology() { 55 | final StreamsBuilder builder = new StreamsBuilder(); 56 | final KStream input = builder.stream(INPUT_TOPIC); 57 | 58 | input.to(OUTPUT_TOPIC); 59 | return builder.build(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorAvroNonDefaultSerde.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.kafka.Preconfigured; 28 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 29 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 30 | import java.util.HashMap; 31 | import java.util.Map; 32 | import lombok.experimental.UtilityClass; 33 | import org.apache.avro.specific.SpecificRecord; 34 | import org.apache.kafka.common.serialization.Serde; 35 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 36 | import org.apache.kafka.streams.StreamsBuilder; 37 | import org.apache.kafka.streams.StreamsConfig; 38 | import org.apache.kafka.streams.Topology; 39 | import org.apache.kafka.streams.kstream.Consumed; 40 | import org.apache.kafka.streams.kstream.KStream; 41 | import org.apache.kafka.streams.kstream.Produced; 42 | 43 | @UtilityClass 44 | public class MirrorAvroNonDefaultSerde { 45 | private static final String INPUT_TOPIC = "input"; 46 | private static final String OUTPUT_TOPIC = "output"; 47 | 48 | public static Map<String, Object> getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map<String, Object> kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "mirror"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 56 | return kafkaConfig; 57 | } 58 | 59 | public static Topology getTopology() { 60 | final StreamsBuilder builder = new StreamsBuilder(); 61 | final Preconfigured<Serde<SpecificRecord>> keySerde = getKeySerde(); 62 | final Preconfigured<Serde<SpecificRecord>> serde = getValueSerde(); 63 | final KStream<SpecificRecord, SpecificRecord> input = builder.stream(INPUT_TOPIC, 64 | Consumed.with(keySerde.configureForKeys(getKafkaProperties()), 65 | serde.configureForValues(getKafkaProperties()))); 66 | input.to(OUTPUT_TOPIC, Produced.with(keySerde.configureForKeys(getKafkaProperties()), 67 | serde.configureForValues(getKafkaProperties()))); 68 | return builder.build(); 69 | } 70 | 71 | public static Preconfigured<Serde<SpecificRecord>> getKeySerde() { 72 | return Preconfigured.create(new SpecificAvroSerde<>()); 73 | } 74 | 75 | public static Preconfigured<Serde<SpecificRecord>> getValueSerde() { 76 | return Preconfigured.create(new SpecificAvroSerde<>()); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorPattern.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so,
subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import java.util.regex.Pattern; 30 | import lombok.Getter; 31 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 32 | import org.apache.kafka.streams.StreamsBuilder; 33 | import org.apache.kafka.streams.StreamsConfig; 34 | import org.apache.kafka.streams.Topology; 35 | import org.apache.kafka.streams.kstream.KStream; 36 | 37 | @Getter 38 | public class MirrorPattern { 39 | private final String inputPattern1 = ".*-input1"; 40 | private final String inputPattern2 = ".*-input2"; 41 | 42 | private final String outputTopic = "output"; 43 | 44 | public static Map<String, Object> getKafkaProperties() { 45 | final String brokers = "localhost:9092"; 46 | final Map<String, Object> kafkaConfig = new HashMap<>(); 47 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 48 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 50 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 51 | return kafkaConfig; 52 | } 53 | 54 | public Topology getTopology() { 55 | final StreamsBuilder builder = new StreamsBuilder(); 56 | final KStream<String, String> input1 = builder.stream(Pattern.compile(this.inputPattern1)); 57 | final KStream<String, String> input2 = builder.stream(Pattern.compile(this.inputPattern2)); 58 | 59 | input1.merge(input2).to(this.outputTopic); 60 | return builder.build(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorPatternTopicMixed.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import java.util.regex.Pattern; 30 | import lombok.Getter; 31 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 32 | import org.apache.kafka.streams.StreamsBuilder; 33 | import org.apache.kafka.streams.StreamsConfig; 34 | import org.apache.kafka.streams.Topology; 35 | import org.apache.kafka.streams.kstream.KStream; 36 | 37 | @Getter 38 | public class MirrorPatternTopicMixed { 39 | private final String inputPattern = ".*-input1"; 40 | private final String inputTopic = "input2"; 41 | 42 | private final String outputTopic = "output"; 43 | 44 | public static Map<String, Object> getKafkaProperties() { 45 | final String brokers = "localhost:9092"; 46 | final Map<String, Object> kafkaConfig = new HashMap<>(); 47 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 48 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 50 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 51 | return kafkaConfig; 52 | } 53 | 54 | public Topology getTopology() { 55 | final StreamsBuilder builder = new StreamsBuilder(); 56 | final KStream<String, String> input1 = builder.stream(Pattern.compile(this.inputPattern)); 57 | final KStream<String, String> input2 = builder.stream(this.inputTopic); 58 | 59 | input1.merge(input2).to(this.outputTopic); 60 | return builder.build(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/NameJoinGlobalKTable.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE.
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.experimental.UtilityClass; 30 | import org.apache.kafka.common.serialization.Serdes; 31 | import org.apache.kafka.common.serialization.Serdes.LongSerde; 32 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 33 | import org.apache.kafka.streams.StreamsBuilder; 34 | import org.apache.kafka.streams.StreamsConfig; 35 | import org.apache.kafka.streams.Topology; 36 | import org.apache.kafka.streams.kstream.Consumed; 37 | import org.apache.kafka.streams.kstream.GlobalKTable; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.Produced; 40 | 41 | @UtilityClass 42 | public class NameJoinGlobalKTable { 43 | public static final String INPUT_TOPIC = "id-input"; 44 | public static final String NAME_INPUT = "name-input"; 45 | public static final String INTERMEDIATE_TOPIC = "upper-case-input"; 46 | public static final String OUTPUT_TOPIC = "join-output"; 47 | 48 | public static Map<String, Object> getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map<String, Object> kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "globalKTableJoin"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, LongSerde.class); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 55 | return kafkaConfig; 56 | } 57 | 58 | public static Topology getTopology() { 59 | final StreamsBuilder builder = new StreamsBuilder(); 60 | final KStream<Long, Long> inputStream = 61 | builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long())); 62 | 63 | final GlobalKTable<Long, String> joinTable = builder.globalTable(NAME_INPUT); 64 | 65 | inputStream 66 | .join(joinTable, 67 | (id, valueId) -> valueId, 68 | (id, name) -> name) 69 | .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String())); 70 | 71 | return builder.build(); 72 | } 73 | 74 | public static Topology getTopologyWithIntermediateTopic() { 75 | final StreamsBuilder builder = new StreamsBuilder(); 76 | final KStream<Long, Long> inputStream = 77 | builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long())); 78 | 79 | builder.stream(NAME_INPUT, Consumed.with(Serdes.Long(), Serdes.String())) 80 | .mapValues(name -> name.toUpperCase()) 81 | .to(INTERMEDIATE_TOPIC); 82 | 83 | final GlobalKTable<Long, String> joinTable = builder.globalTable(INTERMEDIATE_TOPIC); 84 | 85 | inputStream 86 | .join(joinTable, 87 | (id, valueId) -> valueId, 88 | (id, name) -> name) 89 | .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String())); 90 | 91 | return builder.build(); 92 | } 93 | 94 | } 95 | --------------------------------------------------------------------------------
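For a topology with several named inputs such as NameJoinGlobalKTable, a test addresses each topic explicitly. The sketch below is a hedged example: the withValueSerde override, the record values, and the test class name are assumptions chosen to match the serde configuration above, not code taken from this repository:

package com.bakdata.fluent_kafka_streams_tests.test_applications;

import com.bakdata.fluent_kafka_streams_tests.junit5.TestTopologyExtension;
import org.apache.kafka.common.serialization.Serdes;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

class NameJoinUsageSketch {
    @RegisterExtension
    final TestTopologyExtension<Long, String> testTopology =
            new TestTopologyExtension<>(NameJoinGlobalKTable::getTopology,
                    NameJoinGlobalKTable.getKafkaProperties());

    @Test
    void shouldJoinIdWithName() {
        // Populate the GlobalKTable first so the lookup succeeds once the stream record arrives.
        this.testTopology.input(NameJoinGlobalKTable.NAME_INPUT)
                .add(1L, "Jane");

        // The id stream carries Long values, so the default String value serde is overridden.
        this.testTopology.input(NameJoinGlobalKTable.INPUT_TOPIC)
                .withValueSerde(Serdes.Long())
                .add(1L, 1L);

        this.testTopology.streamOutput(NameJoinGlobalKTable.OUTPUT_TOPIC)
                .expectNextRecord().hasKey(1L).hasValue("Jane")
                .expectNoMoreRecord();
    }
}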
/fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/TopicExtractorApplication.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.experimental.UtilityClass; 30 | import org.apache.kafka.common.serialization.Serdes; 31 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 32 | import org.apache.kafka.streams.StreamsBuilder; 33 | import org.apache.kafka.streams.StreamsConfig; 34 | import org.apache.kafka.streams.Topology; 35 | import org.apache.kafka.streams.kstream.Consumed; 36 | 37 | @UtilityClass 38 | public class TopicExtractorApplication { 39 | public static final String INPUT_TOPIC = "input"; 40 | public static final String OUTPUT_TOPIC = "output"; 41 | 42 | 43 | public static Topology getTopology() { 44 | final StreamsBuilder builder = new StreamsBuilder(); 45 | builder.stream(INPUT_TOPIC, Consumed.with(Serdes.String(), Serdes.String())) 46 | .to((key, value, recordContext) -> OUTPUT_TOPIC); 47 | return builder.build(); 48 | } 49 | 50 | public static Map<String, Object> getProperties() { 51 | final Map<String, Object> properties = new HashMap<>(); 52 | properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "dynamic-test-stream"); 53 | properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:123"); 54 | properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 56 | return properties; 57 | } 58 | } 59 | --------------------------------------------------------------------------------
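Because TopicExtractorApplication routes records through a TopicNameExtractor rather than a fixed sink, a test has to request the output topic by name. A sketch, assuming the fluent API's streamOutput(String) overload; the test class name and record content are illustrative:

package com.bakdata.fluent_kafka_streams_tests.test_applications;

import com.bakdata.fluent_kafka_streams_tests.junit5.TestTopologyExtension;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

class TopicExtractorUsageSketch {
    @RegisterExtension
    final TestTopologyExtension<String, String> testTopology =
            new TestTopologyExtension<>(TopicExtractorApplication::getTopology,
                    TopicExtractorApplication.getProperties());

    @Test
    void shouldForwardToExtractedTopic() {
        this.testTopology.input().add("key", "value");

        // The sink topic is only known at runtime, so it is looked up by name here.
        this.testTopology.streamOutput(TopicExtractorApplication.OUTPUT_TOPIC)
                .expectNextRecord().hasKey("key").hasValue("value")
                .expectNoMoreRecord();
    }
}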
/fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/UserClicksPerMinute.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.serde.JsonSerde; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickEvent; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickOutput; 30 | import java.time.Duration; 31 | import java.util.HashMap; 32 | import java.util.Map; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.IntegerSerde; 35 | import org.apache.kafka.streams.KeyValue; 36 | import org.apache.kafka.streams.StreamsBuilder; 37 | import org.apache.kafka.streams.StreamsConfig; 38 | import org.apache.kafka.streams.Topology; 39 | import org.apache.kafka.streams.kstream.KStream; 40 | import org.apache.kafka.streams.kstream.KTable; 41 | import org.apache.kafka.streams.kstream.Produced; 42 | import org.apache.kafka.streams.kstream.TimeWindows; 43 | import org.apache.kafka.streams.kstream.Windowed; 44 | 45 | public class UserClicksPerMinute { 46 | private static final String INPUT_TOPIC = "user-click-input"; 47 | 48 | private static final String OUTPUT_TOPIC = "user-click-output"; 49 | 50 | public static Map<String, Object> getKafkaProperties() { 51 | final String brokers = "localhost:9092"; 52 | final Map<String, Object> kafkaConfig = new HashMap<>(); 53 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "user-clicks-per-minute"); 54 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, IntegerSerde.class); 56 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class); 57 | return kafkaConfig; 58 | } 59 | 60 | public static Topology getTopology() { 61 | final StreamsBuilder builder = new StreamsBuilder(); 62 | final KStream<Integer, ClickEvent> clickEvents = builder.stream(INPUT_TOPIC); 63 | 64 | final KTable<Windowed<Integer>, Long> counts = clickEvents 65 | .groupByKey() 66 | .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofMinutes(1))) 67 | .count(); 68 | 69 | counts.toStream() 70 | .map((key, value) -> KeyValue.pair( 71 | key.key(), 72 | new ClickOutput(key.key(), value, key.window().start()))) 73 | .to(OUTPUT_TOPIC, Produced.with(Serdes.Integer(), new JsonSerde<>(ClickOutput.class))); 74 | 75 | return builder.build(); 76 | } 77 | 78 | } 79 | --------------------------------------------------------------------------------
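Windowed aggregations like the one above are driven by event time. The sketch below assumes the fluent TestInput exposes an at(timestamp) method for pinning record timestamps and that hasValue compares via equals; the timestamps, expected counts, and test class name are illustrative:

package com.bakdata.fluent_kafka_streams_tests.test_applications;

import com.bakdata.fluent_kafka_streams_tests.junit5.TestTopologyExtension;
import com.bakdata.fluent_kafka_streams_tests.serde.JsonSerde;
import com.bakdata.fluent_kafka_streams_tests.test_types.ClickEvent;
import com.bakdata.fluent_kafka_streams_tests.test_types.ClickOutput;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

class UserClicksUsageSketch {
    @RegisterExtension
    final TestTopologyExtension<Integer, ClickEvent> testTopology =
            new TestTopologyExtension<>(UserClicksPerMinute::getTopology,
                    UserClicksPerMinute.getKafkaProperties());

    @Test
    void shouldCountClicksWithinOneMinuteWindow() {
        final long windowStart = TimeUnit.MINUTES.toMillis(10);

        // Two clicks from the same user inside the same one-minute window.
        this.testTopology.input()
                .at(windowStart).add(13, new ClickEvent(13))
                .at(windowStart + 500).add(13, new ClickEvent(13));

        // The windowed count emits one update per input record.
        this.testTopology.streamOutput()
                .withValueSerde(new JsonSerde<>(ClickOutput.class))
                .expectNextRecord().hasKey(13).hasValue(new ClickOutput(13, 1L, windowStart))
                .expectNextRecord().hasKey(13).hasValue(new ClickOutput(13, 2L, windowStart))
                .expectNoMoreRecord();
    }
}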
/fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/WordCount.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.Arrays; 28 | import java.util.HashMap; 29 | import java.util.Map; 30 | import java.util.regex.Pattern; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.StreamsBuilder; 36 | import org.apache.kafka.streams.StreamsConfig; 37 | import org.apache.kafka.streams.Topology; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.KTable; 40 | import org.apache.kafka.streams.kstream.Materialized; 41 | import org.apache.kafka.streams.kstream.Produced; 42 | 43 | @Getter 44 | public class WordCount { 45 | private final String inputTopic = "wordcount-input"; 46 | 47 | private final String outputTopic = "wordcount-output"; 48 | 49 | public static Map<String, Object> getKafkaProperties() { 50 | final String brokers = "localhost:9092"; 51 | final Map<String, Object> kafkaConfig = new HashMap<>(); 52 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 53 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 56 | return kafkaConfig; 57 | } 58 | 59 | public Topology getTopology() { 60 | final Serde<String> stringSerde = Serdes.String(); 61 | final Serde<Long> longSerde = Serdes.Long(); 62 | 63 | final StreamsBuilder builder = new StreamsBuilder(); 64 | final KStream<String, String> textLines = builder.stream(this.inputTopic); 65 | 66 | final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); 67 | final KTable<String, Long> wordCounts = textLines 68 | .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) 69 | .groupBy((key, word) -> word) 70 | .count(Materialized.as("count")); 71 | 72 | wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde)); 73 | return builder.build(); 74 | } 75 | } 76 | --------------------------------------------------------------------------------
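A usage sketch for the WordCount topology above, mirroring the style of this project's own WordCount tests; the sample sentences, expected counts, and test class name are illustrative:

package com.bakdata.fluent_kafka_streams_tests.test_applications;

import com.bakdata.fluent_kafka_streams_tests.junit5.TestTopologyExtension;
import org.apache.kafka.common.serialization.Serdes;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

class WordCountUsageSketch {
    private final WordCount app = new WordCount();

    @RegisterExtension
    final TestTopologyExtension<String, String> testTopology =
            new TestTopologyExtension<>(this.app::getTopology, WordCount.getKafkaProperties());

    @Test
    void shouldAggregateWordCounts() {
        this.testTopology.input()
                .add("cat dog")
                .add("cat");

        // The output is keyed by word with Long counts, so both serdes are overridden here.
        this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
                .expectNextRecord().hasKey("cat").hasValue(1L)
                .expectNextRecord().hasKey("dog").hasValue(1L)
                .expectNextRecord().hasKey("cat").hasValue(2L)
                .expectNoMoreRecord();
    }
}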
/fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/ClickEvent.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | @Data 9 | @Builder 10 | @NoArgsConstructor 11 | @AllArgsConstructor 12 | public class ClickEvent { 13 | int userId; 14 | Integer status; 15 | 16 | public ClickEvent(final int userId) { 17 | this.userId = userId; 18 | } 19 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/ClickOutput.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import com.fasterxml.jackson.annotation.JsonTypeInfo; 4 | import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | @Data 10 | @AllArgsConstructor 11 | @NoArgsConstructor 12 | @JsonDeserialize(as = ClickOutput.class) 13 | @JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, property="@class") 14 | public class ClickOutput { 15 | int userId; 16 | long count; 17 | long time; 18 | } 19 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/ErrorOutput.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import com.fasterxml.jackson.annotation.JsonTypeInfo; 4 | import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | 10 | @Data 11 | @AllArgsConstructor 12 | @NoArgsConstructor 13 | @JsonDeserialize(as = ErrorOutput.class) 14 | @JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, property="@class") 15 | public class ErrorOutput { 16 | int statusCode; 17 | long count; 18 | long time; 19 | String definition; 20 | } 21 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/StatusCode.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | @AllArgsConstructor 10 | public class StatusCode { 11 | int code; 12 | String definition; 13 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/proto/city.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package com.bakdata.fluent_kafka_streams_tests.test_types.proto; 4 | 5 | message City { 6 | string name = 1; 7 | int32 inhabitants = 2; 8 | } 9 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/proto/person.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package com.bakdata.fluent_kafka_streams_tests.test_types.proto; 4 | 5 | message Person { 6 | string name = 1; 7 | string city = 2; 8 | } 9 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- [17-line Log4j 2 configuration; the XML markup was not captured in this dump] -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | version=3.3.1-SNAPSHOT 2 | org.gradle.caching=true 3 | org.gradle.parallel=true 4 |
junit5Version=5.11.4 5 | junit4Version=4.13.2 6 | log4jVersion=2.24.3 7 | org.gradle.jvmargs=-Xmx2048m 8 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bakdata/fluent-kafka-streams-tests/6eb221b7c5b18c020a822a1d561bb24487e0f034/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-all.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 
56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | # This is normally unused 84 | # shellcheck disable=SC2034 85 | APP_BASE_NAME=${0##*/} 86 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 87 | APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit 88 | 89 | # Use the maximum available, or set MAX_FD != -1 to use that value. 90 | MAX_FD=maximum 91 | 92 | warn () { 93 | echo "$*" 94 | } >&2 95 | 96 | die () { 97 | echo 98 | echo "$*" 99 | echo 100 | exit 1 101 | } >&2 102 | 103 | # OS specific support (must be 'true' or 'false'). 104 | cygwin=false 105 | msys=false 106 | darwin=false 107 | nonstop=false 108 | case "$( uname )" in #( 109 | CYGWIN* ) cygwin=true ;; #( 110 | Darwin* ) darwin=true ;; #( 111 | MSYS* | MINGW* ) msys=true ;; #( 112 | NONSTOP* ) nonstop=true ;; 113 | esac 114 | 115 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 116 | 117 | 118 | # Determine the Java command to use to start the JVM. 119 | if [ -n "$JAVA_HOME" ] ; then 120 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 121 | # IBM's JDK on AIX uses strange locations for the executables 122 | JAVACMD=$JAVA_HOME/jre/sh/java 123 | else 124 | JAVACMD=$JAVA_HOME/bin/java 125 | fi 126 | if [ ! -x "$JAVACMD" ] ; then 127 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 128 | 129 | Please set the JAVA_HOME variable in your environment to match the 130 | location of your Java installation." 131 | fi 132 | else 133 | JAVACMD=java 134 | if ! command -v java >/dev/null 2>&1 135 | then 136 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | fi 142 | 143 | # Increase the maximum file descriptors if we can. 144 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 145 | case $MAX_FD in #( 146 | max*) 147 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 148 | # shellcheck disable=SC2039,SC3045 149 | MAX_FD=$( ulimit -H -n ) || 150 | warn "Could not query maximum file descriptor limit" 151 | esac 152 | case $MAX_FD in #( 153 | '' | soft) :;; #( 154 | *) 155 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 
156 | # shellcheck disable=SC2039,SC3045 157 | ulimit -n "$MAX_FD" || 158 | warn "Could not set maximum file descriptor limit to $MAX_FD" 159 | esac 160 | fi 161 | 162 | # Collect all arguments for the java command, stacking in reverse order: 163 | # * args from the command line 164 | # * the main class name 165 | # * -classpath 166 | # * -D...appname settings 167 | # * --module-path (only if needed) 168 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 169 | 170 | # For Cygwin or MSYS, switch paths to Windows format before running java 171 | if "$cygwin" || "$msys" ; then 172 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 173 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 174 | 175 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 176 | 177 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 178 | for arg do 179 | if 180 | case $arg in #( 181 | -*) false ;; # don't mess with options #( 182 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 183 | [ -e "$t" ] ;; #( 184 | *) false ;; 185 | esac 186 | then 187 | arg=$( cygpath --path --ignore --mixed "$arg" ) 188 | fi 189 | # Roll the args list around exactly as many times as the number of 190 | # args, so each arg winds up back in the position where it started, but 191 | # possibly modified. 192 | # 193 | # NB: a `for` loop captures its iteration list before it begins, so 194 | # changing the positional parameters here affects neither the number of 195 | # iterations, nor the values presented in `arg`. 196 | shift # remove old arg 197 | set -- "$@" "$arg" # push replacement arg 198 | done 199 | fi 200 | 201 | 202 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 203 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 204 | 205 | # Collect all arguments for the java command: 206 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 207 | # and any embedded shellness will be escaped. 208 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 209 | # treated as '${Hostname}' itself on the command line. 210 | 211 | set -- \ 212 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 213 | -classpath "$CLASSPATH" \ 214 | org.gradle.wrapper.GradleWrapperMain \ 215 | "$@" 216 | 217 | # Stop when "xargs" is not available. 218 | if ! command -v xargs >/dev/null 2>&1 219 | then 220 | die "xargs is not available" 221 | fi 222 | 223 | # Use "xargs" to parse quoted args. 224 | # 225 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 226 | # 227 | # In Bash we could simply go: 228 | # 229 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 230 | # set -- "${ARGS[@]}" "$@" 231 | # 232 | # but POSIX shell has neither arrays nor command substitution, so instead we 233 | # post-process each arg (as a line of input to sed) to backslash-escape any 234 | # character that might be a shell metacharacter, then use eval to reverse 235 | # that process (while maintaining the separation between arguments), and wrap 236 | # the whole thing up as a single "set" statement. 237 | # 238 | # This will of course break if any of these variables contains a newline or 239 | # an unmatched quote. 
240 | # 241 | 242 | eval "set -- $( 243 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 244 | xargs -n1 | 245 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 246 | tr '\n' ' ' 247 | )" '"$@"' 248 | 249 | exec "$JAVACMD" "$@" 250 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | @rem This is normally unused 30 | set APP_BASE_NAME=%~n0 31 | set APP_HOME=%DIRNAME% 32 | 33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 35 | 36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 38 | 39 | @rem Find java.exe 40 | if defined JAVA_HOME goto findJavaFromJavaHome 41 | 42 | set JAVA_EXE=java.exe 43 | %JAVA_EXE% -version >NUL 2>&1 44 | if %ERRORLEVEL% equ 0 goto execute 45 | 46 | echo. 47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 48 | echo. 49 | echo Please set the JAVA_HOME variable in your environment to match the 50 | echo location of your Java installation. 51 | 52 | goto fail 53 | 54 | :findJavaFromJavaHome 55 | set JAVA_HOME=%JAVA_HOME:"=% 56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 57 | 58 | if exist "%JAVA_EXE%" goto execute 59 | 60 | echo. 61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 62 | echo. 63 | echo Please set the JAVA_HOME variable in your environment to match the 64 | echo location of your Java installation. 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 
84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | pluginManagement { 2 | repositories { 3 | gradlePluginPortal() 4 | } 5 | } 6 | 7 | rootProject.name = 'fluent-kafka-streams-tests' 8 | 9 | ['', '-junit5', '-junit4'].each { suffix -> 10 | include ":fluent-kafka-streams-tests$suffix" 11 | } 12 | --------------------------------------------------------------------------------