├── .gitignore ├── gradle.properties ├── gradle ├── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties └── libs.versions.toml ├── fluent-kafka-streams-tests ├── lombok.config ├── src │ ├── test │ │ ├── proto │ │ │ ├── city.proto │ │ │ └── person.proto │ │ ├── avro │ │ │ ├── City.avsc │ │ │ └── Person.avsc │ │ ├── java │ │ │ └── com │ │ │ │ └── bakdata │ │ │ │ └── fluent_kafka_streams_tests │ │ │ │ ├── test_types │ │ │ │ ├── StatusCode.java │ │ │ │ ├── ClickEvent.java │ │ │ │ ├── ClickOutput.java │ │ │ │ └── ErrorOutput.java │ │ │ │ ├── serde │ │ │ │ ├── JsonSerde.java │ │ │ │ ├── JsonSerializer.java │ │ │ │ └── JsonDeserializer.java │ │ │ │ ├── MirrorPatternTest.java │ │ │ │ ├── TestTopologyTest.java │ │ │ │ ├── DynamicTopicTest.java │ │ │ │ ├── ForeignKeyJoinTest.java │ │ │ │ ├── WordCountWithStaticTopologyTest.java │ │ │ │ ├── WordCountWithDefaultSerdeTest.java │ │ │ │ ├── test_applications │ │ │ │ ├── Mirror.java │ │ │ │ ├── TopicExtractorApplication.java │ │ │ │ ├── MirrorPatternTopicMixed.java │ │ │ │ ├── MirrorPattern.java │ │ │ │ ├── MirrorAvro.java │ │ │ │ ├── ForeignKeyJoin.java │ │ │ │ ├── WordCount.java │ │ │ │ ├── CountInhabitantsWithAvro.java │ │ │ │ ├── UserClicksPerMinute.java │ │ │ │ ├── MirrorAvroNonDefaultSerde.java │ │ │ │ ├── NameJoinGlobalKTable.java │ │ │ │ ├── ErrorEventsPerMinute.java │ │ │ │ └── CountInhabitantsWithProto.java │ │ │ │ ├── NameJoinTest.java │ │ │ │ ├── NameJoinWithIntermediateTopicTest.java │ │ │ │ ├── CountInhabitantsWithAvroTest.java │ │ │ │ ├── MirrorPatternTopicMixedTest.java │ │ │ │ ├── CountInhabitantsWithProtoTest.java │ │ │ │ ├── HeaderTest.java │ │ │ │ ├── MirrorAvroNonDefaultSerdeTest.java │ │ │ │ ├── TestInputAndOutputTest.java │ │ │ │ └── UserClicksPerMinuteTest.java │ │ └── resources │ │ │ └── log4j2.xml │ └── main │ │ └── java │ │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ ├── TableOutput.java │ │ ├── StreamOutput.java │ │ ├── SerdeConfig.java │ │ ├── Expectation.java │ │ ├── BaseOutput.java │ │ └── TestOutput.java └── build.gradle.kts ├── fluent-kafka-streams-tests-junit4 ├── lombok.config ├── build.gradle.kts └── src │ ├── test │ ├── resources │ │ └── log4j2.xml │ └── java │ │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ └── junit4 │ │ ├── WordCountWithStaticTopologyTest.java │ │ ├── WordCountWitherTest.java │ │ └── test_applications │ │ └── WordCount.java │ └── main │ └── java │ └── com │ └── bakdata │ └── fluent_kafka_streams_tests │ └── junit4 │ └── TestTopologyRule.java ├── fluent-kafka-streams-tests-junit5 ├── lombok.config ├── build.gradle.kts └── src │ ├── test │ ├── resources │ │ └── log4j2.xml │ └── java │ │ └── com │ │ └── bakdata │ │ └── fluent_kafka_streams_tests │ │ └── junit5 │ │ ├── WordCountWithStaticTopologyTest.java │ │ ├── WordCountWitherTest.java │ │ ├── test_applications │ │ └── WordCount.java │ │ └── WordCountTest.java │ └── main │ └── java │ └── com │ └── bakdata │ └── fluent_kafka_streams_tests │ └── junit5 │ └── TestTopologyExtension.java ├── settings.gradle.kts ├── .github ├── workflows │ ├── release.yaml │ └── build-and-publish.yaml └── dependabot.yaml ├── LICENSE ├── gradlew.bat ├── README.md └── gradlew /.gitignore: -------------------------------------------------------------------------------- 1 | .* 2 | !.gitignore 3 | !.github 4 | **/build/ 5 | **/out/ 6 | -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | 
version=3.5.1-SNAPSHOT 2 | org.gradle.caching=true 3 | org.gradle.parallel=true 4 | org.gradle.jvmargs=-Xmx2048m 5 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bakdata/fluent-kafka-streams-tests/HEAD/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/lombok.config: -------------------------------------------------------------------------------- 1 | # This file is generated by the 'io.freefair.lombok' Gradle plugin 2 | config.stopBubbling = true 3 | lombok.addLombokGeneratedAnnotation = true 4 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/lombok.config: -------------------------------------------------------------------------------- 1 | # This file is generated by the 'io.freefair.lombok' Gradle plugin 2 | config.stopBubbling = true 3 | lombok.addLombokGeneratedAnnotation = true 4 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/lombok.config: -------------------------------------------------------------------------------- 1 | # This file is generated by the 'io.freefair.lombok' Gradle plugin 2 | config.stopBubbling = true 3 | lombok.addLombokGeneratedAnnotation = true 4 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/proto/city.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package com.bakdata.fluent_kafka_streams_tests.test_types.proto; 4 | 5 | message City { 6 | string name = 1; 7 | int32 inhabitants = 2; 8 | } 9 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/proto/person.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package com.bakdata.fluent_kafka_streams_tests.test_types.proto; 4 | 5 | message Person { 6 | string name = 1; 7 | string city = 2; 8 | } 9 | -------------------------------------------------------------------------------- /settings.gradle.kts: -------------------------------------------------------------------------------- 1 | pluginManagement { 2 | repositories { 3 | gradlePluginPortal() 4 | } 5 | } 6 | 7 | rootProject.name = "fluent-kafka-streams-tests" 8 | 9 | listOf("", "-junit5", "-junit4").forEach { suffix -> 10 | include(":fluent-kafka-streams-tests$suffix") 11 | } 12 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-all.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/build.gradle.kts: -------------------------------------------------------------------------------- 1 | description = "Provides the fluent Kafka Streams test framework." 
2 | 3 | dependencies { 4 | api(project(":fluent-kafka-streams-tests")) 5 | 6 | compileOnly(libs.junit4) 7 | testImplementation(libs.junit4) 8 | } 9 | 10 | tasks.test { 11 | useJUnit() 12 | } 13 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/build.gradle.kts: -------------------------------------------------------------------------------- 1 | description = "Provides the fluent Kafka Streams test framework." 2 | 3 | dependencies { 4 | api(project(":fluent-kafka-streams-tests")) 5 | 6 | testRuntimeOnly(libs.junit.platform.launcher) 7 | compileOnly(libs.junit.jupiter) 8 | testImplementation(libs.junit.jupiter) 9 | } 10 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/avro/City.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "City", 4 | "namespace": "com.bakdata.fluent_kafka_streams_tests.test_types", 5 | "fields": [ 6 | { 7 | "name": "name", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "inhabitants", 12 | "type": "int" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/avro/Person.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "Person", 4 | "namespace": "com.bakdata.fluent_kafka_streams_tests.test_types", 5 | "fields": [ 6 | { 7 | "name": "name", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "city", 12 | "type": "string" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/StatusCode.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | @AllArgsConstructor 10 | public class StatusCode { 11 | int code; 12 | String definition; 13 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/ClickEvent.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | @Data 9 | @Builder 10 | @NoArgsConstructor 11 | @AllArgsConstructor 12 | public class ClickEvent { 13 | int userId; 14 | Integer status; 15 | 16 | public ClickEvent(final int userId) { 17 | this.userId = userId; 18 | } 19 | } -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 
-------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/ClickOutput.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import com.fasterxml.jackson.annotation.JsonTypeInfo; 4 | import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | @Data 10 | @AllArgsConstructor 11 | @NoArgsConstructor 12 | @JsonDeserialize(as = ClickOutput.class) 13 | @JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, property="@class") 14 | public class ClickOutput { 15 | int userId; 16 | long count; 17 | long time; 18 | } 19 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/serde/JsonSerde.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.serde; 2 | 3 | import lombok.experimental.Delegate; 4 | import org.apache.kafka.common.serialization.Serde; 5 | import org.apache.kafka.common.serialization.Serdes; 6 | 7 | public class JsonSerde<T> implements Serde<T> { 8 | @Delegate 9 | private final Serde<T> inner; 10 | 11 | public JsonSerde(final Class<T> clazz) { 12 | this.inner = Serdes.serdeFrom(new JsonSerializer<>(), new JsonDeserializer<>(clazz)); 13 | } 14 | 15 | public JsonSerde() { 16 | this((Class) Object.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_types/ErrorOutput.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests.test_types; 2 | 3 | import com.fasterxml.jackson.annotation.JsonTypeInfo; 4 | import com.fasterxml.jackson.databind.annotation.JsonDeserialize; 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | 10 | @Data 11 | @AllArgsConstructor 12 | @NoArgsConstructor 13 | @JsonDeserialize(as = ErrorOutput.class) 14 | @JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, property="@class") 15 | public class ErrorOutput { 16 | int statusCode; 17 | long count; 18 | long time; 19 | String definition; 20 | } 21 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | release-type: 7 | description: "The scope of the release (major, minor or patch)."
8 | type: choice 9 | required: true 10 | default: patch 11 | options: 12 | - patch 13 | - minor 14 | - major 15 | 16 | jobs: 17 | java-gradle-release: 18 | name: Java Gradle 19 | uses: bakdata/ci-templates/.github/workflows/java-gradle-release.yaml@1.70.0 20 | with: 21 | java-version: 17 22 | release-type: "${{ inputs.release-type }}" 23 | secrets: 24 | github-email: "${{ secrets.GH_EMAIL }}" 25 | github-username: "${{ secrets.GH_USERNAME }}" 26 | github-token: "${{ secrets.GH_TOKEN }}" 27 | -------------------------------------------------------------------------------- /.github/workflows/build-and-publish.yaml: -------------------------------------------------------------------------------- 1 | name: Build and Publish 2 | 3 | on: 4 | push: 5 | tags: ["**"] 6 | branches: 7 | - master 8 | pull_request: 9 | 10 | jobs: 11 | build-and-publish: 12 | name: Java Gradle 13 | uses: bakdata/ci-templates/.github/workflows/java-gradle-library.yaml@1.70.0 14 | with: 15 | java-version: 17 16 | secrets: 17 | sonar-token: ${{ secrets.SONARCLOUD_TOKEN }} 18 | sonar-organization: ${{ secrets.SONARCLOUD_ORGANIZATION }} 19 | signing-secret-key-ring: ${{ secrets.SONATYPE_SIGNING_SECRET_KEY_RING }} 20 | signing-key-id: ${{ secrets.SONATYPE_SIGNING_KEY_ID }} 21 | signing-password: ${{ secrets.SONATYPE_SIGNING_PASSWORD }} 22 | ossrh-username: ${{ secrets.SONATYPE_OSSRH_USERNAME }} 23 | ossrh-password: ${{ secrets.SONATYPE_OSSRH_PASSWORD }} 24 | github-token: ${{ secrets.GH_TOKEN }} 25 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "gradle" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | groups: 8 | kafka-dependencies: 9 | patterns: 10 | - "com.bakdata.kafka*" 11 | - "com.bakdata.fluent-kafka-streams-tests*" 12 | - "io.confluent*" 13 | - "org.apache.kafka*" 14 | log-dependencies: 15 | patterns: 16 | - "org.slf4j*" 17 | - "org.apache.logging.log4j*" 18 | test-dependencies: 19 | patterns: 20 | - "org.junit*" 21 | - "org.assertj*" 22 | - "*junit*" 23 | - "org.mockito*" 24 | - "org.testcontainers*" 25 | - "org.awaitility*" 26 | plugins: 27 | patterns: 28 | - "com.bakdata.release" 29 | - "com.bakdata.sonar" 30 | - "com.bakdata.sonatype" 31 | protobuf: 32 | patterns: 33 | - "com.google.protobuf*" 34 | 35 | - package-ecosystem: "github-actions" 36 | directory: "/" 37 | schedule: 38 | interval: "daily" 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 bakdata 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | alias(libs.plugins.avro) 3 | alias(libs.plugins.protobuf) 4 | java 5 | idea // required for protobuf support in intellij 6 | } 7 | 8 | description = "Provides the fluent Kafka Streams test framework." 9 | 10 | 11 | val protobufVersion = libs.protobuf.get().version 12 | dependencies { 13 | api(platform(libs.kafka.bom)) // Central repository requires this as a direct dependency to resolve versions 14 | api(libs.kafka.streams.utils) 15 | api(libs.kafka.clients) 16 | api(libs.kafka.streams) 17 | api(libs.kafka.streams.testUtils) 18 | implementation(libs.jool) 19 | 20 | testRuntimeOnly(libs.junit.platform.launcher) 21 | testImplementation(libs.junit.jupiter) 22 | testImplementation(libs.avro) 23 | testImplementation(libs.kafka.streams.avro.serde) { 24 | exclude(group = "org.apache.kafka") // force usage of OSS kafka-clients 25 | } 26 | testImplementation(libs.kafka.streams.protobuf.serde) { 27 | exclude(group = "org.apache.kafka") // force usage of OSS kafka-clients 28 | } 29 | testImplementation(libs.protobuf) 30 | } 31 | 32 | protobuf { 33 | protoc { 34 | // The artifact spec for the Protobuf Compiler 35 | artifact = "com.google.protobuf:protoc:$protobufVersion" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /gradle/libs.versions.toml: -------------------------------------------------------------------------------- 1 | [versions] 2 | junit5 = "5.14.0" 3 | junit4 = "4.13.2" 4 | kafkaUtils = "1.3.0" 5 | 6 | [libraries] 7 | kafka-bom = { group = "com.bakdata.kafka", name = "kafka-bom", version.ref = "kafkaUtils" } 8 | kafka-streams-utils = { group = "com.bakdata.kafka", name = "kafka-streams-utils", version.ref = "kafkaUtils" } 9 | kafka-clients = { group = "org.apache.kafka", name = "kafka-clients" } 10 | kafka-streams = { group = "org.apache.kafka", name = "kafka-streams" } 11 | kafka-streams-testUtils = { group = "org.apache.kafka", name = "kafka-streams-test-utils" } 12 | kafka-streams-avro-serde = { group = "io.confluent", name = "kafka-streams-avro-serde" } 13 | kafka-streams-protobuf-serde = { group = "io.confluent", name = "kafka-streams-protobuf-serde" } 14 | avro = { group = "org.apache.avro", name = "avro", version = "1.12.0" } 15 | protobuf = { group = "com.google.protobuf", name = "protobuf-java", version = "4.33.0" } 16 | jool = { group = "org.jooq", name = "jool", version = "0.9.15" } 17 | 18 | junit-platform-launcher = { group = "org.junit.platform", name = "junit-platform-launcher" } 19 | junit-jupiter = { group = "org.junit.jupiter", name = "junit-jupiter", version.ref = "junit5" } 20 | junit4 = { group = "junit", name = "junit", version.ref = "junit4" } 21 | assertj = { group = "org.assertj", name = "assertj-core", version = "3.27.6" } 22 | log4j-slf4j2 = { group = "org.apache.logging.log4j", name = 
"log4j-slf4j2-impl", version = "2.25.2" } 23 | 24 | [plugins] 25 | release = { id = "com.bakdata.release", version = "1.11.0" } 26 | sonar = { id = "com.bakdata.sonar", version = "1.11.0" } 27 | sonatype = { id = "com.bakdata.sonatype", version = "1.11.0" } 28 | lombok = { id = "io.freefair.lombok", version = "8.14" } 29 | avro = { id = "com.github.davidmc24.gradle.plugin.avro", version = "1.9.1" } 30 | protobuf = { id = "com.google.protobuf", version = "0.9.5" } 31 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/MirrorPatternTest.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests; 2 | 3 | import static org.assertj.core.api.Assertions.assertThatExceptionOfType; 4 | 5 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorPattern; 6 | import java.util.NoSuchElementException; 7 | import org.junit.jupiter.api.AfterEach; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import org.junit.jupiter.api.Test; 10 | 11 | class MirrorPatternTest { 12 | private final MirrorPattern app = new MirrorPattern(); 13 | 14 | private final TestTopology testTopology = new TestTopology<>(this.app::getTopology, 15 | MirrorPattern.getKafkaProperties()); 16 | 17 | @BeforeEach 18 | void start() { 19 | this.testTopology.start(); 20 | } 21 | 22 | @AfterEach 23 | void stop() { 24 | this.testTopology.stop(); 25 | } 26 | 27 | @Test 28 | void shouldConsumeFromPattern() { 29 | this.testTopology.input("example-input1") 30 | .add("key1", "value1") 31 | .add("key2", "value2"); 32 | this.testTopology.input("another-input1") 33 | .add("key3", "value3"); 34 | this.testTopology.input("example-input2") 35 | .add("key4", "value4"); 36 | 37 | this.testTopology.streamOutput() 38 | .expectNextRecord().hasKey("key1").hasValue("value1") 39 | .expectNextRecord().hasKey("key2").hasValue("value2") 40 | .expectNextRecord().hasKey("key3").hasValue("value3") 41 | .expectNextRecord().hasKey("key4").hasValue("value4") 42 | .expectNoMoreRecord(); 43 | } 44 | 45 | @Test 46 | void shouldThrowIfInputDoesNotMatchPattern() { 47 | assertThatExceptionOfType(NoSuchElementException.class) 48 | .isThrownBy(() -> this.testTopology.input("not-matching")); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/TestTopologyTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static org.assertj.core.api.Assertions.assertThatCode; 28 | 29 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvro; 30 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 31 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 32 | import java.util.Map; 33 | import org.junit.jupiter.api.Test; 34 | 35 | 36 | class TestTopologyTest { 37 | 38 | @Test 39 | void shouldUseImmutableProperties() { 40 | final TestTopology testTopology = 41 | new TestTopology<>(MirrorAvro::getTopology, Map.copyOf(MirrorAvro.getKafkaProperties())); 42 | assertThatCode(testTopology::start).doesNotThrowAnyException(); 43 | testTopology.stop(); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/DynamicTopicTest.java: -------------------------------------------------------------------------------- 1 | package com.bakdata.fluent_kafka_streams_tests; 2 | 3 | import static org.assertj.core.api.Assertions.assertThatExceptionOfType; 4 | 5 | import com.bakdata.fluent_kafka_streams_tests.test_applications.TopicExtractorApplication; 6 | import java.util.NoSuchElementException; 7 | import org.junit.jupiter.api.AfterEach; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import org.junit.jupiter.api.Test; 10 | 11 | class DynamicTopicTest { 12 | 13 | private static final String KEY = "key"; 14 | private static final String VALUE = "value"; 15 | private final TestTopology testTopology = 16 | new TestTopology<>(TopicExtractorApplication::getTopology, TopicExtractorApplication.getProperties()); 17 | 18 | @BeforeEach 19 | void start() { 20 | this.testTopology.start(); 21 | this.testTopology.input().add(KEY, VALUE); 22 | this.testTopology.getOutputTopics().add(TopicExtractorApplication.OUTPUT_TOPIC); 23 | } 24 | 25 | @AfterEach 26 | void stop() { 27 | this.testTopology.stop(); 28 | } 29 | 30 | @Test 31 | void shouldHaveOutputForTopicName() { 32 | this.testTopology.streamOutput(TopicExtractorApplication.OUTPUT_TOPIC) 33 | .expectNextRecord() 34 | .hasKey(KEY).and().hasValue(VALUE); 35 | } 36 | 37 | @Test 38 | void shouldHaveOutputWithoutTopicName() { 39 | this.testTopology.streamOutput() 40 | .expectNextRecord() 41 | .hasKey(KEY).and().hasValue(VALUE); 42 | } 43 | 44 | @Test 45 | void shouldThrowExceptionForNonExistingStreamOutputTopic() { 46 | assertThatExceptionOfType(NoSuchElementException.class) 47 | .isThrownBy(() -> this.testTopology.streamOutput("non-existing")) 48 | .withMessage("Output topic 'non-existing' not found"); 49 | } 50 | 51 | @Test 52 | void shouldThrowExceptionForNonExistingTableOutputTopic() { 53 | assertThatExceptionOfType(NoSuchElementException.class) 54 | .isThrownBy(() -> this.testTopology.tableOutput("non-existing")) 55 | .withMessage("Output topic 'non-existing' not found"); 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/WordCountWithStaticTopologyTest.java: 
-------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit4.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.Rule; 30 | import org.junit.Test; 31 | 32 | public class WordCountWithStaticTopologyTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @Rule 36 | public final TestTopologyRule testTopology = new TestTopologyRule<>(this.app.getTopology(), 37 | WordCount.getKafkaProperties()); 38 | 39 | @Test 40 | public void shouldAggregateSameWordStream() { 41 | this.testTopology.input().add("bla") 42 | .add("blub") 43 | .add("bla"); 44 | 45 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 46 | .expectNextRecord().hasKey("bla").hasValue(1L) 47 | .expectNextRecord().hasKey("blub").hasValue(1L) 48 | .expectNextRecord().hasKey("bla").hasValue(2L) 49 | .expectNoMoreRecord(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/WordCountWitherTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit4.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.Rule; 30 | import org.junit.Test; 31 | 32 | public class WordCountWitherTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @Rule 36 | public final TestTopologyRule testTopology = 37 | new TestTopologyRule<>(this.app.getTopology(), WordCount.getKafkaProperties()) 38 | .withDefaultValueSerde(Serdes.String()); 39 | 40 | @Test 41 | public void shouldAggregateSameWordStream() { 42 | this.testTopology.input().add("bla") 43 | .add("blub") 44 | .add("bla"); 45 | 46 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 47 | .expectNextRecord().hasKey("bla").hasValue(1L) 48 | .expectNextRecord().hasKey("blub").hasValue(1L) 49 | .expectNextRecord().hasKey("bla").hasValue(2L) 50 | .expectNoMoreRecord(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/ForeignKeyJoinTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.ForeignKeyJoin; 28 | import org.junit.jupiter.api.AfterEach; 29 | import org.junit.jupiter.api.BeforeEach; 30 | import org.junit.jupiter.api.Test; 31 | 32 | class ForeignKeyJoinTest { 33 | 34 | private final TestTopology testTopology = 35 | new TestTopology<>(ForeignKeyJoin::getTopology, ForeignKeyJoin.getKafkaProperties()); 36 | 37 | @BeforeEach 38 | void start() { 39 | this.testTopology.start(); 40 | } 41 | 42 | @AfterEach 43 | void stop() { 44 | this.testTopology.stop(); 45 | } 46 | 47 | @Test 48 | void shouldIgnoreForeignKeyTopicsAsOutput() { 49 | this.testTopology.input(ForeignKeyJoin.LEFT_INPUT_TOPIC) 50 | .add("foo", "bar"); 51 | 52 | this.testTopology.input(ForeignKeyJoin.RIGHT_INPUT_TOPIC) 53 | .add("bar", "baz"); 54 | 55 | this.testTopology.streamOutput() 56 | .expectNextRecord().hasKey("foo").hasValue("barbaz") 57 | .expectNoMoreRecord(); 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/serde/JsonSerializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2023 bakdata GmbH 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.serde; 26 | 27 | import com.fasterxml.jackson.databind.ObjectMapper; 28 | import java.io.IOException; 29 | import java.util.Map; 30 | import lombok.NoArgsConstructor; 31 | import org.apache.kafka.common.errors.SerializationException; 32 | import org.apache.kafka.common.serialization.Serializer; 33 | 34 | @NoArgsConstructor 35 | public class JsonSerializer<T> implements Serializer<T> { 36 | private final ObjectMapper objectMapper = new ObjectMapper(); 37 | 38 | @Override 39 | public void configure(final Map<String, ?> props, final boolean isKey) { 40 | // nothing to configure 41 | } 42 | 43 | @Override 44 | public byte[] serialize(final String topic, final T data) { 45 | if (data == null) 46 | return null; 47 | 48 | try { 49 | return this.objectMapper.writeValueAsBytes(data); 50 | } catch (final IOException e) { 51 | throw new SerializationException("Error serializing JSON message", e); 52 | } 53 | } 54 | 55 | @Override 56 | public void close() { 57 | // nothing to close 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/WordCountWithStaticTopologyTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE.
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit5.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.Test; 30 | import org.junit.jupiter.api.extension.RegisterExtension; 31 | 32 | class WordCountWithStaticTopologyTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @RegisterExtension 36 | final TestTopologyExtension testTopology = new TestTopologyExtension<>(this.app.getTopology(), 37 | WordCount.getKafkaProperties()); 38 | 39 | @Test 40 | void shouldAggregateSameWordStream() { 41 | this.testTopology.input().add("bla") 42 | .add("blub") 43 | .add("bla"); 44 | 45 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 46 | .expectNextRecord().hasKey("bla").hasValue(1L) 47 | .expectNextRecord().hasKey("blub").hasValue(1L) 48 | .expectNextRecord().hasKey("bla").hasValue(2L) 49 | .expectNoMoreRecord(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/WordCountWitherTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.junit5.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.Test; 30 | import org.junit.jupiter.api.extension.RegisterExtension; 31 | 32 | class WordCountWitherTest { 33 | private final WordCount app = new WordCount(); 34 | 35 | @RegisterExtension 36 | final TestTopologyExtension testTopology = 37 | new TestTopologyExtension<>(this.app::getTopology, WordCount.getKafkaProperties()) 38 | .withDefaultValueSerde(Serdes.String()); 39 | 40 | @Test 41 | void shouldAggregateSameWordStream() { 42 | this.testTopology.input().add("bla") 43 | .add("blub") 44 | .add("bla"); 45 | 46 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 47 | .expectNextRecord().hasKey("bla").hasValue(1L) 48 | .expectNextRecord().hasKey("blub").hasValue(1L) 49 | .expectNextRecord().hasKey("bla").hasValue(2L) 50 | .expectNoMoreRecord(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/serde/JsonDeserializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2023 bakdata GmbH 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.serde; 26 | 27 | import com.fasterxml.jackson.databind.ObjectMapper; 28 | import java.io.IOException; 29 | import java.util.Map; 30 | import org.apache.kafka.common.errors.SerializationException; 31 | import org.apache.kafka.common.serialization.Deserializer; 32 | 33 | public class JsonDeserializer<T> implements Deserializer<T> { 34 | private final ObjectMapper objectMapper = new ObjectMapper(); 35 | private final Class<T> clazz; 36 | 37 | public JsonDeserializer(final Class<T> clazz) { 38 | this.clazz = clazz; 39 | } 40 | 41 | @Override 42 | public void configure(final Map<String, ?> props, final boolean isKey) { 43 | // nothing to configure 44 | } 45 | 46 | @Override 47 | public T deserialize(final String topic, final byte[] bytes) { 48 | if (bytes == null) 49 | return null; 50 | 51 | try { 52 | return this.objectMapper.readValue(bytes, this.clazz); 53 | } catch (final IOException e) { 54 | throw new SerializationException(e); 55 | } 56 | } 57 | 58 | @Override 59 | public void close() { 60 | // nothing to close 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/WordCountWithStaticTopologyTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE.
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.AfterEach; 30 | import org.junit.jupiter.api.BeforeEach; 31 | import org.junit.jupiter.api.Test; 32 | 33 | class WordCountWithStaticTopologyTest { 34 | private final WordCount app = new WordCount(); 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(this.app::getTopology, WordCount.getKafkaProperties()); 38 | 39 | @BeforeEach 40 | void start() { 41 | this.testTopology.start(); 42 | } 43 | 44 | @AfterEach 45 | void stop() { 46 | this.testTopology.stop(); 47 | } 48 | 49 | @Test 50 | void shouldAggregateSameWordStream() { 51 | this.testTopology.input().add("bla") 52 | .add("blub") 53 | .add("bla"); 54 | 55 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 56 | .expectNextRecord().hasKey("bla").hasValue(1L) 57 | .expectNextRecord().hasKey("blub").hasValue(1L) 58 | .expectNextRecord().hasKey("bla").hasValue(2L) 59 | .expectNoMoreRecord(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/WordCountWithDefaultSerdeTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.WordCount; 28 | import org.apache.kafka.common.serialization.Serdes; 29 | import org.junit.jupiter.api.AfterEach; 30 | import org.junit.jupiter.api.BeforeEach; 31 | import org.junit.jupiter.api.Test; 32 | 33 | class WordCountWithDefaultSerdeTest { 34 | private final WordCount app = new WordCount(); 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(this.app.getTopology(), WordCount.getKafkaProperties()) 38 | .withDefaultValueSerde(Serdes.String()); 39 | 40 | @BeforeEach 41 | void start() { 42 | this.testTopology.start(); 43 | } 44 | 45 | @AfterEach 46 | void stop() { 47 | this.testTopology.stop(); 48 | } 49 | 50 | @Test 51 | void shouldAggregateSameWordStream() { 52 | this.testTopology.input().add("bla") 53 | .add("blub") 54 | .add("bla"); 55 | 56 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 57 | .expectNextRecord().hasKey("bla").hasValue(1L) 58 | .expectNextRecord().hasKey("blub").hasValue(1L) 59 | .expectNextRecord().hasKey("bla").hasValue(2L) 60 | .expectNoMoreRecord(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/Mirror.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.Getter; 30 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 31 | import org.apache.kafka.streams.StreamsBuilder; 32 | import org.apache.kafka.streams.StreamsConfig; 33 | import org.apache.kafka.streams.Topology; 34 | import org.apache.kafka.streams.kstream.KStream; 35 | 36 | @Getter 37 | public class Mirror { 38 | private final String inputTopic = "input"; 39 | 40 | private final String outputTopic = "output"; 41 | 42 | public static Map<String, Object> getKafkaProperties() { 43 | final String brokers = "localhost:9092"; 44 | final Map<String, Object> kafkaConfig = new HashMap<>(); 45 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "mirror"); 46 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 47 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 48 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 49 | return kafkaConfig; 50 | } 51 | 52 | public Topology getTopology() { 53 | final StreamsBuilder builder = new StreamsBuilder(); 54 | final KStream<String, String> input = builder.stream(this.inputTopic); 55 | 56 | input.to(this.outputTopic); 57 | return builder.build(); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/NameJoinTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE.
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.NameJoinGlobalKTable; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 29 | import org.apache.kafka.common.serialization.Serdes; 30 | import org.junit.jupiter.api.AfterEach; 31 | import org.junit.jupiter.api.BeforeEach; 32 | import org.junit.jupiter.api.Test; 33 | 34 | class NameJoinTest { 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(NameJoinGlobalKTable::getTopology, NameJoinGlobalKTable.getKafkaProperties()); 38 | 39 | @BeforeEach 40 | void start() { 41 | this.testTopology.start(); 42 | } 43 | 44 | @AfterEach 45 | void stop() { 46 | this.testTopology.stop(); 47 | } 48 | 49 | @Test 50 | void testTopology() { 51 | this.testTopology.input(NameJoinGlobalKTable.NAME_INPUT).withSerde(Serdes.Long(), Serdes.String()) 52 | .add(1L, "Robinson") 53 | .add(2L, "Walker"); 54 | 55 | this.testTopology.input(NameJoinGlobalKTable.INPUT_TOPIC).withSerde(Serdes.Long(), Serdes.Long()) 56 | .add(1L, 1L) 57 | .add(2L, 2L); 58 | 59 | this.testTopology.streamOutput(NameJoinGlobalKTable.OUTPUT_TOPIC).withSerde(Serdes.Long(), Serdes.String()) 60 | .expectNextRecord().hasKey(1L).hasValue("Robinson") 61 | .expectNextRecord().hasKey(2L).hasValue("Walker") 62 | .expectNoMoreRecord(); 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/TopicExtractorApplication.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.experimental.UtilityClass; 30 | import org.apache.kafka.common.serialization.Serdes; 31 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 32 | import org.apache.kafka.streams.StreamsBuilder; 33 | import org.apache.kafka.streams.StreamsConfig; 34 | import org.apache.kafka.streams.Topology; 35 | import org.apache.kafka.streams.kstream.Consumed; 36 | 37 | @UtilityClass 38 | public class TopicExtractorApplication { 39 | public static final String INPUT_TOPIC = "input"; 40 | public static final String OUTPUT_TOPIC = "output"; 41 | 42 | 43 | public static Topology getTopology() { 44 | final StreamsBuilder builder = new StreamsBuilder(); 45 | builder.stream(INPUT_TOPIC, Consumed.with(Serdes.String(), Serdes.String())) 46 | .to((key, value, recordContext) -> OUTPUT_TOPIC); 47 | return builder.build(); 48 | } 49 | 50 | public static Map getProperties() { 51 | final Map properties = new HashMap<>(); 52 | properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "dynamic-test-stream"); 53 | properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:123"); 54 | properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 56 | return properties; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/NameJoinWithIntermediateTopicTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.NameJoinGlobalKTable; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 29 | import org.apache.kafka.common.serialization.Serdes; 30 | import org.junit.jupiter.api.AfterEach; 31 | import org.junit.jupiter.api.BeforeEach; 32 | import org.junit.jupiter.api.Test; 33 | 34 | class NameJoinWithIntermediateTopicTest { 35 | 36 | private final TestTopology testTopology = 37 | new TestTopology<>(NameJoinGlobalKTable::getTopologyWithIntermediateTopic, 38 | NameJoinGlobalKTable.getKafkaProperties()); 39 | 40 | @BeforeEach 41 | void start() { 42 | this.testTopology.start(); 43 | } 44 | 45 | @AfterEach 46 | void stop() { 47 | this.testTopology.stop(); 48 | } 49 | 50 | @Test 51 | void testTopology() { 52 | this.testTopology.input(NameJoinGlobalKTable.NAME_INPUT).withSerde(Serdes.Long(), Serdes.String()) 53 | .add(1L, "Robinson") 54 | .add(2L, "Walker"); 55 | 56 | this.testTopology.input(NameJoinGlobalKTable.INPUT_TOPIC).withSerde(Serdes.Long(), Serdes.Long()) 57 | .add(1L, 1L) 58 | .add(2L, 2L); 59 | 60 | this.testTopology.streamOutput(NameJoinGlobalKTable.OUTPUT_TOPIC).withSerde(Serdes.Long(), Serdes.String()) 61 | .expectNextRecord().hasKey(1L).hasValue("ROBINSON") 62 | .expectNextRecord().hasKey(2L).hasValue("WALKER") 63 | .expectNoMoreRecord(); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/CountInhabitantsWithAvroTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.CountInhabitantsWithAvro; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 30 | import org.apache.kafka.common.serialization.Serdes; 31 | import org.junit.jupiter.api.AfterEach; 32 | import org.junit.jupiter.api.BeforeEach; 33 | import org.junit.jupiter.api.Test; 34 | 35 | 36 | class CountInhabitantsWithAvroTest { 37 | 38 | private final TestTopology testTopology = 39 | new TestTopology<>(CountInhabitantsWithAvro::getTopology, CountInhabitantsWithAvro.getKafkaProperties()); 40 | 41 | @BeforeEach 42 | void start() { 43 | this.testTopology.start(); 44 | } 45 | 46 | @AfterEach 47 | void stop() { 48 | this.testTopology.stop(); 49 | } 50 | 51 | @Test 52 | void shouldAggregateInhabitants() { 53 | this.testTopology.input() 54 | .add(new Person("Huey", "City1")) 55 | .add(new Person("Dewey", "City2")) 56 | .add(new Person("Louie", "City1")); 57 | 58 | this.testTopology.tableOutput().withValueType(City.class) 59 | .expectNextRecord().hasKey("City1").hasValue(new City("City1", 2)) 60 | .expectNextRecord().hasKey("City2").hasValue(new City("City2", 1)) 61 | .expectNoMoreRecord(); 62 | } 63 | 64 | @Test 65 | void shouldWorkForEmptyInput() { 66 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 67 | .expectNoMoreRecord(); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorPatternTopicMixed.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import java.util.regex.Pattern; 30 | import lombok.Getter; 31 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 32 | import org.apache.kafka.streams.StreamsBuilder; 33 | import org.apache.kafka.streams.StreamsConfig; 34 | import org.apache.kafka.streams.Topology; 35 | import org.apache.kafka.streams.kstream.KStream; 36 | 37 | @Getter 38 | public class MirrorPatternTopicMixed { 39 | private final String inputPattern = ".*-input1"; 40 | private final String inputTopic = "input2"; 41 | 42 | private final String outputTopic = "output"; 43 | 44 | public static Map getKafkaProperties() { 45 | final String brokers = "localhost:9092"; 46 | final Map kafkaConfig = new HashMap<>(); 47 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 48 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 50 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 51 | return kafkaConfig; 52 | } 53 | 54 | public Topology getTopology() { 55 | final StreamsBuilder builder = new StreamsBuilder(); 56 | final KStream input1 = builder.stream(Pattern.compile(this.inputPattern)); 57 | final KStream input2 = builder.stream(this.inputTopic); 58 | 59 | input1.merge(input2).to(this.outputTopic); 60 | return builder.build(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorPattern.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import java.util.regex.Pattern; 30 | import lombok.Getter; 31 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 32 | import org.apache.kafka.streams.StreamsBuilder; 33 | import org.apache.kafka.streams.StreamsConfig; 34 | import org.apache.kafka.streams.Topology; 35 | import org.apache.kafka.streams.kstream.KStream; 36 | 37 | @Getter 38 | public class MirrorPattern { 39 | private final String inputPattern1 = ".*-input1"; 40 | private final String inputPattern2 = ".*-input2"; 41 | 42 | private final String outputTopic = "output"; 43 | 44 | public static Map getKafkaProperties() { 45 | final String brokers = "localhost:9092"; 46 | final Map kafkaConfig = new HashMap<>(); 47 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 48 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 50 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 51 | return kafkaConfig; 52 | } 53 | 54 | public Topology getTopology() { 55 | final StreamsBuilder builder = new StreamsBuilder(); 56 | final KStream input1 = builder.stream(Pattern.compile(this.inputPattern1)); 57 | final KStream input2 = builder.stream(Pattern.compile(this.inputPattern2)); 58 | 59 | input1.merge(input2).to(this.outputTopic); 60 | return builder.build(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorAvro.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 28 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 29 | import java.util.HashMap; 30 | import java.util.Map; 31 | import lombok.experimental.UtilityClass; 32 | import org.apache.avro.specific.SpecificRecord; 33 | import org.apache.kafka.streams.StreamsBuilder; 34 | import org.apache.kafka.streams.StreamsConfig; 35 | import org.apache.kafka.streams.Topology; 36 | import org.apache.kafka.streams.kstream.KStream; 37 | 38 | @UtilityClass 39 | public class MirrorAvro { 40 | private static final String INPUT_TOPIC = "input"; 41 | private static final String OUTPUT_TOPIC = "output"; 42 | 43 | public static Map getKafkaProperties() { 44 | final String brokers = "localhost:9092"; 45 | final Map kafkaConfig = new HashMap<>(); 46 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "mirror"); 47 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 48 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); 50 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); 51 | return kafkaConfig; 52 | } 53 | 54 | public static Topology getTopology() { 55 | final StreamsBuilder builder = new StreamsBuilder(); 56 | final KStream input = builder.stream(INPUT_TOPIC); 57 | 58 | input.to(OUTPUT_TOPIC); 59 | return builder.build(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/MirrorPatternTopicMixedTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static org.assertj.core.api.Assertions.assertThatExceptionOfType; 28 | 29 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorPatternTopicMixed; 30 | import java.util.NoSuchElementException; 31 | import org.junit.jupiter.api.AfterEach; 32 | import org.junit.jupiter.api.BeforeEach; 33 | import org.junit.jupiter.api.Test; 34 | 35 | class MirrorPatternTopicMixedTest { 36 | private final MirrorPatternTopicMixed app = new MirrorPatternTopicMixed(); 37 | 38 | private final TestTopology testTopology = new TestTopology<>(this.app::getTopology, 39 | MirrorPatternTopicMixed.getKafkaProperties()); 40 | 41 | @BeforeEach 42 | void start() { 43 | this.testTopology.start(); 44 | } 45 | 46 | @AfterEach 47 | void stop() { 48 | this.testTopology.stop(); 49 | } 50 | 51 | @Test 52 | void shouldConsumeFromPattern() { 53 | this.testTopology.input("example-input1") 54 | .add("key1", "value1") 55 | .add("key2", "value2"); 56 | this.testTopology.input("another-input1") 57 | .add("key3", "value3"); 58 | this.testTopology.input("input2") 59 | .add("key4", "value4"); 60 | 61 | this.testTopology.streamOutput() 62 | .expectNextRecord().hasKey("key1").hasValue("value1") 63 | .expectNextRecord().hasKey("key2").hasValue("value2") 64 | .expectNextRecord().hasKey("key3").hasValue("value3") 65 | .expectNextRecord().hasKey("key4").hasValue("value4") 66 | .expectNoMoreRecord(); 67 | } 68 | 69 | @Test 70 | void shouldThrowIfInputDoesNotMatchPattern() { 71 | assertThatExceptionOfType(NoSuchElementException.class) 72 | .isThrownBy(() -> this.testTopology.input("not-matching")); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/ForeignKeyJoin.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.experimental.UtilityClass; 30 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 31 | import org.apache.kafka.streams.StreamsBuilder; 32 | import org.apache.kafka.streams.StreamsConfig; 33 | import org.apache.kafka.streams.Topology; 34 | import org.apache.kafka.streams.kstream.KTable; 35 | import org.apache.kafka.streams.kstream.TableJoined; 36 | 37 | @UtilityClass 38 | public class ForeignKeyJoin { 39 | public static final String LEFT_INPUT_TOPIC = "left-input"; 40 | public static final String RIGHT_INPUT_TOPIC = "right-input"; 41 | public static final String OUTPUT_TOPIC = "join-output"; 42 | 43 | public static Map getKafkaProperties() { 44 | final String brokers = "localhost:9092"; 45 | final Map kafkaConfig = new HashMap<>(); 46 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "foreignKeyJoin"); 47 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 48 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 49 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 50 | return kafkaConfig; 51 | } 52 | 53 | public static Topology getTopology() { 54 | final StreamsBuilder builder = new StreamsBuilder(); 55 | final KTable leftTable = builder.table(LEFT_INPUT_TOPIC); 56 | final KTable rightTable = builder.table(RIGHT_INPUT_TOPIC); 57 | 58 | leftTable.join(rightTable, 59 | leftValue -> leftValue, 60 | (leftValue, rightValue) -> leftValue + rightValue, 61 | TableJoined.as("join")) 62 | .toStream() 63 | .to(OUTPUT_TOPIC); 64 | 65 | return builder.build(); 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | @rem SPDX-License-Identifier: Apache-2.0 17 | @rem 18 | 19 | @if "%DEBUG%"=="" @echo off 20 | @rem ########################################################################## 21 | @rem 22 | @rem Gradle startup script for Windows 23 | @rem 24 | @rem ########################################################################## 25 | 26 | @rem Set local scope for the variables with windows NT shell 27 | if "%OS%"=="Windows_NT" setlocal 28 | 29 | set DIRNAME=%~dp0 30 | if "%DIRNAME%"=="" set DIRNAME=. 31 | @rem This is normally unused 32 | set APP_BASE_NAME=%~n0 33 | set APP_HOME=%DIRNAME% 34 | 35 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 36 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 37 | 38 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
39 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 40 | 41 | @rem Find java.exe 42 | if defined JAVA_HOME goto findJavaFromJavaHome 43 | 44 | set JAVA_EXE=java.exe 45 | %JAVA_EXE% -version >NUL 2>&1 46 | if %ERRORLEVEL% equ 0 goto execute 47 | 48 | echo. 1>&2 49 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 50 | echo. 1>&2 51 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 52 | echo location of your Java installation. 1>&2 53 | 54 | goto fail 55 | 56 | :findJavaFromJavaHome 57 | set JAVA_HOME=%JAVA_HOME:"=% 58 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 59 | 60 | if exist "%JAVA_EXE%" goto execute 61 | 62 | echo. 1>&2 63 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 64 | echo. 1>&2 65 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 66 | echo location of your Java installation. 1>&2 67 | 68 | goto fail 69 | 70 | :execute 71 | @rem Setup the command line 72 | 73 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 74 | 75 | 76 | @rem Execute Gradle 77 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 78 | 79 | :end 80 | @rem End local scope for the variables with windows NT shell 81 | if %ERRORLEVEL% equ 0 goto mainEnd 82 | 83 | :fail 84 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 85 | rem the _cmd.exe /c_ return code! 86 | set EXIT_CODE=%ERRORLEVEL% 87 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 88 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 89 | exit /b %EXIT_CODE% 90 | 91 | :mainEnd 92 | if "%OS%"=="Windows_NT" endlocal 93 | 94 | :omega 95 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/CountInhabitantsWithProtoTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.CityOuterClass.City; 28 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.PersonOuterClass.Person; 29 | 30 | import com.bakdata.fluent_kafka_streams_tests.test_applications.CountInhabitantsWithProto; 31 | import org.apache.kafka.common.serialization.Serdes; 32 | import org.junit.jupiter.api.AfterEach; 33 | import org.junit.jupiter.api.BeforeEach; 34 | import org.junit.jupiter.api.Test; 35 | 36 | class CountInhabitantsWithProtoTest { 37 | 38 | private final TestTopology testTopology = 39 | new TestTopology<>(CountInhabitantsWithProto::getTopology, CountInhabitantsWithProto.getKafkaProperties()); 40 | 41 | static Person newPerson(final String name, final String city) { 42 | return Person.newBuilder().setName(name).setCity(city).build(); 43 | } 44 | 45 | static City newCity(final String name, final int inhabitants) { 46 | return City.newBuilder().setName(name).setInhabitants(inhabitants).build(); 47 | } 48 | 49 | @BeforeEach 50 | void start() { 51 | this.testTopology.start(); 52 | } 53 | 54 | @AfterEach 55 | void stop() { 56 | this.testTopology.stop(); 57 | } 58 | 59 | @Test 60 | void shouldAggregateInhabitants() { 61 | this.testTopology.input() 62 | .withValueSerde(CountInhabitantsWithProto.newPersonSerde()) 63 | .add("test", newPerson("Huey", "City1")) 64 | .add("test", newPerson("Dewey", "City2")) 65 | .add("test", newPerson("Louie", "City1")); 66 | 67 | this.testTopology.tableOutput().withValueSerde(CountInhabitantsWithProto.newCitySerde()) 68 | .expectNextRecord().hasKey("City1").hasValue(newCity("City1", 2)) 69 | .expectNextRecord().hasKey("City2").hasValue(newCity("City2", 1)) 70 | .expectNoMoreRecord(); 71 | } 72 | 73 | @Test 74 | void shouldWorkForEmptyInput() { 75 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 76 | .expectNoMoreRecord(); 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/test_applications/WordCount.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4.test_applications; 26 | 27 | import java.util.Arrays; 28 | import java.util.HashMap; 29 | import java.util.Map; 30 | import java.util.regex.Pattern; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.StreamsBuilder; 36 | import org.apache.kafka.streams.StreamsConfig; 37 | import org.apache.kafka.streams.Topology; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.KTable; 40 | import org.apache.kafka.streams.kstream.Produced; 41 | 42 | @Getter 43 | public class WordCount { 44 | private final String inputTopic = "wordcount-input"; 45 | 46 | private final String outputTopic = "wordcount-output"; 47 | 48 | public static Map getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 55 | return kafkaConfig; 56 | } 57 | 58 | public Topology getTopology() { 59 | final Serde stringSerde = Serdes.String(); 60 | final Serde longSerde = Serdes.Long(); 61 | 62 | final StreamsBuilder builder = new StreamsBuilder(); 63 | final KStream textLines = builder.stream(this.inputTopic); 64 | 65 | final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); 66 | final KTable wordCounts = textLines 67 | .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) 68 | .groupBy((key, word) -> word) 69 | .count(); 70 | 71 | wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde)); 72 | return builder.build(); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/test_applications/WordCount.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5.test_applications; 26 | 27 | import java.util.Arrays; 28 | import java.util.HashMap; 29 | import java.util.Map; 30 | import java.util.regex.Pattern; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.StreamsBuilder; 36 | import org.apache.kafka.streams.StreamsConfig; 37 | import org.apache.kafka.streams.Topology; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.KTable; 40 | import org.apache.kafka.streams.kstream.Produced; 41 | 42 | @Getter 43 | public class WordCount { 44 | private final String inputTopic = "wordcount-input"; 45 | 46 | private final String outputTopic = "wordcount-output"; 47 | 48 | public static Map getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 55 | return kafkaConfig; 56 | } 57 | 58 | public Topology getTopology() { 59 | final Serde stringSerde = Serdes.String(); 60 | final Serde longSerde = Serdes.Long(); 61 | 62 | final StreamsBuilder builder = new StreamsBuilder(); 63 | final KStream textLines = builder.stream(this.inputTopic); 64 | 65 | final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); 66 | final KTable wordCounts = textLines 67 | .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) 68 | .groupBy((key, word) -> word) 69 | .count(); 70 | 71 | wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde)); 72 | return builder.build(); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/WordCount.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 
15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.Arrays; 28 | import java.util.HashMap; 29 | import java.util.Map; 30 | import java.util.regex.Pattern; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.StreamsBuilder; 36 | import org.apache.kafka.streams.StreamsConfig; 37 | import org.apache.kafka.streams.Topology; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.KTable; 40 | import org.apache.kafka.streams.kstream.Materialized; 41 | import org.apache.kafka.streams.kstream.Produced; 42 | 43 | @Getter 44 | public class WordCount { 45 | private final String inputTopic = "wordcount-input"; 46 | 47 | private final String outputTopic = "wordcount-output"; 48 | 49 | public static Map getKafkaProperties() { 50 | final String brokers = "localhost:9092"; 51 | final Map kafkaConfig = new HashMap<>(); 52 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount"); 53 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 56 | return kafkaConfig; 57 | } 58 | 59 | public Topology getTopology() { 60 | final Serde stringSerde = Serdes.String(); 61 | final Serde longSerde = Serdes.Long(); 62 | 63 | final StreamsBuilder builder = new StreamsBuilder(); 64 | final KStream textLines = builder.stream(this.inputTopic); 65 | 66 | final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); 67 | final KTable wordCounts = textLines 68 | .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) 69 | .groupBy((key, word) -> word) 70 | .count(Materialized.as("count")); 71 | 72 | wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde)); 73 | return builder.build(); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/CountInhabitantsWithAvro.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this 
permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 29 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 30 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 31 | import java.util.HashMap; 32 | import java.util.Map; 33 | import lombok.experimental.UtilityClass; 34 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 35 | import org.apache.kafka.streams.KeyValue; 36 | import org.apache.kafka.streams.StreamsBuilder; 37 | import org.apache.kafka.streams.StreamsConfig; 38 | import org.apache.kafka.streams.Topology; 39 | import org.apache.kafka.streams.kstream.KStream; 40 | import org.apache.kafka.streams.kstream.KTable; 41 | 42 | @UtilityClass 43 | public class CountInhabitantsWithAvro { 44 | 45 | private static final String INPUT_TOPIC = "person-input"; 46 | private static final String OUTPUT_TOPIC = "city-output"; 47 | private static final String SCHEMA_REGISTRY_URL = "mock://"; 48 | 49 | public static Map getKafkaProperties() { 50 | final String brokers = "localhost:9092"; 51 | final Map kafkaConfig = new HashMap<>(); 52 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "inhabitants-per-city"); 53 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class); 56 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 57 | return kafkaConfig; 58 | } 59 | 60 | public static Topology getTopology() { 61 | final StreamsBuilder builder = new StreamsBuilder(); 62 | final KStream persons = builder.stream(INPUT_TOPIC); 63 | 64 | final KTable counts = persons 65 | .groupBy((name, person) -> person.getCity()) 66 | .count(); 67 | 68 | counts.toStream() 69 | .map((cityName, count) -> KeyValue.pair(cityName, new City(cityName, Math.toIntExact(count)))) 70 | .to(OUTPUT_TOPIC); 71 | 72 | return builder.build(); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/TableOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the 
Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import java.util.Iterator; 28 | import java.util.LinkedHashMap; 29 | import java.util.Map; 30 | import lombok.NonNull; 31 | import org.apache.kafka.clients.producer.ProducerRecord; 32 | import org.apache.kafka.streams.TopologyTestDriver; 33 | 34 | class TableOutput extends BaseOutput { 35 | private final Map> table = new LinkedHashMap<>(); 36 | private Iterator> tableIterator; 37 | 38 | TableOutput(final TopologyTestDriver testDriver, final String topic, final SerdeConfig serdeConfig) { 39 | super(testDriver, topic, serdeConfig); 40 | } 41 | 42 | /** 43 | *
Reads the next value from the output stream.<p> 44 | * Usually, you should not need to call this. The recommended way should be to use either 45 | * <ul> 46 | *     <li>the {@link #expectNextRecord()} and {@link #expectNoMoreRecord()} methods OR</li> 47 | *     <li>the iterable interface (via {@link #iterator()}.</li> 48 | * </ul>
49 | * 50 | * @return The next value in the output stream. {@code null} if no more values are present. 51 | */ 52 | @Override 53 | public ProducerRecord readOneRecord() { 54 | if (this.tableIterator == null) { 55 | this.tableIterator = this.iterator(); 56 | } 57 | 58 | // Emulate testDriver, which returns null on last read 59 | return this.tableIterator.hasNext() ? this.tableIterator.next() : null; 60 | } 61 | 62 | /** 63 | * Creates an iterator of {@link ProducerRecord} for the table output. Can only be read once. 64 | */ 65 | @Override 66 | public @NonNull Iterator> iterator() { 67 | ProducerRecord producerRecord = this.readFromTestDriver(); 68 | while (producerRecord != null) { 69 | this.table.put(producerRecord.key(), producerRecord); 70 | producerRecord = this.readFromTestDriver(); 71 | } 72 | return this.table.values().stream().iterator(); 73 | } 74 | 75 | // ================== 76 | // Non-public methods 77 | // ================== 78 | @Override 79 | protected TestOutput create(final TopologyTestDriver testDriver, final String topic, 80 | final SerdeConfig serdeConfig) { 81 | return new TableOutput<>(testDriver, topic, serdeConfig); 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/UserClicksPerMinute.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
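The TableOutput above reads with KTable-style upsert semantics: iterating it drains the test driver and keeps only the latest record per key, in the order the keys first appeared. A minimal usage sketch (not part of this repository), assuming a started TestTopology built from the Mirror application above with its String default serdes; the keys and values are purely illustrative:

    this.testTopology.input()
            .add("key", "first")
            .add("key", "second");

    // table semantics: only the latest value per key is visible
    this.testTopology.tableOutput()
            .expectNextRecord().hasKey("key").hasValue("second")
            .expectNoMoreRecord();

Reading the same topic through streamOutput() would instead surface both records in publication order.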
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.serde.JsonSerde; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickEvent; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickOutput; 30 | import java.time.Duration; 31 | import java.util.HashMap; 32 | import java.util.Map; 33 | import org.apache.kafka.common.serialization.Serdes; 34 | import org.apache.kafka.common.serialization.Serdes.IntegerSerde; 35 | import org.apache.kafka.streams.KeyValue; 36 | import org.apache.kafka.streams.StreamsBuilder; 37 | import org.apache.kafka.streams.StreamsConfig; 38 | import org.apache.kafka.streams.Topology; 39 | import org.apache.kafka.streams.kstream.KStream; 40 | import org.apache.kafka.streams.kstream.KTable; 41 | import org.apache.kafka.streams.kstream.Produced; 42 | import org.apache.kafka.streams.kstream.TimeWindows; 43 | import org.apache.kafka.streams.kstream.Windowed; 44 | 45 | public class UserClicksPerMinute { 46 | private static final String INPUT_TOPIC = "user-click-input"; 47 | 48 | private static final String OUTPUT_TOPIC = "user-click-output"; 49 | 50 | public static Map getKafkaProperties() { 51 | final String brokers = "localhost:9092"; 52 | final Map kafkaConfig = new HashMap<>(); 53 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "user-clicks-per-minute"); 54 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, IntegerSerde.class); 56 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class); 57 | return kafkaConfig; 58 | } 59 | 60 | public static Topology getTopology() { 61 | final StreamsBuilder builder = new StreamsBuilder(); 62 | final KStream clickEvents = builder.stream(INPUT_TOPIC); 63 | 64 | final KTable, Long> counts = clickEvents 65 | .groupByKey() 66 | .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofMinutes(1))) 67 | .count(); 68 | 69 | counts.toStream() 70 | .map((key, value) -> KeyValue.pair( 71 | key.key(), 72 | new ClickOutput(key.key(), value, key.window().start()))) 73 | .to(OUTPUT_TOPIC, Produced.with(Serdes.Integer(), new JsonSerde<>(ClickOutput.class))); 74 | 75 | return builder.build(); 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/StreamOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import java.util.Iterator; 28 | import java.util.NoSuchElementException; 29 | import lombok.NonNull; 30 | import org.apache.kafka.clients.producer.ProducerRecord; 31 | import org.apache.kafka.streams.TopologyTestDriver; 32 | 33 | /** 34 | *
Represents the {@link TestOutput} with {@link org.apache.kafka.streams.kstream.KStream} semantics. 35 | * 36 | * Note: The StreamOutput is a one-time iterable. Cache it if you need to iterate several times.
 37 | */ 38 | class StreamOutput<K, V> extends BaseOutput<K, V> { 39 | StreamOutput(final TopologyTestDriver testDriver, final String topic, final SerdeConfig serdeConfig) { 40 | super(testDriver, topic, serdeConfig); 41 | } 42 | 43 | /** 44 | * Reads the next value from the output stream.<p> Usually, you should not need to call this. The recommended way 45 | * should be to use either 46 | * <ul> 47 | *     <li>the {@link #expectNextRecord()} and {@link #expectNoMoreRecord()} methods OR</li> 48 | *     <li>the iterable interface (via {@link #iterator()}.</li> 49 | * </ul>
50 | * 51 | * @return The next value in the output stream. {@code null} if no more values are present.
 52 | */ 53 | @Override 54 | public ProducerRecord<K, V> readOneRecord() { 55 | return this.readFromTestDriver(); 56 | } 57 | 58 | /** 59 | * Creates an iterator of {@link ProducerRecord} for the stream output. Can only be read once.
 60 | */ 61 | @Override 62 | public @NonNull Iterator<ProducerRecord<K, V>> iterator() { 63 | return new Iterator<ProducerRecord<K, V>>() { 64 | private ProducerRecord<K, V> current = StreamOutput.this.readFromTestDriver(); 65 | 66 | @Override 67 | public boolean hasNext() { 68 | return this.current != null; 69 | } 70 | 71 | @Override 72 | public ProducerRecord<K, V> next() { 73 | if (!this.hasNext()) { 74 | throw new NoSuchElementException(); 75 | } 76 | final ProducerRecord<K, V> toReturn = this.current; 77 | this.current = StreamOutput.this.readFromTestDriver(); 78 | return toReturn; 79 | } 80 | }; 81 | } 82 | 83 | // ================== 84 | // Non-public methods 85 | // ================== 86 | @Override 87 | protected TestOutput<K, V> create(final TopologyTestDriver testDriver, final String topic, 88 | final SerdeConfig serdeConfig) { 89 | return new StreamOutput<>(testDriver, topic, serdeConfig); 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/MirrorAvroNonDefaultSerde.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
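Because the StreamOutput above is a one-time iterable, tests in this repository cache it before asserting on it more than once (see HeaderTest further below). A hedged sketch of that pattern, assuming the same Mirror-based setup and the Guava and AssertJ imports used by HeaderTest; the record literals are illustrative only:

    final List<ProducerRecord<String, String>> records =
            Lists.newArrayList(this.testTopology.streamOutput());
    // the iterable is exhausted now, but the cached list can be inspected repeatedly
    assertThat(records).extracting(ProducerRecord::key).containsExactly("key1", "key2");
    assertThat(records).extracting(ProducerRecord::value).containsExactly("value1", "value2");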
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.kafka.Preconfigured; 28 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 29 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 30 | import java.util.HashMap; 31 | import java.util.Map; 32 | import lombok.experimental.UtilityClass; 33 | import org.apache.avro.specific.SpecificRecord; 34 | import org.apache.kafka.common.serialization.Serde; 35 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 36 | import org.apache.kafka.streams.StreamsBuilder; 37 | import org.apache.kafka.streams.StreamsConfig; 38 | import org.apache.kafka.streams.Topology; 39 | import org.apache.kafka.streams.kstream.Consumed; 40 | import org.apache.kafka.streams.kstream.KStream; 41 | import org.apache.kafka.streams.kstream.Produced; 42 | 43 | @UtilityClass 44 | public class MirrorAvroNonDefaultSerde { 45 | private static final String INPUT_TOPIC = "input"; 46 | private static final String OUTPUT_TOPIC = "output"; 47 | 48 | public static Map getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "mirror"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://"); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 55 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 56 | return kafkaConfig; 57 | } 58 | 59 | public static Topology getTopology() { 60 | final StreamsBuilder builder = new StreamsBuilder(); 61 | final Preconfigured> keySerde = getKeySerde(); 62 | final Preconfigured> serde = getValueSerde(); 63 | final KStream input = builder.stream(INPUT_TOPIC, 64 | Consumed.with(keySerde.configureForKeys(getKafkaProperties()), 65 | serde.configureForValues(getKafkaProperties()))); 66 | input.to(OUTPUT_TOPIC, Produced.with(keySerde.configureForKeys(getKafkaProperties()), 67 | serde.configureForValues(getKafkaProperties()))); 68 | return builder.build(); 69 | } 70 | 71 | public static Preconfigured> getKeySerde() { 72 | return Preconfigured.create(new SpecificAvroSerde<>()); 73 | } 74 | 75 | public static Preconfigured> getValueSerde() { 76 | return Preconfigured.create(new SpecificAvroSerde<>()); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/HeaderTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 
15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static org.assertj.core.api.Assertions.assertThat; 28 | 29 | import com.bakdata.fluent_kafka_streams_tests.test_applications.Mirror; 30 | import com.google.common.collect.Lists; 31 | import java.util.List; 32 | import org.apache.kafka.clients.producer.ProducerRecord; 33 | import org.apache.kafka.common.header.internals.RecordHeaders; 34 | import org.junit.jupiter.api.AfterEach; 35 | import org.junit.jupiter.api.BeforeEach; 36 | import org.junit.jupiter.api.Test; 37 | 38 | class HeaderTest { 39 | private final Mirror app = new Mirror(); 40 | 41 | private final TestTopology testTopology = 42 | new TestTopology<>(this.app::getTopology, Mirror.getKafkaProperties()); 43 | 44 | @BeforeEach 45 | void start() { 46 | this.testTopology.start(); 47 | } 48 | 49 | @AfterEach 50 | void stop() { 51 | this.testTopology.stop(); 52 | } 53 | 54 | @Test 55 | void shouldAddHeaders() { 56 | this.testTopology.input() 57 | .add("key1", "value1", new RecordHeaders() 58 | .add("header1", new byte[]{0})) 59 | .add("key2", "value2", 1L, new RecordHeaders() 60 | .add("header1", new byte[]{1}) 61 | .add("header2", new byte[]{2, 3})); 62 | 63 | final List> records = Lists.newArrayList(this.testTopology.streamOutput()); 64 | assertThat(records) 65 | .hasSize(2) 66 | .anySatisfy(producerRecord -> { 67 | assertThat(producerRecord.key()).isEqualTo("key1"); 68 | assertThat(producerRecord.value()).isEqualTo("value1"); 69 | assertThat(producerRecord.timestamp()).isZero(); 70 | assertThat(producerRecord.headers().toArray()) 71 | .hasSize(1) 72 | .anySatisfy(header -> { 73 | assertThat(header.key()).isEqualTo("header1"); 74 | assertThat(header.value()).isEqualTo(new byte[]{0}); 75 | }); 76 | }) 77 | .anySatisfy(producerRecord -> { 78 | assertThat(producerRecord.key()).isEqualTo("key2"); 79 | assertThat(producerRecord.value()).isEqualTo("value2"); 80 | assertThat(producerRecord.timestamp()).isEqualTo(1L); 81 | assertThat(producerRecord.headers().toArray()) 82 | .hasSize(2) 83 | .anySatisfy(header -> { 84 | assertThat(header.key()).isEqualTo("header1"); 85 | assertThat(header.value()).isEqualTo(new byte[]{1}); 86 | }) 87 | .anySatisfy(header -> { 88 | assertThat(header.key()).isEqualTo("header2"); 89 | assertThat(header.value()).isEqualTo(new byte[]{2, 3}); 90 | }); 91 | }); 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/NameJoinGlobalKTable.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import java.util.HashMap; 28 | import java.util.Map; 29 | import lombok.experimental.UtilityClass; 30 | import org.apache.kafka.common.serialization.Serdes; 31 | import org.apache.kafka.common.serialization.Serdes.LongSerde; 32 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 33 | import org.apache.kafka.streams.StreamsBuilder; 34 | import org.apache.kafka.streams.StreamsConfig; 35 | import org.apache.kafka.streams.Topology; 36 | import org.apache.kafka.streams.kstream.Consumed; 37 | import org.apache.kafka.streams.kstream.GlobalKTable; 38 | import org.apache.kafka.streams.kstream.KStream; 39 | import org.apache.kafka.streams.kstream.Produced; 40 | 41 | @UtilityClass 42 | public class NameJoinGlobalKTable { 43 | public static final String INPUT_TOPIC = "id-input"; 44 | public static final String NAME_INPUT = "name-input"; 45 | public static final String INTERMEDIATE_TOPIC = "upper-case-input"; 46 | public static final String OUTPUT_TOPIC = "join-output"; 47 | 48 | public static Map getKafkaProperties() { 49 | final String brokers = "localhost:9092"; 50 | final Map kafkaConfig = new HashMap<>(); 51 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "globalKTableJoin"); 52 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 53 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, LongSerde.class); 54 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class); 55 | return kafkaConfig; 56 | } 57 | 58 | public static Topology getTopology() { 59 | final StreamsBuilder builder = new StreamsBuilder(); 60 | final KStream inputStream = 61 | builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long())); 62 | 63 | final GlobalKTable joinTable = builder.globalTable(NAME_INPUT); 64 | 65 | inputStream 66 | .join(joinTable, 67 | (id, valueId) -> valueId, 68 | (id, name) -> name) 69 | .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String())); 70 | 71 | return builder.build(); 72 | } 73 | 74 | public static Topology getTopologyWithIntermediateTopic() { 75 | final StreamsBuilder builder = new StreamsBuilder(); 76 | final KStream inputStream = 77 | builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long())); 78 | 79 | builder.stream(NAME_INPUT, Consumed.with(Serdes.Long(), Serdes.String())) 80 | .mapValues(name -> name.toUpperCase()) 81 | .to(INTERMEDIATE_TOPIC); 82 | 83 | final GlobalKTable joinTable = builder.globalTable(INTERMEDIATE_TOPIC); 84 | 85 | inputStream 86 | .join(joinTable, 87 | (id, valueId) -> valueId, 88 | (id, name) -> name) 89 | .to(OUTPUT_TOPIC, 
Produced.with(Serdes.Long(), Serdes.String())); 90 | 91 | return builder.build(); 92 | } 93 | 94 | } 95 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/MirrorAvroNonDefaultSerdeTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import static com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvroNonDefaultSerde.getKeySerde; 28 | import static com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvroNonDefaultSerde.getValueSerde; 29 | 30 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvroNonDefaultSerde; 31 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 32 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 33 | import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; 34 | import org.junit.jupiter.api.AfterEach; 35 | import org.junit.jupiter.api.BeforeEach; 36 | import org.junit.jupiter.api.Test; 37 | 38 | 39 | class MirrorAvroNonDefaultSerdeTest { 40 | 41 | private final TestTopology testTopology = 42 | new TestTopology<>(MirrorAvroNonDefaultSerde::getTopology, MirrorAvroNonDefaultSerde.getKafkaProperties()); 43 | 44 | @BeforeEach 45 | void start() { 46 | this.testTopology.start(); 47 | } 48 | 49 | @AfterEach 50 | void stop() { 51 | this.testTopology.stop(); 52 | } 53 | 54 | @Test 55 | void shouldConfigurePreconfiguredSerdes() { 56 | this.testTopology.input() 57 | .configureWithSerde(getKeySerde(), getValueSerde()) 58 | .add(new City("City1", 2), new Person("Huey", "City1")); 59 | 60 | this.testTopology.streamOutput() 61 | .configureWithSerde(getKeySerde(), getValueSerde()) 62 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 63 | .expectNoMoreRecord(); 64 | } 65 | 66 | @Test 67 | void shouldConfigureSerdes() { 68 | this.testTopology.input() 69 | .configureWithSerde(new SpecificAvroSerde<>(), new SpecificAvroSerde<>()) 70 | .add(new City("City1", 2), new Person("Huey", "City1")); 71 | 72 | this.testTopology.streamOutput() 73 | .configureWithSerde(new SpecificAvroSerde<>(), new SpecificAvroSerde<>()) 74 | .expectNextRecord().hasKey(new City("City1", 
2)).hasValue(new Person("Huey", "City1")) 75 | .expectNoMoreRecord(); 76 | } 77 | 78 | @Test 79 | void shouldConfigurePreconfiguredKeyAndValueSerdes() { 80 | this.testTopology.input() 81 | .configureWithKeySerde(getKeySerde()) 82 | .configureWithValueSerde(getValueSerde()) 83 | .add(new City("City1", 2), new Person("Huey", "City1")); 84 | 85 | this.testTopology.streamOutput() 86 | .configureWithKeySerde(getKeySerde()) 87 | .configureWithValueSerde(getValueSerde()) 88 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 89 | .expectNoMoreRecord(); 90 | } 91 | 92 | @Test 93 | void shouldConfigureKeyAndValueSerdes() { 94 | this.testTopology.input() 95 | .configureWithKeySerde(new SpecificAvroSerde<>()) 96 | .configureWithValueSerde(new SpecificAvroSerde<>()) 97 | .add(new City("City1", 2), new Person("Huey", "City1")); 98 | 99 | this.testTopology.streamOutput() 100 | .configureWithKeySerde(new SpecificAvroSerde<>()) 101 | .configureWithValueSerde(new SpecificAvroSerde<>()) 102 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 103 | .expectNoMoreRecord(); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/ErrorEventsPerMinute.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.serde.JsonSerde; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickEvent; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.ErrorOutput; 30 | import com.bakdata.fluent_kafka_streams_tests.test_types.StatusCode; 31 | import java.time.Duration; 32 | import java.util.HashMap; 33 | import java.util.Map; 34 | import lombok.Getter; 35 | import org.apache.kafka.common.serialization.Serdes; 36 | import org.apache.kafka.common.serialization.Serdes.IntegerSerde; 37 | import org.apache.kafka.streams.KeyValue; 38 | import org.apache.kafka.streams.StreamsBuilder; 39 | import org.apache.kafka.streams.StreamsConfig; 40 | import org.apache.kafka.streams.Topology; 41 | import org.apache.kafka.streams.kstream.Consumed; 42 | import org.apache.kafka.streams.kstream.Grouped; 43 | import org.apache.kafka.streams.kstream.Joined; 44 | import org.apache.kafka.streams.kstream.KStream; 45 | import org.apache.kafka.streams.kstream.KTable; 46 | import org.apache.kafka.streams.kstream.TimeWindows; 47 | import org.apache.kafka.streams.kstream.Windowed; 48 | 49 | @Getter 50 | public class ErrorEventsPerMinute { 51 | private final String clickInputTopic = "user-click-input"; 52 | 53 | private final String statusInputTopic = "status-input"; 54 | 55 | private final String errorOutputTopic = "user-error-output"; 56 | 57 | private final String alertTopic = "error-alert-output"; 58 | 59 | public static Map getKafkaProperties() { 60 | final String brokers = "localhost:9092"; 61 | final Map kafkaConfig = new HashMap<>(); 62 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "errors-per-minute"); 63 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 64 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, IntegerSerde.class); 65 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class); 66 | return kafkaConfig; 67 | } 68 | 69 | public Topology getTopology() { 70 | final StreamsBuilder builder = new StreamsBuilder(); 71 | 72 | // Click Events 73 | final KStream clickEvents = builder.stream(this.clickInputTopic, 74 | Consumed.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class))); 75 | 76 | final KTable, Long> counts = clickEvents 77 | .selectKey(((key, value) -> value.getStatus())) 78 | .filter(((key, value) -> key >= 400)) 79 | .groupByKey(Grouped.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class))) 80 | .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofMinutes(1))) // 1 Minute in ms 81 | .count(); 82 | 83 | // Status codes 84 | final KTable statusCodes = builder.table(this.statusInputTopic, 85 | Consumed.with(Serdes.Integer(), new JsonSerde<>(StatusCode.class))); 86 | 87 | // Join 88 | final KStream errors = counts.toStream() 89 | .map((key, value) -> KeyValue.pair( 90 | key.key(), 91 | new ErrorOutput(key.key(), value, key.window().start(), null /*empty definition*/))) 92 | .join(statusCodes, 93 | (countRecord, code) -> new ErrorOutput( 94 | countRecord.getStatusCode(), countRecord.getCount(), countRecord.getTime(), 95 | code.getDefinition()), 96 | Joined.valueSerde(new JsonSerde<>(ErrorOutput.class))); 97 | errors.to(this.errorOutputTopic); 98 | 99 | // Send alert if more than 5x a certain error code per minute 100 | errors.filter((key, errorOutput) -> errorOutput.getCount() > 5L).to(this.alertTopic); 101 | 102 | return builder.build(); 103 | } 104 | } 105 | 106 | 
-------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/SerdeConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.kafka.Configurator; 28 | import com.bakdata.kafka.Preconfigured; 29 | import lombok.AccessLevel; 30 | import lombok.Getter; 31 | import lombok.NonNull; 32 | import lombok.RequiredArgsConstructor; 33 | import org.apache.kafka.common.serialization.Serde; 34 | 35 | @RequiredArgsConstructor(access = AccessLevel.PRIVATE) 36 | final class SerdeConfig { 37 | @Getter 38 | private final Serde keySerde; 39 | @Getter 40 | private final Serde valueSerde; 41 | private final Serde defaultKeySerde; 42 | private final Serde defaultValueSerde; 43 | private final Configurator configurator; 44 | 45 | static SerdeConfig create(@NonNull final Serde keySerde, @NonNull final Serde valueSerde, 46 | final Configurator configurator) { 47 | return new SerdeConfig<>(keySerde, valueSerde, keySerde, valueSerde, configurator); 48 | } 49 | 50 | SerdeConfig withSerde(final Serde keySerde, final Serde valueSerde) { 51 | final Serde newKeySerde = keySerde == null ? this.getDefaultKeySerde() : keySerde; 52 | final Serde newValueSerde = valueSerde == null ? 
this.getDefaultValueSerde() : valueSerde; 53 | return new SerdeConfig<>(newKeySerde, newValueSerde, this.defaultKeySerde, this.defaultValueSerde, 54 | this.configurator); 55 | } 56 | 57 | SerdeConfig configureWithSerde(final Preconfigured> keySerde, 58 | final Preconfigured> valueSerde) { 59 | return this.withSerde(this.configureForKeys(keySerde), this.configureForValues(valueSerde)); 60 | } 61 | 62 | SerdeConfig configureWithSerde(final Serde keySerde, final Serde valueSerde) { 63 | return this.configureWithSerde(Preconfigured.create(keySerde), Preconfigured.create(valueSerde)); 64 | } 65 | 66 | SerdeConfig withKeySerde(final Serde keySerde) { 67 | return this.withSerde(keySerde, this.valueSerde); 68 | } 69 | 70 | SerdeConfig configureWithKeySerde(final Preconfigured> keySerde) { 71 | return this.withSerde(this.configureForKeys(keySerde), this.valueSerde); 72 | } 73 | 74 | SerdeConfig configureWithKeySerde(final Serde keySerde) { 75 | return this.configureWithKeySerde(Preconfigured.create(keySerde)); 76 | } 77 | 78 | SerdeConfig withValueSerde(final Serde valueSerde) { 79 | return this.withSerde(this.keySerde, valueSerde); 80 | } 81 | 82 | SerdeConfig configureWithValueSerde(final Preconfigured> valueSerde) { 83 | return this.withSerde(this.keySerde, this.configureForValues(valueSerde)); 84 | } 85 | 86 | SerdeConfig configureWithValueSerde(final Serde valueSerde) { 87 | return this.configureWithValueSerde(Preconfigured.create(valueSerde)); 88 | } 89 | 90 | SerdeConfig withTypes(final Class keyType, final Class valueType) { 91 | return (SerdeConfig) this; 92 | } 93 | 94 | SerdeConfig withKeyType(final Class keyType) { 95 | return (SerdeConfig) this; 96 | } 97 | 98 | SerdeConfig withValueType(final Class valueType) { 99 | return (SerdeConfig) this; 100 | } 101 | 102 | private Serde configureForKeys(final Preconfigured> keySerde) { 103 | return this.configurator.configureForKeys(keySerde); 104 | } 105 | 106 | private Serde configureForValues(final Preconfigured> valueSerde) { 107 | return this.configurator.configureForValues(valueSerde); 108 | } 109 | 110 | private Serde getDefaultKeySerde() { 111 | return (Serde) this.defaultKeySerde; 112 | } 113 | 114 | private Serde getDefaultValueSerde() { 115 | return (Serde) this.defaultValueSerde; 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/test_applications/CountInhabitantsWithProto.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.test_applications; 26 | 27 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.CityOuterClass.City; 28 | import static com.bakdata.fluent_kafka_streams_tests.test_types.proto.PersonOuterClass.Person; 29 | 30 | import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; 31 | import io.confluent.kafka.streams.serdes.protobuf.KafkaProtobufSerde; 32 | import java.util.HashMap; 33 | import java.util.Map; 34 | import lombok.experimental.UtilityClass; 35 | import org.apache.kafka.common.serialization.Serde; 36 | import org.apache.kafka.common.serialization.Serdes; 37 | import org.apache.kafka.common.serialization.Serdes.StringSerde; 38 | import org.apache.kafka.streams.KeyValue; 39 | import org.apache.kafka.streams.StreamsBuilder; 40 | import org.apache.kafka.streams.StreamsConfig; 41 | import org.apache.kafka.streams.Topology; 42 | import org.apache.kafka.streams.kstream.Consumed; 43 | import org.apache.kafka.streams.kstream.Grouped; 44 | import org.apache.kafka.streams.kstream.KStream; 45 | import org.apache.kafka.streams.kstream.KTable; 46 | import org.apache.kafka.streams.kstream.Materialized; 47 | import org.apache.kafka.streams.kstream.Produced; 48 | 49 | @UtilityClass 50 | public class CountInhabitantsWithProto { 51 | private static final String INPUT_TOPIC = "person-input"; 52 | private static final String OUTPUT_TOPIC = "city-output"; 53 | private static final String SCHEMA_REGISTRY_URL = "mock://"; 54 | 55 | public static KafkaProtobufSerde newPersonSerde() { 56 | final KafkaProtobufSerde serde = new KafkaProtobufSerde<>(Person.class); 57 | final Map config = new HashMap<>(); 58 | config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 59 | serde.configure(config, false); 60 | return serde; 61 | } 62 | 63 | public static KafkaProtobufSerde newCitySerde() { 64 | final KafkaProtobufSerde serde = new KafkaProtobufSerde<>(City.class); 65 | final Map config = new HashMap<>(); 66 | config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 67 | serde.configure(config, false); 68 | return serde; 69 | } 70 | 71 | public static Map getKafkaProperties() { 72 | final String brokers = "localhost:9092"; 73 | final Map kafkaConfig = new HashMap<>(); 74 | kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "inhabitants-per-city"); 75 | kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 76 | kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class); 77 | kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, KafkaProtobufSerde.class); 78 | kafkaConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL); 79 | return kafkaConfig; 80 | } 81 | 82 | public static Topology getTopology() { 83 | final KafkaProtobufSerde personSerde = newPersonSerde(); 84 | final KafkaProtobufSerde citySerde = newCitySerde(); 85 | final Serde stringSerde = Serdes.String(); 86 | final Serde longSerde = Serdes.Long(); 87 | 88 | final StreamsBuilder builder = new StreamsBuilder(); 89 | final KStream persons = 90 | builder.stream(INPUT_TOPIC, Consumed.with(stringSerde, personSerde)); 91 | 92 | final KTable 
counts = persons 93 | .groupBy((name, person) -> person.getCity(), Grouped.with(stringSerde, personSerde)) 94 | .count(Materialized.with(stringSerde, longSerde)); 95 | 96 | counts.toStream() 97 | .map((cityName, count) -> KeyValue.pair( 98 | cityName, 99 | City.newBuilder().setName(cityName).setInhabitants(Math.toIntExact(count)).build() 100 | )) 101 | .to(OUTPUT_TOPIC, Produced.with(stringSerde, citySerde)); 102 | 103 | return builder.build(); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/TestInputAndOutputTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.test_applications.MirrorAvro; 28 | import com.bakdata.fluent_kafka_streams_tests.test_types.City; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.Person; 30 | import com.bakdata.kafka.Preconfigured; 31 | import org.apache.kafka.common.serialization.Serdes; 32 | import org.junit.jupiter.api.AfterEach; 33 | import org.junit.jupiter.api.BeforeEach; 34 | import org.junit.jupiter.api.Test; 35 | 36 | 37 | class TestInputAndOutputTest { 38 | 39 | private final TestTopology testTopology = 40 | new TestTopology<>(MirrorAvro::getTopology, MirrorAvro.getKafkaProperties()); 41 | 42 | @BeforeEach 43 | void start() { 44 | this.testTopology.start(); 45 | } 46 | 47 | @AfterEach 48 | void stop() { 49 | this.testTopology.stop(); 50 | } 51 | 52 | @Test 53 | void shouldUseTypes() { 54 | this.testTopology.input() 55 | .withTypes(City.class, Person.class) 56 | .add(new City("City1", 2), new Person("Huey", "City1")); 57 | 58 | this.testTopology.streamOutput() 59 | .withTypes(City.class, Person.class) 60 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 61 | .expectNoMoreRecord(); 62 | } 63 | 64 | @Test 65 | void shouldUseValueTypes() { 66 | this.testTopology.input() 67 | .withValueType(Person.class) 68 | .add(new Person("Huey", "City1"), new Person("Huey", "City1")); 69 | 70 | this.testTopology.streamOutput() 71 | .withValueType(Person.class) 72 | .expectNextRecord().hasKey(new Person("Huey", "City1")).hasValue(new Person("Huey", "City1")) 73 | .expectNoMoreRecord(); 74 | } 75 | 76 | @Test 77 | void shouldUseKeyTypes() { 78 | this.testTopology.input() 79 | .withKeyType(City.class) 80 | .add(new City("City1", 2), new City("City1", 2)); 81 | 82 | this.testTopology.streamOutput() 83 | .withKeyType(City.class) 84 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new City("City1", 2)) 85 | .expectNoMoreRecord(); 86 | } 87 | 88 | @Test 89 | void shouldConfigureDefaultSerdes() { 90 | this.testTopology.input() 91 | .configureWithSerde(Preconfigured.defaultSerde(), Preconfigured.defaultSerde()) 92 | .add(new City("City1", 2), new Person("Huey", "City1")); 93 | 94 | this.testTopology.streamOutput() 95 | .configureWithSerde(Preconfigured.defaultSerde(), Preconfigured.defaultSerde()) 96 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 97 | .expectNoMoreRecord(); 98 | } 99 | 100 | @Test 101 | void shouldRememberDefaultSerdes() { 102 | this.testTopology.input() 103 | .withSerde(Serdes.String(), Serdes.String()) 104 | .configureWithSerde(Preconfigured.defaultSerde(), Preconfigured.defaultSerde()) 105 | .add(new City("City1", 2), new Person("Huey", "City1")); 106 | 107 | this.testTopology.streamOutput() 108 | .withSerde(Serdes.String(), Serdes.String()) 109 | .configureWithSerde(Preconfigured.defaultSerde(), Preconfigured.defaultSerde()) 110 | .expectNextRecord().hasKey(new City("City1", 2)).hasValue(new Person("Huey", "City1")) 111 | .expectNoMoreRecord(); 112 | } 113 | 114 | @Test 115 | void shouldVerifyNullKeys() { 116 | this.testTopology.input() 117 | .add(null, new City("City1", 2)); 118 | 119 | this.testTopology.streamOutput() 120 | .expectNextRecord().hasKey(null).hasValue(new City("City1", 2)) 121 | .expectNoMoreRecord(); 122 | } 123 | 124 | @Test 125 | void shouldVerifyNullValues() { 126 | this.testTopology.input() 127 | .add(new Person("Huey", "City1"), null); 128 | 129 | 
this.testTopology.streamOutput() 130 | .expectNextRecord().hasKey(new Person("Huey", "City1")).hasValue(null) 131 | .expectNoMoreRecord(); 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/main/java/com/bakdata/fluent_kafka_streams_tests/junit5/TestTopologyExtension.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.TestTopology; 28 | import java.util.Map; 29 | import java.util.function.Function; 30 | import java.util.function.Supplier; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.streams.Topology; 34 | import org.junit.jupiter.api.extension.AfterEachCallback; 35 | import org.junit.jupiter.api.extension.BeforeEachCallback; 36 | import org.junit.jupiter.api.extension.ExtensionContext; 37 | 38 | /** 39 | *
<p>Represents the main interaction with Kafka for testing purposes. Handles all inputs and outputs of the 40 | * {@link Topology} under test. This should be registered as an extension in your JUnit tests, to ensure that certain 41 | * setup and teardown methods are called. <p>Usage: 42 | * <pre>
 43 |  * class WordCountTest {
 44 |  *     private final WordCount app = new WordCount();
 45 |  *
 46 |  *     {@literal @RegisterExtension
 47 |  *     final TestTopologyExtension testTopology =
 48 |  *         new TestTopologyExtension<>(this.app::getTopology, this.app.getKafkaProperties());}
 49 |  *
 50 |  *     {@literal @Test}
 51 |  *     void shouldAggregateSameWordStream() {
 52 |  *         this.testTopology.input()
 53 |  *             .add("cat")
 54 |  *             .add("dog")
 55 |  *             .add("cat");
 56 |  *
 57 |  *         this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
 58 |  *             .expectNextRecord().hasKey("cat").hasValue(1L)
 59 |  *             .expectNextRecord().hasKey("dog").hasValue(1L)
 60 |  *             .expectNextRecord().hasKey("cat").hasValue(2L)
 61 |  *             .expectNoMoreRecord();
 62 |  *     }
 63 |  * }
 64 |  * </pre> 65 |  * <p>With {@code app} being any Kafka Streams application that you want to test.
66 | * @param Default type of keys 67 | * @param Default type of values 68 | */ 69 | @Getter 70 | public class TestTopologyExtension extends TestTopology 71 | implements BeforeEachCallback, AfterEachCallback { 72 | 73 | public TestTopologyExtension( 74 | final Function, ? extends Topology> topologyFactory, 75 | final Map properties) { 76 | super(topologyFactory, properties); 77 | } 78 | 79 | public TestTopologyExtension( 80 | final Supplier topologyFactory, final Map properties) { 81 | super(topologyFactory, properties); 82 | } 83 | 84 | public TestTopologyExtension(final Topology topology, final Map properties) { 85 | super(topology, properties); 86 | } 87 | 88 | protected TestTopologyExtension( 89 | final Function, ? extends Topology> topologyFactory, 90 | final Map userProperties, 91 | final Serde defaultKeySerde, final Serde defaultValueSerde) { 92 | super(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 93 | } 94 | 95 | @Override 96 | public void afterEach(final ExtensionContext context) { 97 | this.stop(); 98 | } 99 | 100 | @Override 101 | public void beforeEach(final ExtensionContext context) { 102 | this.start(); 103 | } 104 | 105 | @Override 106 | public TestTopologyExtension withDefaultValueSerde(final Serde defaultValueSerde) { 107 | return (TestTopologyExtension) super.withDefaultValueSerde(defaultValueSerde); 108 | } 109 | 110 | @Override 111 | public TestTopologyExtension withDefaultKeySerde(final Serde defaultKeySerde) { 112 | return (TestTopologyExtension) super.withDefaultKeySerde(defaultKeySerde); 113 | } 114 | 115 | @Override 116 | public TestTopologyExtension withDefaultSerde(final Serde defaultKeySerde, 117 | final Serde defaultValueSerde) { 118 | return (TestTopologyExtension) super.withDefaultSerde(defaultKeySerde, defaultValueSerde); 119 | } 120 | 121 | @Override 122 | protected TestTopology with( 123 | final Function, ? extends Topology> topologyFactory, 124 | final Map userProperties, final Serde defaultKeySerde, 125 | final Serde defaultValueSerde) { 126 | return new TestTopologyExtension<>(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit4/src/main/java/com/bakdata/fluent_kafka_streams_tests/junit4/TestTopologyRule.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit4; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.TestTopology; 28 | import java.util.Map; 29 | import java.util.function.Function; 30 | import java.util.function.Supplier; 31 | import lombok.Getter; 32 | import org.apache.kafka.common.serialization.Serde; 33 | import org.apache.kafka.streams.Topology; 34 | import org.junit.rules.TestRule; 35 | import org.junit.runner.Description; 36 | import org.junit.runners.model.Statement; 37 | 38 | /** 39 | *
<p>Represents the main interaction with Kafka for testing purposes. Handles all inputs and outputs of the 40 | * {@link Topology} under test. This should be registered as a rule in your JUnit tests, to ensure that certain 41 | * setup and teardown methods are called. <p>Usage: 42 | * <pre>
 43 |  * public class WordCountTest {
 44 |  *     private final WordCount app = new WordCount();
 45 |  *
 46 |  *     {@literal @Rule
 47 |  *     public final TestTopologyRule testTopology =
 48 |  *         new TestTopologyRule<>(this.app::getTopology, this.app.getKafkaProperties());}
 49 |  *
 50 |  *     {@literal @Test}
 51 |  *     public void shouldAggregateSameWordStream() {
 52 |  *         this.testTopology.input()
 53 |  *             .add("cat")
 54 |  *             .add("dog")
 55 |  *             .add("cat");
 56 |  *
 57 |  *         this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
 58 |  *             .expectNextRecord().hasKey("cat").hasValue(1L)
 59 |  *             .expectNextRecord().hasKey("dog").hasValue(1L)
 60 |  *             .expectNextRecord().hasKey("cat").hasValue(2L)
 61 |  *             .expectNoMoreRecord();
 62 |  *     }
 63 |  * }
 64 |  * </pre> 65 |  * <p>With {@code app} being any Kafka Streams application that you want to test.
66 | * @param Default type of keys 67 | * @param Default type of values 68 | */ 69 | @Getter 70 | public class TestTopologyRule extends TestTopology 71 | implements TestRule { 72 | public TestTopologyRule( 73 | final Function, ? extends Topology> topologyFactory, 74 | final Map properties) { 75 | super(topologyFactory, properties); 76 | } 77 | 78 | public TestTopologyRule( 79 | final Supplier topologyFactory, final Map properties) { 80 | super(topologyFactory, properties); 81 | } 82 | 83 | public TestTopologyRule(final Topology topology, final Map properties) { 84 | super(topology, properties); 85 | } 86 | 87 | protected TestTopologyRule( 88 | final Function, ? extends Topology> topologyFactory, 89 | final Map userProperties, 90 | final Serde defaultKeySerde, final Serde defaultValueSerde) { 91 | super(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 92 | } 93 | 94 | @Override 95 | public Statement apply(final Statement base, final Description description) { 96 | return new Statement() { 97 | @Override 98 | public void evaluate() throws Throwable { 99 | TestTopologyRule.this.start(); 100 | try { 101 | base.evaluate(); 102 | } finally { 103 | TestTopologyRule.this.stop(); 104 | } 105 | } 106 | }; 107 | } 108 | 109 | @Override 110 | public TestTopologyRule withDefaultValueSerde(final Serde defaultValueSerde) { 111 | return (TestTopologyRule) super.withDefaultValueSerde(defaultValueSerde); 112 | } 113 | 114 | @Override 115 | public TestTopologyRule withDefaultKeySerde(final Serde defaultKeySerde) { 116 | return (TestTopologyRule) super.withDefaultKeySerde(defaultKeySerde); 117 | } 118 | 119 | @Override 120 | public TestTopologyRule withDefaultSerde(final Serde defaultKeySerde, 121 | final Serde defaultValueSerde) { 122 | return (TestTopologyRule) super.withDefaultSerde(defaultKeySerde, defaultValueSerde); 123 | } 124 | 125 | @Override 126 | protected TestTopologyRule with( 127 | final Function, ? extends Topology> topologyFactory, 128 | final Map userProperties, final Serde defaultKeySerde, 129 | final Serde defaultValueSerde) { 130 | return new TestTopologyRule<>(topologyFactory, userProperties, defaultKeySerde, defaultValueSerde); 131 | } 132 | 133 | } 134 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/Expectation.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import java.util.Objects; 28 | import java.util.function.Consumer; 29 | import lombok.RequiredArgsConstructor; 30 | import org.apache.kafka.clients.producer.ProducerRecord; 31 | 32 | /** 33 | * Represents a single output {@link ProducerRecord} from {@link TestOutput} to be tested. 34 | * 35 | * @param the key type of the record under test 36 | * @param the value type of the record under test 37 | */ 38 | @RequiredArgsConstructor 39 | public class Expectation { 40 | private final ProducerRecord lastRecord; 41 | private final TestOutput output; 42 | 43 | /** 44 | * Asserts whether a record exists. 45 | */ 46 | public Expectation isPresent() { 47 | if (this.lastRecord == null) { 48 | throw new AssertionError("No more records found"); 49 | } 50 | return this.and(); 51 | } 52 | 53 | /** 54 | * Checks for the equality of the {@link ProducerRecord#key()} and {@code expectedKey}. 55 | * 56 | * @param expectedKey key to expect 57 | * @return the current {@code Expectation} chain 58 | */ 59 | public Expectation hasKey(final K expectedKey) { 60 | this.isPresent(); 61 | if (!Objects.equals(this.lastRecord.key(), expectedKey)) { 62 | throw new AssertionError(String.format("Record key does not match. Expected '%s' but got '%s'", expectedKey, 63 | this.lastRecord.key())); 64 | } 65 | return this.and(); 66 | } 67 | 68 | /** 69 | * Forwards {@link ProducerRecord#key()} to the provided condition in order make assertions using another 70 | * framework. 71 | * 72 | * @param requirements consumer that accepts the current record's key 73 | * @return the current {@code Expectation} chain 74 | */ 75 | public Expectation hasKeySatisfying(final Consumer requirements) { 76 | this.isPresent(); 77 | requirements.accept(this.lastRecord.key()); 78 | return this.and(); 79 | } 80 | 81 | /** 82 | * Checks for the equality of the {@link ProducerRecord#value()} and {@code expectedValue}. 83 | * @param expectedValue value to expect 84 | * @return the current {@code Expectation} chain 85 | */ 86 | public Expectation hasValue(final V expectedValue) { 87 | this.isPresent(); 88 | if (!Objects.equals(this.lastRecord.value(), expectedValue)) { 89 | throw new AssertionError( 90 | String.format("Record value does not match. Expected '%s' but got '%s'", expectedValue, 91 | this.lastRecord.value())); 92 | } 93 | return this.and(); 94 | } 95 | 96 | /** 97 | * Forwards {@link ProducerRecord#value()} to the provided condition in order make assertions using another 98 | * framework. 99 | * 100 | * @param requirements consumer that accepts the current record's value 101 | * @return the current {@code Expectation} chain 102 | */ 103 | public Expectation hasValueSatisfying(final Consumer requirements) { 104 | this.isPresent(); 105 | requirements.accept(this.lastRecord.value()); 106 | return this.and(); 107 | } 108 | 109 | /** 110 | * Concatenates calls to this Expectation. It is not necessary to call this method, but it can be seen as a more 111 | * readable alternative to simple chaining. 112 | * @return this 113 | */ 114 | public Expectation and() { 115 | return this; 116 | } 117 | 118 | /** 119 | *
<p>Reads the next record and creates an {@code Expectation} for it. 120 |      * <p>This is logically equivalent to {@link TestOutput#expectNextRecord()}. 121 |      * <p>This method's main purpose is to allow chaining: 122 |      * <pre>{@code
123 |      * myOutput.expectNextRecord()
124 |      *         .expectNextRecord()
125 |      *         .expectNoMoreRecord();
126 |      * }</pre> 127 | * 128 | * @return An {@code Expectation} containing the next record from the output. 129 | */ 130 | public Expectation<K, V> expectNextRecord() { 131 | return this.output.expectNextRecord(); 132 | } 133 | 134 | /** 135 | *
<p>Reads the next record from the output and expects it to be the end of output. 136 |      * <p>This is logically equivalent to {@link TestOutput#expectNoMoreRecord()}. 137 |      * <p>This method's main purpose is to allow chaining: 138 |      * <pre>{@code
139 |      * myOutput.expectNextRecord()
140 |      *         .expectNextRecord()
141 |      *         .expectNoMoreRecord();
142 |      * }</pre> 143 | * 144 | * @return An {@code Expectation} containing the next record from the output. 145 | */ 146 | public Expectation<K, V> expectNoMoreRecord() { 147 | return this.output.expectNoMoreRecord(); 148 | } 149 | 150 | /** 151 | *
<p>Asserts that there are no records present, i.e., the end of the output has been reached. 152 |      * <p>This method should be used when no records at all are expected.
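 * <p>For example, assuming {@code myOutput} is a {@link TestOutput} as in the chaining examples above, an empty
 * output can be asserted with:
 * <pre>{@code
 * myOutput.expectNextRecord().toBeEmpty();
 * }</pre>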
153 | * @return the current {@code Expectation} chain 154 | */ 155 | public Expectation toBeEmpty() { 156 | if (this.lastRecord != null) { 157 | throw new AssertionError( 158 | String.format("More records found. {key='%s', value='%s'}", this.lastRecord.key(), this.lastRecord.value())); 159 | } 160 | return this.and(); 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/UserClicksPerMinuteTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.fluent_kafka_streams_tests.serde.JsonSerde; 28 | import com.bakdata.fluent_kafka_streams_tests.test_applications.UserClicksPerMinute; 29 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickEvent; 30 | import com.bakdata.fluent_kafka_streams_tests.test_types.ClickOutput; 31 | import java.util.concurrent.TimeUnit; 32 | import org.apache.kafka.common.serialization.Serdes; 33 | import org.junit.jupiter.api.AfterEach; 34 | import org.junit.jupiter.api.BeforeEach; 35 | import org.junit.jupiter.api.Test; 36 | 37 | class UserClicksPerMinuteTest { 38 | private static final int USER = 1; 39 | private static final int USER1 = 1; 40 | private static final int USER2 = 2; 41 | 42 | private final TestTopology testTopology = 43 | new TestTopology<>(UserClicksPerMinute::getTopology, UserClicksPerMinute.getKafkaProperties()); 44 | 45 | @BeforeEach 46 | void start() { 47 | this.testTopology.start(); 48 | } 49 | 50 | @AfterEach 51 | void stop() { 52 | this.testTopology.stop(); 53 | } 54 | 55 | @Test 56 | void shouldCountSingleUserSingleEventCorrectlyStream() { 57 | final long time = TimeUnit.MINUTES.toMillis(1); 58 | this.testTopology.input().at(time).add(USER, new ClickEvent(USER)); 59 | 60 | this.testTopology.streamOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 61 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time)) 62 | .expectNoMoreRecord(); 63 | } 64 | 65 | @Test 66 | void shouldCountSingleUserSingleEventCorrectlyTable() { 67 | final long time = TimeUnit.MINUTES.toMillis(1); 68 | this.testTopology.input().at(time).add(USER, new ClickEvent(USER)); 69 | 70 | this.testTopology.tableOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 71 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time)) 72 | .expectNoMoreRecord(); 73 | } 74 | 75 | @Test 76 | void shouldCountSingleUserSingleEventCorrectlyExplicitTime() { 77 | this.testTopology.input().at(1, TimeUnit.HOURS).add(USER, new ClickEvent(USER)); 78 | 79 | this.testTopology.tableOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 80 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, TimeUnit.HOURS.toMillis(1))) 81 | .expectNoMoreRecord(); 82 | } 83 | 84 | @Test 85 | void shouldCountSingleUserSingleEventCorrectlyExplicitTimeWithoutAt() { 86 | final long time = TimeUnit.MINUTES.toMillis(1); 87 | this.testTopology.input() 88 | .add(USER, new ClickEvent(USER), time) 89 | .add(USER, new ClickEvent(USER), time); 90 | 91 | this.testTopology.tableOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 92 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 2L, time)) 93 | .expectNoMoreRecord(); 94 | } 95 | 96 | @Test 97 | void shouldCountSingleUserMultipleEventCorrectly() { 98 | // Window timestamps 99 | final long time1 = TimeUnit.MINUTES.toMillis(1); 100 | final long time2 = time1 + TimeUnit.MINUTES.toMillis(1); 101 | 102 | this.testTopology.input() 103 | .at(time1).add(USER, new ClickEvent(USER)) 104 | .at(time1 + 10).add(USER, new ClickEvent(USER)) 105 | .at(time1 + 20).add(USER, new ClickEvent(USER)) 106 | .at(time2).add(USER, new ClickEvent(USER)); 107 | 108 | this.testTopology.streamOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 109 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time1)) 110 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 2L, time1)) 111 | 
.expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 3L, time1)) 112 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1L, time2)) 113 | .expectNoMoreRecord(); 114 | } 115 | 116 | @Test 117 | void shouldCountMultiUserMultipleEventCorrectly() { 118 | // Window timestamps 119 | final long time1 = TimeUnit.MINUTES.toMillis(1); 120 | final long time2 = time1 + TimeUnit.MINUTES.toMillis(1); 121 | 122 | this.testTopology.input() 123 | // First window 124 | .at(time1).add(new ClickEvent(USER1).getUserId(), new ClickEvent(USER1)) 125 | .at(time1 + 10).add(new ClickEvent(USER2).getUserId(), new ClickEvent(USER2)) 126 | .at(time1 + 20).add(new ClickEvent(USER1).getUserId(), new ClickEvent(USER1)) 127 | // Second window 128 | .at(time2).add(new ClickEvent(USER2).getUserId(), new ClickEvent(USER2)) 129 | .at(time2 + 10).add(new ClickEvent(USER1).getUserId(), new ClickEvent(USER1)) 130 | .at(time2 + 20).add(new ClickEvent(USER2).getUserId(), new ClickEvent(USER2)); 131 | 132 | this.testTopology.streamOutput().withValueSerde(new JsonSerde<>(ClickOutput.class)) 133 | .expectNextRecord().hasKey(USER1).hasValue(new ClickOutput(USER1, 1L, time1)) 134 | .expectNextRecord().hasKey(USER2).hasValue(new ClickOutput(USER2, 1L, time1)) 135 | .expectNextRecord().hasKey(USER1).hasValue(new ClickOutput(USER1, 2L, time1)) 136 | 137 | .expectNextRecord().hasKey(USER2).hasValue(new ClickOutput(USER2, 1L, time2)) 138 | .expectNextRecord().hasKey(USER1).hasValue(new ClickOutput(USER1, 1L, time2)) 139 | .expectNextRecord().hasKey(USER2).hasValue(new ClickOutput(USER2, 2L, time2)) 140 | .expectNoMoreRecord(); 141 | } 142 | 143 | @Test 144 | void shouldWorkWithExplicitKeySerdes() { 145 | final long time = TimeUnit.MINUTES.toMillis(1); 146 | this.testTopology.input().withKeySerde(Serdes.Integer()) 147 | .at(time).add(USER, new ClickEvent(USER)); 148 | 149 | this.testTopology.streamOutput() 150 | .withKeySerde(Serdes.Integer()) 151 | .withValueSerde(new JsonSerde<>(ClickOutput.class)) 152 | .expectNextRecord().hasKey(USER).hasValue(new ClickOutput(USER, 1, time)); 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/BaseOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.kafka.Preconfigured; 28 | import java.util.ArrayList; 29 | import java.util.List; 30 | import org.apache.kafka.clients.producer.ProducerRecord; 31 | import org.apache.kafka.common.serialization.Serde; 32 | import org.apache.kafka.streams.TestOutputTopic; 33 | import org.apache.kafka.streams.TopologyTestDriver; 34 | import org.apache.kafka.streams.test.TestRecord; 35 | 36 | abstract class BaseOutput implements TestOutput { 37 | private final TopologyTestDriver testDriver; 38 | private final TestOutputTopic testOutputTopic; 39 | private final String topic; 40 | private final SerdeConfig serdeConfig; 41 | 42 | protected BaseOutput(final TopologyTestDriver testDriver, final String topic, final SerdeConfig serdeConfig) { 43 | this.testDriver = testDriver; 44 | this.topic = topic; 45 | this.serdeConfig = serdeConfig; 46 | 47 | this.testOutputTopic = this.testDriver.createOutputTopic(this.topic, 48 | this.serdeConfig.getKeySerde().deserializer(), 49 | serdeConfig.getValueSerde().deserializer() 50 | ); 51 | } 52 | 53 | /** 54 | * Set new serde for this output.
55 | * 56 | * @param keySerde The serializer/deserializer to be used for the keys in the output. 57 | * @param valueSerde The serializer/deserializer to be used for the values in the output. 58 | */ 59 | @Override 60 | public TestOutput withSerde(final Serde keySerde, final Serde valueSerde) { 61 | return this.with(this.serdeConfig.withSerde(keySerde, valueSerde)); 62 | } 63 | 64 | @Override 65 | public TestOutput configureWithSerde(final Preconfigured> keySerde, 66 | final Preconfigured> valueSerde) { 67 | return this.with(this.serdeConfig.configureWithSerde(keySerde, valueSerde)); 68 | } 69 | 70 | @Override 71 | public TestOutput configureWithSerde(final Serde keySerde, final Serde valueSerde) { 72 | return this.with(this.serdeConfig.configureWithSerde(keySerde, valueSerde)); 73 | } 74 | 75 | /** 76 | * Set new key serde for this output.
77 | */ 78 | @Override 79 | public TestOutput withKeySerde(final Serde keySerde) { 80 | return this.with(this.serdeConfig.withKeySerde(keySerde)); 81 | } 82 | 83 | @Override 84 | public TestOutput configureWithKeySerde(final Preconfigured> keySerde) { 85 | return this.with(this.serdeConfig.configureWithKeySerde(keySerde)); 86 | } 87 | 88 | @Override 89 | public TestOutput configureWithKeySerde(final Serde keySerde) { 90 | return this.with(this.serdeConfig.configureWithKeySerde(keySerde)); 91 | } 92 | 93 | /** 94 | * Set new value serde for this output.
95 | */ 96 | @Override 97 | public TestOutput withValueSerde(final Serde valueSerde) { 98 | return this.with(this.serdeConfig.withValueSerde(valueSerde)); 99 | } 100 | 101 | @Override 102 | public TestOutput configureWithValueSerde(final Preconfigured> valueSerde) { 103 | return this.with(this.serdeConfig.configureWithValueSerde(valueSerde)); 104 | } 105 | 106 | @Override 107 | public TestOutput configureWithValueSerde(final Serde valueSerde) { 108 | return this.with(this.serdeConfig.configureWithValueSerde(valueSerde)); 109 | } 110 | 111 | @Override 112 | public TestOutput withTypes(final Class keyType, final Class valueType) { 113 | return this.with(this.serdeConfig.withTypes(keyType, valueType)); 114 | } 115 | 116 | @Override 117 | public TestOutput withKeyType(final Class keyType) { 118 | return this.with(this.serdeConfig.withKeyType(keyType)); 119 | } 120 | 121 | @Override 122 | public TestOutput withValueType(final Class valueType) { 123 | return this.with(this.serdeConfig.withValueType(valueType)); 124 | } 125 | 126 | /** 127 | * Reads the next record and creates an {@link Expectation} for it.
128 |      *
 129 |      * Note that calling this method by itself without chaining at least one of the {@code has*()} methods will not
 130 |      * check for the existence of a next record!
 131 |      *
 132 |      * @return An {@link Expectation} containing the next record from the output.
 133 |      */
 134 |     @Override
 135 |     public Expectation<K, V> expectNextRecord() {
 136 |         return new Expectation<>(this.readOneRecord(), this);
 137 |     }
 138 | 
 139 |     /**
 140 |      * Reads the next record from the output and expects it to be the end of output.
 141 |      *
 142 |      * @return An {@link Expectation} containing the next record from the output.
 143 |      */
 144 |     @Override
 145 |     public Expectation<K, V> expectNoMoreRecord() {
 146 |         return this.expectNextRecord().toBeEmpty();
 147 |     }
 148 | 
 149 |     /**
 150 |      * Interpret the output with {@link org.apache.kafka.streams.kstream.KTable} semantics (each key only once).
 151 |      * Note: once the first value of the stream has been read or the iterator has been called, you cannot switch between
 152 |      * the output types anymore.
 153 |      */
 154 |     @Override
 155 |     public TestOutput<K, V> asTable() {
 156 |         return new TableOutput<>(this.testDriver, this.topic, this.serdeConfig);
 157 |     }
 158 | 
 159 |     /**
 160 |      * Interpret the output with {@link org.apache.kafka.streams.kstream.KStream} semantics (each key multiple times).
 161 |      *
This is the default, there should usually be no need to call this method.
Note: once the first value 162 | * of the stream has been read or the iterator has be called, you cannot switch between the output types 163 | * anymore.
164 | */ 165 | @Override 166 | public TestOutput asStream() { 167 | return new StreamOutput<>(this.testDriver, this.topic, this.serdeConfig); 168 | } 169 | 170 | /** 171 | * Convert the output to a {@link java.util.List}. In case the current instance of this class is a 172 | * {@link StreamOutput}, the output will be converted to List with {@link org.apache.kafka.streams.kstream.KStream} 173 | * semantics (each key multiple times). In case the current instance of this class is a {@link TableOutput}, the 174 | * output will be converted to List with {@link org.apache.kafka.streams.kstream.KTable} semantics (each key only 175 | * once). 176 | * 177 | * @return A {@link java.util.List} representing the output 178 | */ 179 | @Override 180 | public List> toList() { 181 | final List> list = new ArrayList<>(); 182 | this.iterator().forEachRemaining(list::add); 183 | return list; 184 | } 185 | 186 | // ================== 187 | // Non-public methods 188 | // ================== 189 | protected ProducerRecord readFromTestDriver() { 190 | // the Expectation implementation requires null if the topic is empty but outputTopic.readRecord() throws a 191 | // NoSuchElementException. Thus, we have to check beforehand. 192 | if (this.testOutputTopic.isEmpty()) { 193 | return null; 194 | } 195 | final TestRecord testRecord = this.testOutputTopic.readRecord(); 196 | // partition is always 0, see TopologyTestDriver.PARTITION_ID 197 | return new ProducerRecord<>(this.topic, 0, testRecord.timestamp(), testRecord.key(), testRecord.value(), 198 | testRecord.getHeaders()); 199 | } 200 | 201 | protected abstract TestOutput create(TopologyTestDriver testDriver, String topic, 202 | SerdeConfig serdeConfig); 203 | 204 | private TestOutput with(final SerdeConfig newSerdeConfig) { 205 | return this.create(this.testDriver, this.topic, newSerdeConfig); 206 | } 207 | } 208 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://dev.azure.com/bakdata/public/_apis/build/status/bakdata.fluent-kafka-streams-tests?branchName=master)](https://dev.azure.com/bakdata/public/_build/latest?definitionId=2&branchName=master) 2 | [![Sonarcloud status](https://sonarcloud.io/api/project_badges/measure?project=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests&metric=alert_status)](https://sonarcloud.io/dashboard?id=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests) 3 | [![Code coverage](https://sonarcloud.io/api/project_badges/measure?project=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests&metric=coverage)](https://sonarcloud.io/dashboard?id=com.bakdata.fluent-kafka-streams-tests%3Afluent-kafka-streams-tests) 4 | [![Maven](https://img.shields.io/maven-central/v/com.bakdata.fluent-kafka-streams-tests/fluent-kafka-streams-tests-junit5.svg)](https://search.maven.org/search?q=g:com.bakdata.fluent-kafka-streams-tests%20AND%20a:fluent-kafka-streams-tests*&core=gav) 5 | 6 | Fluent Kafka Streams Tests 7 | ========================= 8 | 9 | Write clean and concise tests for your Kafka Streams application. 10 | 11 | You can find a [blog post on medium](https://medium.com/bakdata/fluent-kafka-streams-tests-e641785171ec) with some examples and detailed explanations of how Fluent Kafka Streams Tests work. 12 | 13 | ## Getting Started 14 | 15 | You can add Fluent Kafka Streams Tests via Maven Central. 
16 | 
 17 | #### Gradle
 18 | ```gradle
 19 | compile group: 'com.bakdata.fluent-kafka-streams-tests', name: 'fluent-kafka-streams-tests-junit5', version: '2.1.0'
 20 | ```
 21 | 
 22 | #### Maven
 23 | ```xml
 24 | <dependency>
 25 |     <groupId>com.bakdata.fluent-kafka-streams-tests</groupId>
 26 |     <artifactId>fluent-kafka-streams-tests-junit5</artifactId>
 27 |     <version>2.1.0</version>
 28 | </dependency>
 29 | ```
 30 | 
 31 | There is also a junit4 version and one without any dependencies on a specific testing framework.
 32 | 
 33 | For other build tools or versions, refer to the [overview of sonatype](https://search.maven.org/search?q=g:com.bakdata.fluent-kafka-streams-tests%20AND%20a:fluent-kafka-streams-*&core=gav).
 34 | 
 35 | ## Using it to Write Tests
 36 | 
 37 | Here are two example tests which show you how to use Fluent Kafka Streams Tests.
 38 | 
 39 | #### Word Count Test
 40 | Assume you have a Word Count Kafka Streams application, called `WordCount`, and want to test it correctly.
 41 | First, start by creating a new test class with your application.
 42 | 
 43 | ```java
 44 | class WordCountTest {
 45 |     private final WordCount app = new WordCount();
 46 | }
 47 | ```
 48 | 
 49 | Then, set up the `TestTopology`.
 50 | 
 51 | ```java
 52 | class WordCountTest {
 53 |     private final WordCount app = new WordCount();
 54 | 
 55 |     @RegisterExtension
 56 |     final TestTopologyExtension testTopology =
 57 |         new TestTopologyExtension<>(this.app::getTopology, this.app.getKafkaProperties());
 58 | }
 59 | ```
 60 | 
 61 | The `TestTopology` takes care of all the inputs, processing, and outputs of your application.
 62 | For it to do that, you need to register it as an extension (JUnit5), so certain setup/teardown methods are called.
 63 | The constructor expects a topology factory (for a fresh topology in each test) that creates the topology under test.
 64 | 
 65 | Additionally, the properties of the `KafkaClient` need to be specified.
 66 | Broker and application-id must be present (Kafka testutil limitation), but are ignored.
 67 | Most importantly, if the application expects default serdes for key and value, these must be present in the properties or
 68 | explicitly specified with `withDefaultKeySerde(Serde serde)` and/or `withDefaultValueSerde(Serde serde)`.
 69 | 
 70 | To test your application, you can simply write a JUnit test.
 71 | ```java
 72 | class WordCountTest {
 73 |     private final WordCount app = new WordCount();
 74 | 
 75 |     @RegisterExtension
 76 |     final TestTopologyExtension testTopology =
 77 |         new TestTopologyExtension<>(this.app::getTopology, this.app.getKafkaProperties());
 78 | 
 79 |     @Test
 80 |     void shouldAggregateSameWordStream() {
 81 |         this.testTopology.input()
 82 |             .add("cat")
 83 |             .add("dog")
 84 |             .add("cat");
 85 | 
 86 |         this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
 87 |             .expectNextRecord().hasKey("cat").hasValue(1L)
 88 |             .expectNextRecord().hasKey("dog").hasValue(1L)
 89 |             .expectNextRecord().hasKey("cat").hasValue(2L)
 90 |             .expectNoMoreRecord();
 91 |     }
 92 | }
 93 | ```
 94 | 
 95 | See the tests for the [junit4](fluent-kafka-streams-tests-junit4/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit4/WordCountTest.java) and [framework agnostic](fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests/WordCountTest.java) setup.
 96 | 
 97 | The `TestTopology` has a method `.input()` to retrieve the input topic (or `.input(String topic)` if more than one input topic is present).
 98 | You can simply add values to your input stream by calling `.add(V value)` or `.add(K key, V value)`. 
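If the topology under test is time-based (for example a windowed aggregation), you can also control record timestamps on the input, either with `.at(...)` for all subsequent records or by passing the timestamp directly to `.add(key, value, timestamp)`. The following sketch is modeled on the `UserClicksPerMinute` test application that ships with this repository's test sources; it assumes a `TestTopology` built from `UserClicksPerMinute::getTopology`, and the `ClickEvent`, `ClickOutput`, and `JsonSerde` types come from those test sources rather than from the library API.

```java
@Test
void shouldCountClicksPerMinute() {
    final long firstMinute = TimeUnit.MINUTES.toMillis(1);

    // at(...) sets the event time for the records added after it
    this.testTopology.input()
            .at(firstMinute).add(1, new ClickEvent(1))
            .at(firstMinute + 10).add(1, new ClickEvent(1));

    // Serdes can be overridden per output if the topology's defaults do not match
    this.testTopology.streamOutput()
            .withKeySerde(Serdes.Integer())
            .withValueSerde(new JsonSerde<>(ClickOutput.class))
            .expectNextRecord().hasKey(1).hasValue(new ClickOutput(1, 1L, firstMinute))
            .expectNextRecord().hasKey(1).hasValue(new ClickOutput(1, 2L, firstMinute))
            .expectNoMoreRecord();
}
```

The same `withSerde`/`withKeySerde`/`withValueSerde` methods exist on `.tableOutput()` as well, so the serde override works regardless of which output semantics you choose.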
99 | 
 100 | To get the output, `TestTopology` provides two methods: `.streamOutput()` and `.tableOutput()`.
 101 | They behave just like the input with regard to the number of output topics.
 102 | Using the stream version simulates Kafka's stream-semantics, meaning that a key can be present many times in an output stream, whereas the table-semantics only output the newest value for each key.
 103 | 
 104 | To check the output records, you can call `.expectNextRecord()` and then chain `.hasKey(K key)`, `.hasKeySatisfying(Consumer<K> requirements)`, `.hasValue(V value)` or `.hasValueSatisfying(Consumer<V> requirements)` to this call.
 105 | Note that calling `.expectNextRecord()` by itself without chaining at least one of the `.has*` methods will not check for the existence of a next record!
 106 | 
 107 | Once you expect no further records, call `.expectNoMoreRecord()` to indicate the end of the output stream.
 108 | 
 109 | #### Using Other Test Frameworks to Check Output
 110 | We intentionally kept the API for output checking slim, because there are many tools out there which focus on doing exactly that.
 111 | The `TestOutput` class implements the `Iterable` interface, so you can use your favorite tool to test iterables.
 112 | 
 113 | Here is an example using [AssertJ](http://joel-costigliola.github.io/assertj/).
 114 | 
 115 | ```java
 116 | @Test
 117 | void shouldReturnCorrectIteratorTable() {
 118 |     this.testTopology.input()
 119 |         .add("cat")
 120 |         .add("dog")
 121 |         .add("bird");
 122 | 
 123 |     assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()))
 124 |         .extracting(ProducerRecord::key)
 125 |         .containsAll(List.of("cat", "dog", "bird"));
 126 | }
 127 | ```
 128 | 
 129 | There is also an API to consume a record's key or value in order to embed another assertion framework into our API.
 130 | 
 131 | ```java
 132 | @Test
 133 | void shouldReturnCorrectIteratorTable() {
 134 |     this.testTopology.input()
 135 |         .add("cat");
 136 | 
 137 |     this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
 138 |         .expectNextRecord()
 139 |         .hasKeySatisfying(key -> assertThat(key).isEqualTo("cat"))
 140 |         .hasValueSatisfying(value -> assertThat(value).isEqualTo(1L))
 141 |         .expectNoMoreRecord();
 142 | }
 143 | ```
 144 | 
 145 | Alternatively, you can convert the output to a `List` for use with your assertion framework. Here is an example of this with [AssertJ](http://joel-costigliola.github.io/assertj/).
 146 | 
 147 | ```java
 148 | @Test
 149 | void shouldConvertStreamOutputToList() {
 150 |     this.testTopology.input()
 151 |         .add("cat")
 152 |         .add("dog")
 153 |         .add("bird");
 154 | 
 155 |     final List<ProducerRecord<String, Long>> outputs = this.testTopology.streamOutput()
 156 |         .withSerde(Serdes.String(), Serdes.Long())
 157 |         .toList();
 158 | 
 159 |     assertThat(outputs)
 160 |         .extracting(ProducerRecord::key)
 161 |         .containsExactly("cat", "dog", "bird");
 162 |     assertThat(outputs)
 163 |         .extracting(ProducerRecord::value)
 164 |         .containsExactly(1L, 1L, 1L);
 165 | }
 166 | ```
 167 | 
 168 | #### More Examples
 169 | 
 170 | You can find many more tests in
 171 | [this repository's test code](https://github.com/bakdata/fluent-kafka-streams-tests/tree/master/fluent-kafka-streams-tests/src/test/java/com/bakdata/fluent_kafka_streams_tests).
 172 | 
 173 | 
 174 | ## Development
 175 | 
 176 | If you want to contribute to this project, you can simply clone the repository and build it via Gradle.
 177 | All dependencies should be included in the Gradle files; there are no external prerequisites. 
178 | 179 | ```bash 180 | > git clone git@github.com:bakdata/fluent-kafka-streams-tests.git 181 | > cd fluent-kafka-streams-tests && ./gradlew build 182 | ``` 183 | 184 | Please note, that we have [code styles](https://github.com/bakdata/bakdata-code-styles) for Java. 185 | They are basically the Google style guide, with some small modifications. 186 | 187 | ## Contributing 188 | 189 | We are happy if you want to contribute to this project. 190 | If you find any bugs or have suggestions for improvements, please open an issue. 191 | We are also happy to accept your PRs. 192 | Just open an issue beforehand and let us know what you want to do and why. 193 | 194 | ## License 195 | This project is licensed under the MIT license. 196 | Have a look at the [LICENSE](https://github.com/bakdata/fluent-kafka-streams-tests/blob/master/LICENSE) for more details. 197 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # SPDX-License-Identifier: Apache-2.0 19 | # 20 | 21 | ############################################################################## 22 | # 23 | # Gradle start up script for POSIX generated by Gradle. 24 | # 25 | # Important for running: 26 | # 27 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 28 | # noncompliant, but you have some other compliant shell such as ksh or 29 | # bash, then to run this script, type that shell name before the whole 30 | # command line, like: 31 | # 32 | # ksh Gradle 33 | # 34 | # Busybox and similar reduced shells will NOT work, because this script 35 | # requires all of these POSIX shell features: 36 | # * functions; 37 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 38 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 39 | # * compound commands having a testable exit status, especially «case»; 40 | # * various built-in commands including «command», «set», and «ulimit». 41 | # 42 | # Important for patching: 43 | # 44 | # (2) This script targets any POSIX shell, so it avoids extensions provided 45 | # by Bash, Ksh, etc; in particular arrays are avoided. 46 | # 47 | # The "traditional" practice of packing multiple parameters into a 48 | # space-separated string is a well documented source of bugs and security 49 | # problems, so this is (mostly) avoided, by progressively accumulating 50 | # options in "$@", and eventually passing that to Java. 51 | # 52 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 53 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 54 | # see the in-line comments for details. 55 | # 56 | # There are tweaks for specific operating systems such as AIX, CygWin, 57 | # Darwin, MinGW, and NonStop. 
58 | # 59 | # (3) This script is generated from the Groovy template 60 | # https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 61 | # within the Gradle project. 62 | # 63 | # You can find Gradle at https://github.com/gradle/gradle/. 64 | # 65 | ############################################################################## 66 | 67 | # Attempt to set APP_HOME 68 | 69 | # Resolve links: $0 may be a link 70 | app_path=$0 71 | 72 | # Need this for daisy-chained symlinks. 73 | while 74 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 75 | [ -h "$app_path" ] 76 | do 77 | ls=$( ls -ld "$app_path" ) 78 | link=${ls#*' -> '} 79 | case $link in #( 80 | /*) app_path=$link ;; #( 81 | *) app_path=$APP_HOME$link ;; 82 | esac 83 | done 84 | 85 | # This is normally unused 86 | # shellcheck disable=SC2034 87 | APP_BASE_NAME=${0##*/} 88 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 89 | APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit 90 | 91 | # Use the maximum available, or set MAX_FD != -1 to use that value. 92 | MAX_FD=maximum 93 | 94 | warn () { 95 | echo "$*" 96 | } >&2 97 | 98 | die () { 99 | echo 100 | echo "$*" 101 | echo 102 | exit 1 103 | } >&2 104 | 105 | # OS specific support (must be 'true' or 'false'). 106 | cygwin=false 107 | msys=false 108 | darwin=false 109 | nonstop=false 110 | case "$( uname )" in #( 111 | CYGWIN* ) cygwin=true ;; #( 112 | Darwin* ) darwin=true ;; #( 113 | MSYS* | MINGW* ) msys=true ;; #( 114 | NONSTOP* ) nonstop=true ;; 115 | esac 116 | 117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 118 | 119 | 120 | # Determine the Java command to use to start the JVM. 121 | if [ -n "$JAVA_HOME" ] ; then 122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 123 | # IBM's JDK on AIX uses strange locations for the executables 124 | JAVACMD=$JAVA_HOME/jre/sh/java 125 | else 126 | JAVACMD=$JAVA_HOME/bin/java 127 | fi 128 | if [ ! -x "$JAVACMD" ] ; then 129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 130 | 131 | Please set the JAVA_HOME variable in your environment to match the 132 | location of your Java installation." 133 | fi 134 | else 135 | JAVACMD=java 136 | if ! command -v java >/dev/null 2>&1 137 | then 138 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 139 | 140 | Please set the JAVA_HOME variable in your environment to match the 141 | location of your Java installation." 142 | fi 143 | fi 144 | 145 | # Increase the maximum file descriptors if we can. 146 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 147 | case $MAX_FD in #( 148 | max*) 149 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 150 | # shellcheck disable=SC2039,SC3045 151 | MAX_FD=$( ulimit -H -n ) || 152 | warn "Could not query maximum file descriptor limit" 153 | esac 154 | case $MAX_FD in #( 155 | '' | soft) :;; #( 156 | *) 157 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 
158 | # shellcheck disable=SC2039,SC3045 159 | ulimit -n "$MAX_FD" || 160 | warn "Could not set maximum file descriptor limit to $MAX_FD" 161 | esac 162 | fi 163 | 164 | # Collect all arguments for the java command, stacking in reverse order: 165 | # * args from the command line 166 | # * the main class name 167 | # * -classpath 168 | # * -D...appname settings 169 | # * --module-path (only if needed) 170 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 171 | 172 | # For Cygwin or MSYS, switch paths to Windows format before running java 173 | if "$cygwin" || "$msys" ; then 174 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 175 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 176 | 177 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 178 | 179 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 180 | for arg do 181 | if 182 | case $arg in #( 183 | -*) false ;; # don't mess with options #( 184 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 185 | [ -e "$t" ] ;; #( 186 | *) false ;; 187 | esac 188 | then 189 | arg=$( cygpath --path --ignore --mixed "$arg" ) 190 | fi 191 | # Roll the args list around exactly as many times as the number of 192 | # args, so each arg winds up back in the position where it started, but 193 | # possibly modified. 194 | # 195 | # NB: a `for` loop captures its iteration list before it begins, so 196 | # changing the positional parameters here affects neither the number of 197 | # iterations, nor the values presented in `arg`. 198 | shift # remove old arg 199 | set -- "$@" "$arg" # push replacement arg 200 | done 201 | fi 202 | 203 | 204 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 205 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 206 | 207 | # Collect all arguments for the java command: 208 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 209 | # and any embedded shellness will be escaped. 210 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 211 | # treated as '${Hostname}' itself on the command line. 212 | 213 | set -- \ 214 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 215 | -classpath "$CLASSPATH" \ 216 | org.gradle.wrapper.GradleWrapperMain \ 217 | "$@" 218 | 219 | # Stop when "xargs" is not available. 220 | if ! command -v xargs >/dev/null 2>&1 221 | then 222 | die "xargs is not available" 223 | fi 224 | 225 | # Use "xargs" to parse quoted args. 226 | # 227 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 228 | # 229 | # In Bash we could simply go: 230 | # 231 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 232 | # set -- "${ARGS[@]}" "$@" 233 | # 234 | # but POSIX shell has neither arrays nor command substitution, so instead we 235 | # post-process each arg (as a line of input to sed) to backslash-escape any 236 | # character that might be a shell metacharacter, then use eval to reverse 237 | # that process (while maintaining the separation between arguments), and wrap 238 | # the whole thing up as a single "set" statement. 239 | # 240 | # This will of course break if any of these variables contains a newline or 241 | # an unmatched quote. 
242 | # 243 | 244 | eval "set -- $( 245 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 246 | xargs -n1 | 247 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 248 | tr '\n' ' ' 249 | )" '"$@"' 250 | 251 | exec "$JAVACMD" "$@" 252 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests/src/main/java/com/bakdata/fluent_kafka_streams_tests/TestOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2025 bakdata 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests; 26 | 27 | import com.bakdata.kafka.Preconfigured; 28 | import java.util.List; 29 | import org.apache.kafka.clients.producer.ProducerRecord; 30 | import org.apache.kafka.common.serialization.Serde; 31 | 32 | 33 | /** 34 | *

Represents the output stream of the tested app via the {@link TestTopology}.
 35 | * This can be used via the {@link StreamOutput} or the {@link TableOutput}, dependent on the desired semantics.
 36 | * For more details see each implementation.
 37 | *
 38 | * Note: The StreamOutput is a one-time iterable. Cache it if you need to iterate several times.
39 | * 40 | * @param the key type of the output stream 41 | * @param the value type of the output stream 42 | */ 43 | public interface TestOutput extends Iterable> { 44 | /** 45 | * Set new serde for this output. 46 | * 47 | * @param keySerde The serializer/deserializer to be used for the keys in the output 48 | * @param valueSerde The serializer/deserializer to be used for the values in the output 49 | * @return Copy of current {@code TestOutput} with provided serdes 50 | */ 51 | TestOutput withSerde(Serde keySerde, Serde valueSerde); 52 | 53 | /** 54 | * Set new serde for this output. Serdes are configured using properties of the test topology. 55 | * 56 | * @param keySerde The serializer/deserializer to be used for the keys in the output 57 | * @param valueSerde The serializer/deserializer to be used for the values in the output 58 | * @return Copy of current {@code TestOutput} with provided serdes 59 | */ 60 | TestOutput configureWithSerde(Preconfigured> keySerde, 61 | Preconfigured> valueSerde); 62 | 63 | /** 64 | * Set new serde for this output. Serdes are configured using properties of the test topology. 65 | * 66 | * @param keySerde The serializer/deserializer to be used for the keys in the output 67 | * @param valueSerde The serializer/deserializer to be used for the values in the output 68 | * @return Copy of current {@code TestOutput} with provided serdes 69 | */ 70 | TestOutput configureWithSerde(Serde keySerde, Serde valueSerde); 71 | 72 | /** 73 | * Set new key serde for this output. 74 | * 75 | * @param keySerde The serializer/deserializer to be used for the keys in the output 76 | * @return Copy of current {@code TestOutput} with provided key serde 77 | */ 78 | TestOutput withKeySerde(Serde keySerde); 79 | 80 | /** 81 | * Set new key serde for this output. Serde is configured using properties of the test topology. 82 | * 83 | * @param keySerde The serializer/deserializer to be used for the keys in the output 84 | * @return Copy of current {@code TestOutput} with provided key serde 85 | */ 86 | TestOutput configureWithKeySerde(Preconfigured> keySerde); 87 | 88 | /** 89 | * Set new key serde for this output. Serde is configured using properties of the test topology. 90 | * 91 | * @param keySerde The serializer/deserializer to be used for the keys in the output 92 | * @return Copy of current {@code TestOutput} with provided key serde 93 | */ 94 | TestOutput configureWithKeySerde(Serde keySerde); 95 | 96 | /** 97 | * Set new value serde for this output. 98 | * 99 | * @param valueSerde The serializer/deserializer to be used for the values in the output 100 | * @return Copy of current {@code TestOutput} with provided value serde 101 | */ 102 | TestOutput withValueSerde(Serde valueSerde); 103 | 104 | /** 105 | * Set new value serde for this output. Serde is configured using properties of the test topology. 106 | * 107 | * @param valueSerde The serializer/deserializer to be used for the values in the output 108 | * @return Copy of current {@code TestOutput} with provided value serde 109 | */ 110 | TestOutput configureWithValueSerde(Preconfigured> valueSerde); 111 | 112 | /** 113 | * Set new value serde for this output. Serde is configured using properties of the test topology. 114 | * 115 | * @param valueSerde The serializer/deserializer to be used for the values in the output 116 | * @return Copy of current {@code TestOutput} with provided value serde 117 | */ 118 | TestOutput configureWithValueSerde(Serde valueSerde); 119 | 120 | /** 121 | *

Type-casts the key and value to the given types.
122 | * 123 | * A type-cast is useful if you have general-purpose serde, such as Json or Avro, which is used for different types 124 | * in input and output. Thus, instead of unnecessarily overriding the serde, this method just casts the output. 125 | * 126 | * @param keyType the new key type. 127 | * @param valueType the new value type. 128 | * @return Copy of current {@code TestOutput} with provided types 129 | */ 130 | TestOutput withTypes(final Class keyType, final Class valueType); 131 | 132 | /** 133 | *

Type-casts the key to the given type.
134 | * 135 | * A type-cast is useful if you have general-purpose serde, such as Json or Avro, which is used for different types 136 | * in input and output. Thus, instead of unnecessarily overriding the serde, this method just casts the output. 137 | * 138 | * @param keyType the new key type. 139 | * @return Copy of current {@code TestOutput} with provided key type 140 | */ 141 | TestOutput withKeyType(final Class keyType); 142 | 143 | /** 144 | *

Type-casts the value to the given type.
145 | * 146 | * A type-cast is useful if you have general-purpose serde, such as Json or Avro, which is used for different types 147 | * in input and output. Thus, instead of unnecessarily overriding the serde, this method just casts the output. 148 | * 149 | * @param valueType the new value type. 150 | * @return Copy of current {@code TestOutput} with provided value type 151 | */ 152 | TestOutput withValueType(final Class valueType); 153 | 154 | /** 155 | *

Reads the next value from the output stream.
 156 | * Usually, you should not need to call this. The recommended way should be to use either
 157 | *
 158 | *   • the {@link #expectNextRecord()} and {@link #expectNoMoreRecord()} methods OR
 159 | *   • the iterable interface (via {@link #iterator()}).
 160 | *
161 | * 162 | * @return The next value in the output stream depending on the output type (stream or table semantics). {@code 163 | * null} if no more values are present. 164 | */ 165 | ProducerRecord readOneRecord(); 166 | 167 | /** 168 | * Reads the next record and creates an {@link Expectation} for it. 169 | * 170 | * @return An {@link Expectation} containing the next record from the output. 171 | */ 172 | Expectation expectNextRecord(); 173 | 174 | /** 175 | * Reads the next record from the output and expects it to be the end of output. 176 | * 177 | * @return An {@link Expectation} containing the next record from the output. 178 | */ 179 | Expectation expectNoMoreRecord(); 180 | 181 | /** 182 | *

Interpret the output with {@link org.apache.kafka.streams.kstream.KTable} semantics (each key only once).
 183 | * Note: once the first value of the stream has been read or the iterator has been called, you cannot switch
 184 | * between the output types any more.
185 | * @return Current output with {@link org.apache.kafka.streams.kstream.KTable} semantics 186 | */ 187 | TestOutput asTable(); 188 | 189 | /** 190 | *

Interpret the output with {@link org.apache.kafka.streams.kstream.KStream} semantics (each key multiple
 191 | * times).
 192 | * This is the default, there should usually be no need to call this method.
 193 | * Note: once the first value of the stream has been read or the iterator has been called, you cannot switch
 194 | * between the output types any more.
195 | * 196 | * @return Current output with {@link org.apache.kafka.streams.kstream.KStream} semantics 197 | */ 198 | TestOutput asStream(); 199 | 200 | /** 201 | * Convert the output to a {@link java.util.List}. 202 | * 203 | * @return A {@link java.util.List} representing the output 204 | */ 205 | List> toList(); 206 | } 207 | 208 | -------------------------------------------------------------------------------- /fluent-kafka-streams-tests-junit5/src/test/java/com/bakdata/fluent_kafka_streams_tests/junit5/WordCountTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * MIT License 3 | * 4 | * Copyright (c) 2024 bakdata GmbH 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy 7 | * of this software and associated documentation files (the "Software"), to deal 8 | * in the Software without restriction, including without limitation the rights 9 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the Software is 11 | * furnished to do so, subject to the following conditions: 12 | * 13 | * The above copyright notice and this permission notice shall be included in all 14 | * copies or substantial portions of the Software. 15 | * 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | * SOFTWARE. 
23 | */ 24 | 25 | package com.bakdata.fluent_kafka_streams_tests.junit5; 26 | 27 | import static org.assertj.core.api.Assertions.assertThat; 28 | import static org.junit.jupiter.api.Assertions.assertThrows; 29 | 30 | import com.bakdata.fluent_kafka_streams_tests.junit5.test_applications.WordCount; 31 | import java.util.Iterator; 32 | import java.util.List; 33 | import java.util.NoSuchElementException; 34 | import org.apache.kafka.clients.producer.ProducerRecord; 35 | import org.apache.kafka.common.serialization.Serdes; 36 | import org.junit.jupiter.api.Test; 37 | import org.junit.jupiter.api.extension.RegisterExtension; 38 | 39 | class WordCountTest { 40 | private final WordCount app = new WordCount(); 41 | 42 | @RegisterExtension 43 | final TestTopologyExtension testTopology = new TestTopologyExtension<>(this.app::getTopology, 44 | WordCount.getKafkaProperties()); 45 | 46 | @Test 47 | void shouldAggregateSameWordStream() { 48 | this.testTopology.input().add("bla") 49 | .add("blub") 50 | .add("bla"); 51 | 52 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 53 | .expectNextRecord().hasKey("bla").hasValue(1L) 54 | .expectNextRecord().hasKey("blub").hasValue(1L) 55 | .expectNextRecord().hasKey("bla").hasValue(2L) 56 | .expectNoMoreRecord(); 57 | } 58 | 59 | @Test 60 | void shouldAggregateSameWordTable() { 61 | this.testTopology.input().add("bla") 62 | .add("blub") 63 | .add("bla"); 64 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 65 | .expectNextRecord().hasKey("bla").hasValue(2L) 66 | .expectNextRecord().hasKey("blub").hasValue(1L) 67 | .expectNoMoreRecord(); 68 | } 69 | 70 | @Test 71 | void shouldNotAggregateDifferentWordsStream() { 72 | this.testTopology.input().add("bla") 73 | .add("foo") 74 | .add("blub"); 75 | 76 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 77 | .expectNextRecord().hasKey("bla").hasValue(1L) 78 | .expectNextRecord().hasKey("foo").hasValue(1L) 79 | .expectNextRecord().hasKey("blub").hasValue(1L) 80 | .expectNoMoreRecord(); 81 | } 82 | 83 | @Test 84 | void shouldNotAggregateDifferentWordsTable() { 85 | this.testTopology.input().add("bla") 86 | .add("foo") 87 | .add("blub"); 88 | 89 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 90 | .expectNextRecord().hasKey("bla").hasValue(1L) 91 | .expectNextRecord().hasKey("foo").hasValue(1L) 92 | .expectNextRecord().hasKey("blub").hasValue(1L) 93 | .expectNoMoreRecord(); 94 | } 95 | 96 | @Test 97 | void shouldReturnNoInputAndOutputStream() { 98 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 99 | .expectNoMoreRecord(); 100 | } 101 | 102 | @Test 103 | void shouldReturnNoInputAndOutputTable() { 104 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 105 | .expectNoMoreRecord(); 106 | } 107 | 108 | @Test 109 | void shouldAggregateSameWordOrderTable() { 110 | this.testTopology.input().add("blub") // 1 blub 111 | .add("bla") // 1 bla 112 | .add("blub") // 2 blub 113 | .add("blub") // 3 blub 114 | .add("bla") // 2 bla 115 | .add("blub") // 4 blub 116 | .add("bla") // 3 bla 117 | .add("bla") // 4 bla 118 | .add("blub") // 5 blub 119 | .add("bla") // 5 bla 120 | .add("bla") // 6 bla 121 | .add("bla"); // 7 bla 122 | 123 | this.testTopology.tableOutput(this.app.getOutputTopic()).withSerde(Serdes.String(), Serdes.Long()) 124 | .expectNextRecord().hasKey("blub").hasValue(5L) 125 | .expectNextRecord().hasKey("bla").hasValue(7L) 126 | .expectNoMoreRecord(); 127 | } 128 | 129 | 
@Test 130 | void shouldReturnSingleInputAndOutputStream() { 131 | this.testTopology.input().add("bla"); 132 | 133 | this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()) 134 | .expectNextRecord().hasKey("bla").hasValue(1L) 135 | .expectNoMoreRecord(); 136 | } 137 | 138 | @Test 139 | void shouldReturnSingleInputAndOutputTable() { 140 | this.testTopology.input().add("bla"); 141 | 142 | this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()) 143 | .expectNextRecord().hasKey("bla").hasValue(1L) 144 | .expectNoMoreRecord(); 145 | } 146 | 147 | @Test 148 | void shouldReturnCorrectIteratorStream() { 149 | this.testTopology.input().add("bla") 150 | .add("blub") 151 | .add("bla") 152 | .add("foo"); 153 | final List expected = List.of("bla", "blub", "bla", "foo"); 154 | 155 | assertThat(this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())) 156 | .extracting(ProducerRecord::key) 157 | .containsAll(expected); 158 | } 159 | 160 | @Test 161 | void shouldReturnCorrectIteratorExplicitStream() { 162 | this.testTopology.input().add("bla") 163 | .add("blub") 164 | .add("bla") 165 | .add("foo"); 166 | final List expected = List.of("bla", "blub", "bla", "foo"); 167 | 168 | assertThat(this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long()).iterator()) 169 | .toIterable() 170 | .extracting(ProducerRecord::key) 171 | .containsAll(expected); 172 | } 173 | 174 | @Test 175 | void shouldReturnCorrectIteratorTable() { 176 | this.testTopology.input().add("bla") 177 | .add("blub") 178 | .add("bla") 179 | .add("foo"); 180 | final List expected = List.of("bla", "blub", "foo"); 181 | 182 | assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long())) 183 | .extracting(ProducerRecord::key) 184 | .containsAll(expected); 185 | } 186 | 187 | @Test 188 | void shouldFailCorrectIteratorStreamNoHasNextCheck() { 189 | final Iterator> output = this.testTopology.streamOutput() 190 | .withSerde(Serdes.String(), Serdes.Long()).iterator(); 191 | assertThrows(NoSuchElementException.class, output::next); 192 | } 193 | 194 | @Test 195 | void shouldFailCorrectIteratorTableNoHasNextCheck() { 196 | final Iterator> output = this.testTopology.tableOutput() 197 | .withSerde(Serdes.String(), Serdes.Long()).iterator(); 198 | assertThrows(NoSuchElementException.class, output::next); 199 | } 200 | 201 | @Test 202 | void shouldReturnCorrectIteratorExplicitTable() { 203 | this.testTopology.input().add("bla") 204 | .add("blub") 205 | .add("bla") 206 | .add("foo"); 207 | final List expected = List.of("bla", "blub", "foo"); 208 | 209 | assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()).iterator()) 210 | .toIterable() 211 | .extracting(ProducerRecord::key) 212 | .containsAll(expected); 213 | } 214 | 215 | @Test 216 | void shouldWorkOnTableToStream() { 217 | this.testTopology.input() 218 | .add("bla") 219 | .add("blub") 220 | .add("bla"); 221 | 222 | // Unnecessary conversion between table and stream to check that nothing breaks 223 | this.testTopology.streamOutput().asTable().asStream() 224 | .withSerde(Serdes.String(), Serdes.Long()) 225 | .expectNextRecord().hasKey("bla").hasValue(1L) 226 | .expectNextRecord().hasKey("blub").hasValue(1L) 227 | .expectNextRecord().hasKey("bla").hasValue(2L) 228 | .expectNoMoreRecord(); 229 | } 230 | 231 | @Test 232 | void singleWordShouldBePresent() { 233 | this.testTopology.input().add("bla"); 234 | this.testTopology.tableOutput().expectNextRecord().isPresent(); 235 | } 236 | 237 | 
@Test 238 | void shouldBeDoneAfterSingleWord() { 239 | this.testTopology.input().add("bla"); 240 | this.testTopology.tableOutput().expectNextRecord().isPresent().expectNextRecord().toBeEmpty(); 241 | } 242 | 243 | @Test 244 | void shouldDoNothingOnEmptyInput() { 245 | this.testTopology.streamOutput().expectNoMoreRecord().and().expectNoMoreRecord().toBeEmpty(); 246 | } 247 | 248 | @Test 249 | void shouldConvertStreamOutputToList() { 250 | this.testTopology.input() 251 | .add("bla") 252 | .add("blub") 253 | .add("bla"); 254 | 255 | final List> outputs = this.testTopology.streamOutput() 256 | .withSerde(Serdes.String(), Serdes.Long()) 257 | .toList(); 258 | 259 | assertThat(outputs) 260 | .extracting(ProducerRecord::key) 261 | .containsExactly("bla", "blub", "bla"); 262 | assertThat(outputs) 263 | .extracting(ProducerRecord::value) 264 | .containsExactly(1L, 1L, 2L); 265 | } 266 | 267 | @Test 268 | void shouldConvertTableOutputToList() { 269 | this.testTopology.input() 270 | .add("bla") 271 | .add("blub") 272 | .add("bla"); 273 | 274 | final List> outputs = this.testTopology.tableOutput() 275 | .withSerde(Serdes.String(), Serdes.Long()) 276 | .toList(); 277 | 278 | assertThat(outputs) 279 | .extracting(ProducerRecord::key) 280 | .containsExactly("bla", "blub"); 281 | assertThat(outputs) 282 | .extracting(ProducerRecord::value) 283 | .containsExactly(2L, 1L); 284 | } 285 | 286 | @Test 287 | void shouldConvertEmptyStreamOutputToEmptyList() { 288 | final List> outputs = this.testTopology.streamOutput() 289 | .withSerde(Serdes.String(), Serdes.Long()) 290 | .toList(); 291 | 292 | assertThat(outputs) 293 | .isInstanceOf(List.class) 294 | .isEmpty(); 295 | } 296 | 297 | @Test 298 | void shouldConvertEmptyTableOutputToEmptyList() { 299 | final List> outputs = this.testTopology.tableOutput() 300 | .withSerde(Serdes.String(), Serdes.Long()) 301 | .toList(); 302 | 303 | assertThat(outputs) 304 | .isInstanceOf(List.class) 305 | .isEmpty(); 306 | } 307 | } 308 | --------------------------------------------------------------------------------