├── gradle
│   ├── wrapper
│   │   ├── gradle-wrapper.jar
│   │   └── gradle-wrapper.properties
│   └── libs.versions.toml
├── jitpack.yml
├── gradle.properties
├── modules
│   ├── kotka-streams-extensions
│   │   ├── src
│   │   │   ├── main
│   │   │   │   └── kotlin
│   │   │   │       └── dev
│   │   │   │           └── adamko
│   │   │   │               └── kotka
│   │   │   │                   └── extensions
│   │   │   │                       ├── QueryableStoreType.kt
│   │   │   │                       ├── processor
│   │   │   │                       │   └── recordExtensions.kt
│   │   │   │                       ├── streams
│   │   │   │                       │   ├── BranchedKStream.kt
│   │   │   │                       │   ├── topicNameExtractor.kt
│   │   │   │                       │   ├── KGroupedStream.kt
│   │   │   │                       │   ├── KStreamTransform.kt
│   │   │   │                       │   └── KStream.kt
│   │   │   │                       ├── serdes.kt
│   │   │   │                       ├── streamsBuilder.kt
│   │   │   │                       ├── KeyValue.kt
│   │   │   │                       ├── namedOperations.kt
│   │   │   │                       ├── materialized.kt
│   │   │   │                       ├── state
│   │   │   │                       │   └── keyValueStore.kt
│   │   │   │                       └── tables
│   │   │   │                           └── KTable.kt
│   │   │   └── test
│   │   │       └── kotlin
│   │   │           └── dev
│   │   │               └── adamko
│   │   │                   └── kotka
│   │   │                       └── extensions
│   │   │                           ├── SerdesTests.kt
│   │   │                           ├── streams
│   │   │                           │   ├── BranchedKStreamTest.kt
│   │   │                           │   └── KGroupedStreamTest.kt
│   │   │                           ├── processor
│   │   │                           │   └── RecordExtensionsTest.kt
│   │   │                           ├── StreamsBuilderTests.kt
│   │   │                           ├── tables
│   │   │                           │   └── KTableExtensionsTests.kt
│   │   │                           └── KeyValueTest.kt
│   │   ├── module.md
│   │   └── build.gradle.kts
│   ├── versions-platform
│   │   └── build.gradle.kts
│   ├── kotka-streams-framework
│   │   ├── build.gradle.kts
│   │   ├── src
│   │   │   └── main
│   │   │       └── kotlin
│   │   │           └── dev
│   │   │               └── adamko
│   │   │                   └── kotka
│   │   │                       └── topicdata
│   │   │                           ├── TopicDefinition.kt
│   │   │                           ├── TopicRecord.kt
│   │   │                           ├── GlobalKTableDefinition.kt
│   │   │                           └── KeyValueSerdes.kt
│   │   └── module.md
│   └── kotka-streams-kotlinx-serialization
│       ├── module.md
│       ├── src
│       │   ├── test
│       │   │   └── kotlin
│       │   │       └── dev
│       │   │           └── adamko
│       │   │               └── kotka
│       │   │                   └── kxs
│       │   │                       ├── BinaryFormatSerdeTest.kt
│       │   │                       └── StringFormatSerdeTest.kt
│       │   └── main
│       │       └── kotlin
│       │           └── dev
│       │               └── adamko
│       │                   └── kotka
│       │                       └── kxs
│       │                           ├── KotkaJsonModule.kt
│       │                           ├── binaryFormatSerde.kt
│       │                           └── stringFormatSerde.kt
│       └── build.gradle.kts
├── .github
│   ├── workflows
│   │   ├── workflow_pull_request.yml
│   │   ├── run_tests.yml
│   │   ├── run_publish_maven.yml
│   │   ├── run_publish_site.yml
│   │   ├── workflow_release.yml
│   │   └── run_gradle_task.yml
│   └── renovate.json5
├── .gitattributes
├── docs
│   ├── build.gradle.kts
│   ├── images
│   │   └── logo-icon.svg
│   └── styles
│       └── logo-styles.css
├── gradlew.bat
├── settings.gradle.kts
├── .gitignore
├── README.md
├── gradlew
└── LICENSE

/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/adamko-dev/kotka-streams/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/jitpack.yml:
--------------------------------------------------------------------------------
# https://jitpack.io/docs/BUILDING/

# https://jitpack.io/docs/BUILDING/#java-version
jdk:
  - openjdk11
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
org.gradle.jvmargs=-Dfile.encoding=UTF-8 -Xmx2g

org.gradle.parallel=true
org.gradle.caching=true
org.gradle.configuration-cache=true
org.gradle.configuration-cache-problems=warn
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/QueryableStoreType.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions

import org.apache.kafka.streams.state.QueryableStoreType
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore
import org.apache.kafka.streams.state.ValueAndTimestamp


/** @see org.apache.kafka.streams.state.QueryableStoreType */
typealias TimestampedQueryStoreType<K, V> = QueryableStoreType<ReadOnlyKeyValueStore<K, ValueAndTimestamp<V>>>
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/processor/recordExtensions.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions.processor

import org.apache.kafka.streams.processor.api.Record


/** Get the key of a [Record] */
operator fun <K, V> Record<K, V>.component1(): K = key()

/** Get the value of a [Record] */
operator fun <K, V> Record<K, V>.component2(): V = value()

/** Get the timestamp of a [Record] */
operator fun <K, V> Record<K, V>.component3(): Long = timestamp()
--------------------------------------------------------------------------------
/.github/workflows/workflow_pull_request.yml:
--------------------------------------------------------------------------------
name: Pull Requests


on:
  workflow_dispatch:
  pull_request:
  merge_group:
  push:
    branches:
      - "renovate/**"


concurrency:
  group: "Pull Requests: ${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
  cancel-in-progress: true


jobs:

  tests:
    uses: ./.github/workflows/run_tests.yml
    permissions:
      contents: read
      checks: write
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/streams/BranchedKStream.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions.streams

import org.apache.kafka.streams.kstream.Branched
import org.apache.kafka.streams.kstream.BranchedKStream
import org.apache.kafka.streams.kstream.Predicate


/** @see org.apache.kafka.streams.kstream.BranchedKStream.branch */
fun <K, V> BranchedKStream<K, V>.branch(
  branched: Branched<K, V>,
  predicate: Predicate<in K, in V>,
): BranchedKStream<K, V> = branch(predicate, branched)
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/streams/topicNameExtractor.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions.streams

import org.apache.kafka.streams.KeyValue
import org.apache.kafka.streams.processor.RecordContext


fun interface TopicNameExtractorKt<K, V> {
  fun Context.extract(keyValue: KeyValue<K, V>): String

  interface Context {
    val recordContext: RecordContext
  }
}


@JvmInline
internal value class TopicNameExtractorContextInternal(
  override val recordContext: RecordContext
) : TopicNameExtractorKt.Context
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
text eol=lf

# jvm sources
*.kt text diff=java
*.kts text diff=java


# These are explicitly windows files and should use crlf
*.bat text eol=crlf

# These files are text and should be normalized (Convert crlf => lf)
*.bash text eol=lf
*.sh text eol=lf

# These files are binary and should be left untouched
# (binary is a macro for -text -diff)
*.jar binary
*.war binary

# https://github.com/github/linguist/blob/v7.24.1/docs/overrides.md
docs/** linguist-documentation
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/serdes.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions

import org.apache.kafka.common.serialization.Serde
import org.apache.kafka.common.serialization.Serdes


/**
 * Create a [ListSerde][Serdes.ListSerde].
 *
 * @see Serdes.ListSerde
 */
@Suppress("FunctionName")
fun <Inner> ListSerde(
  valueSerde: Serde<Inner>,
): Serde<List<Inner>> {
  @Suppress("UNCHECKED_CAST")
  val listClass = ArrayList::class.java as Class<List<Inner>>

  return Serdes.ListSerde<List<Inner>, Inner>(
    listClass,
    valueSerde
  )
}
--------------------------------------------------------------------------------
/modules/versions-platform/build.gradle.kts:
--------------------------------------------------------------------------------
plugins {
  buildsrc.convention.base
  buildsrc.convention.`maven-publish`

  `java-platform`
}

description = "Aligns versions of project dependencies"

javaPlatform {
  allowDependencies()
}

dependencies {
  api(platform(libs.kotlin.bom))
  api(platform(libs.kotlinxSerialization.bom))

  api(platform(libs.kotest.bom))
  api(platform(libs.junit.bom))

  constraints {
    api(libs.kafka.streams)
    api(libs.kotlinx.knitTest)
    api(libs.mockk)
    api(libs.slf4j.api)
    api(libs.slf4j.simple)
  }
}

kotkaPublishing {
  mavenPomSubprojectName.set("Versions Platform")
}
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/streamsBuilder.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions

import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.Consumed
import org.apache.kafka.streams.kstream.KStream


/** @see org.apache.kafka.streams.StreamsBuilder.stream */
fun <K, V> StreamsBuilder.stream(
  consumed: Consumed<K, V>? = null,
  vararg topics: String,
): KStream<K, V> = when (consumed) {
  null -> stream(topics.toList())
  else -> stream(topics.toList(), consumed)
}


/** @see org.apache.kafka.streams.StreamsBuilder.stream */
fun <K, V> StreamsBuilder.stream(
  vararg topics: String,
): KStream<K, V> = stream(topics.toList())
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/module.md:
--------------------------------------------------------------------------------
# Module kotka-streams-extensions

Contains the basic extension functions to make Kafka Streams more Kotlin-esque.

```kotlin
implementation("dev.adamko.kotka:kotka-streams-extensions:$kotkaVersion")
```

```kotlin
import dev.adamko.kotka.extensions.tables.*
import dev.adamko.kotka.extensions.streams.*
import dev.adamko.kotka.extensions.*

data class MusicalBand(
  val name: String,
  val memberNames: List<String>,
)

builder.stream<String, MusicalBand>("musical-bands")
  .flatMap("band-member-names-to-band-name") { _: String, band: MusicalBand ->
    band.memberNames.map { memberName -> memberName to band.name }
  }
  .groupByKey(groupedAs("map-of-band-member-to-band-names"))
```
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/test/kotlin/dev/adamko/kotka/extensions/SerdesTests.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions

import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.types.shouldBeInstanceOf
import org.apache.kafka.common.serialization.ListDeserializer
import org.apache.kafka.common.serialization.ListSerializer
import org.apache.kafka.common.serialization.Serdes

class SerdesTests : FunSpec({

  context("ListSerde") {
    test("expect ListSerde returns a... list serde!") {
      val listSerde = ListSerde(Serdes.String())

      listSerde.shouldBeInstanceOf<Serdes.ListSerde<String>>()

      listSerde.deserializer().shouldBeInstanceOf<ListDeserializer<String>>()
      listSerde.serializer().shouldBeInstanceOf<ListSerializer<String>>()
    }
  }
})
--------------------------------------------------------------------------------
/docs/build.gradle.kts:
--------------------------------------------------------------------------------
import dev.adamko.dokkatoo.dokka.plugins.DokkaHtmlPluginParameters
import dev.adamko.dokkatoo.dokka.plugins.DokkaHtmlPluginParameters.Companion.DOKKA_HTML_PARAMETERS_NAME

plugins {
  buildsrc.convention.dokkatoo
}

dependencies {
  dokkatoo(projects.modules.kotkaStreamsExtensions)
  dokkatoo(projects.modules.kotkaStreamsFramework)
  dokkatoo(projects.modules.kotkaStreamsKotlinxSerialization)

  dokkatooPluginHtml(libs.kotlin.dokkaPlugin.allModulesPage)
  dokkatooPluginHtml(libs.kotlin.dokkaPlugin.templating)
}

dokkatoo {
  moduleName.set("Kotka Streams")

  pluginsConfiguration.named<DokkaHtmlPluginParameters>(DOKKA_HTML_PARAMETERS_NAME) {
    customStyleSheets.from("./styles/logo-styles.css")
    customAssets.from("./images/logo-icon.svg")
  }
}
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/build.gradle.kts:
--------------------------------------------------------------------------------
plugins {
  buildsrc.convention.`kotlin-jvm`
  buildsrc.convention.`maven-publish`
  buildsrc.convention.dokkatoo
}

description = "Kotlin extensions for Kafka Streams"


dependencies {
  implementation(platform(projects.modules.versionsPlatform))

  implementation(libs.kafka.streams)

  testImplementation(libs.kotest.runnerJUnit5)
  testImplementation(libs.kotest.assertionsCore)
  testImplementation(libs.kotest.property)
  testImplementation(libs.kotest.assertionsJson)

  testImplementation(libs.mockk)
}


kotkaPublishing {
  mavenPomSubprojectName.set("Kafka Streams Extensions")
}

dokkatoo {
  moduleName.set("kotka-streams-extensions")
  dokkatooSourceSets.configureEach {
    includes.from("module.md")
  }
}
--------------------------------------------------------------------------------
/modules/kotka-streams-framework/build.gradle.kts:
--------------------------------------------------------------------------------
plugins {
  buildsrc.convention.`kotlin-jvm`
  buildsrc.convention.`maven-publish`
  buildsrc.convention.dokkatoo
}

description = "A light framework for structuring Kafka Streams topics and records"


dependencies {
  implementation(platform(projects.modules.versionsPlatform))

  api(projects.modules.kotkaStreamsExtensions)

  implementation(libs.kafka.streams)

  testImplementation(libs.kotest.runnerJUnit5)
  testImplementation(libs.kotest.assertionsCore)
  testImplementation(libs.kotest.property)
  testImplementation(libs.kotest.assertionsJson)

  testImplementation(libs.mockk)
}


kotkaPublishing {
  mavenPomSubprojectName.set("Framework")
}

dokkatoo {
  moduleName.set("kotka-streams-framework")
  dokkatooSourceSets.configureEach {
    includes.from("module.md")
  }
}
--------------------------------------------------------------------------------
/modules/kotka-streams-kotlinx-serialization/module.md:
--------------------------------------------------------------------------------
# Module kotka-streams-kotlinx-serialization

Use [Kotlinx Serialization](https://github.com/Kotlin/kotlinx.serialization/) for topic key/value
serdes.

```kotlin
implementation("dev.adamko.kotka:kotka-streams-kotlinx-serialization:$kotkaVersion")
```

```kotlin
import dev.adamko.kotka.extensions.tables.*
import dev.adamko.kotka.extensions.streams.*
import dev.adamko.kotka.extensions.*
import dev.adamko.kotka.topicdata.*
import dev.adamko.kotka.kxs.*

val jsonMapper = Json {}

@Serializable
data class Sku(
  val sku: String
)

@Serializable
data class ShopItem(
  val id: Sku,
  val name: String,
) : TopicRecord<Sku> {
  override val topicKey: Sku by ::id
}

object ShopItemTopic : TopicDefinition<Sku, ShopItem> {
  override val topicName = "shop-item-updates"
  override val serdes = KeyValueSerdes.kxsJson(jsonMapper)
}
```
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/test/kotlin/dev/adamko/kotka/extensions/streams/BranchedKStreamTest.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions.streams

import io.kotest.core.spec.style.FunSpec
import io.mockk.confirmVerified
import io.mockk.every
import io.mockk.mockk
import io.mockk.verify
import org.apache.kafka.streams.kstream.Branched
import org.apache.kafka.streams.kstream.BranchedKStream
import org.apache.kafka.streams.kstream.Predicate

class BranchedKStreamTest : FunSpec({

  context(".branch() extension") {

    val branchedKStream: BranchedKStream<String, String> = mockk {
      every { branch(any(), any()) } returns mockk()
    }

    test("verify extension calls actual function") {

      branchedKStream.branch(
        branched = mockk(),
      ) { k, v ->
        k == v
      }

      verify(exactly = 1) {
        branchedKStream.branch(
          any<Predicate<in String, in String>>(),
          any<Branched<String, String>>(),
        )
      }

      confirmVerified(branchedKStream)
    }
  }
})
--------------------------------------------------------------------------------
/modules/kotka-streams-kotlinx-serialization/src/test/kotlin/dev/adamko/kotka/kxs/BinaryFormatSerdeTest.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.kxs

import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.string.shouldContainInOrder
import kotlinx.serialization.Serializable
import kotlinx.serialization.cbor.Cbor

class BinaryFormatSerdeTest : FunSpec({

  context("toString()") {

    test("Int serializer") {
      val serde = Cbor.serde<Int>()
      serde.toString()
        .shouldContainInOrder(
          "BinaryFormat serde:",
          "kotlinx.serialization.internal.IntSerializer",
        )
    }

    test("TestDataClass serializer") {
      val serde = Cbor.serde<TestDataClass>()
      serde.toString()
        .shouldContainInOrder(
          "BinaryFormat serde:",
          "dev.adamko.kotka.kxs.BinaryFormatSerdeTest",
          "TestDataClass",
          "serializer",
        )
    }
  }

}) {

  @Serializable
  private data class TestDataClass(
    val int: Int,
    val double: Double,
    val string: String,
  )

}
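
// The serdes under test are plain Kafka `Serde`s, so they can be exercised
// directly. A round-trip sketch (not part of the original tests; the topic
// name "some-topic" is arbitrary, since these serdes ignore it):
//
//   val serde = Cbor.serde<TestDataClass>()
//   val original = TestDataClass(int = 1, double = 2.0, string = "three")
//   val bytes = serde.serializer().serialize("some-topic", original)
//   val decoded = serde.deserializer().deserialize("some-topic", bytes)
//   check(decoded == original)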
--------------------------------------------------------------------------------
/modules/kotka-streams-kotlinx-serialization/src/test/kotlin/dev/adamko/kotka/kxs/StringFormatSerdeTest.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.kxs

import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.string.shouldContainInOrder
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json

class StringFormatSerdeTest : FunSpec({

  context("toString()") {

    test("Int serializer") {
      val serde = Json.serde<Int>()
      serde.toString()
        .shouldContainInOrder(
          "StringFormat serde:",
          "kotlinx.serialization.internal.IntSerializer",
        )
    }

    test("TestDataClass serializer") {
      val serde = Json.serde<TestDataClass>()
      serde.toString()
        .shouldContainInOrder(
          "StringFormat serde:",
          "dev.adamko.kotka.kxs.StringFormatSerdeTest",
          "TestDataClass",
          "serializer",
        )
    }
  }

}) {

  @Serializable
  private data class TestDataClass(
    val int: Int,
    val double: Double,
    val string: String,
  )

}
--------------------------------------------------------------------------------
/.github/workflows/run_tests.yml:
--------------------------------------------------------------------------------
name: Tests


on:
  workflow_dispatch:
    inputs:
      checkout-ref:
        description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'."
        required: false
        type: string
  workflow_call:
    inputs:
      checkout-ref:
        description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'."
        required: false
        type: string


concurrency:
  group: "Tests: ${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
  cancel-in-progress: true


permissions:
  contents: read
  checks: write # required by mikepenz/action-junit-report


jobs:

  gradle-check:
    strategy:
      matrix:
        os: [ ubuntu-latest, macos-latest, windows-latest ]
      fail-fast: false
    uses: ./.github/workflows/run_gradle_task.yml
    with:
      runs-on: ${{ matrix.os }}
      gradle-task: check --stacktrace
      checkout-ref: ${{ inputs.checkout-ref }}

  build-site:
    # verify that the site can be built, but don't deploy it
    uses: ./.github/workflows/run_gradle_task.yml
    with:
      runs-on: ubuntu-latest
      gradle-task: :docs:dokkatooGenerate
      checkout-ref: ${{ inputs.checkout-ref }}
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/test/kotlin/dev/adamko/kotka/extensions/processor/RecordExtensionsTest.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions.processor

import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.shouldBe
import org.apache.kafka.streams.processor.api.Record

class RecordExtensionsTest : FunSpec({

  context("verify componentN() functions") {
    val record = Record("key 123", "value 999", 12345L)


    test("Record.component1() should return Record.key()") {
      record.component1() shouldBe "key 123"
      record.component1() shouldBe record.key()
    }


    test("Record.component2() should return Record.value()") {
      record.component2() shouldBe "value 999"
      record.component2() shouldBe record.value()
    }


    test("Record.component3() should return Record.timestamp()") {
      record.component3() shouldBe 12345L
      record.component3() shouldBe record.timestamp()
    }


    test("destructuring declaration should return (key, value, timestamp)") {
      val (key, value, timestamp) = record

      key shouldBe "key 123"
      key shouldBe record.key()

      value shouldBe "value 999"
      value shouldBe record.value()

      timestamp shouldBe 12345L
      timestamp shouldBe record.timestamp()
    }
  }
})
--------------------------------------------------------------------------------
/modules/kotka-streams-framework/src/main/kotlin/dev/adamko/kotka/topicdata/TopicDefinition.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.topicdata

import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.Consumed
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.kstream.KTable


/**
 * A definition for any Kafka topic.
 *
 * These can be used to create
 * [KStream][org.apache.kafka.streams.kstream.KStream]s,
 * [KTables][org.apache.kafka.streams.kstream.KTable]s,
 * or [GlobalKTables][org.apache.kafka.streams.kstream.GlobalKTable]s.
 */
interface TopicDefinition<K, V> {

  val topicName: String

  val serdes: KeyValueSerdes<K, V>

  /** unique Kafka processor ID - to help ensure topology names are unique */
  val pid: String
    get() = this::class.simpleName!!

}


fun <K, V> TopicDefinition<K, V>.consumeAsKStream(
  builder: StreamsBuilder,
  consumer: Consumed<K, V> = serdes.consumer("${pid}.input-stream")
): KStream<K, V> {
  return builder.stream(
    topicName,
    consumer,
  )
}


fun <K, V> TopicDefinition<K, V>.consumeAsKTable(
  builder: StreamsBuilder,
  consumer: Consumed<K, V> = serdes.consumer("${pid}.input-table")
): KTable<K, V> {
  return builder.table(
    topicName,
    consumer,
  )
}
--------------------------------------------------------------------------------
/.github/renovate.json5:
--------------------------------------------------------------------------------
{
  $schema: "https://docs.renovatebot.com/renovate-schema.json",
  extends: [
    "config:base"
  ],
  enabled: true,
  enabledManagers: [
    "gradle",
    "gradle-wrapper",
    "github-actions",
  ],
  // Will auto-merge directly, without a PR, if tests pass - else, makes a PR.
  // Must add Renovate to 'Allow specified actors to bypass required pull requests'
  // in branch protection rule
  automergeType: "branch",
  platformAutomerge: true,
  ignoreTests: false,
  packageRules: [
    {
      description: "auto-merge all but major releases",
      matchUpdateTypes: [
        "minor",
        "patch",
        "pin",
        "digest",
      ],
      automerge: true,
    }
  ],
  timezone: "Etc/UTC",
  // loosely limit to Europe work hours, so we don't get pinged in the middle of the night
  schedule: [
    "after 10am and before 6pm"
  ],
  automergeSchedule: [
    "after 10am and before 6pm"
  ],
  stabilityDays: 14,
  // suppressNotifications: [
  //   "artifactErrors",
  //   "branchAutomergeFailure",
  //   "configErrorIssue",
  //   "deprecationWarningIssues",
  //   "lockFileErrors",
  //   "onboardingClose",
  //   "prEditedNotification",
  //   "prIgnoreNotification",
  // ],
  prCreation: "status-success",
  semanticCommits: "disabled",
  ignorePaths: []
}
--------------------------------------------------------------------------------
/docs/images/logo-icon.svg:
--------------------------------------------------------------------------------
(SVG image: the Kotka Streams logo icon)
--------------------------------------------------------------------------------
/modules/kotka-streams-kotlinx-serialization/build.gradle.kts:
--------------------------------------------------------------------------------
plugins {
  buildsrc.convention.`kotlin-jvm`
  kotlin("plugin.serialization")
  buildsrc.convention.`maven-publish`
  buildsrc.convention.dokkatoo
}

description = "Use Kotlinx Serialization for topic key/value serdes"


dependencies {
  implementation(platform(projects.modules.versionsPlatform))

  api(projects.modules.kotkaStreamsExtensions)
  api(projects.modules.kotkaStreamsFramework)

  implementation(libs.kafka.streams)

  implementation(libs.slf4j.api)

  implementation(libs.kotlinxSerialization.core)

  testImplementation(libs.kotest.runnerJUnit5)
  testImplementation(libs.kotest.assertionsCore)
  testImplementation(libs.kotest.property)
  testImplementation(libs.kotest.assertionsJson)

  testImplementation(libs.mockk)

  testImplementation(libs.kotlinxSerialization.cbor)
  testImplementation(libs.kotlinxSerialization.json)

  testImplementation(libs.slf4j.simple)
}


tasks.compileTestKotlin {
  // use experimental binary formats for testing
  kotlinOptions.freeCompilerArgs += "-opt-in=kotlinx.serialization.ExperimentalSerializationApi"
}


kotkaPublishing {
  mavenPomSubprojectName.set("Kotlinx Serialization Extensions")
}

dokkatoo {
  moduleName.set("kotka-streams-kotlinx-serialization")
  dokkatooSourceSets.configureEach {
    includes.from("module.md")
  }
}
--------------------------------------------------------------------------------
/modules/kotka-streams-framework/module.md:
--------------------------------------------------------------------------------
# Module kotka-streams-framework

A light framework for structuring topics and records.
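
Once a topic is described with a `TopicDefinition` (shown below), it can be consumed directly as
a stream or a table via `consumeAsKStream`/`consumeAsKTable`, using the definition's own serdes.
A minimal sketch, assuming the `PetUpdatesTopic` definition from the end of this page and a
`StreamsBuilder` named `builder`:

```kotlin
// consume the topic as a KStream
val petStream: KStream<Long, Pet> = PetUpdatesTopic.consumeAsKStream(builder)

// or consume it as a KTable
val petTable: KTable<Long, Pet> = PetUpdatesTopic.consumeAsKTable(builder)
```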

```kotlin
implementation("dev.adamko.kotka:kotka-streams-framework:$kotkaVersion")
```

Use `TopicRecord` to standardise the data on each topic. Records can then easily be converted
from one type to another.

```kotlin
import dev.adamko.kotka.extensions.tables.*
import dev.adamko.kotka.extensions.streams.*
import dev.adamko.kotka.extensions.*
import dev.adamko.kotka.topicdata.*

data class Animal(
  val id: Long,
  val name: String,
) : TopicRecord<Long> {
  override val topicKey: Long by ::id
}

data class Pet(
  val id: Long,
  val name: String,
) : TopicRecord<Long> {
  override val topicKey: Long by ::id
}

val petUpdates = builder.stream<Long, Animal>("animals")
  .mapTopicRecords("convert-animals-to-pets") { _, animal ->
    Pet(animal.id, animal.name)
  }
```

Use `KeyValueSerdes` to define both the key and value serdes for a topic.
A `TopicDefinition` ties both of these together.

```kotlin
/** All [Pet] updates */
object PetUpdatesTopic : TopicDefinition<Long, Pet> {
  override val topicName = "pet-updates"
  override val serdes = KeyValueSerdes(Serdes.Long(), PetSerde())
}

petUpdates
  .to(
    PetUpdatesTopic.topicName,
    PetUpdatesTopic.serdes.producer("send-pet-updates-to-pet-update-topic")
  )
```
--------------------------------------------------------------------------------
/modules/kotka-streams-framework/src/main/kotlin/dev/adamko/kotka/topicdata/TopicRecord.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.topicdata

import dev.adamko.kotka.extensions.namedAs
import dev.adamko.kotka.topicdata.TopicRecord.Companion.toKeyValue
import org.apache.kafka.streams.KeyValue
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.state.ValueAndTimestamp

interface TopicRecord<K> {

  val topicKey: K

  companion object {

    fun <K, V : TopicRecord<K>> V.toPair(): Pair<K, V> = topicKey to this
    fun <K, V : TopicRecord<K>> V.toKeyValue(): KeyValue<K, V> = KeyValue.pair(topicKey, this)

    fun <K, V : TopicRecord<K>> V.toTimestampValue(timestamp: Long): ValueAndTimestamp<V> =
      ValueAndTimestamp.make(this, timestamp)

    operator fun <K, T : TopicRecord<K>> T.component1(): K = topicKey
    operator fun <K, T : TopicRecord<K>> T.component2(): T = this

  }
}

/** @see KStream.map */
fun <inK, inV, outK, outV : TopicRecord<outK>> KStream<inK, inV>.mapTopicRecords(
  name: String,
  mapper: (key: inK, value: inV) -> outV
): KStream<outK, outV> =
  map({ k, v -> mapper(k, v).toKeyValue() }, namedAs(name))


/** @see KStream.flatMap */
fun <inK, inV, outK, outV : TopicRecord<outK>> KStream<inK, inV>.flatMapTopicRecords(
  name: String,
  mapper: (key: inK, value: inV) -> Iterable<outV>
): KStream<outK, outV> =
  flatMap(
    { k, v -> mapper(k, v).map { a: outV -> a.toKeyValue() } },
    namedAs(name)
  )
--------------------------------------------------------------------------------
/docs/styles/logo-styles.css:
--------------------------------------------------------------------------------
.library-name a {
  position: center;
  display: flex;
  margin-left: 0.2em;
  margin-right: 0.2em;
  align-items: center;
  justify-content: center;
}

.library-name a::before {
  content: "";
  background-image: url('../images/logo-icon.svg');
  background-repeat: no-repeat;
  background-size: 100% 100%;
  position: center;
  display: flex;
  width: 3.5em;
  height: 3.5em;
  margin-right: 1em;
}

.navigation-wrapper {
  padding-top: 0.5em;
  padding-bottom: 0.5em;
}

div#github-link {
  width: 36px;
  height: 36px;
  display: inline-flex;
  border-radius: 24px;
  align-items: center;
  justify-content: center;
  border: none;
}

div#github-link a {
  background-image: url("data:image/svg+xml;utf8,");
  width: 24px;
  height: 24px;
  border-radius: 24px;
  background-size: 100% 100%;
  margin-left: 16px;
}
--------------------------------------------------------------------------------
/.github/workflows/run_publish_maven.yml:
--------------------------------------------------------------------------------
name: Publish Maven


on:
  workflow_dispatch:
    inputs:
      checkout-ref:
        description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'."
        required: false
        type: string
  workflow_call:
    inputs:
      checkout-ref:
        description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'."
        required: false
        type: string


concurrency:
  group: "Publish Maven: ${{ github.workflow }}"
  cancel-in-progress: false


permissions:
  contents: write
  packages: write
  checks: write


jobs:

  sonatype-release:
    permissions:
      contents: read
      packages: write
      checks: write
    uses: ./.github/workflows/run_gradle_task.yml
    secrets: inherit
    with:
      runs-on: ubuntu-latest
      gradle-task: >-
        publishAllPublicationsToSonatypeRepository
        --stacktrace
        --no-configuration-cache
        --no-parallel
      github-environment: sonatype-publish
      github-environment-url: https://s01.oss.sonatype.org/
      checkout-ref: ${{ inputs.checkout-ref }}


  github-packages-release:
    permissions:
      contents: read
      packages: write
      checks: write
    uses: ./.github/workflows/run_gradle_task.yml
    secrets: inherit
    with:
      runs-on: ubuntu-latest
      gradle-task: >-
        publishAllPublicationsToGitHubPackagesRepository
        --stacktrace
        --no-configuration-cache
        --no-parallel
      checkout-ref: ${{ inputs.checkout-ref }}
--------------------------------------------------------------------------------
/.github/workflows/run_publish_site.yml:
--------------------------------------------------------------------------------
name: Publish Site


on:
  workflow_dispatch:
    inputs:
      checkout-ref:
        description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'."
        required: false
        type: string
  workflow_call:
    inputs:
      checkout-ref:
        description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'."
        required: false
        type: string


concurrency:
  group: "Publish Site: ${{ github.workflow }}"
  cancel-in-progress: true


jobs:

  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repo
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.checkout-ref || github.ref }}

      - name: Validate Gradle Wrapper
        uses: gradle/wrapper-validation-action@v1

      - name: Setup JDK
        uses: actions/setup-java@v4
        with:
          distribution: temurin
          java-version: 11

      - uses: gradle/gradle-build-action@v2
        with:
          gradle-home-cache-cleanup: true
          arguments: |
            :docs:dokkatooGenerate

      - uses: actions/upload-pages-artifact@v3
        with:
          path: ./docs/build/dokka/html

  deploy:
    needs: build
    runs-on: ubuntu-latest
    permissions:
      pages: write # to deploy to Pages
      id-token: write # to verify the deployment originates from an appropriate source
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
--------------------------------------------------------------------------------
/.github/workflows/workflow_release.yml:
--------------------------------------------------------------------------------
name: Releases


on:
  workflow_dispatch:
    inputs:
      checkout-ref:
        description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'."
        required: false
        type: string
  push:
    branches: [ main ]
  release:
    types: [ created ]


concurrency:
  group: "Releases: ${{ github.workflow }} @ ${{ inputs.checkout-ref }} ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
  cancel-in-progress: false


jobs:

  tests:
    uses: ./.github/workflows/run_tests.yml
    permissions:
      checks: write
      contents: read
    with:
      checkout-ref: ${{ inputs.checkout-ref }}

  publish-maven:
    needs: tests
    # only publish when manually triggered, or it's the main branch, or it's for a release
    if: inputs.checkout-ref || github.ref == 'refs/heads/main' || (github.event_name == 'release' && github.event.action == 'created')
    uses: ./.github/workflows/run_publish_maven.yml
    secrets: inherit
    permissions:
      checks: write
      contents: write
      packages: write
    with:
      checkout-ref: ${{ inputs.checkout-ref }}

  publish-site:
    needs: tests
    # only publish when manually triggered, or it's for a release
    if: inputs.checkout-ref || (github.event_name == 'release' && github.event.action == 'created')
    uses: ./.github/workflows/run_publish_site.yml
    permissions:
      checks: write
      contents: read
      id-token: write # to verify the deployment originates from an appropriate source
      packages: write
      pages: write # to deploy to Pages
    with:
      checkout-ref: ${{ inputs.checkout-ref }}
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/KeyValue.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions

import org.apache.kafka.streams.KeyValue
import org.apache.kafka.streams.state.ValueAndTimestamp


/** Convert a [Pair] to a [KeyValue] */
inline fun <K, V> Pair<K, V>.toKeyValue(): KeyValue<K, V> =
  KeyValue.pair(first, second)

//infix fun <K, V> K.kvPair(value: V): KeyValue<K, V> = KeyValue(this, value)

//fun <V> valueAndTimestamp(value: V, timestamp: Long): ValueAndTimestamp<V> =
//  ValueAndTimestamp.make(value, timestamp)


/** Get the key of a [KeyValue] */
@JvmName("keyValueGetKey")
operator fun <K, V> KeyValue<K, V>.component1(): K = key

/** Get the value of a [KeyValue] */
@JvmName("keyValueGetValue")
operator fun <K, V> KeyValue<K, V>.component2(): V = value


/** Convert a [Triple] to a [KeyValue], with [a timestamped value][ValueAndTimestamp] */
inline fun <K, V> Triple<K, V, Long>.toKeyValueTimestamp(): KeyValue<K, ValueAndTimestamp<V>> =
  KeyValue.pair(first, ValueAndTimestamp.make(second, third))


/** Get the [timestamp][ValueAndTimestamp.timestamp] of a timestamped-[KeyValue] */
val <K, V> KeyValue<K, ValueAndTimestamp<V>>.timestamp: Long
  get() = value.timestamp()


/** Get the key of a timestamped-[KeyValue] */
@JvmName("keyValueTimestampGetKey")
operator fun <K, V> KeyValue<K, ValueAndTimestamp<V>>.component1(): K = key

/** Get the value of a timestamped-[KeyValue] */
@JvmName("keyValueTimestampGetValue")
operator fun <K, V> KeyValue<K, ValueAndTimestamp<V>>.component2(): V = value.value()

/** Get the timestamp of a timestamped-[KeyValue] */
@JvmName("keyValueTimestampGetTimestamp")
operator fun <K, V> KeyValue<K, ValueAndTimestamp<V>>.component3(): Long = value.timestamp()


/** Get the value of a [ValueAndTimestamp] */
@JvmName("valueTimestampGetValue")
operator fun <V> ValueAndTimestamp<V>.component1(): V = value()

/** Get the timestamp of a [ValueAndTimestamp] */
@JvmName("valueTimestampGetTimestamp")
operator fun <V> ValueAndTimestamp<V>.component2(): Long = timestamp()
--------------------------------------------------------------------------------
/modules/kotka-streams-kotlinx-serialization/src/main/kotlin/dev/adamko/kotka/kxs/KotkaJsonModule.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.kxs

import kotlinx.serialization.StringFormat
import org.apache.kafka.common.serialization.Serde
import org.apache.kafka.streams.kstream.Grouped
import org.apache.kafka.streams.kstream.Joined
import org.apache.kafka.streams.kstream.Materialized
import org.apache.kafka.streams.kstream.Produced
import org.apache.kafka.streams.kstream.Repartitioned
import org.apache.kafka.streams.processor.StateStore


class KotkaSerdeModule(
  val mapper: StringFormat
) {


  /** @see Materialized */
  inline fun <reified K, reified V, S : StateStore> materializedAs(
    name: String,
    keySerde: Serde<K> = mapper.serde(),
    valueSerde: Serde<V> = mapper.serde(),
  ): Materialized<K, V, S> =
    Materialized.`as`<K, V, S>(name)
      .withKeySerde(keySerde)
      .withValueSerde(valueSerde)


  /** @see Repartitioned */
  inline fun <reified K, reified V> repartitionedAs(
    name: String,
    keySerde: Serde<K> = mapper.serde(),
    valueSerde: Serde<V> = mapper.serde(),
  ): Repartitioned<K, V> =
    Repartitioned.`as`<K, V>(name)
      .withKeySerde(keySerde)
      .withValueSerde(valueSerde)


  /** @see Produced */
  inline fun <reified K, reified V> producedAs(
    name: String,
    keySerde: Serde<K> = mapper.serde(),
    valueSerde: Serde<V> = mapper.serde(),
  ): Produced<K, V> =
    Produced.`as`<K, V>(name)
      .withKeySerde(keySerde)
      .withValueSerde(valueSerde)


  /** @see Joined */
  inline fun <reified K, reified V, reified VO> joinedAs(
    name: String,
    keySerde: Serde<K> = mapper.serde(),
    valueSerde: Serde<V> = mapper.serde(),
    otherValueSerde: Serde<VO>? = mapper.serde(),
  ): Joined<K, V, VO> =
    Joined.`as`<K, V, VO>(name)
      .withKeySerde(keySerde)
      .withValueSerde(valueSerde)
      .withOtherValueSerde(otherValueSerde)


  /** See [Grouped.as] */
  inline fun <reified K, reified V> groupedAs(
    name: String,
    keySerde: Serde<K>? = mapper.serde(),
    valueSerde: Serde<V>? = mapper.serde(),
  ): Grouped<K, V> =
    Grouped.`as`<K, V>(name)
      .withKeySerde(keySerde)
      .withValueSerde(valueSerde)

}
--------------------------------------------------------------------------------
/modules/kotka-streams-kotlinx-serialization/src/main/kotlin/dev/adamko/kotka/kxs/binaryFormatSerde.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.kxs

import dev.adamko.kotka.topicdata.KeyValueSerdes
import kotlinx.serialization.BinaryFormat
import kotlinx.serialization.KSerializer
import kotlinx.serialization.serializer
import org.apache.kafka.common.serialization.Deserializer
import org.apache.kafka.common.serialization.Serde
import org.apache.kafka.common.serialization.Serializer


inline fun <reified T> BinaryFormat.kafkaSerializer(
  serializer: KSerializer<T> = serializersModule.serializer()
): Serializer<T> =
  Serializer { topic: String, data: T ->
    runCatching {
      encodeToByteArray(serializer, data)
    }.getOrElse { e ->
      println(
        """
          Exception on encodeToByteArray
          Topic: $topic
          topicData: $data
          topicData as T: $data
        """.trimIndent()
      )
      e.printStackTrace()
      throw e
    }
  }


inline fun <reified T> BinaryFormat.kafkaDeserializer(
  serializer: KSerializer<T> = serializersModule.serializer()
): Deserializer<T> =
  Deserializer { topic: String, data: ByteArray ->
    runCatching {
      decodeFromByteArray(serializer, data)
    }.getOrElse { e ->
      println(
        """
          Exception on decodeFromByteArray,
          Topic: $topic
          topicData: $data
          topicData as T: ${data as? T}
        """.trimIndent()
      )
      e.printStackTrace()
      throw e
    }
  }


inline fun <reified T> BinaryFormat.serde(
  serializer: KSerializer<T> = serializersModule.serializer()
): Serde<T> =
  object : Serde<T> {
    private val kafkaSerializer: Serializer<T> = kafkaSerializer(serializer)
    private val kafkaDeserializer: Deserializer<T> = kafkaDeserializer(serializer)
    override fun serializer(): Serializer<T> = kafkaSerializer
    override fun deserializer(): Deserializer<T> = kafkaDeserializer

    override fun toString(): String = "BinaryFormat serde: $serializer"
  }


inline fun <reified K, reified V> BinaryFormat.keyValueSerdes(
  keySerializer: KSerializer<K> = serializersModule.serializer(),
  valueSerializer: KSerializer<V> = serializersModule.serializer(),
): KeyValueSerdes<K, V> =
  KeyValueSerdes(
    keySerde = serde(keySerializer),
    valueSerde = serde(valueSerializer),
  )
--------------------------------------------------------------------------------
/modules/kotka-streams-kotlinx-serialization/src/main/kotlin/dev/adamko/kotka/kxs/stringFormatSerde.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.kxs

import dev.adamko.kotka.topicdata.KeyValueSerdes
import kotlinx.serialization.KSerializer
import kotlinx.serialization.StringFormat
import kotlinx.serialization.serializer
import org.apache.kafka.common.serialization.Deserializer
import org.apache.kafka.common.serialization.Serde
import org.apache.kafka.common.serialization.Serializer


inline fun <reified T> StringFormat.kafkaSerializer(
  serializer: KSerializer<T> = serializersModule.serializer()
): Serializer<T> =
  Serializer { topic: String, data: T ->
    runCatching {
      encodeToString(serializer, data).encodeToByteArray()
    }.getOrElse { e ->
      println(
        """
          Exception on encodeToString
          Topic: $topic
          topicData: $data
          topicData as T: $data
        """.trimIndent()
      )
      e.printStackTrace()
      throw e
    }
  }


inline fun <reified T> StringFormat.kafkaDeserializer(
  serializer: KSerializer<T> = serializersModule.serializer()
): Deserializer<T> =
  Deserializer { topic: String, data: ByteArray ->
    runCatching {
      decodeFromString(serializer, data.decodeToString())
    }.getOrElse { e ->
      println(
        """
          Exception on decodeFromString,
          Topic: $topic
          topicData: $data
          topicData as T: ${data as? T}
        """.trimIndent()
      )
      e.printStackTrace()
      throw e
    }
  }


inline fun <reified T> StringFormat.serde(
  serializer: KSerializer<T> = serializersModule.serializer()
): Serde<T> =
  object : Serde<T> {
    private val kafkaSerializer: Serializer<T> = kafkaSerializer(serializer)
    private val kafkaDeserializer: Deserializer<T> = kafkaDeserializer(serializer)
    override fun serializer(): Serializer<T> = kafkaSerializer
    override fun deserializer(): Deserializer<T> = kafkaDeserializer

    override fun toString(): String = "StringFormat serde: $serializer"
  }


inline fun <reified K, reified V> StringFormat.keyValueSerdes(
  keySerializer: KSerializer<K> = serializersModule.serializer(),
  valueSerializer: KSerializer<V> = serializersModule.serializer(),
): KeyValueSerdes<K, V> =
  KeyValueSerdes(
    keySerde = serde(keySerializer),
    valueSerde = serde(valueSerializer),
  )
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/test/kotlin/dev/adamko/kotka/extensions/StreamsBuilderTests.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions

import io.kotest.core.spec.style.FunSpec
import io.kotest.property.Arb
import io.kotest.property.arbitrary.list
import io.kotest.property.arbitrary.string
import io.kotest.property.checkAll
import io.mockk.confirmVerified
import io.mockk.every
import io.mockk.mockk
import io.mockk.verify
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.Consumed

class StreamsBuilderTests : FunSpec({

  context("verify StreamBuilder.stream() with vararg topics calls stream() with List<>") {
    val topicsArb = Arb.list(Arb.string())


    test("single topic") {
      val builderMock: StreamsBuilder = mockk {
        every { stream(any<List<String>>()) } returns mockk()
      }

      builderMock.stream<String, String>(consumed = null, "test-topic")

      verify(exactly = 1) {
        builderMock.stream(listOf("test-topic"))
      }
      confirmVerified(builderMock)
    }


    test("multiple topics") {
      val builderMock: StreamsBuilder = mockk {
        every { stream(any<List<String>>()) } returns mockk()
      }

      builderMock.stream<String, String>(
        "test-topic-1",
        "test-topic-2",
        "test-topic-3",
      )

      verify(exactly = 1) {
        builderMock.stream(
          listOf(
            "test-topic-1",
            "test-topic-2",
            "test-topic-3",
          )
        )
      }
      confirmVerified(builderMock)
    }


    test("typed array, without Consumed") {
      checkAll(topicsArb) { topics ->
        val builderMock: StreamsBuilder = mockk {
          every { stream(any<List<String>>()) } returns mockk()
        }

        builderMock.stream<String, String>(topics = topics.toTypedArray())

        verify(exactly = 1) { builderMock.stream(topics) }
        confirmVerified(builderMock)
      }
    }


    test("typed array, with Consumed") {
      checkAll(topicsArb) { topics ->
        val builderMock: StreamsBuilder = mockk {
          every {
            stream(any<List<String>>(), any<Consumed<String, String>>())
          } returns mockk()
        }
        val consumed: Consumed<String, String> = mockk()

        builderMock.stream(topics = topics.toTypedArray(), consumed = consumed)

        verify(exactly = 1) { builderMock.stream(topics, consumed) }
        confirmVerified(builderMock)
      }
    }
  }

})
--------------------------------------------------------------------------------
/modules/kotka-streams-framework/src/main/kotlin/dev/adamko/kotka/topicdata/GlobalKTableDefinition.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.topicdata

import dev.adamko.kotka.extensions.TimestampedQueryStoreType
import dev.adamko.kotka.extensions.materializedAs
import org.apache.kafka.common.utils.Bytes
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.GlobalKTable
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.kstream.Materialized
import org.apache.kafka.streams.state.KeyValueStore
import org.apache.kafka.streams.state.QueryableStoreTypes.timestampedKeyValueStore


/**
 * * An update topic [topicName]
 * * plus a table-source topic [tableName]
 * * plus a state store [storeName]
 *
 * equals a GlobalTableDefinition
 *
 * After defining a [GlobalKTable], use [buildInstance] to create an instance - this has a backing
 * topic from which the GlobalKTable will be updated.
 */
abstract class GlobalKTableDefinition<K, V>(
  val tableName: String,
  val storeName: String = "$tableName-store",
  /**
   * The name of the backing topic, from which this table will be filled.
   *
   * Updates to items can be sent or received with this topic.
   */
  final override val topicName: String = "$tableName-topic",
  override val serdes: KeyValueSerdes<K, V>,
  val storeType: TimestampedQueryStoreType<K, V> = timestampedKeyValueStore(),
) : TopicDefinition<K, V> {

  val topicNames: Set<String>

  init {
    require(tableName != topicName) {
      "tableName $tableName and topicName $topicName must be different"
    }
    require(tableName != storeName) {
      "tableName $tableName and storeName $storeName must be different"
    }
    topicNames = setOf(tableName, topicName)
  }


  fun materializer(): Materialized<K, V, KeyValueStore<Bytes, ByteArray>> = materializedAs(
    storeName,
    serdes.keySerde,
    serdes.valueSerde
  )

  /**
   * A [GlobalKTable] and [the backing topic][updatesKStream] from which the table is filled.
   */
  data class Instance<K, V>(
    val updatesKStream: KStream<K, V>,
    val globalKTable: GlobalKTable<K, V>,
    val definition: GlobalKTableDefinition<K, V>,
  )

  fun buildInstance(
    builder: StreamsBuilder,
  ): Instance<K, V> {
    val updatesKStream = builder.stream(
      topicName,
      serdes.consumer("${pid}.input-stream")
    )

    updatesKStream.to(
      tableName,
      serdes.producer("${pid}.updates-to-table")
    )

    val globalKTable = builder.globalTable(
      tableName,
      serdes.consumer("${pid}.input-global-table"),
      materializer(),
    )

    return Instance(updatesKStream, globalKTable, this)
  }

}
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/streams/KGroupedStream.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions.streams

import dev.adamko.kotka.extensions.namedAs
import org.apache.kafka.common.utils.Bytes
import org.apache.kafka.streams.kstream.Aggregator
import org.apache.kafka.streams.kstream.Initializer
import org.apache.kafka.streams.kstream.KGroupedStream
import org.apache.kafka.streams.kstream.KTable
import org.apache.kafka.streams.kstream.Materialized
import org.apache.kafka.streams.kstream.Reducer
import org.apache.kafka.streams.state.KeyValueStore


/** @see KGroupedStream.count */
fun <K, V> KGroupedStream<K, V>.count(
  name: String? = null,
  materialized: Materialized<K, Long, KeyValueStore<Bytes, ByteArray>>? = null,
): KTable<K, Long> {
  return when {
    name != null && materialized != null -> count(namedAs(name), materialized)
    name == null && materialized != null -> count(materialized)
    name != null && materialized == null -> count(namedAs(name))
    else -> count()
  }
}


// note: 'aggregate' and 'reduce' each have two extensions rather than one with
// default values. This is so the 'name' param will be the first param - I
// don't think it's as natural otherwise if it's not first, or a named
// parameter is required.
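//
// A sketch of the name-first style this enables (the KGroupedStream<String, Int>
// named `grouped` and the Materialized store config named `store` are assumed,
// not part of this file):
//
//   val totals: KTable<String, Int> = grouped.aggregate(
//     name = "sum-per-key",
//     materialized = store,
//     initializer = { 0 },
//   ) { _, value, aggregate -> aggregate + value }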


/** @see KGroupedStream.aggregate */
fun <K, V, VR> KGroupedStream<K, V>.aggregate(
  initializer: Initializer<VR>,
  materialized: Materialized<K, VR, KeyValueStore<Bytes, ByteArray>>,
  aggregator: Aggregator<in K, in V, VR>,
): KTable<K, VR> = aggregate(initializer, aggregator, materialized)


/** @see KGroupedStream.aggregate */
fun <K, V, VR> KGroupedStream<K, V>.aggregate(
  name: String,
  materialized: Materialized<K, VR, KeyValueStore<Bytes, ByteArray>>,
  initializer: Initializer<VR>,
  aggregator: Aggregator<in K, in V, VR>,
): KTable<K, VR> = aggregate(initializer, aggregator, namedAs(name), materialized)


/** @see KGroupedStream.reduce */
fun <K, V> KGroupedStream<K, V>.reduce(
  materialized: Materialized<K, V, KeyValueStore<Bytes, ByteArray>>,
  reducer: Reducer<V>,
): KTable<K, V> = reduce(reducer, materialized)


/** @see KGroupedStream.reduce */
fun <K, V> KGroupedStream<K, V>.reduce(
  name: String,
  materialized: Materialized<K, V, KeyValueStore<Bytes, ByteArray>>,
  reducer: Reducer<V>,
): KTable<K, V> = reduce(reducer, namedAs(name), materialized)


//<VR> CogroupedKStream<K, VR> cogroup(final Aggregator<? super K, ? super V, VR> aggregator);
//<W extends Window> TimeWindowedKStream<K, V> windowedBy(final Windows<W> windows);
//SessionWindowedKStream<K, V> windowedBy(final SessionWindows windows);
//TimeWindowedKStream<K, V> windowedBy(final SlidingWindows windows);
--------------------------------------------------------------------------------
61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 62 | echo. 63 | echo Please set the JAVA_HOME variable in your environment to match the 64 | echo location of your Java installation. 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/streams/KStreamTransform.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions.streams 2 | 3 | //import kotlin.experimental.ExperimentalTypeInference 4 | //import org.apache.kafka.streams.KeyValue 5 | //import org.apache.kafka.streams.kstream.KStream 6 | //import org.apache.kafka.streams.kstream.Transformer 7 | //import org.apache.kafka.streams.kstream.TransformerSupplier 8 | //import org.apache.kafka.streams.processor.ProcessorContext 9 | // 10 | ///** @see KStream.transform */ 11 | //inline fun KStream.transformKt( 12 | // crossinline block: BuildTransformer 13 | //): KStream { 14 | // 15 | // val transformer = object : Transformer> { 16 | // 17 | // private lateinit var scope: TransformerScope 18 | // 19 | // override fun init(procContext: ProcessorContext) { 20 | // scope = TransformerScope(procContext) 21 | // scope.block() 22 | // } 23 | // 24 | // override fun close() { 25 | // scope.close() 26 | // } 27 | // 28 | // override fun transform(key: inK, value: inV): KeyValue? { 29 | // return scope.mapper(key, value)?.toKeyValue() 30 | // } 31 | // } 32 | // 33 | // return transform({ transformer }) 34 | //} 35 | // 36 | ///** @see KStream.transform */ 37 | //@OptIn(ExperimentalTypeInference::class) 38 | //inline fun > KStream.transformKt2( 39 | // @BuilderInference block: Transformer.() -> Unit 40 | //): KStream { 41 | // 42 | // val transformer = object : Transformer> { 43 | // 44 | // private lateinit var scope: TransformerScope 45 | // 46 | // override fun init(procContext: ProcessorContext) { 47 | // scope = TransformerScope(procContext) 48 | // } 49 | // 50 | // override fun close() { 51 | // scope.close() 52 | // 53 | // sequence { 54 | // 55 | // } 56 | // } 57 | // 58 | // override fun transform(key: inK, value: inV): KeyValue? { 59 | // return scope.mapper(key, value)?.toKeyValue() 60 | // } 61 | // } 62 | // 63 | // return transform(TransformerSupplier { transformer }) 64 | //} 65 | // 66 | //typealias BuildTransformer = TransformerScope.() -> Unit 67 | // 68 | //typealias TransformerMapper = (key: inK, value: inV) -> Pair? 
69 | //typealias TransformerCloseHandler = (ProcessorContext) -> Unit 70 | // 71 | //open class TransformerScope( 72 | // val processorContext: ProcessorContext, 73 | //) : AutoCloseable { 74 | // 75 | // private var closeHandler: TransformerCloseHandler = {} 76 | // lateinit var mapper: TransformerMapper 77 | // 78 | // fun handleClose(closeHandler: TransformerCloseHandler) { 79 | // this.closeHandler = closeHandler 80 | // } 81 | // 82 | // fun mapEach(mapper: TransformerMapper) { 83 | // this.mapper = mapper 84 | // } 85 | // 86 | // override fun close() { 87 | // closeHandler(processorContext) 88 | // } 89 | //} 90 | -------------------------------------------------------------------------------- /settings.gradle.kts: -------------------------------------------------------------------------------- 1 | @file:Suppress("UnstableApiUsage") 2 | 3 | rootProject.name = "kotka-streams" 4 | 5 | pluginManagement { 6 | repositories { 7 | mavenCentral() 8 | gradlePluginPortal() 9 | } 10 | } 11 | 12 | @Suppress("UnstableApiUsage") 13 | dependencyResolutionManagement { 14 | repositoriesMode.set(RepositoriesMode.PREFER_SETTINGS) 15 | 16 | repositories { 17 | mavenCentral() 18 | } 19 | } 20 | 21 | include( 22 | ":docs", 23 | 24 | ":modules:kotka-streams-extensions", 25 | ":modules:kotka-streams-framework", 26 | ":modules:kotka-streams-kotlinx-serialization", 27 | ":modules:versions-platform", 28 | ) 29 | 30 | enableFeaturePreview("TYPESAFE_PROJECT_ACCESSORS") 31 | enableFeaturePreview("STABLE_CONFIGURATION_CACHE") 32 | 33 | //region git versioning 34 | val gitDescribe: Provider = 35 | providers 36 | .exec { 37 | workingDir(rootDir) 38 | commandLine( 39 | "git", 40 | "describe", 41 | "--always", 42 | "--tags", 43 | "--dirty=-DIRTY", 44 | "--broken=-BROKEN", 45 | "--match=v[0-9]*\\.[0-9]*\\.[0-9]*", 46 | ) 47 | isIgnoreExitValue = true 48 | }.standardOutput.asText.map { it.trim() } 49 | 50 | val currentBranchName: Provider = 51 | providers 52 | .exec { 53 | workingDir(rootDir) 54 | commandLine( 55 | "git", 56 | "branch", 57 | "--show-current", 58 | ) 59 | isIgnoreExitValue = true 60 | }.standardOutput.asText.map { it.trim() } 61 | 62 | val currentCommitHash: Provider = 63 | providers.exec { 64 | workingDir(rootDir) 65 | commandLine( 66 | "git", 67 | "rev-parse", 68 | "--short", 69 | "HEAD", 70 | ) 71 | isIgnoreExitValue = true 72 | }.standardOutput.asText.map { it.trim() } 73 | 74 | /** 75 | * The standard Gradle way of setting the version, which can be set on the CLI with 76 | * 77 | * ```shell 78 | * ./gradlew -Pversion=1.2.3 79 | * ``` 80 | * 81 | * This can be used to override [gitVersion]. 82 | */ 83 | val standardVersion: Provider = providers.gradleProperty("version") 84 | 85 | /** Match simple SemVer tags. The first group is the `major.minor.patch` digits. */ 86 | val semverRegex = Regex("""v((?:0|[1-9][0-9]*)\.(?:0|[1-9][0-9]*)\.(?:0|[1-9][0-9]*))""") 87 | 88 | val gitVersion: Provider = 89 | gitDescribe.zip(currentBranchName) { described, branch -> 90 | val detached = branch.isNullOrBlank() 91 | 92 | if (!detached) { 93 | "$branch-SNAPSHOT" 94 | } else { 95 | val descriptions = described.split("-") 96 | val head = descriptions.singleOrNull() ?: "" 97 | // drop the leading `v`, try to find the `major.minor.patch` digits group 98 | val headVersion = semverRegex.matchEntire(head)?.groupValues?.last() 99 | headVersion 100 | ?: currentCommitHash.orNull // fall back to using the git commit hash 101 | ?: "unknown" // just in case there's no git repo, e.g. 
someone downloaded a zip archive 102 | } 103 | } 104 | 105 | gradle.allprojects { 106 | extensions.add>("gitVersion", standardVersion.orElse(gitVersion)) 107 | } 108 | //endregion 109 | -------------------------------------------------------------------------------- /modules/kotka-streams-framework/src/main/kotlin/dev/adamko/kotka/topicdata/KeyValueSerdes.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.topicdata 2 | 3 | import dev.adamko.kotka.extensions.consumedAs 4 | import dev.adamko.kotka.extensions.groupedAs 5 | import dev.adamko.kotka.extensions.joinedAs 6 | import dev.adamko.kotka.extensions.producedAs 7 | import dev.adamko.kotka.extensions.repartitionedAs 8 | import org.apache.kafka.common.serialization.Serde 9 | import org.apache.kafka.common.serialization.Serdes 10 | import org.apache.kafka.streams.Topology 11 | import org.apache.kafka.streams.kstream.Consumed 12 | import org.apache.kafka.streams.kstream.Grouped 13 | import org.apache.kafka.streams.kstream.Joined 14 | import org.apache.kafka.streams.kstream.Produced 15 | import org.apache.kafka.streams.kstream.Repartitioned 16 | import org.apache.kafka.streams.processor.StreamPartitioner 17 | import org.apache.kafka.streams.processor.TimestampExtractor 18 | 19 | 20 | /** 21 | * A pair of [Serde]s - [one for the Key][keySerde], [another for the Value][valueSerde]. 22 | */ 23 | open class KeyValueSerdes( 24 | open val keySerde: Serde, 25 | open val valueSerde: Serde, 26 | ) { 27 | 28 | fun consumer( 29 | name: String? = null, 30 | keySerde: Serde? = this.keySerde, 31 | valueSerde: Serde? = this.valueSerde, 32 | resetPolicy: Topology.AutoOffsetReset? = null, 33 | timestampExtractor: TimestampExtractor? = null, 34 | ): Consumed = 35 | consumedAs( 36 | name = name, 37 | keySerde = keySerde, 38 | valueSerde = valueSerde, 39 | resetPolicy = resetPolicy, 40 | timestampExtractor = timestampExtractor, 41 | ) 42 | 43 | fun grouper( 44 | name: String? = null, 45 | keySerde: Serde? = this.keySerde, 46 | valueSerde: Serde? = this.valueSerde, 47 | ): Grouped = groupedAs( 48 | name = name, 49 | keySerde = keySerde, 50 | valueSerde = valueSerde, 51 | ) 52 | 53 | fun joiner( 54 | name: String? = null, 55 | keySerde: Serde? = this.keySerde, 56 | valueSerde: Serde? = this.valueSerde, 57 | otherValueSerde: Serde?, 58 | ): Joined = joinedAs( 59 | name = name, 60 | keySerde = keySerde, 61 | valueSerde = valueSerde, 62 | otherValueSerde = otherValueSerde, 63 | ) 64 | 65 | fun producer( 66 | name: String? = null, 67 | keySerde: Serde? = this.keySerde, 68 | valueSerde: Serde? = this.valueSerde, 69 | partitioner: StreamPartitioner? = null, 70 | ): Produced = producedAs( 71 | name = name, 72 | keySerde = keySerde, 73 | valueSerde = valueSerde, 74 | partitioner = partitioner, 75 | ) 76 | 77 | fun repartitioner( 78 | name: String? = null, 79 | keySerde: Serde? = this.keySerde, 80 | valueSerde: Serde? = this.valueSerde, 81 | numberOfPartitions: Int? = null, 82 | partitioner: StreamPartitioner? 
= null, 83 | ): Repartitioned = repartitionedAs( 84 | name = name, 85 | keySerde = keySerde, 86 | valueSerde = valueSerde, 87 | numberOfPartitions = numberOfPartitions, 88 | partitioner = partitioner, 89 | ) 90 | 91 | companion object { 92 | 93 | fun string(): KeyValueSerdes = 94 | KeyValueSerdes( 95 | Serdes.String(), 96 | Serdes.String(), 97 | ) 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | ### Intellij ### 3 | .idea/** 4 | *.iml 5 | *.ipr 6 | *.iws 7 | out/ 8 | 9 | ### Eclipse ### 10 | .metadata 11 | bin/ 12 | tmp/ 13 | *.tmp 14 | *.bak 15 | *.swp 16 | *~.nib 17 | local.properties 18 | .settings/ 19 | .loadpath 20 | .recommenders 21 | 22 | # External tool builders 23 | .externalToolBuilders/ 24 | 25 | # Locally stored "Eclipse launch configurations" 26 | *.launch 27 | 28 | # PyDev specific (Python IDE for Eclipse) 29 | *.pydevproject 30 | 31 | # CDT-specific (C/C++ Development Tooling) 32 | .cproject 33 | 34 | # CDT- autotools 35 | .autotools 36 | 37 | # Java annotation processor (APT) 38 | .factorypath 39 | 40 | # PDT-specific (PHP Development Tools) 41 | .buildpath 42 | 43 | # sbteclipse plugin 44 | .target 45 | 46 | # Tern plugin 47 | .tern-project 48 | 49 | # TeXlipse plugin 50 | .texlipse 51 | 52 | # STS (Spring Tool Suite) 53 | .springBeans 54 | 55 | # Code Recommenders 56 | .recommenders/ 57 | 58 | # Annotation Processing 59 | .apt_generated/ 60 | .apt_generated_test/ 61 | 62 | # Scala IDE specific (Scala & Java development for Eclipse) 63 | .cache-main 64 | .scala_dependencies 65 | .worksheet 66 | 67 | ### Kotlin ### 68 | # Compiled class file 69 | *.class 70 | 71 | # Log file 72 | *.log 73 | 74 | # BlueJ files 75 | *.ctxt 76 | 77 | # Mobile Tools for Java (J2ME) 78 | .mtj.tmp/ 79 | 80 | # Package Files # 81 | *.jar 82 | *.war 83 | *.nar 84 | *.ear 85 | *.zip 86 | *.tar.gz 87 | *.rar 88 | 89 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 90 | hs_err_pid* 91 | replay_pid* 92 | 93 | ### Linux ### 94 | *~ 95 | 96 | # temporary files which can be created if a process still has a handle open of a deleted file 97 | .fuse_hidden* 98 | 99 | # KDE directory preferences 100 | .directory 101 | 102 | # Linux trash folder which might appear on any partition or disk 103 | .Trash-* 104 | 105 | # .nfs files are created when an open file is removed but is still being accessed 106 | .nfs* 107 | 108 | ### macOS ### 109 | # General 110 | .DS_Store 111 | .AppleDouble 112 | .LSOverride 113 | 114 | # Icon must end with two \r 115 | Icon 116 | 117 | # Thumbnails 118 | ._* 119 | 120 | # Files that might appear in the root of a volume 121 | .DocumentRevisions-V100 122 | .fseventsd 123 | .Spotlight-V100 124 | .TemporaryItems 125 | .Trashes 126 | .VolumeIcon.icns 127 | .com.apple.timemachine.donotpresent 128 | 129 | # Directories potentially created on remote AFP share 130 | .AppleDB 131 | .AppleDesktop 132 | Network Trash Folder 133 | Temporary Items 134 | .apdisk 135 | 136 | ### Windows ### 137 | # Windows thumbnail cache files 138 | Thumbs.db 139 | Thumbs.db:encryptable 140 | ehthumbs.db 141 | ehthumbs_vista.db 142 | 143 | # Dump file 144 | *.stackdump 145 | 146 | # Folder config file 147 | [Dd]esktop.ini 148 | 149 | # Recycle Bin used on file shares 150 | $RECYCLE.BIN/ 151 | 152 | # Windows Installer files 153 | *.cab 154 | *.msi 155 | *.msix 156 | *.msm 157 | *.msp 158 | 159 | # Windows shortcuts 160 | 
*.lnk 161 | 162 | ### Gradle ### 163 | .gradle 164 | build/ 165 | 166 | # Ignore Gradle GUI config 167 | gradle-app.setting 168 | 169 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 170 | !gradle-wrapper.jar 171 | 172 | # Cache of project 173 | .gradletasknamecache 174 | 175 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 176 | # gradle/wrapper/gradle-wrapper.properties 177 | 178 | ### Gradle Patch ### 179 | **/build/ 180 | -------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/test/kotlin/dev/adamko/kotka/extensions/tables/KTableExtensionsTests.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions.tables 2 | 3 | import dev.adamko.kotka.extensions.materializedAs 4 | import dev.adamko.kotka.extensions.namedAs 5 | import io.kotest.core.spec.style.FunSpec 6 | import io.kotest.matchers.shouldBe 7 | import org.apache.kafka.streams.StreamsBuilder 8 | 9 | class KTableExtensionsTests : FunSpec({ 10 | 11 | context(".mapValues()") { 12 | 13 | context("expect extension function produces same topology as wrapped function") { 14 | 15 | test("name is present, materialized is null") { 16 | 17 | val originalBuilder = StreamsBuilder() 18 | originalBuilder 19 | .table("table-topic") 20 | .mapValues( 21 | { readOnlyKey, value -> "$readOnlyKey, $value" }, 22 | namedAs("map-values-test"), 23 | ) 24 | val describeOriginal = originalBuilder.build().describe() 25 | 26 | 27 | val extensionBuilder = StreamsBuilder() 28 | extensionBuilder 29 | .table("table-topic") 30 | .mapValues(name = "blah", materialized = null) { readOnlyKey, value -> 31 | "$readOnlyKey, $value" 32 | } 33 | val describeExtension = extensionBuilder.build().describe() 34 | 35 | describeExtension shouldBe describeOriginal 36 | } 37 | 38 | 39 | test("name is present, materialized is present") { 40 | 41 | val originalBuilder = StreamsBuilder() 42 | originalBuilder 43 | .table("table-topic") 44 | .mapValues( 45 | { readOnlyKey, value -> "$readOnlyKey, $value" }, 46 | namedAs("map-values-test"), 47 | materializedAs("store-name"), 48 | ) 49 | val describeOriginal = originalBuilder.build().describe() 50 | 51 | 52 | val extensionBuilder = StreamsBuilder() 53 | extensionBuilder 54 | .table("table-topic") 55 | .mapValues(name = "blah", materializedAs("store-name")) { readOnlyKey, value -> 56 | "$readOnlyKey, $value" 57 | } 58 | val describeExtension = extensionBuilder.build().describe() 59 | 60 | describeExtension shouldBe describeOriginal 61 | } 62 | 63 | 64 | test("name is null, materialized is present") { 65 | 66 | val originalBuilder = StreamsBuilder() 67 | originalBuilder 68 | .table("table-topic") 69 | .mapValues( 70 | { readOnlyKey, value -> "$readOnlyKey, $value" }, 71 | materializedAs("store-name"), 72 | ) 73 | val describeOriginal = originalBuilder.build().describe() 74 | 75 | 76 | val extensionBuilder = StreamsBuilder() 77 | extensionBuilder 78 | .table("table-topic") 79 | .mapValues(name = null, materializedAs("store-name")) { readOnlyKey, value -> 80 | "$readOnlyKey, $value" 81 | } 82 | val describeExtension = extensionBuilder.build().describe() 83 | 84 | describeExtension shouldBe describeOriginal 85 | } 86 | 87 | 88 | test("name is null, materialized is null") { 89 | 90 | val originalBuilder = StreamsBuilder() 91 | originalBuilder 92 | .table("table-topic") 93 | .mapValues { readOnlyKey, value -> "$readOnlyKey, $value" } 94 | val describeOriginal = 
originalBuilder.build().describe() 95 | 96 | 97 | val extensionBuilder = StreamsBuilder() 98 | extensionBuilder 99 | .table("table-topic") 100 | .mapValues(name = null, materialized = null) { readOnlyKey, value -> 101 | "$readOnlyKey, $value" 102 | } 103 | val describeExtension = extensionBuilder.build().describe() 104 | 105 | describeExtension shouldBe describeOriginal 106 | } 107 | } 108 | } 109 | }) 110 | -------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/namedOperations.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions 2 | 3 | import org.apache.kafka.common.serialization.Serde 4 | import org.apache.kafka.streams.Topology 5 | import org.apache.kafka.streams.kstream.* 6 | import org.apache.kafka.streams.processor.StreamPartitioner 7 | import org.apache.kafka.streams.processor.TimestampExtractor 8 | 9 | 10 | /** @see Named */ 11 | fun namedAs(name: String): Named = Named.`as`(name) 12 | 13 | 14 | /** @see Repartitioned */ 15 | fun repartitionedAs( 16 | name: String? = null, 17 | keySerde: Serde? = null, 18 | valueSerde: Serde? = null, 19 | numberOfPartitions: Int? = null, 20 | partitioner: StreamPartitioner? = null, 21 | ): Repartitioned { 22 | var repartitioned = Repartitioned.`as`(name) 23 | .withKeySerde(keySerde) 24 | .withValueSerde(valueSerde) 25 | .withStreamPartitioner(partitioner) 26 | 27 | if (numberOfPartitions != null) 28 | repartitioned = repartitioned.withNumberOfPartitions(numberOfPartitions) 29 | 30 | return repartitioned 31 | } 32 | 33 | 34 | /** @see Produced */ 35 | fun producedAs( 36 | name: String? = null, 37 | keySerde: Serde? = null, 38 | valueSerde: Serde? = null, 39 | partitioner: StreamPartitioner? = null, 40 | ): Produced = 41 | Produced.`as`(name) 42 | .withKeySerde(keySerde) 43 | .withValueSerde(valueSerde) 44 | .withStreamPartitioner(partitioner) 45 | 46 | 47 | /** @see Joined */ 48 | fun joinedAs( 49 | name: String? = null, 50 | keySerde: Serde? = null, 51 | valueSerde: Serde? = null, 52 | otherValueSerde: Serde?, 53 | ): Joined = 54 | Joined.`as`(name) 55 | .withKeySerde(keySerde) 56 | .withValueSerde(valueSerde) 57 | .withOtherValueSerde(otherValueSerde) 58 | 59 | 60 | /** @see Grouped.as */ 61 | fun groupedAs( 62 | name: String? = null, 63 | keySerde: Serde? = null, 64 | valueSerde: Serde? = null, 65 | ): Grouped = 66 | Grouped.`as`(name) 67 | .withKeySerde(keySerde) 68 | .withValueSerde(valueSerde) 69 | 70 | 71 | /** @see Consumed.as */ 72 | fun consumedAs( 73 | name: String? = null, 74 | keySerde: Serde? = null, 75 | valueSerde: Serde? = null, 76 | resetPolicy: Topology.AutoOffsetReset? = null, 77 | timestampExtractor: TimestampExtractor? = null, 78 | ): Consumed = 79 | Consumed.`as`(name) 80 | .withKeySerde(keySerde) 81 | .withValueSerde(valueSerde) 82 | .withOffsetResetPolicy(resetPolicy) 83 | .withTimestampExtractor(timestampExtractor) 84 | 85 | 86 | /** @see [Branched.`as`] */ 87 | fun branchedAs(name: String): Branched = 88 | Branched.`as`(name) 89 | 90 | 91 | /** @see TableJoined.as */ 92 | fun tableJoined( 93 | name: String? = null, 94 | partitioner: StreamPartitioner? = null, 95 | otherPartitioner: StreamPartitioner? = null, 96 | ): TableJoined = 97 | TableJoined.`as`(name) 98 | .withPartitioner(partitioner) 99 | .withOtherPartitioner(otherPartitioner) 100 | 101 | 102 | /** @see Printed */ 103 | fun printed( 104 | name: String? 
= null, 105 | outputStream: PrintedOutputStream, 106 | label: String? = null, 107 | mapper: KeyValueMapper? = null, 108 | ): Printed { 109 | 110 | var printed: Printed = when (outputStream) { 111 | is PrintedOutputStream.File -> Printed.toFile(outputStream.filePath) 112 | PrintedOutputStream.SysOut -> Printed.toSysOut() 113 | } 114 | 115 | if (name != null) 116 | printed = printed.withName(name) 117 | 118 | if (mapper != null) 119 | printed = printed.withKeyValueMapper(mapper) 120 | 121 | if (label != null) 122 | printed = printed.withLabel(label) 123 | 124 | return printed 125 | } 126 | 127 | 128 | /** @see printed */ 129 | sealed interface PrintedOutputStream { 130 | /** @see Printed.toSysOut */ 131 | object SysOut : PrintedOutputStream 132 | /** @see Printed.toFile */ 133 | data class File(val filePath: String) : PrintedOutputStream 134 | } 135 | -------------------------------------------------------------------------------- /.github/workflows/run_gradle_task.yml: -------------------------------------------------------------------------------- 1 | name: Gradle Task 2 | run-name: "Gradle Task ${{ inputs.gradle-task }} @ ${{ inputs.runs-on }}" 3 | 4 | # Reusable Workflow for running a Gradle task 5 | 6 | on: 7 | workflow_dispatch: 8 | inputs: 9 | gradle-task: 10 | description: "The Gradle task to run, including any flags" 11 | required: true 12 | type: string 13 | runs-on: 14 | description: "OS to run the task on" 15 | required: true 16 | type: string 17 | checkout-ref: 18 | description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'." 19 | required: false 20 | type: string 21 | github-environment: 22 | description: "GitHub Environment name" 23 | required: false 24 | type: string 25 | github-environment-url: 26 | description: "GitHub Environment display URL" 27 | required: false 28 | type: string 29 | workflow_call: 30 | inputs: 31 | gradle-task: 32 | description: "The Gradle task to run, including any flags" 33 | required: true 34 | type: string 35 | runs-on: 36 | description: "OS to run the task on" 37 | required: true 38 | type: string 39 | checkout-ref: 40 | description: "The branch, tag or SHA to checkout. See actions/checkout 'ref'." 
41 | required: false 42 | type: string 43 | github-environment: 44 | description: "GitHub Environment name" 45 | required: false 46 | type: string 47 | github-environment-url: 48 | description: "GitHub Environment display URL" 49 | required: false 50 | type: string 51 | 52 | 53 | concurrency: 54 | # note: the Workflow inputs are also included in the concurrency group 55 | group: "Gradle Task: ${{ github.workflow }} ${{ join(inputs.*) }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}" 56 | cancel-in-progress: true 57 | 58 | 59 | permissions: 60 | contents: read 61 | checks: write # required by mikepenz/action-junit-report 62 | packages: write 63 | 64 | 65 | jobs: 66 | 67 | run-task: 68 | runs-on: ${{ inputs.runs-on }} 69 | name: "./gradlew ${{ inputs.gradle-task}} @ ${{ inputs.runs-on }}" 70 | timeout-minutes: 60 71 | environment: 72 | name: ${{ inputs.github-environment }} 73 | url: ${{ inputs.github-environment-url }} 74 | steps: 75 | 76 | ### Gradle task ### 77 | 78 | - name: Checkout the repo 79 | uses: actions/checkout@v4 80 | with: 81 | ref: ${{ inputs.checkout-ref || github.ref }} 82 | 83 | - name: Validate Gradle Wrapper 84 | uses: gradle/wrapper-validation-action@v1 85 | 86 | - name: Setup JDK 87 | uses: actions/setup-java@v4 88 | with: 89 | distribution: temurin 90 | java-version: 11 91 | 92 | - uses: gradle/gradle-build-action@v2 93 | with: 94 | gradle-home-cache-cleanup: true 95 | arguments: ${{ inputs.gradle-task }} 96 | env: 97 | "ORG_GRADLE_PROJECT_signing.keyId": ${{ secrets.MAVEN_SONATYPE_SIGNING_KEY_ID }} 98 | "ORG_GRADLE_PROJECT_signing.key": ${{ secrets.MAVEN_SONATYPE_SIGNING_KEY }} 99 | "ORG_GRADLE_PROJECT_signing.password": ${{ secrets.MAVEN_SONATYPE_SIGNING_PASSWORD }} 100 | ORG_GRADLE_PROJECT_sonatypeRepositoryUsername: ${{ secrets.MAVEN_SONATYPE_USERNAME }} 101 | ORG_GRADLE_PROJECT_sonatypeRepositoryPassword: ${{ secrets.MAVEN_SONATYPE_PASSWORD }} 102 | 103 | ORG_GRADLE_PROJECT_gitHubPackagesUsername: ${{ github.actor }} 104 | ORG_GRADLE_PROJECT_gitHubPackagesPassword: ${{ secrets.GITHUB_TOKEN }} 105 | 106 | - name: Upload build reports 107 | if: failure() 108 | uses: actions/upload-artifact@v4 109 | with: 110 | name: build-report-${{ runner.os }}${{ github.action }} 111 | path: | 112 | **/build/reports/ 113 | **/*.hprof 114 | **/*.log 115 | if-no-files-found: ignore 116 | 117 | - name: Publish Test Reports 118 | uses: mikepenz/action-junit-report@v4 119 | if: always() 120 | with: 121 | report_paths: | 122 | **/build/test-results/**/TEST-*.xml 123 | require_tests: false 124 | -------------------------------------------------------------------------------- /gradle/libs.versions.toml: -------------------------------------------------------------------------------- 1 | [versions] 2 | 3 | ## Compilation 4 | 5 | kotlin = "1.9.22" # https://github.com/JetBrains/kotlin/releases 6 | jvm = "11" 7 | 8 | ## Libs 9 | 10 | kafka = "3.6.1" # https://kafka.apache.org/downloads 11 | kotlinx-serialization = "1.6.2" # https://github.com/Kotlin/kotlinx.serialization/releases/tag/v1.3.3 12 | kotlinx-knit = "0.5.1" # https://github.com/Kotlin/kotlinx-knit/releases 13 | kotlin-dokka = "1.9.10" # https://search.maven.org/artifact/org.jetbrains.dokka/dokka-gradle-plugin 14 | 15 | slf4j = "2.0.11" 16 | 17 | ## Test libs 18 | 19 | kotest = "5.8.0" # https://github.com/kotest/kotest/releases 20 | 21 | mockk = "1.13.9" # https://search.maven.org/artifact/io.mockk/mockk 22 | junit = "5.10.1" # https://github.com/junit-team/junit5/releases 23 | 24 | ## Plugins 25 
| binaryCompatValidator = "0.12.1" # https://search.maven.org/artifact/org.jetbrains.kotlinx/binary-compatibility-validator 26 | dokkatoo = "2.0.0" 27 | 28 | ########### 29 | [libraries] 30 | ########### 31 | 32 | kotlin-bom = { module = "org.jetbrains.kotlin:kotlin-bom", version.ref = "kotlin" } 33 | 34 | kafka-streams = { module = "org.apache.kafka:kafka-streams", version.ref = "kafka" } 35 | 36 | kotlinxSerialization-bom = { module = "org.jetbrains.kotlinx:kotlinx-serialization-bom", version.ref = "kotlinx-serialization" } 37 | kotlinxSerialization-core = { module = "org.jetbrains.kotlinx:kotlinx-serialization-core" } 38 | kotlinxSerialization-json = { module = "org.jetbrains.kotlinx:kotlinx-serialization-json" } 39 | kotlinxSerialization-protobuf = { module = "org.jetbrains.kotlinx:kotlinx-serialization-protobuf" } 40 | kotlinxSerialization-cbor = { module = "org.jetbrains.kotlinx:kotlinx-serialization-cbor" } 41 | kotlinxSerialization-properties = { module = "org.jetbrains.kotlinx:kotlinx-serialization-properties" } 42 | 43 | slf4j-api = { module = "org.slf4j:slf4j-api", version.ref = "slf4j" } 44 | slf4j-simple = { module = "org.slf4j:slf4j-simple", version.ref = "slf4j" } 45 | 46 | ## Dokka 47 | kotlin-dokkaPlugin-allModulesPage = { module = "org.jetbrains.dokka:all-modules-page-plugin", version.ref = "kotlin-dokka" } 48 | kotlin-dokkaPlugin-templating = { module = "org.jetbrains.dokka:templating-plugin", version.ref = "kotlin-dokka" } 49 | 50 | 51 | ### Test libs ## 52 | 53 | # Kotest 54 | kotest-bom = { module = "io.kotest:kotest-bom", version.ref = "kotest" } 55 | kotest-assertionsCore = { module = "io.kotest:kotest-assertions-core" } 56 | kotest-assertionsJson = { module = "io.kotest:kotest-assertions-json" } 57 | kotest-property = { module = "io.kotest:kotest-property" } 58 | kotest-frameworkEngine = { module = "io.kotest:kotest-framework-engine" } 59 | kotest-frameworkDatatest = { module = "io.kotest:kotest-framework-datatest" } 60 | kotest-runnerJUnit5 = { module = "io.kotest:kotest-runner-junit5" } 61 | 62 | 63 | # Knit 64 | kotlinx-knit = { module = "org.jetbrains.kotlinx:kotlinx-knit", version.ref = "kotlinx-knit" } 65 | kotlinx-knitTest = { module = "org.jetbrains.kotlinx:kotlinx-knit-test", version.ref = "kotlinx-knit" } 66 | 67 | 68 | # JUnit 69 | junit-bom = { module = "org.junit:junit-bom", version.ref = "junit" } 70 | 71 | # Mockk 72 | mockk = { module = "io.mockk:mockk", version.ref = "mockk" } 73 | 74 | ### Gradle Plugins ### 75 | 76 | gradlePlugin-kotlin = { module = "org.jetbrains.kotlin:kotlin-gradle-plugin", version.ref = "kotlin" } 77 | gradlePlugin-kotlinxKnit = { module = "org.jetbrains.kotlinx:kotlinx-knit", version.ref = "kotlinx-knit" } 78 | gradlePlugin-kotlinxBinaryCompatValidator = { module = "org.jetbrains.kotlinx:binary-compatibility-validator", version.ref = "kotlin" } 79 | 80 | gradlePlugin-dokkatoo = { module = "dev.adamko.dokkatoo:dokkatoo-plugin", version.ref = "dokkatoo" } 81 | 82 | gradlePlugin-kotest = { module = "io.kotest:kotest-framework-multiplatform-plugin-gradle", version.ref = "kotest" } 83 | 84 | gradlePlugin-kotlinxSerialization = { module = "org.jetbrains.kotlin:kotlin-serialization", version.ref = "kotlin" } 85 | 86 | ######### 87 | [plugins] 88 | ######### 89 | 90 | 91 | ######### 92 | [bundles] 93 | ######### 94 | 95 | kotest = [ 96 | "kotest-assertionsCore", 97 | "kotest-assertionsJson", 98 | "kotest-property", 99 | "kotest-frameworkEngine", 100 | "kotest-frameworkDatatest", 101 | "kotest-runnerJUnit5", 102 | ] 103 | 
-------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/test/kotlin/dev/adamko/kotka/extensions/KeyValueTest.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions 2 | 3 | import io.kotest.core.spec.style.FunSpec 4 | import io.kotest.matchers.shouldBe 5 | import org.apache.kafka.streams.KeyValue 6 | import org.apache.kafka.streams.state.ValueAndTimestamp 7 | 8 | class KeyValueTest : FunSpec({ 9 | 10 | context("KeyValue") { 11 | 12 | test("expect Pair toKeyValue() returns KeyValue") { 13 | 14 | val pair: Pair = "key 123" to "value 999" 15 | 16 | val result: KeyValue = pair.toKeyValue() 17 | 18 | result.key shouldBe "key 123" 19 | result.key shouldBe pair.first 20 | 21 | result.value shouldBe "value 999" 22 | result.value shouldBe pair.second 23 | } 24 | 25 | context("componentN() operators") { 26 | val keyValue = KeyValue.pair( 27 | "key 123", 28 | "value 999", 29 | ) 30 | 31 | test("component1() should return key") { 32 | keyValue.component1() shouldBe "key 123" 33 | keyValue.component1() shouldBe keyValue.key 34 | } 35 | 36 | test("component2() should return value") { 37 | keyValue.component2() shouldBe "value 999" 38 | keyValue.component2() shouldBe keyValue.value 39 | } 40 | 41 | test("destructuring declaration should return (key, value)") { 42 | val (key, value) = keyValue 43 | 44 | key shouldBe "key 123" 45 | key shouldBe keyValue.key 46 | 47 | value shouldBe "value 999" 48 | value shouldBe keyValue.value 49 | } 50 | } 51 | } 52 | 53 | context("KeyValue>") { 54 | 55 | test("expect Triple toKeyValueTimestamp() returns KeyValueAndTimestamp") { 56 | 57 | val triple = Triple("key 123", "value 999", 12345L) 58 | 59 | val result: KeyValue> = triple.toKeyValueTimestamp() 60 | 61 | result.key shouldBe "key 123" 62 | result.key shouldBe triple.first 63 | 64 | result.value.value() shouldBe "value 999" 65 | result.value.value() shouldBe triple.second 66 | 67 | result.value.timestamp() shouldBe 12345L 68 | result.value.timestamp() shouldBe triple.third 69 | } 70 | 71 | context("componentN() operators") { 72 | val keyValueTimestamp = KeyValue.pair( 73 | "key 123", 74 | ValueAndTimestamp.make("value 999", 12345L) 75 | ) 76 | 77 | test("component1() should return key") { 78 | keyValueTimestamp.component1() shouldBe "key 123" 79 | keyValueTimestamp.component1() shouldBe keyValueTimestamp.key 80 | } 81 | 82 | test("component2() should return value") { 83 | keyValueTimestamp.component2() shouldBe "value 999" 84 | keyValueTimestamp.component2() shouldBe keyValueTimestamp.value.value() 85 | } 86 | 87 | test("component3() should return timestamp") { 88 | keyValueTimestamp.component3() shouldBe 12345L 89 | keyValueTimestamp.component3() shouldBe keyValueTimestamp.value.timestamp() 90 | } 91 | 92 | test("destructuring declaration should return (key, value, timestamp)") { 93 | val (key, value, timestamp) = keyValueTimestamp 94 | 95 | key shouldBe "key 123" 96 | key shouldBe keyValueTimestamp.key 97 | 98 | value shouldBe "value 999" 99 | value shouldBe keyValueTimestamp.value.value() 100 | 101 | timestamp shouldBe 12345L 102 | timestamp shouldBe keyValueTimestamp.value.timestamp() 103 | } 104 | } 105 | 106 | test("timestamp extension val should return timestamp") { 107 | val keyValueTimestamp = KeyValue.pair( 108 | "key 123", 109 | ValueAndTimestamp.make("value 999", 12345L) 110 | ) 111 | 112 | keyValueTimestamp.timestamp shouldBe 12345L 113 | keyValueTimestamp.timestamp 
shouldBe keyValueTimestamp.value.timestamp() 114 | } 115 | } 116 | 117 | 118 | context("ValueAndTimestamp") { 119 | val valueAndTimestamp = ValueAndTimestamp.make("value 999", 12345L) 120 | 121 | test("component1() should return value") { 122 | valueAndTimestamp.component1() shouldBe "value 999" 123 | valueAndTimestamp.component1() shouldBe valueAndTimestamp.value() 124 | } 125 | 126 | test("component2() should return timestamp") { 127 | valueAndTimestamp.component2() shouldBe 12345L 128 | valueAndTimestamp.component2() shouldBe valueAndTimestamp.timestamp() 129 | } 130 | 131 | test("destructuring declaration should return (value, timestamp)") { 132 | val (value, timestamp) = valueAndTimestamp 133 | 134 | value shouldBe "value 999" 135 | value shouldBe valueAndTimestamp.value() 136 | 137 | timestamp shouldBe 12345L 138 | timestamp shouldBe valueAndTimestamp.timestamp() 139 | } 140 | } 141 | }) 142 | -------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/materialized.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions 2 | 3 | import kotlin.time.Duration 4 | import kotlin.time.toJavaDuration 5 | import org.apache.kafka.common.serialization.Serde 6 | import org.apache.kafka.common.utils.Bytes 7 | import org.apache.kafka.streams.kstream.Materialized 8 | import org.apache.kafka.streams.processor.StateStore 9 | import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier 10 | import org.apache.kafka.streams.state.KeyValueStore 11 | import org.apache.kafka.streams.state.SessionBytesStoreSupplier 12 | import org.apache.kafka.streams.state.SessionStore 13 | import org.apache.kafka.streams.state.WindowBytesStoreSupplier 14 | import org.apache.kafka.streams.state.WindowStore 15 | 16 | 17 | /** @see Materialized */ 18 | fun materializedAs( 19 | storeName: String, 20 | keySerde: Serde? = null, 21 | valueSerde: Serde? = null, 22 | loggingConfig: Map? = null, 23 | cachingEnabled: Boolean = true, 24 | retention: Duration? = null, 25 | ): Materialized { 26 | val materialized = Materialized.`as`(storeName) 27 | 28 | return materialized.withConfig( 29 | keySerde, 30 | valueSerde, 31 | loggingConfig, 32 | cachingEnabled, 33 | retention 34 | ) 35 | } 36 | 37 | 38 | /** @see Materialized */ 39 | fun materializedAs( 40 | storeSupplier: WindowBytesStoreSupplier, 41 | keySerde: Serde? = null, 42 | valueSerde: Serde? = null, 43 | loggingConfig: Map? = null, 44 | cachingEnabled: Boolean = true, 45 | retention: Duration? = null, 46 | ): Materialized> { 47 | 48 | val materialized: Materialized> = 49 | Materialized.`as`(storeSupplier) 50 | 51 | return materialized.withConfig( 52 | keySerde, 53 | valueSerde, 54 | loggingConfig, 55 | cachingEnabled, 56 | retention 57 | ) 58 | } 59 | 60 | 61 | /** @see Materialized */ 62 | fun materializedAs( 63 | storeSupplier: SessionBytesStoreSupplier, 64 | keySerde: Serde? = null, 65 | valueSerde: Serde? = null, 66 | loggingConfig: Map? = null, 67 | cachingEnabled: Boolean = true, 68 | retention: Duration? = null, 69 | ): Materialized> { 70 | 71 | val materialized: Materialized> = 72 | Materialized.`as`(storeSupplier) 73 | 74 | return materialized.withConfig( 75 | keySerde, 76 | valueSerde, 77 | loggingConfig, 78 | cachingEnabled, 79 | retention 80 | ) 81 | } 82 | 83 | 84 | /** @see Materialized */ 85 | fun materializedAs( 86 | storeSupplier: KeyValueBytesStoreSupplier, 87 | keySerde: Serde? 
= null, 88 | valueSerde: Serde? = null, 89 | loggingConfig: Map? = null, 90 | cachingEnabled: Boolean = true, 91 | retention: Duration? = null, 92 | ): Materialized> { 93 | 94 | val materialized: Materialized> = 95 | Materialized.`as`(storeSupplier) 96 | 97 | return materialized.withConfig( 98 | keySerde, 99 | valueSerde, 100 | loggingConfig, 101 | cachingEnabled, 102 | retention 103 | ) 104 | } 105 | 106 | 107 | fun materializedWith( 108 | keySerde: Serde? = null, 109 | valueSerde: Serde? = null, 110 | loggingConfig: Map? = null, 111 | cachingEnabled: Boolean = true, 112 | retention: Duration? = null, 113 | ): Materialized = Materialized.with(keySerde, valueSerde) 114 | .withConfig( 115 | loggingConfig = loggingConfig, 116 | cachingEnabled = cachingEnabled, 117 | retention = retention, 118 | ) 119 | 120 | 121 | /** @see Materialized */ 122 | fun Materialized.withConfig( 123 | keySerde: Serde? = null, 124 | valueSerde: Serde? = null, 125 | loggingConfig: Map? = null, 126 | cachingEnabled: Boolean = true, 127 | retention: Duration? = null, 128 | ): Materialized { 129 | var materialized: Materialized = this 130 | 131 | if (keySerde != null) 132 | materialized = materialized.withKeySerde(keySerde) 133 | 134 | if (valueSerde != null) 135 | materialized = materialized.withValueSerde(valueSerde) 136 | 137 | if (retention != null) 138 | materialized = materialized.withRetention(retention.toJavaDuration()) 139 | 140 | if (loggingConfig != null) 141 | materialized = materialized.withLoggingEnabled(loggingConfig) 142 | 143 | materialized = when (cachingEnabled) { 144 | true -> materialized.withCachingEnabled() 145 | false -> materialized.withCachingDisabled() 146 | } 147 | 148 | return materialized 149 | } 150 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![GitHub license](https://img.shields.io/github/license/adamko-dev/kotka-streams?style=flat-square)](https://github.com/adamko-dev/kotka-streams/blob/main/LICENSE) 2 | [![](https://jitpack.io/v/adamko-dev/kotka-streams.svg?style=flat-square)](https://jitpack.io/#adamko-dev/kotka-streams) 3 | [![Maven Central](https://img.shields.io/maven-central/v/dev.adamko.kotka/kotka-streams?color=%234c1&style=flat-square)](https://search.maven.org/search?q=g:dev.adamko.kotka) 4 | [![Maven Central Snapshots](https://img.shields.io/maven-metadata/v?label=snapshots&metadataUrl=https%3A%2F%2Fs01.oss.sonatype.org%2Fcontent%2Frepositories%2Fsnapshots%2Fdev%2Fadamko%2Fkotka%2Fkotka-streams%2Fmaven-metadata.xml&style=flat-square&color=%234ff)](https://s01.oss.sonatype.org/content/repositories/snapshots/dev/adamko/kotka/) 5 | 6 | # Kotka Streams - Kotlin for Kafka Streams 7 | 8 | Using [Kotka](https://github.com/adamko-dev/kotka-streams) means a more pleasant experience while 9 | using [Kafka Streams](https://kafka.apache.org/documentation/streams/). 10 | 11 | 12 | ## Quickstart 13 | 14 | Add a dependency on `kotka-streams-extensions` for the basics. 15 | 16 | ```kotlin 17 | // build.gradle.kts 18 | repositories { 19 | mavenCentral() 20 | } 21 | 22 | dependencies { 23 | implementation("dev.adamko.kotka:kotka-streams-extensions:$kotkaVersion") 24 | } 25 | ``` 26 | 27 | ## Modules 28 | 29 | There are three modules. 
Add a dependency on `dev.adamko.kotka:kotka-streams` to get them all 30 | at once: 31 | 32 | ```kotlin 33 | dependencies { 34 | implementation("dev.adamko.kotka:kotka-streams:$kotkaVersion") 35 | } 36 | ``` 37 | 38 | ### `kotka-streams-extensions` 39 | 40 | Contains the basic extension functions to make Kafka Streams more Kotlin-esque. 41 | 42 | ```kotlin 43 | implementation("dev.adamko.kotka:kotka-streams-extensions:$kotkaVersion") 44 | ``` 45 | 46 | ```kotlin 47 | import dev.adamko.kotka.extensions.tables.* 48 | import dev.adamko.kotka.extensions.streams.* 49 | import dev.adamko.kotka.extensions.* 50 | 51 | data class MusicalBand( 52 | val name: String, 53 | val memberNames: List<String>, 54 | ) 55 | 56 | builder.stream<String, MusicalBand>("musical-bands") 57 | .flatMap("band-member-names-to-band-name") { _: String, band: MusicalBand -> 58 | band.memberNames.map { memberName -> memberName to band.name } 59 | } 60 | .groupByKey(groupedAs("map-of-band-member-to-band-names")) 61 | ``` 62 | 63 | ### `kotka-streams-framework` 64 | 65 | A light framework for structuring topics and records. 66 | 67 | ```kotlin 68 | implementation("dev.adamko.kotka:kotka-streams-framework:$kotkaVersion") 69 | ``` 70 | 71 | Use `TopicRecord` to standardise the data on each topic. Records can then easily be converted from 72 | one type to another. 73 | 74 | ```kotlin 75 | import dev.adamko.kotka.extensions.tables.* 76 | import dev.adamko.kotka.extensions.streams.* 77 | import dev.adamko.kotka.extensions.* 78 | import dev.adamko.kotka.topicdata.* 79 | 80 | data class Animal( 81 | val id: Long, 82 | val name: String, 83 | ) : TopicRecord<Long> { 84 | override val topicKey: Long by ::id 85 | } 86 | 87 | data class Pet( 88 | val id: Long, 89 | val name: String, 90 | ) : TopicRecord<Long> { 91 | override val topicKey: Long by ::id 92 | } 93 | 94 | val petUpdates = builder.stream<Long, Animal>("animals") 95 | .mapTopicRecords("convert-animals-to-pets") { _, animal -> 96 | Pet(animal.id, animal.name) 97 | } 98 | ``` 99 | 100 | Use `KeyValueSerdes` to define both the key and value serdes for a topic. 101 | A `TopicDefinition` ties both of these together. 102 | 103 | ```kotlin 104 | /** All [Pet] updates */ 105 | object PetUpdatesTopic : TopicDefinition<Long, Pet> { 106 | override val topicName = "pet-updates" 107 | override val serdes = KeyValueSerdes(Serdes.Long(), PetSerde()) 108 | } 109 | 110 | petUpdates 111 | .to( 112 | PetUpdatesTopic.topicName, 113 | PetUpdatesTopic.serdes.producer("send-pet-updates-to-pet-update-topic") 114 | ) 115 | 116 | ``` 117 | 118 | ### `kotka-streams-kotlinx-serialization` 119 | 120 | Use [Kotlinx Serialization](https://github.com/Kotlin/kotlinx.serialization/) for topic key/value serdes.
121 | 122 | ```kotlin 123 | implementation("dev.adamko.kotka:kotka-streams-kotlinx-serialization:$kotkaVersion") 124 | ``` 125 | 126 | ```kotlin 127 | import dev.adamko.kotka.extensions.tables.* 128 | import dev.adamko.kotka.extensions.streams.* 129 | import dev.adamko.kotka.extensions.* 130 | import dev.adamko.kotka.topicdata.* 131 | import dev.adamko.kotka.kxs.* 132 | 133 | val jsonMapper = Json {} 134 | 135 | @Serializable 136 | data class Sku( 137 | val sku: String 138 | ) 139 | 140 | @Serializable 141 | data class ShopItem( 142 | val id: Sku, 143 | val name: String, 144 | ) : TopicRecord { 145 | override val topicKey: Sku by ::id 146 | } 147 | 148 | object ShopItemTopic : TopicDefinition { 149 | override val topicName = "shop-item-updates" 150 | override val serdes = KeyValueSerdes.kxsJson(jsonMapper) 151 | } 152 | ``` 153 | -------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/state/keyValueStore.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions.state 2 | 3 | import org.apache.kafka.common.serialization.Serializer 4 | import org.apache.kafka.streams.KeyValue 5 | import org.apache.kafka.streams.state.KeyValueIterator 6 | import org.apache.kafka.streams.state.KeyValueStore 7 | 8 | 9 | typealias KeyValueIteratorOperation = KeyValueIterator.() -> Result 10 | 11 | 12 | /** 13 | * Iterate over [all][org.apache.kafka.streams.state.KeyValueStore.all] records. 14 | * 15 | * This function is useful because the [KeyValueIterator] will be closed automatically 16 | * (see: [kotlin.io.use]) after all values have been consumed. 17 | * 18 | * @see org.apache.kafka.streams.state.KeyValueStore.all 19 | */ 20 | inline fun KeyValueStore.useAll( 21 | operation: KeyValueIteratorOperation 22 | ): Result = 23 | all().use { iterator -> iterator.operation() } 24 | 25 | 26 | /** 27 | * Reverse-iterate over [all][org.apache.kafka.streams.state.KeyValueStore.reverseAll] records. 28 | * 29 | * This function is useful because the [KeyValueIterator] will be closed automatically 30 | * (see: [kotlin.io.use]) after all values have been consumed. 31 | * 32 | * @see org.apache.kafka.streams.state.KeyValueStore.reverseAll 33 | */ 34 | inline fun KeyValueStore.useReverseAll( 35 | operation: KeyValueIteratorOperation 36 | ): Result = 37 | reverseAll().use { iterator -> iterator.operation() } 38 | 39 | 40 | /** 41 | * Iterate over [a range][org.apache.kafka.streams.state.KeyValueStore.range] of records. 42 | * 43 | * This function is useful because the [KeyValueIterator] will be closed automatically 44 | * (see: [kotlin.io.use]) after all values have been consumed. 45 | * 46 | * @see org.apache.kafka.streams.state.KeyValueStore.range 47 | */ 48 | inline fun KeyValueStore.useRange( 49 | from: K? = null, 50 | to: K? = null, 51 | operation: KeyValueIteratorOperation, 52 | ): Result = 53 | range(from, to).use { iterator -> iterator.operation() } 54 | 55 | 56 | /** 57 | * Reverse-iterate over [a range][org.apache.kafka.streams.state.KeyValueStore.range] of records. 58 | * 59 | * This function is useful because the [KeyValueIterator] will be closed automatically 60 | * (see: [kotlin.io.use]) after all values have been consumed. 61 | * 62 | * @see org.apache.kafka.streams.state.KeyValueStore.reverseRange 63 | */ 64 | inline fun KeyValueStore.useReverseRange( 65 | from: K? = null, 66 | to: K? 
= null, 67 | operation: KeyValueIteratorOperation, 68 | ): Result = 69 | reverseRange(from, to).use { iterator -> iterator.operation() } 70 | 71 | 72 | /** 73 | * Iterate over records based on the key prefix. 74 | * 75 | * This function is useful because the [KeyValueIterator] will be closed automatically 76 | * (see: [kotlin.io.use]) after all values have been consumed. 77 | * 78 | * @see org.apache.kafka.streams.state.KeyValueStore.prefixScan 79 | */ 80 | inline fun KeyValueStore.usePrefixScan( 81 | prefix: Prefix, 82 | prefixKeySerializer: Serializer, 83 | operation: KeyValueIteratorOperation, 84 | ): Result = 85 | prefixScan(prefix, prefixKeySerializer).use { iterator -> iterator.operation() } 86 | 87 | 88 | /** 89 | * Generate a [Sequence] of [all][org.apache.kafka.streams.state.KeyValueStore.all] records. 90 | * 91 | * This function is useful because the [KeyValueIterator] will be closed automatically 92 | * (see: [kotlin.io.use]) after all values have been consumed. 93 | * 94 | * @see org.apache.kafka.streams.state.KeyValueStore.all 95 | */ 96 | fun KeyValueStore.allAsSequence(): Sequence> = 97 | all().use { it.asSequence() } 98 | 99 | 100 | /** 101 | * Generate a [Sequence] of all records, in [reverse][org.apache.kafka.streams.state.KeyValueStore.reverseAll] 102 | * 103 | * This function is useful because the [KeyValueIterator] will be closed automatically 104 | * (see: [kotlin.io.use]) after all values have been consumed. 105 | * 106 | * @see org.apache.kafka.streams.state.KeyValueStore.reverseAll 107 | */ 108 | fun KeyValueStore.reverseAllAsSequence(): Sequence> = 109 | reverseAll().use { it.asSequence() } 110 | 111 | 112 | /** 113 | * Generate a [Sequence] of [a range][org.apache.kafka.streams.state.KeyValueStore.range] of records. 114 | * 115 | * This function is useful because the [KeyValueIterator] will be closed automatically 116 | * (see: [kotlin.io.use]) after all values have been consumed. 117 | * 118 | * @see org.apache.kafka.streams.state.KeyValueStore.range 119 | */ 120 | fun KeyValueStore.rangeAsSequence( 121 | from: K? = null, 122 | to: K? = null, 123 | ): Sequence> = 124 | range(from, to).use { it.asSequence() } 125 | 126 | 127 | /** 128 | * Generate a [Sequence] of [a reverse-range][org.apache.kafka.streams.state.KeyValueStore.range] of records. 129 | * 130 | * This function is useful because the [KeyValueIterator] will be closed automatically 131 | * (see: [kotlin.io.use]) after all values have been consumed. 132 | * 133 | * @see org.apache.kafka.streams.state.KeyValueStore.reverseRange 134 | */ 135 | fun KeyValueStore.reverseRangeAsSequence( 136 | from: K? = null, 137 | to: K? = null, 138 | ): Sequence> = 139 | reverseRange(from, to).use { it.asSequence() } 140 | 141 | 142 | /** 143 | * Iterate over keys based on the key prefix. 144 | * 145 | * This function is useful because the [KeyValueIterator] will be closed automatically 146 | * (see: [kotlin.io.use]) after all values have been consumed. 
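 *
 * Usage sketch (illustrative only; assumes a `store: KeyValueStore<String, Long>`
 * with keys like `"user:alice"`, plus Kafka's `Serdes` factory and this library's
 * `KeyValue` destructuring extensions). Note that the returned sequence is backed
 * by the store's iterator, so it should be consumed before that iterator is
 * closed; the [usePrefixScan] variant above scopes the iteration explicitly:
 *
 * ```
 * val userCounts: Map<String, Long> =
 *   store.usePrefixScan("user:", Serdes.String().serializer()) {
 *     asSequence().associate { (key, value) -> key to value }
 *   }
 * ```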
147 | * 148 | * @see org.apache.kafka.streams.state.KeyValueStore.prefixScan 149 | */ 150 | fun KeyValueStore.prefixScanAsSequence( 151 | prefix: Prefix, 152 | prefixKeySerializer: Serializer, 153 | ): Sequence> = 154 | prefixScan(prefix, prefixKeySerializer).use { it.asSequence() } 155 | -------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/test/kotlin/dev/adamko/kotka/extensions/streams/KGroupedStreamTest.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions.streams 2 | 3 | import dev.adamko.kotka.extensions.materializedAs 4 | import io.kotest.core.spec.style.FunSpec 5 | import io.mockk.confirmVerified 6 | import io.mockk.every 7 | import io.mockk.mockk 8 | import io.mockk.verify 9 | import org.apache.kafka.common.utils.Bytes 10 | import org.apache.kafka.streams.kstream.Aggregator 11 | import org.apache.kafka.streams.kstream.Initializer 12 | import org.apache.kafka.streams.kstream.KGroupedStream 13 | import org.apache.kafka.streams.kstream.Materialized 14 | import org.apache.kafka.streams.kstream.Named 15 | import org.apache.kafka.streams.kstream.Reducer 16 | import org.apache.kafka.streams.state.KeyValueStore 17 | 18 | 19 | class KGroupedStreamTest : FunSpec({ 20 | 21 | context(".count() extension") { 22 | 23 | val kGroupedStream: KGroupedStream = mockk { 24 | every { count(any(), any()) } returns mockk() 25 | every { count(any()) } returns mockk() 26 | every { count(any>>()) } returns mockk() 27 | every { count() } returns mockk() 28 | } 29 | 30 | test("name=null, materialized=null -> expect count() is called") { 31 | 32 | kGroupedStream.count( 33 | name = null, 34 | materialized = null 35 | ) 36 | 37 | verify(exactly = 1) { kGroupedStream.count() } 38 | 39 | confirmVerified(kGroupedStream) 40 | } 41 | 42 | test("name='some-name', materialized=null -> expect count(namedAs('some-name')) is called") { 43 | 44 | kGroupedStream.count( 45 | name = "some-name", 46 | materialized = null 47 | ) 48 | 49 | verify(exactly = 1) { kGroupedStream.count(any()) } 50 | 51 | confirmVerified(kGroupedStream) 52 | } 53 | 54 | test("name=null, materialized=Materialized<> -> expect count(Materialized<>) is called") { 55 | 56 | kGroupedStream.count( 57 | name = null, 58 | materialized = materializedAs("store-name"), 59 | ) 60 | 61 | verify(exactly = 1) { 62 | kGroupedStream.count(any>>()) 63 | } 64 | 65 | confirmVerified(kGroupedStream) 66 | } 67 | 68 | test("name='some-name', materialized=Materialized<> -> expect count('some-name', Materialized<>) is called") { 69 | 70 | kGroupedStream.count( 71 | name = "some-name", 72 | materialized = materializedAs("store-name"), 73 | ) 74 | 75 | verify(exactly = 1) { 76 | kGroupedStream.count( 77 | any(), 78 | any>>() 79 | ) 80 | } 81 | 82 | confirmVerified(kGroupedStream) 83 | } 84 | } 85 | 86 | 87 | context(".aggregate() extension") { 88 | 89 | val kGroupedStream: KGroupedStream = mockk { 90 | every { aggregate(any>(), any(), any()) } returns mockk() 91 | 92 | every { aggregate(any>(), any(), any(), any()) } returns mockk() 93 | } 94 | 95 | test("verify aggregate extension (without name) is called") { 96 | 97 | kGroupedStream.aggregate( 98 | initializer = { "initial" }, 99 | materialized = materializedAs("store-name"), 100 | aggregator = { key, value, aggregate -> "aggregated $key $value $aggregate" } 101 | ) 102 | 103 | verify(exactly = 1) { 104 | kGroupedStream.aggregate( 105 | any>(), 106 | any>(), 107 | any>>() 108 | ) 109 | } 110 
| 111 | confirmVerified(kGroupedStream) 112 | } 113 | 114 | test("verify aggregate extension (with name) is called") { 115 | 116 | kGroupedStream.aggregate( 117 | name = "test-name", 118 | initializer = { "initial" }, 119 | materialized = materializedAs("store-name"), 120 | aggregator = { key, value, aggregate -> "aggregated $key $value $aggregate" } 121 | ) 122 | 123 | verify(exactly = 1) { 124 | kGroupedStream.aggregate( 125 | any>(), 126 | any>(), 127 | any(), 128 | any>>() 129 | ) 130 | } 131 | 132 | confirmVerified(kGroupedStream) 133 | } 134 | } 135 | 136 | 137 | context(".reduce() extension") { 138 | 139 | val kGroupedStream: KGroupedStream = mockk { 140 | every { 141 | reduce( 142 | any>(), 143 | any>>(), 144 | ) 145 | } returns mockk() 146 | 147 | every { 148 | reduce( 149 | any>(), 150 | any(), 151 | any>>(), 152 | ) 153 | } returns mockk() 154 | } 155 | 156 | test("verify reduce extension (without name) is called") { 157 | 158 | kGroupedStream.reduce( 159 | materialized = materializedAs("store-name"), 160 | reducer = { v1, v2 -> "reduced $v1 $v2" } 161 | ) 162 | 163 | verify(exactly = 1) { 164 | kGroupedStream.reduce( 165 | any>(), 166 | any>>(), 167 | ) 168 | } 169 | 170 | confirmVerified(kGroupedStream) 171 | } 172 | 173 | test("verify aggregate extension (with name) is called") { 174 | 175 | kGroupedStream.reduce( 176 | name = "test-name", 177 | materialized = materializedAs("store-name"), 178 | reducer = { v1, v2 -> "reduced $v1 $v2" } 179 | ) 180 | 181 | verify(exactly = 1) { 182 | kGroupedStream.reduce( 183 | any>(), 184 | any(), 185 | any>>(), 186 | ) 187 | } 188 | 189 | confirmVerified(kGroupedStream) 190 | } 191 | } 192 | }) 193 | -------------------------------------------------------------------------------- /modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/tables/KTable.kt: -------------------------------------------------------------------------------- 1 | package dev.adamko.kotka.extensions.tables 2 | 3 | import dev.adamko.kotka.extensions.namedAs 4 | import dev.adamko.kotka.extensions.tableJoined 5 | import org.apache.kafka.common.utils.Bytes 6 | import org.apache.kafka.streams.KeyValue 7 | import org.apache.kafka.streams.kstream.* 8 | import org.apache.kafka.streams.state.KeyValueStore 9 | 10 | 11 | /** @see org.apache.kafka.streams.kstream.KTable.mapValues */ 12 | fun KTable.mapValues( 13 | name: String? = null, 14 | materialized: Materialized>? = null, 15 | mapper: (readOnlyKey: K, value: inV) -> outV, 16 | ): KTable = 17 | when { 18 | name != null && materialized != null -> mapValues(mapper, namedAs(name), materialized) 19 | name != null && materialized == null -> mapValues(mapper, namedAs(name)) 20 | name == null && materialized != null -> mapValues(mapper, materialized) 21 | else -> mapValues(mapper) 22 | } 23 | 24 | 25 | /** @see org.apache.kafka.streams.kstream.KTable.groupBy */ 26 | fun KTable.groupBy( 27 | grouped: Grouped? = null, 28 | selector: KeyValueMapper> 29 | ): KGroupedTable = when (grouped) { 30 | null -> groupBy(selector) 31 | else -> groupBy(selector, grouped) 32 | } 33 | 34 | 35 | /** @see org.apache.kafka.streams.kstream.KTable.join */ 36 | fun KTable.join( 37 | other: KTable, 38 | name: String? = null, 39 | materialized: Materialized>? 
= null, 40 | joiner: ValueJoiner, 41 | ): KTable = 42 | when { 43 | name != null && materialized != null -> join(other, joiner, namedAs(name), materialized) 44 | name != null && materialized == null -> join(other, joiner, namedAs(name)) 45 | name == null && materialized != null -> join(other, joiner, materialized) 46 | else -> join(other, joiner) 47 | } 48 | 49 | /** 50 | * A function that extracts the key ([otherK]) from this table's value ([V]). 51 | * If the result is null, the update is ignored as invalid. 52 | * See [KTable.join] 53 | */ 54 | fun interface ForeignKeyExtractor : (V) -> otherK? 55 | 56 | 57 | /** @see org.apache.kafka.streams.kstream.KTable.join */ 58 | 59 | fun KTable.join( 60 | other: KTable, 61 | name: String? = null, 62 | materialized: Materialized>? = null, 63 | foreignKeyExtractor: ForeignKeyExtractor, 64 | joiner: ValueJoiner, 65 | ): KTable = 66 | join( 67 | other = other, 68 | tableJoined = if (name == null) null else tableJoined(name), 69 | materialized = materialized, 70 | foreignKeyExtractor = foreignKeyExtractor, 71 | joiner = joiner, 72 | ) 73 | 74 | 75 | /** @see org.apache.kafka.streams.kstream.KTable.join */ 76 | fun KTable.join( 77 | other: KTable, 78 | tableJoined: TableJoined? = null, 79 | materialized: Materialized>? = null, 80 | foreignKeyExtractor: ForeignKeyExtractor, 81 | joiner: ValueJoiner, 82 | ): KTable { 83 | return when { 84 | tableJoined != null && materialized != null -> 85 | join(other, foreignKeyExtractor, joiner, tableJoined, materialized) 86 | 87 | tableJoined != null && materialized == null -> 88 | join(other, foreignKeyExtractor, joiner, tableJoined) 89 | 90 | tableJoined == null && materialized != null -> 91 | join(other, foreignKeyExtractor, joiner, materialized) 92 | 93 | else -> 94 | join(other, foreignKeyExtractor, joiner) 95 | } 96 | } 97 | 98 | 99 | /** @see org.apache.kafka.streams.kstream.KTable.leftJoin */ 100 | fun KTable.leftJoin( 101 | other: KTable, 102 | name: String? = null, 103 | materialized: Materialized>? = null, 104 | joiner: ValueJoiner, 105 | ): KTable { 106 | return when { 107 | name != null && materialized != null -> leftJoin(other, joiner, namedAs(name), materialized) 108 | name != null && materialized == null -> leftJoin(other, joiner, namedAs(name)) 109 | name == null && materialized != null -> leftJoin(other, joiner, materialized) 110 | else -> leftJoin(other, joiner) 111 | } 112 | } 113 | 114 | 115 | /** @see org.apache.kafka.streams.kstream.KTable.leftJoin */ 116 | fun KTable.leftJoin( 117 | other: KTable, 118 | name: String? = null, 119 | materialized: Materialized>? = null, 120 | foreignKeyExtractor: (V) -> otherK, 121 | joiner: ValueJoiner, 122 | ): KTable = 123 | leftJoin( 124 | other = other, 125 | tableJoined = if (name == null) null else tableJoined(name), 126 | materialized = materialized, 127 | foreignKeyExtractor = foreignKeyExtractor, 128 | joiner = joiner, 129 | ) 130 | 131 | 132 | /** @see org.apache.kafka.streams.kstream.KTable.leftJoin */ 133 | fun KTable.leftJoin( 134 | other: KTable, 135 | tableJoined: TableJoined? = null, 136 | materialized: Materialized>? 
/** @see org.apache.kafka.streams.kstream.KTable.leftJoin */
fun <K, V, otherK, otherV, outV> KTable<K, V>.leftJoin(
  other: KTable<otherK, otherV>,
  tableJoined: TableJoined<K, otherK>? = null,
  materialized: Materialized<K, outV, KeyValueStore<Bytes, ByteArray>>? = null,
  foreignKeyExtractor: (V) -> otherK,
  joiner: ValueJoiner<V, otherV, outV>,
): KTable<K, outV> {
  return when {
    tableJoined != null && materialized != null ->
      leftJoin(other, foreignKeyExtractor, joiner, tableJoined, materialized)

    tableJoined != null && materialized == null ->
      leftJoin(other, foreignKeyExtractor, joiner, tableJoined)

    tableJoined == null && materialized != null ->
      leftJoin(other, foreignKeyExtractor, joiner, materialized)

    else ->
      leftJoin(other, foreignKeyExtractor, joiner)
  }
}


/** @see org.apache.kafka.streams.kstream.KTable.outerJoin */
fun <K, V, otherV, outV> KTable<K, V>.outerJoin(
  other: KTable<K, otherV>,
  name: String? = null,
  materialized: Materialized<K, outV, KeyValueStore<Bytes, ByteArray>>? = null,
  joiner: ValueJoiner<V, otherV, outV>,
): KTable<K, outV> {
  return when {
    name != null && materialized != null -> outerJoin(other, joiner, namedAs(name), materialized)
    name != null && materialized == null -> outerJoin(other, joiner, namedAs(name))
    name == null && materialized != null -> outerJoin(other, joiner, materialized)
    else -> outerJoin(other, joiner)
  }
}


// the value of the resulting KStream is nullable, because it includes record deletions.
/** @see org.apache.kafka.streams.kstream.KTable.toStream */
fun <K, V, outK> KTable<K, V>.toStream(
  name: String,
  mapper: KeyValueMapper<K, V?, outK>,
): KStream<outK, V?> = toStream(mapper, namedAs(name))


// the value of the resulting KStream is nullable, because it includes record deletions.
/** @see org.apache.kafka.streams.kstream.KTable.toStream */
fun <K, V> KTable<K, V>.toStream(name: String): KStream<K, V?> = toStream(namedAs(name))


/** @see org.apache.kafka.streams.kstream.KTable.filter */
fun <K, V> KTable<K, V>.filter(
  name: String? = null,
  materialized: Materialized<K, V, KeyValueStore<Bytes, ByteArray>>? = null,
  predicate: (K, V) -> Boolean,
): KTable<K, V> {
  return when {
    name != null && materialized != null -> filter(predicate, namedAs(name), materialized)
    name != null && materialized == null -> filter(predicate, namedAs(name))
    name == null && materialized != null -> filter(predicate, materialized)
    else -> filter(predicate)
  }
}

/** @see org.apache.kafka.streams.kstream.KTable.filterNot */
fun <K, V> KTable<K, V>.filterNot(
  name: String? = null,
  materialized: Materialized<K, V, KeyValueStore<Bytes, ByteArray>>? = null,
  predicate: (K, V) -> Boolean,
): KTable<K, V> {
  return when {
    name != null && materialized != null -> filterNot(predicate, namedAs(name), materialized)
    name != null && materialized == null -> filterNot(predicate, namedAs(name))
    name == null && materialized != null -> filterNot(predicate, materialized)
    else -> filterNot(predicate)
  }
}

// <VR> KTable<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> transformerSupplier, final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized, final Named named, final String... stateStoreNames);
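
// A minimal usage sketch (illustrative only; the table and operation names are hypothetical):
// a named `filter`, then `toStream` to observe the table's changelog. The resulting stream's
// value type is nullable because deletions (tombstones) arrive as null values.
private fun activeUsersUsageSketch(users: KTable<String, Boolean>): KStream<String, Boolean?> =
  users
    .filter(name = "only-active-users") { _, isActive -> isActive }
    .toStream("active-users-changelog")
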
--------------------------------------------------------------------------------
/modules/kotka-streams-extensions/src/main/kotlin/dev/adamko/kotka/extensions/streams/KStream.kt:
--------------------------------------------------------------------------------
package dev.adamko.kotka.extensions.streams

import dev.adamko.kotka.extensions.namedAs
import dev.adamko.kotka.extensions.toKeyValue
import org.apache.kafka.common.utils.Bytes
import org.apache.kafka.streams.KeyValue
import org.apache.kafka.streams.kstream.BranchedKStream
import org.apache.kafka.streams.kstream.ForeachAction
import org.apache.kafka.streams.kstream.GlobalKTable
import org.apache.kafka.streams.kstream.Grouped
import org.apache.kafka.streams.kstream.Joined
import org.apache.kafka.streams.kstream.KGroupedStream
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.kstream.KTable
import org.apache.kafka.streams.kstream.KeyValueMapper
import org.apache.kafka.streams.kstream.Materialized
import org.apache.kafka.streams.kstream.Produced
import org.apache.kafka.streams.kstream.ValueJoinerWithKey
import org.apache.kafka.streams.processor.TopicNameExtractor
import org.apache.kafka.streams.state.KeyValueStore


/** @see org.apache.kafka.streams.kstream.KStream.map */
inline fun <inK, inV, outK, outV> KStream<inK, inV>.map(
  name: String,
  crossinline mapper: (key: inK, value: inV) -> Pair<outK, outV>
): KStream<outK, outV> =
  map({ k, v -> mapper(k, v).toKeyValue() }, namedAs(name))


/** @see org.apache.kafka.streams.kstream.KStream.mapValues */
fun <K, inV, outV> KStream<K, inV>.mapValues(
  name: String,
  mapper: (key: K, value: inV) -> outV
): KStream<K, outV> = mapValues(mapper, namedAs(name))


/** @see org.apache.kafka.streams.kstream.KStream.flatMap */
inline fun <inK, inV, outK, outV> KStream<inK, inV>.flatMap(
  name: String? = null,
  crossinline mapper: (key: inK, value: inV) -> Iterable<Pair<outK, outV>>
): KStream<outK, outV> {
  return when (name) {
    null -> flatMap { k, v -> mapper(k, v).map { it.toKeyValue() } }
    else -> flatMap({ k, v -> mapper(k, v).map { it.toKeyValue() } }, namedAs(name))
  }
}


/** @see org.apache.kafka.streams.kstream.KStream.flatMapValues */
inline fun <inK, inV, outV> KStream<inK, inV>.flatMapValues(
  name: String? = null,
  crossinline mapper: (key: inK, value: inV) -> Iterable<outV>
): KStream<inK, outV> {
  return when (name) {
    null -> flatMapValues { k, v -> mapper(k, v) }
    else -> flatMapValues({ k, v -> mapper(k, v) }, namedAs(name))
  }
}


/** @see org.apache.kafka.streams.kstream.KStream.groupBy */
fun <K, V, outK> KStream<K, V>.groupBy(
  grouped: Grouped<outK, V>,
  keySelector: (K, V) -> outK
): KGroupedStream<outK, V> = groupBy(keySelector, grouped)
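
// A minimal usage sketch of the `map` and `flatMapValues` extensions above (illustrative
// only; the stream and processor names are hypothetical). `map` converts the returned Pair
// into a Kafka KeyValue via `toKeyValue()`, and both operations get explicit processor names.
private fun tokeniseUsageSketch(lines: KStream<Int, String>): KStream<String, String> =
  lines
    .map("key-by-line-number") { lineNumber, text -> "line-$lineNumber" to text }
    .flatMapValues("split-into-words") { _, text -> text.split(' ') }
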
/** @see org.apache.kafka.streams.kstream.KStream.to */
fun <K, V> KStream<K, V>.to(
  produced: Produced<K, V>? = null,
  topicNameExtractor: TopicNameExtractorKt<K, V>,
) {
  val extractor = TopicNameExtractor<K, V> { key, value, recordContext ->
    with(topicNameExtractor) {
      TopicNameExtractorContextInternal(recordContext).extract(KeyValue(key, value))
    }
  }

  return when (produced) {
    null -> to(extractor)
    else -> to(extractor, produced)
  }
}


/** @see org.apache.kafka.streams.kstream.KStream.filter */
fun <K, V> KStream<K, V>.filter(
  name: String,
  predicate: (K, V) -> Boolean,
): KStream<K, V> = filter(predicate, namedAs(name))


/** @see org.apache.kafka.streams.kstream.KStream.filterNot */
fun <K, V> KStream<K, V>.filterNot(
  name: String,
  predicate: (K, V) -> Boolean,
): KStream<K, V> = filterNot(predicate, namedAs(name))


/** @see org.apache.kafka.streams.kstream.KStream.merge */
fun <K, V> KStream<K, V>.merge(
  name: String,
  other: KStream<K, V>,
): KStream<K, V> = merge(other, namedAs(name))


/** @see org.apache.kafka.streams.kstream.KStream.toTable */
fun <K, V> KStream<K, V>.toTable(
  name: String? = null,
  materialized: Materialized<K, V, KeyValueStore<Bytes, ByteArray>>? = null
): KTable<K, V> {
  return when {
    name != null && materialized != null -> toTable(namedAs(name), materialized)
    name == null && materialized != null -> toTable(materialized)
    name != null && materialized == null -> toTable(namedAs(name))
    else -> toTable()
  }
}


/** @see org.apache.kafka.streams.kstream.KStream.split */
fun <K, V> KStream<K, V>.split(
  name: String? = null
): BranchedKStream<K, V> =
  when (name) {
    null -> split()
    else -> split(namedAs(name))
  }


/** @see org.apache.kafka.streams.kstream.KStream.join */
fun <K, V, tableV, outV> KStream<K, V>.join(
  table: KTable<K, tableV>,
  joined: Joined<K, V, tableV>,
  valueJoiner: ValueJoinerWithKey<K, V, tableV, outV>,
): KStream<K, outV> = join(
  table,
  valueJoiner,
  joined,
)


/** @see org.apache.kafka.streams.kstream.KStream.join */
fun <K, V, gK, gV, outV> KStream<K, V>.join(
  name: String,
  globalTable: GlobalKTable<gK, gV>,
  keySelector: KeyValueMapper<K, V, gK>,
  valueJoiner: ValueJoinerWithKey<K, V, gV, outV>,
): KStream<K, outV> =
  join(
    globalTable,
    keySelector,
    valueJoiner,
    namedAs(name),
  )


/** @see org.apache.kafka.streams.kstream.KStream.leftJoin */
fun <K, V, gK, gV, outV> KStream<K, V>.leftJoin(
  name: String,
  globalTable: GlobalKTable<gK, gV>,
  keySelector: KeyValueMapper<K, V, gK>,
  valueJoiner: ValueJoinerWithKey<K, V, gV, outV>,
): KStream<K, outV> =
  leftJoin(
    globalTable,
    keySelector,
    valueJoiner,
    namedAs(name),
  )


/** @see org.apache.kafka.streams.kstream.KStream.foreach */
fun <K, V> KStream<K, V>.forEach(
  name: String? = null,
  forEachAction: ForeachAction<K, V>,
): Unit = when (name) {
  null -> foreach(forEachAction)
  else -> foreach(forEachAction, namedAs(name))
}


/** @see org.apache.kafka.streams.kstream.KStream.peek */
fun <K, V> KStream<K, V>.peek(
  name: String? = null,
  forEachAction: ForeachAction<K, V>,
): KStream<K, V> = when (name) {
  null -> peek(forEachAction)
  else -> peek(forEachAction, namedAs(name))
}
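
// A minimal usage sketch of the `to` overload above that accepts a TopicNameExtractorKt
// (illustrative only; the topic-naming scheme is hypothetical). Each record is routed to
// a topic derived from its key.
private fun dynamicRoutingUsageSketch(events: KStream<String, String>) {
  events.to { keyValue -> "events-${keyValue.key.lowercase()}" }
}
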

//
//  fun <K1, V1> flatTransform(
//    transformerSupplier: TransformerSupplier<in K, in V, Iterable<KeyValue<K1, V1>?>?>?,
//    named: Named?,
//    vararg stateStoreNames: String?
//  ): KStream<K1, V1>?
//
//  fun <K1, V1> flatTransform(
//    transformerSupplier: TransformerSupplier<in K, in V, Iterable<KeyValue<K1, V1>?>?>?,
//    vararg stateStoreNames: String?
//  ): KStream<K1, V1>?
//
//  fun <K1, V1> transform(
//    transformerSupplier: TransformerSupplier<in K, in V, KeyValue<K1, V1>?>?,
//    named: Named?,
//    vararg stateStoreNames: String?
//  ): KStream<K1, V1>?
//
//  fun <KR, VR> map(
//    mapper: KeyValueMapper<in K, in V, KeyValue<KR, VR>?>?,
//    named: Named?
//  ): KStream<KR, VR>?
//
//  fun <KR> selectKey(mapper: KeyValueMapper<in K, in V, KR>?, named: Named?): KStream<KR, V>?
//  fun <otherV, VR> join(
//    otherStream: KStream<K, otherV>?,
//    joiner: ValueJoinerWithKey<in K, in V, in otherV, out VR>?,
//    windows: JoinWindows?,
//    streamJoined: StreamJoined<K, V, otherV>?
//  ): KStream<K, VR>?
//
//  fun <otherV, VR> leftJoin(
//    otherStream: KStream<K, otherV>?,
//    joiner: ValueJoinerWithKey<in K, in V, in otherV, out VR>?,
//    windows: JoinWindows?,
//    streamJoined: StreamJoined<K, V, otherV>?
//  ): KStream<K, VR>?
//
//  fun <otherV, VR> outerJoin(
//    otherStream: KStream<K, otherV>?,
//    joiner: ValueJoinerWithKey<in K, in V, in otherV, out VR>?,
//    windows: JoinWindows?,
//    streamJoined: StreamJoined<K, V, otherV>?
//  ): KStream<K, VR>?
//
//  fun <VR> flatTransformValues(
//    valueTransformerSupplier: ValueTransformerWithKeySupplier<in K, in V, Iterable<VR>?>?,
//    named: Named?,
//    vararg stateStoreNames: String?
//  ): KStream<K, VR>?
//
//  fun <VR> transformValues(
//    valueTransformerSupplier: ValueTransformerWithKeySupplier<in K, in V, VR>?,
//    named: Named?,
//    vararg stateStoreNames: String?
//  ): KStream<K, VR>?
//
//  fun merge(stream: KStream<K, V>?, named: Named?): KStream<K, V>?
//  fun repartition(repartitioned: Repartitioned<K, V>?): KStream<K, V>?
//  fun process(
//    processorSupplier: ProcessorSupplier<in K, in V>?,
//    named: Named?,
//    vararg stateStoreNames: String?
//  )

//  fun to(topic: String?, produced: Produced<K, V>?)
//  fun to(topicExtractor: TopicNameExtractor<K, V>?, produced: Produced<K, V>?)
//

-------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script.
If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | # This is normally unused 84 | # shellcheck disable=SC2034 85 | APP_BASE_NAME=${0##*/} 86 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 87 | APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit 88 | 89 | # Use the maximum available, or set MAX_FD != -1 to use that value. 90 | MAX_FD=maximum 91 | 92 | warn () { 93 | echo "$*" 94 | } >&2 95 | 96 | die () { 97 | echo 98 | echo "$*" 99 | echo 100 | exit 1 101 | } >&2 102 | 103 | # OS specific support (must be 'true' or 'false'). 104 | cygwin=false 105 | msys=false 106 | darwin=false 107 | nonstop=false 108 | case "$( uname )" in #( 109 | CYGWIN* ) cygwin=true ;; #( 110 | Darwin* ) darwin=true ;; #( 111 | MSYS* | MINGW* ) msys=true ;; #( 112 | NONSTOP* ) nonstop=true ;; 113 | esac 114 | 115 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 116 | 117 | 118 | # Determine the Java command to use to start the JVM. 119 | if [ -n "$JAVA_HOME" ] ; then 120 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 121 | # IBM's JDK on AIX uses strange locations for the executables 122 | JAVACMD=$JAVA_HOME/jre/sh/java 123 | else 124 | JAVACMD=$JAVA_HOME/bin/java 125 | fi 126 | if [ ! 
-x "$JAVACMD" ] ; then 127 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 128 | 129 | Please set the JAVA_HOME variable in your environment to match the 130 | location of your Java installation." 131 | fi 132 | else 133 | JAVACMD=java 134 | if ! command -v java >/dev/null 2>&1 135 | then 136 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | fi 142 | 143 | # Increase the maximum file descriptors if we can. 144 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 145 | case $MAX_FD in #( 146 | max*) 147 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 148 | # shellcheck disable=SC2039,SC3045 149 | MAX_FD=$( ulimit -H -n ) || 150 | warn "Could not query maximum file descriptor limit" 151 | esac 152 | case $MAX_FD in #( 153 | '' | soft) :;; #( 154 | *) 155 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 156 | # shellcheck disable=SC2039,SC3045 157 | ulimit -n "$MAX_FD" || 158 | warn "Could not set maximum file descriptor limit to $MAX_FD" 159 | esac 160 | fi 161 | 162 | # Collect all arguments for the java command, stacking in reverse order: 163 | # * args from the command line 164 | # * the main class name 165 | # * -classpath 166 | # * -D...appname settings 167 | # * --module-path (only if needed) 168 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 169 | 170 | # For Cygwin or MSYS, switch paths to Windows format before running java 171 | if "$cygwin" || "$msys" ; then 172 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 173 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 174 | 175 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 176 | 177 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 178 | for arg do 179 | if 180 | case $arg in #( 181 | -*) false ;; # don't mess with options #( 182 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 183 | [ -e "$t" ] ;; #( 184 | *) false ;; 185 | esac 186 | then 187 | arg=$( cygpath --path --ignore --mixed "$arg" ) 188 | fi 189 | # Roll the args list around exactly as many times as the number of 190 | # args, so each arg winds up back in the position where it started, but 191 | # possibly modified. 192 | # 193 | # NB: a `for` loop captures its iteration list before it begins, so 194 | # changing the positional parameters here affects neither the number of 195 | # iterations, nor the values presented in `arg`. 196 | shift # remove old arg 197 | set -- "$@" "$arg" # push replacement arg 198 | done 199 | fi 200 | 201 | 202 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 203 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 204 | 205 | # Collect all arguments for the java command: 206 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 207 | # and any embedded shellness will be escaped. 208 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 209 | # treated as '${Hostname}' itself on the command line. 210 | 211 | set -- \ 212 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 213 | -classpath "$CLASSPATH" \ 214 | org.gradle.wrapper.GradleWrapperMain \ 215 | "$@" 216 | 217 | # Stop when "xargs" is not available. 218 | if ! 
command -v xargs >/dev/null 2>&1 219 | then 220 | die "xargs is not available" 221 | fi 222 | 223 | # Use "xargs" to parse quoted args. 224 | # 225 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 226 | # 227 | # In Bash we could simply go: 228 | # 229 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 230 | # set -- "${ARGS[@]}" "$@" 231 | # 232 | # but POSIX shell has neither arrays nor command substitution, so instead we 233 | # post-process each arg (as a line of input to sed) to backslash-escape any 234 | # character that might be a shell metacharacter, then use eval to reverse 235 | # that process (while maintaining the separation between arguments), and wrap 236 | # the whole thing up as a single "set" statement. 237 | # 238 | # This will of course break if any of these variables contains a newline or 239 | # an unmatched quote. 240 | # 241 | 242 | eval "set -- $( 243 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 244 | xargs -n1 | 245 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 246 | tr '\n' ' ' 247 | )" '"$@"' 248 | 249 | exec "$JAVACMD" "$@" 250 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------