├── .github └── workflows │ └── maven.yml ├── .gitignore ├── .mvn └── wrapper │ └── maven-wrapper.properties ├── README.adoc ├── clients ├── pom.xml └── src │ ├── main │ └── kotlin │ │ └── io │ │ └── streamthoughts │ │ └── kafka │ │ └── clients │ │ ├── Configs.kt │ │ ├── Extensions.kt │ │ ├── Kafka.kt │ │ ├── KafkaClientConfigs.kt │ │ ├── KafkaClients.kt │ │ ├── KafkaRecord.kt │ │ ├── LoggerUtils.kt │ │ ├── consumer │ │ ├── AutoOffsetReset.kt │ │ ├── ConsumerAwareRebalanceListener.kt │ │ ├── ConsumerFactory.kt │ │ ├── ConsumerTask.kt │ │ ├── ConsumerWorker.kt │ │ ├── KafkaConsumerConfigs.kt │ │ ├── KafkaConsumerTask.kt │ │ ├── KafkaConsumerWorker.kt │ │ ├── TopicSubscription.kt │ │ ├── Types.kt │ │ ├── error │ │ │ ├── ConsumedErrorHandler.kt │ │ │ ├── ConsumedErrorHandlers.kt │ │ │ └── serialization │ │ │ │ ├── DeserializationErrorHandler.kt │ │ │ │ └── DeserializationErrorHandlers.kt │ │ └── listener │ │ │ └── ConsumerBatchRecordsListener.kt │ │ └── producer │ │ ├── Acks.kt │ │ ├── KafkaProducerConfigs.kt │ │ ├── KafkaProducerContainer.kt │ │ ├── ProducerContainer.kt │ │ ├── ProducerFactory.kt │ │ ├── SendResult.kt │ │ └── callback │ │ └── ProducerSendCallback.kt │ └── test │ ├── kotlin │ └── io │ │ └── streamthoughts │ │ └── kafka │ │ └── clients │ │ ├── KafkaClientConfigsTest.kt │ │ ├── consumer │ │ ├── KafkaConsumerConfigsTest.kt │ │ └── KafkaConsumerTaskTest.kt │ │ └── producer │ │ ├── KafkaProducerConfigsTest.kt │ │ └── KafkaProducerContainerTest.kt │ └── resources │ ├── logback-test.xml │ └── test-configs.properties ├── examples ├── pom.xml └── src │ └── main │ └── kotlin │ └── io │ └── streamthoughts │ └── kafka │ └── client │ └── examples │ ├── ConsumerClientExample.kt │ ├── ConsumerKotlinDSLExample.kt │ ├── ProducerClientExample.kt │ ├── ProducerKotlinDSLExample.kt │ └── TxProducerContainerExample.kt ├── mvnw ├── mvnw.cmd ├── pom.xml └── tests ├── pom.xml └── src ├── main └── kotlin │ └── io │ └── streamthoughts │ └── kafka │ └── tests │ ├── 
TestingEmbeddedKafka.kt │ └── TestingEmbeddedZookeeper.kt └── test └── kotlin └── io └── streamthoughts └── kafka └── tests └── junit └── EmbeddedKafkaSetupExtension.kt /.github/workflows/maven.yml: -------------------------------------------------------------------------------- 1 | name: Java CI with Maven 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v2 16 | - name: Set up JDK 11 17 | uses: actions/setup-java@v1 18 | with: 19 | java-version: 11 20 | - name: Build with Maven 21 | run: mvn -B package --file pom.xml 22 | 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Scala template 2 | *.class 3 | *.log 4 | 5 | # sbt specific 6 | .cache 7 | .history 8 | .lib/ 9 | dist/* 10 | target/ 11 | lib_managed/ 12 | src_managed/ 13 | project/boot/ 14 | project/plugins/project/ 15 | 16 | # Scala-IDE specific 17 | .scala_dependencies 18 | .worksheet 19 | ### Java template 20 | *.class 21 | 22 | # Mobile Tools for Java (J2ME) 23 | .mtj.tmp/ 24 | 25 | # Package Files # 26 | *.jar 27 | *.war 28 | *.ear 29 | 30 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 31 | hs_err_pid* 32 | ### Maven template 33 | target/ 34 | pom.xml.tag 35 | pom.xml.releaseBackup 36 | pom.xml.versionsBackup 37 | pom.xml.next 38 | release.properties 39 | dependency-reduced-pom.xml 40 | buildNumber.properties 41 | .mvn/timing.properties 42 | ### Eclipse template 43 | *.pydevproject 44 | .metadata 45 | .gradle 46 | tmp/ 47 | *.tmp 48 | *.bak 49 | *.swp 50 | *~.nib 51 | local.properties 52 | .settings/ 53 | .loadpath 54 | 55 | # Eclipse Core 56 | .project 57 | 58 | # External tool builders 59 | .externalToolBuilders/ 60 | 61 | # Locally stored "Eclipse launch configurations" 62 | 
*.launch 63 | 64 | # CDT-specific 65 | .cproject 66 | 67 | # JDT-specific (Eclipse Java Development Tools) 68 | .classpath 69 | 70 | # Java annotation processor (APT) 71 | .factorypath 72 | 73 | # PDT-specific 74 | .buildpath 75 | 76 | # sbteclipse plugin 77 | .target 78 | 79 | # TeXlipse plugin 80 | .texlipse 81 | ### JetBrains template 82 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio 83 | 84 | *.iml 85 | 86 | ## Directory-based project format: 87 | .idea/ 88 | # if you remove the above rule, at least ignore the following: 89 | 90 | # User-specific stuff: 91 | # .idea/workspace.xml 92 | # .idea/tasks.xml 93 | # .idea/dictionaries 94 | 95 | # Sensitive or high-churn files: 96 | # .idea/dataSources.ids 97 | # .idea/dataSources.xml 98 | # .idea/sqlDataSources.xml 99 | # .idea/dynamic.xml 100 | # .idea/uiDesigner.xml 101 | 102 | # Gradle: 103 | # .idea/gradle.xml 104 | # .idea/libraries 105 | 106 | # Mongo Explorer plugin: 107 | # .idea/mongoSettings.xml 108 | 109 | ## File-based project format: 110 | *.ipr 111 | *.iws 112 | 113 | ## Plugin-specific files: 114 | 115 | # IntelliJ 116 | /out/ 117 | 118 | # mpeltonen/sbt-idea plugin 119 | .idea_modules/ 120 | 121 | # JIRA plugin 122 | atlassian-ide-plugin.xml 123 | 124 | # Crashlytics plugin (for Android Studio and IntelliJ) 125 | com_crashlytics_export_strings.xml 126 | crashlytics.properties 127 | crashlytics-build.properties 128 | 129 | # Created by .ignore support plugin (hsz.mobi) 130 | # 131 | # 132 | .mvn/wrapper/MavenWrapperDownloader.java 133 | -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip 2 | 
wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 3 | -------------------------------------------------------------------------------- /README.adoc: -------------------------------------------------------------------------------- 1 | = Kafka Clients for Kotlin 2 | :toc: 3 | :toc-placement!: 4 | 5 | image:https://img.shields.io/badge/License-Apache%202.0-blue.svg[https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/LICENSE] 6 | image:https://img.shields.io/github/v/release/streamthoughts/kafka-clients-kotlin[GitHub release (latest by date)] 7 | image:https://img.shields.io/github/issues-raw/streamthoughts/kafka-clients-kotlin[GitHub issues] 8 | image:https://img.shields.io/github/workflow/status/streamthoughts/kafka-clients-kotlin/Java%20CI%20with%20Maven[GitHub Workflow Status] 9 | image:https://img.shields.io/github/stars/streamthoughts/kafka-clients-kotlin?style=social[GitHub Repo stars] 10 | 11 | WARNING: Be aware that this package is still in heavy development. Some breaking change will occur in future weeks and months. 12 | Thank's for your comprehension. 13 | 14 | toc::[] 15 | 16 | == What is Kafka Clients for Kotlin ? 17 | 18 | The **Kafka Clients for Kotlin** projects packs with convenient Kotlin API for the development of Kafka-based event-driven applications. 19 | It provides high-level abstractions both for sending records `ProducerContainer` and consuming records from topics using one or many 20 | concurrent consumers `KafkaConsumerWorker`. 21 | 22 | In addition, it provides builder classes to facilitate the configuration of `Producer` and `Consumer` objects: `KafkaProducerConfigs` and `KafkaConsumerConfigs` 23 | 24 | **Kafka Clients for Kotlin** is based on the pure java `kafka-clients`. 25 | 26 | == How to contribute ? 27 | 28 | The project is in its early stages so it can be very easy to contribute by proposing APIs changes, new features and so on. 
29 | Any feedback, bug reports and PRs are greatly appreciated! 30 | 31 | * Source Code: https://github.com/streamthoughts/kafka-clients-kotlin 32 | * Issue Tracker: https://github.com/streamthoughts/kafka-clients-kotlin/issues 33 | 34 | 35 | == Show your support 36 | 37 | You think this project can help you or your team to develop kafka-based application with Kotlin ? 38 | Please ⭐ this repository to support us! 39 | 40 | == How to give it a try ? 41 | 42 | Just add **Kafka Clients for Kotlin** to the dependencies of your projects. 43 | 44 | === For Maven 45 | [source,xml] 46 | ---- 47 | 48 | io.streamthoughts 49 | kafka-clients-kotlin 50 | 0.2.0 51 | 52 | ---- 53 | 54 | == Getting Started 55 | 56 | === Writing messages to Kafka 57 | 58 | **Example: How to create `KafkaProducer` config ?** 59 | 60 | [source,kotlin] 61 | ---- 62 | val configs = producerConfigsOf() 63 | .client { bootstrapServers("localhost:9092") } 64 | .acks(Acks.Leader) 65 | .keySerializer(StringSerializer::class.java.name) 66 | .valueSerializer(StringSerializer::class.java.name) 67 | ---- 68 | 69 | ==== Example with standard `KafkaProducer` (i.e : using java `kafka-clients`) 70 | 71 | [source,kotlin] 72 | ---- 73 | val producer = KafkaProducer(configs) 74 | 75 | val messages = listOf("I ❤️ Logs", "Making Sense of Stream Processing", "Apache Kafka") 76 | producer.use { 77 | messages.forEach {value -> 78 | val record = ProducerRecord(topic, value) 79 | producer.send(record) { m: RecordMetadata, e: Exception? 
-> 80 | when (e) { 81 | null -> println("Record was successfully sent (topic=${m.topic()}, partition=${m.partition()}, offset= ${m.offset()})") 82 | else -> e.printStackTrace() 83 | } 84 | } 85 | } 86 | } 87 | ---- 88 | 89 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerClientExample.kt[ProducerClientExample.kt] 90 | 91 | ==== Example with Kotlin DSL 92 | 93 | [source,kotlin] 94 | ---- 95 | val producer: ProducerContainer = kafka("localhost:9092") { 96 | client { 97 | clientId("my-client") 98 | } 99 | 100 | producer { 101 | configure { 102 | acks(Acks.InSyncReplicas) 103 | } 104 | keySerializer(StringSerializer()) 105 | valueSerializer(StringSerializer()) 106 | 107 | defaultTopic("demo-topic") 108 | 109 | onSendError {_, _, error -> 110 | error.printStackTrace() 111 | } 112 | 113 | onSendSuccess{ _, _, metadata -> 114 | println("Record was sent successfully: topic=${metadata.topic()}, partition=${metadata.partition()}, offset=${metadata.offset()} ") 115 | } 116 | } 117 | } 118 | 119 | val messages = listOf("I ❤️ Logs", "Making Sense of Stream Processing", "Apache Kafka") 120 | producer.use { 121 | producer.init() // create internal producer and call initTransaction() if `transactional.id` is set 122 | messages.forEach { producer.send(value = it) } 123 | } 124 | ---- 125 | 126 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerKotlinDSLExample.kt[ProducerKotlinDSLExample.kt] 127 | 128 | === Consuming messages from a Kafka topic 129 | 130 | ==== Example: How to create `KafkaConsumer` config ? 
131 | 132 | [source,kotlin] 133 | ---- 134 | val configs = consumerConfigsOf() 135 | .client { bootstrapServers("localhost:9092") } 136 | .groupId("demo-consumer-group") 137 | .keyDeserializer(StringDeserializer::class.java.name) 138 | .valueDeserializer(StringDeserializer::class.java.name) 139 | ---- 140 | 141 | ==== Example with standard `KafkaConsumer` (i.e : using java `kafka-clients`) 142 | 143 | [source,kotlin] 144 | ---- 145 | val consumer = KafkaConsumer(configs) 146 | 147 | consumer.use { 148 | consumer.subscribe(listOf(topic)) 149 | while(true) { 150 | consumer 151 | .poll(Duration.ofMillis(500)) 152 | .forEach { record -> 153 | println( 154 | "Received record with key ${record.key()} " + 155 | "and value ${record.value()} from topic ${record.topic()} and partition ${record.partition()}" 156 | ) 157 | } 158 | } 159 | } 160 | ---- 161 | 162 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerClientExample.kt[ConsumerClientExample.kt] 163 | 164 | ==== Example with Kotlin DSL 165 | [source,kotlin] 166 | ---- 167 | val consumerWorker: ConsumerWorker = kafka("localhost:9092") { 168 | client { 169 | clientId("my-client") 170 | } 171 | 172 | val stringDeserializer: Deserializer = StringDeserializer() 173 | consumer("my-group", stringDeserializer, stringDeserializer) { 174 | configure { 175 | maxPollRecords(1000) 176 | autoOffsetReset(AutoOffsetReset.Earliest) 177 | } 178 | 179 | onDeserializationError(replaceWithNullOnInvalidRecord()) 180 | 181 | onPartitionsAssigned { _: Consumer<*, *>, partitions -> 182 | println("Partitions assigned: $partitions") 183 | } 184 | 185 | onPartitionsRevokedAfterCommit { _: Consumer<*, *>, partitions -> 186 | println("Partitions revoked: $partitions") 187 | } 188 | 189 | onConsumed { _: Consumer<*, *>, value: String? 
-> 190 | println("consumed record-value: $value") 191 | } 192 | 193 | onConsumedError(closeTaskOnConsumedError()) 194 | 195 | Runtime.getRuntime().addShutdownHook(Thread { run { stop() } }) 196 | } 197 | } 198 | 199 | consumerWorker.use { 200 | consumerWorker.start("demo-topic", maxParallelHint = 4) 201 | runBlocking { 202 | println("All consumers started, waiting one minute before stopping") 203 | delay(Duration.ofMinutes(1).toMillis()) 204 | } 205 | } 206 | ---- 207 | 208 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerKotlinDSLExample.kt[ConsumerKotlinDSLExample.kt] 209 | 210 | == All Examples: 211 | 212 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerClientExample.kt[ProducerClientExample.kt] 213 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerKotlinDSLExample.kt[ProducerKotlinDSLExample.kt] 214 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/TxProducerContainerExample.kt[TxProducerContainerExample.kt] 215 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerClientExample.kt[ConsumerClientExample.kt] 216 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerKotlinDSLExample.kt[ConsumerKotlinDSLExample.kt] 217 | 218 | == How to build project ? 219 | 220 | Kafka Clients for Kotlin uses https://github.com/takari/maven-wrapper[maven-wrapper]. 
221 | 222 | [source,bash] 223 | ---- 224 | $ ./mvnw clean package 225 | ---- 226 | 227 | Run Tests 228 | 229 | [source,bash] 230 | ---- 231 | $ ./mvnw clean test 232 | ---- 233 | 234 | == Licence 235 | 236 | Copyright 2020 StreamThoughts. 237 | 238 | Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at 239 | 240 | http://www.apache.org/licenses/LICENSE-2.0["http://www.apache.org/licenses/LICENSE-2.0"] 241 | 242 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License 243 | -------------------------------------------------------------------------------- /clients/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | io.streamthoughts 7 | kafka-clients-kotlin-reactor 8 | 0.2.0 9 | 10 | 4.0.0 11 | 12 | Kafka Clients for Kotlin 13 | kafka-clients-kotlin 14 | 15 | 16 | 17 | org.apache.kafka 18 | kafka-clients 19 | 20 | 21 | org.jetbrains.kotlin 22 | kotlin-stdlib 23 | 24 | 25 | org.jetbrains.kotlin 26 | kotlin-stdlib-jdk8 27 | 28 | 29 | org.jetbrains.kotlinx 30 | kotlinx-coroutines-core 31 | 32 | 33 | org.jetbrains.kotlin 34 | kotlin-test-junit 35 | test 36 | 37 | 38 | io.streamthoughts 39 | kafka-clients-kotlin-tests 40 | ${project.version} 41 | test 42 | 43 | 44 | io.streamthoughts 45 | kafka-clients-kotlin-tests 46 | ${project.version} 47 | test-jar 48 | test 49 | 50 | 51 | org.junit.platform 52 | junit-platform-launcher 53 | test 54 
| 55 | 56 | org.junit.jupiter 57 | junit-jupiter-engine 58 | test 59 | 60 | 61 | org.slf4j 62 | slf4j-api 63 | 1.7.30 64 | 65 | 66 | ch.qos.logback 67 | logback-classic 68 | 69 | 70 | ch.qos.logback 71 | logback-core 72 | 73 | 74 | 75 | -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/Configs.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | package io.streamthoughts.kafka.clients 20 | 21 | import kotlin.collections.HashMap 22 | 23 | 24 | /** 25 | * The base class for client configuration. 
26 | * 27 | * @see io.streamthoughts.kafka.clients.KafkaClientConfigs 28 | * @see io.streamthoughts.kafka.clients.consumer.KafkaConsumerConfigs 29 | * @see io.streamthoughts.kafka.clients.producer.KafkaProducerConfigs 30 | */ 31 | open class Configs protected constructor(props: Map = emptyMap()) : MutableMap { 32 | 33 | private val props = HashMap(props) 34 | 35 | override val entries: MutableSet> 36 | get() = props.entries 37 | 38 | override val keys: MutableSet 39 | get() = props.keys 40 | 41 | override val size: Int 42 | get() = props.size 43 | 44 | override val values: MutableCollection 45 | get() = props.values 46 | 47 | override fun containsKey(key: String): Boolean { 48 | return props.containsKey(key) 49 | } 50 | 51 | override fun containsValue(value: Any?): Boolean { 52 | return props.containsValue(value) 53 | } 54 | 55 | override fun get(key: String): Any? { 56 | return props[key] 57 | } 58 | 59 | override fun isEmpty(): Boolean { 60 | return props.isEmpty() 61 | } 62 | 63 | open fun with(key: String, value: Any?) = apply { this[key] = value } 64 | 65 | operator fun set(key: String, value: Any?) { 66 | props[key] = value 67 | } 68 | 69 | override fun equals(other: Any?): Boolean { 70 | if (this === other) return true 71 | if (other !is Configs) return false 72 | 73 | if (props != other.props) return false 74 | 75 | return true 76 | } 77 | 78 | override fun hashCode(): Int { 79 | return props.hashCode() 80 | } 81 | 82 | override fun toString(): String { 83 | return "Configs[$props]" 84 | } 85 | 86 | override fun clear() { 87 | props.clear() 88 | } 89 | 90 | override fun put(key: String, value: Any?): Any? = props.put(key, value) 91 | 92 | override fun putAll(from: Map) = props.putAll(from) 93 | 94 | override fun remove(key: String): Any? 
= props.remove(key) 95 | } -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/Extensions.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | package io.streamthoughts.kafka.clients 20 | 21 | import java.io.FileInputStream 22 | import java.io.InputStream 23 | import java.util.Properties 24 | 25 | /** 26 | * Convenient method to transform a [Properties] to a [Map] of string keys. 27 | */ 28 | fun Properties.toStringMap(): Map = this.map { (k, v) -> Pair(k.toString(), v) }.toMap() 29 | 30 | /** 31 | * Convenient method to load config properties from the given [configFile]. 32 | */ 33 | fun T.load(configFile: String): T = 34 | apply { FileInputStream(configFile).use { load(it) } } 35 | 36 | /** 37 | * Convenient method to load config properties from the given [inputStream]. 
38 | */ 39 | fun T.load(inputStream: InputStream): T = 40 | apply { putAll((Properties().apply { load(inputStream) }).toStringMap()) } 41 | -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/Kafka.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients 20 | 21 | data class Kafka(val bootstrapServers: Array = arrayOf("localhost:9092")) { 22 | 23 | override fun equals(other: Any?): Boolean { 24 | if (this === other) return true 25 | if (other !is Kafka) return false 26 | 27 | if (!bootstrapServers.contentEquals(other.bootstrapServers)) return false 28 | 29 | return true 30 | } 31 | 32 | override fun hashCode(): Int { 33 | return bootstrapServers.contentHashCode() 34 | } 35 | 36 | override fun toString(): String { 37 | return "Kafka(bootstrapServers=${bootstrapServers.joinToString()})" 38 | } 39 | 40 | } -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/KafkaClientConfigs.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients 20 | 21 | import org.apache.kafka.clients.CommonClientConfigs 22 | import java.io.InputStream 23 | import java.util.Properties 24 | import kotlin.collections.HashMap 25 | import kotlin.collections.Map 26 | import kotlin.collections.emptyMap 27 | import kotlin.collections.joinToString 28 | import kotlin.collections.mutableMapOf 29 | 30 | 31 | open class KafkaClientConfigs constructor(props: Map = emptyMap()): Configs(props) { 32 | 33 | constructor(kafka : Kafka): this(bootstrapServersConfig(kafka)) 34 | 35 | companion object { 36 | private fun bootstrapServersConfig(kafka: Kafka) = mutableMapOf( 37 | Pair(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, kafka.bootstrapServers.joinToString()) 38 | ) 39 | } 40 | 41 | /** 42 | * @see [CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] 43 | */ 44 | fun bootstrapServers(bootstrapServers: Array) = 45 | apply { this[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = bootstrapServers.joinToString() } 46 | 47 | /** 48 | * @see [CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] 49 | */ 50 | fun bootstrapServers(bootstrapServers: String) = 51 | apply { this[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = bootstrapServers } 52 | 53 | /** 54 | * @see [CommonClientConfigs.CLIENT_ID_CONFIG] 55 | */ 56 | fun clientId(clientId: String) = 57 | apply { this[CommonClientConfigs.CLIENT_ID_CONFIG] = clientId } 58 | 59 | override fun with(key: String, value: Any?) = apply { super.with(key, value) } 60 | } 61 | 62 | /** 63 | * Convenient method to create and populate a new [KafkaClientConfigs] from a [configFile]. 64 | */ 65 | fun loadClientConfigs(configFile: String): KafkaClientConfigs = KafkaClientConfigs().load(configFile) 66 | 67 | /** 68 | * Convenient method to create and populate a new [KafkaClientConfigs] from an [inputStream]. 
69 | */ 70 | fun loadClientConfigs(inputStream: InputStream): KafkaClientConfigs = KafkaClientConfigs().load(inputStream) 71 | 72 | /** 73 | * Creates a new [KafkaClientConfigs] with no properties. 74 | */ 75 | fun emptyClientConfigs(): KafkaClientConfigs = KafkaClientConfigs() 76 | 77 | /** 78 | * Creates a new [KafkaClientConfigs] with the given [props]. 79 | */ 80 | fun clientConfigsOf(props: Map): KafkaClientConfigs = KafkaClientConfigs(HashMap(props)) 81 | 82 | /** 83 | * Creates a new [KafkaClientConfigs] with the given [props]. 84 | */ 85 | fun clientConfigsOf(props: Properties): KafkaClientConfigs = clientConfigsOf(props.toStringMap()) -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/KafkaClients.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients 20 | 21 | import io.streamthoughts.kafka.clients.consumer.ConsumerWorker 22 | import io.streamthoughts.kafka.clients.consumer.KafkaConsumerWorker 23 | import io.streamthoughts.kafka.clients.consumer.consumerConfigsOf 24 | import io.streamthoughts.kafka.clients.producer.KafkaProducerContainer 25 | import io.streamthoughts.kafka.clients.producer.ProducerContainer 26 | import io.streamthoughts.kafka.clients.producer.producerConfigsOf 27 | import org.apache.kafka.common.serialization.Deserializer 28 | 29 | /** 30 | * [KafkaClients] DSL for building either a new consumer or producer kafka client. 31 | */ 32 | class KafkaClients(private val configs: KafkaClientConfigs) { 33 | 34 | /** 35 | * Configures the commons configuration for Kafka Client. 36 | */ 37 | fun client(init: KafkaClientConfigs.() -> Unit) : Unit = configs.init() 38 | 39 | /** 40 | * Creates and configures a new [KafkaConsumerWorker] using the given [init] function 41 | * for the given [groupId], [keyDeserializer] and [valueDeserializer] 42 | * 43 | * @return a new [KafkaConsumerWorker] instance. 44 | */ 45 | fun consumer(groupId: String, 46 | keyDeserializer: Deserializer, 47 | valueDeserializer: Deserializer, 48 | init: KafkaConsumerWorker.Builder.() -> Unit): ConsumerWorker { 49 | val configs = consumerConfigsOf(configs).groupId(groupId) 50 | return KafkaConsumerWorker.Builder(configs, keyDeserializer, valueDeserializer).also(init).build() 51 | } 52 | 53 | /** 54 | * Creates and configures a new [ProducerContainer] using the given [init] function. 55 | * 56 | * @return a new [ProducerContainer] instance. 
57 | */ 58 | fun producer(init: ProducerContainer.Builder.() -> Unit): ProducerContainer { 59 | val configs = producerConfigsOf(configs) 60 | return KafkaProducerContainer.Builder(configs).also(init).build() 61 | } 62 | } 63 | 64 | fun kafka(bootstrapServer: String, init: KafkaClients.() -> R): R = 65 | kafka(arrayOf(bootstrapServer), init) 66 | 67 | fun kafka(bootstrapServers: Array, init: KafkaClients.() -> R): R = 68 | kafka(KafkaClientConfigs(Kafka(bootstrapServers)), init) 69 | 70 | fun kafka(kafka: Kafka, init: KafkaClients.() -> R): R = 71 | kafka(KafkaClientConfigs(kafka), init) 72 | 73 | fun kafka(configs: KafkaClientConfigs, init: KafkaClients.() -> R): R = 74 | KafkaClients(configs).init() 75 | -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/KafkaRecord.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
*/
package io.streamthoughts.kafka.clients

import org.apache.kafka.clients.producer.ProducerRecord
import java.time.Instant

/**
 * A mutable, builder-friendly representation of a Kafka record.
 *
 * NOTE(review): `<K, V>` parameters were reconstructed — the extraction stripped all
 * generic tokens from this dump; confirm against the original source.
 */
data class KafkaRecord<K, V>(
    /**
     * The record key.
     */
    var key: K? = null,
    /**
     * The record value.
     */
    var value: V? = null,
    /**
     * The record topic.
     */
    var topic: String? = null,
    /**
     * The record partition.
     */
    var partition: Int? = null,
    /**
     * The record timestamp.
     */
    var timestamp: Instant? = null
) {

    /**
     * Converts this [KafkaRecord] into a new [ProducerRecord].
     * The timestamp, when set, is converted to epoch-milliseconds.
     */
    fun toProducerRecord(): ProducerRecord<K?, V?> {
        return ProducerRecord(
            topic,
            partition,
            timestamp?.toEpochMilli(),
            key,
            value
        )
    }
}
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/LoggerUtils.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package io.streamthoughts.kafka.clients

import org.slf4j.Logger
import org.slf4j.LoggerFactory

/**
 * Returns an SLF4J [Logger] for the given class.
 */
fun loggerFor(forClass: Class<*>): Logger = LoggerFactory.getLogger(forClass)
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/AutoOffsetReset.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.streamthoughts.kafka.clients.consumer

/**
 * The policy to apply for [org.apache.kafka.clients.consumer.Consumer] which has no initial offset in Kafka
 * or if the current offset does not exist any more on the server (e.g. because that data has been deleted).
 *
 * @see org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG
 */
object AutoOffsetReset {
    /**
     * earliest: automatically reset the offset to the earliest offset.
     */
    const val Earliest = "earliest"

    /**
     * latest: automatically reset the offset to the latest offset.
     */
    // Fix: was "lastest" — an invalid auto.offset.reset value, which the Kafka consumer
    // rejects with a ConfigException at construction time.
    const val Latest = "latest"

    /**
     * none: throw an exception to the consumer if no previous offset is found for the consumer's group.
     */
    const val None = "none"
}
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerAwareRebalanceListener.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package io.streamthoughts.kafka.clients.consumer

import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.common.TopicPartition

/**
 * A rebalance callback interface that, unlike the raw Kafka listener, receives the
 * [Consumer] instance alongside the affected partitions. All methods default to no-ops.
 *
 * @see [org.apache.kafka.clients.consumer.ConsumerRebalanceListener]
 */
interface ConsumerAwareRebalanceListener {

    /** Invoked when partitions are revoked, before offsets for them are committed. */
    fun onPartitionsRevokedBeforeCommit(consumer: Consumer<*, *>,
                                        partitions: Collection<TopicPartition>) {
    }

    /** Invoked when partitions are revoked, after offsets for them are committed. */
    fun onPartitionsRevokedAfterCommit(consumer: Consumer<*, *>,
                                       partitions: Collection<TopicPartition>) {
    }

    /** Invoked when partitions are (re-)assigned to this consumer. */
    fun onPartitionsAssigned(consumer: Consumer<*, *>,
                             partitions: Collection<TopicPartition>) {
    }

    /** Invoked when previously owned partitions are lost without an orderly revocation. */
    fun onPartitionsLost(consumer: Consumer<*, *>,
                         partitions: Collection<TopicPartition>) {
    }

}
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerFactory.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package io.streamthoughts.kafka.clients.consumer

import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.clients.consumer.KafkaConsumer

/**
 * The default factory interface to create new [Consumer] instances.
 *
 * NOTE(review): `<K, V>` on [make] was reconstructed — the extraction stripped all
 * generic tokens from this dump; confirm against the original source.
 */
interface ConsumerFactory {

    /** Default implementation backed by [KafkaConsumer]. */
    object DefaultConsumerFactory : ConsumerFactory {
        override fun <K, V> make(configs: Map<String, Any?>): Consumer<K, V> = KafkaConsumer(configs)
    }

    /**
     * Creates a new [Consumer] instance with the given [configs].
     */
    fun <K, V> make(configs: Map<String, Any?>): Consumer<K, V>
}
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerTask.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package io.streamthoughts.kafka.clients.consumer

import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
import java.io.Closeable
import java.time.Duration

/**
 * A single poll-loop over one [Consumer] instance, driven as a coroutine via [run].
 */
interface ConsumerTask : Closeable {

    /** Lifecycle states of a [ConsumerTask]. */
    enum class State {
        /**
         * The [ConsumerTask] is created.
         */
        CREATED,
        /**
         * The [ConsumerTask] is starting.
         */
        STARTING,
        /**
         * The [ConsumerTask] is running.
         */
        RUNNING,
        /**
         * The [ConsumerTask] is paused for all assigned partitions.
         */
        PAUSED,
        /**
         * The [ConsumerTask] is rebalancing and new partitions are being assigned.
         */
        PARTITIONS_ASSIGNED,
        /**
         * The [ConsumerTask] is rebalancing and partitions are being revoked.
         */
        PARTITIONS_REVOKED,
        /**
         * The [ConsumerTask] is being closed.
         */
        PENDING_SHUTDOWN,
        /**
         * The [ConsumerTask] is closed.
         */
        SHUTDOWN
    }

    /** Runs the task's poll loop; suspends until the task is shut down. */
    suspend fun run()

    /**
     * Pauses consumption for the current assignments.
     * @see org.apache.kafka.clients.consumer.Consumer.pause
     */
    fun pause()

    /**
     * Resumes consumption for the current assignments.
     * @see org.apache.kafka.clients.consumer.Consumer.resume
     */
    fun resume()

    /**
     * Shutdowns the [ConsumerTask] and waits for completion.
     * @see org.apache.kafka.clients.consumer.Consumer.close
     */
    override fun close()

    /**
     * Shutdowns the [ConsumerTask] and waits for completion until the given [timeout].
     * @see org.apache.kafka.clients.consumer.Consumer.close
     */
    fun close(timeout: Duration)

    /**
     * @return the [State] of this [ConsumerTask].
     */
    fun state(): State

    /**
     * Executes the given [action] with the underlying [Consumer].
     *
     * NOTE(review): the type parameters were reconstructed — the extraction stripped
     * all generic tokens; confirm the consumer's exact type against the original source.
     */
    fun <T> execute(action: (consumer: Consumer<*, *>) -> T): T

    /**
     * Commits asynchronously the positions of the internal [Consumer] for the given [offsets].
     * If the passed [offsets] is {@code null} then commit the [Consumer] positions for its
     * current partition assignments.
     *
     * @see [Consumer.commitAsync]
     */
    fun commitAsync(offsets: Map<TopicPartition, OffsetAndMetadata>? = null)

    /**
     * Commits synchronously the positions of the internal [Consumer] for the given offsets.
     * If the passed [offsets] is {@code null} then commit the [Consumer] positions for its
     * current partition assignments.
     *
     * @see [Consumer.commitSync]
     */
    fun commitSync(offsets: Map<TopicPartition, OffsetAndMetadata>? = null)
}
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerWorker.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package io.streamthoughts.kafka.clients.consumer

import io.streamthoughts.kafka.clients.consumer.error.ConsumedErrorHandler
import io.streamthoughts.kafka.clients.consumer.error.serialization.DeserializationErrorHandler
import io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener
import java.io.Closeable
import java.time.Duration
import java.util.regex.Pattern

/**
 * The [ConsumerWorker] manages one or many concurrent [org.apache.kafka.clients.consumer.Consumer]
 * that belong to the same {@code group.id}.
 *
 * NOTE(review): `<K, V>` parameters were reconstructed — the extraction stripped all
 * generic tokens from this dump; confirm against the original source.
 */
interface ConsumerWorker<K, V> : Closeable {

    interface Builder<K, V> {
        /**
         * Configures this worker.
         */
        fun configure(init: KafkaConsumerConfigs.() -> Unit)

        /**
         * Sets the [ConsumerFactory] to be used for creating a new
         * [org.apache.kafka.clients.consumer.Consumer] instance.
         */
        fun factory(consumerFactory: ConsumerFactory): Builder<K, V>

        /**
         * Sets the [listener] to invoke when a rebalance is in progress and partitions are assigned.
         */
        fun onPartitionsAssigned(listener: RebalanceListener): Builder<K, V>

        /**
         * Sets the [listener] to invoke when partitions are revoked, before offsets are committed.
         */
        fun onPartitionsRevokedBeforeCommit(listener: RebalanceListener): Builder<K, V>

        /**
         * Sets the [listener] to invoke when partitions are revoked, after offsets are committed.
         */
        fun onPartitionsRevokedAfterCommit(listener: RebalanceListener): Builder<K, V>

        /**
         * Sets the [listener] to invoke when a rebalance is in progress and partitions are lost.
         */
        fun onPartitionsLost(listener: RebalanceListener): Builder<K, V>

        /**
         * Sets the [handler] to invoke when an exception happens while deserializing a record.
         */
        fun onDeserializationError(handler: DeserializationErrorHandler<K, V>): Builder<K, V>

        /**
         * Sets the [handler] to invoke when an error is thrown while processing the last records
         * returned from the [org.apache.kafka.clients.consumer.Consumer.poll] method, i.e. an
         * exception thrown by the provided [ConsumerBatchRecordsListener].
         *
         * @see [onConsumed]
         */
        fun onConsumedError(handler: ConsumedErrorHandler): Builder<K, V>

        /**
         * Sets the [ConsumerBatchRecordsListener] to invoke when a non-empty batch of records is
         * returned from the [org.apache.kafka.clients.consumer.Consumer.poll] method.
         */
        fun onConsumed(listener: ConsumerBatchRecordsListener<K, V>): Builder<K, V>

        /**
         * Builds a new [ConsumerWorker].
         *
         * @return the new [ConsumerWorker] instance.
         */
        fun build(): ConsumerWorker<K, V>
    }

    /**
     * Returns the group id that the [org.apache.kafka.clients.consumer.Consumer] instances
     * managed by this [ConsumerWorker] belong to.
     */
    fun groupId(): String

    /**
     * Creates as many [org.apache.kafka.clients.consumer.Consumer] as given [maxParallelHint]
     * that will immediately subscribe to the given [topic] and start consuming records.
     */
    fun start(topic: String, maxParallelHint: Int = 1)

    /**
     * Creates as many [org.apache.kafka.clients.consumer.Consumer] as given [maxParallelHint]
     * that will immediately subscribe to the given [topics] and start consuming records.
     */
    fun start(topics: List<String>, maxParallelHint: Int = 1)

    /**
     * Creates as many [org.apache.kafka.clients.consumer.Consumer] as given [maxParallelHint]
     * that will immediately subscribe to the topics matching the given [pattern] and start
     * consuming records.
     */
    fun start(pattern: Pattern, maxParallelHint: Int = 1)

    /**
     * Stops all [org.apache.kafka.clients.consumer.Consumer] managed by this [ConsumerWorker].
     */
    override fun close()

    /**
     * Pauses all [org.apache.kafka.clients.consumer.Consumer] managed by this [ConsumerWorker].
     */
    fun pause()

    /**
     * Resumes all [org.apache.kafka.clients.consumer.Consumer] managed by this [ConsumerWorker].
     */
    fun resume()

    /**
     * Suspends until all [org.apache.kafka.clients.consumer.Consumer] are closed.
     */
    suspend fun joinAll()
}
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/KafkaConsumerConfigs.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package io.streamthoughts.kafka.clients.consumer

import io.streamthoughts.kafka.clients.KafkaClientConfigs
import io.streamthoughts.kafka.clients.load
import io.streamthoughts.kafka.clients.toStringMap
import org.apache.kafka.clients.consumer.ConsumerConfig
import java.io.InputStream
import java.util.*

/**
 * Uses to build and encapsulate a configuration [Map]
 * for creating a new [org.apache.kafka.clients.consumer.KafkaConsumer].
 *
 * @see [ConsumerConfig]
 */
class KafkaConsumerConfigs(props: Map<String, Any?> = emptyMap()) : KafkaClientConfigs(props) {

    companion object {
        // Client-side (non-Kafka) setting: delay between successive poll() invocations.
        const val POLL_INTERVAL_MS_CONFIG = "poll.interval.ms"
        const val POLL_INTERVAL_MS_DEFAULT = Long.MAX_VALUE
    }

    override fun with(key: String, value: Any?) = apply { super.with(key, value) }

    /** Applies [init] to the common client-level configuration. */
    fun client(init: KafkaClientConfigs.() -> Unit) = apply { this.init() }

    /**
     * @see [ConsumerConfig.AUTO_OFFSET_RESET_CONFIG]
     */
    fun autoOffsetReset(autoOffsetReset: String) =
        apply { this[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = autoOffsetReset }

    /**
     * @see [ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG]
     */
    fun autoCommitIntervalMs(autoCommitIntervalMs: Long) =
        apply { this[ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG] = autoCommitIntervalMs }

    /**
     * @see [ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG]
     */
    fun allowAutoCreateTopicsConfig(allowAutoCreateTopicsConfig: Boolean) =
        apply { this[ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG] = allowAutoCreateTopicsConfig }

    /**
     * @see [ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG]
     */
    fun enableAutoCommit(enableAutoCommit: Boolean) =
        apply { this[ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG] = enableAutoCommit }

    /**
     * @see [ConsumerConfig.FETCH_MAX_BYTES_CONFIG]
     */
    fun fetchMaxBytes(fetchMaxBytes: Long) =
        apply { this[ConsumerConfig.FETCH_MAX_BYTES_CONFIG] = fetchMaxBytes }

    /**
     * @see [ConsumerConfig.FETCH_MIN_BYTES_CONFIG]
     */
    fun fetchMinBytes(fetchMinBytes: Long) =
        apply { this[ConsumerConfig.FETCH_MIN_BYTES_CONFIG] = fetchMinBytes }

    /**
     * @see [ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG]
     */
    // Fix: previously wrote FETCH_MAX_BYTES_CONFIG, silently clobbering the max-bytes
    // setting instead of configuring fetch.max.wait.ms.
    fun fetchMaxWaitMs(fetchMaxWaitMs: Long) =
        apply { this[ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG] = fetchMaxWaitMs }

    /**
     * @see [ConsumerConfig.GROUP_ID_CONFIG]
     */
    fun groupId(groupId: String) =
        apply { this[ConsumerConfig.GROUP_ID_CONFIG] = groupId }

    /**
     * @see [ConsumerConfig.MAX_POLL_RECORDS_CONFIG]
     */
    fun maxPollRecords(maxPollRecords: Int) =
        apply { this[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = maxPollRecords }

    /**
     * @see [ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG]
     */
    fun maxPartitionFetchBytes(maxPartitionFetchBytes: Int) =
        apply { this[ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG] = maxPartitionFetchBytes }

    /**
     * @see [ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG]
     */
    fun keyDeserializer(keyDeserializer: String) =
        apply { this[ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG] = keyDeserializer }

    /**
     * @see [ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG]
     */
    fun valueDeserializer(valueDeserializer: String) =
        apply { this[ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG] = valueDeserializer }

    /** Sets the client-side poll interval; see [POLL_INTERVAL_MS_CONFIG]. */
    fun pollRecordsMs(pollRecordsMs: Long) =
        apply { this[POLL_INTERVAL_MS_CONFIG] = pollRecordsMs }
}

/**
 * Creates a new empty [KafkaConsumerConfigs].
 */
fun emptyConsumerConfigs(): KafkaConsumerConfigs = KafkaConsumerConfigs(emptyMap())

/**
 * Creates a new [KafkaConsumerConfigs] with the given [pairs].
 */
fun consumerConfigsOf(vararg pairs: Pair<String, Any?>): KafkaConsumerConfigs = consumerConfigsOf(mapOf(*pairs))

/**
 * Creates a new [KafkaConsumerConfigs] with the given [props].
 */
fun consumerConfigsOf(props: Map<String, Any?>): KafkaConsumerConfigs = KafkaConsumerConfigs(props)

/**
 * Creates a new [KafkaConsumerConfigs] with the given [props].
 */
fun consumerConfigsOf(props: Properties): KafkaConsumerConfigs = consumerConfigsOf(props.toStringMap())

/**
 * Convenient method to create and populate a new [KafkaConsumerConfigs] from a [configFile].
 */
fun loadConsumerConfigs(configFile: String): KafkaConsumerConfigs = KafkaConsumerConfigs().load(configFile)

/**
 * Convenient method to create and populate a new [KafkaConsumerConfigs] from an [inputStream].
 */
fun loadConsumerConfigs(inputStream: InputStream): KafkaConsumerConfigs = KafkaConsumerConfigs().load(inputStream)
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/KafkaConsumerWorker.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.
* You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.streamthoughts.kafka.clients.consumer

import io.streamthoughts.kafka.clients.consumer.KafkaConsumerWorker.KafkaConsumerWorker
import io.streamthoughts.kafka.clients.consumer.error.ConsumedErrorHandler
import io.streamthoughts.kafka.clients.consumer.error.closeTaskOnConsumedError
import io.streamthoughts.kafka.clients.consumer.error.serialization.DeserializationErrorHandler
import io.streamthoughts.kafka.clients.consumer.error.serialization.logAndFailOnInvalidRecord
import io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener
import io.streamthoughts.kafka.clients.consumer.listener.noop
import io.streamthoughts.kafka.clients.loggerFor
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.ExecutorCoroutineDispatcher
import kotlinx.coroutines.Job
import kotlinx.coroutines.asCoroutineDispatcher
import kotlinx.coroutines.joinAll
import kotlinx.coroutines.launch
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.Deserializer
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.atomic.AtomicInteger
import java.util.regex.Pattern

/**
 * [KafkaConsumerWorker] is the default [ConsumerWorker] implementation.
 *
 * NOTE(review): `<K, V>` parameters were reconstructed — the extraction stripped all
 * generic tokens from this dump; confirm against the original source.
 */
class KafkaConsumerWorker<K, V>(
    private val configs: KafkaConsumerConfigs,
    private val keyDeserializer: Deserializer<K>,
    private val valueDeserializer: Deserializer<V>,
    private val consumerRebalanceListener: ConsumerAwareRebalanceListener,
    private val batchRecordListener: ConsumerBatchRecordsListener<K, V>,
    private val onConsumedError: ConsumedErrorHandler,
    private val onDeserializationError: DeserializationErrorHandler<K, V>,
    private val consumerFactory: ConsumerFactory = ConsumerFactory.DefaultConsumerFactory
) : ConsumerWorker<K, V> {

    companion object KafkaConsumerWorker {
        private val Log = loggerFor(KafkaConsumerWorker::class.java)
    }

    private val groupId: String = configs[ConsumerConfig.GROUP_ID_CONFIG].toString()

    // Prefix used to derive per-task client.id values when none is configured.
    private val defaultClientIdPrefix: String = "consumer-$groupId"

    private var consumerTasks: Array<KafkaConsumerTask<K, V>> = emptyArray()

    private var consumerJobs: List<Job> = mutableListOf()

    private var isRunning = AtomicBoolean(false)

    override fun groupId(): String {
        return groupId
    }

    @Synchronized
    override fun start(topic: String, maxParallelHint: Int) {
        start(getTopicSubscription(topic), maxParallelHint)
    }

    @Synchronized
    override fun start(topics: List<String>, maxParallelHint: Int) {
        start(getTopicSubscription(topics), maxParallelHint)
    }

    @Synchronized
    override fun start(pattern: Pattern, maxParallelHint: Int) {
        start(getTopicSubscription(pattern), maxParallelHint)
    }

    @Synchronized
    private fun start(subscription: TopicSubscription, maxParallelHint: Int) {
        // Fix: log messages and thread names had the word "consumer" mangled into a fully
        // qualified package path — apparently a refactor-rename that leaked into strings.
        Log.info("KafkaConsumerWorker(group: $groupId): Initializing consumer tasks ($maxParallelHint)")
        consumerTasks = Array(maxParallelHint) { taskId ->
            KafkaConsumerTask(
                consumerFactory,
                configs,
                subscription,
                keyDeserializer,
                valueDeserializer,
                batchRecordListener,
                clientId = computeClientId(taskId),
                consumerAwareRebalanceListener = consumerRebalanceListener,
                deserializationErrorHandler = onDeserializationError,
                consumedErrorHandler = onConsumedError
            )
        }
        doStart()
        isRunning.set(true)
    }

    /**
     * Computes a per-task client.id.
     *
     * Fix: the original evaluated `clientId?.let { "$defaultClientIdPrefix-$taskId" } ?: ""`,
     * which ignored the user-configured client.id and returned "" when none was set.
     * Now the configured client.id (when present) is used as the prefix, otherwise the
     * default per-group prefix is used. TODO confirm intended fallback against upstream.
     */
    private fun computeClientId(taskId: Int): String {
        val clientId = configs[CommonClientConfigs.CLIENT_ID_CONFIG]
        return clientId?.let { "$it-$taskId" } ?: "$defaultClientIdPrefix-$taskId"
    }

    private fun doStart() {
        val threadNumber = AtomicInteger(1)
        // Daemon threads: consumer tasks must not keep the JVM alive on their own.
        val executor: ExecutorService = Executors.newFixedThreadPool(consumerTasks.size) {
            Thread(it, "consumer-$groupId-${threadNumber.getAndIncrement()}").also { t -> t.isDaemon = true }
        }
        val dispatcher: ExecutorCoroutineDispatcher = executor.asCoroutineDispatcher()
        val scope = CoroutineScope(dispatcher)

        consumerJobs = consumerTasks.map { task ->
            scope.launch {
                task.run()
            }
        }
    }

    override suspend fun joinAll() {
        consumerJobs.joinAll()
    }

    override fun close() {
        if (isRunning.get()) {
            Log.info("KafkaConsumerWorker(group: $groupId): Stopping all consumer tasks")
            consumerTasks.forEach { it.close() }
            isRunning.set(false)
        }
    }

    @Synchronized
    override fun pause() {
        Log.info("KafkaConsumerWorker(group: $groupId): Pausing all consumer tasks")
        consumerTasks.forEach { it.pause() }
    }

    @Synchronized
    override fun resume() {
        Log.info("KafkaConsumerWorker(group: $groupId): Resuming all consumer tasks")
        consumerTasks.forEach { it.resume() }
    }

    data class Builder<K, V>(
        var configs: KafkaConsumerConfigs,
        var keyDeserializer: Deserializer<K>,
        var valueDeserializer: Deserializer<V>,
        var onPartitionsAssigned: RebalanceListener? = null,
        var onPartitionsRevokedBeforeCommit: RebalanceListener? = null,
        var onPartitionsRevokedAfterCommit: RebalanceListener? = null,
        var onPartitionsLost: RebalanceListener? = null,
        var batchRecordListener: ConsumerBatchRecordsListener<K, V>? = null,
        var onDeserializationError: DeserializationErrorHandler<K, V>? = null,
        var consumerFactory: ConsumerFactory? = null,
        var onConsumedError: ConsumedErrorHandler? = null
    ) : ConsumerWorker.Builder<K, V> {

        override fun configure(init: KafkaConsumerConfigs.() -> Unit) {
            configs.init()
        }

        override fun factory(consumerFactory: ConsumerFactory) =
            apply { this.consumerFactory = consumerFactory }

        override fun onPartitionsAssigned(listener: RebalanceListener) =
            apply { this.onPartitionsAssigned = listener }

        // Fix: previously assigned to onPartitionsRevokedAfterCommit (copy-paste bug),
        // silently dropping the before-commit callback.
        override fun onPartitionsRevokedBeforeCommit(listener: RebalanceListener) =
            apply { this.onPartitionsRevokedBeforeCommit = listener }

        override fun onPartitionsRevokedAfterCommit(listener: RebalanceListener) =
            apply { this.onPartitionsRevokedAfterCommit = listener }

        override fun onPartitionsLost(listener: RebalanceListener) =
            apply { this.onPartitionsLost = listener }

        override fun onDeserializationError(handler: DeserializationErrorHandler<K, V>) =
            apply { onDeserializationError = handler }

        override fun onConsumedError(handler: ConsumedErrorHandler) =
            apply { onConsumedError = handler }

        override fun onConsumed(listener: ConsumerBatchRecordsListener<K, V>) =
            apply { this.batchRecordListener = listener }

        override fun build(): ConsumerWorker<K, V> =
            KafkaConsumerWorker(
                configs,
                keyDeserializer,
                valueDeserializer,
                SimpleConsumerAwareRebalanceListener(),
                batchRecordListener ?: noop(),
                onConsumedError ?: closeTaskOnConsumedError(),
                onDeserializationError ?: logAndFailOnInvalidRecord(),
                consumerFactory ?: ConsumerFactory.DefaultConsumerFactory
            )

        /** Bridges the optional per-event listeners configured on this builder. */
        inner class SimpleConsumerAwareRebalanceListener : ConsumerAwareRebalanceListener {
            override fun onPartitionsRevokedBeforeCommit(consumer: Consumer<*, *>,
                                                         partitions: Collection<TopicPartition>) {
                doInvoke(onPartitionsRevokedBeforeCommit, consumer, partitions)
            }

            override fun onPartitionsRevokedAfterCommit(consumer: Consumer<*, *>,
                                                        partitions: Collection<TopicPartition>) {
                doInvoke(onPartitionsRevokedAfterCommit, consumer, partitions)
            }

            override fun onPartitionsAssigned(consumer: Consumer<*, *>,
                                              partitions: Collection<TopicPartition>) {
                doInvoke(onPartitionsAssigned, consumer, partitions)
            }

            override fun onPartitionsLost(consumer: Consumer<*, *>,
                                          partitions: Collection<TopicPartition>) {
                doInvoke(onPartitionsLost, consumer, partitions)
            }

            private fun doInvoke(listener: RebalanceListener?,
                                 consumer: Consumer<*, *>,
                                 partitions: Collection<TopicPartition>) {
                listener?.invoke(consumer, partitions)
            }
        }
    }
}
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/TopicSubscription.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.
package io.streamthoughts.kafka.clients.consumer

import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener
import java.util.regex.Pattern

/** Creates a [TopicSubscription] matching topics by regex [pattern]. */
fun getTopicSubscription(pattern: Pattern): TopicSubscription = PatternTopicsSubscription(pattern)

/** Creates a [TopicSubscription] for a single [topic]. */
fun getTopicSubscription(topic: String): TopicSubscription = ListTopicsSubscription(listOf(topic))

/** Creates a [TopicSubscription] for an explicit list of [topics]. */
fun getTopicSubscription(topics: List<String>): TopicSubscription = ListTopicsSubscription(topics)

/**
 * The default interface to wrap a [org.apache.kafka.clients.consumer.Consumer] subscription.
 */
interface TopicSubscription {

    /**
     * Subscribes the given [consumer], registering [consumerRebalanceListener]
     * for partition assignment/revocation callbacks.
     *
     * @see [Consumer.subscribe]
     */
    fun subscribe(consumer: Consumer<*, *>, consumerRebalanceListener: ConsumerRebalanceListener)
}

/**
 * A topic list subscription.
 */
private class ListTopicsSubscription(private val topics: List<String>) : TopicSubscription {

    override fun subscribe(consumer: Consumer<*, *>, consumerRebalanceListener: ConsumerRebalanceListener) {
        consumer.subscribe(topics, consumerRebalanceListener)
    }

    override fun toString(): String {
        return "Subscription(topics=$topics)"
    }
}

/**
 * A topic pattern subscription.
 */
private class PatternTopicsSubscription(private val pattern: Pattern) : TopicSubscription {

    override fun subscribe(consumer: Consumer<*, *>, consumerRebalanceListener: ConsumerRebalanceListener) {
        consumer.subscribe(pattern, consumerRebalanceListener)
    }

    override fun toString(): String {
        return "Subscription(pattern=$pattern)"
    }
}
18 | */ 19 | package io.streamthoughts.kafka.clients.consumer 20 | 21 | import org.apache.kafka.clients.consumer.Consumer 22 | import org.apache.kafka.common.TopicPartition 23 | 24 | typealias RebalanceListener = (Consumer<*, *>, Collection ) -> Unit -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/ConsumedErrorHandler.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients.consumer.error 20 | 21 | import io.streamthoughts.kafka.clients.consumer.ConsumerTask 22 | import org.apache.kafka.clients.consumer.ConsumerRecord 23 | 24 | /** 25 | * Handles errors thrown during the processing of a non-empty batch of [ConsumerRecord] 26 | * using a given [io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener] 27 | */ 28 | interface ConsumedErrorHandler { 29 | 30 | /** 31 | * This method is invoked when an [thrownException] is thrown while a [consumerTask] is processing 32 | * a non-empty batch of [records]. 33 | * 34 | * @param consumerTask the [ConsumerTask] polling records. 35 | * @param records the remaining [records] to be processed (including the one that failed). 36 | * @param thrownException the [Exception] that was thrown while processing [records]. 37 | */ 38 | fun handle(consumerTask: ConsumerTask, records: List>, thrownException: Exception) 39 | } -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/ConsumedErrorHandlers.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. 
You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | package io.streamthoughts.kafka.clients.consumer.error 20 | 21 | import io.streamthoughts.kafka.clients.consumer.ConsumerTask 22 | import io.streamthoughts.kafka.clients.loggerFor 23 | import org.apache.kafka.clients.consumer.ConsumerRecord 24 | import org.apache.kafka.clients.consumer.OffsetAndMetadata 25 | import org.apache.kafka.common.TopicPartition 26 | import org.slf4j.Logger 27 | import java.time.Duration 28 | import kotlin.math.max 29 | 30 | fun closeTaskOnConsumedError(): ConsumedErrorHandler = CloseTaskOnConsumedError 31 | fun logAndCommitOnConsumedError(): ConsumedErrorHandler = LogAndCommitOnConsumedError 32 | 33 | /** 34 | * Stops the [ConsumerTask] when an error is thrown while a non-empty batch of [ConsumerRecord] is being processed 35 | * by a [io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener]. 36 | */ 37 | private object CloseTaskOnConsumedError: ConsumedErrorHandler { 38 | 39 | private val Log: Logger = loggerFor(CloseTaskOnConsumedError.javaClass) 40 | 41 | override fun handle(consumerTask: ConsumerTask, records: List>, thrownException: Exception) { 42 | Log.error("Stopping consumerTask after an exception was thrown while processing records", thrownException) 43 | consumerTask.close(Duration.ZERO) 44 | } 45 | } 46 | 47 | /** 48 | * Log and skips all records when an error is thrown while a non-empty batch of [ConsumerRecord] is being processed 49 | * by a [io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener]. 
50 | * 51 | * This [ConsumedErrorHandler] will commit the offsets for the failing records batch. 52 | */ 53 | private object LogAndCommitOnConsumedError: ConsumedErrorHandler { 54 | private val Log: Logger = loggerFor(LogAndCommitOnConsumedError.javaClass) 55 | 56 | override fun handle(consumerTask: ConsumerTask, records: List>, thrownException: Exception) { 57 | Log.error("Failed to process records: $records. Ignore and continue processing.", thrownException) 58 | // The ConsumerTask doesn't automatically commit consumer offsets after an exception is thrown. 59 | // Thus, we have to manually commit offsets to actually skip records. 60 | consumerTask.commitSync(offsetsToCommitFor(records)) 61 | } 62 | 63 | private fun offsetsToCommitFor(records: List>): Map { 64 | val offsetsToCommit: MutableMap = HashMap() 65 | records.forEach { r -> 66 | val partition = TopicPartition(r.topic(), r.partition()) 67 | val current = offsetsToCommit[partition]?.offset() ?: 0 68 | offsetsToCommit[partition] = OffsetAndMetadata(max(current, r.offset() + 1)) 69 | } 70 | return offsetsToCommit 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/serialization/DeserializationErrorHandler.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. 
You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | package io.streamthoughts.kafka.clients.consumer.error.serialization 20 | 21 | import org.apache.kafka.clients.consumer.ConsumerRecord 22 | 23 | /** 24 | * 25 | */ 26 | interface DeserializationErrorHandler { 27 | 28 | sealed class Response { 29 | data class Replace(val key: K?, val value: V?): DeserializationErrorHandler.Response() 30 | class Fail: DeserializationErrorHandler.Response() 31 | class Skip: DeserializationErrorHandler.Response() 32 | } 33 | 34 | /** 35 | * Handles the [error] that has been thrown while de-serializing the given raw [record]. 36 | */ 37 | fun handle(record: ConsumerRecord, error: Exception): Response 38 | } -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/serialization/DeserializationErrorHandlers.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. 
package io.streamthoughts.kafka.clients.consumer.error.serialization

import io.streamthoughts.kafka.clients.loggerFor
import org.apache.kafka.clients.consumer.ConsumerRecord

/** Returns a handler that substitutes [key]/[value] for any invalid record. */
fun <K, V> replaceWithOnInvalidRecord(key: K, value: V): DeserializationErrorHandler<K, V> = ReplaceErrorHandler(key, value)

/** Returns a handler that substitutes nulls for any invalid record. */
fun <K, V> replaceWithNullOnInvalidRecord(): DeserializationErrorHandler<K, V> = ReplaceErrorHandler()

/** Returns a handler that logs the failure and aborts consumption. */
fun <K, V> logAndFailOnInvalidRecord(): DeserializationErrorHandler<K, V> = LogAndFailErrorHandler()

/** Returns a handler that logs the failure and skips the record. */
fun <K, V> logAndSkipOnInvalidRecord(): DeserializationErrorHandler<K, V> = LogAndSkipErrorHandler()

// BUG FIX: all three handlers previously logged "offset ${record.topic()}" —
// printing the topic name where the record offset belongs, which made the log
// useless for locating the bad record. Centralized here with record.offset().
private fun recordCoordinates(record: ConsumerRecord<*, *>): String =
    " topic = ${record.topic()} " +
    ", partition = ${record.partition()}" +
    ", offset = ${record.offset()}"

private class ReplaceErrorHandler<K, V>(
    private val key: K? = null,
    private val value: V? = null
) : DeserializationErrorHandler<K, V> {

    companion object {
        private val Log = loggerFor(DeserializationErrorHandler::class.java)
    }

    override fun handle(
        record: ConsumerRecord<ByteArray, ByteArray>,
        error: Exception
    ): DeserializationErrorHandler.Response<K, V> {
        Log.warn("Cannot deserialize record:" + recordCoordinates(record) + ". Replace key and value.", error)
        return DeserializationErrorHandler.Response.Replace(key, value)
    }
}

private class LogAndSkipErrorHandler<K, V> : DeserializationErrorHandler<K, V> {

    companion object {
        private val Log = loggerFor(LogAndSkipErrorHandler::class.java)
    }

    override fun handle(
        record: ConsumerRecord<ByteArray, ByteArray>,
        error: Exception
    ): DeserializationErrorHandler.Response<K, V> {
        Log.error("Cannot deserialize record:" + recordCoordinates(record) + ". Skip and continue.", error)
        return DeserializationErrorHandler.Response.Skip()
    }
}

private class LogAndFailErrorHandler<K, V> : DeserializationErrorHandler<K, V> {

    companion object {
        private val Log = loggerFor(LogAndFailErrorHandler::class.java)
    }

    override fun handle(
        record: ConsumerRecord<ByteArray, ByteArray>,
        error: Exception
    ): DeserializationErrorHandler.Response<K, V> {
        Log.error("Cannot deserialize record:" + recordCoordinates(record) + ". Fail consumption.", error)
        return DeserializationErrorHandler.Response.Fail()
    }
}
package io.streamthoughts.kafka.clients.consumer.listener

import io.streamthoughts.kafka.clients.consumer.ConsumerTask
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.ConsumerRecords

/** Returns a listener that does nothing with the polled batch. */
fun <K, V> noop(): ConsumerBatchRecordsListener<K, V> {
    return DelegatingConsumerBatchRecordsListener { _: ConsumerTask, _: ConsumerRecords<K, V> -> }
}

/** Wraps a per-record callback receiving the full [ConsumerRecord]. */
@JvmName("onConsumedRecord")
fun <K, V> forEach(callback: (consumerTask: ConsumerTask, record: ConsumerRecord<K, V>) -> Unit)
        : ConsumerBatchRecordsListener<K, V> {
    return DelegatingConsumerBatchRecordsListener { consumerTask, batch ->
        for (single in batch) {
            callback(consumerTask, single)
        }
    }
}

/** Wraps a per-record callback receiving the record as a (key, value) pair. */
@JvmName("onConsumedValueRecordWithKey")
fun <K, V> forEach(callback: (consumerTask: ConsumerTask, record: Pair<K?, V?>) -> Unit)
        : ConsumerBatchRecordsListener<K, V> {
    return DelegatingConsumerBatchRecordsListener { consumerTask, batch ->
        for (single in batch) {
            callback(consumerTask, Pair(single.key(), single.value()))
        }
    }
}

/** Wraps a per-record callback receiving only the record value. */
@JvmName("onConsumedValueRecord")
fun <K, V> forEach(callback: (consumerTask: ConsumerTask, value: V?) -> Unit)
        : ConsumerBatchRecordsListener<K, V> {
    return DelegatingConsumerBatchRecordsListener { consumerTask, batch ->
        for (single in batch) {
            callback(consumerTask, single.value())
        }
    }
}

interface ConsumerBatchRecordsListener<K, V> {

    /**
     * This method is invoked after the [consumerTask] has polled a non-empty batch of [records].
     *
     * @see [org.apache.kafka.clients.consumer.Consumer.poll]
     */
    fun handle(consumerTask: ConsumerTask, records: ConsumerRecords<K, V>): Unit
}

/** Adapts a plain lambda to the [ConsumerBatchRecordsListener] interface. */
private class DelegatingConsumerBatchRecordsListener<K, V>(
    private val callback: (consumerTask: ConsumerTask, record: ConsumerRecords<K, V>) -> Unit
) : ConsumerBatchRecordsListener<K, V> {

    override fun handle(consumerTask: ConsumerTask, records: ConsumerRecords<K, V>) {
        callback(consumerTask, records)
    }
}
24 | * 25 | * @see org.apache.kafka.clients.producer.ProducerConfig.ACKS_CONFIG 26 | */ 27 | object Acks { 28 | /** 29 | * The producer will not wait for any acknowledgment from the server at all. 30 | */ 31 | const val None = "0" 32 | /** 33 | * The producer will wait for the broker leader to acknowledge the record. 34 | */ 35 | const val Leader = "1" 36 | 37 | /** 38 | * The leader will wait for the full set of in-sync replicas to acknowledge the record. 39 | */ 40 | const val InSyncReplicas = "all" 41 | } -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/KafkaProducerConfigs.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
package io.streamthoughts.kafka.clients.producer

import io.streamthoughts.kafka.clients.KafkaClientConfigs
import io.streamthoughts.kafka.clients.load
import io.streamthoughts.kafka.clients.toStringMap
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.record.CompressionType
import java.io.InputStream
import java.time.Duration
import java.util.*

/**
 * Uses to build and encapsulate a configuration [Map]
 * for creating a new [org.apache.kafka.clients.producer.KafkaProducer]
 *
 * @see [ProducerConfig]
 */
class KafkaProducerConfigs(props: Map<String, Any?> = emptyMap()) : KafkaClientConfigs(props) {

    override fun with(key: String, value: Any?) = apply { super.with(key, value) }

    /** Applies common client-level settings via [init]. */
    fun client(init: KafkaClientConfigs.() -> Unit) = apply { this.init() }

    /**
     * @see ProducerConfig.ACKS_CONFIG
     */
    fun acks(acks: String) =
        apply { this[ProducerConfig.ACKS_CONFIG] = acks }

    /**
     * @see ProducerConfig.BATCH_SIZE_CONFIG
     */
    fun batchSize(batchSize: Int) =
        apply { this[ProducerConfig.BATCH_SIZE_CONFIG] = batchSize }

    /**
     * @see ProducerConfig.BUFFER_MEMORY_CONFIG
     */
    fun bufferMemory(bufferMemory: Long) =
        apply { this[ProducerConfig.BUFFER_MEMORY_CONFIG] = bufferMemory }

    /**
     * BUG FIX: use a locale-insensitive lower-casing. The default
     * `toLowerCase()` depends on the JVM default locale (e.g. Turkish 'I'
     * lower-cases to a dotless 'ı'), which would produce an invalid value for
     * this protocol-level config string.
     *
     * @see ProducerConfig.COMPRESSION_TYPE_CONFIG
     */
    fun compressionType(compressionType: CompressionType) =
        apply { this[ProducerConfig.COMPRESSION_TYPE_CONFIG] = compressionType.toString().toLowerCase(Locale.ROOT) }

    /**
     * @see ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG
     */
    fun deliveryTimeout(deliveryTimeout: Duration) =
        apply { this[ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG] = deliveryTimeout.toMillis() }

    /**
     * @see ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG
     */
    fun enableIdempotence(enableIdempotence: Boolean) =
        apply { this[ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG] = enableIdempotence }

    /**
     * @see ProducerConfig.LINGER_MS_CONFIG
     */
    fun lingerMs(lingerMs: Long) =
        apply { this[ProducerConfig.LINGER_MS_CONFIG] = lingerMs }

    /**
     * @see ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION
     */
    fun maxInFlightRequestsPerConnection(maxInFlightRequestsPerConnection: Int) =
        apply { this[ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION] = maxInFlightRequestsPerConnection }

    /**
     * @see ProducerConfig.MAX_BLOCK_MS_CONFIG
     */
    fun maxBlock(maxBlockDuration: Duration) =
        apply { this[ProducerConfig.MAX_BLOCK_MS_CONFIG] = maxBlockDuration.toMillis() }

    /**
     * @see ProducerConfig.RETRIES_CONFIG
     */
    fun retries(retries: Int) =
        apply { this[ProducerConfig.RETRIES_CONFIG] = retries }

    /**
     * @see ProducerConfig.RETRY_BACKOFF_MS_CONFIG
     */
    fun retryBackoff(retryBackoff: Long) =
        apply { this[ProducerConfig.RETRY_BACKOFF_MS_CONFIG] = retryBackoff }

    /**
     * @see ProducerConfig.TRANSACTIONAL_ID_CONFIG
     */
    fun transactionalId(transactionalId: String) =
        apply { this[ProducerConfig.TRANSACTIONAL_ID_CONFIG] = transactionalId }

    /**
     * @see ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG
     */
    fun keySerializer(keySerializer: String) =
        apply { this[ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG] = keySerializer }

    /**
     * @see ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG
     */
    fun valueSerializer(valueSerializer: String) =
        apply { this[ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG] = valueSerializer }
}

/**
 * Creates a new empty [KafkaProducerConfigs].
 */
fun emptyProducerConfigs(): KafkaProducerConfigs = KafkaProducerConfigs(emptyMap())

/**
 * Creates a new [KafkaProducerConfigs] with the given [pairs].
 */
fun producerConfigsOf(vararg pairs: Pair<String, Any?>): KafkaProducerConfigs = producerConfigsOf(mapOf(*pairs))

/**
 * Creates a new [KafkaProducerConfigs] with the given [props].
 */
fun producerConfigsOf(props: Map<String, Any?>): KafkaProducerConfigs = KafkaProducerConfigs(props)

/**
 * Creates a new [KafkaProducerConfigs] with the given [props].
 */
fun producerConfigsOf(props: Properties): KafkaProducerConfigs = producerConfigsOf(props.toStringMap())

/**
 * Convenient method to create and populate a new [KafkaProducerConfigs] from a [configFile].
 */
fun loadProducerConfigs(configFile: String): KafkaProducerConfigs = KafkaProducerConfigs().load(configFile)

/**
 * Convenient method to create and populate a new [KafkaClientConfigs] from an [inputStream].
 */
fun loadProducerConfigs(inputStream: InputStream): KafkaProducerConfigs = KafkaProducerConfigs().load(inputStream)
You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | package io.streamthoughts.kafka.clients.producer 20 | 21 | import ch.qos.logback.classic.Level 22 | import io.streamthoughts.kafka.clients.KafkaRecord 23 | import io.streamthoughts.kafka.clients.loggerFor 24 | import io.streamthoughts.kafka.clients.producer.callback.OnSendErrorCallback 25 | import io.streamthoughts.kafka.clients.producer.callback.OnSendSuccessCallback 26 | import io.streamthoughts.kafka.clients.producer.callback.ProducerSendCallback 27 | import org.apache.kafka.clients.producer.KafkaProducer 28 | import org.apache.kafka.clients.producer.Producer 29 | import org.apache.kafka.clients.producer.ProducerConfig 30 | import org.apache.kafka.clients.producer.ProducerRecord 31 | import org.apache.kafka.clients.producer.RecordMetadata 32 | import org.apache.kafka.common.Metric 33 | import org.apache.kafka.common.MetricName 34 | import org.apache.kafka.common.PartitionInfo 35 | import org.apache.kafka.common.errors.AuthorizationException 36 | import org.apache.kafka.common.errors.OutOfOrderSequenceException 37 | import org.apache.kafka.common.errors.ProducerFencedException 38 | import org.apache.kafka.common.serialization.Serializer 39 | import org.slf4j.Logger 40 | import java.time.Duration 41 | import java.time.Instant 42 | import java.util.concurrent.CompletableFuture 43 | import java.util.concurrent.Future 44 | import java.util.concurrent.atomic.AtomicInteger 45 | 46 | /** 47 | * The default kafka-based [ProducerContainer] implementation 48 | */ 49 | class KafkaProducerContainer private constructor( 50 | 
private val configs: KafkaProducerConfigs, 51 | private val onSendCallback: ProducerSendCallback, 52 | private val keySerializer: Serializer ?= null, 53 | private val valueSerializer: Serializer ?= null, 54 | private val producerFactory: ProducerFactory? = null, 55 | private val defaultTopic: String? = null 56 | ): ProducerContainer { 57 | 58 | companion object { 59 | private val Log: Logger = loggerFor(ProducerContainer::class.java) 60 | private val numInstances = AtomicInteger(0) 61 | private const val defaultClientIdPrefix = "producer-" 62 | 63 | private fun computeNextClientId(producerConfigs: Map): String { 64 | val clientIdPrefix = producerConfigs[ProducerConfig.CLIENT_ID_CONFIG] ?: defaultClientIdPrefix 65 | return "$clientIdPrefix-${numInstances.incrementAndGet()}" 66 | } 67 | } 68 | 69 | @Volatile 70 | private var state = ProducerContainer.State.CREATED 71 | 72 | private var transactionId: String? = null 73 | private lateinit var clientId: String 74 | private lateinit var producer: Producer 75 | 76 | override fun send( 77 | records: Collection>, 78 | topic: String?, 79 | partition: Int?, 80 | timestamp: Instant?, 81 | onSuccess: OnSendSuccessCallback?, 82 | onError: OnSendErrorCallback? 83 | ): Future>> { 84 | val futures: List>> = records.map { 85 | send(it.first, it.second, topic, partition, timestamp, onSuccess, onError) as CompletableFuture 86 | } 87 | return CompletableFuture.allOf(*futures.toTypedArray()).thenApply { futures.map { it.join() }.toList() } 88 | } 89 | 90 | override fun send( 91 | record: KafkaRecord, 92 | onSuccess: OnSendSuccessCallback?, 93 | onError: OnSendErrorCallback? 
94 | ): Future> { 95 | val producerRecord = KafkaRecord( 96 | record.key, 97 | record.value, 98 | record.topic?:defaultTopic, 99 | record.partition, 100 | record.timestamp 101 | ).toProducerRecord() 102 | return send(producerRecord, onSuccess, onError) 103 | } 104 | 105 | override fun send( 106 | record: ProducerRecord, 107 | onSuccess: OnSendSuccessCallback?, 108 | onError: OnSendErrorCallback? 109 | ) : CompletableFuture> { 110 | return runOrThrowIfIllegalState { 111 | val future = CompletableFuture>() 112 | logWithProducerInfo(Level.DEBUG, "Sending record $record") 113 | producer.send(record) { metadata: RecordMetadata, exception: Exception? -> 114 | 115 | if (exception != null) { 116 | future.completeExceptionally(exception) 117 | 118 | (onError?.let { DelegateSendCallback(onError = onError) }?: onSendCallback) 119 | .onSendError(this, record, exception) 120 | } else { 121 | future.complete(SendResult(record, metadata)) 122 | 123 | (onSuccess?.let { DelegateSendCallback(onSuccess = onSuccess) }?: onSendCallback) 124 | .onSendSuccess(this, record, metadata) 125 | } 126 | } 127 | future 128 | } 129 | } 130 | 131 | override fun execute(action: (producer: Producer) -> T) = run { action(producer) } 132 | 133 | override fun runTx(action: (ProducerContainer) -> Unit): TransactionResult { 134 | return runOrThrowIfIllegalState { 135 | try { 136 | producer.beginTransaction() 137 | action.invoke(this) 138 | producer.commitTransaction() 139 | CommittedTransactionResult 140 | } catch (e: Exception) { 141 | when (e) { 142 | is ProducerFencedException, 143 | is OutOfOrderSequenceException, 144 | is AuthorizationException -> { 145 | logWithProducerInfo( 146 | Level.ERROR, 147 | "Unrecoverable error happened while executing producer transactional action. 
Close producer immediately", 148 | e 149 | ) 150 | close() 151 | UnrecoverableErrorTransactionResult(e) 152 | } 153 | else -> { 154 | logWithProducerInfo( 155 | Level.ERROR, 156 | "Error happened while executing producer transactional action. Abort current transaction", 157 | e 158 | ) 159 | producer.abortTransaction() 160 | AbortedTransactionResult(e) 161 | } 162 | } 163 | AbortedTransactionResult(e) 164 | } 165 | } 166 | } 167 | 168 | override fun init() { 169 | if (isInitialized()) { 170 | throw IllegalStateException("Producer is already initialized") 171 | } 172 | val producerConfigs = HashMap(configs) 173 | clientId = computeNextClientId(producerConfigs).also { producerConfigs[ProducerConfig.CLIENT_ID_CONFIG] = it } 174 | transactionId = producerConfigs[ProducerConfig.TRANSACTIONAL_ID_CONFIG]?.toString() 175 | logWithProducerInfo(Level.INFO, "Initializing") 176 | producer = producerFactory?.make(producerConfigs, keySerializer, valueSerializer) ?: KafkaProducer(producerConfigs, keySerializer, valueSerializer) 177 | if (ProducerConfig.TRANSACTIONAL_ID_CONFIG in producerConfigs) 178 | producer.initTransactions() 179 | state = ProducerContainer.State.STARTED 180 | } 181 | 182 | override fun metrics(topic: String): Map { 183 | return runOrThrowIfIllegalState { 184 | producer.metrics() 185 | } 186 | } 187 | 188 | override fun partitionsFor(topic: String): List { 189 | return runOrThrowIfIllegalState { 190 | producer.partitionsFor(topic) 191 | } 192 | } 193 | 194 | override fun flush() { 195 | runOrThrowIfIllegalState { 196 | logWithProducerInfo(Level.DEBUG, "Flushing") 197 | producer.flush() 198 | } 199 | } 200 | 201 | override fun close(timeout: Duration) { 202 | if (isClosed() || !isInitialized()) return // silently ignore call if producer is already closed. 
203 | 204 | runOrThrowIfIllegalState { 205 | state = ProducerContainer.State.PENDING_SHUTDOWN 206 | logWithProducerInfo(Level.INFO, "Closing") 207 | producer.close(timeout) 208 | state = ProducerContainer.State.CLOSED 209 | logWithProducerInfo(Level.INFO, "Closed") 210 | } 211 | } 212 | 213 | private fun isClosed() = 214 | state == ProducerContainer.State.CLOSED || 215 | state == ProducerContainer.State.PENDING_SHUTDOWN 216 | 217 | private fun isInitialized() = this::producer.isInitialized 218 | 219 | private fun runOrThrowIfIllegalState(action: () -> R): R { 220 | if (!isInitialized()) throw IllegalStateException("Producer is not initialized yet") 221 | if (isClosed()) throw IllegalStateException("Cannot perform operation after producer has been closed") 222 | return action.invoke() 223 | } 224 | 225 | private fun logWithProducerInfo(level: Level, msg: String, exception: java.lang.Exception? = null) { 226 | val message = "Producer (clientId=$clientId${transactionId?.let {" , transactionalId=$transactionId" }?:""}): $msg" 227 | when(level) { 228 | Level.ERROR -> Log.error(message, exception) 229 | Level.WARN -> Log.warn(message) 230 | Level.INFO -> Log.info(message) 231 | Level.DEBUG -> Log.debug(message) 232 | else -> Log.debug(message) 233 | } 234 | } 235 | 236 | override fun state(): ProducerContainer.State = state 237 | 238 | data class Builder( 239 | var configs: KafkaProducerConfigs, 240 | var keySerializer: Serializer ?= null, 241 | var valueSerializer: Serializer ?= null, 242 | var producerFactory: ProducerFactory? = null, 243 | var defaultTopic: String? = null, 244 | var onSendSuccess: OnSendSuccessCallback? = null, 245 | var onSendError: OnSendErrorCallback? = null, 246 | var onSendCallback: ProducerSendCallback? 
= null 247 | ) : ProducerContainer.Builder { 248 | 249 | override fun configure(init: KafkaProducerConfigs.() -> Unit) = 250 | apply { configs.init() } 251 | 252 | override fun defaultTopic(topic: String) = 253 | apply { this.defaultTopic = topic } 254 | 255 | override fun producerFactory(producerFactory: ProducerFactory) = 256 | apply { this.producerFactory = producerFactory } 257 | 258 | override fun onSendError(callback: OnSendErrorCallback) = 259 | apply { this.onSendError = callback } 260 | 261 | override fun onSendSuccess(callback: OnSendSuccessCallback) = 262 | apply { this.onSendSuccess = callback } 263 | 264 | override fun onSendCallback(callback: ProducerSendCallback) = 265 | apply { this.onSendCallback = callback } 266 | 267 | override fun keySerializer(serializer: Serializer) = 268 | apply { this.keySerializer = serializer } 269 | 270 | override fun valueSerializer(serializer: Serializer) = 271 | apply { this.valueSerializer = serializer } 272 | 273 | fun build(): ProducerContainer = KafkaProducerContainer( 274 | configs, 275 | onSendCallback ?: DelegateSendCallback(onSendSuccess, onSendError), 276 | keySerializer, 277 | valueSerializer, 278 | producerFactory, 279 | defaultTopic 280 | ) 281 | } 282 | 283 | private class DelegateSendCallback( 284 | private val onSuccess: OnSendSuccessCallback? = null, 285 | private val onError: OnSendErrorCallback? 
= null 286 | ) : ProducerSendCallback { 287 | 288 | override fun onSendError( 289 | container: ProducerContainer, 290 | record: ProducerRecord, 291 | error: Exception 292 | ) { 293 | this.onError?.invoke(container, record, error) 294 | } 295 | override fun onSendSuccess( 296 | container: ProducerContainer, 297 | record: ProducerRecord, 298 | metadata: RecordMetadata 299 | ) { 300 | onSuccess?.invoke(container, record, metadata) 301 | } 302 | } 303 | } -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/ProducerContainer.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients.producer 20 | 21 | import io.streamthoughts.kafka.clients.KafkaRecord 22 | import io.streamthoughts.kafka.clients.producer.callback.OnSendErrorCallback 23 | import io.streamthoughts.kafka.clients.producer.callback.OnSendSuccessCallback 24 | import io.streamthoughts.kafka.clients.producer.callback.ProducerSendCallback 25 | import org.apache.kafka.clients.producer.Producer 26 | import org.apache.kafka.clients.producer.ProducerRecord 27 | import org.apache.kafka.common.Metric 28 | import org.apache.kafka.common.MetricName 29 | import org.apache.kafka.common.PartitionInfo 30 | import org.apache.kafka.common.serialization.Serializer 31 | import java.io.Closeable 32 | import java.time.Duration 33 | import java.time.Instant 34 | import java.util.concurrent.Future 35 | 36 | 37 | sealed class TransactionResult 38 | /** 39 | * The transaction has been aborted due to an exception. 40 | * @see Producer.abortTransaction 41 | */ 42 | data class AbortedTransactionResult( val exception: Exception): TransactionResult() 43 | 44 | /** 45 | * The transaction throws an unrecoverable error. Hence the [Producer] has been closed. 46 | * 47 | */ 48 | data class UnrecoverableErrorTransactionResult( val exception: Exception): TransactionResult() 49 | 50 | /** 51 | * The transaction has been successfully committed 52 | * @see Producer.commitTransaction 53 | */ 54 | object CommittedTransactionResult: TransactionResult() 55 | 56 | interface ProducerContainer: Closeable { 57 | 58 | enum class State { 59 | /** 60 | * The [ProducerContainer] is created. 61 | */ 62 | CREATED, 63 | 64 | /** 65 | * The [ProducerContainer] is initialized and can be used for sending records. 66 | */ 67 | STARTED, 68 | 69 | /** 70 | * The [ProducerContainer] is closing 71 | */ 72 | PENDING_SHUTDOWN, 73 | 74 | /** 75 | * The [ProducerContainer] is closed. 76 | */ 77 | CLOSED, 78 | } 79 | 80 | interface Builder { 81 | /** 82 | * Configure this [ProducerContainer]. 
83 | */ 84 | fun configure(init: KafkaProducerConfigs.() -> Unit): Builder 85 | 86 | /** 87 | * Sets the [producerFactory] to be used for creating a new [Producer] client. 88 | */ 89 | fun producerFactory(producerFactory: ProducerFactory): Builder 90 | 91 | /** 92 | * Set the default topic to send records 93 | */ 94 | fun defaultTopic(topic: String): Builder 95 | 96 | /** 97 | * Set the default [callback] to invoke when an error happen while sending a record. 98 | */ 99 | fun onSendError(callback: OnSendErrorCallback): Builder 100 | 101 | /** 102 | * Set the default [callback] to invoke when a record has been sent successfully. 103 | */ 104 | fun onSendSuccess(callback: OnSendSuccessCallback): Builder 105 | 106 | /** 107 | * Set the default [callback] to be invoked after a sent record completes either successfully or unsuccessfully. 108 | * 109 | * @see onSendError 110 | * @see onSendSuccess 111 | */ 112 | fun onSendCallback(callback: ProducerSendCallback): Builder 113 | 114 | /** 115 | * Set the [serializer] to be used for serializing the record-key. 116 | */ 117 | fun keySerializer(serializer: Serializer): Builder 118 | 119 | /** 120 | * Set the [serializer] to be used for serializing the record-value. 121 | */ 122 | fun valueSerializer(serializer: Serializer): Builder 123 | } 124 | 125 | /** 126 | * Asynchronously send a record for the given [value] to the given to the given [topic] (or the default one if null is given) 127 | * and [partition] with the given [timestamp]. 128 | * 129 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledge. 130 | * Otherwise invoke [onError] if an error happen while sending. 131 | * 132 | * @see Producer.send 133 | * @return a [Future] of [SendResult] 134 | */ 135 | fun send(value : V, 136 | topic: String? = null, 137 | partition: Int? = null, 138 | timestamp: Instant? = null, 139 | onSuccess: OnSendSuccessCallback? = null, 140 | onError: OnSendErrorCallback? 
= null) : Future> { 141 | return send(null, value, topic, partition, timestamp, onSuccess, onError) 142 | } 143 | 144 | /** 145 | * Asynchronously send a record for the given [key] and [value] to the given [topic] (or the default one if null is given) 146 | * and [partition] with the given [timestamp]. 147 | * 148 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledge. 149 | * Otherwise invoke [onError] if an error happen while sending. 150 | * 151 | * @see Producer.send 152 | * @return a [Future] of [SendResult] 153 | */ 154 | fun send(key: K?= null, 155 | value: V? = null, 156 | topic: String? = null, 157 | partition: Int? = null, 158 | timestamp: Instant? = null, 159 | onSuccess: OnSendSuccessCallback? = null, 160 | onError: OnSendErrorCallback? = null) : Future> { 161 | return send(KafkaRecord(key, value, topic, partition, timestamp), onSuccess, onError) 162 | } 163 | 164 | /** 165 | * Asynchronously send the given key-value [record] to the given [topic] (or the default one if null is given) 166 | * and [partition] with the given [timestamp]. 167 | * 168 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledge. 169 | * Otherwise invoke [onError] if an error happen while sending. 170 | * 171 | * @see Producer.send 172 | * @return a [Future] of [SendResult] 173 | */ 174 | fun send(record: Pair, 175 | topic: String? = null, 176 | partition: Int? = null, 177 | timestamp: Instant? = null, 178 | onSuccess: OnSendSuccessCallback? = null, 179 | onError: OnSendErrorCallback? = null) : Future> { 180 | return send(record.first, record.second, topic, partition, timestamp, onSuccess, onError) 181 | } 182 | 183 | /** 184 | * Asynchronously send all the given key-value [records] to the given [topic] (or the default one if null is given) 185 | * and [partition] with the given [timestamp]. 
186 | * 187 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledge. 188 | * Otherwise invoke [onError] if an error happen while sending. 189 | * 190 | * @see Producer.send 191 | * @return a [Future] of [SendResult] 192 | */ 193 | fun send(records: Collection>, 194 | topic: String? = null, 195 | partition: Int? = null, 196 | timestamp: Instant? = null, 197 | onSuccess: OnSendSuccessCallback? = null, 198 | onError: OnSendErrorCallback? = null) : Future>> 199 | 200 | /** 201 | * Asynchronously send the given [record]. 202 | * 203 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledge. 204 | * Otherwise invoke [onError] if an error happen while sending. 205 | * 206 | * @see Producer.send 207 | * @return a [Future] of [SendResult] 208 | */ 209 | fun send(record: KafkaRecord, 210 | onSuccess: OnSendSuccessCallback? = null, 211 | onError: OnSendErrorCallback? = null) : Future> 212 | /** 213 | * Asynchronously send the given [record]. 214 | * 215 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledge. 216 | * Otherwise invoke [onError] if an error happen while sending. 217 | * 218 | * @see Producer.send 219 | * @return a [Future] of [SendResult] 220 | */ 221 | fun send(record: ProducerRecord, 222 | onSuccess: OnSendSuccessCallback? = null, 223 | onError: OnSendErrorCallback? = null) : Future> 224 | 225 | /** 226 | * Executes the given [action] with the underlying [Producer]. 227 | */ 228 | fun execute(action: (producer: Producer) -> T): T 229 | 230 | /** 231 | * Executes the given [action] in a producer transaction. 
232 | */ 233 | fun runTx(action: (ProducerContainer) -> Unit): TransactionResult 234 | 235 | /** 236 | * @see Producer.metrics 237 | */ 238 | fun metrics(topic: String): Map 239 | 240 | /** 241 | * @see Producer.partitionsFor 242 | */ 243 | fun partitionsFor(topic: String): List 244 | 245 | /** 246 | * Initialize this [ProducerContainer]. 247 | */ 248 | fun init() 249 | 250 | /** 251 | * Flush the [Producer]. 252 | */ 253 | fun flush() 254 | 255 | /** 256 | * @return the [State] of this container. 257 | */ 258 | fun state(): State 259 | 260 | /** 261 | * Close this [ProducerContainer]. 262 | * 263 | * @see [Producer.close]. 264 | */ 265 | override fun close() { 266 | close(Duration.ofMillis(Long.MAX_VALUE)) 267 | } 268 | 269 | /** 270 | * Close this [ProducerContainer]. 271 | * 272 | * @see [Producer.close]. 273 | */ 274 | fun close(timeout: Duration) 275 | } 276 | -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/ProducerFactory.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | package io.streamthoughts.kafka.clients.producer 20 | 21 | import org.apache.kafka.clients.producer.Producer 22 | import org.apache.kafka.common.serialization.Serializer 23 | 24 | /** 25 | * The default factory interface to create new [Producer] instance. 26 | */ 27 | interface ProducerFactory { 28 | 29 | /** 30 | * Creates a new [Producer] instance with the given [configs]. 31 | */ 32 | fun make(configs: Map, 33 | keySerializer: Serializer? = null, 34 | valueSerializer: Serializer? = null): Producer 35 | } 36 | -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/SendResult.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients.producer 20 | 21 | import org.apache.kafka.clients.producer.ProducerRecord 22 | import org.apache.kafka.clients.producer.RecordMetadata 23 | 24 | data class SendResult(val record: ProducerRecord, val metadata: RecordMetadata) -------------------------------------------------------------------------------- /clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/callback/ProducerSendCallback.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients.producer.callback 20 | 21 | import io.streamthoughts.kafka.clients.loggerFor 22 | import io.streamthoughts.kafka.clients.producer.ProducerContainer 23 | import io.streamthoughts.kafka.clients.producer.ProducerContainer.* 24 | import io.streamthoughts.kafka.clients.producer.callback.ProducerSendCallback.CloseOnErrorProducerSendCallback 25 | import org.apache.kafka.clients.producer.ProducerRecord 26 | import org.apache.kafka.clients.producer.RecordMetadata 27 | import org.slf4j.Logger 28 | import java.time.Duration 29 | 30 | typealias OnSendErrorCallback = (container: ProducerContainer, record: ProducerRecord, error: Exception) -> Unit 31 | typealias OnSendSuccessCallback = (container: ProducerContainer, record: ProducerRecord, metadata: RecordMetadata) -> Unit 32 | 33 | /** 34 | * Creates a new a [CloseOnErrorProducerSendCallback] 35 | */ 36 | fun closeOnErrorProducerSendCallback(then: OnSendErrorCallback? = null): OnSendErrorCallback = 37 | CloseOnErrorProducerSendCallback(then)::onSendError 38 | 39 | interface ProducerSendCallback { 40 | 41 | /** 42 | * This method is invoked when an error happen while sending a record. 43 | */ 44 | fun onSendError(container: ProducerContainer, record: ProducerRecord, error: Exception) {} 45 | 46 | /** 47 | * This method is invoked when a record has been sent successfully. 48 | */ 49 | fun onSendSuccess(container: ProducerContainer, record: ProducerRecord, metadata: RecordMetadata) {} 50 | 51 | /** 52 | * This [ProducerSendCallback] closes the [ProducerContainer] on the first exception that happens while 53 | * sending a record (i.e on the the first callback). 
54 | * 55 | * Scenario : 56 | * 57 | * If leaders are unreachable, then record batches will expire after 58 | * [org.apache.kafka.clients.producer.ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG] for all partitions for which 59 | * there is no in-flight request (retries are only applied for in-flight requests that fail) 60 | * 61 | * This may eventually lead to records re-ordering if the application continue to send new records even after 62 | * the [org.apache.kafka.clients.producer.Producer] expired buffered batches. 63 | * For this reason the producer client should be closed on the first callback. 64 | * 65 | * @constructor Creates a new [CloseOnErrorProducerSendCallback] that will optionally invoke the given [callback] 66 | * on each record and after closing the container. 67 | */ 68 | class CloseOnErrorProducerSendCallback( 69 | private val callback : OnSendErrorCallback? = null 70 | ) : ProducerSendCallback { 71 | 72 | companion object { 73 | val Log: Logger = loggerFor(CloseOnErrorProducerSendCallback::class.java) 74 | } 75 | 76 | override fun onSendError(container: ProducerContainer, record: ProducerRecord, error: Exception) { 77 | val currentState = container.state() 78 | if (currentState != State.CLOSED && currentState != State.PENDING_SHUTDOWN) { 79 | Log.error("Closing producer due to an error while sending record to topic: ${record.topic()}", error) 80 | container.execute { producer -> { 81 | producer.close(Duration.ZERO) 82 | } } 83 | } 84 | callback?.invoke(container, record, error) 85 | } 86 | } 87 | } -------------------------------------------------------------------------------- /clients/src/test/kotlin/io/streamthoughts/kafka/clients/KafkaClientConfigsTest.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. 
See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | package io.streamthoughts.kafka.clients 20 | 21 | import org.apache.kafka.clients.consumer.ConsumerConfig 22 | import org.junit.jupiter.api.Assertions 23 | import org.junit.jupiter.api.Test 24 | import org.junit.jupiter.api.TestInstance 25 | 26 | @TestInstance(TestInstance.Lifecycle.PER_CLASS) 27 | internal class KafkaClientConfigsTest { 28 | 29 | private val kafka = Kafka(bootstrapServers = arrayOf("dummy:1234")) 30 | 31 | @Test 32 | fun should_return_kafka_consumer_config_as_map() { 33 | val configs = KafkaClientConfigs(kafka = kafka).clientId("dummy-id") 34 | Assertions.assertEquals("dummy:1234", configs[ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG] as String) 35 | Assertions.assertEquals("dummy-id", configs[ConsumerConfig.CLIENT_ID_CONFIG] as String) 36 | } 37 | 38 | @Test 39 | fun should_load_client_config_given_props_file() { 40 | val configs: KafkaClientConfigs = loadClientConfigs(configsInputStream()) 41 | Assertions.assertEquals("localhost:9092", configs[ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG] as String) 42 | Assertions.assertEquals("client-test-id", configs[ConsumerConfig.CLIENT_ID_CONFIG] as String) 43 | } 44 | 45 | private fun configsInputStream() = this.javaClass.classLoader 46 | .getResourceAsStream("test-configs.properties") 47 
| ?:throw IllegalArgumentException( "Cannot load properties") 48 | 49 | } -------------------------------------------------------------------------------- /clients/src/test/kotlin/io/streamthoughts/kafka/clients/consumer/KafkaConsumerConfigsTest.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients.consumer 20 | 21 | import org.apache.kafka.clients.consumer.ConsumerConfig 22 | import org.apache.kafka.common.serialization.StringDeserializer 23 | import org.junit.jupiter.api.Assertions.assertEquals 24 | import org.junit.jupiter.api.Test 25 | import org.junit.jupiter.api.TestInstance 26 | 27 | @TestInstance(TestInstance.Lifecycle.PER_CLASS) 28 | class KafkaConsumerConfigsTest { 29 | 30 | @Test 31 | fun should_return_valid_kafka_consumer_config() { 32 | val configs: KafkaConsumerConfigs = consumerConfigsOf() 33 | .client { 34 | bootstrapServers("dummy:1234") 35 | clientId("test-id") 36 | } 37 | .groupId("test-group") 38 | .keyDeserializer(StringDeserializer::class.java.name) 39 | .valueDeserializer(StringDeserializer::class.java.name) 40 | 41 | assertEquals("dummy:1234", configs[ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG]) 42 | assertEquals("test-id", configs[ConsumerConfig.CLIENT_ID_CONFIG]) 43 | assertEquals("test-group", configs[ConsumerConfig.GROUP_ID_CONFIG]) 44 | assertEquals(StringDeserializer::class.java.name, configs[ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG]) 45 | assertEquals(StringDeserializer::class.java.name, configs[ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG]) 46 | } 47 | 48 | @Test 49 | fun should_load_consumer_config_given_props_file() { 50 | val configs: KafkaConsumerConfigs = loadConsumerConfigs(configsInputStream()) 51 | assertEquals("localhost:9092", configs[ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG] as String) 52 | assertEquals("client-test-id", configs[ConsumerConfig.CLIENT_ID_CONFIG] as String) 53 | } 54 | 55 | 56 | private fun configsInputStream() = this.javaClass.classLoader 57 | .getResourceAsStream("test-configs.properties") 58 | ?:throw IllegalArgumentException( "Cannot load properties") 59 | 60 | } -------------------------------------------------------------------------------- /clients/src/test/kotlin/io/streamthoughts/kafka/clients/consumer/KafkaConsumerTaskTest.kt: 
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.streamthoughts.kafka.clients.consumer

import io.streamthoughts.kafka.clients.consumer.error.ConsumedErrorHandler
import io.streamthoughts.kafka.clients.consumer.error.closeTaskOnConsumedError
import io.streamthoughts.kafka.clients.consumer.error.serialization.logAndFailOnInvalidRecord
import io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener
import io.streamthoughts.kafka.clients.loggerFor
import io.streamthoughts.kafka.clients.producer.Acks
import io.streamthoughts.kafka.clients.producer.KafkaProducerConfigs
import io.streamthoughts.kafka.tests.TestingEmbeddedKafka
import io.streamthoughts.kafka.tests.junit.EmbeddedSingleNodeKafkaCluster
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.ConsumerRecords
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.errors.TopicExistsException
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.StringSerializer
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import org.junit.jupiter.api.extension.ExtendWith
import org.slf4j.Logger
import java.time.Duration
import java.util.*

/**
 * Integration tests for [KafkaConsumerTask] error handling, run against an
 * embedded single-node Kafka cluster injected by [EmbeddedSingleNodeKafkaCluster].
 *
 * NOTE(review): generic type arguments in this extracted chunk appear to have
 * been stripped (e.g. `ConsumerRecords`, `List>`); restore them from version
 * control before compiling — code tokens are kept as found here.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@ExtendWith(EmbeddedSingleNodeKafkaCluster::class)
class KafkaConsumerTaskTest(private val cluster: TestingEmbeddedKafka) {

    companion object {
        private val Log: Logger = loggerFor(KafkaConsumerTaskTest::class.java)
    }

    // Listener that always throws — drives the error-handling paths under test.
    private val failingListener = object : ConsumerBatchRecordsListener {
        override fun handle(consumerTask: ConsumerTask, records: ConsumerRecords) {
            throw RuntimeException("Failing consumer for testing purpose")
        }
    }

    // Topic created before and deleted after every test.
    private val testTopic = "test-topic"

    private val subscription: TopicSubscription = getTopicSubscription(testTopic)

    // Shared base configs; the group id is randomized per consumer in createConsumerFor.
    private lateinit var configs: KafkaConsumerConfigs


    @BeforeAll
    fun setUp() {
        configs = emptyConsumerConfigs()
            .client { bootstrapServers(cluster.bootstrapServers()) }
            .autoOffsetReset(AutoOffsetReset.Earliest)
            .pollRecordsMs(10000)
    }

    @BeforeEach
    fun createTopics() {
        // Deletion from a previous test may still be in flight on the broker, so
        // retry until createTopic stops throwing TopicExistsException.
        val retryOnTopicExistsException = fun (topic: String) {
            while (true) {
                try {
                    cluster.createTopic(topic)
                    break
                } catch (e: TopicExistsException) {
                    Log.warn("Cannot create $topic due to TopicExistsException. Ignore error and retry")
                }
            }
        }
        retryOnTopicExistsException(testTopic)
    }

    @AfterEach
    fun deleteTopics() {
        cluster.deleteTopics(testTopic)
    }

    @Test
    fun should_invoke_handler_when_error_is_thrown_during_processing() {
        produceSingleRecord()

        val captureHandler = CaptureErrorHandler()
        val consumer = createConsumerFor(
            listener = failingListener,
            consumedErrorHandler = captureHandler
        )
        runBlocking {
            // NOTE(review): GlobalScope.launch out-lives runBlocking; the task is
            // stopped via consumer.close() in the finally block instead.
            GlobalScope.launch { consumer.run() }
            try {
                captureHandler.assertThatEventuallyCapture("Fail to capture processing error before timeout")
            } finally {
                consumer.close()
            }
        }
    }

    @Test
    fun should_close_task_on_consumed_error() {
        produceSingleRecord()

        val consumer = createConsumerFor(
            listener = failingListener,
            consumedErrorHandler = closeTaskOnConsumedError()
        )
        runBlocking {
            val job = GlobalScope.launch { consumer.run() }
            try {
                // closeTaskOnConsumedError is expected to terminate the task, which ends the job.
                job.join()
            } finally {
                Assertions.assertTrue(consumer.state() == ConsumerTask.State.SHUTDOWN)
            }
        }
    }

    // Sends one record to testTopic and blocks until the broker acknowledges it.
    private fun produceSingleRecord() {
        cluster.producerClient(
            KafkaProducerConfigs()
                .acks(Acks.Leader)
                .keySerializer(StringSerializer::class.java.name)
                .valueSerializer(StringSerializer::class.java.name)
        ).use {
            Assertions.assertNotNull(
                it.send(ProducerRecord(testTopic, 0, "test-key", "test-value")).get()
            )
        }
    }

    // Builds a KafkaConsumerTask with a unique group id so each test replays the
    // topic from the earliest offset independently of the others.
    private fun createConsumerFor(listener: ConsumerBatchRecordsListener,
                                  consumedErrorHandler: ConsumedErrorHandler = closeTaskOnConsumedError()
    ): KafkaConsumerTask {
        return KafkaConsumerTask(
            consumerFactory = ConsumerFactory.DefaultConsumerFactory,
            consumerConfigs = configs.groupId("test-group-${UUID.randomUUID()}"),
            subscription = subscription,
            keyDeserializer = StringDeserializer(),
            valueDeserializer = StringDeserializer(),
            listener = listener,
            clientId = "test-client",
            deserializationErrorHandler = logAndFailOnInvalidRecord(),
            consumedErrorHandler = consumedErrorHandler
        )
    }

    // Pair of the failed batch and the exception that aborted its processing.
    data class CaptureError(val records: List>, val thrownException: Exception)

    private class CaptureErrorHandler: ConsumedErrorHandler {

        // Written from the consumer coroutine, read from the test thread.
        @Volatile
        var error: CaptureError? = null

        override fun handle(
            consumerTask: ConsumerTask,
            records: List>,
            thrownException: Exception
        ) {
            thrownException.printStackTrace()
            error = CaptureError(records, thrownException)
        }

        // Waits until an error is captured or the timeout elapses.
        // NOTE(review): this loop busy-spins a CPU core; a short sleep per
        // iteration would be kinder — left unchanged here.
        fun assertThatEventuallyCapture(message: String,
                                        timeout: Duration = Duration.ofSeconds(30)) {
            val begin = System.currentTimeMillis()
            while (System.currentTimeMillis() - begin < timeout.toMillis()) {
                if (error != null) {
                    return
                }
            }
            Assertions.fail(message)
        }
    }
}
-------------------------------------------------------------------------------- /clients/src/test/kotlin/io/streamthoughts/kafka/clients/producer/KafkaProducerConfigsTest.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. 
You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */
package io.streamthoughts.kafka.clients.producer

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.StringSerializer
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test

/**
 * Tests for building [KafkaProducerConfigs] through the Kotlin DSL and for
 * loading them from a properties file on the test classpath.
 */
class KafkaProducerConfigsTest {

    @Test
    fun should_return_valid_kafka_producer_config() {

        val configs: KafkaProducerConfigs = producerConfigsOf()
            .client {
                bootstrapServers("dummy:1234")
                clientId("test-id")
            }
            .acks(Acks.Leader)
            .keySerializer(StringSerializer::class.java.name)
            .valueSerializer(StringSerializer::class.java.name)

        assertEquals("dummy:1234", configs[ProducerConfig.BOOTSTRAP_SERVERS_CONFIG])
        assertEquals("test-id", configs[ProducerConfig.CLIENT_ID_CONFIG])
        assertEquals(Acks.Leader, configs[ProducerConfig.ACKS_CONFIG])
        assertEquals(StringSerializer::class.java.name, configs[ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG])
        assertEquals(StringSerializer::class.java.name, configs[ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG])
    }

    @Test
    fun should_load_producer_config_given_props_file() {
        val configs: KafkaProducerConfigs = loadProducerConfigs(configsInputStream())
        // FIX: assert against ProducerConfig keys — this is a producer test; the
        // previous ConsumerConfig constants only passed because the underlying
        // string keys happen to be identical.
        assertEquals("localhost:9092", configs[ProducerConfig.BOOTSTRAP_SERVERS_CONFIG] as String)
        assertEquals("client-test-id", configs[ProducerConfig.CLIENT_ID_CONFIG] as String)
    }

    // Opens the test properties resource or fails the test setup loudly.
    // (ConsumerConfig import intentionally retained; file-level imports are not removed.)
    private fun configsInputStream() = this.javaClass.classLoader
        .getResourceAsStream("test-configs.properties")
        ?: throw IllegalArgumentException("Cannot load properties")
}
-------------------------------------------------------------------------------- /clients/src/test/kotlin/io/streamthoughts/kafka/clients/producer/KafkaProducerContainerTest.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.clients.producer 20 | 21 | import io.streamthoughts.kafka.clients.loggerFor 22 | import io.streamthoughts.kafka.tests.TestingEmbeddedKafka 23 | import io.streamthoughts.kafka.tests.junit.EmbeddedSingleNodeKafkaCluster 24 | import org.apache.kafka.clients.consumer.ConsumerConfig 25 | import org.apache.kafka.clients.consumer.ConsumerRecord 26 | import org.apache.kafka.common.errors.TopicExistsException 27 | import org.apache.kafka.common.serialization.StringDeserializer 28 | import org.apache.kafka.common.serialization.StringSerializer 29 | import org.junit.jupiter.api.AfterAll 30 | import org.junit.jupiter.api.AfterEach 31 | import org.junit.jupiter.api.Assertions 32 | import org.junit.jupiter.api.BeforeAll 33 | import org.junit.jupiter.api.BeforeEach 34 | import org.junit.jupiter.api.Test 35 | import org.junit.jupiter.api.TestInstance 36 | import org.junit.jupiter.api.extension.ExtendWith 37 | import org.slf4j.Logger 38 | import java.time.Duration 39 | import java.util.Properties 40 | 41 | @TestInstance(TestInstance.Lifecycle.PER_CLASS) 42 | @ExtendWith(EmbeddedSingleNodeKafkaCluster::class) 43 | class KafkaProducerContainerTest(private val cluster: TestingEmbeddedKafka) { 44 | 45 | companion object { 46 | private val Log: Logger = loggerFor(KafkaProducerContainerTest::class.java) 47 | 48 | const val DEFAULT_TOPIC = "default-topic" 49 | const val TEST_TOPIC = "test-topic" 50 | } 51 | 52 | private lateinit var configs: KafkaProducerConfigs 53 | 54 | private lateinit var container : ProducerContainer 55 | 56 | @BeforeAll 57 | fun setUp() { 58 | configs = producerConfigsOf().client { bootstrapServers(cluster.bootstrapServers()) } 59 | createAndInitContainer() 60 | } 61 | 62 | @AfterAll 63 | fun tearDown() { 64 | container.close() 65 | } 66 | 67 | @BeforeEach 68 | fun createTopics() { 69 | val retryOnTopicExistsException = fun (topic: String) { 70 | while (true) { 71 | try { 72 | cluster.createTopic(topic) 73 | break 74 | } 
catch (e: TopicExistsException) { 75 | Log.warn("Cannot create $topic due to TopicExistsException. Ignore error and retry") 76 | } 77 | } 78 | } 79 | retryOnTopicExistsException(DEFAULT_TOPIC) 80 | retryOnTopicExistsException(TEST_TOPIC) 81 | } 82 | 83 | 84 | 85 | @AfterEach 86 | fun deleteTopics() { 87 | cluster.deleteTopics(DEFAULT_TOPIC, TEST_TOPIC) 88 | } 89 | 90 | private fun createAndInitContainer() { 91 | container = KafkaProducerContainer.Builder(configs) 92 | .keySerializer(StringSerializer()) 93 | .valueSerializer(StringSerializer()) 94 | .defaultTopic(DEFAULT_TOPIC) 95 | .build() 96 | container.init() 97 | } 98 | 99 | @Test 100 | fun should_produce_record_to_specific_topic_given_single_value() { 101 | container.send(value = "test-value", topic = TEST_TOPIC) 102 | container.flush() 103 | val records = consumeRecords(TEST_TOPIC) 104 | Assertions.assertEquals("test-value", records[0].value()) 105 | } 106 | 107 | @Test 108 | fun should_produce_record_to_default_topic_given_single_value() { 109 | container.send(value = "test-value") 110 | container.flush() 111 | val records = consumeRecords(DEFAULT_TOPIC) 112 | Assertions.assertEquals("test-value", records[0].value()) 113 | } 114 | 115 | @Test 116 | fun should_produce_record_to_specific_topic_given_key_value() { 117 | container.send(key = "test-key", value = "test-value", topic = TEST_TOPIC) 118 | container.flush() 119 | val records = consumeRecords(TEST_TOPIC) 120 | Assertions.assertEquals("test-value", records[0].value()) 121 | Assertions.assertEquals("test-key", records[0].key()) 122 | } 123 | 124 | 125 | private fun consumeRecords(topic: String, 126 | timeout: Duration = Duration.ofMinutes(1), 127 | expectedNumRecords: Int = 1): List> { 128 | val configs = Properties() 129 | configs.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers().joinToString()) 130 | val records = cluster.consumeUntilMinRecordsOrTimeout( 131 | topic = topic, 132 | timeout = Duration.ofMinutes(1), 133 | 
expectedNumRecords = expectedNumRecords, 134 | keyDeserializer = StringDeserializer(), 135 | valueDeserializer = StringDeserializer() 136 | ) 137 | Assertions.assertTrue( 138 | records.size >= expectedNumRecords, 139 | "Did not receive all $expectedNumRecords records from topic $topic within ${timeout.toMillis()} ms" 140 | ) 141 | return records 142 | 143 | } 144 | } -------------------------------------------------------------------------------- /clients/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /clients/src/test/resources/test-configs.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 2 | client.id=client-test-id -------------------------------------------------------------------------------- /examples/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | kafka-clients-kotlin-reactor 7 | io.streamthoughts 8 | 0.2.0 9 | 10 | 4.0.0 11 | 12 | Kafka Clients for Kotlin Examples 13 | kafka-clients-kotlin-example 14 | 15 | 16 | 17 | io.streamthoughts 18 | kafka-clients-kotlin 19 | ${project.version} 20 | compile 21 | 22 | 23 | org.apache.kafka 24 | kafka-clients 25 | compile 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerClientExample.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. 
See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */
package io.streamthoughts.kafka.client.examples

import io.streamthoughts.kafka.clients.consumer.KafkaConsumerConfigs
import io.streamthoughts.kafka.clients.consumer.loadConsumerConfigs
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.serialization.StringDeserializer
import java.time.Duration
import kotlin.system.exitProcess

/**
 * Example: a plain KafkaConsumer configured from a properties file, polling a
 * single topic forever. Usage: `<configFile> <topic>`.
 */
fun main(args: Array<String>) {

    if (args.size != 2) {
        println("Missing required command line arguments: configFile topic")
        exitProcess(1)
    }

    val (config, topic) = args

    // Load properties from file and customize Consumer config.
    val configs: KafkaConsumerConfigs = loadConsumerConfigs(config)
        .keyDeserializer(StringDeserializer::class.java.name)
        .valueDeserializer(StringDeserializer::class.java.name)

    // FIX: type arguments reconstructed (<String, String>) — stripped by text
    // extraction; both deserializers above are StringDeserializer.
    val consumer = KafkaConsumer<String, String>(configs)

    consumer.use {
        consumer.subscribe(listOf(topic))
        while (true) {
            consumer
                .poll(Duration.ofMillis(500))
                .forEach { record ->
                    println(
                        "Received record with key ${record.key()} " +
                                "and value ${record.value()} from topic ${record.topic()} and partition ${record.partition()}"
                    )
                }
        }
    }
}
-------------------------------------------------------------------------------- /examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerKotlinDSLExample.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.client.examples 20 | 21 | import io.streamthoughts.kafka.clients.consumer.AutoOffsetReset 22 | import io.streamthoughts.kafka.clients.consumer.ConsumerTask 23 | import io.streamthoughts.kafka.clients.consumer.ConsumerWorker 24 | import io.streamthoughts.kafka.clients.consumer.error.closeTaskOnConsumedError 25 | import io.streamthoughts.kafka.clients.consumer.error.serialization.replaceWithNullOnInvalidRecord 26 | import io.streamthoughts.kafka.clients.consumer.listener.forEach 27 | import io.streamthoughts.kafka.clients.kafka 28 | import kotlinx.coroutines.delay 29 | import kotlinx.coroutines.runBlocking 30 | import org.apache.kafka.clients.consumer.Consumer 31 | import org.apache.kafka.common.serialization.Deserializer 32 | import org.apache.kafka.common.serialization.StringDeserializer 33 | import java.time.Duration 34 | 35 | fun main(args: Array) { 36 | 37 | val consumerWorker: ConsumerWorker = kafka("localhost:9092") { 38 | client { 39 | clientId("my-client") 40 | } 41 | 42 | val stringDeserializer: Deserializer = StringDeserializer() 43 | consumer("my-group", stringDeserializer, stringDeserializer) { 44 | configure { 45 | pollRecordsMs(500) 46 | maxPollRecords(1000) 47 | autoOffsetReset(AutoOffsetReset.Earliest) 48 | } 49 | 50 | onDeserializationError(replaceWithNullOnInvalidRecord()) 51 | 52 | onConsumedError(closeTaskOnConsumedError()) 53 | 54 | onPartitionsAssigned { _: Consumer<*, *>, partitions -> 55 | println("Partitions assigned: $partitions") 56 | } 57 | 58 | onPartitionsRevokedAfterCommit { _: Consumer<*, *>, partitions -> 59 | println("Partitions revoked: $partitions") 60 | } 61 | 62 | onConsumed(forEach { _: ConsumerTask, value: String? 
-> 63 | println("consumed record-value: $value") 64 | }) 65 | } 66 | } 67 | 68 | consumerWorker.use { 69 | consumerWorker.start("demo-topic", maxParallelHint = 4) 70 | runBlocking { 71 | println("All consumers started, waiting one minute before stopping") 72 | delay(Duration.ofMinutes(1).toMillis()) 73 | } 74 | } 75 | } -------------------------------------------------------------------------------- /examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerClientExample.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.client.examples 20 | 21 | import io.streamthoughts.kafka.clients.producer.Acks 22 | import io.streamthoughts.kafka.clients.producer.KafkaProducerConfigs 23 | import io.streamthoughts.kafka.clients.producer.loadProducerConfigs 24 | import org.apache.kafka.clients.producer.KafkaProducer 25 | import org.apache.kafka.clients.producer.ProducerRecord 26 | import org.apache.kafka.clients.producer.RecordMetadata 27 | import org.apache.kafka.common.serialization.StringSerializer 28 | import kotlin.system.exitProcess 29 | 30 | fun main(args: Array) { 31 | 32 | if (args.size != 2) { 33 | println("Missing required command line arguments: configFile topic") 34 | exitProcess(1) 35 | } 36 | 37 | val (config, topic) = args 38 | 39 | // Load properties from file and customize Producer config. 40 | val configs: KafkaProducerConfigs = loadProducerConfigs(config) 41 | .acks(Acks.Leader) 42 | .keySerializer(StringSerializer::class.java.name) 43 | .valueSerializer(StringSerializer::class.java.name) 44 | 45 | val producer = KafkaProducer(configs) 46 | 47 | val messages = listOf("I ❤️ Logs", "Making Sense of Stream Processing", "Apache Kafka") 48 | producer.use { 49 | messages.forEach {value -> 50 | val record = ProducerRecord(topic, value) 51 | producer.send(record) { m: RecordMetadata, e: Exception? -> 52 | when (e) { 53 | null -> println("Record was successfully sent (topic=${m.topic()}, partition=${m.partition()}, offset= ${m.offset()})") 54 | else -> e.printStackTrace() 55 | } 56 | } 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerKotlinDSLExample.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. 
See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */
package io.streamthoughts.kafka.client.examples

import io.streamthoughts.kafka.clients.kafka
import io.streamthoughts.kafka.clients.producer.Acks
import io.streamthoughts.kafka.clients.producer.ProducerContainer
import io.streamthoughts.kafka.clients.producer.callback.closeOnErrorProducerSendCallback
import org.apache.kafka.common.serialization.StringSerializer

/**
 * Example: building and using a [ProducerContainer] through the Kotlin DSL.
 */
fun main(args: Array<String>) {

    // FIX: type arguments reconstructed (<String, String>) — stripped by text
    // extraction; both serializers below are StringSerializer. TODO confirm
    // against the repository source.
    val producer: ProducerContainer<String, String> = kafka("localhost:9092") {
        client {
            clientId("my-client")
        }

        producer {
            configure {
                acks(Acks.InSyncReplicas)
            }
            keySerializer(StringSerializer())
            valueSerializer(StringSerializer())

            defaultTopic("demo-topic")

            // Any send failure closes the underlying producer.
            onSendError(closeOnErrorProducerSendCallback())

            onSendSuccess { _, _, metadata ->
                println("Record was sent successfully: topic=${metadata.topic()}, partition=${metadata.partition()}, offset=${metadata.offset()} ")
            }
        }
    }

    val messages = listOf("I ❤️ Logs", "Making Sense of Stream Processing", "Apache Kafka")
    producer.use {
        producer.init() // create internal KafkaProducer and eventually call initTransaction if transactional.id is set
        messages.forEach {
            producer.send(value = it)
        }
    }
}
--------------------------------------------------------------------------------
/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/TxProducerContainerExample.kt:
--------------------------------------------------------------------------------
/*
 * Copyright 2020 StreamThoughts.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.streamthoughts.kafka.client.examples

import io.streamthoughts.kafka.clients.producer.Acks
import io.streamthoughts.kafka.clients.producer.KafkaProducerConfigs
import io.streamthoughts.kafka.clients.producer.KafkaProducerContainer
import io.streamthoughts.kafka.clients.producer.loadProducerConfigs
import org.apache.kafka.common.serialization.StringSerializer
import kotlin.system.exitProcess


/**
 * Example: a transactional, idempotent producer container sending a batch of
 * records inside a single Kafka transaction. Usage: `<configFile> <topic>`.
 */
fun main(args: Array<String>) {

    if (args.size != 2) {
        println("Missing required command line arguments: configFile topic")
        exitProcess(1)
    }

    val (config, topic) = args


    // Load properties from file and customize Producer config.
    val configs: KafkaProducerConfigs = loadProducerConfigs(config)
        .acks(Acks.Leader)
        .keySerializer(StringSerializer::class.java.name)
        .valueSerializer(StringSerializer::class.java.name)

    val producer = KafkaProducerContainer.Builder(configs)
        .defaultTopic(topic)
        .configure {
            transactionalId("my-tx-id")
            enableIdempotence(true)
        }
        .onSendSuccess { _, _, m ->
            println("Record was successfully sent (topic=${m.topic()}, partition=${m.partition()}, offset= ${m.offset()})")
        }
        .onSendError { _, _, e ->
            e.printStackTrace()
        }
        .build()

    val messages = listOf("I ❤️ Logs", "Making Sense of Stream Processing", "Apache Kafka")

    producer.use {
        producer.init()
        // All sends below are committed atomically within one transaction.
        producer.runTx {
            messages.forEach { producer.send(value = it) }
        }
    }
}
-------------------------------------------------------------------------------- /mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 
19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ] ; then 38 | 39 | if [ -f /etc/mavenrc ] ; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ] ; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 
50 | cygwin=false; 51 | darwin=false; 52 | mingw=false 53 | case "`uname`" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true;; 56 | Darwin*) darwin=true 57 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 58 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 59 | if [ -z "$JAVA_HOME" ]; then 60 | if [ -x "/usr/libexec/java_home" ]; then 61 | export JAVA_HOME="`/usr/libexec/java_home`" 62 | else 63 | export JAVA_HOME="/Library/Java/Home" 64 | fi 65 | fi 66 | ;; 67 | esac 68 | 69 | if [ -z "$JAVA_HOME" ] ; then 70 | if [ -r /etc/gentoo-release ] ; then 71 | JAVA_HOME=`java-config --jre-home` 72 | fi 73 | fi 74 | 75 | if [ -z "$M2_HOME" ] ; then 76 | ## resolve links - $0 may be a link to maven's home 77 | PRG="$0" 78 | 79 | # need this for relative symlinks 80 | while [ -h "$PRG" ] ; do 81 | ls=`ls -ld "$PRG"` 82 | link=`expr "$ls" : '.*-> \(.*\)$'` 83 | if expr "$link" : '/.*' > /dev/null; then 84 | PRG="$link" 85 | else 86 | PRG="`dirname "$PRG"`/$link" 87 | fi 88 | done 89 | 90 | saveddir=`pwd` 91 | 92 | M2_HOME=`dirname "$PRG"`/.. 93 | 94 | # make it fully qualified 95 | M2_HOME=`cd "$M2_HOME" && pwd` 96 | 97 | cd "$saveddir" 98 | # echo Using m2 at $M2_HOME 99 | fi 100 | 101 | # For Cygwin, ensure paths are in UNIX format before anything is touched 102 | if $cygwin ; then 103 | [ -n "$M2_HOME" ] && 104 | M2_HOME=`cygpath --unix "$M2_HOME"` 105 | [ -n "$JAVA_HOME" ] && 106 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 107 | [ -n "$CLASSPATH" ] && 108 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"` 109 | fi 110 | 111 | # For Mingw, ensure paths are in UNIX format before anything is touched 112 | if $mingw ; then 113 | [ -n "$M2_HOME" ] && 114 | M2_HOME="`(cd "$M2_HOME"; pwd)`" 115 | [ -n "$JAVA_HOME" ] && 116 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" 117 | fi 118 | 119 | if [ -z "$JAVA_HOME" ]; then 120 | javaExecutable="`which javac`" 121 | if [ -n "$javaExecutable" ] && ! 
[ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then 122 | # readlink(1) is not available as standard on Solaris 10. 123 | readLink=`which readlink` 124 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then 125 | if $darwin ; then 126 | javaHome="`dirname \"$javaExecutable\"`" 127 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" 128 | else 129 | javaExecutable="`readlink -f \"$javaExecutable\"`" 130 | fi 131 | javaHome="`dirname \"$javaExecutable\"`" 132 | javaHome=`expr "$javaHome" : '\(.*\)/bin'` 133 | JAVA_HOME="$javaHome" 134 | export JAVA_HOME 135 | fi 136 | fi 137 | fi 138 | 139 | if [ -z "$JAVACMD" ] ; then 140 | if [ -n "$JAVA_HOME" ] ; then 141 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 142 | # IBM's JDK on AIX uses strange locations for the executables 143 | JAVACMD="$JAVA_HOME/jre/sh/java" 144 | else 145 | JAVACMD="$JAVA_HOME/bin/java" 146 | fi 147 | else 148 | JAVACMD="`which java`" 149 | fi 150 | fi 151 | 152 | if [ ! -x "$JAVACMD" ] ; then 153 | echo "Error: JAVA_HOME is not defined correctly." >&2 154 | echo " We cannot execute $JAVACMD" >&2 155 | exit 1 156 | fi 157 | 158 | if [ -z "$JAVA_HOME" ] ; then 159 | echo "Warning: JAVA_HOME environment variable is not set." 
160 | fi 161 | 162 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 163 | 164 | # traverses directory structure from process work directory to filesystem root 165 | # first directory with .mvn subdirectory is considered project base directory 166 | find_maven_basedir() { 167 | 168 | if [ -z "$1" ] 169 | then 170 | echo "Path not specified to find_maven_basedir" 171 | return 1 172 | fi 173 | 174 | basedir="$1" 175 | wdir="$1" 176 | while [ "$wdir" != '/' ] ; do 177 | if [ -d "$wdir"/.mvn ] ; then 178 | basedir=$wdir 179 | break 180 | fi 181 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 182 | if [ -d "${wdir}" ]; then 183 | wdir=`cd "$wdir/.."; pwd` 184 | fi 185 | # end of workaround 186 | done 187 | echo "${basedir}" 188 | } 189 | 190 | # concatenates all lines of a file 191 | concat_lines() { 192 | if [ -f "$1" ]; then 193 | echo "$(tr -s '\n' ' ' < "$1")" 194 | fi 195 | } 196 | 197 | BASE_DIR=`find_maven_basedir "$(pwd)"` 198 | if [ -z "$BASE_DIR" ]; then 199 | exit 1; 200 | fi 201 | 202 | ########################################################################################## 203 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 204 | # This allows using the maven wrapper in projects that prohibit checking in binary data. 205 | ########################################################################################## 206 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then 207 | if [ "$MVNW_VERBOSE" = true ]; then 208 | echo "Found .mvn/wrapper/maven-wrapper.jar" 209 | fi 210 | else 211 | if [ "$MVNW_VERBOSE" = true ]; then 212 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 
213 | fi 214 | if [ -n "$MVNW_REPOURL" ]; then 215 | jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 216 | else 217 | jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 218 | fi 219 | while IFS="=" read key value; do 220 | case "$key" in (wrapperUrl) jarUrl="$value"; break ;; 221 | esac 222 | done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" 223 | if [ "$MVNW_VERBOSE" = true ]; then 224 | echo "Downloading from: $jarUrl" 225 | fi 226 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" 227 | if $cygwin; then 228 | wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` 229 | fi 230 | 231 | if command -v wget > /dev/null; then 232 | if [ "$MVNW_VERBOSE" = true ]; then 233 | echo "Found wget ... using wget" 234 | fi 235 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 236 | wget "$jarUrl" -O "$wrapperJarPath" 237 | else 238 | wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" 239 | fi 240 | elif command -v curl > /dev/null; then 241 | if [ "$MVNW_VERBOSE" = true ]; then 242 | echo "Found curl ... using curl" 243 | fi 244 | if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then 245 | curl -o "$wrapperJarPath" "$jarUrl" -f 246 | else 247 | curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f 248 | fi 249 | 250 | else 251 | if [ "$MVNW_VERBOSE" = true ]; then 252 | echo "Falling back to using Java to download" 253 | fi 254 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" 255 | # For Cygwin, switch paths to Windows format before running javac 256 | if $cygwin; then 257 | javaClass=`cygpath --path --windows "$javaClass"` 258 | fi 259 | if [ -e "$javaClass" ]; then 260 | if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 261 | if [ "$MVNW_VERBOSE" = true ]; then 262 | echo " - Compiling MavenWrapperDownloader.java ..." 
263 | fi 264 | # Compiling the Java class 265 | ("$JAVA_HOME/bin/javac" "$javaClass") 266 | fi 267 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 268 | # Running the downloader 269 | if [ "$MVNW_VERBOSE" = true ]; then 270 | echo " - Running MavenWrapperDownloader.java ..." 271 | fi 272 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") 273 | fi 274 | fi 275 | fi 276 | fi 277 | ########################################################################################## 278 | # End of extension 279 | ########################################################################################## 280 | 281 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 282 | if [ "$MVNW_VERBOSE" = true ]; then 283 | echo $MAVEN_PROJECTBASEDIR 284 | fi 285 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 286 | 287 | # For Cygwin, switch paths to Windows format before running java 288 | if $cygwin; then 289 | [ -n "$M2_HOME" ] && 290 | M2_HOME=`cygpath --path --windows "$M2_HOME"` 291 | [ -n "$JAVA_HOME" ] && 292 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` 293 | [ -n "$CLASSPATH" ] && 294 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"` 295 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 296 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` 297 | fi 298 | 299 | # Provide a "standardized" way to retrieve the CLI args that will 300 | # work with both Windows and non-Windows executions. 
301 | MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" 302 | export MAVEN_CMD_LINE_ARGS 303 | 304 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 305 | 306 | exec "$JAVACMD" \ 307 | $MAVEN_OPTS \ 308 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 309 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 310 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 311 | -------------------------------------------------------------------------------- /mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM http://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 
18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. 
>&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 124 | 125 | FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 
159 | set MAVEN_CMD_LINE_ARGS=%* 160 | 161 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 162 | if ERRORLEVEL 1 goto error 163 | goto end 164 | 165 | :error 166 | set ERROR_CODE=1 167 | 168 | :end 169 | @endlocal & set ERROR_CODE=%ERROR_CODE% 170 | 171 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 172 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 173 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 174 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 175 | :skipRcPost 176 | 177 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 178 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 179 | 180 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 181 | 182 | exit /B %ERROR_CODE% 183 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 4.0.0 5 | 6 | io.streamthoughts 7 | kafka-clients-kotlin-reactor 8 | 0.2.0 9 | 10 | tests 11 | clients 12 | examples 13 | 14 | pom 15 | 16 | Kafka Clients for Kotlin Reactor 17 | Kafka Clients for Kotlin 18 | 19 | 20 | 21 | streamthoughts 22 | http://streamthoughts.io 23 | 24 | 25 | 26 | 27 | streamthoughts 28 | http://streamthoughts.io 29 | 30 | 31 | https://github.com/streamthoughts/kafka-clients-kotlin 32 | 33 | 34 | 35 | The Apache Software License, Version 2.0 36 | http://www.apache.org/licenses/LICENSE-2.0.txt 37 | 38 | 39 | 40 | 41 | https://github.com/streamthoughts/kafka-clients-kotlin 42 | scm:git:git://github.com:streamthoughts/kafka-clients-kotlin.git 43 | scm:git:git@github.com:streamthoughts/kafka-clients-kotlin.git 44 | HEAD 45 | 46 | 47 | 48 | 49 | ossrh 50 | https://oss.sonatype.org/content/repositories/snapshots 51 | 52 | 53 | ossrh 54 | 
https://oss.sonatype.org/service/local/staging/deploy/maven2/ 55 | 56 | 57 | 58 | 59 | 60 | jcenter 61 | JCenter 62 | https://jcenter.bintray.com/ 63 | 64 | 65 | kotlin-eap 66 | https://dl.bintray.com/kotlin/kotlin-eap/ 67 | 68 | 69 | 70 | 71 | UTF-8 72 | true 73 | 1.4.10 74 | 1.4.0 75 | official 76 | 2.6.0 77 | 5.5.2 78 | 1.2.3 79 | 1.4.10 80 | 81 | 82 | 83 | 84 | ossrh 85 | 86 | 87 | 88 | org.apache.maven.plugins 89 | maven-gpg-plugin 90 | 1.5 91 | 92 | 93 | sign-artifacts 94 | verify 95 | 96 | sign 97 | 98 | 99 | 100 | 101 | 102 | org.sonatype.plugins 103 | nexus-staging-maven-plugin 104 | 1.6.8 105 | true 106 | 107 | ossrh 108 | https://oss.sonatype.org/ 109 | false 110 | 111 | 112 | 113 | org.apache.maven.plugins 114 | maven-source-plugin 115 | 3.2.1 116 | 117 | 118 | attach-sources 119 | 120 | jar-no-fork 121 | 122 | 123 | 124 | 125 | 126 | org.jetbrains.dokka 127 | dokka-maven-plugin 128 | ${dokka.version} 129 | 130 | 131 | prepare-package 132 | 133 | dokka 134 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | org.jetbrains.dokka 145 | kotlin-as-java-plugin 146 | ${dokka.version} 147 | 148 | 149 | 150 | 151 | 152 | org.apache.maven.plugins 153 | maven-jar-plugin 154 | 155 | 156 | javadoc-jar 157 | package 158 | 159 | jar 160 | 161 | 162 | javadoc 163 | ${project.basedir}/target/dokka 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | org.apache.kafka 176 | kafka-clients 177 | ${kafka.version} 178 | 179 | 180 | org.jetbrains.kotlin 181 | kotlin-stdlib 182 | ${kotlin.version} 183 | 184 | 185 | org.jetbrains.kotlin 186 | kotlin-stdlib-jdk8 187 | ${kotlin.version} 188 | 189 | 190 | org.jetbrains.kotlinx 191 | kotlinx-coroutines-core 192 | ${kotlinx.version} 193 | 194 | 195 | 196 | org.jetbrains.kotlin 197 | kotlin-test-junit 198 | ${kotlin.version} 199 | test 200 | 201 | 202 | 203 | org.junit.platform 204 | junit-platform-launcher 205 | 1.5.2 206 | test 207 | 208 | 209 | 210 | org.junit.jupiter 211 | junit-jupiter-engine 212 | ${junit.version} 213 
| test 214 | 215 | 216 | 217 | org.junit.vintage 218 | junit-vintage-engine 219 | ${junit.version} 220 | test 221 | 222 | 223 | 224 | org.slf4j 225 | slf4j-api 226 | 1.7.30 227 | 228 | 229 | ch.qos.logback 230 | logback-classic 231 | ${logback.version} 232 | 233 | 234 | ch.qos.logback 235 | logback-core 236 | ${logback.version} 237 | 238 | 239 | 240 | 241 | 242 | 243 | org.jetbrains.kotlin 244 | kotlin-stdlib 245 | ${kotlin.version} 246 | 247 | 248 | 249 | 250 | ${project.basedir}/src/main/kotlin 251 | ${project.basedir}/src/test/kotlin 252 | 253 | 254 | maven-surefire-plugin 255 | 3.0.0-M3 256 | 257 | 258 | maven-failsafe-plugin 259 | 3.0.0-M3 260 | 261 | 262 | org.jetbrains.kotlin 263 | kotlin-maven-plugin 264 | ${kotlin.version} 265 | 266 | 267 | compile 268 | compile 269 | 270 | compile 271 | 272 | 273 | 274 | test-compile 275 | test-compile 276 | 277 | test-compile 278 | 279 | 280 | 281 | 282 | 11 283 | 284 | 285 | 286 | 287 | 288 | -------------------------------------------------------------------------------- /tests/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | kafka-clients-kotlin-reactor 7 | io.streamthoughts 8 | 0.2.0 9 | 10 | 4.0.0 11 | 12 | kafka-clients-kotlin-tests 13 | 14 | Kafka Clients for Kotlin Tests 15 | Utility classes for testing Kafka Clients in Kotlin 16 | 17 | 18 | 5.1.0 19 | 20 | 21 | 22 | 23 | 24 | org.jetbrains.kotlin 25 | kotlin-stdlib 26 | 27 | 28 | org.jetbrains.kotlin 29 | kotlin-stdlib-jdk8 30 | 31 | 32 | org.jetbrains.kotlin 33 | kotlin-test-junit 34 | test 35 | 36 | 37 | 38 | 39 | 40 | org.apache.kafka 41 | kafka_2.13 42 | ${kafka.version} 43 | 44 | 45 | org.apache.kafka 46 | kafka_2.13 47 | test 48 | ${kafka.version} 49 | 50 | 51 | org.apache.kafka 52 | kafka-clients 53 | test 54 | ${kafka.version} 55 | 56 | 57 | 58 | 59 | org.apache.curator 60 | curator-test 61 | ${curator.version} 62 | 63 | 64 | 65 | 66 | org.junit.platform 67 | junit-platform-launcher 68 | test 69 | 
70 | 71 | org.junit.jupiter 72 | junit-jupiter-engine 73 | test 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | org.apache.maven.plugins 82 | maven-jar-plugin 83 | 3.2.0 84 | 85 | 86 | 87 | test-jar 88 | 89 | 90 | 91 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /tests/src/main/kotlin/io/streamthoughts/kafka/tests/TestingEmbeddedKafka.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | package io.streamthoughts.kafka.tests 20 | 21 | import kafka.server.KafkaConfig 22 | import kafka.server.KafkaServer 23 | import kafka.utils.TestUtils 24 | import org.apache.kafka.clients.admin.Admin 25 | import org.apache.kafka.clients.admin.AdminClient 26 | import org.apache.kafka.clients.admin.AdminClientConfig 27 | import org.apache.kafka.clients.admin.NewTopic 28 | import org.apache.kafka.clients.consumer.Consumer 29 | import org.apache.kafka.clients.consumer.ConsumerConfig 30 | import org.apache.kafka.clients.consumer.ConsumerRecord 31 | import org.apache.kafka.clients.consumer.KafkaConsumer 32 | import org.apache.kafka.clients.producer.KafkaProducer 33 | import org.apache.kafka.clients.producer.Producer 34 | import org.apache.kafka.clients.producer.ProducerConfig 35 | import org.apache.kafka.common.network.ListenerName 36 | import org.apache.kafka.common.security.auth.SecurityProtocol 37 | import org.apache.kafka.common.serialization.ByteArrayDeserializer 38 | import org.apache.kafka.common.serialization.Deserializer 39 | import org.apache.kafka.common.utils.SystemTime 40 | import org.slf4j.Logger 41 | import org.slf4j.LoggerFactory 42 | import java.io.IOException 43 | import java.nio.file.Files 44 | import java.nio.file.Paths 45 | import java.time.Duration 46 | import java.util.* 47 | import java.util.concurrent.ExecutionException 48 | import kotlin.collections.HashMap 49 | 50 | /** 51 | * Runs an in-memory, "embedded" instance of a Kafka broker. 
52 | */ 53 | class TestingEmbeddedKafka(config: Properties = Properties()) { 54 | 55 | companion object { 56 | val Log: Logger = LoggerFactory.getLogger(TestingEmbeddedKafka::class.java) 57 | 58 | private fun listTopicNames(adminClient: Admin): MutableSet { 59 | return try { 60 | adminClient.listTopics().names().get() 61 | } catch (e: Exception) { 62 | throw RuntimeException("Failed to get topic names", e) 63 | } 64 | } 65 | 66 | private fun waitForTrue(timeout: Duration, 67 | time: Long = System.currentTimeMillis(), 68 | action: () -> Boolean): Boolean { 69 | 70 | val timeoutMs = timeout.toMillis() 71 | var result = false 72 | while (System.currentTimeMillis() - time < timeoutMs && !result) { 73 | result = action() 74 | } 75 | return result 76 | } 77 | 78 | } 79 | 80 | private val config: MutableMap = HashMap(config) 81 | 82 | private lateinit var kafka: KafkaServer 83 | 84 | /** 85 | * @param securityProtocol the security protocol the returned broker list should use. 86 | * 87 | */ 88 | fun bootstrapServers(securityProtocol: SecurityProtocol? = null): Array { 89 | val port = if (securityProtocol == null) { 90 | val listenerName = kafka.config().advertisedListeners().apply(0).listenerName() 91 | kafka.boundPort(listenerName) 92 | } 93 | else { 94 | kafka.boundPort(ListenerName(securityProtocol.toString())) 95 | } 96 | return arrayOf("${kafka.config().hostName()}:$port") 97 | } 98 | 99 | /** 100 | * Creates and starts an embedded Kafka broker. 
101 | */ 102 | fun start(overrides: Map = emptyMap()) { 103 | config.putAll(overrides) 104 | config.putIfAbsent(KafkaConfig.LogDirProp(), "/tmp/kafka-logs") 105 | config.putIfAbsent(KafkaConfig.DeleteTopicEnableProp(), true) 106 | config.putIfAbsent(KafkaConfig.LogCleanerDedupeBufferSizeProp(), 2 * 1024 * 1024L) 107 | config.putIfAbsent(KafkaConfig.GroupMinSessionTimeoutMsProp(), 0) 108 | config.putIfAbsent(KafkaConfig.GroupInitialRebalanceDelayMsProp(), 0) 109 | config.putIfAbsent(KafkaConfig.OffsetsTopicReplicationFactorProp(), 1.toShort()) 110 | config.putIfAbsent(KafkaConfig.OffsetsTopicPartitionsProp(), 5) 111 | config.putIfAbsent(KafkaConfig.TransactionsTopicPartitionsProp(), 5) 112 | config.putIfAbsent(KafkaConfig.AutoCreateTopicsEnableProp(), true) 113 | val kafkaConfig = KafkaConfig(config, true) 114 | Log.debug( 115 | "Starting embedded Kafka broker (with log.dirs={} and ZK ensemble at {}) ...", 116 | logDir(), zookeeperConnect() 117 | ) 118 | kafka = TestUtils.createServer(kafkaConfig, SystemTime()) 119 | Log.debug( 120 | "Startup of embedded Kafka broker at {} completed (with ZK ensemble at {}) ...", 121 | bootstrapServers(), zookeeperConnect() 122 | ) 123 | } 124 | 125 | /** 126 | * Stops the embedded broker and cleanup local logs directory. 
127 | */ 128 | fun stop() { 129 | Log.debug( 130 | "Shutting down embedded Kafka broker at {} (with ZK ensemble at {}) ...", 131 | bootstrapServers(), zookeeperConnect() 132 | ) 133 | kafka.shutdown() 134 | kafka.awaitShutdown() 135 | clearLogsDir() 136 | Log.debug( 137 | "Shutdown of embedded Kafka broker at {} completed (with ZK ensemble at {}) ...", 138 | bootstrapServers(), zookeeperConnect() 139 | ) 140 | } 141 | 142 | private fun clearLogsDir() { 143 | Log.debug("Deleting logs.dir at {} ...", logDir()) 144 | Files.walk(Paths.get(logDir())) 145 | .sorted(Comparator.reverseOrder()) 146 | .forEach { 147 | try { 148 | Files.delete(it) 149 | } catch (e: IOException) { 150 | Log.error("Failed to delete entry in log dir {}", logDir(), e) 151 | } 152 | } 153 | } 154 | 155 | /** 156 | * Creates a Kafka [topic] with the given [partitions] number, [replication] factor and [config]. 157 | */ 158 | @JvmOverloads 159 | fun createTopic( 160 | topic: String, 161 | partitions: Int = 1, 162 | replication: Int = 1, 163 | config: Map? = emptyMap() 164 | ) { 165 | Log.debug( 166 | "Creating topic { name: {}, partitions: {}, replication: {}, config: {} }", 167 | topic, partitions, replication, config 168 | ) 169 | 170 | adminClient().use {client -> 171 | try { 172 | val newTopic = NewTopic(topic, partitions, replication.toShort()) 173 | newTopic.configs(config) 174 | client.createTopics(listOf(newTopic)).all().get() 175 | } catch (e : ExecutionException) { 176 | throw e.cause as Throwable 177 | } 178 | } 179 | } 180 | 181 | /** 182 | * @return the list of topics that exists on the embedded cluster. 183 | */ 184 | fun topics(): Set = adminClient().use { adminClient -> return listTopicNames(adminClient) } 185 | 186 | /** 187 | * Waits for all given [topicNames] to be present on the embedded cluster until [timeout]. 188 | * 189 | * @return {@code true} if all topics are present before reaching the timeout, {@code false} otherwise. 
190 | */ 191 | fun waitForTopicsToBePresent(vararg topicNames: String, 192 | timeout: Duration = Duration.ofSeconds(30)): Boolean { 193 | val now = System.currentTimeMillis() 194 | val required = mutableListOf(*topicNames) 195 | return adminClient().use { client -> 196 | waitForTrue(timeout, now) { 197 | listTopicNames(client).containsAll(required) 198 | } 199 | } 200 | } 201 | 202 | /** 203 | * Waits for all given [topicNames] to be absent on the embedded cluster until [timeout]. 204 | * 205 | * @return {@code true} if all topics are absent before reaching the timeout, {@code false} otherwise. 206 | */ 207 | fun waitForTopicsToBeAbsent(vararg topicNames: String, 208 | timeout: Duration = Duration.ofSeconds(30)): Boolean { 209 | return adminClient().use { 210 | doWaitForTopicsToBeAbsent(topics = arrayOf(*topicNames), until = timeout, adminClient = it) 211 | } 212 | } 213 | 214 | private fun doWaitForTopicsToBeAbsent( 215 | topics: Array, 216 | until: Duration = Duration.ofMillis(Long.MAX_VALUE), 217 | now : Long = System.currentTimeMillis(), 218 | adminClient: Admin): Boolean { 219 | val remaining: MutableList = mutableListOf(*topics) 220 | return waitForTrue(until, now) { 221 | val exists = listTopicNames(adminClient) 222 | remaining.retainAll(exists) 223 | remaining.isEmpty() 224 | } 225 | } 226 | 227 | /** 228 | * Creates a new admin client. 229 | * 230 | * @return a new [org.apache.kafka.clients.admin.AdminClient] instance. 231 | */ 232 | fun adminClient(): Admin { 233 | val config: Map = mutableMapOf( 234 | Pair(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers().joinToString()), 235 | Pair(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 60000) 236 | ) 237 | return AdminClient.create(config) 238 | } 239 | 240 | /** 241 | * Creates a new producer client. 242 | * 243 | * @return a new [org.apache.kafka.clients.producer.KafkaProducer] instance. 
244 | */ 245 | fun producerClient(config: Map = emptyMap()): Producer { 246 | val configs = HashMap(config) 247 | configs[ProducerConfig.BOOTSTRAP_SERVERS_CONFIG] = bootstrapServers().joinToString() 248 | return KafkaProducer(configs) 249 | } 250 | 251 | /** 252 | * Creates a new consumer client. 253 | * 254 | * @return a new [org.apache.kafka.clients.consumer.KafkaConsumer] instance. 255 | */ 256 | fun consumerClient(config: Map = emptyMap(), 257 | keyDeserializer: Deserializer? = null, 258 | valueDeserializer: Deserializer? = null): Consumer { 259 | val configs = HashMap(config) 260 | configs[ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG] = bootstrapServers().joinToString() 261 | configs.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer::class.java.name) 262 | configs.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer::class.java.name) 263 | configs.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") 264 | configs.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true") 265 | configs.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString()) 266 | return KafkaConsumer(configs, keyDeserializer, valueDeserializer) 267 | } 268 | 269 | fun consumeUntilMinRecordsOrTimeout( 270 | topic: String, 271 | timeout: Duration = Duration.ofMinutes(1), 272 | expectedNumRecords: Int = Int.MAX_VALUE, 273 | keyDeserializer: Deserializer? = null, 274 | valueDeserializer: Deserializer? 
= null, 275 | consumerConfig: Map = emptyMap()): List> { 276 | 277 | consumerClient(consumerConfig, keyDeserializer, valueDeserializer).use { client -> 278 | client.subscribe(listOf(topic)) 279 | val records: MutableList> = mutableListOf() 280 | 281 | val begin = System.currentTimeMillis() 282 | while ((System.currentTimeMillis() - begin) < timeout.toMillis() && records.size < expectedNumRecords) { 283 | client.poll(Duration.ofMillis(100)).forEach { records.add(it) } 284 | } 285 | return records 286 | } 287 | } 288 | 289 | /** 290 | * Deletes the given [topics] from the cluster. 291 | */ 292 | fun deleteTopics(vararg topicNames: String) { 293 | val remaining: MutableList = mutableListOf(*topicNames) 294 | try { 295 | adminClient().use { client -> 296 | client.deleteTopics(remaining).all().get() 297 | doWaitForTopicsToBeAbsent(topics = arrayOf(*topicNames), adminClient = client) 298 | } 299 | } catch (e: Exception) { 300 | throw RuntimeException("Failed to delete topics: $remaining", e) 301 | } 302 | } 303 | 304 | private fun zookeeperConnect(): String { 305 | return config[KafkaConfig.ZkConnectProp()].toString() 306 | } 307 | 308 | private fun logDir(): String { 309 | return config[KafkaConfig.LogDirProp()].toString() 310 | } 311 | } -------------------------------------------------------------------------------- /tests/src/main/kotlin/io/streamthoughts/kafka/tests/TestingEmbeddedZookeeper.kt: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 StreamThoughts. 3 | * 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. 
You may obtain a copy of the License at
 10 |  *
 11 |  *    http://www.apache.org/licenses/LICENSE-2.0
 12 |  *
 13 |  * Unless required by applicable law or agreed to in writing, software
 14 |  * distributed under the License is distributed on an "AS IS" BASIS,
 15 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 16 |  * See the License for the specific language governing permissions and
 17 |  * limitations under the License.
 18 |  */
 19 | package io.streamthoughts.kafka.tests
 20 | 
 21 | import org.apache.curator.test.TestingServer
 22 | import org.slf4j.Logger
 23 | import org.slf4j.LoggerFactory
 24 | import java.io.IOException
 25 | import java.net.BindException
 26 | import kotlin.jvm.Throws
 27 | 
 28 | /**
 29 |  * Runs an in-memory, "embedded" instance of a ZooKeeper server.
    |  *
    |  * @param port the fixed port to bind, or -1 (the default) to let Curator
    |  *             pick a random free port.
 30 |  */
 31 | class TestingEmbeddedZookeeper(private val port: Int = -1) {
 32 | 
 33 |     companion object {
 34 |         private val Log: Logger = LoggerFactory.getLogger(TestingEmbeddedZookeeper::class.java)
 35 |     }
 36 | 
    |     // Initialized by start(); guarded via ::server.isInitialized in stop().
 37 |     private lateinit var server: TestingServer
 38 | 
 39 |     /**
 40 |      * Creates and starts a ZooKeeper instance.
 41 |      */
 42 |     @Throws(Exception::class)
 43 |     fun start() {
 44 |         Log.debug("Starting embedded ZooKeeper server")
    |         // Disable ZooKeeper's AdminServer (embedded Jetty) to avoid extra port usage in tests.
 45 |         System.setProperty("zookeeper.admin.enableServer", "false")
 46 |         server = createTestingServer()
 47 |         Log.debug(
 48 |             "Embedded ZooKeeper server at {} uses the temp directory at {}",
 49 |             server.connectString, server.tempDirectory
 50 |         )
 51 |     }
 52 | 
    |     /**
    |      * Stops the ZooKeeper instance, if one was started.
    |      *
    |      * Calling stop() before start() is a harmless no-op rather than throwing
    |      * an UninitializedPropertyAccessException on the lateinit [server].
    |      */
 53 |     @Throws(IOException::class)
 54 |     fun stop() {
    |         if (!::server.isInitialized) {
    |             return
    |         }
 55 |         Log.debug(
 56 |             "Shutting down embedded ZooKeeper server at {} ...",
 57 |             server.connectString
 58 |         )
 59 |         server.close()
 60 |         Log.debug(
 61 |             "Stopping of embedded ZooKeeper server at {} completed",
 62 |             server.connectString
 63 |         )
 64 |     }
 65 | 
    |     /**
    |      * Returns the `host:port` connection string of the running server.
    |      * Must only be called after [start].
    |      */
 66 |     fun connectString(): String {
 67 |         return server.connectString
 68 |     }
 69 | 
    |     /**
    |      * Creates the Curator [TestingServer], retrying on [BindException].
    |      *
    |      * Retrying only makes sense for a random port (-1): Curator may race on
    |      * the port it picked (CURATOR-535). For an explicitly requested port a
    |      * BindException is a genuine conflict and retrying would loop forever,
    |      * so it is rethrown.
    |      */
 70 |     @Throws(Exception::class)
 71 |     private fun createTestingServer(): TestingServer {
 72 |         while (true) {
 73 |             try {
 74 |                 return TestingServer(port)
 75 |             } catch (e: BindException) {
    |                 if (port >= 0) {
    |                     throw e
    |                 }
 76 |                 // https://issues.apache.org/jira/browse/CURATOR-535
 77 |                 Log.warn("Failed to create test ZK node due to known race condition" +
 78 |                     " while choosing random available ports. Let's retry")
 79 |             }
 80 |         }
 81 |     }
 82 | }
--------------------------------------------------------------------------------
/tests/src/test/kotlin/io/streamthoughts/kafka/tests/junit/EmbeddedKafkaSetupExtension.kt:
--------------------------------------------------------------------------------
  1 | /*
  2 |  * Copyright 2020 StreamThoughts.
  3 |  *
  4 |  * Licensed to the Apache Software Foundation (ASF) under one or more
  5 |  * contributor license agreements. See the NOTICE file distributed with
  6 |  * this work for additional information regarding copyright ownership.
  7 |  * The ASF licenses this file to You under the Apache License, Version 2.0
  8 |  * (the "License"); you may not use this file except in compliance with
  9 |  * the License.
You may obtain a copy of the License at
 10 |  *
 11 |  *    http://www.apache.org/licenses/LICENSE-2.0
 12 |  *
 13 |  * Unless required by applicable law or agreed to in writing, software
 14 |  * distributed under the License is distributed on an "AS IS" BASIS,
 15 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 16 |  * See the License for the specific language governing permissions and
 17 |  * limitations under the License.
 18 |  */
 19 | package io.streamthoughts.kafka.tests.junit
 20 | 
 21 | import io.streamthoughts.kafka.tests.TestingEmbeddedKafka
 22 | import io.streamthoughts.kafka.tests.TestingEmbeddedZookeeper
 23 | import kafka.server.KafkaConfig
 24 | import org.junit.jupiter.api.extension.AfterAllCallback
 25 | import org.junit.jupiter.api.extension.BeforeAllCallback
 26 | import org.junit.jupiter.api.extension.ExtensionContext
 27 | import org.junit.jupiter.api.extension.ParameterContext
 28 | import org.junit.jupiter.api.extension.ParameterResolver
 29 | 
 30 | 
    | /**
    |  * JUnit 5 extension that starts a single-node Kafka cluster (one embedded
    |  * ZooKeeper plus one embedded broker) before all tests in a class, and tears
    |  * it down after all tests have run.
    |  *
    |  * Test methods can receive the running [TestingEmbeddedKafka] by declaring a
    |  * parameter of that type (see [supportsParameter] / [resolveParameter]).
    |  */
 31 | class EmbeddedSingleNodeKafkaCluster
 32 |     : BeforeAllCallback, AfterAllCallback, ParameterResolver {
 33 | 
 34 |     private val kafka: TestingEmbeddedKafka = TestingEmbeddedKafka()
 35 | 
 36 |     private val zookeeper: TestingEmbeddedZookeeper = TestingEmbeddedZookeeper()
 37 | 
    |     /** Starts ZooKeeper, then a broker pointing at it. */
 38 |     override fun beforeAll(extensionContext: ExtensionContext) {
 39 |         zookeeper.start()
    |         try {
 40 |             kafka.start(mapOf(Pair(KafkaConfig.ZkConnectProp(), zookeeper.connectString())))
    |         } catch (e: Exception) {
    |             // Don't leak the ZooKeeper instance when the broker fails to start.
    |             zookeeper.stop()
    |             throw e
    |         }
 41 |     }
 42 | 
    |     /** Stops the broker first, then ZooKeeper — even when stopping the broker throws. */
 43 |     override fun afterAll(extensionContext: ExtensionContext) {
 44 |         try {
    |             kafka.stop()
 45 |         } finally {
    |             zookeeper.stop()
    |         }
 46 |     }
 47 | 
    |     /** Supports injection of the [TestingEmbeddedKafka] instance only. */
 48 |     override fun supportsParameter(parameterContext: ParameterContext,
 49 |                                    extensionContext: ExtensionContext): Boolean {
 50 |         return parameterContext.parameter.type == TestingEmbeddedKafka::class.java
 51 |     }
 52 | 
 53 |     override fun resolveParameter(parameterContext: ParameterContext,
 54 |                                   extensionContext: ExtensionContext): Any {
 55 |         return kafka
 56 |     }
 57 | }
--------------------------------------------------------------------------------