├── .github
│   └── workflows
│       └── maven.yml
├── .gitignore
├── .mvn
│   └── wrapper
│       └── maven-wrapper.properties
├── README.adoc
├── clients
│   ├── pom.xml
│   └── src
│       ├── main
│       │   └── kotlin
│       │       └── io
│       │           └── streamthoughts
│       │               └── kafka
│       │                   └── clients
│       │                       ├── Configs.kt
│       │                       ├── Extensions.kt
│       │                       ├── Kafka.kt
│       │                       ├── KafkaClientConfigs.kt
│       │                       ├── KafkaClients.kt
│       │                       ├── KafkaRecord.kt
│       │                       ├── LoggerUtils.kt
│       │                       ├── consumer
│       │                       │   ├── AutoOffsetReset.kt
│       │                       │   ├── ConsumerAwareRebalanceListener.kt
│       │                       │   ├── ConsumerFactory.kt
│       │                       │   ├── ConsumerTask.kt
│       │                       │   ├── ConsumerWorker.kt
│       │                       │   ├── KafkaConsumerConfigs.kt
│       │                       │   ├── KafkaConsumerTask.kt
│       │                       │   ├── KafkaConsumerWorker.kt
│       │                       │   ├── TopicSubscription.kt
│       │                       │   ├── Types.kt
│       │                       │   ├── error
│       │                       │   │   ├── ConsumedErrorHandler.kt
│       │                       │   │   ├── ConsumedErrorHandlers.kt
│       │                       │   │   └── serialization
│       │                       │   │       ├── DeserializationErrorHandler.kt
│       │                       │   │       └── DeserializationErrorHandlers.kt
│       │                       │   └── listener
│       │                       │       └── ConsumerBatchRecordsListener.kt
│       │                       └── producer
│       │                           ├── Acks.kt
│       │                           ├── KafkaProducerConfigs.kt
│       │                           ├── KafkaProducerContainer.kt
│       │                           ├── ProducerContainer.kt
│       │                           ├── ProducerFactory.kt
│       │                           ├── SendResult.kt
│       │                           └── callback
│       │                               └── ProducerSendCallback.kt
│       └── test
│           ├── kotlin
│           │   └── io
│           │       └── streamthoughts
│           │           └── kafka
│           │               └── clients
│           │                   ├── KafkaClientConfigsTest.kt
│           │                   ├── consumer
│           │                   │   ├── KafkaConsumerConfigsTest.kt
│           │                   │   └── KafkaConsumerTaskTest.kt
│           │                   └── producer
│           │                       ├── KafkaProducerConfigsTest.kt
│           │                       └── KafkaProducerContainerTest.kt
│           └── resources
│               ├── logback-test.xml
│               └── test-configs.properties
├── examples
│   ├── pom.xml
│   └── src
│       └── main
│           └── kotlin
│               └── io
│                   └── streamthoughts
│                       └── kafka
│                           └── client
│                               └── examples
│                                   ├── ConsumerClientExample.kt
│                                   ├── ConsumerKotlinDSLExample.kt
│                                   ├── ProducerClientExample.kt
│                                   ├── ProducerKotlinDSLExample.kt
│                                   └── TxProducerContainerExample.kt
├── mvnw
├── mvnw.cmd
├── pom.xml
└── tests
    ├── pom.xml
    └── src
        ├── main
        │   └── kotlin
        │       └── io
        │           └── streamthoughts
        │               └── kafka
        │                   └── tests
        │                       ├── TestingEmbeddedKafka.kt
        │                       └── TestingEmbeddedZookeeper.kt
        └── test
            └── kotlin
                └── io
                    └── streamthoughts
                        └── kafka
                            └── tests
                                └── junit
                                    └── EmbeddedKafkaSetupExtension.kt
/.github/workflows/maven.yml:
--------------------------------------------------------------------------------
1 | name: Java CI with Maven
2 | 
3 | on:
4 |   push:
5 |     branches: [ master ]
6 |   pull_request:
7 |     branches: [ master ]
8 | 
9 | jobs:
10 |   build:
11 | 
12 |     runs-on: ubuntu-latest
13 | 
14 |     steps:
15 |     - uses: actions/checkout@v2
16 |     - name: Set up JDK 11
17 |       uses: actions/setup-java@v1
18 |       with:
19 |         java-version: 11
20 |     - name: Build with Maven
21 |       run: mvn -B package --file pom.xml
22 | 
23 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ### Scala template
2 | *.class
3 | *.log
4 |
5 | # sbt specific
6 | .cache
7 | .history
8 | .lib/
9 | dist/*
10 | target/
11 | lib_managed/
12 | src_managed/
13 | project/boot/
14 | project/plugins/project/
15 |
16 | # Scala-IDE specific
17 | .scala_dependencies
18 | .worksheet
19 | ### Java template
20 | *.class
21 |
22 | # Mobile Tools for Java (J2ME)
23 | .mtj.tmp/
24 |
25 | # Package Files #
26 | *.jar
27 | *.war
28 | *.ear
29 |
30 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
31 | hs_err_pid*
32 | ### Maven template
33 | target/
34 | pom.xml.tag
35 | pom.xml.releaseBackup
36 | pom.xml.versionsBackup
37 | pom.xml.next
38 | release.properties
39 | dependency-reduced-pom.xml
40 | buildNumber.properties
41 | .mvn/timing.properties
42 | ### Eclipse template
43 | *.pydevproject
44 | .metadata
45 | .gradle
46 | tmp/
47 | *.tmp
48 | *.bak
49 | *.swp
50 | *~.nib
51 | local.properties
52 | .settings/
53 | .loadpath
54 |
55 | # Eclipse Core
56 | .project
57 |
58 | # External tool builders
59 | .externalToolBuilders/
60 |
61 | # Locally stored "Eclipse launch configurations"
62 | *.launch
63 |
64 | # CDT-specific
65 | .cproject
66 |
67 | # JDT-specific (Eclipse Java Development Tools)
68 | .classpath
69 |
70 | # Java annotation processor (APT)
71 | .factorypath
72 |
73 | # PDT-specific
74 | .buildpath
75 |
76 | # sbteclipse plugin
77 | .target
78 |
79 | # TeXlipse plugin
80 | .texlipse
81 | ### JetBrains template
82 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
83 |
84 | *.iml
85 |
86 | ## Directory-based project format:
87 | .idea/
88 | # if you remove the above rule, at least ignore the following:
89 |
90 | # User-specific stuff:
91 | # .idea/workspace.xml
92 | # .idea/tasks.xml
93 | # .idea/dictionaries
94 |
95 | # Sensitive or high-churn files:
96 | # .idea/dataSources.ids
97 | # .idea/dataSources.xml
98 | # .idea/sqlDataSources.xml
99 | # .idea/dynamic.xml
100 | # .idea/uiDesigner.xml
101 |
102 | # Gradle:
103 | # .idea/gradle.xml
104 | # .idea/libraries
105 |
106 | # Mongo Explorer plugin:
107 | # .idea/mongoSettings.xml
108 |
109 | ## File-based project format:
110 | *.ipr
111 | *.iws
112 |
113 | ## Plugin-specific files:
114 |
115 | # IntelliJ
116 | /out/
117 |
118 | # mpeltonen/sbt-idea plugin
119 | .idea_modules/
120 |
121 | # JIRA plugin
122 | atlassian-ide-plugin.xml
123 |
124 | # Crashlytics plugin (for Android Studio and IntelliJ)
125 | com_crashlytics_export_strings.xml
126 | crashlytics.properties
127 | crashlytics-build.properties
128 |
129 | # Created by .ignore support plugin (hsz.mobi)
130 | #
131 | #
132 | .mvn/wrapper/MavenWrapperDownloader.java
133 |
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
3 |
--------------------------------------------------------------------------------
/README.adoc:
--------------------------------------------------------------------------------
1 | = Kafka Clients for Kotlin
2 | :toc:
3 | :toc-placement!:
4 |
5 | image:https://img.shields.io/badge/License-Apache%202.0-blue.svg[https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/LICENSE]
6 | image:https://img.shields.io/github/v/release/streamthoughts/kafka-clients-kotlin[GitHub release (latest by date)]
7 | image:https://img.shields.io/github/issues-raw/streamthoughts/kafka-clients-kotlin[GitHub issues]
8 | image:https://img.shields.io/github/workflow/status/streamthoughts/kafka-clients-kotlin/Java%20CI%20with%20Maven[GitHub Workflow Status]
9 | image:https://img.shields.io/github/stars/streamthoughts/kafka-clients-kotlin?style=social[GitHub Repo stars]
10 |
11 | WARNING: Be aware that this package is still under heavy development. Some breaking changes will occur in the coming weeks and months.
12 | Thanks for your understanding.
13 |
14 | toc::[]
15 |
16 | == What is Kafka Clients for Kotlin?
17 |
18 | The **Kafka Clients for Kotlin** project provides a convenient Kotlin API for developing Kafka-based event-driven applications.
19 | It provides high-level abstractions both for sending records (`ProducerContainer`) and for consuming records from topics using one or many
20 | concurrent consumers (`KafkaConsumerWorker`).
21 | 
22 | In addition, it provides builder classes to facilitate the configuration of `Producer` and `Consumer` objects: `KafkaProducerConfigs` and `KafkaConsumerConfigs`.
23 | 
24 | **Kafka Clients for Kotlin** is built on top of the pure Java `kafka-clients` library.
25 |
26 | == How to contribute?
27 | 
28 | The project is in its early stages, so it is easy to contribute by proposing API changes, new features, and so on.
29 | Any feedback, bug reports and PRs are greatly appreciated!
30 |
31 | * Source Code: https://github.com/streamthoughts/kafka-clients-kotlin
32 | * Issue Tracker: https://github.com/streamthoughts/kafka-clients-kotlin/issues
33 |
34 |
35 | == Show your support
36 |
37 | Do you think this project can help you or your team develop Kafka-based applications with Kotlin?
38 | Please ⭐ this repository to support us!
39 |
40 | == How to give it a try?
41 | 
42 | Just add **Kafka Clients for Kotlin** to the dependencies of your project.
43 |
44 | === For Maven
45 | [source,xml]
46 | ----
47 | <dependency>
48 |   <groupId>io.streamthoughts</groupId>
49 |   <artifactId>kafka-clients-kotlin</artifactId>
50 |   <version>0.2.0</version>
51 | </dependency>
52 | ----
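
=== For Gradle (Kotlin DSL)

Assuming the artifact is resolved from Maven Central, a roughly equivalent Gradle declaration would be:

[source,kotlin]
----
dependencies {
    implementation("io.streamthoughts:kafka-clients-kotlin:0.2.0")
}
----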
53 |
54 | == Getting Started
55 |
56 | === Writing messages to Kafka
57 |
58 | **Example: How to create a `KafkaProducer` config?**
59 |
60 | [source,kotlin]
61 | ----
62 | val configs = producerConfigsOf()
63 | .client { bootstrapServers("localhost:9092") }
64 | .acks(Acks.Leader)
65 | .keySerializer(StringSerializer::class.java.name)
66 | .valueSerializer(StringSerializer::class.java.name)
67 | ----
68 |
69 | ==== Example with the standard `KafkaProducer` (i.e. using the Java `kafka-clients`)
70 |
71 | [source,kotlin]
72 | ----
73 | val producer = KafkaProducer<String, String>(configs)
74 |
75 | val messages = listOf("I ❤️ Logs", "Making Sense of Stream Processing", "Apache Kafka")
76 | producer.use {
77 | messages.forEach {value ->
78 | val record = ProducerRecord<String, String>(topic, value)
79 | producer.send(record) { m: RecordMetadata, e: Exception? ->
80 | when (e) {
81 | null -> println("Record was successfully sent (topic=${m.topic()}, partition=${m.partition()}, offset= ${m.offset()})")
82 | else -> e.printStackTrace()
83 | }
84 | }
85 | }
86 | }
87 | ----
88 |
89 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerClientExample.kt[ProducerClientExample.kt]
90 |
91 | ==== Example with Kotlin DSL
92 |
93 | [source,kotlin]
94 | ----
95 | val producer: ProducerContainer<String, String> = kafka("localhost:9092") {
96 | client {
97 | clientId("my-client")
98 | }
99 |
100 | producer {
101 | configure {
102 | acks(Acks.InSyncReplicas)
103 | }
104 | keySerializer(StringSerializer())
105 | valueSerializer(StringSerializer())
106 |
107 | defaultTopic("demo-topic")
108 |
109 | onSendError {_, _, error ->
110 | error.printStackTrace()
111 | }
112 |
113 | onSendSuccess{ _, _, metadata ->
114 | println("Record was sent successfully: topic=${metadata.topic()}, partition=${metadata.partition()}, offset=${metadata.offset()} ")
115 | }
116 | }
117 | }
118 |
119 | val messages = listOf("I ❤️ Logs", "Making Sense of Stream Processing", "Apache Kafka")
120 | producer.use {
121 | producer.init() // create internal producer and call initTransaction() if `transactional.id` is set
122 | messages.forEach { producer.send(value = it) }
123 | }
124 | ----
125 |
126 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerKotlinDSLExample.kt[ProducerKotlinDSLExample.kt]
127 |
128 | === Consuming messages from a Kafka topic
129 |
130 | ==== Example: How to create a `KafkaConsumer` config?
131 |
132 | [source,kotlin]
133 | ----
134 | val configs = consumerConfigsOf()
135 | .client { bootstrapServers("localhost:9092") }
136 | .groupId("demo-consumer-group")
137 | .keyDeserializer(StringDeserializer::class.java.name)
138 | .valueDeserializer(StringDeserializer::class.java.name)
139 | ----
140 |
141 | ==== Example with the standard `KafkaConsumer` (i.e. using the Java `kafka-clients`)
142 |
143 | [source,kotlin]
144 | ----
145 | val consumer = KafkaConsumer<String, String>(configs)
146 |
147 | consumer.use {
148 | consumer.subscribe(listOf(topic))
149 | while(true) {
150 | consumer
151 | .poll(Duration.ofMillis(500))
152 | .forEach { record ->
153 | println(
154 | "Received record with key ${record.key()} " +
155 | "and value ${record.value()} from topic ${record.topic()} and partition ${record.partition()}"
156 | )
157 | }
158 | }
159 | }
160 | ----
161 |
162 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerClientExample.kt[ConsumerClientExample.kt]
163 |
164 | ==== Example with Kotlin DSL
165 | [source,kotlin]
166 | ----
167 | val consumerWorker: ConsumerWorker<String, String> = kafka("localhost:9092") {
168 | client {
169 | clientId("my-client")
170 | }
171 |
172 | val stringDeserializer: Deserializer<String> = StringDeserializer()
173 | consumer("my-group", stringDeserializer, stringDeserializer) {
174 | configure {
175 | maxPollRecords(1000)
176 | autoOffsetReset(AutoOffsetReset.Earliest)
177 | }
178 |
179 | onDeserializationError(replaceWithNullOnInvalidRecord())
180 |
181 | onPartitionsAssigned { _: Consumer<*, *>, partitions ->
182 | println("Partitions assigned: $partitions")
183 | }
184 |
185 | onPartitionsRevokedAfterCommit { _: Consumer<*, *>, partitions ->
186 | println("Partitions revoked: $partitions")
187 | }
188 |
189 | onConsumed { _: Consumer<*, *>, value: String? ->
190 | println("consumed record-value: $value")
191 | }
192 |
193 | onConsumedError(closeTaskOnConsumedError())
194 |
195 | Runtime.getRuntime().addShutdownHook(Thread { run { stop() } })
196 | }
197 | }
198 |
199 | consumerWorker.use {
200 | consumerWorker.start("demo-topic", maxParallelHint = 4)
201 | runBlocking {
202 | println("All consumers started, waiting one minute before stopping")
203 | delay(Duration.ofMinutes(1).toMillis())
204 | }
205 | }
206 | ----
207 |
208 | N.B: See the full source code: https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerKotlinDSLExample.kt[ConsumerKotlinDSLExample.kt]
209 |
210 | == All Examples
211 |
212 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerClientExample.kt[ProducerClientExample.kt]
213 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ProducerKotlinDSLExample.kt[ProducerKotlinDSLExample.kt]
214 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/TxProducerContainerExample.kt[TxProducerContainerExample.kt]
215 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerClientExample.kt[ConsumerClientExample.kt]
216 | * https://github.com/streamthoughts/kafka-clients-kotlin/blob/master/examples/src/main/kotlin/io/streamthoughts/kafka/client/examples/ConsumerKotlinDSLExample.kt[ConsumerKotlinDSLExample.kt]
217 |
218 | == How to build the project?
219 |
220 | Kafka Clients for Kotlin uses https://github.com/takari/maven-wrapper[maven-wrapper].
221 |
222 | [source,bash]
223 | ----
224 | $ ./mvnw clean package
225 | ----
226 |
227 | To run the tests:
228 |
229 | [source,bash]
230 | ----
231 | $ ./mvnw clean test
232 | ----
233 |
234 | == License
235 |
236 | Copyright 2020 StreamThoughts.
237 |
238 | Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
239 |
240 | http://www.apache.org/licenses/LICENSE-2.0["http://www.apache.org/licenses/LICENSE-2.0"]
241 |
242 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
243 |
--------------------------------------------------------------------------------
/clients/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | 
3 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <groupId>io.streamthoughts</groupId>
7 |         <artifactId>kafka-clients-kotlin-reactor</artifactId>
8 |         <version>0.2.0</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 | 
12 |     <name>Kafka Clients for Kotlin</name>
13 |     <artifactId>kafka-clients-kotlin</artifactId>
14 | 
15 |     <dependencies>
16 |         <dependency>
17 |             <groupId>org.apache.kafka</groupId>
18 |             <artifactId>kafka-clients</artifactId>
19 |         </dependency>
20 |         <dependency>
21 |             <groupId>org.jetbrains.kotlin</groupId>
22 |             <artifactId>kotlin-stdlib</artifactId>
23 |         </dependency>
24 |         <dependency>
25 |             <groupId>org.jetbrains.kotlin</groupId>
26 |             <artifactId>kotlin-stdlib-jdk8</artifactId>
27 |         </dependency>
28 |         <dependency>
29 |             <groupId>org.jetbrains.kotlinx</groupId>
30 |             <artifactId>kotlinx-coroutines-core</artifactId>
31 |         </dependency>
32 |         <dependency>
33 |             <groupId>org.jetbrains.kotlin</groupId>
34 |             <artifactId>kotlin-test-junit</artifactId>
35 |             <scope>test</scope>
36 |         </dependency>
37 |         <dependency>
38 |             <groupId>io.streamthoughts</groupId>
39 |             <artifactId>kafka-clients-kotlin-tests</artifactId>
40 |             <version>${project.version}</version>
41 |             <scope>test</scope>
42 |         </dependency>
43 |         <dependency>
44 |             <groupId>io.streamthoughts</groupId>
45 |             <artifactId>kafka-clients-kotlin-tests</artifactId>
46 |             <version>${project.version}</version>
47 |             <type>test-jar</type>
48 |             <scope>test</scope>
49 |         </dependency>
50 |         <dependency>
51 |             <groupId>org.junit.platform</groupId>
52 |             <artifactId>junit-platform-launcher</artifactId>
53 |             <scope>test</scope>
54 |         </dependency>
55 |         <dependency>
56 |             <groupId>org.junit.jupiter</groupId>
57 |             <artifactId>junit-jupiter-engine</artifactId>
58 |             <scope>test</scope>
59 |         </dependency>
60 |         <dependency>
61 |             <groupId>org.slf4j</groupId>
62 |             <artifactId>slf4j-api</artifactId>
63 |             <version>1.7.30</version>
64 |         </dependency>
65 |         <dependency>
66 |             <groupId>ch.qos.logback</groupId>
67 |             <artifactId>logback-classic</artifactId>
68 |         </dependency>
69 |         <dependency>
70 |             <groupId>ch.qos.logback</groupId>
71 |             <artifactId>logback-core</artifactId>
72 |         </dependency>
73 |     </dependencies>
74 | 
75 | </project>
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/Configs.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients
20 |
21 | import kotlin.collections.HashMap
22 |
23 |
24 | /**
25 | * The base class for client configuration.
26 | *
27 | * @see io.streamthoughts.kafka.clients.KafkaClientConfigs
28 | * @see io.streamthoughts.kafka.clients.consumer.KafkaConsumerConfigs
29 | * @see io.streamthoughts.kafka.clients.producer.KafkaProducerConfigs
30 | */
31 | open class Configs protected constructor(props: Map<String, Any?> = emptyMap()) : MutableMap<String, Any?> {
32 |
33 | private val props = HashMap(props)
34 |
35 | override val entries: MutableSet<MutableMap.MutableEntry<String, Any?>>
36 | get() = props.entries
37 |
38 | override val keys: MutableSet<String>
39 | get() = props.keys
40 |
41 | override val size: Int
42 | get() = props.size
43 |
44 | override val values: MutableCollection<Any?>
45 | get() = props.values
46 |
47 | override fun containsKey(key: String): Boolean {
48 | return props.containsKey(key)
49 | }
50 |
51 | override fun containsValue(value: Any?): Boolean {
52 | return props.containsValue(value)
53 | }
54 |
55 | override fun get(key: String): Any? {
56 | return props[key]
57 | }
58 |
59 | override fun isEmpty(): Boolean {
60 | return props.isEmpty()
61 | }
62 |
63 | open fun with(key: String, value: Any?) = apply { this[key] = value }
64 |
65 | operator fun set(key: String, value: Any?) {
66 | props[key] = value
67 | }
68 |
69 | override fun equals(other: Any?): Boolean {
70 | if (this === other) return true
71 | if (other !is Configs) return false
72 |
73 | if (props != other.props) return false
74 |
75 | return true
76 | }
77 |
78 | override fun hashCode(): Int {
79 | return props.hashCode()
80 | }
81 |
82 | override fun toString(): String {
83 | return "Configs[$props]"
84 | }
85 |
86 | override fun clear() {
87 | props.clear()
88 | }
89 |
90 | override fun put(key: String, value: Any?): Any? = props.put(key, value)
91 |
92 | override fun putAll(from: Map<out String, Any?>) = props.putAll(from)
93 |
94 | override fun remove(key: String): Any? = props.remove(key)
95 | }
--------------------------------------------------------------------------------
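The `Configs` class above is a plain `MutableMap<String, Any?>` with a fluent `with` helper, so instances can be built either map-style or builder-style. A minimal sketch (the `clientConfigsOf` helper comes from `KafkaClientConfigs.kt`; the property values are illustrative):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.clientConfigsOf

fun main() {
    // Builder-style: with() returns the instance itself for chaining.
    val configs = clientConfigsOf(mapOf("bootstrap.servers" to "localhost:9092"))
        .with("client.id", "my-client")
    // Map-style: the set operator delegates to the backing HashMap.
    configs["request.timeout.ms"] = 30_000
    println(configs) // Configs[{bootstrap.servers=localhost:9092, ...}]
}
----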
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/Extensions.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients
20 |
21 | import java.io.FileInputStream
22 | import java.io.InputStream
23 | import java.util.Properties
24 |
25 | /**
26 | * Convenient method to transform a [Properties] to a [Map] of string keys.
27 | */
28 | fun Properties.toStringMap(): Map<String, Any?> = this.map { (k, v) -> Pair(k.toString(), v) }.toMap()
29 |
30 | /**
31 | * Convenient method to load config properties from the given [configFile].
32 | */
33 | fun <T : Configs> T.load(configFile: String): T =
34 | apply { FileInputStream(configFile).use { load(it) } }
35 |
36 | /**
37 | * Convenient method to load config properties from the given [inputStream].
38 | */
39 | fun <T : Configs> T.load(inputStream: InputStream): T =
40 | apply { putAll((Properties().apply { load(inputStream) }).toStringMap()) }
41 |
--------------------------------------------------------------------------------
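A short sketch of the `toStringMap` extension above, converting a `java.util.Properties` into a string-keyed map suitable for the `Configs` classes (the property values are illustrative):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.toStringMap
import java.util.Properties

fun main() {
    val props = Properties().apply {
        setProperty("bootstrap.servers", "localhost:9092")
        setProperty("client.id", "my-client")
    }
    // Keys are converted with toString(); values are kept as-is.
    val map: Map<String, Any?> = props.toStringMap()
    println(map["bootstrap.servers"]) // localhost:9092
}
----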
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/Kafka.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients
20 |
21 | data class Kafka(val bootstrapServers: Array<String> = arrayOf("localhost:9092")) {
22 |
23 | override fun equals(other: Any?): Boolean {
24 | if (this === other) return true
25 | if (other !is Kafka) return false
26 |
27 | if (!bootstrapServers.contentEquals(other.bootstrapServers)) return false
28 |
29 | return true
30 | }
31 |
32 | override fun hashCode(): Int {
33 | return bootstrapServers.contentHashCode()
34 | }
35 |
36 | override fun toString(): String {
37 | return "Kafka(bootstrapServers=${bootstrapServers.joinToString()})"
38 | }
39 |
40 | }
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/KafkaClientConfigs.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients
20 |
21 | import org.apache.kafka.clients.CommonClientConfigs
22 | import java.io.InputStream
23 | import java.util.Properties
24 | import kotlin.collections.HashMap
25 | import kotlin.collections.Map
26 | import kotlin.collections.emptyMap
27 | import kotlin.collections.joinToString
28 | import kotlin.collections.mutableMapOf
29 |
30 |
31 | open class KafkaClientConfigs constructor(props: Map<String, Any?> = emptyMap()): Configs(props) {
32 |
33 | constructor(kafka : Kafka): this(bootstrapServersConfig(kafka))
34 |
35 | companion object {
36 | private fun bootstrapServersConfig(kafka: Kafka) = mutableMapOf(
37 | Pair(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, kafka.bootstrapServers.joinToString())
38 | )
39 | }
40 |
41 | /**
42 | * @see [CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG]
43 | */
44 | fun bootstrapServers(bootstrapServers: Array<String>) =
45 | apply { this[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = bootstrapServers.joinToString() }
46 |
47 | /**
48 | * @see [CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG]
49 | */
50 | fun bootstrapServers(bootstrapServers: String) =
51 | apply { this[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = bootstrapServers }
52 |
53 | /**
54 | * @see [CommonClientConfigs.CLIENT_ID_CONFIG]
55 | */
56 | fun clientId(clientId: String) =
57 | apply { this[CommonClientConfigs.CLIENT_ID_CONFIG] = clientId }
58 |
59 | override fun with(key: String, value: Any?) = apply { super.with(key, value) }
60 | }
61 |
62 | /**
63 | * Convenient method to create and populate a new [KafkaClientConfigs] from a [configFile].
64 | */
65 | fun loadClientConfigs(configFile: String): KafkaClientConfigs = KafkaClientConfigs().load(configFile)
66 |
67 | /**
68 | * Convenient method to create and populate a new [KafkaClientConfigs] from an [inputStream].
69 | */
70 | fun loadClientConfigs(inputStream: InputStream): KafkaClientConfigs = KafkaClientConfigs().load(inputStream)
71 |
72 | /**
73 | * Creates a new [KafkaClientConfigs] with no properties.
74 | */
75 | fun emptyClientConfigs(): KafkaClientConfigs = KafkaClientConfigs()
76 |
77 | /**
78 | * Creates a new [KafkaClientConfigs] with the given [props].
79 | */
80 | fun clientConfigsOf(props: Map<String, Any?>): KafkaClientConfigs = KafkaClientConfigs(HashMap(props))
81 |
82 | /**
83 | * Creates a new [KafkaClientConfigs] with the given [props].
84 | */
85 | fun clientConfigsOf(props: Properties): KafkaClientConfigs = clientConfigsOf(props.toStringMap())
--------------------------------------------------------------------------------
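A minimal sketch chaining the builder methods defined above (the broker address is illustrative):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.emptyClientConfigs

fun main() {
    val configs = emptyClientConfigs()
        .bootstrapServers("localhost:9092")
        .clientId("my-client")
    println(configs) // Configs[{bootstrap.servers=localhost:9092, client.id=my-client}]
}
----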
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/KafkaClients.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients
20 |
21 | import io.streamthoughts.kafka.clients.consumer.ConsumerWorker
22 | import io.streamthoughts.kafka.clients.consumer.KafkaConsumerWorker
23 | import io.streamthoughts.kafka.clients.consumer.consumerConfigsOf
24 | import io.streamthoughts.kafka.clients.producer.KafkaProducerContainer
25 | import io.streamthoughts.kafka.clients.producer.ProducerContainer
26 | import io.streamthoughts.kafka.clients.producer.producerConfigsOf
27 | import org.apache.kafka.common.serialization.Deserializer
28 |
29 | /**
30 | * [KafkaClients] DSL for building either a new consumer or producer kafka client.
31 | */
32 | class KafkaClients(private val configs: KafkaClientConfigs) {
33 |
34 | /**
35 | * Configures the common configuration shared by all Kafka clients.
36 | */
37 | fun client(init: KafkaClientConfigs.() -> Unit) : Unit = configs.init()
38 |
39 | /**
40 | * Creates and configures a new [KafkaConsumerWorker] using the given [init] function
41 | * for the given [groupId], [keyDeserializer] and [valueDeserializer]
42 | *
43 | * @return a new [KafkaConsumerWorker] instance.
44 | */
45 | fun <K, V> consumer(groupId: String,
46 | keyDeserializer: Deserializer<K>,
47 | valueDeserializer: Deserializer<V>,
48 | init: KafkaConsumerWorker.Builder<K, V>.() -> Unit): ConsumerWorker<K, V> {
49 | val configs = consumerConfigsOf(configs).groupId(groupId)
50 | return KafkaConsumerWorker.Builder(configs, keyDeserializer, valueDeserializer).also(init).build()
51 | }
52 |
53 | /**
54 | * Creates and configures a new [ProducerContainer] using the given [init] function.
55 | *
56 | * @return a new [ProducerContainer] instance.
57 | */
58 | fun <K, V> producer(init: ProducerContainer.Builder<K, V>.() -> Unit): ProducerContainer<K, V> {
59 | val configs = producerConfigsOf(configs)
60 | return KafkaProducerContainer.Builder(configs).also(init).build()
61 | }
62 | }
63 |
64 | fun <R> kafka(bootstrapServer: String, init: KafkaClients.() -> R): R =
65 | kafka(arrayOf(bootstrapServer), init)
66 | 
67 | fun <R> kafka(bootstrapServers: Array<String>, init: KafkaClients.() -> R): R =
68 | kafka(KafkaClientConfigs(Kafka(bootstrapServers)), init)
69 | 
70 | fun <R> kafka(kafka: Kafka, init: KafkaClients.() -> R): R =
71 | kafka(KafkaClientConfigs(kafka), init)
72 | 
73 | fun <R> kafka(configs: KafkaClientConfigs, init: KafkaClients.() -> R): R =
74 | KafkaClients(configs).init()
75 |
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/KafkaRecord.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients
20 |
21 | import org.apache.kafka.clients.producer.ProducerRecord
22 | import java.time.Instant
23 |
24 | data class KafkaRecord<K, V>(
25 | /**
26 | * The record key.
27 | */
28 | var key: K? = null,
29 | /**
30 | * The record value.
31 | */
32 | var value: V? = null,
33 | /**
34 | * The record topic.
35 | */
36 | var topic: String? = null,
37 | /**
38 | * The record partition
39 | */
40 | var partition: Int? = null,
41 | /**
42 | * The record timestamp
43 | */
44 | var timestamp: Instant? = null
45 | ) {
46 |
47 | fun toProducerRecord() : ProducerRecord<K, V> {
48 | return ProducerRecord(
49 | topic,
50 | partition,
51 | timestamp?.toEpochMilli(),
52 | key,
53 | value
54 | )
55 | }
56 | }
--------------------------------------------------------------------------------
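A short sketch of building a `KafkaRecord` and converting it to a `ProducerRecord` via `toProducerRecord()` (topic and values are illustrative; `partition` is left `null` so the partitioner decides):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.KafkaRecord
import java.time.Instant

fun main() {
    val record = KafkaRecord<String, String>(
        key = "my-key",
        value = "my-value",
        topic = "demo-topic",
        timestamp = Instant.now()
    )
    // The Instant timestamp is converted to epoch milliseconds.
    val producerRecord = record.toProducerRecord()
    println(producerRecord)
}
----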
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/LoggerUtils.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients
20 |
21 | import org.slf4j.Logger
22 | import org.slf4j.LoggerFactory
23 |
24 | fun loggerFor(forClass: Class<*>): Logger = LoggerFactory.getLogger(forClass)
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/AutoOffsetReset.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | /**
22 | * The policy to apply when a [org.apache.kafka.clients.consumer.Consumer] has no initial offset in Kafka,
23 | * or when the current offset does not exist anymore on the server (e.g. because that data has been deleted).
24 | *
25 | * @see org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG
26 | */
27 | object AutoOffsetReset {
28 | /**
29 | * earliest: automatically reset the offset to the earliest offset.
30 | */
31 | const val Earliest = "earliest"
32 | /**
33 | * latest: automatically reset the offset to the latest offset
34 | */
35 | const val Latest = "latest"
36 | /**
37 | * none: throw exception to the consumer if no previous offset is found for the consumer's group
38 | */
39 | const val None = "none"
40 | }
--------------------------------------------------------------------------------
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerAwareRebalanceListener.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import org.apache.kafka.clients.consumer.Consumer
22 | import org.apache.kafka.common.TopicPartition
23 |
24 | /**
25 | * @see [org.apache.kafka.clients.consumer.ConsumerRebalanceListener]
26 | */
27 | interface ConsumerAwareRebalanceListener {
28 |
29 | fun onPartitionsRevokedBeforeCommit(consumer: Consumer<*, *>,
30 | partitions: Collection<TopicPartition>) {
31 | }
32 | 
33 | fun onPartitionsRevokedAfterCommit(consumer: Consumer<*, *>,
34 | partitions: Collection<TopicPartition>) {
35 | }
36 | 
37 | fun onPartitionsAssigned(consumer: Consumer<*, *>,
38 | partitions: Collection<TopicPartition>) {
39 | }
40 | 
41 | fun onPartitionsLost(consumer: Consumer<*, *>,
42 | partitions: Collection<TopicPartition>) {
43 | }
44 |
45 | }
--------------------------------------------------------------------------------
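All callbacks above have empty default bodies, so an implementation only overrides what it needs. A sketch of a listener that rewinds newly assigned partitions to the beginning (`SeekToBeginningListener` is an illustrative name, not part of the library):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.consumer.ConsumerAwareRebalanceListener
import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.common.TopicPartition

class SeekToBeginningListener : ConsumerAwareRebalanceListener {
    override fun onPartitionsAssigned(consumer: Consumer<*, *>,
                                      partitions: Collection<TopicPartition>) {
        // Rewind every newly assigned partition to its earliest offset.
        consumer.seekToBeginning(partitions)
    }
}
----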
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerFactory.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import org.apache.kafka.clients.consumer.Consumer
22 | import org.apache.kafka.clients.consumer.KafkaConsumer
23 |
24 | /**
25 | * The default factory interface to create new [Consumer] instance.
26 | */
27 | interface ConsumerFactory {
28 |
29 | object DefaultConsumerFactory: ConsumerFactory {
30 | override fun <K, V> make(configs: Map<String, Any?>): Consumer<K, V> = KafkaConsumer(configs)
31 |
32 | }
33 |
34 | /**
35 | * Creates a new [Consumer] instance with the given [configs].
36 | */
37 | fun <K, V> make(configs: Map<String, Any?>): Consumer<K, V>
38 | }
39 |
--------------------------------------------------------------------------------
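Implementing the factory is mainly useful for tests or for decorating consumer creation. A sketch that logs the configuration before delegating to `KafkaConsumer`, assuming the generic `make` signature shown above (`LoggingConsumerFactory` is an illustrative name):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.consumer.ConsumerFactory
import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.clients.consumer.KafkaConsumer

object LoggingConsumerFactory : ConsumerFactory {
    override fun <K, V> make(configs: Map<String, Any?>): Consumer<K, V> {
        // Inspect (or adjust) the configs before the consumer is created.
        println("Creating consumer with configs: $configs")
        return KafkaConsumer(configs)
    }
}
----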
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerTask.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import org.apache.kafka.clients.consumer.Consumer
22 | import org.apache.kafka.clients.consumer.OffsetAndMetadata
23 | import org.apache.kafka.common.TopicPartition
24 | import java.io.Closeable
25 | import java.time.Duration
26 |
27 | interface ConsumerTask: Closeable {
28 |
29 | enum class State {
30 | /**
31 | * The [ConsumerTask] is created.
32 | */
33 | CREATED,
34 | /**
35 | * The [ConsumerTask] is starting.
36 | */
37 | STARTING,
38 | /**
39 | * The [ConsumerTask] is running.
40 | */
41 | RUNNING,
42 | /**
43 | * The [ConsumerTask] is paused for all assigned partitions.
44 | */
45 | PAUSED,
46 | /**
47 | * The [ConsumerTask] is rebalancing and new partitions are being assigned.
48 | */
49 | PARTITIONS_ASSIGNED,
50 | /**
51 | * The [ConsumerTask] is rebalancing and partitions are being revoked.
52 | */
53 | PARTITIONS_REVOKED,
54 | /**
55 | * The [ConsumerTask] is being closed.
56 | */
57 | PENDING_SHUTDOWN,
58 | /**
59 | * The [ConsumerTask] is closed.
60 | */
61 | SHUTDOWN
62 | }
63 |
64 | suspend fun run()
65 |
66 | /**
67 | * Pauses consumption for the current assignments.
68 | * @see org.apache.kafka.clients.consumer.Consumer.pause
69 | */
70 | fun pause()
71 |
72 | /**
73 | * Resumes consumption for the current assignments.
74 | * @see org.apache.kafka.clients.consumer.Consumer.pause
75 | */
76 | fun resume()
77 |
78 | /**
79 | * Shuts down the [ConsumerTask] and waits for completion.
80 | * @see org.apache.kafka.clients.consumer.Consumer.close
81 | */
82 | override fun close()
83 |
84 | /**
85 | * Shuts down the [ConsumerTask] and waits for completion up to the given [timeout].
86 | * @see org.apache.kafka.clients.consumer.Consumer.close
87 | */
88 | fun close(timeout: Duration)
89 |
90 | /**
91 | * @return the [State] of this [ConsumerTask].
92 | */
93 | fun state(): State
94 |
95 | /**
96 | * Executes the given [action] with the underlying [Consumer].
97 | */
98 | fun <T> execute(action: (consumer: Consumer<ByteArray, ByteArray>) -> T): T
99 |
100 | /**
101 | * Commits asynchronously the positions of the internal [Consumer] for the given [offsets].
102 | * If the passed [offsets] is {@code null} then commits the [Consumer] positions for its current partition assignments.
103 | *
104 | * @see [Consumer.commitAsync]
105 | */
106 | fun commitAsync(offsets: Map<TopicPartition, OffsetAndMetadata>? = null)
107 | 
108 | /**
109 | * Commits synchronously the positions of the internal [Consumer] for the given offsets.
110 | * If the passed [offsets] is {@code null} then commits the [Consumer] positions for its current partition assignments.
111 | *
112 | * @see [Consumer.commitSync]
113 | */
114 | fun commitSync(offsets: Map<TopicPartition, OffsetAndMetadata>? = null)
115 | }
--------------------------------------------------------------------------------
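A sketch of how a `ConsumerTask` could be inspected and committed from outside its polling loop, assuming an implementation such as `KafkaConsumerTask` is available (`inspectAndStop` is an illustrative helper):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.consumer.ConsumerTask
import java.time.Duration

fun inspectAndStop(task: ConsumerTask) {
    // Query the underlying consumer through execute().
    val assignment = task.execute { consumer -> consumer.assignment() }
    println("Current assignment: $assignment")
    // Commit the current positions for the assignment, then shut down.
    task.commitSync()
    task.close(Duration.ofSeconds(30))
}
----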
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/ConsumerWorker.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import io.streamthoughts.kafka.clients.consumer.error.ConsumedErrorHandler
22 | import io.streamthoughts.kafka.clients.consumer.error.serialization.DeserializationErrorHandler
23 | import io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener
24 | import java.io.Closeable
25 | import java.time.Duration
26 | import java.util.regex.Pattern
27 |
28 | /**
29 | * The [ConsumerWorker] manages one or many concurrent [org.apache.kafka.clients.consumer.Consumer]s that belong
30 | * to the same {@code group.id}.
31 | */
32 | interface ConsumerWorker<K, V>: Closeable {
33 |
34 |
35 | interface Builder<K, V> {
36 | /**
37 | * Configures this worker.
38 | */
39 | fun configure(init: KafkaConsumerConfigs.() -> Unit)
40 |
41 | /**
42 | * Sets the [ConsumerFactory] to be used for creating a new [org.apache.kafka.clients.consumer.Consumer] instance.
43 | */
44 | fun factory(consumerFactory: ConsumerFactory): Builder<K, V>
45 |
46 | /**
47 | * Sets the [listener] to invoke when a rebalance is in progress and partitions are assigned.
48 | */
49 | fun onPartitionsAssigned(listener: RebalanceListener): Builder<K, V>
50 |
51 | /**
52 | * Sets the [listener] to invoke when a rebalance is in progress and partitions are revoked, before offsets are committed.
53 | */
54 | fun onPartitionsRevokedBeforeCommit(listener: RebalanceListener): Builder<K, V>
55 |
56 | /**
57 | * Sets the [listener] to invoke when a rebalance is in progress and partitions are revoked, after offsets have been committed.
58 | */
59 | fun onPartitionsRevokedAfterCommit(listener: RebalanceListener): Builder<K, V>
60 |
61 | /**
62 | * Sets the [listener] to invoke when a rebalance is in progress and partitions are lost.
63 | */
64 | fun onPartitionsLost(listener: RebalanceListener): Builder<K, V>
65 |
66 | /**
67 | * Sets the [handler] to invoke when an exception occurs while deserializing a record.
68 | */
69 | fun onDeserializationError(handler: DeserializationErrorHandler<K, V>): Builder<K, V>
70 |
71 | /**
72 | * Sets the [handler] to invoke when an error is thrown while processing the last records returned from
73 | * the [org.apache.kafka.clients.consumer.Consumer.poll] method, i.e. an exception thrown by the provided
74 | * [ConsumerBatchRecordsListener].
75 | *
76 | * @see [onConsumed]
77 | */
78 | fun onConsumedError(handler: ConsumedErrorHandler): Builder<K, V>
79 |
80 | /**
81 | * Sets the [ConsumerBatchRecordsListener] to invoke when a non-empty batch of records is returned from
82 | * the [org.apache.kafka.clients.consumer.Consumer.poll] method.
83 | */
84 | fun onConsumed(listener: ConsumerBatchRecordsListener<K, V>): Builder<K, V>
85 |
86 | /**
87 | * Build a new [ConsumerWorker].
88 | *
89 | * @return the new [ConsumerWorker] instance.
90 | */
91 | fun build(): ConsumerWorker<K, V>
92 | }
93 |
94 | /**
95 | * Returns the group id to which the [org.apache.kafka.clients.consumer.Consumer]s managed by this [ConsumerWorker] belong.
96 | */
97 | fun groupId(): String
98 |
99 | /**
100 | * Creates as many [org.apache.kafka.clients.consumer.Consumer] as given [maxParallelHint] that will
101 | * immediately subscribe to the given [topic] and start consuming records.
102 | */
103 | fun start(topic: String, maxParallelHint: Int = 1)
104 |
105 | /**
106 | * Creates as many [org.apache.kafka.clients.consumer.Consumer] as given [maxParallelHint] that will
107 | * immediately subscribe to the given [topics] and start consuming records.
108 | */
109 | fun start(topics: List<String>, maxParallelHint: Int = 1)
110 |
111 | /**
112 | * Creates as many [org.apache.kafka.clients.consumer.Consumer] as given [maxParallelHint] that will
113 | * immediately subscribe to the topics matching the given [pattern] and start consuming records.
114 | */
115 | fun start(pattern: Pattern, maxParallelHint: Int = 1)
116 |
117 | /**
118 | * Stops all [org.apache.kafka.clients.consumer.Consumer] managed by this [ConsumerWorker].
119 | */
120 | override fun close()
121 |
122 | /**
123 | * Pauses all [org.apache.kafka.clients.consumer.Consumer] managed by this [ConsumerWorker].
124 | */
125 | fun pause()
126 |
127 | /**
128 | * Resumes all [org.apache.kafka.clients.consumer.Consumer] managed by this [ConsumerWorker].
129 | */
130 | fun resume()
131 |
132 | /**
133 | * Joins for all [org.apache.kafka.clients.consumer.Consumer] to close.
134 | */
135 | suspend fun joinAll()
136 | }
--------------------------------------------------------------------------------
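A sketch of subscribing a worker to all topics matching a pattern, assuming the worker was built through the Kotlin DSL shown in the README (`startWorker` and the pattern are illustrative):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.consumer.ConsumerWorker
import java.util.regex.Pattern

fun startWorker(worker: ConsumerWorker<String, String>) {
    // Spawns up to two consumer tasks subscribed to the matching topics.
    worker.start(Pattern.compile("demo-.*"), maxParallelHint = 2)
}
----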
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/KafkaConsumerConfigs.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import io.streamthoughts.kafka.clients.KafkaClientConfigs
22 | import io.streamthoughts.kafka.clients.load
23 | import io.streamthoughts.kafka.clients.toStringMap
24 | import org.apache.kafka.clients.consumer.ConsumerConfig
25 | import java.io.InputStream
26 | import java.util.*
27 |
28 | /**
29 | * Used to build and encapsulate a configuration [Map]
30 | * for creating a new [org.apache.kafka.clients.consumer.KafkaConsumer]
31 | *
32 | * @see [ConsumerConfig]
33 | */
34 | class KafkaConsumerConfigs (props: Map<String, Any?> = emptyMap()) : KafkaClientConfigs(props) {
35 |
36 | companion object {
37 | const val POLL_INTERVAL_MS_CONFIG = "poll.interval.ms"
38 | const val POLL_INTERVAL_MS_DEFAULT = Long.MAX_VALUE
39 | }
40 |
41 | override fun with(key: String, value: Any?) = apply { super.with(key, value) }
42 |
43 | fun client(init: KafkaClientConfigs.() -> Unit) = apply { this.init() }
44 |
45 | /**
46 | * @see [ConsumerConfig.AUTO_OFFSET_RESET_CONFIG]
47 | */
48 | fun autoOffsetReset(autoOffsetReset : String) =
49 | apply { this[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = autoOffsetReset }
50 | /**
51 | * @see [ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG]
52 | */
53 | fun autoCommitIntervalMs(autoCommitIntervalMs : Long) =
54 | apply { this[ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG] = autoCommitIntervalMs}
55 | /**
56 | * @see [ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG]
57 | */
58 | fun allowAutoCreateTopicsConfig(allowAutoCreateTopicsConfig : Boolean) =
59 | apply { this[ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG] = allowAutoCreateTopicsConfig }
60 | /**
61 | * @see [ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG]
62 | */
63 | fun enableAutoCommit(enableAutoCommit : Boolean) =
64 | apply { this[ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG] = enableAutoCommit}
65 | /**
66 | * @see [ConsumerConfig.FETCH_MAX_BYTES_CONFIG]
67 | */
68 | fun fetchMaxBytes(fetchMaxBytes : Long) =
69 | apply { this[ConsumerConfig.FETCH_MAX_BYTES_CONFIG] = fetchMaxBytes }
70 | /**
71 | * @see [ConsumerConfig.FETCH_MIN_BYTES_CONFIG]
72 | */
73 | fun fetchMinBytes(fetchMinBytes : Long) =
74 | apply { this[ConsumerConfig.FETCH_MIN_BYTES_CONFIG] = fetchMinBytes}
75 | /**
76 | * @see [ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG]
77 | */
78 | fun fetchMaxWaitMs(fetchMaxWaitMs : Long) =
79 | apply { this[ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG] = fetchMaxWaitMs }
80 | /**
81 | * @see [ConsumerConfig.GROUP_ID_CONFIG]
82 | */
83 | fun groupId(groupId : String) =
84 | apply { this[ConsumerConfig.GROUP_ID_CONFIG] = groupId}
85 | /**
86 | * @see [ConsumerConfig.MAX_POLL_RECORDS_CONFIG]
87 | */
88 | fun maxPollRecords(maxPollRecords : Int) =
89 | apply { this[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = maxPollRecords}
90 | /**
91 | * @see [ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG]
92 | */
93 | fun maxPartitionFetchBytes(maxPartitionFetchBytes : Int) =
94 | apply { this[ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG] = maxPartitionFetchBytes }
95 | /**
96 | * @see [ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG]
97 | */
98 | fun keyDeserializer(keyDeserializer : String) =
99 | apply { this[ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG] = keyDeserializer}
100 | /**
101 | * @see [ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG]
102 | */
103 | fun valueDeserializer(valueDeserializer : String) =
104 | apply { this [ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG] = valueDeserializer}
105 |
106 | fun pollRecordsMs(pollRecordsMs : Long) =
107 | apply { this[POLL_INTERVAL_MS_CONFIG] = pollRecordsMs }
108 | }
109 |
110 | /**
111 | * Creates a new empty [KafkaConsumerConfigs].
112 | */
113 | fun emptyConsumerConfigs(): KafkaConsumerConfigs = KafkaConsumerConfigs(emptyMap())
114 |
115 | /**
116 | * Creates a new [KafkaConsumerConfigs] with the given [pairs].
117 | */
118 | fun consumerConfigsOf(vararg pairs: Pair<String, Any?>): KafkaConsumerConfigs = consumerConfigsOf(mapOf(*pairs))
119 |
120 | /**
121 | * Creates a new [KafkaConsumerConfigs] with the given [props].
122 | */
123 | fun consumerConfigsOf(props: Map<String, Any?>): KafkaConsumerConfigs = KafkaConsumerConfigs(props)
124 |
125 | /**
126 | * Creates a new [KafkaConsumerConfigs] with the given [props].
127 | */
128 | fun consumerConfigsOf(props: Properties): KafkaConsumerConfigs = consumerConfigsOf(props.toStringMap())
129 |
130 | /**
131 | * Convenient method to create and populate a new [KafkaConsumerConfigs] from a [configFile].
132 | */
133 | fun loadConsumerConfigs(configFile: String): KafkaConsumerConfigs = KafkaConsumerConfigs().load(configFile)
134 |
135 | /**
136 | * Convenient method to create and populate new [KafkaConsumerConfigs] from an [inputStream].
137 | */
138 | fun loadConsumerConfigs(inputStream: InputStream): KafkaConsumerConfigs = KafkaConsumerConfigs().load(inputStream)
--------------------------------------------------------------------------------
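A minimal sketch combining the builder methods above into a consumer configuration (the broker address and group id are illustrative):

[source,kotlin]
----
import io.streamthoughts.kafka.clients.consumer.AutoOffsetReset
import io.streamthoughts.kafka.clients.consumer.emptyConsumerConfigs
import org.apache.kafka.common.serialization.StringDeserializer

fun main() {
    val configs = emptyConsumerConfigs()
        .client { bootstrapServers("localhost:9092") }
        .groupId("demo-consumer-group")
        .autoOffsetReset(AutoOffsetReset.Earliest)
        .enableAutoCommit(false)
        .keyDeserializer(StringDeserializer::class.java.name)
        .valueDeserializer(StringDeserializer::class.java.name)
    println(configs)
}
----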
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/KafkaConsumerWorker.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import io.streamthoughts.kafka.clients.consumer.KafkaConsumerWorker.KafkaConsumerWorker
22 | import io.streamthoughts.kafka.clients.consumer.error.ConsumedErrorHandler
23 | import io.streamthoughts.kafka.clients.consumer.error.closeTaskOnConsumedError
24 | import io.streamthoughts.kafka.clients.consumer.error.serialization.DeserializationErrorHandler
25 | import io.streamthoughts.kafka.clients.consumer.error.serialization.logAndFailOnInvalidRecord
26 | import io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener
27 | import io.streamthoughts.kafka.clients.consumer.listener.noop
28 | import io.streamthoughts.kafka.clients.loggerFor
29 | import kotlinx.coroutines.CoroutineScope
30 | import kotlinx.coroutines.ExecutorCoroutineDispatcher
31 | import kotlinx.coroutines.Job
32 | import kotlinx.coroutines.asCoroutineDispatcher
33 | import kotlinx.coroutines.joinAll
34 | import kotlinx.coroutines.launch
35 | import org.apache.kafka.clients.CommonClientConfigs
36 | import org.apache.kafka.clients.consumer.Consumer
37 | import org.apache.kafka.clients.consumer.ConsumerConfig
38 | import org.apache.kafka.common.TopicPartition
39 | import org.apache.kafka.common.serialization.Deserializer
40 | import java.util.concurrent.ExecutorService
41 | import java.util.concurrent.Executors
42 | import java.util.concurrent.atomic.AtomicBoolean
43 | import java.util.concurrent.atomic.AtomicInteger
44 | import java.util.regex.Pattern
45 |
46 | /**
47 | * [KafkaConsumerWorker] is the default [ConsumerWorker] implementation.
48 | */
49 | class KafkaConsumerWorker<K, V>(
50 | private val configs: KafkaConsumerConfigs,
51 | private val keyDeserializer: Deserializer<K>,
52 | private val valueDeserializer: Deserializer<V>,
53 | private val consumerRebalanceListener: ConsumerAwareRebalanceListener,
54 | private val batchRecordListener: ConsumerBatchRecordsListener<K, V>,
55 | private val onConsumedError: ConsumedErrorHandler,
56 | private val onDeserializationError: DeserializationErrorHandler<K, V>,
57 | private val consumerFactory: ConsumerFactory = ConsumerFactory.DefaultConsumerFactory
58 | ): ConsumerWorker<K, V> {
59 |
60 | companion object KafkaConsumerWorker {
61 | private val Log = loggerFor(KafkaConsumerWorker::class.java)
62 | }
63 |
64 | private val groupId: String = configs[ConsumerConfig.GROUP_ID_CONFIG].toString()
65 |
66 | private val defaultClientIdPrefix: String
67 |
68 | private var consumerTasks: Array<KafkaConsumerTask<K, V>> = emptyArray()
69 |
70 | private var consumerJobs: List<Job> = mutableListOf()
71 |
72 | private var isRunning = AtomicBoolean(false)
73 |
74 | init {
75 | defaultClientIdPrefix = "consumer-$groupId"
76 | }
77 |
78 | override fun groupId(): String {
79 | return groupId
80 | }
81 |
82 | @Synchronized
83 | override fun start(topic: String, maxParallelHint: Int) {
84 | start(getTopicSubscription(topic), maxParallelHint)
85 | }
86 |
87 | @Synchronized
88 | override fun start(topics: List<String>, maxParallelHint: Int) {
89 | start(getTopicSubscription(topics), maxParallelHint)
90 | }
91 |
92 | @Synchronized
93 | override fun start(pattern: Pattern, maxParallelHint: Int) {
94 | start(getTopicSubscription(pattern), maxParallelHint)
95 | }
96 |
97 | @Synchronized
98 | private fun start(subscription: TopicSubscription, maxParallelHint: Int) {
99 | Log.info("KafkaConsumerWorker(group: $groupId): Initializing io.streamthoughts.kafka.clients.consumer tasks ($maxParallelHint)")
100 | consumerTasks = Array(maxParallelHint) { taskId ->
101 | KafkaConsumerTask(
102 | consumerFactory,
103 | configs,
104 | subscription,
105 | keyDeserializer,
106 | valueDeserializer,
107 | batchRecordListener,
108 | clientId = computeClientId(taskId),
109 | consumerAwareRebalanceListener = consumerRebalanceListener,
110 | deserializationErrorHandler = onDeserializationError,
111 | consumedErrorHandler = onConsumedError
112 | )
113 | }
114 | doStart()
115 | isRunning.set(true)
116 | }
117 |
118 | private fun computeClientId(taskId: Int): String {
119 | val clientId = configs[CommonClientConfigs.CLIENT_ID_CONFIG]
120 | // use the configured client.id when present, otherwise fall back to the default prefix
121 | return clientId?.let { "$it-$taskId" } ?: "$defaultClientIdPrefix-$taskId"
122 |
123 | private fun doStart() {
124 | val threadNumber = AtomicInteger(1)
125 | val executor: ExecutorService = Executors.newFixedThreadPool(consumerTasks.size) {
126 | Thread(it, "consumer-$groupId-${threadNumber.getAndIncrement()}").also { t -> t.isDaemon = true }
127 | }
128 | val dispatcher: ExecutorCoroutineDispatcher = executor.asCoroutineDispatcher()
129 | val scope = CoroutineScope(dispatcher)
130 |
131 | consumerJobs = consumerTasks.map { task ->
132 | scope.launch {
133 | task.run()
134 | }
135 | }
136 | }
137 |
138 | override suspend fun joinAll() {
139 | consumerJobs.joinAll()
140 | }
141 |
142 | override fun close() {
143 | if (isRunning.get()) {
144 | Log.info("KafkaConsumerWorker(group: $groupId): Stopping all io.streamthoughts.kafka.clients.consumer tasks")
145 | consumerTasks.forEach { it.close() }
146 | isRunning.set(false)
147 | }
148 | }
149 |
150 | @Synchronized
151 | override fun pause() {
152 | Log.info("KafkaConsumerWorker(group: $groupId): Pausing all io.streamthoughts.kafka.clients.consumer tasks")
153 | consumerTasks.forEach { it.pause() }
154 | }
155 |
156 | @Synchronized
157 | override fun resume() {
158 | Log.info("KafkaConsumerWorker(group: $groupId): Resuming all io.streamthoughts.kafka.clients.consumer tasks")
159 | consumerTasks.forEach { it.resume() }
160 | }
161 |
162 | data class Builder<K, V>(
163 | var configs: KafkaConsumerConfigs,
164 | var keyDeserializer: Deserializer<K>,
165 | var valueDeserializer: Deserializer<V>,
166 | var onPartitionsAssigned: RebalanceListener? = null,
167 | var onPartitionsRevokedBeforeCommit: RebalanceListener? = null,
168 | var onPartitionsRevokedAfterCommit: RebalanceListener? = null,
169 | var onPartitionsLost: RebalanceListener? = null,
170 | var batchRecordListener: ConsumerBatchRecordsListener<K, V>? = null,
171 | var onDeserializationError: DeserializationErrorHandler<K, V>? = null,
172 | var consumerFactory: ConsumerFactory? = null,
173 | var onConsumedError: ConsumedErrorHandler? = null
174 | ) : ConsumerWorker.Builder<K, V> {
175 |
176 | override fun configure(init: KafkaConsumerConfigs.() -> Unit) {
177 | configs.init()
178 | }
179 |
180 | override fun factory(consumerFactory : ConsumerFactory) =
181 | apply { this.consumerFactory = consumerFactory }
182 |
183 | override fun onPartitionsAssigned(listener: RebalanceListener) =
184 | apply { this.onPartitionsAssigned = listener }
185 |
186 | override fun onPartitionsRevokedBeforeCommit(listener: RebalanceListener) =
187 | apply { this.onPartitionsRevokedBeforeCommit = listener }
188 |
189 | override fun onPartitionsRevokedAfterCommit(listener: RebalanceListener) =
190 | apply { this.onPartitionsRevokedAfterCommit = listener }
191 |
192 | override fun onPartitionsLost(listener: RebalanceListener) =
193 | apply { this.onPartitionsLost = listener }
194 |
195 | override fun onDeserializationError(handler: DeserializationErrorHandler<K, V>) =
196 | apply { onDeserializationError = handler }
197 |
198 | override fun onConsumedError(handler: ConsumedErrorHandler) =
199 | apply { onConsumedError = handler }
200 |
201 | override fun onConsumed(listener: ConsumerBatchRecordsListener<K, V>) =
202 | apply { this.batchRecordListener = listener }
203 |
204 | override fun build(): ConsumerWorker<K, V> =
205 | KafkaConsumerWorker(
206 | configs,
207 | keyDeserializer,
208 | valueDeserializer,
209 | SimpleConsumerAwareRebalanceListener(),
210 | batchRecordListener ?: noop(),
211 | onConsumedError ?: closeTaskOnConsumedError(),
212 | onDeserializationError ?: logAndFailOnInvalidRecord(),
213 | consumerFactory ?: ConsumerFactory.DefaultConsumerFactory
214 | )
215 |
216 | inner class SimpleConsumerAwareRebalanceListener: ConsumerAwareRebalanceListener {
217 | override fun onPartitionsRevokedBeforeCommit(consumer: Consumer<*, *>,
218 | partitions: Collection<TopicPartition>) {
219 | doInvoke(onPartitionsRevokedBeforeCommit, consumer, partitions)
220 | }
221 |
222 | override fun onPartitionsRevokedAfterCommit(consumer: Consumer<*, *>,
223 | partitions: Collection<TopicPartition>) {
224 | doInvoke(onPartitionsRevokedAfterCommit, consumer, partitions)
225 | }
226 |
227 | override fun onPartitionsAssigned(consumer: Consumer<*, *>,
228 | partitions: Collection<TopicPartition>) {
229 | doInvoke(onPartitionsAssigned, consumer, partitions)
230 | }
231 |
232 | override fun onPartitionsLost(consumer: Consumer<*, *>,
233 | partitions: Collection<TopicPartition>) {
234 | doInvoke(onPartitionsLost, consumer, partitions)
235 | }
236 |
237 | private fun doInvoke(listener: RebalanceListener?,
238 | consumer: Consumer<*, *>,
239 | partitions: Collection<TopicPartition>) {
240 | listener?.invoke(consumer, partitions)
241 | }
242 | }
243 | }
244 | }
--------------------------------------------------------------------------------
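
A sketch of how the worker above can be assembled and started, under the generic signatures restored here; the broker address, group id and topic are placeholders, and the forEach helper comes from the listener package shown further below:

import io.streamthoughts.kafka.clients.consumer.KafkaConsumerWorker
import io.streamthoughts.kafka.clients.consumer.consumerConfigsOf
import io.streamthoughts.kafka.clients.consumer.listener.forEach
import kotlinx.coroutines.runBlocking
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer

fun main() = runBlocking {
    val configs = consumerConfigsOf(
        ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to "localhost:9092", // placeholder broker
        ConsumerConfig.GROUP_ID_CONFIG to "demo-group"               // placeholder group id
    )
    // Handlers left unset fall back to the defaults wired in build():
    // closeTaskOnConsumedError() and logAndFailOnInvalidRecord().
    val worker = KafkaConsumerWorker.Builder(configs, StringDeserializer(), StringDeserializer())
        .onConsumed(forEach { _, value: String? -> println("consumed: $value") })
        .build()
    worker.start("demo-topic", maxParallelHint = 2) // spawns two KafkaConsumerTask instances
    worker.joinAll() // suspend until every consumer job completes
}
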
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/TopicSubscription.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import org.apache.kafka.clients.consumer.Consumer
22 | import org.apache.kafka.clients.consumer.ConsumerRebalanceListener
23 | import java.util.regex.Pattern
24 |
25 | fun getTopicSubscription(pattern: Pattern): TopicSubscription = PatternTopicsSubscription(pattern)
26 | fun getTopicSubscription(topic: String): TopicSubscription = ListTopicsSubscription(listOf(topic))
27 | fun getTopicSubscription(topics: List<String>): TopicSubscription = ListTopicsSubscription(topics)
28 |
29 | /**
30 | * The default interface to wrap a [org.apache.kafka.clients.consumer.Consumer] subscription.
31 | */
32 | interface TopicSubscription {
33 |
34 | /**
35 | * @see [Consumer.subscribe]
36 | */
37 | fun subscribe(consumer: Consumer<*, *>, consumerRebalanceListener: ConsumerRebalanceListener)
38 | }
39 |
40 | /**
41 | * A topic list subscription.
42 | */
43 | private class ListTopicsSubscription(private val topics: List<String>): TopicSubscription {
44 |
45 | override fun subscribe(consumer: Consumer<*, *>, consumerRebalanceListener: ConsumerRebalanceListener) {
46 | consumer.subscribe(topics, consumerRebalanceListener)
47 | }
48 |
49 | override fun toString(): String {
50 | return "Subscription(topics=$topics)"
51 | }
52 | }
53 |
54 | /**
55 | * A topic pattern subscription.
56 | */
57 | private class PatternTopicsSubscription(private val pattern: Pattern): TopicSubscription {
58 |
59 | override fun subscribe(consumer: Consumer<*, *>, consumerRebalanceListener: ConsumerRebalanceListener) {
60 | consumer.subscribe(pattern, consumerRebalanceListener)
61 | }
62 |
63 | override fun toString(): String {
64 | return "Subscription(pattern=$pattern)"
65 | }
66 |
67 | }
--------------------------------------------------------------------------------
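
The three factory functions above cover the usual subscription shapes; a quick illustration (topic names are placeholders):

import io.streamthoughts.kafka.clients.consumer.getTopicSubscription
import java.util.regex.Pattern

fun main() {
    val single  = getTopicSubscription("orders")                     // one topic
    val several = getTopicSubscription(listOf("orders", "refunds"))  // explicit list
    val matched = getTopicSubscription(Pattern.compile("orders-.*")) // regex-based
    println(listOf(single, several, matched)) // toString() shows each wrapped subscription
}
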
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/Types.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer
20 |
21 | import org.apache.kafka.clients.consumer.Consumer
22 | import org.apache.kafka.common.TopicPartition
23 |
24 | typealias RebalanceListener = (Consumer<*, *>, Collection<TopicPartition>) -> Unit
--------------------------------------------------------------------------------
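
Since RebalanceListener is a plain function type, a listener can be declared as a lambda; a trivial sketch:

import io.streamthoughts.kafka.clients.consumer.RebalanceListener

// Logs the partitions involved in a rebalance; the Consumer argument is unused here.
val logPartitions: RebalanceListener = { _, partitions ->
    println("rebalanced partitions: $partitions")
}
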
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/ConsumedErrorHandler.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer.error
20 |
21 | import io.streamthoughts.kafka.clients.consumer.ConsumerTask
22 | import org.apache.kafka.clients.consumer.ConsumerRecord
23 |
24 | /**
25 | * Handles errors thrown during the processing of a non-empty batch of [ConsumerRecord]
26 | * using a given [io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener]
27 | */
28 | interface ConsumedErrorHandler {
29 |
30 | /**
31 | * This method is invoked when an [thrownException] is thrown while a [consumerTask] is processing
32 | * a non-empty batch of [records].
33 | *
34 | * @param consumerTask the [ConsumerTask] polling records.
35 | * @param records the remaining [records] to be processed (including the one that failed).
36 | * @param thrownException the [Exception] that was thrown while processing [records].
37 | */
38 | fun handle(consumerTask: ConsumerTask, records: List<ConsumerRecord<*, *>>, thrownException: Exception)
39 | }
--------------------------------------------------------------------------------
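
A hypothetical custom handler sketched against the interface above: it tolerates a few failed batches before giving up. Only close(Duration) is assumed from ConsumerTask, as the built-in handlers in ConsumedErrorHandlers.kt below use it the same way:

import io.streamthoughts.kafka.clients.consumer.ConsumerTask
import io.streamthoughts.kafka.clients.consumer.error.ConsumedErrorHandler
import org.apache.kafka.clients.consumer.ConsumerRecord
import java.time.Duration
import java.util.concurrent.atomic.AtomicInteger

class CloseAfterFailuresErrorHandler(private val maxFailures: Int = 3) : ConsumedErrorHandler {

    private val failures = AtomicInteger(0)

    override fun handle(consumerTask: ConsumerTask, records: List<ConsumerRecord<*, *>>, thrownException: Exception) {
        if (failures.incrementAndGet() >= maxFailures) {
            consumerTask.close(Duration.ZERO) // give up after repeated failures
        }
        // Otherwise do nothing: offsets are not committed, so the batch is polled again.
    }
}
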
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/ConsumedErrorHandlers.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer.error
20 |
21 | import io.streamthoughts.kafka.clients.consumer.ConsumerTask
22 | import io.streamthoughts.kafka.clients.loggerFor
23 | import org.apache.kafka.clients.consumer.ConsumerRecord
24 | import org.apache.kafka.clients.consumer.OffsetAndMetadata
25 | import org.apache.kafka.common.TopicPartition
26 | import org.slf4j.Logger
27 | import java.time.Duration
28 | import kotlin.math.max
29 |
30 | fun closeTaskOnConsumedError(): ConsumedErrorHandler = CloseTaskOnConsumedError
31 | fun logAndCommitOnConsumedError(): ConsumedErrorHandler = LogAndCommitOnConsumedError
32 |
33 | /**
34 | * Stops the [ConsumerTask] when an error is thrown while a non-empty batch of [ConsumerRecord] is being processed
35 | * by a [io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener].
36 | */
37 | private object CloseTaskOnConsumedError: ConsumedErrorHandler {
38 |
39 | private val Log: Logger = loggerFor(CloseTaskOnConsumedError.javaClass)
40 |
41 | override fun handle(consumerTask: ConsumerTask, records: List<ConsumerRecord<*, *>>, thrownException: Exception) {
42 | Log.error("Stopping consumerTask after an exception was thrown while processing records", thrownException)
43 | consumerTask.close(Duration.ZERO)
44 | }
45 | }
46 |
47 | /**
48 | * Log and skips all records when an error is thrown while a non-empty batch of [ConsumerRecord] is being processed
49 | * by a [io.streamthoughts.kafka.clients.consumer.listener.ConsumerBatchRecordsListener].
50 | *
51 | * This [ConsumedErrorHandler] will commit the offsets for the failing records batch.
52 | */
53 | private object LogAndCommitOnConsumedError: ConsumedErrorHandler {
54 | private val Log: Logger = loggerFor(LogAndCommitOnConsumedError.javaClass)
55 |
56 | override fun handle(consumerTask: ConsumerTask, records: List<ConsumerRecord<*, *>>, thrownException: Exception) {
57 | Log.error("Failed to process records: $records. Ignore and continue processing.", thrownException)
58 | // The ConsumerTask doesn't automatically commit consumer offsets after an exception is thrown.
59 | // Thus, we have to manually commit offsets to actually skip records.
60 | consumerTask.commitSync(offsetsToCommitFor(records))
61 | }
62 |
63 | private fun offsetsToCommitFor(records: List<ConsumerRecord<*, *>>): Map<TopicPartition, OffsetAndMetadata> {
64 | val offsetsToCommit: MutableMap<TopicPartition, OffsetAndMetadata> = HashMap()
65 | records.forEach { r ->
66 | val partition = TopicPartition(r.topic(), r.partition())
67 | val current = offsetsToCommit[partition]?.offset() ?: 0
68 | offsetsToCommit[partition] = OffsetAndMetadata(max(current, r.offset() + 1))
69 | }
70 | return offsetsToCommit
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
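
The offsets computed by offsetsToCommitFor are the positions just after the highest failed record per partition, so committing them skips the whole failed batch. A worked example (topic name is a placeholder):

import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
import kotlin.math.max

fun main() {
    // A failed batch from partition orders-0 holding offsets 41, 42 and 43:
    val failedOffsets = listOf(41L, 42L, 43L)
    var next = 0L
    failedOffsets.forEach { next = max(next, it + 1) } // max(offset) + 1 = 44
    val toCommit = mapOf(TopicPartition("orders", 0) to OffsetAndMetadata(next))
    println(toCommit) // the next poll resumes at offset 44, after the skipped records
}
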
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/serialization/DeserializationErrorHandler.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer.error.serialization
20 |
21 | import org.apache.kafka.clients.consumer.ConsumerRecord
22 |
23 | /**
24 | * Handles errors thrown while deserializing the raw key or value of a [ConsumerRecord].
25 | */
26 | interface DeserializationErrorHandler<K, V> {
27 |
28 | sealed class Response<K, V> {
29 | data class Replace<K, V>(val key: K?, val value: V?): Response<K, V>()
30 | class Fail<K, V>: Response<K, V>()
31 | class Skip<K, V>: Response<K, V>()
32 | }
33 |
34 | /**
35 | * Handles the [error] that has been thrown while de-serializing the given raw [record].
36 | */
37 | fun handle(record: ConsumerRecord<ByteArray?, ByteArray?>, error: Exception): Response<K, V>
38 | }
--------------------------------------------------------------------------------
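
A hypothetical handler written against the interface above (assuming the generic signatures restored here): it swaps in a sentinel value whenever a record cannot be deserialized:

import io.streamthoughts.kafka.clients.consumer.error.serialization.DeserializationErrorHandler
import org.apache.kafka.clients.consumer.ConsumerRecord

class SentinelOnInvalidRecord : DeserializationErrorHandler<String, String> {

    override fun handle(record: ConsumerRecord<ByteArray?, ByteArray?>, error: Exception):
            DeserializationErrorHandler.Response<String, String> =
        // Keep consuming, but make the failure visible downstream.
        DeserializationErrorHandler.Response.Replace(null, "<invalid>")
}
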
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/error/serialization/DeserializationErrorHandlers.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer.error.serialization
20 |
21 | import io.streamthoughts.kafka.clients.loggerFor
22 | import org.apache.kafka.clients.consumer.ConsumerRecord
23 |
24 | fun <K, V> replaceWithOnInvalidRecord(key: K, value: V): DeserializationErrorHandler<K, V> = ReplaceErrorHandler(key, value)
25 |
26 | fun <K, V> replaceWithNullOnInvalidRecord(): DeserializationErrorHandler<K, V> = ReplaceErrorHandler()
27 |
28 | fun <K, V> logAndFailOnInvalidRecord(): DeserializationErrorHandler<K, V> = LogAndFailErrorHandler()
29 |
30 | fun <K, V> logAndSkipOnInvalidRecord(): DeserializationErrorHandler<K, V> = LogAndSkipErrorHandler()
31 |
32 | private class ReplaceErrorHandler<K, V>(
33 | private val key: K? = null,
34 | private val value: V? = null
35 | ): DeserializationErrorHandler<K, V> {
36 |
37 | companion object {
38 | private val Log = loggerFor(DeserializationErrorHandler::class.java)
39 | }
40 |
41 | override fun handle(
42 | record: ConsumerRecord<ByteArray?, ByteArray?>,
43 | error: Exception
44 | ): DeserializationErrorHandler.Response<K, V> {
45 | Log.warn("Cannot deserialize record:" +
46 | " topic = ${record.topic()}" +
47 | ", partition = ${record.partition()}" +
48 | ", offset = ${record.offset()}" +
49 | ". Replace key and value.", error)
50 | return DeserializationErrorHandler.Response.Replace(key, value)
51 | }
52 | }
53 |
54 | private class LogAndSkipErrorHandler<K, V>: DeserializationErrorHandler<K, V> {
55 |
56 | companion object {
57 | private val Log = loggerFor(LogAndSkipErrorHandler::class.java)
58 | }
59 |
60 | override fun handle(
61 | record: ConsumerRecord<ByteArray?, ByteArray?>,
62 | error: Exception
63 | ): DeserializationErrorHandler.Response<K, V> {
64 | Log.error("Cannot deserialize record:" +
65 | " topic = ${record.topic()}" +
66 | ", partition = ${record.partition()}" +
67 | ", offset = ${record.offset()}" +
68 | ". Skip and continue.", error)
69 | return DeserializationErrorHandler.Response.Skip()
70 | }
71 | }
72 |
73 | private class LogAndFailErrorHandler<K, V>: DeserializationErrorHandler<K, V> {
74 |
75 | companion object {
76 | private val Log = loggerFor(LogAndFailErrorHandler::class.java)
77 | }
78 |
79 | override fun handle(
80 | record: ConsumerRecord<ByteArray?, ByteArray?>,
81 | error: Exception
82 | ): DeserializationErrorHandler.Response<K, V> {
83 | Log.error("Cannot deserialize record:" +
84 | " topic = ${record.topic()}" +
85 | ", partition = ${record.partition()}" +
86 | ", offset = ${record.offset()}" +
87 | ". Fail consumption.", error)
88 | return DeserializationErrorHandler.Response.Fail()
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
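
Each factory above returns a ready-to-use policy for KafkaConsumerWorker.Builder.onDeserializationError(...); a quick comparison sketch, with explicit type arguments since nothing else constrains K and V here:

import io.streamthoughts.kafka.clients.consumer.error.serialization.logAndFailOnInvalidRecord
import io.streamthoughts.kafka.clients.consumer.error.serialization.logAndSkipOnInvalidRecord
import io.streamthoughts.kafka.clients.consumer.error.serialization.replaceWithNullOnInvalidRecord
import io.streamthoughts.kafka.clients.consumer.error.serialization.replaceWithOnInvalidRecord

val failFast = logAndFailOnInvalidRecord<String, String>()      // stop consumption
val skip     = logAndSkipOnInvalidRecord<String, String>()      // drop the record
val nullify  = replaceWithNullOnInvalidRecord<String, String>() // deliver null key/value
val sentinel = replaceWithOnInvalidRecord("n/a", "n/a")         // deliver fixed fallbacks
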
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/consumer/listener/ConsumerBatchRecordsListener.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.consumer.listener
20 |
21 | import io.streamthoughts.kafka.clients.consumer.ConsumerTask
22 | import org.apache.kafka.clients.consumer.ConsumerRecord
23 | import org.apache.kafka.clients.consumer.ConsumerRecords
24 |
25 | fun <K, V> noop(): ConsumerBatchRecordsListener<K, V> {
26 | return DelegatingConsumerBatchRecordsListener { _: ConsumerTask, _: ConsumerRecords<K?, V?> -> }
27 | }
28 |
29 | @JvmName("onConsumedRecord")
30 | fun forEach(callback: (consumerTask: ConsumerTask, record: ConsumerRecord) -> Unit)
31 | : ConsumerBatchRecordsListener {
32 | return DelegatingConsumerBatchRecordsListener { task, records ->
33 | records.onEach { callback(task, it) }
34 | }
35 | }
36 |
37 | @JvmName("onConsumedValueRecordWithKey")
38 | fun forEach(callback: (consumerTask: ConsumerTask, record: Pair) -> Unit)
39 | : ConsumerBatchRecordsListener {
40 | return DelegatingConsumerBatchRecordsListener { task, records ->
41 | records.onEach { callback(task, Pair(it.key(), it.value())) }
42 | }
43 | }
44 |
45 | @JvmName("onConsumedValueRecord")
46 | fun forEach(callback: (consumerTask: ConsumerTask, value: V?) -> Unit)
47 | : ConsumerBatchRecordsListener {
48 | return DelegatingConsumerBatchRecordsListener { task, records ->
49 | records.onEach { callback(task, it.value()) }
50 | }
51 | }
52 |
53 | interface ConsumerBatchRecordsListener<K, V> {
54 |
55 | /**
56 | * This method is invoked after the [consumerTask] has polled a non-empty batch of [records].
57 | *
58 | * @see [org.apache.kafka.clients.consumer.Consumer.poll]
59 | */
60 | fun handle(consumerTask: ConsumerTask, records: ConsumerRecords<K?, V?>)
61 | }
62 |
63 | private class DelegatingConsumerBatchRecordsListener<K, V>(
64 | private val callback: (consumerTask: ConsumerTask, records: ConsumerRecords<K?, V?>) -> Unit
65 | ): ConsumerBatchRecordsListener<K, V> {
66 |
67 | override fun handle(consumerTask: ConsumerTask, records: ConsumerRecords<K?, V?>) {
68 | callback(consumerTask, records)
69 | }
70 | }
--------------------------------------------------------------------------------
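
The three forEach factories above differ only in what they hand to the callback; a sketch assuming the generic parameters restored here (explicit parameter types pick the intended overload):

import io.streamthoughts.kafka.clients.consumer.listener.forEach
import org.apache.kafka.clients.consumer.ConsumerRecord

val perRecord = forEach { _, record: ConsumerRecord<String?, String?> -> println(record.offset()) }
val perPair   = forEach { _, (key, value): Pair<String?, String?> -> println("$key=$value") }
val perValue  = forEach<String, String> { _, value: String? -> println(value) }
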
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/Acks.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.producer
20 |
21 | /**
22 | * The number of acknowledgments the producer requires the leader to have received before considering a request complete.
23 | * This controls the durability of records that are sent.
24 | *
25 | * @see org.apache.kafka.clients.producer.ProducerConfig.ACKS_CONFIG
26 | */
27 | object Acks {
28 | /**
29 | * The producer will not wait for any acknowledgment from the server at all.
30 | */
31 | const val None = "0"
32 | /**
33 | * The producer will wait for the broker leader to acknowledge the record.
34 | */
35 | const val Leader = "1"
36 |
37 | /**
38 | * The leader will wait for the full set of in-sync replicas to acknowledge the record.
39 | */
40 | const val InSyncReplicas = "all"
41 | }
--------------------------------------------------------------------------------
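
The constants above plug directly into the producer configuration DSL defined in KafkaProducerConfigs.kt; for example:

import io.streamthoughts.kafka.clients.producer.Acks
import io.streamthoughts.kafka.clients.producer.emptyProducerConfigs

// Strongest durability: a send is only acknowledged once all in-sync replicas have it.
val configs = emptyProducerConfigs().acks(Acks.InSyncReplicas)
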
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/KafkaProducerConfigs.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.producer
20 |
21 | import io.streamthoughts.kafka.clients.KafkaClientConfigs
22 | import io.streamthoughts.kafka.clients.load
23 | import io.streamthoughts.kafka.clients.toStringMap
24 | import org.apache.kafka.clients.producer.ProducerConfig
25 | import org.apache.kafka.common.record.CompressionType
26 | import java.io.InputStream
27 | import java.time.Duration
28 | import java.util.*
29 |
30 | /**
31 | * Uses to build and encapsulate a configuration [Map]
32 | * for creating a new [org.apache.kafka.clients.producer.KafkaProducer]
33 | *
34 | * @see [ProducerConfig]
35 | */
36 | class KafkaProducerConfigs(props: Map<String, Any?> = emptyMap()) : KafkaClientConfigs(props) {
37 |
38 | override fun with(key: String, value: Any?) = apply { super.with(key, value) }
39 |
40 | fun client(init: KafkaClientConfigs.() -> Unit) = apply { this.init() }
41 |
42 | /**
43 | * @see ProducerConfig.ACKS_CONFIG
44 | */
45 | fun acks(acks: String) =
46 | apply { this[ProducerConfig.ACKS_CONFIG] = acks }
47 |
48 | /**
49 | * @see ProducerConfig.BATCH_SIZE_CONFIG
50 | */
51 | fun batchSize(batchSize: Int) =
52 | apply { this[ProducerConfig.BATCH_SIZE_CONFIG] = batchSize }
53 |
54 | /**
55 | * @see ProducerConfig.BUFFER_MEMORY_CONFIG
56 | */
57 | fun bufferMemory(bufferMemory: Long) =
58 | apply { this[ProducerConfig.BUFFER_MEMORY_CONFIG] = bufferMemory }
59 |
60 | /**
61 | * @see ProducerConfig.COMPRESSION_TYPE_CONFIG
62 | */
63 | fun compressionType(compressionType: CompressionType) =
64 | apply { this[ProducerConfig.COMPRESSION_TYPE_CONFIG] = compressionType.toString().toLowerCase() }
65 |
66 | /**
67 | * @see ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG
68 | */
69 | fun deliveryTimeout(deliveryTimeout: Duration) =
70 | apply { this[ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG] = deliveryTimeout.toMillis() }
71 |
72 | /**
73 | * @see ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG
74 | */
75 | fun enableIdempotence(enableIdempotence: Boolean) =
76 | apply { this[ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG] = enableIdempotence }
77 |
78 | /**
79 | * @see ProducerConfig.LINGER_MS_CONFIG
80 | */
81 | fun lingerMs(lingerMs: Long) =
82 | apply { this[ProducerConfig.LINGER_MS_CONFIG] = lingerMs }
83 |
84 | /**
85 | * @see ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION
86 | */
87 | fun maxInFlightRequestsPerConnection(maxInFlightRequestsPerConnection: Int) =
88 | apply { this[ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION] = maxInFlightRequestsPerConnection }
89 |
90 | /**
91 | * @see ProducerConfig.MAX_BLOCK_MS_CONFIG
92 | */
93 | fun maxBlock(maxBlockDuration: Duration) =
94 | apply { this[ProducerConfig.MAX_BLOCK_MS_CONFIG] = maxBlockDuration.toMillis() }
95 |
96 | /**
97 | * @see ProducerConfig.RETRIES_CONFIG
98 | */
99 | fun retries(retries: Int) =
100 | apply { this[ProducerConfig.RETRIES_CONFIG] = retries }
101 |
102 | /**
103 | * @see ProducerConfig.RETRY_BACKOFF_MS_CONFIG
104 | */
105 | fun retryBackoff(retryBackoff: Long) =
106 | apply { this[ProducerConfig.RETRY_BACKOFF_MS_CONFIG] = retryBackoff }
107 |
108 | /**
109 | * @see ProducerConfig.TRANSACTIONAL_ID_CONFIG
110 | */
111 | fun transactionalId(transactionalId: String) =
112 | apply { this[ProducerConfig.TRANSACTIONAL_ID_CONFIG] = transactionalId }
113 |
114 | /**
115 | * @see ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG
116 | */
117 | fun keySerializer(keySerializer: String) =
118 | apply { this[ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG] = keySerializer }
119 |
120 | /**
121 | * @see ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG
122 | */
123 | fun valueSerializer(valueSerializer: String) =
124 | apply { this[ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG] = valueSerializer }
125 | }
126 |
127 | /**
128 | * Creates a new empty [KafkaProducerConfigs].
129 | */
130 | fun emptyProducerConfigs(): KafkaProducerConfigs = KafkaProducerConfigs(emptyMap())
131 |
132 | /**
133 | * Creates a new [KafkaProducerConfigs] with the given [pairs].
134 | */
135 | fun producerConfigsOf(vararg pairs: Pair<String, Any?>): KafkaProducerConfigs = producerConfigsOf(mapOf(*pairs))
136 |
137 | /**
138 | * Creates a new [KafkaProducerConfigs] with the given [props].
139 | */
140 | fun producerConfigsOf(props: Map<String, Any?>): KafkaProducerConfigs = KafkaProducerConfigs(props)
141 |
142 | /**
143 | * Creates a new [KafkaProducerConfigs] with the given [props].
144 | */
145 | fun producerConfigsOf(props: Properties): KafkaProducerConfigs = producerConfigsOf(props.toStringMap())
146 |
147 | /**
148 | * Convenient method to create and populate a new [KafkaProducerConfigs] from a [configFile].
149 | */
150 | fun loadProducerConfigs(configFile: String): KafkaProducerConfigs = KafkaProducerConfigs().load(configFile)
151 |
152 | /**
153 | * Convenient method to create and populate a new [KafkaProducerConfigs] from an [inputStream].
154 | */
155 | fun loadProducerConfigs(inputStream: InputStream): KafkaProducerConfigs = KafkaProducerConfigs().load(inputStream)
--------------------------------------------------------------------------------
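
A usage sketch of the DSL above; the broker address is a placeholder, and every setter returns the same instance so calls chain:

import io.streamthoughts.kafka.clients.producer.Acks
import io.streamthoughts.kafka.clients.producer.producerConfigsOf
import org.apache.kafka.clients.producer.ProducerConfig
import java.time.Duration

fun main() {
    val configs = producerConfigsOf(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG to "localhost:9092")
        .acks(Acks.Leader)
        .batchSize(16_384)                       // bytes per partition batch
        .lingerMs(5)                             // wait up to 5 ms to fill a batch
        .deliveryTimeout(Duration.ofSeconds(30))
        .retries(3)
    println(configs)
}
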
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/KafkaProducerContainer.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.producer
20 |
21 | import ch.qos.logback.classic.Level
22 | import io.streamthoughts.kafka.clients.KafkaRecord
23 | import io.streamthoughts.kafka.clients.loggerFor
24 | import io.streamthoughts.kafka.clients.producer.callback.OnSendErrorCallback
25 | import io.streamthoughts.kafka.clients.producer.callback.OnSendSuccessCallback
26 | import io.streamthoughts.kafka.clients.producer.callback.ProducerSendCallback
27 | import org.apache.kafka.clients.producer.KafkaProducer
28 | import org.apache.kafka.clients.producer.Producer
29 | import org.apache.kafka.clients.producer.ProducerConfig
30 | import org.apache.kafka.clients.producer.ProducerRecord
31 | import org.apache.kafka.clients.producer.RecordMetadata
32 | import org.apache.kafka.common.Metric
33 | import org.apache.kafka.common.MetricName
34 | import org.apache.kafka.common.PartitionInfo
35 | import org.apache.kafka.common.errors.AuthorizationException
36 | import org.apache.kafka.common.errors.OutOfOrderSequenceException
37 | import org.apache.kafka.common.errors.ProducerFencedException
38 | import org.apache.kafka.common.serialization.Serializer
39 | import org.slf4j.Logger
40 | import java.time.Duration
41 | import java.time.Instant
42 | import java.util.concurrent.CompletableFuture
43 | import java.util.concurrent.Future
44 | import java.util.concurrent.atomic.AtomicInteger
45 |
46 | /**
47 | * The default kafka-based [ProducerContainer] implementation
48 | */
49 | class KafkaProducerContainer<K, V> private constructor(
50 | private val configs: KafkaProducerConfigs,
51 | private val onSendCallback: ProducerSendCallback<K, V>,
52 | private val keySerializer: Serializer<K>? = null,
53 | private val valueSerializer: Serializer<V>? = null,
54 | private val producerFactory: ProducerFactory? = null,
55 | private val defaultTopic: String? = null
56 | ): ProducerContainer<K, V> {
57 |
58 | companion object {
59 | private val Log: Logger = loggerFor(ProducerContainer::class.java)
60 | private val numInstances = AtomicInteger(0)
61 | private const val defaultClientIdPrefix = "producer-"
62 |
63 | private fun computeNextClientId(producerConfigs: Map<String, Any?>): String {
64 | val clientIdPrefix = producerConfigs[ProducerConfig.CLIENT_ID_CONFIG] ?: defaultClientIdPrefix
65 | return "$clientIdPrefix-${numInstances.incrementAndGet()}"
66 | }
67 | }
68 |
69 | @Volatile
70 | private var state = ProducerContainer.State.CREATED
71 |
72 | private var transactionId: String? = null
73 | private lateinit var clientId: String
74 | private lateinit var producer: Producer<K?, V?>
75 |
76 | override fun send(
77 | records: Collection<Pair<K?, V?>>,
78 | topic: String?,
79 | partition: Int?,
80 | timestamp: Instant?,
81 | onSuccess: OnSendSuccessCallback<K, V>?,
82 | onError: OnSendErrorCallback<K, V>?
83 | ): Future<List<SendResult<K?, V?>>> {
84 | val futures: List<CompletableFuture<SendResult<K?, V?>>> = records.map {
85 | send(it.first, it.second, topic, partition, timestamp, onSuccess, onError) as CompletableFuture<SendResult<K?, V?>>
86 | }
87 | return CompletableFuture.allOf(*futures.toTypedArray()).thenApply { futures.map { it.join() }.toList() }
88 | }
89 |
90 | override fun send(
91 | record: KafkaRecord<K?, V?>,
92 | onSuccess: OnSendSuccessCallback<K, V>?,
93 | onError: OnSendErrorCallback<K, V>?
94 | ): Future<SendResult<K?, V?>> {
95 | val producerRecord = KafkaRecord(
96 | record.key,
97 | record.value,
98 | record.topic ?: defaultTopic,
99 | record.partition,
100 | record.timestamp
101 | ).toProducerRecord()
102 | return send(producerRecord, onSuccess, onError)
103 | }
104 |
105 | override fun send(
106 | record: ProducerRecord<K?, V?>,
107 | onSuccess: OnSendSuccessCallback<K, V>?,
108 | onError: OnSendErrorCallback<K, V>?
109 | ) : CompletableFuture<SendResult<K?, V?>> {
110 | return runOrThrowIfIllegalState {
111 | val future = CompletableFuture<SendResult<K?, V?>>()
112 | logWithProducerInfo(Level.DEBUG, "Sending record $record")
113 | producer.send(record) { metadata: RecordMetadata, exception: Exception? ->
114 |
115 | if (exception != null) {
116 | future.completeExceptionally(exception)
117 |
118 | (onError?.let { DelegateSendCallback(onError = onError) } ?: onSendCallback)
119 | .onSendError(this, record, exception)
120 | } else {
121 | future.complete(SendResult(record, metadata))
122 |
123 | (onSuccess?.let { DelegateSendCallback(onSuccess = onSuccess) } ?: onSendCallback)
124 | .onSendSuccess(this, record, metadata)
125 | }
126 | }
127 | future
128 | }
129 | }
130 |
131 | override fun <T> execute(action: (producer: Producer<K?, V?>) -> T) = run { action(producer) }
132 |
133 | override fun runTx(action: (ProducerContainer<K, V>) -> Unit): TransactionResult {
134 | return runOrThrowIfIllegalState {
135 | try {
136 | producer.beginTransaction()
137 | action.invoke(this)
138 | producer.commitTransaction()
139 | CommittedTransactionResult
140 | } catch (e: Exception) {
141 | when (e) {
142 | is ProducerFencedException,
143 | is OutOfOrderSequenceException,
144 | is AuthorizationException -> {
145 | logWithProducerInfo(
146 | Level.ERROR,
147 | "Unrecoverable error happened while executing producer transactional action. Close producer immediately",
148 | e
149 | )
150 | close()
151 | UnrecoverableErrorTransactionResult(e)
152 | }
153 | else -> {
154 | logWithProducerInfo(
155 | Level.ERROR,
156 | "Error happened while executing producer transactional action. Abort current transaction",
157 | e
158 | )
159 | producer.abortTransaction()
160 | AbortedTransactionResult(e)
161 | }
162 | }
163 | // the when-expression above is the last expression of this catch block and supplies its result
164 | }
165 | }
166 | }
167 |
168 | override fun init() {
169 | if (isInitialized()) {
170 | throw IllegalStateException("Producer is already initialized")
171 | }
172 | val producerConfigs = HashMap(configs)
173 | clientId = computeNextClientId(producerConfigs).also { producerConfigs[ProducerConfig.CLIENT_ID_CONFIG] = it }
174 | transactionId = producerConfigs[ProducerConfig.TRANSACTIONAL_ID_CONFIG]?.toString()
175 | logWithProducerInfo(Level.INFO, "Initializing")
176 | producer = producerFactory?.make(producerConfigs, keySerializer, valueSerializer) ?: KafkaProducer(producerConfigs, keySerializer, valueSerializer)
177 | if (ProducerConfig.TRANSACTIONAL_ID_CONFIG in producerConfigs)
178 | producer.initTransactions()
179 | state = ProducerContainer.State.STARTED
180 | }
181 |
182 | override fun metrics(topic: String): Map<MetricName, Metric> {
183 | return runOrThrowIfIllegalState {
184 | producer.metrics()
185 | }
186 | }
187 |
188 | override fun partitionsFor(topic: String): List<PartitionInfo> {
189 | return runOrThrowIfIllegalState {
190 | producer.partitionsFor(topic)
191 | }
192 | }
193 |
194 | override fun flush() {
195 | runOrThrowIfIllegalState {
196 | logWithProducerInfo(Level.DEBUG, "Flushing")
197 | producer.flush()
198 | }
199 | }
200 |
201 | override fun close(timeout: Duration) {
202 | if (isClosed() || !isInitialized()) return // silently ignore call if producer is already closed.
203 |
204 | runOrThrowIfIllegalState {
205 | state = ProducerContainer.State.PENDING_SHUTDOWN
206 | logWithProducerInfo(Level.INFO, "Closing")
207 | producer.close(timeout)
208 | state = ProducerContainer.State.CLOSED
209 | logWithProducerInfo(Level.INFO, "Closed")
210 | }
211 | }
212 |
213 | private fun isClosed() =
214 | state == ProducerContainer.State.CLOSED ||
215 | state == ProducerContainer.State.PENDING_SHUTDOWN
216 |
217 | private fun isInitialized() = this::producer.isInitialized
218 |
219 | private fun runOrThrowIfIllegalState(action: () -> R): R {
220 | if (!isInitialized()) throw IllegalStateException("Producer is not initialized yet")
221 | if (isClosed()) throw IllegalStateException("Cannot perform operation after producer has been closed")
222 | return action.invoke()
223 | }
224 |
225 | private fun logWithProducerInfo(level: Level, msg: String, exception: Exception? = null) {
226 | val message = "Producer (clientId=$clientId${transactionId?.let { ", transactionalId=$it" } ?: ""}): $msg"
227 | when(level) {
228 | Level.ERROR -> Log.error(message, exception)
229 | Level.WARN -> Log.warn(message)
230 | Level.INFO -> Log.info(message)
231 | Level.DEBUG -> Log.debug(message)
232 | else -> Log.debug(message)
233 | }
234 | }
235 |
236 | override fun state(): ProducerContainer.State = state
237 |
238 | data class Builder<K, V>(
239 | var configs: KafkaProducerConfigs,
240 | var keySerializer: Serializer<K>? = null,
241 | var valueSerializer: Serializer<V>? = null,
242 | var producerFactory: ProducerFactory? = null,
243 | var defaultTopic: String? = null,
244 | var onSendSuccess: OnSendSuccessCallback<K, V>? = null,
245 | var onSendError: OnSendErrorCallback<K, V>? = null,
246 | var onSendCallback: ProducerSendCallback<K, V>? = null
247 | ) : ProducerContainer.Builder<K, V> {
248 |
249 | override fun configure(init: KafkaProducerConfigs.() -> Unit) =
250 | apply { configs.init() }
251 |
252 | override fun defaultTopic(topic: String) =
253 | apply { this.defaultTopic = topic }
254 |
255 | override fun producerFactory(producerFactory: ProducerFactory) =
256 | apply { this.producerFactory = producerFactory }
257 |
258 | override fun onSendError(callback: OnSendErrorCallback<K, V>) =
259 | apply { this.onSendError = callback }
260 |
261 | override fun onSendSuccess(callback: OnSendSuccessCallback<K, V>) =
262 | apply { this.onSendSuccess = callback }
263 |
264 | override fun onSendCallback(callback: ProducerSendCallback<K, V>) =
265 | apply { this.onSendCallback = callback }
266 |
267 | override fun keySerializer(serializer: Serializer<K>) =
268 | apply { this.keySerializer = serializer }
269 |
270 | override fun valueSerializer(serializer: Serializer<V>) =
271 | apply { this.valueSerializer = serializer }
272 |
273 | fun build(): ProducerContainer<K, V> = KafkaProducerContainer(
274 | configs,
275 | onSendCallback ?: DelegateSendCallback(onSendSuccess, onSendError),
276 | keySerializer,
277 | valueSerializer,
278 | producerFactory,
279 | defaultTopic
280 | )
281 | }
282 |
283 | private class DelegateSendCallback<K, V>(
284 | private val onSuccess: OnSendSuccessCallback<K, V>? = null,
285 | private val onError: OnSendErrorCallback<K, V>? = null
286 | ) : ProducerSendCallback<K, V> {
287 |
288 | override fun onSendError(
289 | container: ProducerContainer<K, V>,
290 | record: ProducerRecord<K?, V?>,
291 | error: Exception
292 | ) {
293 | this.onError?.invoke(container, record, error)
294 | }
295 | override fun onSendSuccess(
296 | container: ProducerContainer<K, V>,
297 | record: ProducerRecord<K?, V?>,
298 | metadata: RecordMetadata
299 | ) {
300 | onSuccess?.invoke(container, record, metadata)
301 | }
302 | }
303 | }
--------------------------------------------------------------------------------
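
A lifecycle sketch for the container above: build, init(), send, close. The broker address and topic are placeholders, and the success callback is written as a lambda on the assumption that OnSendSuccessCallback is a function type, as its invoke() usage in DelegateSendCallback suggests:

import io.streamthoughts.kafka.clients.producer.KafkaProducerContainer
import io.streamthoughts.kafka.clients.producer.producerConfigsOf
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.StringSerializer

fun main() {
    val container = KafkaProducerContainer.Builder<String, String>(
        producerConfigsOf(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG to "localhost:9092") // placeholder
    )
        .defaultTopic("demo-topic") // used when send() gets no explicit topic
        .keySerializer(StringSerializer())
        .valueSerializer(StringSerializer())
        .onSendSuccess { _, _, metadata -> println("acked: $metadata") }
        .build()

    container.init() // required: sends throw IllegalStateException before init()
    container.send(value = "hello, kafka")
    container.flush()
    container.close()
}
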
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/ProducerContainer.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.producer
20 |
21 | import io.streamthoughts.kafka.clients.KafkaRecord
22 | import io.streamthoughts.kafka.clients.producer.callback.OnSendErrorCallback
23 | import io.streamthoughts.kafka.clients.producer.callback.OnSendSuccessCallback
24 | import io.streamthoughts.kafka.clients.producer.callback.ProducerSendCallback
25 | import org.apache.kafka.clients.producer.Producer
26 | import org.apache.kafka.clients.producer.ProducerRecord
27 | import org.apache.kafka.common.Metric
28 | import org.apache.kafka.common.MetricName
29 | import org.apache.kafka.common.PartitionInfo
30 | import org.apache.kafka.common.serialization.Serializer
31 | import java.io.Closeable
32 | import java.time.Duration
33 | import java.time.Instant
34 | import java.util.concurrent.Future
35 |
36 |
37 | sealed class TransactionResult
38 | /**
39 | * The transaction has been aborted due to an exception.
40 | * @see Producer.abortTransaction
41 | */
42 | data class AbortedTransactionResult(val exception: Exception): TransactionResult()
43 |
44 | /**
45 | * The transaction throws an unrecoverable error. Hence the [Producer] has been closed.
46 | *
47 | */
48 | data class UnrecoverableErrorTransactionResult(val exception: Exception): TransactionResult()
49 |
50 | /**
51 | * The transaction has been successfully committed
52 | * @see Producer.commitTransaction
53 | */
54 | object CommittedTransactionResult: TransactionResult()
55 |
56 | interface ProducerContainer<K, V>: Closeable {
57 |
58 | enum class State {
59 | /**
60 | * The [ProducerContainer] is created.
61 | */
62 | CREATED,
63 |
64 | /**
65 | * The [ProducerContainer] is initialized and can be used for sending records.
66 | */
67 | STARTED,
68 |
69 | /**
70 | * The [ProducerContainer] is closing
71 | */
72 | PENDING_SHUTDOWN,
73 |
74 | /**
75 | * The [ProducerContainer] is closed.
76 | */
77 | CLOSED,
78 | }
79 |
80 | interface Builder<K, V> {
81 | /**
82 | * Configure this [ProducerContainer].
83 | */
84 | fun configure(init: KafkaProducerConfigs.() -> Unit): Builder<K, V>
85 |
86 | /**
87 | * Sets the [producerFactory] to be used for creating a new [Producer] client.
88 | */
89 | fun producerFactory(producerFactory: ProducerFactory): Builder<K, V>
90 |
91 | /**
92 | * Sets the default topic to which records are sent.
93 | */
94 | fun defaultTopic(topic: String): Builder<K, V>
95 |
96 | /**
97 | * Sets the default [callback] to invoke when an error happens while sending a record.
98 | */
99 | fun onSendError(callback: OnSendErrorCallback<K, V>): Builder<K, V>
100 |
101 | /**
102 | * Sets the default [callback] to invoke when a record has been sent successfully.
103 | */
104 | fun onSendSuccess(callback: OnSendSuccessCallback<K, V>): Builder<K, V>
105 |
106 | /**
107 | * Sets the default [callback] to be invoked after a sent record completes either successfully or unsuccessfully.
108 | *
109 | * @see onSendError
110 | * @see onSendSuccess
111 | */
112 | fun onSendCallback(callback: ProducerSendCallback<K, V>): Builder<K, V>
113 |
114 | /**
115 | * Sets the [serializer] to be used for serializing the record-key.
116 | */
117 | fun keySerializer(serializer: Serializer<K>): Builder<K, V>
118 |
119 | /**
120 | * Sets the [serializer] to be used for serializing the record-value.
121 | */
122 | fun valueSerializer(serializer: Serializer<V>): Builder<K, V>
123 | }
124 |
125 | /**
126 | * Asynchronously send a record for the given [value] to the given [topic] (or the default one if null is given)
127 | * and [partition] with the given [timestamp].
128 | *
129 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledged.
130 | * Otherwise invoke [onError] if an error happens while sending.
131 | *
132 | * @see Producer.send
133 | * @return a [Future] of [SendResult]
134 | */
135 | fun send(value: V,
136 | topic: String? = null,
137 | partition: Int? = null,
138 | timestamp: Instant? = null,
139 | onSuccess: OnSendSuccessCallback<K, V>? = null,
140 | onError: OnSendErrorCallback<K, V>? = null) : Future<SendResult<K?, V?>> {
141 | return send(null, value, topic, partition, timestamp, onSuccess, onError)
142 | }
143 |
144 | /**
145 | * Asynchronously send a record for the given [key] and [value] to the given [topic] (or the default one if null is given)
146 | * and [partition] with the given [timestamp].
147 | *
148 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledged.
149 | * Otherwise invoke [onError] if an error happens while sending.
150 | *
151 | * @see Producer.send
152 | * @return a [Future] of [SendResult]
153 | */
154 | fun send(key: K? = null,
155 | value: V? = null,
156 | topic: String? = null,
157 | partition: Int? = null,
158 | timestamp: Instant? = null,
159 | onSuccess: OnSendSuccessCallback<K, V>? = null,
160 | onError: OnSendErrorCallback<K, V>? = null) : Future<SendResult<K?, V?>> {
161 | return send(KafkaRecord(key, value, topic, partition, timestamp), onSuccess, onError)
162 | }
163 |
164 | /**
165 | * Asynchronously send the given key-value [record] to the given [topic] (or the default one if null is given)
166 | * and [partition] with the given [timestamp].
167 | *
168 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledged.
169 | * Otherwise invoke [onError] if an error happens while sending.
170 | *
171 | * @see Producer.send
172 | * @return a [Future] of [SendResult]
173 | */
174 | fun send(record: Pair<K?, V?>,
175 | topic: String? = null,
176 | partition: Int? = null,
177 | timestamp: Instant? = null,
178 | onSuccess: OnSendSuccessCallback<K, V>? = null,
179 | onError: OnSendErrorCallback<K, V>? = null) : Future<SendResult<K?, V?>> {
180 | return send(record.first, record.second, topic, partition, timestamp, onSuccess, onError)
181 | }
182 |
183 | /**
184 | * Asynchronously send all the given key-value [records] to the given [topic] (or the default one if null is given)
185 | * and [partition] with the given [timestamp].
186 | *
187 | * Then, optionally invoke the specific given [onSuccess] callback when each record has been acknowledged.
188 | * Otherwise invoke [onError] if an error happens while sending.
189 | *
190 | * @see Producer.send
191 | * @return a [Future] of the list of [SendResult]
192 | */
193 | fun send(records: Collection<Pair<K?, V?>>,
194 | topic: String? = null,
195 | partition: Int? = null,
196 | timestamp: Instant? = null,
197 | onSuccess: OnSendSuccessCallback<K, V>? = null,
198 | onError: OnSendErrorCallback<K, V>? = null) : Future<List<SendResult<K?, V?>>>
199 |
200 | /**
201 | * Asynchronously send the given [record].
202 | *
203 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledged.
204 | * Otherwise invoke [onError] if an error happens while sending.
205 | *
206 | * @see Producer.send
207 | * @return a [Future] of [SendResult]
208 | */
209 | fun send(record: KafkaRecord<K?, V?>,
210 | onSuccess: OnSendSuccessCallback<K, V>? = null,
211 | onError: OnSendErrorCallback<K, V>? = null) : Future<SendResult<K?, V?>>
212 | /**
213 | * Asynchronously send the given [record].
214 | *
215 | * Then, optionally invoke the specific given [onSuccess] callback when the record has been acknowledged.
216 | * Otherwise invoke [onError] if an error happens while sending.
217 | *
218 | * @see Producer.send
219 | * @return a [Future] of [SendResult]
220 | */
221 | fun send(record: ProducerRecord<K?, V?>,
222 | onSuccess: OnSendSuccessCallback<K, V>? = null,
223 | onError: OnSendErrorCallback<K, V>? = null) : Future<SendResult<K?, V?>>
224 |
225 | /**
226 | * Executes the given [action] with the underlying [Producer].
227 | */
228 | fun <T> execute(action: (producer: Producer<K?, V?>) -> T): T
229 |
230 | /**
231 | * Executes the given [action] in a producer transaction.
232 | */
233 | fun runTx(action: (ProducerContainer<K, V>) -> Unit): TransactionResult
234 |
235 | /**
236 | * @see Producer.metrics
237 | */
238 | fun metrics(topic: String): Map<MetricName, Metric>
239 |
240 | /**
241 | * @see Producer.partitionsFor
242 | */
243 | fun partitionsFor(topic: String): List<PartitionInfo>
244 |
245 | /**
246 | * Initialize this [ProducerContainer].
247 | */
248 | fun init()
249 |
250 | /**
251 | * Flush the [Producer].
252 | */
253 | fun flush()
254 |
255 | /**
256 | * @return the [State] of this container.
257 | */
258 | fun state(): State
259 |
260 | /**
261 | * Close this [ProducerContainer].
262 | *
263 | * @see [Producer.close].
264 | */
265 | override fun close() {
266 | close(Duration.ofMillis(Long.MAX_VALUE))
267 | }
268 |
269 | /**
270 | * Close this [ProducerContainer].
271 | *
272 | * @see [Producer.close].
273 | */
274 | fun close(timeout: Duration)
275 | }
276 |
--------------------------------------------------------------------------------
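
A transactional sketch against runTx above: either both sends commit, or the result reports the abort. The transactional.id, broker address and topic are placeholders:

import io.streamthoughts.kafka.clients.producer.CommittedTransactionResult
import io.streamthoughts.kafka.clients.producer.KafkaProducerContainer
import io.streamthoughts.kafka.clients.producer.producerConfigsOf
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.StringSerializer

fun main() {
    val container = KafkaProducerContainer.Builder<String, String>(
        producerConfigsOf(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG to "localhost:9092")
            .transactionalId("demo-tx") // triggers initTransactions() during init()
    )
        .keySerializer(StringSerializer())
        .valueSerializer(StringSerializer())
        .build()
    container.init()

    val result = container.runTx { producer ->
        producer.send(value = "first", topic = "demo-topic")
        producer.send(value = "second", topic = "demo-topic")
    }
    println(if (result is CommittedTransactionResult) "committed" else "not committed: $result")
    container.close()
}
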
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/ProducerFactory.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.producer
20 |
21 | import org.apache.kafka.clients.producer.Producer
22 | import org.apache.kafka.common.serialization.Serializer
23 |
24 | /**
25 | * The default factory interface to create new [Producer] instances.
26 | */
27 | interface ProducerFactory {
28 |
29 | /**
30 | * Creates a new [Producer] instance with the given [configs].
31 | */
32 | fun <K, V> make(configs: Map<String, Any?>,
33 |                 keySerializer: Serializer<K>? = null,
34 |                 valueSerializer: Serializer<V>? = null): Producer<K, V>
35 | }
36 |
--------------------------------------------------------------------------------
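To make the contract concrete, here is a minimal sketch of an implementation that simply delegates to the vanilla `KafkaProducer` constructor. The `SimpleProducerFactory` name is hypothetical, and the library ships its own default implementation, which may differ; this is illustrative only.

```kotlin
import io.streamthoughts.kafka.clients.producer.ProducerFactory
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.clients.producer.Producer
import org.apache.kafka.common.serialization.Serializer

// Illustrative factory: delegates straight to KafkaProducer, whose constructor
// accepts optional key/value serializers alongside the config map.
object SimpleProducerFactory : ProducerFactory {
    override fun <K, V> make(configs: Map<String, Any?>,
                             keySerializer: Serializer<K>?,
                             valueSerializer: Serializer<V>?): Producer<K, V> =
        KafkaProducer(configs, keySerializer, valueSerializer)
}
```

Note that the `= null` defaults are declared on the interface and therefore not repeated on the override, as Kotlin requires.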
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/SendResult.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.producer
20 |
21 | import org.apache.kafka.clients.producer.ProducerRecord
22 | import org.apache.kafka.clients.producer.RecordMetadata
23 |
24 | data class SendResult<K, V>(val record: ProducerRecord<K, V>, val metadata: RecordMetadata)
--------------------------------------------------------------------------------
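Since `SendResult` is a data class, callers can destructure the value completed by the `Future` that `ProducerContainer.send` returns. A small sketch, blocking on the `Future` purely for illustration; the `logAck` helper is hypothetical:

```kotlin
import io.streamthoughts.kafka.clients.producer.SendResult
import java.util.concurrent.Future

// Blocks until the broker acknowledges the record, then destructures the result
// into the original ProducerRecord and the broker-assigned RecordMetadata.
fun <K, V> logAck(pending: Future<SendResult<K, V>>) {
    val (record, metadata) = pending.get()
    println("'${record.topic()}' acked: partition=${metadata.partition()}, offset=${metadata.offset()}")
}
```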
/clients/src/main/kotlin/io/streamthoughts/kafka/clients/producer/callback/ProducerSendCallback.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 StreamThoughts.
3 | *
4 | * Licensed to the Apache Software Foundation (ASF) under one or more
5 | * contributor license agreements. See the NOTICE file distributed with
6 | * this work for additional information regarding copyright ownership.
7 | * The ASF licenses this file to You under the Apache License, Version 2.0
8 | * (the "License"); you may not use this file except in compliance with
9 | * the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | package io.streamthoughts.kafka.clients.producer.callback
20 |
21 | import io.streamthoughts.kafka.clients.loggerFor
22 | import io.streamthoughts.kafka.clients.producer.ProducerContainer
23 | import io.streamthoughts.kafka.clients.producer.ProducerContainer.*
24 | import io.streamthoughts.kafka.clients.producer.callback.ProducerSendCallback.CloseOnErrorProducerSendCallback
25 | import org.apache.kafka.clients.producer.ProducerRecord
26 | import org.apache.kafka.clients.producer.RecordMetadata
27 | import org.slf4j.Logger
28 | import java.time.Duration
29 |
30 | typealias OnSendErrorCallback<K, V> = (container: ProducerContainer<K, V>, record: ProducerRecord<K, V>, error: Exception) -> Unit
31 | typealias OnSendSuccessCallback<K, V> = (container: ProducerContainer<K, V>, record: ProducerRecord<K, V>, metadata: RecordMetadata) -> Unit