├── .devcontainer
│   └── devcontainer.json
├── .github
│   └── workflows
│       ├── build.yaml
│       ├── githubpages.yaml
│       ├── junie.yml
│       ├── pr.yaml
│       └── publish.yml
├── .gitignore
├── Kafka.MD
├── LICENSE
├── README.md
├── Zookeeper.MD
├── build.gradle.kts
├── docs
│   ├── images
│   │   ├── anchor-copy-button.svg
│   │   ├── arrow_down.svg
│   │   ├── burger.svg
│   │   ├── copy-icon.svg
│   │   ├── copy-successful-icon.svg
│   │   ├── footer-go-to-link.svg
│   │   ├── go-to-top-icon.svg
│   │   ├── logo-icon.svg
│   │   ├── nav-icons
│   │   │   ├── abstract-class-kotlin.svg
│   │   │   ├── abstract-class.svg
│   │   │   ├── annotation-kotlin.svg
│   │   │   ├── annotation.svg
│   │   │   ├── class-kotlin.svg
│   │   │   ├── class.svg
│   │   │   ├── enum-kotlin.svg
│   │   │   ├── enum.svg
│   │   │   ├── exception-class.svg
│   │   │   ├── field-value.svg
│   │   │   ├── field-variable.svg
│   │   │   ├── function.svg
│   │   │   ├── interface-kotlin.svg
│   │   │   ├── interface.svg
│   │   │   ├── object.svg
│   │   │   └── typealias-kotlin.svg
│   │   └── theme-toggle.svg
│   ├── index.html
│   ├── kotlin-kafka
│   │   ├── io.github.nomisRev.kafka.receiver
│   │   │   ├── -auto-offset-reset
│   │   │   │   └── index.html
│   │   │   ├── -commit-strategy
│   │   │   │   ├── -by-size-or-time
│   │   │   │   │   ├── -by-size-or-time.html
│   │   │   │   │   ├── index.html
│   │   │   │   │   ├── interval.html
│   │   │   │   │   └── size.html
│   │   │   │   ├── -by-size
│   │   │   │   │   ├── -by-size.html
│   │   │   │   │   ├── index.html
│   │   │   │   │   └── size.html
│   │   │   │   ├── -by-time
│   │   │   │   │   ├── -by-time.html
│   │   │   │   │   ├── index.html
│   │   │   │   │   └── interval.html
│   │   │   │   └── index.html
│   │   │   ├── -consumer-partition
│   │   │   │   ├── index.html
│   │   │   │   ├── position.html
│   │   │   │   ├── seek-to-beginning.html
│   │   │   │   ├── seek-to-end.html
│   │   │   │   ├── seek-to-timestamp.html
│   │   │   │   ├── seek.html
│   │   │   │   └── topic-partition.html
│   │   │   ├── -kafka-receiver.html
│   │   │   ├── -kafka-receiver
│   │   │   │   ├── index.html
│   │   │   │   ├── receive-auto-ack.html
│   │   │   │   ├── receive.html
│   │   │   │   └── with-consumer.html
│   │   │   ├── -offset
│   │   │   │   ├── acknowledge.html
│   │   │   │   ├── commit.html
│   │   │   │   ├── index.html
│   │   │   │   ├── offset.html
│   │   │   │   └── topic-partition.html
│   │   │   ├── -receiver-record
│   │   │   │   ├── -receiver-record.html
│   │   │   │   ├── index.html
│   │   │   │   └── offset.html
│   │   │   ├── -receiver-settings.html
│   │   │   ├── -receiver-settings
│   │   │   │   ├── -receiver-settings.html
│   │   │   │   ├── auto-offset-reset.html
│   │   │   │   ├── bootstrap-servers.html
│   │   │   │   ├── close-timeout.html
│   │   │   │   ├── commit-retry-interval.html
│   │   │   │   ├── commit-strategy.html
│   │   │   │   ├── group-id.html
│   │   │   │   ├── index.html
│   │   │   │   ├── key-deserializer.html
│   │   │   │   ├── max-commit-attempts.html
│   │   │   │   ├── max-deferred-commits.html
│   │   │   │   ├── poll-timeout.html
│   │   │   │   ├── properties.html
│   │   │   │   └── value-deserializer.html
│   │   │   └── index.html
│   │   ├── io.github.nomisRev.kafka
│   │   │   ├── -acks
│   │   │   │   ├── -all
│   │   │   │   │   └── index.html
│   │   │   │   ├── -minus-one
│   │   │   │   │   └── index.html
│   │   │   │   ├── -one
│   │   │   │   │   └── index.html
│   │   │   │   ├── -zero
│   │   │   │   │   └── index.html
│   │   │   │   ├── entries.html
│   │   │   │   ├── index.html
│   │   │   │   ├── value-of.html
│   │   │   │   ├── value.html
│   │   │   │   └── values.html
│   │   │   ├── -admin-settings
│   │   │   │   ├── -admin-settings.html
│   │   │   │   ├── bootstrap-server.html
│   │   │   │   ├── index.html
│   │   │   │   └── properties.html
│   │   │   ├── -admin.html
│   │   │   ├── -auto-offset-reset
│   │   │   │   ├── -earliest
│   │   │   │   │   └── index.html
│   │   │   │   ├── -latest
│   │   │   │   │   └── index.html
│   │   │   │   ├── -none
│   │   │   │   │   └── index.html
│   │   │   │   ├── entries.html
│   │   │   │   ├── index.html
│   │   │   │   ├── value-of.html
│   │   │   │   ├── value.html
│   │   │   │   └── values.html
│   │   │   ├── -consumer-settings
│   │   │   │   ├── -consumer-settings.html
│   │   │   │   ├── auto-commit-interval.html
│   │   │   │   ├── auto-offset-reset.html
│   │   │   │   ├── bootstrap-servers.html
│   │   │   │   ├── check-crcs.html
│   │   │   │   ├── client-dns-lookup.html
│   │   │   │   ├── client-id.html
│   │   │   │   ├── connections-max-idle.html
│   │   │   │   ├── default-api-timeout.html
│   │   │   │   ├── enable-auto-commit.html
│   │   │   │   ├── exclude-internal-topics.html
│   │   │   │   ├── fetch-max-bytes.html
│   │   │   │   ├── fetch-max-wait.html
│   │   │   │   ├── fetch-min-bytes.html
│   │   │   │   ├── group-id.html
│   │   │   │   ├── heartbeat-interval.html
│   │   │   │   ├── index.html
│   │   │   │   ├── interceptor-classes.html
│   │   │   │   ├── key-deserializer.html
│   │   │   │   ├── max-partition-fetch-bytes.html
│   │   │   │   ├── max-poll-interval.html
│   │   │   │   ├── max-poll-records.html
│   │   │   │   ├── metadata-max-age.html
│   │   │   │   ├── metrics-num-samples.html
│   │   │   │   ├── metrics-recording-level.html
│   │   │   │   ├── metrics-reporter-classes.html
│   │   │   │   ├── metrics-sample-window.html
│   │   │   │   ├── partition-assignment-strategy.html
│   │   │   │   ├── properties.html
│   │   │   │   ├── receive-buffer.html
│   │   │   │   ├── reconnect-backoff-max.html
│   │   │   │   ├── reconnect-backoff.html
│   │   │   │   ├── request-timeout.html
│   │   │   │   ├── retry-backoff.html
│   │   │   │   ├── send-buffer.html
│   │   │   │   ├── session-time-out.html
│   │   │   │   ├── to-receiver-settings.html
│   │   │   │   └── value-deserializer.html
│   │   │   ├── -kafka-consumer.html
│   │   │   ├── -kafka-producer.html
│   │   │   ├── -nothing-deserializer
│   │   │   │   ├── close.html
│   │   │   │   ├── configure.html
│   │   │   │   ├── deserialize.html
│   │   │   │   └── index.html
│   │   │   ├── -nothing-serializer
│   │   │   │   ├── close.html
│   │   │   │   ├── configure.html
│   │   │   │   ├── index.html
│   │   │   │   └── serialize.html
│   │   │   ├── -producer-settings
│   │   │   │   ├── -producer-settings.html
│   │   │   │   ├── acks.html
│   │   │   │   ├── bootstrap-servers.html
│   │   │   │   ├── index.html
│   │   │   │   ├── key-deserializer.html
│   │   │   │   ├── other.html
│   │   │   │   ├── properties.html
│   │   │   │   └── value-deserializer.html
│   │   │   ├── as-deferred.html
│   │   │   ├── as-flow.html
│   │   │   ├── await.html
│   │   │   ├── commit-await.html
│   │   │   ├── commit-batch-within.html
│   │   │   ├── component1.html
│   │   │   ├── component2.html
│   │   │   ├── create-topic.html
│   │   │   ├── delete-topic.html
│   │   │   ├── describe-topic.html
│   │   │   ├── imap.html
│   │   │   ├── index.html
│   │   │   ├── kafka-consumer.html
│   │   │   ├── kafka-producer.html
│   │   │   ├── map.html
│   │   │   ├── offsets.html
│   │   │   ├── produce.html
│   │   │   ├── send-await.html
│   │   │   ├── subscribe-to.html
│   │   │   └── topic-exists.html
│   │   └── package-list
│   ├── navigation.html
│   ├── scripts
│   │   ├── clipboard.js
│   │   ├── main.js
│   │   ├── navigation-loader.js
│   │   ├── pages.json
│   │   ├── platform-content-handler.js
│   │   ├── prism.js
│   │   ├── sourceset_dependencies.js
│   │   └── symbol-parameters-wrapper_deferred.js
│   └── styles
│       ├── font-jb-sans-auto.css
│       ├── logo-styles.css
│       ├── main.css
│       ├── prism.css
│       └── style.css
├── gradle.properties
├── gradle
│   ├── libs.versions.toml
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── guide
│   ├── build.gradle.kts
│   ├── example
│   │   ├── example-admin-01.kt
│   │   ├── example-publisher-01.kt
│   │   └── example-readme-01.kt
│   ├── src
│   │   └── main
│   │       ├── kotlin
│   │       │   └── KafkaContainer.kt
│   │       └── resources
│   │           └── log4j.properties
│   └── test
│       ├── AdminSettingsSpec.kt
│       └── AdminSpec.kt
├── knit.code.include
├── knit.properties
├── renovate.json
├── settings.gradle.kts
└── src
    ├── main
    │   └── kotlin
    │       └── io
    │           └── github
    │               └── nomisRev
    │                   └── kafka
    │                       ├── Admin.kt
    │                       ├── Consumer.kt
    │                       ├── KafkaFuture.kt
    │                       ├── Producer.kt
    │                       ├── Serializers.kt
    │                       ├── internal
    │                       │   └── FlowTimeChunked.kt
    │                       ├── publisher
    │                       │   ├── Acks.kt
    │                       │   ├── FlowProduce.kt
    │                       │   ├── KafkaPublisher.kt
    │                       │   ├── PublisherScope.kt
    │                       │   └── PublisherSettings.kt
    │                       ├── receiver
    │                       │   ├── CommitStrategy.kt
    │                       │   ├── ConsumerPartition.kt
    │                       │   ├── ConsumerRecord.kt
    │                       │   ├── KafkaReceiver.kt
    │                       │   ├── Offset.kt
    │                       │   ├── ReceiverSettings.kt
    │                       │   └── internals
    │                       │       ├── AckMode.kt
    │                       │       ├── AtMostOnceOffsets.kt
    │                       │       ├── CommittableBatch.kt
    │                       │       ├── EventLoop.kt
    │                       │       └── SeekablePartition.kt
    │                       └── utils
    │                           └── Closeable.kt
    └── test
        ├── kotlin
        │   └── io
        │       └── github
        │           └── nomisrev
        │               └── kafka
        │                   ├── KafkaContainer.kt
        │                   ├── KafkaSpec.kt
        │                   ├── Predef.kt
        │                   ├── publisher
        │                   │   ├── FlowProduceSpec.kt
        │                   │   └── KafkaPublisherSpec.kt
        │                   └── receiver
        │                       ├── CommitStrategySpec.kt
        │                       └── KafakReceiverSpec.kt
        └── resources
            └── logback-test.xml

--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
{
  "name": "Java",
  "image": "mcr.microsoft.com/devcontainers/java:1-21",
  "features": {
    "ghcr.io/devcontainers/features/java:1": {
      "version": "none",
      "installMaven": "true",
      "mavenVersion": "3.8.6",
      "installGradle": "true"
    }
  }
}

--------------------------------------------------------------------------------
/.github/workflows/build.yaml:
--------------------------------------------------------------------------------
name: "Build"

on:
  push:
    paths-ignore:
      - 'docs/**'
      - '*.md'
    branches:
      - main

jobs:
  check:
    runs-on: ubuntu-latest
    timeout-minutes: 30

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-java@v4
        with:
          distribution: 'zulu'
          java-version: 17

      - uses: gradle/gradle-build-action@v3
        with:
          arguments: build --scan --full-stacktrace

      - name: Bundle the build report
        if: failure()
        run: find . -type d -name 'reports' | zip -@ -r build-reports.zip

      - name: Upload the build report
        if: failure()
        uses: actions/upload-artifact@master
        with:
          name: error-report
          path: build-reports.zip

--------------------------------------------------------------------------------
/.github/workflows/githubpages.yaml:
--------------------------------------------------------------------------------
name: githubpages

on:
  release:
    types: [published]

jobs:
  githubpages:
    runs-on: ubuntu-latest
    timeout-minutes: 20

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: gradle/gradle-build-action@v3
        with:
          arguments: -Pversion=${{ github.event.release.tag_name }} dokkaHtml

      - name: Deploy to gh-pages
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs

--------------------------------------------------------------------------------
/.github/workflows/junie.yml:
--------------------------------------------------------------------------------
name: Junie
run-name: Junie run ${{ inputs.run_id }}

permissions:
  contents: write
  pull-requests: write

on:
  workflow_dispatch:
    inputs:
      run_id:
        description: "id of workflow process"
        required: true
      workflow_params:
        description: "stringified params"
        required: true

jobs:
  call-workflow-passing-data:
    uses: jetbrains-junie/junie-workflows/.github/workflows/ej-issue.yml@main
    with:
      workflow_params: ${{ inputs.workflow_params }}

--------------------------------------------------------------------------------
/.github/workflows/pr.yaml:
--------------------------------------------------------------------------------
name: "Build main"

on:
  pull_request:
    paths-ignore:
      - 'docs/**'
      - '*.md'

jobs:
  check:
    runs-on: ubuntu-latest
    timeout-minutes: 120

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-java@v4
        with:
          distribution: 'zulu'
          java-version: 17

      - uses: gradle/gradle-build-action@v3
        with:
          arguments: build --scan --full-stacktrace -PstressTest=100

      - name: Bundle the build report
        if: failure()
        run: find . -type d -name 'reports' | zip -@ -r build-reports.zip

      - name: Upload the build report
        if: failure()
        uses: actions/upload-artifact@master
        with:
          name: error-report
          path: build-reports.zip

--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
name: "Publish library"

on:
  workflow_dispatch:
    branches: [main]
    inputs:
      version:
        description: 'Version'
        required: true
        type: string

env:
  ORG_GRADLE_PROJECT_mavenCentralUsername: '${{ secrets.SONATYPE_USER }}'
  ORG_GRADLE_PROJECT_mavenCentralPassword: '${{ secrets.SONATYPE_PWD }}'
  ORG_GRADLE_PROJECT_signingInMemoryKeyId: '${{ secrets.SIGNING_KEY_ID }}'
  ORG_GRADLE_PROJECT_signingInMemoryKey: '${{ secrets.SIGNING_KEY }}'
  ORG_GRADLE_PROJECT_signingInMemoryKeyPassword: '${{ secrets.SIGNING_KEY_PASSPHRASE }}'

jobs:
  publish:
    timeout-minutes: 30
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-java@v4
        with:
          distribution: 'zulu'
          java-version: 11

      - uses: gradle/gradle-build-action@v3
        with:
          arguments: assemble -Pversion=${{ inputs.version }}

      - name: Upload reports
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: 'reports-${{ matrix.os }}'
          path: '**/build/reports/**'

      - name: Publish final version
        uses: gradle/gradle-build-action@v3
        with:
          arguments: -Pversion=${{ inputs.version }} publishAllPublicationsToMavenCentralRepository

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Created by https://www.toptal.com/developers/gitignore/api/intellij+all,kotlin,gradle,macos
# Edit at https://www.toptal.com/developers/gitignore?templates=intellij+all,kotlin,gradle,macos

### Intellij+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# AWS User-specific
.idea/**/aws.xml

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

### Intellij+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360

.idea/

# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023

*.iml
modules.xml
.idea/misc.xml
*.ipr

# Sonarlint plugin
.idea/sonarlint

### Kotlin ###
# Compiled class file
*.class

# Log file
*.log

# BlueJ files
*.ctxt

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar

# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*

### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two \r
Icon


# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### Gradle ###
.gradle
build/

# Ignore Gradle GUI config
gradle-app.setting

# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
!gradle-wrapper.jar

# Cache of project
.gradletasknamecache

# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898
# gradle/wrapper/gradle-wrapper.properties

### Gradle Patch ###
**/build/

# End of https://www.toptal.com/developers/gitignore/api/intellij+all,kotlin,gradle,macos

--------------------------------------------------------------------------------
/Kafka.MD:
--------------------------------------------------------------------------------
# Kafka Cheatsheet

# Install with Homebrew

```
brew install zookeeper
brew install kafka
```

Start as a service:

```
brew services start zookeeper
brew services start kafka
```

Start as a process:

```
zookeeper-server-start /usr/local/etc/kafka/zookeeper.properties
kafka-server-start /usr/local/etc/kafka/server.properties
```

Create & describe a topic:

```
kafka-topics --bootstrap-server localhost:9092 --create --replication-factor 1 --partitions 1 --topic test
kafka-topics --bootstrap-server localhost:9092 --describe --topic test
```

Produce & consume:

```
kafka-console-producer --bootstrap-server localhost:9092 --topic test
kafka-console-consumer --bootstrap-server localhost:9092 --topic test --from-beginning
```

# Kafka fails to start with Zookeeper: kafka.common.InconsistentClusterIdException
> The reason is that Kafka saved the cluster ID of a failed cluster in `meta.properties`.
Delete `kafka-logs/meta.properties` from your tmp folder, which is `C:/tmp` by default on Windows, and `/tmp/kafka-logs` on Linux.

On macOS, the following steps are needed:
1. Stop the Kafka service: `brew services stop kafka`
2. Open the Kafka server properties file (`vim /usr/local/etc/kafka/server.properties`) and find the value of `log.dirs` in this file. Here it is `/usr/local/var/lib/kafka-logs`.
3. Delete the `<log.dirs>/meta.properties` file.
4. Start the Kafka service: `brew services start kafka`

# Config recommendations

- Partitions: Many users will have the partition count for a topic be equal to, or a multiple of, the number of brokers in the cluster. This allows the partitions to be evenly distributed to the brokers, which will evenly distribute the message load. This is not a requirement, however, as you can also balance message load by having multiple topics.
> If you have some estimate regarding the target throughput of the topic and the expected throughput of the consumers, you can divide the target throughput by the expected consumer throughput and derive the number of partitions this way. So if I want to be able to write and read 1 GB/sec from a topic, and I know each consumer can only process 50 MB/s, then I know I need at least 20 partitions. This way, I can have 20 consumers reading from the topic and achieve 1 GB/sec.
> If you don't have this detailed information, our experience suggests that limiting the size of the partition on the disk to less than 6 GB per day of retention often gives satisfactory results. Starting small and expanding as needed is easier than starting too large.

--------------------------------------------------------------------------------
/Zookeeper.MD:
--------------------------------------------------------------------------------

# Zookeeper

Apache Kafka uses Zookeeper to store metadata about the Kafka cluster, as well as consumer client details.

## Terminology

- ensemble: Zookeeper is designed to work as a cluster, called an ensemble, to ensure high availability. A quorum sketch follows below.
  => It is recommended that ensembles contain an odd number of servers (recommended: 5).
  => With 5 servers, 2 nodes can go down: 1 for maintenance and 1 fatal. Larger ensembles introduce latency unless observers are used.
  => Observer nodes help balance read-only traffic.
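As a sanity check on the sizing above: an ensemble of n servers stays available while a majority (⌊n/2⌋ + 1) is up. A minimal sketch in Kotlin (illustrative only, not a file in this repo):

```kotlin
// Quorum math for a ZooKeeper ensemble: a strict majority must stay up.
fun quorum(ensembleSize: Int): Int = ensembleSize / 2 + 1

fun tolerableFailures(ensembleSize: Int): Int = ensembleSize - quorum(ensembleSize)

fun main() {
    // 5 servers: quorum of 3, so 2 may be down (e.g. 1 maintenance + 1 fatal).
    println(quorum(5))            // 3
    println(tolerableFailures(5)) // 2
    // An even count buys no extra failure tolerance, hence the odd-number recommendation:
    println(tolerableFailures(6)) // 2, same as with 5 servers
}
```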

--------------------------------------------------------------------------------
/build.gradle.kts:
--------------------------------------------------------------------------------
import kotlinx.knit.KnitPluginExtension
import org.gradle.api.tasks.testing.logging.TestExceptionFormat.*
import org.gradle.api.tasks.testing.logging.TestLogEvent.FAILED
import org.gradle.api.tasks.testing.logging.TestLogEvent.SKIPPED
import org.gradle.api.tasks.testing.logging.TestLogEvent.STANDARD_ERROR
import org.jetbrains.dokka.gradle.DokkaTask
import org.jetbrains.kotlin.gradle.dsl.KotlinVersion.KOTLIN_2_0

plugins {
  alias(libs.plugins.kotlin.jvm)
  alias(libs.plugins.kotlin.assert)
  alias(libs.plugins.dokka)
  alias(libs.plugins.spotless)
  alias(libs.plugins.knit)
  alias(libs.plugins.publish)
}

repositories {
  mavenCentral()
}

group = "io.github.nomisrev"

dependencies {
  api(libs.kotlin.stdlib)
  api(libs.kotlinx.coroutines.core)
  api(libs.kotlinx.coroutines.jdk8)
  api(libs.kafka.clients)
  implementation(libs.slf4j.api)

  testImplementation(kotlin("test"))
  testImplementation(libs.testcontainers.kafka)
  testImplementation(libs.slf4j.simple)
  testImplementation(libs.kotlinx.coroutines.test)
}

@Suppress("OPT_IN_USAGE")
powerAssert {
  functions = listOf("kotlin.test.assertEquals")
}

//configure {
//  functions = listOf("kotlin.test.assertEquals")
//}

configure<KnitPluginExtension> {
  siteRoot = "https://nomisrev.github.io/kotlin-kafka/"
}

configure<JavaPluginExtension> {
  toolchain {
    languageVersion.set(JavaLanguageVersion.of(8))
  }
}

kotlin {
  explicitApi()
  compilerOptions {
    languageVersion.set(KOTLIN_2_0)
    apiVersion.set(KOTLIN_2_0)
  }
}

tasks {
  withType<DokkaTask>().configureEach {
    outputDirectory.set(rootDir.resolve("docs"))
    moduleName.set("kotlin-kafka")
    dokkaSourceSets {
      named("main") {
        includes.from("README.md")
        perPackageOption {
          matchingRegex.set(".*\\.internal.*")
          suppress.set(true)
        }
        sourceLink {
          localDirectory.set(file("src/main/kotlin"))
          remoteUrl.set(uri("https://github.com/nomisRev/kotlin-kafka/tree/main/src/main/kotlin").toURL())
          remoteLineSuffix.set("#L")
        }
      }
    }
  }

  getByName("knitPrepare").dependsOn(getTasksByName("dokka", true))

  withType<Test>().configureEach {
    useJUnitPlatform()
    maxParallelForks = (2 * Runtime.getRuntime().availableProcessors())
    if (project.hasProperty("stressTest")) {
      systemProperty("io.github.nomisrev.kafka.TEST_ITERATIONS", project.properties["stressTest"] ?: 100)
    }
    testLogging {
      exceptionFormat = FULL
      events = setOf(SKIPPED, FAILED, STANDARD_ERROR)
    }
  }
}
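An aside on the `powerAssert` block above: it opts `kotlin.test.assertEquals` into power-assert transformation, so failing assertions render the values of their sub-expressions. A minimal hypothetical test (not a file in this repo) showing the effect:

```kotlin
import kotlin.test.Test
import kotlin.test.assertEquals

class PowerAssertExample {
    @Test
    fun sizesMatch() {
        val sent = listOf("a", "b", "c")
        val received = listOf("a", "b")
        // With the plugin configured for kotlin.test.assertEquals, a failure here
        // prints the evaluated values of sent.size and received.size alongside the
        // expected/actual diff, instead of only "expected 3, actual 2".
        assertEquals(sent.size, received.size)
    }
}
```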
--------------------------------------------------------------------------------
/docs/images/*.svg (theme icons, nav-icons, theme-toggle):
--------------------------------------------------------------------------------
[SVG markup stripped during extraction; nothing recoverable beyond the file names in the tree above.]

--------------------------------------------------------------------------------
/docs/kotlin-kafka/**/*.html (generated Dokka API pages):
--------------------------------------------------------------------------------
[HTML markup stripped during extraction. The only recoverable content is the documented member names: commitStrategy, groupId, properties (ReceiverSettings); value (Acks, AutoOffsetReset); bootstrapServer (AdminSettings); autoOffsetReset, bootstrapServers, clientId, fetchMaxBytes, fetchMaxWait, groupId, heartbeatInterval, metadataMaxAge, receiveBuffer, reconnectBackoff, requestTimeout, retryBackoff, sendBuffer (ConsumerSettings); `open override fun close()` (NothingDeserializer, NothingSerializer); acks, bootstrapServers, `val other: Properties? = null` (ProducerSettings).]
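The stripped pages above document, among others, the members of `ReceiverSettings` and `CommitStrategy`. For orientation, a hedged sketch of how those members combine, based on the library's public API; the broker address, topic, group id, and the argument order of `CommitStrategy.BySizeOrTime` (documented parameters `size` and `interval`) are assumptions:

```kotlin
import io.github.nomisRev.kafka.receiver.CommitStrategy
import io.github.nomisRev.kafka.receiver.KafkaReceiver
import io.github.nomisRev.kafka.receiver.ReceiverSettings
import org.apache.kafka.common.serialization.StringDeserializer
import kotlin.time.Duration.Companion.seconds

suspend fun consumeExample() {
    val settings = ReceiverSettings(
        bootstrapServers = "localhost:9092",  // assumed broker address
        keyDeserializer = StringDeserializer(),
        valueDeserializer = StringDeserializer(),
        groupId = "example-group",            // assumed group id
        // commit after 100 acknowledged records or every 5 seconds, whichever comes first
        commitStrategy = CommitStrategy.BySizeOrTime(100, 5.seconds),
    )
    KafkaReceiver(settings)
        .receive("example-topic")             // assumed topic name
        .collect { record ->
            println("${record.key()} -> ${record.value()}")
            record.offset.acknowledge()       // mark the record as processed
        }
}
```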
--------------------------------------------------------------------------------
/docs/scripts/clipboard.js:
--------------------------------------------------------------------------------
window.addEventListener('load', () => {
    document.querySelectorAll('span.copy-icon').forEach(element => {
        element.addEventListener('click', (el) => copyElementsContentToClipboard(element));
    })

    document.querySelectorAll('span.anchor-icon').forEach(element => {
        element.addEventListener('click', (el) => {
            if(element.hasAttribute('pointing-to')){
                const location = hrefWithoutCurrentlyUsedAnchor() + '#' + element.getAttribute('pointing-to')
                copyTextToClipboard(element, location)
            }
        });
    })
})

const copyElementsContentToClipboard = (element) => {
    const selection = window.getSelection();
    const range = document.createRange();
    range.selectNodeContents(element.parentNode.parentNode);
    selection.removeAllRanges();
    selection.addRange(range);

    copyAndShowPopup(element, () => selection.removeAllRanges())
}

const copyTextToClipboard = (element, text) => {
    var textarea = document.createElement("textarea");
    textarea.textContent = text;
    textarea.style.position = "fixed";
    document.body.appendChild(textarea);
    textarea.select();

    copyAndShowPopup(element, () => document.body.removeChild(textarea))
}

const copyAndShowPopup = (element, after) => {
    try {
        document.execCommand('copy');
        element.nextElementSibling.classList.add('active-popup');
        setTimeout(() => {
            element.nextElementSibling.classList.remove('active-popup');
        }, 1200);
    } catch (e) {
        console.error('Failed to write to clipboard:', e)
    }
    finally {
        if(after) after()
    }
}

const hrefWithoutCurrentlyUsedAnchor = () => window.location.href.split('#')[0]

--------------------------------------------------------------------------------
/docs/scripts/navigation-loader.js:
--------------------------------------------------------------------------------
navigationPageText = fetch(pathToRoot + "navigation.html").then(response => response.text())

displayNavigationFromPage = () => {
    navigationPageText.then(data => {
        document.getElementById("sideMenu").innerHTML = data;
    }).then(() => {
        document.querySelectorAll(".overview > a").forEach(link => {
            link.setAttribute("href", pathToRoot + link.getAttribute("href"));
        })
    }).then(() => {
        document.querySelectorAll(".sideMenuPart").forEach(nav => {
            if (!nav.classList.contains("hidden"))
                nav.classList.add("hidden")
        })
    }).then(() => {
        revealNavigationForCurrentPage()
    }).then(() => {
        scrollNavigationToSelectedElement()
    })
    document.querySelectorAll('.footer a[href^="#"]').forEach(anchor => {
        anchor.addEventListener('click', function (e) {
            e.preventDefault();
            document.querySelector(this.getAttribute('href')).scrollIntoView({
                behavior: 'smooth'
            });
        });
    });
}

revealNavigationForCurrentPage = () => {
    let pageId = document.getElementById("content").attributes["pageIds"].value.toString();
    let parts = document.querySelectorAll(".sideMenuPart");
    let found = 0;
    do {
        parts.forEach(part => {
            if (part.attributes['pageId'].value.indexOf(pageId) !== -1 && found === 0) {
                found = 1;
                if (part.classList.contains("hidden")) {
                    part.classList.remove("hidden");
                    part.setAttribute('data-active', "");
                }
                revealParents(part)
            }
        });
        pageId = pageId.substring(0, pageId.lastIndexOf("/"))
    } while (pageId.indexOf("/") !== -1 && found === 0)
};
revealParents = (part) => {
    if (part.classList.contains("sideMenuPart")) {
        if (part.classList.contains("hidden"))
            part.classList.remove("hidden");
        revealParents(part.parentNode)
    }
};

scrollNavigationToSelectedElement = () => {
    let selectedElement = document.querySelector('div.sideMenuPart[data-active]')
    if (selectedElement == null) { // nothing selected, probably just the main page opened
        return
    }

    let hasIcon = selectedElement.querySelectorAll(":scope > div.overview span.nav-icon").length > 0

    // for instance enums also have children and are expandable, but are not package/module elements
    let isPackageElement = selectedElement.children.length > 1 && !hasIcon
    if (isPackageElement) {
        // if a package is selected or linked, it makes sense to align it to the top
        // so that you can see all the members it contains
        selectedElement.scrollIntoView(true)
    } else {
        // if a member within a package is linked, it makes sense to center it;
        // this should make it easier to look at surrounding members
        selectedElement.scrollIntoView({
            behavior: 'auto',
            block: 'center',
            inline: 'center'
        })
    }
}

/*
    This is a work-around for safari being IE of our times.
    It doesn't fire a DOMContentLoaded, presumably because the eventListener is added after it wants to do it
*/
if (document.readyState == 'loading') {
    window.addEventListener('DOMContentLoaded', () => {
        displayNavigationFromPage()
    })
} else {
    displayNavigationFromPage()
}

--------------------------------------------------------------------------------
/docs/scripts/sourceset_dependencies.js:
--------------------------------------------------------------------------------
sourceset_dependencies='{":dokkaHtml/main":[]}'

--------------------------------------------------------------------------------
/docs/scripts/symbol-parameters-wrapper_deferred.js:
--------------------------------------------------------------------------------
// helps with some corner cases where <wbr> starts working already,
// but the signature is not yet long enough to be wrapped
const leftPaddingPx = 60

const symbolResizeObserver = new ResizeObserver(entries => {
    entries.forEach(entry => {
        const symbolElement = entry.target
        symbolResizeObserver.unobserve(symbolElement) // only need it once, otherwise will be executed multiple times
        wrapSymbolParameters(symbolElement);
    })
});

const wrapAllSymbolParameters = () => {
    document.querySelectorAll("div.symbol").forEach(symbol => wrapSymbolParameters(symbol))
}

const wrapSymbolParameters = (symbol) => {
    let parametersBlock = symbol.querySelector("span.parameters")
    if (parametersBlock == null) {
        return // nothing to wrap
    }

    let symbolBlockWidth = symbol.clientWidth

    // Even though the script is marked as `defer` and we wait for the `DOMContentLoaded` event,
    // it can happen that `symbolBlockWidth` is 0, indicating that something hasn't been loaded.
    // In this case, just retry once all styles have been applied and it has been resized correctly.
    if (symbolBlockWidth === 0) {
        symbolResizeObserver.observe(symbol)
        return
    }

    let innerTextWidth = Array.from(symbol.children)
        .filter(it => !it.classList.contains("block")) // blocks are usually on their own (like annotations), so ignore it
        .map(it => it.getBoundingClientRect().width).reduce((a, b) => a + b, 0)

    // if signature text takes up more than a single line, wrap params for readability
    let shouldWrapParams = innerTextWidth > (symbolBlockWidth - leftPaddingPx)
    if (shouldWrapParams) {
        parametersBlock.classList.add("wrapped")
        parametersBlock.querySelectorAll("span.parameter").forEach(param => {
            // has to be a physical indent so that it can be copied. styles like
            // paddings and `::before { content: " " }` do not work for that
            param.prepend(createNbspIndent())
        })
    }
}

const createNbspIndent = () => {
    let indent = document.createElement("span")
    indent.append(document.createTextNode("\u00A0\u00A0\u00A0\u00A0"))
    indent.classList.add("nbsp-indent")
    return indent
}

const resetAllSymbolParametersWrapping = () => {
    document.querySelectorAll("div.symbol").forEach(symbol => resetSymbolParametersWrapping(symbol))
}

const resetSymbolParametersWrapping = (symbol) => {
    let parameters = symbol.querySelector("span.parameters")
    if (parameters != null) {
        parameters.classList.remove("wrapped")
        parameters.querySelectorAll("span.parameter").forEach(param => {
            let indent = param.querySelector("span.nbsp-indent")
            if (indent != null) indent.remove()
        })
    }
}

if (document.readyState === 'loading') {
    window.addEventListener('DOMContentLoaded', () => {
        wrapAllSymbolParameters()
    })
} else {
    wrapAllSymbolParameters()
}

window.onresize = event => {
    // need to re-calculate if params need to be wrapped after resize
    resetAllSymbolParametersWrapping()
    wrapAllSymbolParameters()
}

--------------------------------------------------------------------------------
/docs/styles/font-jb-sans-auto.css:
--------------------------------------------------------------------------------
/* Light weight */
@font-face {
    font-family: 'JetBrains Sans';
    src: url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans-Light.woff2') format('woff2'), url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans-Light.woff') format('woff');
    font-weight: 300;
    font-style: normal;
}
/* Regular weight */
@font-face {
    font-family: 'JetBrains Sans';
    src: url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans-Regular.woff2') format('woff2'), url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans-Regular.woff') format('woff');
    font-weight: 400;
    font-style: normal;
}
/* SemiBold weight */
@font-face {
    font-family: 'JetBrains Sans';
    src: url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans-SemiBold.woff2') format('woff2'), url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans-SemiBold.woff') format('woff');
    font-weight: 600;
    font-style: normal;
}

@supports (font-variation-settings: normal) {
    @font-face {
        font-family: 'JetBrains Sans';
        src: url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans.woff2') format('woff2 supports variations'),
            url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans.woff2') format('woff2-variations'),
            url('https://resources.jetbrains.com/storage/jetbrains-sans/JetBrainsSans.woff') format('woff-variations');
        font-weight: 100 900;
        font-style: normal;
    }
}

--------------------------------------------------------------------------------
/docs/styles/logo-styles.css:
--------------------------------------------------------------------------------
:root {
    --dokka-logo-image-url: url('../images/logo-icon.svg');
    --dokka-logo-height: 50px;
    --dokka-logo-width: 50px;
}

--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
kotlin.code.style=official

# Package definitions
projects.group=io.github.nomisrev

SONATYPE_HOST=S01
RELEASE_SIGNING_ENABLED=true

POM_NAME=kotlin-kafka
POM_DESCRIPTION=Kafka integration with Kotlin, KotlinX Coroutines & Ktor.
POM_URL=https://github.com/nomisrev/kafka-kotlin/

POM_LICENSE_NAME=The Apache Software License, Version 2.0
POM_LICENSE_URL=https://www.apache.org/licenses/LICENSE-2.0.txt
POM_LICENSE_DIST=repo

POM_SCM_URL=https://github.com/nomisrev/kafka-kotlin/
POM_SCM_CONNECTION=scm:git:git://github.com/nomisRev/kafka-kotlin.git
POM_SCM_DEV_CONNECTION=scm:git:ssh://git@github.com/nomisRev/kafka-kotlin.git

POM_DEVELOPER_ID=nomisRev
POM_DEVELOPER_NAME=Simon Vergauwen
POM_DEVELOPER_URL=https://github.com/nomisRev/

--------------------------------------------------------------------------------
/gradle/libs.versions.toml:
--------------------------------------------------------------------------------
[versions]
kotest = "5.8.1"
kafka = "3.7.0"
kotlin = "2.0.0-RC2"
kotlinx-coroutines = "1.8.0"
dokka = "2.0.0"
knit = "0.5.0"
kover = "0.7.6"
testcontainers-kafka = "1.19.7"
slf4j = "2.0.12"
spotless = "7.0.2"
publish = "0.28.0"

[libraries]
kotest-property = { module = "io.kotest:kotest-property", version.ref = "kotest" }
kafka-connect = { module = "org.apache.kafka:connect-runtime", version.ref = "kafka" }
kafka-clients = { module = "org.apache.kafka:kafka-clients", version.ref = "kafka" }
kafka-streams = { module = "org.apache.kafka:kafka-streams", version.ref = "kafka" }
kotlin-scripting-compiler-embeddable = { module = "org.jetbrains.kotlin:kotlin-scripting-compiler-embeddable", version.ref = "kotlin" }
kotlin-stdlib = { module = "org.jetbrains.kotlin:kotlin-stdlib", version.ref = "kotlin" }
kotlinx-coroutines-core = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-core", version.ref = "kotlinx-coroutines" }
kotlinx-coroutines-jdk8 = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-jdk8", version.ref = "kotlinx-coroutines" }
kotlinx-coroutines-test = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-test", version.ref = "kotlinx-coroutines" }
testcontainers-kafka = { module = "org.testcontainers:kafka", version.ref = "testcontainers-kafka" }
slf4j-api = { module = "org.slf4j:slf4j-api", version.ref = "slf4j" }
slf4j-simple = { module = "org.slf4j:slf4j-simple", version.ref = "slf4j" }

[plugins]
kotlin-jvm = { id = "org.jetbrains.kotlin.jvm", version.ref = "kotlin" }
"org.jetbrains.kotlin.plugin.power-assert", version.ref="kotlin" } 31 | dokka = { id = "org.jetbrains.dokka", version.ref = "dokka" } 32 | kover = { id = "org.jetbrains.kotlinx.kover", version.ref = "kover" } 33 | spotless = { id = "com.diffplug.spotless", version.ref = "spotless" } 34 | publish = { id = "com.vanniktech.maven.publish", version.ref="publish" } 35 | knit = { id = "org.jetbrains.kotlinx.knit", version.ref="knit" } 36 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nomisRev/kotlin-kafka/28a059d5cd140f695bda0aabdb4824e11a1ff0d4/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip 4 | networkTimeout=10000 5 | zipStoreBase=GRADLE_USER_HOME 6 | zipStorePath=wrapper/dists 7 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | @rem This is normally unused 30 | set APP_BASE_NAME=%~n0 31 | set APP_HOME=%DIRNAME% 32 | 33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 35 | 36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 38 | 39 | @rem Find java.exe 40 | if defined JAVA_HOME goto findJavaFromJavaHome 41 | 42 | set JAVA_EXE=java.exe 43 | %JAVA_EXE% -version >NUL 2>&1 44 | if %ERRORLEVEL% equ 0 goto execute 45 | 46 | echo. 47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 48 | echo. 49 | echo Please set the JAVA_HOME variable in your environment to match the 50 | echo location of your Java installation. 51 | 52 | goto fail 53 | 54 | :findJavaFromJavaHome 55 | set JAVA_HOME=%JAVA_HOME:"=% 56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 57 | 58 | if exist "%JAVA_EXE%" goto execute 59 | 60 | echo. 
61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 62 | echo. 63 | echo Please set the JAVA_HOME variable in your environment to match the 64 | echo location of your Java installation. 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /guide/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | kotlin("jvm") 3 | } 4 | 5 | repositories { 6 | mavenCentral() 7 | } 8 | 9 | dependencies { 10 | implementation(rootProject) 11 | implementation(libs.testcontainers.kafka) 12 | implementation("io.arrow-kt:suspendapp:0.4.0") 13 | testImplementation("org.jetbrains.kotlin:kotlin-test-junit") 14 | testImplementation("org.jetbrains.kotlinx:kotlinx-knit-test:0.5.0") 15 | } 16 | 17 | sourceSets.test { 18 | java.srcDirs("example", "test") 19 | } -------------------------------------------------------------------------------- /guide/example/example-admin-01.kt: -------------------------------------------------------------------------------- 1 | package example.exampleAdmin01 2 | 3 | import arrow.continuations.SuspendApp 4 | import io.github.nomisRev.kafka.Admin 5 | import io.github.nomisRev.kafka.AdminSettings 6 | import io.github.nomisRev.kafka.await 7 | import io.github.nomisRev.kafka.createTopic 8 | import io.github.nomisRev.kafka.deleteTopic 9 | import org.apache.kafka.clients.ClientDnsLookup 10 | import org.apache.kafka.clients.admin.AdminClientConfig.CLIENT_DNS_LOOKUP_CONFIG 11 | import org.apache.kafka.clients.admin.NewTopic 12 | import java.util.Properties 13 | 14 | fun main() = SuspendApp { 15 | val settings = AdminSettings( 16 | Kafka.container.bootstrapServers, 17 | Properties().apply { 18 | put(CLIENT_DNS_LOOKUP_CONFIG, ClientDnsLookup.USE_ALL_DNS_IPS) 19 | } 20 | ) 21 | Admin(settings).use { admin -> 22 | admin.createTopic(NewTopic("admin-settings-example", 1, 1)) 23 | val topics = admin.listTopics().namesToListings().await() 24 | println(topics) 25 | admin.deleteTopic("admin-settings-example") 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /guide/example/example-publisher-01.kt: -------------------------------------------------------------------------------- 1 | package example.examplePublisher01 2 | 3 | import arrow.continuations.SuspendApp 4 | import io.github.nomisRev.kafka.imap 5 | import io.github.nomisRev.kafka.publisher.KafkaPublisher 6 | import io.github.nomisRev.kafka.publisher.PublisherSettings 7 | import org.apache.kafka.clients.producer.ProducerRecord 8 | import org.apache.kafka.common.Metric 9 | import org.apache.kafka.common.MetricName 10 | import 
org.apache.kafka.common.serialization.IntegerSerializer 11 | import org.apache.kafka.common.serialization.StringSerializer 12 | @JvmInline value class Key(val index: Int) 13 | @JvmInline value class Message(val content: String) 14 | 15 | fun main() = SuspendApp { 16 | val settings = PublisherSettings( 17 | Kafka.container.bootstrapServers, 18 | IntegerSerializer().imap { key: Key -> key.index }, 19 | StringSerializer().imap { msg: Message -> msg.content }, 20 | ) 21 | 22 | KafkaPublisher(settings).use { publisher -> 23 | // ... use the publisher 24 | val m: Map = publisher.metrics() 25 | println(m) 26 | 27 | publisher.publishScope { 28 | // send record without awaiting acknowledgement 29 | offer(ProducerRecord("example-topic", Key(1), Message("msg-1"))) 30 | 31 | // send record and suspends until acknowledged 32 | publish(ProducerRecord("example-topic", Key(2), Message("msg-2"))) 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /guide/example/example-readme-01.kt: -------------------------------------------------------------------------------- 1 | package example.exampleReadme01 2 | 3 | import arrow.continuations.SuspendApp 4 | import io.github.nomisRev.kafka.Admin 5 | import io.github.nomisRev.kafka.AdminSettings 6 | import io.github.nomisRev.kafka.createTopic 7 | import io.github.nomisRev.kafka.imap 8 | import io.github.nomisRev.kafka.map 9 | import io.github.nomisRev.kafka.publisher.Acks 10 | import io.github.nomisRev.kafka.publisher.KafkaPublisher 11 | import io.github.nomisRev.kafka.publisher.PublisherSettings 12 | import io.github.nomisRev.kafka.receiver.AutoOffsetReset 13 | import io.github.nomisRev.kafka.receiver.KafkaReceiver 14 | import io.github.nomisRev.kafka.receiver.ReceiverSettings 15 | import kotlinx.coroutines.Dispatchers 16 | import kotlinx.coroutines.flow.map 17 | import kotlinx.coroutines.flow.take 18 | import kotlinx.coroutines.launch 19 | import org.apache.kafka.clients.admin.NewTopic 20 | import org.apache.kafka.clients.producer.ProducerRecord 21 | import org.apache.kafka.common.serialization.IntegerDeserializer 22 | import org.apache.kafka.common.serialization.IntegerSerializer 23 | import org.apache.kafka.common.serialization.StringDeserializer 24 | import org.apache.kafka.common.serialization.StringSerializer 25 | import java.util.UUID 26 | 27 | @JvmInline 28 | value class Key(val index: Int) 29 | 30 | @JvmInline 31 | value class Message(val content: String) 32 | 33 | fun main(): Unit = SuspendApp { 34 | val topicName = "test-topic" 35 | val msgCount = 10 36 | val kafka = Kafka.container 37 | 38 | Admin(AdminSettings(kafka.bootstrapServers)).use { client -> 39 | client.createTopic(NewTopic(topicName, 1, 1)) 40 | } 41 | 42 | launch(Dispatchers.IO) { // Send 20 messages, and then close the producer 43 | val settings: PublisherSettings = PublisherSettings( 44 | kafka.bootstrapServers, 45 | IntegerSerializer().imap { key: Key -> key.index }, 46 | StringSerializer().imap { msg: Message -> msg.content }, 47 | Acks.All 48 | ) 49 | KafkaPublisher(settings).use { publisher -> 50 | publisher.publishScope { 51 | (1..msgCount).forEach { index -> 52 | offer(ProducerRecord(topicName, Key(index), Message("msg: $index"))) 53 | } 54 | } 55 | } 56 | } 57 | 58 | launch(Dispatchers.IO) { // Consume 20 messages as a stream, and then close the consumer 59 | val settings: ReceiverSettings = ReceiverSettings( 60 | kafka.bootstrapServers, 61 | IntegerDeserializer().map(::Key), 62 | StringDeserializer().map(::Message), 63 | groupId 
= UUID.randomUUID().toString(), 64 | autoOffsetReset = AutoOffsetReset.Earliest 65 | ) 66 | KafkaReceiver(settings) 67 | .receive(topicName) 68 | .take(msgCount) 69 | .map { "${it.key()} -> ${it.value()}" } 70 | .collect(::println) 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /guide/src/main/kotlin/KafkaContainer.kt: -------------------------------------------------------------------------------- 1 | import org.testcontainers.containers.KafkaContainer 2 | import org.testcontainers.utility.DockerImageName 3 | import java.lang.System.getProperty 4 | 5 | /** 6 | * A singleton `Kafka` Test Container. 7 | * 8 | * This setup guarantees that the container is `reuseable` **if** you have the following setting: In 9 | * `~/.testcontainers.properties` you need to add following line: `testcontainers.reuse.enable=true` 10 | * 11 | * With this flag enabled, test containers will now be able to re-use existing containers, which 12 | * save about 10s per container of start-up. This container starts in ~3s when being re-used, and 13 | * that only happens once per project. 14 | * 15 | * There is no need to `close` or `stop` the test-container since the lifecycle is now 100% 16 | * controlled by TC. 17 | * 18 | * ```kotlin 19 | * class MySpec : StringSpec({ 20 | * val kafka = Kafka.container 21 | * ... 22 | * }) 23 | * ``` 24 | * 25 | * @see https://www.testcontainers.org/test_framework_integration/manual_lifecycle_control/ 26 | * @see https://pawelpluta.com/optimise-testcontainers-for-better-tests-performance/ 27 | */ 28 | class Kafka private constructor(imageName: DockerImageName) : KafkaContainer(imageName) { 29 | 30 | companion object { 31 | private val image: DockerImageName = 32 | if (getProperty("os.arch") == "aarch64") DockerImageName.parse("niciqy/cp-kafka-arm64:7.0.1") 33 | .asCompatibleSubstituteFor("confluentinc/cp-kafka") 34 | else DockerImageName.parse("confluentinc/cp-kafka:6.2.1") 35 | 36 | val container: KafkaContainer by lazy { 37 | Kafka(image).also { it.start() } 38 | } 39 | } 40 | 41 | // override fun containerIsStarted(containerInfo: InspectContainerResponse?, reused: Boolean) { 42 | // super.containerIsStarted(containerInfo, reused) 43 | // // If we're reusing the container, we want to reset the state of the container. We do this by 44 | // // deleting all topics. 45 | // // if (reused) 46 | // runBlocking { 47 | // Admin(AdminSettings(bootstrapServers)).use { admin -> 48 | // val names = admin.listTopics().listings().await() 49 | // admin.deleteTopics(names.map { it.name() }).all().await() 50 | // } 51 | // } 52 | // } 53 | } 54 | -------------------------------------------------------------------------------- /guide/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=OFF -------------------------------------------------------------------------------- /guide/test/AdminSettingsSpec.kt: -------------------------------------------------------------------------------- 1 | // This file was automatically generated from Admin.kt by Knit tool. Do not edit. 
2 | package example.test 3 | 4 | import org.junit.Test 5 | import kotlinx.knit.test.* 6 | 7 | class AdminSettingsSpec { 8 | @Test 9 | fun testExampleAdmin01() { 10 | captureOutput("ExampleAdmin01") { example.exampleAdmin01.main() }.also { lines -> 11 | check(lines.isNotEmpty()) 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /guide/test/AdminSpec.kt: -------------------------------------------------------------------------------- 1 | // This file was automatically generated from Admin.kt by Knit tool. Do not edit. 2 | package example.test 3 | 4 | import org.junit.Test 5 | import kotlinx.knit.test.* 6 | 7 | class AdminSpec { 8 | @Test 9 | fun testExampleAdmin01() { 10 | captureOutput("ExampleAdmin01") { example.exampleAdmin01.main() }.also { lines -> 11 | check(lines.isNotEmpty()) 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /knit.code.include: -------------------------------------------------------------------------------- 1 | package ${knit.package}.${knit.name} 2 | -------------------------------------------------------------------------------- /knit.properties: -------------------------------------------------------------------------------- 1 | knit.dir=guide/example/ 2 | knit.package=example 3 | 4 | test.dir=guide/test/ 5 | test.package=example.test 6 | 7 | knit.include=knit.code.include 8 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ], 5 | "commitBodyTable": true, 6 | "packageRules": [ 7 | { 8 | "matchPackagePatterns": [ 9 | "*" 10 | ], 11 | "matchUpdateTypes": [ 12 | "major", 13 | "minor", 14 | "patch" 15 | ], 16 | "groupName": "all dependencies", 17 | "groupSlug": "all" 18 | } 19 | ] 20 | } -------------------------------------------------------------------------------- /settings.gradle.kts: -------------------------------------------------------------------------------- 1 | enableFeaturePreview("TYPESAFE_PROJECT_ACCESSORS") 2 | 3 | rootProject.name = "kotlin-kafka" 4 | 5 | dependencyResolutionManagement { 6 | repositories { 7 | mavenCentral() 8 | } 9 | } 10 | 11 | plugins { 12 | id("org.gradle.toolchains.foojay-resolver-convention") version "0.8.0" 13 | } 14 | 15 | include(":guide") 16 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/Admin.kt: -------------------------------------------------------------------------------- 1 | @file:Suppress("unused") 2 | 3 | package io.github.nomisRev.kafka 4 | 5 | import java.util.Properties 6 | import org.apache.kafka.clients.admin.Admin 7 | import org.apache.kafka.clients.admin.AdminClientConfig 8 | import org.apache.kafka.clients.admin.CreateTopicsOptions 9 | import org.apache.kafka.clients.admin.DeleteTopicsOptions 10 | import org.apache.kafka.clients.admin.DescribeTopicsOptions 11 | import org.apache.kafka.clients.admin.ListTopicsOptions 12 | import org.apache.kafka.clients.admin.NewTopic 13 | import org.apache.kafka.clients.admin.TopicDescription 14 | 15 | /** 16 | * 17 | * 18 | * Construct an [AutoCloseable] [Admin] with [AdminSettings]. Always consume safely with 19 | * [kotlin.use], or arrow.fx.coroutines.Resource. 
20 | * 21 | * 33 | * ```kotlin 34 | * fun main() = SuspendApp { 35 | * val settings = AdminSettings( 36 | * Kafka.container.bootstrapServers, 37 | * Properties().apply { 38 | * put(CLIENT_DNS_LOOKUP_CONFIG, ClientDnsLookup.USE_ALL_DNS_IPS) 39 | * } 40 | * ) 41 | * Admin(settings).use { admin -> 42 | * admin.createTopic(NewTopic("admin-settings-example", 1, 1)) 43 | * val topics = admin.listTopics().namesToListings().await() 44 | * println(topics) 45 | * admin.deleteTopic("admin-settings-example") 46 | * } 47 | * } 48 | * ``` 49 | * 50 | * 51 | */ 52 | public fun Admin(settings: AdminSettings): Admin = Admin.create(settings.properties()) 53 | 54 | /** Extension method on [Admin] to create a Topic in a suspending way. */ 55 | public suspend fun Admin.createTopic( 56 | topic: NewTopic, 57 | option: CreateTopicsOptions = CreateTopicsOptions(), 58 | ): Unit = createTopic(listOf(topic), option) 59 | 60 | /** Extension method on [Admin] to create Topics in a suspending way. */ 61 | public suspend fun Admin.createTopic( 62 | topic: Iterable, 63 | option: CreateTopicsOptions = CreateTopicsOptions(), 64 | ) { 65 | createTopics(topic.toList(), option).all().await() 66 | } 67 | 68 | public suspend fun Admin.topicExists( 69 | topic: NewTopic, 70 | listTopicsOptions: ListTopicsOptions = ListTopicsOptions(), 71 | ): Boolean = listTopics(listTopicsOptions).names().await().contains(topic.name()) 72 | 73 | /** Extension method on [Admin] to delete a single Topic in a suspending way. */ 74 | public suspend fun Admin.deleteTopic( 75 | name: String, 76 | options: DeleteTopicsOptions = DeleteTopicsOptions(), 77 | ) { 78 | deleteTopics(listOf(name), options).all().await() 79 | } 80 | 81 | /** Extension method to describe a single Topic */ 82 | public suspend fun Admin.describeTopic( 83 | name: String, 84 | options: DescribeTopicsOptions = DescribeTopicsOptions(), 85 | ): TopicDescription? = 86 | describeTopics(listOf(name), options).topicNameValues().getOrDefault(name, null)?.await() 87 | 88 | /** 89 | * Typed data class for creating a valid [Admin] instance. The only required parameter is the 90 | * [bootstrapServer], and all other optional parameters can be added using the [props] parameter. 91 | * 92 | * @see [AdminClientConfig] for additional kafka [Admin] parameters 93 | * 94 | * If you want to request an important _stable_ kafka parameter to be added to the data class, 95 | * please open an issue on the kotlin-kafka repo. 96 | */ 97 | public data class AdminSettings( 98 | val bootstrapServer: String, 99 | private val props: Properties? = null, 100 | ) { 101 | public fun properties(): Properties = 102 | Properties().apply { 103 | props?.let { putAll(it) } 104 | put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer) 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/KafkaFuture.kt: -------------------------------------------------------------------------------- 1 | @file:JvmName("KafkaFutureExt") 2 | 3 | package io.github.nomisRev.kafka 4 | 5 | import kotlinx.coroutines.Deferred 6 | import kotlinx.coroutines.future.asDeferred 7 | import kotlinx.coroutines.future.await 8 | import org.apache.kafka.clients.admin.CreateTopicsResult 9 | import org.apache.kafka.clients.admin.DeleteTopicsResult 10 | import org.apache.kafka.common.KafkaFuture 11 | 12 | /** Await all [DeleteTopicsResult] in a suspending way. 
*/ 13 | public suspend fun DeleteTopicsResult.await() { 14 | all().await() 15 | } 16 | 17 | /** Await all [CreateTopicsResult] in a suspending way. */ 18 | public suspend fun CreateTopicsResult.await() { 19 | all().await() 20 | } 21 | 22 | /** 23 | * Await a [KafkaFuture] in a suspending way. Code inspired by 24 | * [KotlinX Coroutines JDK8](https://github.com/Kotlin/kotlinx.coroutines/tree/master/integration/kotlinx-coroutines-jdk8) 25 | */ 26 | public suspend fun KafkaFuture.await(): T = 27 | toCompletionStage().await() 28 | 29 | /** 30 | * Converts this [KafkaFuture] to an instance of [Deferred]. 31 | * 32 | * The [KafkaFuture] is cancelled when the resulting deferred is cancelled. 33 | */ 34 | @Suppress("DeferredIsResult") 35 | public fun KafkaFuture.asDeferred(): Deferred = 36 | toCompletionStage().asDeferred() 37 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/Serializers.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka 2 | 3 | import org.apache.kafka.common.header.Headers 4 | import org.apache.kafka.common.serialization.Deserializer 5 | import org.apache.kafka.common.serialization.Serializer 6 | 7 | /** 8 | * A [Serializer] for [Nothing], this way we signal in a typed way that `Key` is not used for a 9 | * certain topic. 10 | */ 11 | public object NothingSerializer : Serializer { 12 | override fun close(): Unit = Unit 13 | override fun configure(configs: MutableMap?, isKey: Boolean): Unit = Unit 14 | override fun serialize(topic: String?, data: Nothing?): ByteArray = ByteArray(0) 15 | } 16 | 17 | /** 18 | * A [Deserializer] for [Nothing], this way we signal in a typed way that `Key` is not used for a 19 | * certain topic. 
20 | */ 21 | public object NothingDeserializer : Deserializer { 22 | override fun close(): Unit = Unit 23 | override fun configure(configs: MutableMap?, isKey: Boolean): Unit = Unit 24 | override fun deserialize(topic: String?, data: ByteArray?): Nothing = TODO("Impossible") 25 | } 26 | 27 | public fun Serializer.imap(f: (B) -> A): Serializer = MappedSerializer(this, f) 28 | 29 | private class MappedSerializer(val original: Serializer, val imap: (B) -> A) : 30 | Serializer { 31 | override fun close() = original.close() 32 | 33 | override fun configure(configs: MutableMap?, isKey: Boolean) = 34 | original.configure(configs, isKey) 35 | 36 | override fun serialize(topic: String?, headers: Headers?, data: B): ByteArray = 37 | original.serialize(topic, headers, imap(data)) 38 | 39 | override fun serialize(topic: String?, data: B): ByteArray = original.serialize(topic, imap(data)) 40 | } 41 | 42 | public fun Deserializer.map(f: (A) -> B): Deserializer = MappedDeserializer(this, f) 43 | 44 | private class MappedDeserializer(val original: Deserializer, val map: (A) -> B) : 45 | Deserializer { 46 | override fun close() = original.close() 47 | 48 | override fun configure(configs: MutableMap?, isKey: Boolean) = 49 | original.configure(configs, isKey) 50 | 51 | override fun deserialize(topic: String?, data: ByteArray?): B = 52 | original.deserialize(topic, data).let(map) 53 | 54 | override fun deserialize(topic: String?, headers: Headers?, data: ByteArray?): B = 55 | original.deserialize(topic, headers, data).let(map) 56 | } 57 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/internal/FlowTimeChunked.kt: -------------------------------------------------------------------------------- 1 | @file:Suppress("INVISIBLE_MEMBER", "INVISIBLE_REFERENCE") 2 | 3 | /* 4 | * Inspired by https://github.com/Kotlin/kotlinx.coroutines/pull/2378 5 | */ 6 | package io.github.nomisRev.kafka.internal 7 | 8 | import kotlinx.coroutines.ExperimentalCoroutinesApi 9 | import kotlinx.coroutines.channels.Channel 10 | import kotlinx.coroutines.channels.ReceiveChannel 11 | import kotlinx.coroutines.channels.getOrElse 12 | import kotlinx.coroutines.channels.produce 13 | import kotlinx.coroutines.coroutineScope 14 | import kotlinx.coroutines.flow.Flow 15 | import kotlinx.coroutines.flow.flow 16 | import kotlinx.coroutines.selects.whileSelect 17 | import kotlinx.coroutines.selects.onTimeout 18 | import kotlin.time.Duration 19 | 20 | /** 21 | * Chunk [Flow] until [size] elements are collected, or until a certain [duration] has passed. 22 | * When the [Flow] completes or throws an exception 23 | * 24 | * Groups emissions from this [Flow] into [List] . Time based implementations 25 | * collect upstream and emit to downstream in separate coroutines - concurrently, like Flow.buffer() operator. 26 | * Exact timing of emissions is not guaranteed, as it depends on collector coroutine availability. 27 | * 28 | * Size based chunking happens in a single coroutine and is purely sequential. 29 | * 30 | * Emissions always preserve order. 31 | * Collects upstream into a buffer and emits its content as a list at every interval or when its buffer reaches 32 | * maximum size. When upstream completes (or is empty), it will try to emit immediately what is left of 33 | * a chunk, omitting the interval and maxSize constraints. 34 | * 35 | * @param duration Interval between emissions in milliseconds. 
Every emission happens only after 36 | * interval passes, unless upstream Flow completes sooner or maximum size of a chunk is reached. 37 | * 38 | * @param size Maximum size of a single chunk. If reached, it will try to emit a chunk, ignoring the 39 | * interval constraint. If so happens, time-to-next-chunk gets reset to the interval value. 40 | */ 41 | @ExperimentalCoroutinesApi 42 | @Deprecated("Will no longer be part of kotlin-kafka.") 43 | public fun Flow.chunked( 44 | size: Int, 45 | duration: Duration, 46 | ): Flow> { 47 | require(size > 0) { "Cannot create chunks smaller than 0 but found $size" } 48 | require(!duration.isNegative() && duration != Duration.ZERO) { "Chunk duration should be positive non-zero duration" } 49 | return flow { 50 | coroutineScope { 51 | val emitNowAndMaybeContinue = Channel(capacity = Channel.RENDEZVOUS) 52 | val elements = produce(capacity = size) { 53 | collect { element -> 54 | val hasCapacity = channel.trySend(element).isSuccess 55 | if (!hasCapacity) { 56 | emitNowAndMaybeContinue.send(true) 57 | channel.send(element) 58 | } 59 | } 60 | emitNowAndMaybeContinue.send(false) 61 | } 62 | 63 | whileSelect { 64 | emitNowAndMaybeContinue.onReceive { shouldContinue -> 65 | val chunk = elements.drain(maxElements = size) 66 | if (chunk.isNotEmpty()) emit(chunk) 67 | shouldContinue 68 | } 69 | 70 | onTimeout(duration) { 71 | val chunk: List = elements.drain(maxElements = size) 72 | if (chunk.isNotEmpty()) emit(chunk) 73 | true 74 | } 75 | } 76 | } 77 | } 78 | } 79 | 80 | private tailrec fun ReceiveChannel.drain( 81 | acc: MutableList = mutableListOf(), 82 | maxElements: Int, 83 | ): List = 84 | if (acc.size == maxElements) acc 85 | else { 86 | val nextValue = tryReceive().getOrElse { error: Throwable? -> error?.let { throw (it) } ?: return acc } 87 | acc.add(nextValue) 88 | drain(acc, maxElements) 89 | } 90 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/publisher/Acks.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.publisher 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig 4 | 5 | /** 6 | * The number of acknowledgments the producer requires the leader to have received before considering a request complete. 7 | * This controls the durability of records that are sent 8 | * 9 | * **NOTE:** Enabling idempotence requires this config value to be [All], otherwise [ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG] is ignored. 10 | */ 11 | public enum class Acks(public val value: String) { 12 | /** 13 | * Using [Zero] the producer will not wait for any acknowledgment from the server at all. 14 | * The record will be immediately added to the socket buffer and considered sent. 15 | * No guarantee can be made that the server has received the record in this case, 16 | * and [ProducerConfig.RETRIES_CONFIG] will not take effect (as the client won't generally know of any failures). 17 | * The offset given back for each record will always be set to `-1` 18 | */ 19 | Zero("0"), 20 | 21 | /** 22 | * This will mean the leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. 23 | * In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost. 
24 | */ 25 | One("1"), 26 | 27 | /** 28 | * This means the leader will wait for the full set of in-sync replicas to acknowledge the record. 29 | * This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. 30 | * This is the strongest available guarantee. This is equivalent to the [MinusOne] setting. 31 | */ 32 | All("all"), 33 | 34 | /** 35 | * Alias to all 36 | * @see All 37 | */ 38 | MinusOne("-1"), 39 | } 40 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/CommitStrategy.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver 2 | 3 | import kotlin.time.Duration 4 | 5 | /** 6 | * The strategy to apply to the "Offset commit manager". 7 | * Offsets can be committed to kafka using different strategies: 8 | * 9 | * - Every `n` acknowledged records 10 | * - Every `interval` duration, commit *all* acknowledged records 11 | * - Every `n` acknowledged records **or** every `interval` duration, whichever comes first. 12 | */ 13 | public sealed interface CommitStrategy { 14 | 15 | /** Commit **all** [Offset.acknowledge] messages to kafka every [interval]. */ 16 | @JvmInline 17 | public value class ByTime(public val interval: Duration) : CommitStrategy { 18 | init { 19 | require(interval.isPosNonZero()) { 20 | "Time based auto-commit requires positive non-zero interval but found $interval" 21 | } 22 | } 23 | } 24 | 25 | /** Commit messages to kafka every [size] acknowledged message. */ 26 | @JvmInline 27 | public value class BySize(public val size: Int) : CommitStrategy { 28 | init { 29 | require(size > 0) { 30 | "Size based auto-commit requires positive non-zero commit batch size but found $size" 31 | } 32 | } 33 | } 34 | 35 | /** 36 | * Commit messages to kafka every [size] acknowledged message, or every [interval]. 37 | * Whichever condition is reached first. 38 | */ 39 | public data class BySizeOrTime(public val size: Int, public val interval: Duration) : CommitStrategy { 40 | init { 41 | require(size > 0) { 42 | "Size based auto-commit requires positive non-zero commit batch size but found $size" 43 | } 44 | require(interval.isPosNonZero()) { 45 | "Time based auto-commit requires positive non-zero interval but found $interval" 46 | } 47 | } 48 | } 49 | } 50 | 51 | internal fun CommitStrategy.size(): Int = 52 | when (this) { 53 | is CommitStrategy.BySize -> size 54 | is CommitStrategy.BySizeOrTime -> size 55 | is CommitStrategy.ByTime -> 0 56 | } 57 | 58 | internal fun Duration.isPosNonZero(): Boolean = 59 | this != Duration.ZERO && isPositive() 60 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/ConsumerPartition.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver 2 | 3 | import org.apache.kafka.common.TopicPartition 4 | 5 | /** 6 | * Topic partition interface that supports `seek` operations that can be invoked when partitions are assigned. 7 | */ 8 | public interface ConsumerPartition { 9 | 10 | /** Returns the underlying Kafka topic partition. */ 11 | public val topicPartition: TopicPartition 12 | 13 | /** 14 | * Seeks to the first available offset of the topic partition. 15 | * This overrides the offset starting from which records are fetched. 
16 | */ 17 | public fun seekToBeginning() 18 | 19 | /** 20 | * Seeks to the last offset of the topic partition. 21 | * This overrides the offset starting from which records are fetched. 22 | */ 23 | public fun seekToEnd() 24 | 25 | /** 26 | * Seeks to the specified offset of the topic partition. 27 | * This overrides the offset starting from which records are fetched. 28 | */ 29 | public fun seek(offset: Long) 30 | 31 | /** 32 | * Seek to the topic partition offset that is greater than or equal to the timestamp. 33 | * If there are no matching records, [seekToEnd] is performed. 34 | * See [org.apache.kafka.clients.consumer.Consumer.offsetsForTimes]. 35 | */ 36 | public fun seekToTimestamp(timestamp: Long) 37 | 38 | /** 39 | * Returns the offset of the next record that will be fetched from this topic partition. 40 | * @return current offset of this partition 41 | */ 42 | public fun position(): Long 43 | } 44 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/ConsumerRecord.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecord 4 | 5 | /** 6 | * A [org.apache.kafka.clients.consumer.ConsumerRecord] for keys of [K], and values of [V]. 7 | * With a property [Offset] which allows acknowledging, or committing record offsets to kafka. 8 | */ 9 | public class ReceiverRecord( 10 | record: ConsumerRecord, 11 | /** 12 | * Returns an acknowledgeable offset that should be acknowledged after this record has been consumed. 13 | * Acknowledged records are automatically committed based on the configured [CommitStrategy]. 14 | * Acknowledged records may be also committed using [Offset.commit]. 15 | */ 16 | public val offset: Offset, 17 | ) : ConsumerRecord( 18 | record.topic(), 19 | record.partition(), 20 | record.offset(), 21 | record.timestamp(), 22 | record.timestampType(), 23 | record.serializedKeySize(), 24 | record.serializedValueSize(), 25 | record.key(), 26 | record.value(), 27 | record.headers(), 28 | record.leaderEpoch() 29 | ) -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/Offset.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver 2 | 3 | import org.apache.kafka.clients.consumer.RetriableCommitFailedException 4 | import org.apache.kafka.common.TopicPartition 5 | import kotlinx.coroutines.flow.Flow 6 | 7 | /** 8 | * Topic partition offset that must be acknowledged after the record is processed. 9 | * 10 | * When you [acknowledge] this [Offset] it will be added to the batch of offsets to be committed based on [CommitStrategy], 11 | * whilst [commit] will actually commit this commit to kafka, and suspend until it's completed. 12 | * 13 | * So if you want to _force_ a commit without back-pressuring a stream, you can use `launch { offset.commit() }`. 14 | * This is considered a niche use-case, 15 | * and using the batch commit functionality with [CommitStrategy] and [acknowledge] is recommended. 16 | */ 17 | public interface Offset { 18 | /** The topic partition corresponding to this [ReceiverRecord]. */ 19 | public val topicPartition: TopicPartition 20 | 21 | /** The partition offset corresponding to the record to which this [ReceiverRecord] is associated. 
*/ 22 | public val offset: Long 23 | 24 | /** 25 | * Acknowledges the [ReceiverRecord] associated with this offset. 26 | * The offset will be committed automatically based on the commit configuration [CommitStrategy]. 27 | * When an offset is acknowledged, it is assumed that all records in this partition up to, 28 | * and including this offset have been processed. All acknowledged offsets are committed if possible 29 | * when the receiver [Flow] completes or according to [CommitStrategy]. 30 | */ 31 | public suspend fun acknowledge(): Unit 32 | 33 | /** 34 | * Acknowledges the record associated with this instance and commits all acknowledged offsets. 35 | * This method suspends until the record has been committed, 36 | * it may be wrapped in `launch` to avoid suspending until commit has completed. 37 | * 38 | * If commit fails with [RetriableCommitFailedException] 39 | * the commit operation is retried [ReceiverSettings.maxCommitAttempts] times before this method returns. 40 | */ 41 | public suspend fun commit(): Unit 42 | } 43 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/ReceiverSettings.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver 2 | 3 | import io.github.nomisRev.kafka.NothingDeserializer 4 | import org.apache.kafka.clients.consumer.ConsumerConfig 5 | import org.apache.kafka.common.serialization.Deserializer 6 | import java.util.Properties 7 | import kotlin.time.Duration 8 | import kotlin.time.Duration.Companion.milliseconds 9 | import kotlin.time.Duration.Companion.nanoseconds 10 | import kotlin.time.Duration.Companion.seconds 11 | 12 | private val DEFAULT_POLL_TIMEOUT = 100.milliseconds 13 | private const val DEFAULT_MAX_COMMIT_ATTEMPTS = 100 14 | private val DEFAULT_COMMIT_RETRY_INTERVAL = 500.milliseconds 15 | private val DEFAULT_COMMIT_INTERVAL = 5.seconds 16 | 17 | public enum class AutoOffsetReset(public val value: String) { 18 | Earliest("earliest"), Latest("latest"), None("none") 19 | } 20 | 21 | /** 22 | * A data class that exposes configuration for [KafkaReceiver], 23 | * and the underlying [org.apache.kafka.clients.consumer.KafkaConsumer]. 24 | * 25 | * It forces to specify the required parameters to offer a type-safe API, 26 | * so it requires [bootstrapServers], [valueDeserializer], and [groupId]. 27 | * All other parameters are configured to the sanest defaults. 
28 | */ 29 | public data class ReceiverSettings( 30 | val bootstrapServers: String, 31 | val keyDeserializer: Deserializer, 32 | val valueDeserializer: Deserializer, 33 | val groupId: String, 34 | val autoOffsetReset: AutoOffsetReset = AutoOffsetReset.Earliest, 35 | val commitStrategy: CommitStrategy = CommitStrategy.ByTime(DEFAULT_COMMIT_INTERVAL), 36 | val pollTimeout: Duration = DEFAULT_POLL_TIMEOUT, 37 | val commitRetryInterval: Duration = DEFAULT_COMMIT_RETRY_INTERVAL, 38 | val maxCommitAttempts: Int = DEFAULT_MAX_COMMIT_ATTEMPTS, 39 | val maxDeferredCommits: Int = 0, 40 | val closeTimeout: Duration = Duration.INFINITE, 41 | val properties: Properties = Properties(), 42 | ) { 43 | init { 44 | require(commitRetryInterval.isPosNonZero()) { "Commit Retry interval must be >= 0 but found $pollTimeout" } 45 | require(pollTimeout.isPosNonZero()) { "Poll timeout must be >= 0 but found $pollTimeout" } 46 | require(closeTimeout.isPosNonZero()) { "Close timeout must be >= 0 but found $closeTimeout" } 47 | } 48 | 49 | internal fun toProperties() = Properties().apply { 50 | put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers) 51 | if (keyDeserializer !== NothingDeserializer) { 52 | put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer::class.qualifiedName) 53 | } 54 | put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer::class.qualifiedName) 55 | put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false") 56 | put(ConsumerConfig.GROUP_ID_CONFIG, groupId) 57 | put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset.value) 58 | putAll(properties) 59 | } 60 | } 61 | 62 | /** Alternative constructor for [ReceiverSettings] without a key */ 63 | public fun ReceiverSettings( 64 | bootstrapServers: String, 65 | valueDeserializer: Deserializer, 66 | groupId: String, 67 | autoOffsetReset: AutoOffsetReset = AutoOffsetReset.Earliest, 68 | commitStrategy: CommitStrategy = CommitStrategy.ByTime(DEFAULT_COMMIT_INTERVAL), 69 | pollTimeout: Duration = DEFAULT_POLL_TIMEOUT, 70 | commitRetryInterval: Duration = DEFAULT_COMMIT_RETRY_INTERVAL, 71 | maxCommitAttempts: Int = DEFAULT_MAX_COMMIT_ATTEMPTS, 72 | maxDeferredCommits: Int = 0, 73 | closeTimeout: Duration = Long.MAX_VALUE.nanoseconds, 74 | properties: Properties = Properties(), 75 | ): ReceiverSettings = 76 | ReceiverSettings( 77 | bootstrapServers, 78 | NothingDeserializer, 79 | valueDeserializer, 80 | groupId, 81 | autoOffsetReset, 82 | commitStrategy, 83 | pollTimeout, 84 | commitRetryInterval, 85 | maxCommitAttempts, 86 | maxDeferredCommits, 87 | closeTimeout, 88 | properties 89 | ) 90 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/internals/AckMode.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver.internals 2 | 3 | internal enum class AckMode { 4 | AUTO_ACK, MANUAL_ACK, ATMOST_ONCE, EXACTLY_ONCE 5 | } 6 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/internals/AtMostOnceOffsets.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver.internals 2 | 3 | import org.apache.kafka.clients.consumer.OffsetAndMetadata 4 | import org.apache.kafka.common.TopicPartition 5 | import java.util.concurrent.ConcurrentHashMap 6 | 7 | internal class UtmostOnceOffsets { 8 | private val committedOffsets = 
ConcurrentHashMap() 9 | private val dispatchedOffsets = ConcurrentHashMap() 10 | 11 | fun onCommit(offsets: Map) = 12 | offsets.forEach { (key, value) -> 13 | committedOffsets[key] = value.offset() 14 | } 15 | 16 | fun onDispatch(topicPartition: TopicPartition, offset: Long) { 17 | dispatchedOffsets[topicPartition] = offset 18 | } 19 | 20 | fun committedOffset(topicPartition: TopicPartition): Long = 21 | committedOffsets[topicPartition] ?: -1 22 | 23 | /*suspend*/ fun undoCommitAhead(committableBatch: CommittableBatch): Boolean { 24 | var undoRequired = false 25 | committedOffsets.forEach { (topicPartition, value) -> 26 | // TODO this should be safe. Add requireNotNull with better error message 27 | val offsetToCommit = dispatchedOffsets[topicPartition]!! + 1 28 | if (value > offsetToCommit) { 29 | committableBatch.updateOffset(topicPartition, offsetToCommit) 30 | undoRequired = true 31 | } 32 | } 33 | return undoRequired 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/receiver/internals/SeekablePartition.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka.receiver.internals 2 | 3 | import io.github.nomisRev.kafka.receiver.ConsumerPartition 4 | import org.apache.kafka.clients.consumer.Consumer 5 | import org.apache.kafka.common.TopicPartition 6 | import java.util.Collections 7 | 8 | // This implementation offers an API with the TopicPartition partially applied. 9 | internal class SeekablePartition( 10 | private val consumer: Consumer<*, *>, 11 | override val topicPartition: TopicPartition, 12 | ) : ConsumerPartition { 13 | 14 | override fun seekToBeginning(): Unit = 15 | consumer.seekToBeginning(listOf(topicPartition)) 16 | 17 | override fun seekToEnd(): Unit = 18 | consumer.seekToEnd(listOf(topicPartition)) 19 | 20 | override fun seek(offset: Long): Unit = 21 | consumer.seek(topicPartition, offset) 22 | 23 | override fun seekToTimestamp(timestamp: Long) { 24 | val offsets = consumer.offsetsForTimes(Collections.singletonMap(topicPartition, timestamp)) 25 | val next = offsets.values.iterator().next() 26 | if (next == null) seekToEnd() 27 | else consumer.seek(topicPartition, next.offset()) 28 | } 29 | 30 | override fun position(): Long = 31 | consumer.position(topicPartition) 32 | 33 | override fun toString(): String = 34 | topicPartition.toString() 35 | } 36 | -------------------------------------------------------------------------------- /src/main/kotlin/io/github/nomisRev/kafka/utils/Closeable.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisRev.kafka 2 | 3 | import kotlinx.coroutines.flow.Flow 4 | import kotlinx.coroutines.flow.flow 5 | import java.io.Closeable 6 | 7 | @Deprecated( 8 | "Will be removed in Kotlin-Kafka 0.4.x", 9 | ReplaceWith( 10 | "flow { use { emit(it) } }", 11 | "kotlinx.coroutines.flow.flow" 12 | ) 13 | ) 14 | public fun A.asFlow(): Flow = 15 | flow { use { emit(it) } } 16 | -------------------------------------------------------------------------------- /src/test/kotlin/io/github/nomisrev/kafka/KafkaContainer.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisrev.kafka 2 | 3 | import org.testcontainers.containers.KafkaContainer 4 | import org.testcontainers.utility.DockerImageName 5 | 6 | class Kafka : KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")) { 7 | 8 | 
fun pause() { 9 | dockerClient.pauseContainerCmd(containerId).exec() 10 | } 11 | 12 | fun unpause() { 13 | dockerClient.unpauseContainerCmd(containerId).exec() 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/test/kotlin/io/github/nomisrev/kafka/Predef.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisrev.kafka 2 | 3 | import kotlinx.coroutines.flow.Flow 4 | import kotlinx.coroutines.flow.FlowCollector 5 | import kotlinx.coroutines.flow.map 6 | import kotlin.test.assertTrue 7 | 8 | inline fun Flow.mapIndexed( 9 | crossinline transform: suspend (index: Int, value: A) -> B, 10 | ): Flow { 11 | var index = 0 12 | return map { value -> 13 | transform(index++, value) 14 | } 15 | } 16 | 17 | inline fun assertThrows( 18 | message: String? = "Expected exception ${A::class.java}, but code didn't throw any exception.", 19 | block: () -> Unit, 20 | ): A { 21 | val exception = try { 22 | block() 23 | null 24 | } catch (e: Throwable) { 25 | e 26 | } 27 | ?: throw AssertionError(message) 28 | assertTrue(exception is A, "Expected exception of ${A::class.java} but found ${exception.javaClass.name}") 29 | return exception 30 | } 31 | 32 | suspend fun FlowCollector.emitAll(iterable: Iterable): Unit = 33 | iterable.forEach { emit(it) } -------------------------------------------------------------------------------- /src/test/kotlin/io/github/nomisrev/kafka/receiver/CommitStrategySpec.kt: -------------------------------------------------------------------------------- 1 | package io.github.nomisrev.kafka.receiver 2 | 3 | import io.github.nomisRev.kafka.receiver.CommitStrategy 4 | import kotlinx.coroutines.runBlocking 5 | import org.junit.jupiter.api.Test 6 | import org.junit.jupiter.api.assertThrows 7 | import java.lang.IllegalArgumentException 8 | import kotlin.test.assertEquals 9 | import kotlin.time.Duration.Companion.seconds 10 | 11 | class CommitStrategySpec { 12 | @Test 13 | fun `Negative or zero sized BySize strategy fails`() = runBlocking { 14 | val actual = assertThrows { 15 | CommitStrategy.BySize(0) 16 | }.message 17 | assertEquals( 18 | "Size based auto-commit requires positive non-zero commit batch size but found 0", 19 | actual 20 | ) 21 | } 22 | 23 | @Test 24 | fun `Negative or zero sized BySizeOrTime strategy fails`() = runBlocking { 25 | val actual = assertThrows { 26 | CommitStrategy.BySizeOrTime(0, 1.seconds) 27 | }.message 28 | assertEquals( 29 | "Size based auto-commit requires positive non-zero commit batch size but found 0", 30 | actual 31 | ) 32 | } 33 | 34 | @Test 35 | fun `Negative or zero duration BySizeOrTime strategy fails`() = runBlocking { 36 | val actual = assertThrows { 37 | CommitStrategy.BySizeOrTime(1, 0.seconds) 38 | }.message 39 | assertEquals( 40 | "Time based auto-commit requires positive non-zero interval but found ${0.seconds}", 41 | actual 42 | ) 43 | } 44 | 45 | @Test 46 | fun `Negative or zero duration ByTime strategy fails`() = runBlocking { 47 | val actual = assertThrows { 48 | CommitStrategy.ByTime(0.seconds) 49 | }.message 50 | assertEquals( 51 | "Time based auto-commit requires positive non-zero interval but found ${0.seconds}", 52 | actual 53 | ) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | %yellow(%date{ISO8601}) | %highlight(%level) | 
%cyan(%logger{36}) - %highlight(%msg%n%ex{full}) 6 | 7 | 8 | 9 | 10 | 11 | 12 | --------------------------------------------------------------------------------