├── project ├── build.properties ├── plugins.sbt └── Dependencies.scala ├── renovate.json ├── .github ├── dependabot.yml ├── .scala-steward.conf ├── mergify.yml └── workflows │ ├── test.yml │ ├── release.yml │ └── coverage-report.yml ├── .git-blame-ignore-revs ├── .scalafmt.conf ├── kafka-streams └── src │ ├── test │ ├── resources │ │ └── log4j.properties │ └── scala │ │ └── io │ │ └── github │ │ └── embeddedkafka │ │ └── schemaregistry │ │ ├── TestAvroClass.scala │ │ └── streams │ │ └── ExampleKafkaStreamsSpec.scala │ └── main │ └── scala │ └── io │ └── github │ └── embeddedkafka │ └── schemaregistry │ └── streams │ ├── EmbeddedStreamsConfigImpl.scala │ └── EmbeddedKafkaStreams.scala ├── embedded-kafka-schema-registry └── src │ ├── test │ ├── resources │ │ └── log4j.properties │ └── scala │ │ └── io │ │ └── github │ │ └── embeddedkafka │ │ └── schemaregistry │ │ ├── TestAvroClass.scala │ │ ├── EmbeddedKafkaSpecSupport.scala │ │ ├── EmbeddedKafkaTraitSpec.scala │ │ ├── EmbeddedKafkaObjectSpec.scala │ │ └── EmbeddedKafkaSpec.scala │ └── main │ └── scala │ └── io │ └── github │ └── embeddedkafka │ └── schemaregistry │ ├── Codecs.scala │ ├── servers.scala │ ├── config.scala │ ├── ops │ └── SchemaRegistryOps.scala │ └── EmbeddedKafka.scala ├── LICENSE ├── .gitignore └── README.md /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.11.7 2 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:recommended" 5 | ], 6 | "dependencyDashboardAutoclose": true 7 | } 8 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | labels: 8 | - "dependencies" 9 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Scala Steward: Reformat with scalafmt 3.7.13 2 | d466da5e1e78a36c65a0768db4cece34cbea1e86 3 | 4 | # Scala Steward: Reformat with scalafmt 3.9.7 5 | a52d982ac83272f05b4d197d05850777268dbe47 6 | -------------------------------------------------------------------------------- /.github/.scala-steward.conf: -------------------------------------------------------------------------------- 1 | updates.pin = [ 2 | { groupId = "org.slf4j", artifactId = "slf4j-reload4j", version = "1." 
} 3 | ] 4 | pullRequests.customLabels = [ "dependencies", "scala" ] 5 | updatePullRequests = "always" 6 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = 3.10.2 2 | preset = defaultWithAlign 3 | docstrings.blankFirstLine = yes 4 | newlines.implicitParamListModifierPrefer = before 5 | newlines.beforeCurlyLambdaParams = multilineWithCaseOnly 6 | runner.dialect = scala213 7 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.github.sbt" % "sbt-release" % "1.4.0") 2 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.6") 3 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.11.2") 4 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.4.3") 5 | addSbtPlugin("org.typelevel" % "sbt-tpolecat" % "0.5.2") 6 | -------------------------------------------------------------------------------- /kafka-streams/src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=info, stdout 2 | 3 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 5 | 6 | # Pattern to output the caller's file name and line number. 7 | log4j.appender.stdout.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n 8 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=info, stdout 2 | 3 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 5 | 6 | # Pattern to output the caller's file name and line number. 
7 | log4j.appender.stdout.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n 8 | -------------------------------------------------------------------------------- /.github/mergify.yml: -------------------------------------------------------------------------------- 1 | queue_rules: 2 | - name: default 3 | queue_conditions: 4 | - author=scala-steward 5 | - check-success=test 6 | - check-success=Codacy Static Code Analysis 7 | merge_conditions: 8 | - check-success=test 9 | - check-success=Codacy Static Code Analysis 10 | merge_method: squash 11 | 12 | pull_request_rules: 13 | - name: refactored queue action rule 14 | conditions: [] 15 | actions: 16 | queue: 17 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | pull_request: 5 | branches: ["*"] 6 | 7 | jobs: 8 | test: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - uses: actions/checkout@v6 13 | - uses: actions/setup-java@v5 14 | with: 15 | distribution: 'temurin' 16 | java-version: 17 17 | cache: 'sbt' 18 | - uses: sbt/setup-sbt@v1 19 | - name: Check formatting 20 | run: sbt scalafmtCheckAll 21 | - name: Compile 22 | run: sbt compile 23 | - name: Run cross-version tests 24 | run: sbt +test 25 | -------------------------------------------------------------------------------- /kafka-streams/src/main/scala/io/github/embeddedkafka/schemaregistry/streams/EmbeddedStreamsConfigImpl.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry.streams 2 | 3 | import io.github.embeddedkafka.schemaregistry.EmbeddedKafkaConfig 4 | import io.github.embeddedkafka.streams.EmbeddedStreamsConfig 5 | 6 | final class EmbeddedStreamsConfigImpl 7 | extends EmbeddedStreamsConfig[EmbeddedKafkaConfig] { 8 | override def config(streamName: String, extraConfig: Map[String, AnyRef])( 9 | implicit kafkaConfig: EmbeddedKafkaConfig 10 | ): Map[String, AnyRef] = 11 | baseStreamConfig(streamName) ++ extraConfig 12 | } 13 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/main/scala/io/github/embeddedkafka/schemaregistry/Codecs.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecord 4 | 5 | object Codecs { 6 | implicit def stringKeyGenericValueCrDecoder[V] 7 | : ConsumerRecord[String, V] => (String, V) = 8 | cr => (cr.key, cr.value) 9 | implicit def genericValueCrDecoder[V]: ConsumerRecord[String, V] => V = 10 | _.value 11 | implicit def stringKeyGenericValueTopicCrDecoder[V] 12 | : ConsumerRecord[String, V] => (String, String, V) = 13 | cr => (cr.topic, cr.key, cr.value) 14 | } 15 | -------------------------------------------------------------------------------- /kafka-streams/src/main/scala/io/github/embeddedkafka/schemaregistry/streams/EmbeddedKafkaStreams.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry.streams 2 | 3 | import io.github.embeddedkafka.schemaregistry.{ 4 | EmbeddedKafkaConfig, 5 | EmbeddedKafka 6 | } 7 | import io.github.embeddedkafka.streams.EmbeddedKafkaStreamsSupport 8 | 9 | trait EmbeddedKafkaStreams 10 | extends EmbeddedKafkaStreamsSupport[EmbeddedKafkaConfig] 11 | with EmbeddedKafka { 12 | override 
protected[embeddedkafka] val streamsConfig = 13 | new EmbeddedStreamsConfigImpl 14 | } 15 | 16 | object EmbeddedKafkaStreams extends EmbeddedKafkaStreams 17 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | tags: ["*"] 7 | 8 | jobs: 9 | release: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v6 14 | - uses: actions/setup-java@v5 15 | with: 16 | distribution: 'temurin' 17 | java-version: 17 18 | cache: 'sbt' 19 | - uses: sbt/setup-sbt@v1 20 | - name: Publish artifacts 21 | run: sbt ci-release || sbt sonatypeReleaseAll 22 | env: 23 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 24 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 25 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 26 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 27 | -------------------------------------------------------------------------------- /.github/workflows/coverage-report.yml: -------------------------------------------------------------------------------- 1 | name: Coverage report 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | 7 | jobs: 8 | coverage_report: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - uses: actions/checkout@v6 13 | - uses: actions/setup-java@v5 14 | with: 15 | distribution: 'temurin' 16 | java-version: 17 17 | cache: 'sbt' 18 | - uses: sbt/setup-sbt@v1 19 | - name: Compile 20 | run: sbt compile 21 | - name: Run cross-version tests with coverage 22 | run: sbt coverage +test 23 | - name: Generate coverage report 24 | run: sbt coverageReport 25 | - name: Aggregate sub-projects coverage reports 26 | run: sbt coverageAggregate 27 | - uses: codacy/codacy-coverage-reporter-action@v1 28 | with: 29 | project-token: ${{ secrets.CODACY_PROJECT_TOKEN }} 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Emanuele Blanco 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /kafka-streams/src/test/scala/io/github/embeddedkafka/schemaregistry/TestAvroClass.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import org.apache.avro.specific.SpecificRecordBase 4 | import org.apache.avro.{AvroRuntimeException, Schema} 5 | 6 | case class TestAvroClass(var name: String) extends SpecificRecordBase { 7 | def this() = this("") 8 | 9 | override def get(i: Int): AnyRef = 10 | i match { 11 | case 0 => name 12 | case _ => throw new AvroRuntimeException("Bad index") 13 | } 14 | 15 | override def put(i: Int, v: scala.Any): Unit = 16 | i match { 17 | case 0 => 18 | name = v match { 19 | case (utf8: org.apache.avro.util.Utf8) => utf8.toString 20 | case _ => v.asInstanceOf[String] 21 | } 22 | case _ => throw new AvroRuntimeException("Bad index") 23 | } 24 | 25 | override def getSchema: Schema = TestAvroClass.avroSchema 26 | } 27 | 28 | object TestAvroClass { 29 | val avroSchema = 30 | (new Schema.Parser) 31 | .parse(""" 32 | |{"namespace": "io.github.embeddedkafka.schemaregistry", 33 | | "type": "record", 34 | | "name": "TestAvroClass", 35 | | "fields": [ 36 | | {"name": "name", "type": "string"} 37 | | ] 38 | |} 39 | """.stripMargin) 40 | } 41 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/test/scala/io/github/embeddedkafka/schemaregistry/TestAvroClass.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import org.apache.avro.specific.SpecificRecordBase 4 | import org.apache.avro.{AvroRuntimeException, Schema} 5 | 6 | case class TestAvroClass(var name: String) extends SpecificRecordBase { 7 | def this() = this("") 8 | 9 | override def get(i: Int): AnyRef = 10 | i match { 11 | case 0 => name 12 | case _ => throw new AvroRuntimeException("Bad index") 13 | } 14 | 15 | override def put(i: Int, v: scala.Any): Unit = 16 | i match { 17 | case 0 => 18 | name = v match { 19 | case (utf8: org.apache.avro.util.Utf8) => utf8.toString 20 | case _ => v.asInstanceOf[String] 21 | } 22 | case _ => throw new AvroRuntimeException("Bad index") 23 | } 24 | 25 | override def getSchema: Schema = TestAvroClass.avroSchema 26 | } 27 | 28 | object TestAvroClass { 29 | val avroSchema = 30 | (new Schema.Parser) 31 | .parse(""" 32 | |{"namespace": "io.github.embeddedkafka.schemaregistry", 33 | | "type": "record", 34 | | "name": "TestAvroClass", 35 | | "fields": [ 36 | | {"name": "name", "type": "string"} 37 | | ] 38 | |} 39 | """.stripMargin) 40 | } 41 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/test/scala/io/github/embeddedkafka/schemaregistry/EmbeddedKafkaSpecSupport.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Cloned from embeddedkafka project 3 | */ 4 | package io.github.embeddedkafka.schemaregistry 5 | 6 | import java.net.{InetAddress, Socket} 7 | 8 | import io.github.embeddedkafka.schemaregistry.EmbeddedKafkaSpecSupport.{ 9 | Available, 10 | NotAvailable, 11 | ServerStatus 12 | } 13 | import org.scalatest.Assertion 14 | import org.scalatest.concurrent.{Eventually, IntegrationPatience} 15 | import org.scalatest.matchers.should.Matchers 16 | import org.scalatest.time.{Milliseconds, Seconds, Span} 17 | import 
org.scalatest.wordspec.AnyWordSpecLike 18 | 19 | import scala.util.{Failure, Success, Try} 20 | 21 | trait EmbeddedKafkaSpecSupport 22 | extends AnyWordSpecLike 23 | with Matchers 24 | with Eventually 25 | with IntegrationPatience { 26 | 27 | implicit val config: PatienceConfig = 28 | PatienceConfig(Span(1, Seconds), Span(100, Milliseconds)) 29 | 30 | def expectedServerStatus(port: Int, expectedStatus: ServerStatus): Assertion = 31 | eventually { 32 | status(port) shouldBe expectedStatus 33 | } 34 | 35 | private def status(port: Int): ServerStatus = { 36 | Try(new Socket(InetAddress.getByName("localhost"), port)) match { 37 | case Failure(_) => NotAvailable 38 | case Success(_) => Available 39 | } 40 | } 41 | } 42 | 43 | object EmbeddedKafkaSpecSupport { 44 | sealed trait ServerStatus 45 | case object Available extends ServerStatus 46 | case object NotAvailable extends ServerStatus 47 | } 48 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/main/scala/io/github/embeddedkafka/schemaregistry/servers.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import java.nio.file.Path 4 | import io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication 5 | import io.github.embeddedkafka.{EmbeddedServer, EmbeddedServerWithKafka} 6 | import kafka.server.{BrokerServer, ControllerServer} 7 | 8 | import scala.reflect.io.Directory 9 | 10 | /** 11 | * An instance of an embedded Schema Registry app. 12 | * 13 | * @param app 14 | * the Schema Registry app. 15 | */ 16 | case class EmbeddedSR(app: SchemaRegistryRestApplication) 17 | extends EmbeddedServer { 18 | 19 | /** 20 | * Shuts down the app. 21 | */ 22 | override def stop(clearLogs: Boolean = false): Unit = app.stop() 23 | } 24 | 25 | case class EmbeddedKWithSR( 26 | broker: BrokerServer, 27 | controller: ControllerServer, 28 | app: EmbeddedSR, 29 | logsDirs: Path, 30 | config: EmbeddedKafkaConfig 31 | ) extends EmbeddedServerWithKafka { 32 | override def stop(clearLogs: Boolean): Unit = { 33 | app.stop() 34 | 35 | // In combined mode, we want to shut down the broker first, since the controller may be 36 | // needed for controlled shutdown. Additionally, the controller shutdown process currently 37 | // stops the raft client early on, which would disrupt broker shutdown. 38 | broker.shutdown() 39 | controller.shutdown() 40 | broker.awaitShutdown() 41 | controller.awaitShutdown() 42 | 43 | if (clearLogs) { 44 | val _ = Directory(logsDirs.toFile).deleteRecursively() 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /project/Dependencies.scala: -------------------------------------------------------------------------------- 1 | import sbt._ 2 | 3 | object Dependencies { 4 | 5 | object CustomResolvers { 6 | lazy val Confluent = "confluent" at "https://packages.confluent.io/maven/" 7 | 8 | lazy val resolvers: Seq[Resolver] = Seq(Confluent) 9 | } 10 | 11 | object Versions { 12 | val Scala3 = "3.3.7" 13 | val Scala213 = "2.13.18" 14 | val EmbeddedKafka = "4.1.0" 15 | val ConfluentPlatform = "8.1.1" 16 | val Slf4j = "2.0.17" 17 | val ScalaTest = "3.2.19" 18 | } 19 | 20 | object Common { 21 | // Exclude any transitive Kafka dependency to prevent runtime errors. 
22 | // They tend to evict Apache's since their version is greater 23 | lazy val confluentDeps: Seq[ModuleID] = Seq( 24 | "io.confluent" % "kafka-avro-serializer" % Versions.ConfluentPlatform % Test, 25 | "io.confluent" % "kafka-schema-registry" % Versions.ConfluentPlatform 26 | ).map(_ excludeAll ExclusionRule().withOrganization("org.apache.kafka")) 27 | 28 | lazy val testDeps: Seq[ModuleID] = Seq( 29 | "org.slf4j" % "slf4j-reload4j" % Versions.Slf4j, 30 | "org.scalatest" %% "scalatest-wordspec" % Versions.ScalaTest, 31 | "org.scalatest" %% "scalatest-shouldmatchers" % Versions.ScalaTest 32 | ).map(_ % Test) 33 | } 34 | 35 | object EmbeddedKafkaSchemaRegistry { 36 | lazy val prodDeps: Seq[ModuleID] = Seq( 37 | "io.github.embeddedkafka" %% "embedded-kafka" % Versions.EmbeddedKafka 38 | ) 39 | 40 | } 41 | 42 | object KafkaStreams { 43 | lazy val prodDeps: Seq[ModuleID] = Seq( 44 | "io.github.embeddedkafka" %% "embedded-kafka-streams" % Versions.EmbeddedKafka 45 | ) 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/main/scala/io/github/embeddedkafka/schemaregistry/config.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import io.github.embeddedkafka.{ 4 | EmbeddedKafkaConfig => OriginalEmbeddedKafkaConfig 5 | } 6 | 7 | trait EmbeddedKafkaConfig extends OriginalEmbeddedKafkaConfig { 8 | def schemaRegistryPort: Int 9 | def customSchemaRegistryProperties: Map[String, String] 10 | } 11 | 12 | case class EmbeddedKafkaConfigImpl( 13 | kafkaPort: Int, 14 | controllerPort: Int, 15 | schemaRegistryPort: Int, 16 | customBrokerProperties: Map[String, String], 17 | customProducerProperties: Map[String, String], 18 | customConsumerProperties: Map[String, String], 19 | customSchemaRegistryProperties: Map[String, String] 20 | ) extends EmbeddedKafkaConfig { 21 | override val numberOfThreads: Int = 3 22 | } 23 | 24 | object EmbeddedKafkaConfig { 25 | lazy val defaultSchemaRegistryPort = 6003 26 | 27 | implicit val defaultConfig: EmbeddedKafkaConfig = apply() 28 | 29 | def apply( 30 | kafkaPort: Int = OriginalEmbeddedKafkaConfig.defaultKafkaPort, 31 | controllerPort: Int = OriginalEmbeddedKafkaConfig.defaultControllerPort, 32 | schemaRegistryPort: Int = defaultSchemaRegistryPort, 33 | customBrokerProperties: Map[String, String] = Map.empty, 34 | customProducerProperties: Map[String, String] = Map.empty, 35 | customConsumerProperties: Map[String, String] = Map.empty, 36 | customSchemaRegistryProperties: Map[String, String] = Map.empty 37 | ): EmbeddedKafkaConfig = 38 | EmbeddedKafkaConfigImpl( 39 | kafkaPort, 40 | controllerPort, 41 | schemaRegistryPort, 42 | customBrokerProperties, 43 | customProducerProperties, 44 | customConsumerProperties, 45 | customSchemaRegistryProperties 46 | ) 47 | } 48 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/test/scala/io/github/embeddedkafka/schemaregistry/EmbeddedKafkaTraitSpec.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import io.github.embeddedkafka.schemaregistry.EmbeddedKafka._ 4 | import io.github.embeddedkafka.schemaregistry.EmbeddedKafkaSpecSupport.{ 5 | Available, 6 | NotAvailable 7 | } 8 | import org.scalatest.Assertion 9 | 10 | class EmbeddedKafkaTraitSpec extends EmbeddedKafkaSpecSupport { 11 | "the 
withRunningKafka method" should { 12 | "start a Schema Registry server on a specified port" in { 13 | implicit val config: EmbeddedKafkaConfig = 14 | EmbeddedKafkaConfig(schemaRegistryPort = 12345) 15 | 16 | withRunningKafka { 17 | expectedServerStatus(12345, Available) 18 | } 19 | } 20 | } 21 | 22 | "the withRunningKafkaOnFoundPort method" should { 23 | "start a Schema Registry server on an available port if 0" in { 24 | val userDefinedConfig: EmbeddedKafkaConfig = 25 | EmbeddedKafkaConfig(schemaRegistryPort = 0) 26 | withRunningKafkaOnFoundPort(userDefinedConfig) { actualConfig => 27 | expectedServerStatus(actualConfig.schemaRegistryPort, Available) 28 | } 29 | } 30 | 31 | "start and stop Kafka Broker, Kafka Controller, and Schema Registry successfully on non-zero ports" in { 32 | val userDefinedConfig = 33 | EmbeddedKafkaConfig( 34 | kafkaPort = 12345, 35 | controllerPort = 12346, 36 | schemaRegistryPort = 12347 37 | ) 38 | 39 | val actualConfig = withRunningKafkaOnFoundPort(userDefinedConfig) { 40 | actualConfig => 41 | actualConfig shouldBe userDefinedConfig 42 | everyServerIsAvailable(actualConfig) 43 | actualConfig 44 | } 45 | noServerIsAvailable(actualConfig) 46 | } 47 | } 48 | 49 | private def everyServerIsAvailable(config: EmbeddedKafkaConfig): Assertion = { 50 | expectedServerStatus(config.kafkaPort, Available) 51 | expectedServerStatus(config.schemaRegistryPort, Available) 52 | expectedServerStatus(config.controllerPort, Available) 53 | } 54 | 55 | private def noServerIsAvailable(config: EmbeddedKafkaConfig): Assertion = { 56 | expectedServerStatus(config.kafkaPort, NotAvailable) 57 | expectedServerStatus(config.schemaRegistryPort, NotAvailable) 58 | expectedServerStatus(config.controllerPort, NotAvailable) 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/sbt,scala,intellij+all 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=sbt,scala,intellij+all 3 | 4 | ### Intellij+all ### 5 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 6 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 7 | 8 | # User-specific stuff 9 | .idea/**/workspace.xml 10 | .idea/**/tasks.xml 11 | .idea/**/usage.statistics.xml 12 | .idea/**/dictionaries 13 | .idea/**/shelf 14 | 15 | # AWS User-specific 16 | .idea/**/aws.xml 17 | 18 | # Generated files 19 | .idea/**/contentModel.xml 20 | 21 | # Sensitive or high-churn files 22 | .idea/**/dataSources/ 23 | .idea/**/dataSources.ids 24 | .idea/**/dataSources.local.xml 25 | .idea/**/sqlDataSources.xml 26 | .idea/**/dynamic.xml 27 | .idea/**/uiDesigner.xml 28 | .idea/**/dbnavigator.xml 29 | 30 | # Gradle 31 | .idea/**/gradle.xml 32 | .idea/**/libraries 33 | 34 | # Gradle and Maven with auto-import 35 | # When using Gradle or Maven with auto-import, you should exclude module files, 36 | # since they will be recreated, and may cause churn. Uncomment if using 37 | # auto-import. 
38 | # .idea/artifacts 39 | # .idea/compiler.xml 40 | # .idea/jarRepositories.xml 41 | # .idea/modules.xml 42 | # .idea/*.iml 43 | # .idea/modules 44 | # *.iml 45 | # *.ipr 46 | 47 | # CMake 48 | cmake-build-*/ 49 | 50 | # Mongo Explorer plugin 51 | .idea/**/mongoSettings.xml 52 | 53 | # File-based project format 54 | *.iws 55 | 56 | # IntelliJ 57 | out/ 58 | 59 | # mpeltonen/sbt-idea plugin 60 | .idea_modules/ 61 | 62 | # JIRA plugin 63 | atlassian-ide-plugin.xml 64 | 65 | # Cursive Clojure plugin 66 | .idea/replstate.xml 67 | 68 | # SonarLint plugin 69 | .idea/sonarlint/ 70 | 71 | # Crashlytics plugin (for Android Studio and IntelliJ) 72 | com_crashlytics_export_strings.xml 73 | crashlytics.properties 74 | crashlytics-build.properties 75 | fabric.properties 76 | 77 | # Editor-based Rest Client 78 | .idea/httpRequests 79 | 80 | # Android studio 3.1+ serialized cache file 81 | .idea/caches/build_file_checksums.ser 82 | 83 | ### Intellij+all Patch ### 84 | # Ignore everything but code style settings and run configurations 85 | # that are supposed to be shared within teams. 86 | 87 | .idea/* 88 | 89 | !.idea/codeStyles 90 | !.idea/runConfigurations 91 | 92 | ### SBT ### 93 | # Simple Build Tool 94 | # http://www.scala-sbt.org/release/docs/Getting-Started/Directories.html#configuring-version-control 95 | 96 | dist/* 97 | target/ 98 | lib_managed/ 99 | src_managed/ 100 | project/boot/ 101 | project/plugins/project/ 102 | .history 103 | .cache 104 | .lib/ 105 | 106 | ### SBT Patch ### 107 | .bsp/ 108 | 109 | ### Scala ### 110 | *.class 111 | *.log 112 | 113 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 114 | hs_err_pid* 115 | 116 | # End of https://www.toptal.com/developers/gitignore/api/sbt,scala,intellij+all -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/main/scala/io/github/embeddedkafka/schemaregistry/ops/SchemaRegistryOps.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry.ops 2 | 3 | import java.net.{ServerSocket, URI} 4 | import java.util.Properties 5 | 6 | import io.confluent.kafka.schemaregistry.rest.{ 7 | SchemaRegistryConfig, 8 | SchemaRegistryRestApplication 9 | } 10 | import io.confluent.rest.RestConfig 11 | import io.github.embeddedkafka.EmbeddedServer 12 | import io.github.embeddedkafka.ops.RunningServersOps 13 | import io.github.embeddedkafka.schemaregistry.{EmbeddedKafkaConfig, EmbeddedSR} 14 | 15 | /** 16 | * Trait for Schema Registry-related actions. Relies on 17 | * `io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication`. 
18 | */ 19 | trait SchemaRegistryOps { 20 | 21 | private[embeddedkafka] def startSchemaRegistry( 22 | schemaRegistryPort: Int, 23 | kafkaPort: Int, 24 | customProperties: Map[String, String] 25 | ): SchemaRegistryRestApplication = { 26 | def findAvailablePort: Int = { 27 | val server = new ServerSocket(0) 28 | val port = server.getLocalPort 29 | server.close() 30 | port 31 | } 32 | 33 | val actualSchemaRegistryPort = 34 | if (schemaRegistryPort == 0) findAvailablePort else schemaRegistryPort 35 | 36 | val restAppProperties = Map( 37 | RestConfig.LISTENERS_CONFIG -> s"http://localhost:$actualSchemaRegistryPort", 38 | SchemaRegistryConfig.KAFKASTORE_BOOTSTRAP_SERVERS_CONFIG -> s"PLAINTEXT://localhost:$kafkaPort" 39 | ) ++ customProperties 40 | 41 | val restApp = new SchemaRegistryRestApplication( 42 | new SchemaRegistryConfig(map2Properties(restAppProperties)) 43 | ) 44 | restApp.start() 45 | restApp 46 | } 47 | 48 | private[this] def map2Properties(map: Map[String, AnyRef]): Properties = { 49 | val props = new Properties 50 | 51 | map.foreach { 52 | case (k, v) => props.put(k, v) 53 | } 54 | 55 | props 56 | } 57 | } 58 | 59 | /** 60 | * [[SchemaRegistryOps]] extension relying on `RunningServersOps` for keeping 61 | * track of running [[EmbeddedSR]] instances. 62 | */ 63 | trait RunningSchemaRegistryOps { 64 | this: SchemaRegistryOps with RunningServersOps => 65 | 66 | def startSchemaRegistry(implicit config: EmbeddedKafkaConfig): EmbeddedSR = { 67 | val restApp = EmbeddedSR( 68 | startSchemaRegistry( 69 | config.schemaRegistryPort, 70 | config.kafkaPort, 71 | config.customSchemaRegistryProperties 72 | ) 73 | ) 74 | runningServers.add(restApp) 75 | restApp 76 | } 77 | 78 | /** 79 | * Stops all in memory Schema Registry instances. 80 | */ 81 | def stopSchemaRegistry(): Unit = 82 | runningServers.stopAndRemove(isEmbeddedSR) 83 | 84 | private[embeddedkafka] def isEmbeddedSR(server: EmbeddedServer): Boolean = 85 | server.isInstanceOf[EmbeddedSR] 86 | 87 | private[embeddedkafka] def schemaRegistryPort( 88 | restApp: SchemaRegistryRestApplication 89 | ): Int = { 90 | val listeners = restApp.getConfiguration.originalProperties 91 | .getProperty(RestConfig.LISTENERS_CONFIG) 92 | URI.create(listeners).getPort 93 | } 94 | 95 | } 96 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/test/scala/io/github/embeddedkafka/schemaregistry/EmbeddedKafkaObjectSpec.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import io.github.embeddedkafka.schemaregistry.EmbeddedKafkaSpecSupport.{ 4 | Available, 5 | NotAvailable 6 | } 7 | 8 | import java.nio.file.Files 9 | 10 | class EmbeddedKafkaObjectSpec extends EmbeddedKafkaSpecSupport { 11 | "the EmbeddedKafka object" when { 12 | "invoking the start and stop methods" should { 13 | "start and stop a specific Kafka along with Schema Registry" in { 14 | val firstBroker = EmbeddedKafka.start()( 15 | EmbeddedKafkaConfig( 16 | kafkaPort = 7000, 17 | controllerPort = 7001, 18 | schemaRegistryPort = 7002 19 | ) 20 | ) 21 | EmbeddedKafka.start()( 22 | EmbeddedKafkaConfig( 23 | kafkaPort = 8000, 24 | controllerPort = 8001, 25 | schemaRegistryPort = 8002 26 | ) 27 | ) 28 | 29 | expectedServerStatus(7002, Available) 30 | expectedServerStatus(7000, Available) 31 | expectedServerStatus(7001, Available) 32 | 33 | expectedServerStatus(8002, Available) 34 | expectedServerStatus(8000, Available) 35 | 
expectedServerStatus(8001, Available) 36 | 37 | EmbeddedKafka.stop(firstBroker) 38 | 39 | expectedServerStatus(7002, NotAvailable) 40 | expectedServerStatus(7000, NotAvailable) 41 | expectedServerStatus(7001, NotAvailable) 42 | 43 | expectedServerStatus(8002, Available) 44 | expectedServerStatus(8000, Available) 45 | expectedServerStatus(8001, Available) 46 | 47 | EmbeddedKafka.stop() 48 | } 49 | 50 | "start and stop Kafka Broker, Kafka Controller, and Schema Registry on different specified ports using an implicit configuration" in { 51 | implicit val config: EmbeddedKafkaConfig = 52 | EmbeddedKafkaConfig( 53 | kafkaPort = 12345, 54 | controllerPort = 54321, 55 | schemaRegistryPort = 13542 56 | ) 57 | 58 | EmbeddedKafka.start() 59 | 60 | expectedServerStatus(13542, Available) 61 | expectedServerStatus(12345, Available) 62 | expectedServerStatus(54321, Available) 63 | 64 | EmbeddedKafka.stop() 65 | } 66 | } 67 | 68 | "invoking the isRunning method" should { 69 | "return true when Schema Registry, Kafka Broker, and Kafka Controller are all running" in { 70 | implicit val config: EmbeddedKafkaConfig = 71 | EmbeddedKafkaConfig() 72 | 73 | EmbeddedKafka.start() 74 | EmbeddedKafka.isRunning shouldBe true 75 | EmbeddedKafka.stop() 76 | EmbeddedKafka.isRunning shouldBe false 77 | } 78 | 79 | "return true when Schema Registry, Kafka Broker, and Kafka Controller are all running, if started separately" in { 80 | implicit val config: EmbeddedKafkaConfig = 81 | EmbeddedKafkaConfig() 82 | 83 | EmbeddedKafka.startKafka(Files.createTempDirectory("kafka-test-logs")) 84 | EmbeddedKafka.startSchemaRegistry 85 | 86 | EmbeddedKafka.isRunning shouldBe true 87 | EmbeddedKafka.stop() 88 | EmbeddedKafka.isRunning shouldBe false 89 | } 90 | 91 | "return false when only Kafka Broker and Kafka Controller are running" in { 92 | implicit val config: EmbeddedKafkaConfig = 93 | EmbeddedKafkaConfig() 94 | 95 | EmbeddedKafka.startKafka(Files.createTempDirectory("kafka-test-logs")) 96 | EmbeddedKafka.startSchemaRegistry 97 | EmbeddedKafka.stopSchemaRegistry() 98 | EmbeddedKafka.isRunning shouldBe false 99 | EmbeddedKafka.stop() 100 | EmbeddedKafka.isRunning shouldBe false 101 | } 102 | } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/main/scala/io/github/embeddedkafka/schemaregistry/EmbeddedKafka.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import java.nio.file.{Files, Path} 4 | 5 | import io.github.embeddedkafka.ops.{EmbeddedKafkaOps, RunningEmbeddedKafkaOps} 6 | import io.github.embeddedkafka.schemaregistry.ops.{ 7 | RunningSchemaRegistryOps, 8 | SchemaRegistryOps 9 | } 10 | import io.github.embeddedkafka.{EmbeddedKafkaSupport, EmbeddedServer, ServerOps} 11 | 12 | trait EmbeddedKafka 13 | extends EmbeddedKafkaSupport[EmbeddedKafkaConfig] 14 | with EmbeddedKafkaOps[EmbeddedKafkaConfig, EmbeddedKWithSR] 15 | with SchemaRegistryOps { 16 | override private[embeddedkafka] def baseConsumerConfig( 17 | implicit config: EmbeddedKafkaConfig 18 | ): Map[String, Object] = 19 | defaultConsumerConfig ++ config.customConsumerProperties 20 | 21 | override private[embeddedkafka] def baseProducerConfig( 22 | implicit config: EmbeddedKafkaConfig 23 | ): Map[String, Object] = 24 | defaultProducerConf ++ config.customProducerProperties 25 | 26 | override private[embeddedkafka] def withRunningServers[T]( 27 | config: EmbeddedKafkaConfig, 28 | 
kafkaLogsDir: Path 29 | )(body: EmbeddedKafkaConfig => T): T = { 30 | val (broker, controller) = 31 | startKafka( 32 | config.kafkaPort, 33 | config.controllerPort, 34 | config.customBrokerProperties, 35 | kafkaLogsDir 36 | ) 37 | 38 | val actualBrokerPort = EmbeddedKafka.kafkaPort(broker) 39 | val actualControllerPort = EmbeddedKafka.controllerPort(controller) 40 | 41 | val restApp = startSchemaRegistry( 42 | config.schemaRegistryPort, 43 | actualBrokerPort, 44 | config.customSchemaRegistryProperties 45 | ) 46 | 47 | val configWithUsedPorts = EmbeddedKafkaConfig( 48 | actualBrokerPort, 49 | actualControllerPort, 50 | EmbeddedKafka.schemaRegistryPort(restApp), 51 | config.customBrokerProperties, 52 | config.customProducerProperties, 53 | config.customConsumerProperties, 54 | config.customSchemaRegistryProperties 55 | ) 56 | 57 | try { 58 | body(configWithUsedPorts) 59 | } finally { 60 | restApp.stop() 61 | // In combined mode, we want to shut down the broker first, since the controller may be 62 | // needed for controlled shutdown. Additionally, the controller shutdown process currently 63 | // stops the raft client early on, which would disrupt broker shutdown. 64 | broker.shutdown() 65 | controller.shutdown() 66 | broker.awaitShutdown() 67 | controller.awaitShutdown() 68 | } 69 | } 70 | } 71 | 72 | object EmbeddedKafka 73 | extends EmbeddedKafka 74 | with RunningEmbeddedKafkaOps[EmbeddedKafkaConfig, EmbeddedKWithSR] 75 | with RunningSchemaRegistryOps { 76 | override def start()( 77 | implicit config: EmbeddedKafkaConfig 78 | ): EmbeddedKWithSR = { 79 | val kafkaLogsDir = Files.createTempDirectory("kafka-logs") 80 | 81 | val (broker, controller) = startKafka( 82 | kafkaPort = config.kafkaPort, 83 | controllerPort = config.controllerPort, 84 | customBrokerProperties = config.customBrokerProperties, 85 | kafkaLogDir = kafkaLogsDir 86 | ) 87 | 88 | val actualBrokerPort = EmbeddedKafka.kafkaPort(broker) 89 | val actualControllerPort = EmbeddedKafka.controllerPort(controller) 90 | 91 | val restApp = EmbeddedSR( 92 | startSchemaRegistry( 93 | config.schemaRegistryPort, 94 | actualBrokerPort, 95 | config.customSchemaRegistryProperties 96 | ) 97 | ) 98 | 99 | val configWithUsedPorts = EmbeddedKafkaConfigImpl( 100 | kafkaPort = actualBrokerPort, 101 | controllerPort = actualControllerPort, 102 | schemaRegistryPort = EmbeddedKafka.schemaRegistryPort(restApp.app), 103 | customBrokerProperties = config.customBrokerProperties, 104 | customProducerProperties = config.customProducerProperties, 105 | customConsumerProperties = config.customConsumerProperties, 106 | customSchemaRegistryProperties = config.customSchemaRegistryProperties 107 | ) 108 | 109 | val server = EmbeddedKWithSR( 110 | broker = broker, 111 | controller = controller, 112 | app = restApp, 113 | logsDirs = kafkaLogsDir, 114 | configWithUsedPorts 115 | ) 116 | 117 | runningServers.add(server) 118 | server 119 | } 120 | 121 | override def isRunning: Boolean = 122 | runningServers.list 123 | .toFilteredSeq[EmbeddedKWithSR](s => 124 | // Need to consider both independently-started Schema Registry and 125 | // all-in-one Kafka with SR 126 | isEmbeddedKWithSR(s) || isEmbeddedSR(s) 127 | ) 128 | .nonEmpty 129 | 130 | private[embeddedkafka] def isEmbeddedKWithSR( 131 | server: EmbeddedServer 132 | ): Boolean = 133 | server.isInstanceOf[EmbeddedKWithSR] 134 | } 135 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 
embedded-kafka-schema-registry 2 | 3 | [![Maven Central](https://img.shields.io/maven-central/v/io.github.embeddedkafka/embedded-kafka-schema-registry_2.13)](https://central.sonatype.com/artifact/io.github.embeddedkafka/embedded-kafka-schema-registry_2.13) 4 | [![Test](https://github.com/embeddedkafka/embedded-kafka-schema-registry/actions/workflows/test.yml/badge.svg)](https://github.com/embeddedkafka/embedded-kafka-schema-registry/actions/workflows/test.yml) 5 | [![Codacy Badge](https://api.codacy.com/project/badge/Grade/ea9c18f6d1f547a599d76102cd0e709a)](https://www.codacy.com/gh/embeddedkafka/embedded-kafka-schema-registry) 6 | [![Codacy Coverage Badge](https://api.codacy.com/project/badge/Coverage/ea9c18f6d1f547a599d76102cd0e709a)](https://www.codacy.com/gh/embeddedkafka/embedded-kafka-schema-registry) 7 | [![Mergify Status](https://img.shields.io/endpoint.svg?url=https://api.mergify.com/v1/badges/embeddedkafka/embedded-kafka-schema-registry&style=flat)](https://mergify.io) 8 | [![Scala Steward badge](https://img.shields.io/badge/Scala_Steward-helping-blue.svg?style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAAQCAMAAAARSr4IAAAAVFBMVEUAAACHjojlOy5NWlrKzcYRKjGFjIbp293YycuLa3pYY2LSqql4f3pCUFTgSjNodYRmcXUsPD/NTTbjRS+2jomhgnzNc223cGvZS0HaSD0XLjbaSjElhIr+AAAAAXRSTlMAQObYZgAAAHlJREFUCNdNyosOwyAIhWHAQS1Vt7a77/3fcxxdmv0xwmckutAR1nkm4ggbyEcg/wWmlGLDAA3oL50xi6fk5ffZ3E2E3QfZDCcCN2YtbEWZt+Drc6u6rlqv7Uk0LdKqqr5rk2UCRXOk0vmQKGfc94nOJyQjouF9H/wCc9gECEYfONoAAAAASUVORK5CYII=)](https://scala-steward.org) 9 | 10 | A library that provides in-memory instances of both Kafka and [Confluent Schema Registry](https://docs.confluent.io/platform/current/schema-registry/index.html) to run your tests against. 11 | 12 | Relies on the [embedded-kafka](https://github.com/embeddedkafka/embedded-kafka) library. 13 | 14 | ## Version compatibility matrix 15 | 16 | The library is available on Maven Central. 17 | 18 | Versions match the version of Confluent Schema Registry they're built against. 19 | 20 | | embedded-kafka-schema-registry version | Confluent Schema Registry version | embedded-kafka & Kafka version | Scala versions | Java version | 21 | |----------------------------------------|-----------------------------------|--------------------------------------|-----------------|--------------| 22 | | 8.1.0 | 8.1.0 | 4.1.x | 2.13, 3.3 | 17+ | 23 | | 8.0.0 | 8.0.0 | 4.0.x | 2.13, 3.3 | 17+ | 24 | | 7.9.2 | 7.9.2 | 3.9.x | 2.12, 2.13, 3.3 | 17+ | 25 | | 7.9.1 | 7.9.1 | 3.9.x | 2.12, 2.13, 3.3 | 17+ | 26 | | 7.9.0 | 7.9.0 | 3.9.x | 2.12, 2.13, 3.3 | 8+ | 27 | | 7.8.0 | 7.8.0 | 3.8.x | 2.12, 2.13, 3.3 | 8+ | 28 | | 7.7.0 | 7.7.0 | 3.7.x | 2.12, 2.13, 3.3 | 8+ | 29 | 30 | ## Important known limitation (prior to Kafka v2.8.0) 31 | 32 | [Prior to v2.8.0](https://github.com/apache/kafka/pull/10174) Kafka core was inlining the Scala library, so you couldn't use a different Scala **patch** version than [what Kafka used to compile its jars](https://github.com/apache/kafka/blob/trunk/gradle/dependencies.gradle#L30)! 33 | 34 | ## Breaking change: new package name 35 | 36 | From v6.2.0 onwards the package name has been updated to reflect the library group id (i.e. `io.github.embeddedkafka`). 37 | 38 | Aliases to the old package name have been added, along with a one-time [Scalafix rule](https://github.com/embeddedkafka/embedded-kafka-scalafix) to ensure the smoothest migration.
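For reference, the public entry points now live under the `io.github.embeddedkafka.schemaregistry` package; a minimal sketch of the imports a typical test needs (class and package names taken from the sources in this repository):

```scala
import io.github.embeddedkafka.schemaregistry.{EmbeddedKafka, EmbeddedKafkaConfig}
// Kafka Streams support lives in a dedicated sub-package:
import io.github.embeddedkafka.schemaregistry.streams.EmbeddedKafkaStreams
```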
39 | 40 | ## embedded-kafka-schema-registry 41 | 42 | ### How to use 43 | 44 | * In your `build.sbt` file add the following resolvers: 45 | 46 | ```scala 47 | resolvers ++= Seq( 48 | "confluent" at "https://packages.confluent.io/maven/" 49 | ) 50 | ``` 51 | 52 | * In your `build.sbt` file add the following dependency (replace `x.x.x` with the appropriate version): `"io.github.embeddedkafka" %% "embedded-kafka-schema-registry" % "x.x.x" % Test` 53 | * Have your class extend the `EmbeddedKafka` trait (from the `io.github.embeddedkafka.schemaregistry` package). 54 | * Enclose the code that needs a running instance of Kafka within the `withRunningKafka` closure. 55 | * Provide an implicit `EmbeddedKafkaConfig` (from the same package mentioned before). 56 | 57 | ```scala 58 | class MySpec extends AnyWordSpecLike with Matchers with EmbeddedKafka { 59 | 60 | "runs with embedded kafka and Schema Registry" should { 61 | 62 | "work" in { 63 | implicit val config = EmbeddedKafkaConfig() 64 | 65 | withRunningKafka { 66 | // ... code goes here 67 | } 68 | } 69 | } 70 | } 71 | ``` 72 | 73 | * Kafka Broker, Kafka Controller, and Schema Registry will be instantiated on ports 6001, 6002, and 6003 respectively, and automatically shut down at the end of the test. 74 | 75 | ## embedded-kafka-schema-registry-streams 76 | 77 | A library that builds on top of `embedded-kafka-schema-registry` to offer easy testing of [Kafka Streams](https://cwiki.apache.org/confluence/display/KAFKA/Kafka+Streams) with Confluent Schema Registry. 78 | 79 | It takes care of instantiating and starting your streams as well as closing them after running your test-case code. 80 | 81 | ### How to use 82 | 83 | * In your `build.sbt` file add the following dependency (replace `x.x.x` with the appropriate version): `"io.github.embeddedkafka" %% "embedded-kafka-schema-registry-streams" % "x.x.x" % Test` 84 | * For most cases, have your class extend the `EmbeddedKafkaStreams` trait (from the `io.github.embeddedkafka.schemaregistry.streams` package). This offers both streams management and easy creation of consumers for asserting resulting messages in output/sink topics. 85 | * Use `EmbeddedKafkaStreams.runStreams` together with `EmbeddedKafka.withConsumer` and `EmbeddedKafka.withProducer`. This allows you to create your own consumers of custom types as seen in the [example test](kafka-streams/src/test/scala/io/github/embeddedkafka/schemaregistry/streams/ExampleKafkaStreamsSpec.scala) and in the sketch below.
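A minimal, self-contained sketch of such a test follows; the `MyStreamsSpec` class name, the topic names, and the pass-through topology are illustrative rather than part of the library, and plain `String` serdes are used to keep it short. It mirrors the structure of the example test linked above:

```scala
import io.github.embeddedkafka.Codecs._
import io.github.embeddedkafka.schemaregistry.EmbeddedKafkaConfig
import io.github.embeddedkafka.schemaregistry.streams.EmbeddedKafkaStreams
import org.apache.kafka.common.serialization.{Serde, Serdes}
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class MyStreamsSpec extends AnyWordSpecLike with Matchers with EmbeddedKafkaStreams {
  implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

  "a pass-through topology" should {
    "forward records from the input topic to the output topic" in {
      val (inTopic, outTopic) = ("in", "out")
      val stringSerde: Serde[String] = Serdes.String

      // Build a topology that simply copies records from inTopic to outTopic.
      val builder = new StreamsBuilder
      val stream: KStream[String, String] =
        builder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))
      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      // runStreams starts Kafka, Schema Registry, and the topology,
      // runs the body, and stops everything afterwards.
      runStreams(Seq(inTopic, outTopic), builder.build()) {
        publishToKafka(inTopic, "hello", "world")
        consumeFirstKeyedMessageFrom[String, String](outTopic) shouldBe ("hello" -> "world")
      }
    }
  }
}
```

Avro serdes configured against `http://localhost:${config.schemaRegistryPort}` can be swapped in exactly as the example test does.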
86 | -------------------------------------------------------------------------------- /embedded-kafka-schema-registry/src/test/scala/io/github/embeddedkafka/schemaregistry/EmbeddedKafkaSpec.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry 2 | 3 | import io.confluent.kafka.serializers.{ 4 | AbstractKafkaSchemaSerDeConfig, 5 | KafkaAvroDeserializer, 6 | KafkaAvroDeserializerConfig, 7 | KafkaAvroSerializer 8 | } 9 | import io.github.embeddedkafka.Codecs._ 10 | import io.github.embeddedkafka.schemaregistry.EmbeddedKafka._ 11 | import io.github.embeddedkafka.schemaregistry.EmbeddedKafkaConfig.defaultConfig 12 | import org.apache.kafka.common.serialization.{Deserializer, Serializer} 13 | import org.scalatest.BeforeAndAfterAll 14 | 15 | import scala.concurrent.duration._ 16 | import scala.jdk.CollectionConverters._ 17 | 18 | class EmbeddedKafkaSpec 19 | extends EmbeddedKafkaSpecSupport 20 | with BeforeAndAfterAll { 21 | val consumerPollTimeout: FiniteDuration = 5.seconds 22 | 23 | implicit val serializer: Serializer[TestAvroClass] = { 24 | val props = Map( 25 | AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -> s"http://localhost:${defaultConfig.schemaRegistryPort}" 26 | ) 27 | 28 | val ser = new KafkaAvroSerializer 29 | ser.configure(props.asJava, false) 30 | ser.asInstanceOf[Serializer[TestAvroClass]] 31 | } 32 | 33 | implicit val deserializer: Deserializer[TestAvroClass] = { 34 | val props = Map( 35 | AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -> s"http://localhost:${defaultConfig.schemaRegistryPort}", 36 | KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG -> true.toString 37 | ) 38 | 39 | val ser = new KafkaAvroDeserializer 40 | ser.configure(props.asJava, false) 41 | ser.asInstanceOf[Deserializer[TestAvroClass]] 42 | } 43 | 44 | override def beforeAll(): Unit = { 45 | super.beforeAll() 46 | val _ = EmbeddedKafka.start() 47 | } 48 | 49 | override def afterAll(): Unit = { 50 | EmbeddedKafka.stop() 51 | super.afterAll() 52 | } 53 | 54 | "the publishToKafka method" should { 55 | "publish synchronously a message to Kafka storing its schema into Schema Registry" in { 56 | val message = TestAvroClass("name") 57 | val topic = "publish_test_topic" 58 | publishToKafka(topic, message) 59 | 60 | val record = consumeFirstMessageFrom[TestAvroClass](topic) 61 | 62 | record shouldBe message 63 | } 64 | 65 | "publish synchronously a message with String key to Kafka storing its schema into Schema Registry" in { 66 | val key = "key" 67 | val message = TestAvroClass("name") 68 | val topic = "publish_test_topic_string_key" 69 | 70 | publishToKafka(topic, key, message) 71 | 72 | val (recordKey, recordValue) = 73 | consumeFirstKeyedMessageFrom[String, TestAvroClass](topic) 74 | 75 | recordKey shouldBe key 76 | recordValue shouldBe message 77 | } 78 | 79 | "publish synchronously a batch of messages with String keys to Kafka storing its schema into Schema Registry" in { 80 | val key1 = "key1" 81 | val message1 = TestAvroClass("name") 82 | val key2 = "key2" 83 | val message2 = TestAvroClass("other name") 84 | val topic = "publish_test_topic_batch_string_key" 85 | 86 | val messages = List((key1, message1), (key2, message2)) 87 | 88 | publishToKafka(topic, messages) 89 | 90 | val records = 91 | consumeNumberKeyedMessagesFrom[String, TestAvroClass](topic, number = 2) 92 | 93 | records.size shouldBe 2 94 | 95 | val (record1Key, record1Value) :: (record2Key, record2Value) :: Nil = 96 | records 97 | 98 | 
record1Key shouldBe key1 99 | record1Value shouldBe message1 100 | 101 | record2Key shouldBe key2 102 | record2Value shouldBe message2 103 | } 104 | } 105 | 106 | "the consumeFirstMessageFrom method" should { 107 | "return a message published to a topic reading its schema from Schema Registry" in { 108 | val message = TestAvroClass("name") 109 | val topic = "consume_test_topic" 110 | 111 | publishToKafka(topic, message) 112 | 113 | val record = consumeFirstMessageFrom[TestAvroClass](topic) 114 | 115 | record shouldBe message 116 | } 117 | } 118 | 119 | "the consumeFirstKeyedMessageFrom method" should { 120 | "return a message with String key published to a topic reading its schema from Schema Registry" in { 121 | val key = "greeting" 122 | val message = TestAvroClass("name") 123 | val topic = "consume_test_topic" 124 | 125 | publishToKafka(topic, key, message) 126 | 127 | val (k, m) = consumeFirstKeyedMessageFrom[String, TestAvroClass](topic) 128 | k shouldBe key 129 | m shouldBe message 130 | } 131 | } 132 | 133 | "the consumeNumberMessagesFromTopics method" should { 134 | "consume from multiple topics reading messages schema from Schema Registry" in { 135 | val topicMessagesMap = Map( 136 | "topic1" -> List(TestAvroClass("name")), 137 | "topic2" -> List(TestAvroClass("other name")) 138 | ) 139 | 140 | topicMessagesMap.foreach { 141 | case (topic, messages) => 142 | messages.foreach(publishToKafka(topic, _)) 143 | } 144 | 145 | val consumedMessages = 146 | consumeNumberMessagesFromTopics[TestAvroClass]( 147 | topicMessagesMap.keySet, 148 | topicMessagesMap.values.map(_.size).sum 149 | ) 150 | 151 | consumedMessages.toMap shouldEqual topicMessagesMap 152 | } 153 | } 154 | 155 | "the consumeNumberKeyedMessagesFromTopics method" should { 156 | "consume from multiple topics reading messages schema from Schema Registry" in { 157 | val topicMessagesMap = 158 | Map( 159 | "topic1" -> List(("m1", TestAvroClass("name"))), 160 | "topic2" -> List( 161 | ("m2a", TestAvroClass("other name")), 162 | ("m2b", TestAvroClass("even another name")) 163 | ) 164 | ) 165 | 166 | topicMessagesMap.foreach { 167 | case (topic, keyedMessages) => 168 | keyedMessages.foreach { case (k, v) => publishToKafka(topic, k, v) } 169 | } 170 | 171 | val consumedMessages = 172 | consumeNumberKeyedMessagesFromTopics[String, TestAvroClass]( 173 | topicMessagesMap.keySet, 174 | topicMessagesMap.values.map(_.size).sum 175 | ) 176 | 177 | consumedMessages.toMap shouldEqual topicMessagesMap 178 | } 179 | } 180 | } 181 | -------------------------------------------------------------------------------- /kafka-streams/src/test/scala/io/github/embeddedkafka/schemaregistry/streams/ExampleKafkaStreamsSpec.scala: -------------------------------------------------------------------------------- 1 | package io.github.embeddedkafka.schemaregistry.streams 2 | 3 | import io.confluent.kafka.serializers.{ 4 | AbstractKafkaSchemaSerDeConfig, 5 | KafkaAvroDeserializer, 6 | KafkaAvroDeserializerConfig, 7 | KafkaAvroSerializer 8 | } 9 | import io.github.embeddedkafka.Codecs._ 10 | import io.github.embeddedkafka.schemaregistry.{ 11 | EmbeddedKafkaConfig, 12 | TestAvroClass 13 | } 14 | import io.github.embeddedkafka.schemaregistry.streams.EmbeddedKafkaStreams._ 15 | import org.apache.avro.generic.{GenericRecord, GenericRecordBuilder} 16 | import org.apache.kafka.common.serialization._ 17 | import org.apache.kafka.streams.StreamsBuilder 18 | import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced} 19 | import org.scalatest.Assertion 20 | 
import org.scalatest.concurrent.Eventually 21 | import org.scalatest.matchers.should.Matchers 22 | import org.scalatest.wordspec.AnyWordSpec 23 | 24 | import scala.concurrent.duration._ 25 | import scala.jdk.CollectionConverters._ 26 | 27 | class ExampleKafkaStreamsSpec 28 | extends AnyWordSpec 29 | with Matchers 30 | with Eventually { 31 | implicit val config: EmbeddedKafkaConfig = 32 | EmbeddedKafkaConfig( 33 | kafkaPort = 7000, 34 | controllerPort = 7001, 35 | schemaRegistryPort = 7002 36 | ) 37 | 38 | private def avroSerde[T](props: Map[String, AnyRef]): Serde[T] = { 39 | val ser = new KafkaAvroSerializer 40 | ser.configure(props.asJava, false) 41 | val deser = new KafkaAvroDeserializer 42 | deser.configure(props.asJava, false) 43 | 44 | Serdes.serdeFrom( 45 | ser.asInstanceOf[Serializer[T]], 46 | deser.asInstanceOf[Deserializer[T]] 47 | ) 48 | } 49 | 50 | val (inTopic, outTopic) = ("in", "out") 51 | 52 | val stringSerde: Serde[String] = Serdes.String 53 | val specificAvroSerde: Serde[TestAvroClass] = { 54 | val props = Map( 55 | AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -> s"http://localhost:${config.schemaRegistryPort}", 56 | KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG -> true.toString 57 | ) 58 | 59 | avroSerde(props) 60 | } 61 | val genericAvroSerde: Serde[GenericRecord] = { 62 | val props = Map( 63 | AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -> s"http://localhost:${config.schemaRegistryPort}" 64 | ) 65 | avroSerde(props) 66 | } 67 | 68 | "A Kafka streams test using Schema Registry" should { 69 | "support kafka streams and specific record" in { 70 | val streamBuilder = new StreamsBuilder 71 | val stream: KStream[String, TestAvroClass] = 72 | streamBuilder.stream( 73 | inTopic, 74 | Consumed.`with`(stringSerde, specificAvroSerde) 75 | ) 76 | 77 | stream.to(outTopic, Produced.`with`(stringSerde, specificAvroSerde)) 78 | 79 | runStreams(Seq(inTopic, outTopic), streamBuilder.build()) { 80 | implicit val avroSerializer: Serializer[TestAvroClass] = 81 | specificAvroSerde.serializer 82 | 83 | implicit val avroDeserializer: Deserializer[TestAvroClass] = 84 | specificAvroSerde.deserializer 85 | 86 | publishToKafka(inTopic, "hello", TestAvroClass("world")) 87 | publishToKafka(inTopic, "foo", TestAvroClass("bar")) 88 | publishToKafka(inTopic, "baz", TestAvroClass("yaz")) 89 | 90 | val firstTwoMessages = 91 | consumeNumberKeyedMessagesFrom[String, TestAvroClass](outTopic, 2) 92 | 93 | firstTwoMessages should be( 94 | Seq("hello" -> TestAvroClass("world"), "foo" -> TestAvroClass("bar")) 95 | ) 96 | 97 | val thirdMessage = 98 | consumeFirstKeyedMessageFrom[String, TestAvroClass](outTopic) 99 | 100 | thirdMessage should be("baz" -> TestAvroClass("yaz")) 101 | } 102 | } 103 | 104 | "support kafka streams and generic record" in { 105 | val recordWorld: GenericRecord = 106 | new GenericRecordBuilder(TestAvroClass.avroSchema) 107 | .set("name", "world") 108 | .build() 109 | val recordBar: GenericRecord = 110 | new GenericRecordBuilder(TestAvroClass.avroSchema) 111 | .set("name", "bar") 112 | .build() 113 | val recordYaz: GenericRecord = 114 | new GenericRecordBuilder(TestAvroClass.avroSchema) 115 | .set("name", "yaz") 116 | .build() 117 | 118 | val streamBuilder = new StreamsBuilder 119 | val stream: KStream[String, GenericRecord] = 120 | streamBuilder.stream( 121 | inTopic, 122 | Consumed.`with`(stringSerde, genericAvroSerde) 123 | ) 124 | 125 | stream.to(outTopic, Produced.`with`(stringSerde, genericAvroSerde)) 126 | 127 | runStreams(Seq(inTopic, 
outTopic), streamBuilder.build()) { 128 | implicit val genericAvroSerializer: Serializer[GenericRecord] = 129 | genericAvroSerde.serializer 130 | 131 | implicit val genericAvroDeserializer: Deserializer[GenericRecord] = 132 | genericAvroSerde.deserializer 133 | 134 | publishToKafka(inTopic, "hello", recordWorld) 135 | publishToKafka(inTopic, "foo", recordBar) 136 | publishToKafka(inTopic, "baz", recordYaz) 137 | 138 | val firstTwoMessages = 139 | consumeNumberKeyedMessagesFrom[String, GenericRecord](outTopic, 2) 140 | 141 | firstTwoMessages should be( 142 | Seq("hello" -> recordWorld, "foo" -> recordBar) 143 | ) 144 | 145 | val thirdMessage = 146 | consumeFirstKeyedMessageFrom[String, GenericRecord](outTopic) 147 | 148 | thirdMessage should be("baz" -> recordYaz) 149 | } 150 | } 151 | 152 | "allow support creating custom consumers" in { 153 | implicit val patienceConfig: PatienceConfig = 154 | PatienceConfig(5.seconds, 100.millis) 155 | 156 | implicit val avroSerializer: Serializer[TestAvroClass] = 157 | specificAvroSerde.serializer 158 | 159 | implicit val avroDeserializer: Deserializer[TestAvroClass] = 160 | specificAvroSerde.deserializer 161 | 162 | val streamBuilder = new StreamsBuilder 163 | val stream: KStream[String, TestAvroClass] = 164 | streamBuilder.stream( 165 | inTopic, 166 | Consumed.`with`(stringSerde, specificAvroSerde) 167 | ) 168 | 169 | stream.to(outTopic, Produced.`with`(stringSerde, specificAvroSerde)) 170 | 171 | runStreams(Seq(inTopic, outTopic), streamBuilder.build()) { 172 | publishToKafka(inTopic, "hello", TestAvroClass("world")) 173 | publishToKafka(inTopic, "foo", TestAvroClass("bar")) 174 | 175 | withConsumer[String, TestAvroClass, Assertion] { consumer => 176 | consumer.subscribe(java.util.Collections.singleton(outTopic)) 177 | 178 | eventually { 179 | val records = consumer 180 | .poll(java.time.Duration.ofMillis(100.millis.toMillis)) 181 | .asScala 182 | .map(r => (r.key, r.value)) 183 | 184 | records should be( 185 | Seq( 186 | "hello" -> TestAvroClass("world"), 187 | "foo" -> TestAvroClass("bar") 188 | ) 189 | ) 190 | } 191 | } 192 | } 193 | } 194 | } 195 | } 196 | --------------------------------------------------------------------------------