├── website
│   ├── static
│   │   ├── .nojekyll
│   │   └── img
│   │       └── favicon.ico
│   ├── babel.config.js
│   ├── .gitignore
│   ├── sidebars.js
│   ├── src
│   │   ├── pages
│   │   │   └── styles.module.css
│   │   └── css
│   │       └── custom.css
│   ├── README.md
│   ├── package.json
│   └── docusaurus.config.js
├── kafka-websocket-proxy-docs
│   └── .keep
├── helm
│   ├── .gitignore
│   ├── kafka-websocket-proxy-chart
│   │   ├── Chart.yaml
│   │   ├── templates
│   │   │   ├── service.yaml
│   │   │   ├── ingress.yaml
│   │   │   └── statefulset.yaml
│   │   └── values.yaml
│   └── README.md
├── project
│   ├── build.properties
│   ├── DockerTasksPlugin.scala
│   ├── PrometheusConfigPlugin.scala
│   └── plugins.sbt
├── version.sbt
├── .gitmodules
├── server
│   └── src
│       ├── main
│       │   └── scala
│       │       └── net
│       │           └── scalytica
│       │               └── kafka
│       │                   └── wsproxy
│       │                       ├── codecs
│       │                       │   ├── Implicits.scala
│       │                       │   ├── package.scala
│       │                       │   ├── Binary.scala
│       │                       │   ├── SessionIdSerde.scala
│       │                       │   ├── WsGroupIdSerde.scala
│       │                       │   ├── StringBasedSerde.scala
│       │                       │   ├── DynamicCfgSerde.scala
│       │                       │   └── SessionSerde.scala
│       │                       ├── models
│       │                       │   ├── AclCredentials.scala
│       │                       │   ├── BrokerInfo.scala
│       │                       │   ├── WsCommit.scala
│       │                       │   ├── ValueDetails.scala
│       │                       │   ├── SimpleTopicDescription.scala
│       │                       │   ├── WsMessageId.scala
│       │                       │   ├── KafkaHeader.scala
│       │                       │   ├── ReadIsolationLevel.scala
│       │                       │   ├── value_types.scala
│       │                       │   ├── WsProxyAuthResult.scala
│       │                       │   └── WsProducerResult.scala
│       │                       ├── jmx
│       │                       │   ├── mbeans
│       │                       │   │   └── WsProxyJmxBean.scala
│       │                       │   ├── package.scala
│       │                       │   └── MXBeanActor.scala
│       │                       ├── session
│       │                       │   ├── OldSessionTypes.scala
│       │                       │   ├── SessionHandlerRef.scala
│       │                       │   ├── package.scala
│       │                       │   ├── SessionId.scala
│       │                       │   ├── ClientInstance.scala
│       │                       │   ├── ActiveSessions.scala
│       │                       │   ├── SessionOpResult.scala
│       │                       │   └── SessionDataConsumer.scala
│       │                       ├── utils
│       │                       │   ├── JavaDurationConverters.scala
│       │                       │   ├── WsProxyEnvLoader.scala
│       │                       │   └── HostResolver.scala
│       │                       ├── errors
│       │                       │   ├── KafkaFutureErrorHandler.scala
│       │                       │   └── errors.scala
│       │                       ├── producer
│       │                       │   ├── ExtendedProducerRecord.java
│       │                       │   ├── InputJsonParser.scala
│       │                       │   ├── WsTransactionalProducer.scala
│       │                       │   └── ProducerFlowExtras.scala
│       │                       ├── web
│       │                       │   ├── SocketProtocol.scala
│       │                       │   ├── RoutesPrereqs.scala
│       │                       │   ├── StatusRoutes.scala
│       │                       │   ├── Headers.scala
│       │                       │   ├── SchemaRoutes.scala
│       │                       │   └── websockets
│       │                       │       └── ClientSpecificCfgLoader.scala
│       │                       ├── logging
│       │                       │   ├── WithProxyLogger.scala
│       │                       │   ├── ColouredLevel.scala
│       │                       │   └── WsProxyEnvLoggerConfigurator.scala
│       │                       ├── auth
│       │                       │   ├── tokens.scala
│       │                       │   └── package.scala
│       │                       ├── config
│       │                       │   ├── DynamicConfigHandlerRef.scala
│       │                       │   ├── package.scala
│       │                       │   ├── DynamicConfigurations.scala
│       │                       │   ├── DynamicConfigConsumer.scala
│       │                       │   └── DynamicConfigHandlerProtocol.scala
│       │                       ├── actor
│       │                       │   └── ActorWithProtocolExtensions.scala
│       │                       ├── streams
│       │                       │   └── ProxyFlowExtras.scala
│       │                       └── consumer
│       │                           └── CommitStackHandler.scala
│       └── test
│           ├── resources
│           │   ├── sasl
│           │   │   ├── kafka
│           │   │   │   ├── broker1.keystore.jks
│           │   │   │   ├── client.keystore.jks
│           │   │   │   ├── broker1.truststore.jks
│           │   │   │   ├── client.truststore.jks
│           │   │   │   └── jaas.conf
│           │   │   └── server
│           │   │       └── create-certs.sh
│           │   ├── monitoring
│           │   │   └── prometheus_config.yml
│           │   ├── logback-config-testcase-1.xml
│           │   ├── logback-config-testcase-2.xml
│           │   └── logback-test.xml
│           └── scala
│               ├── net
│               │   └── scalytica
│               │       ├── test
│               │       │   ├── SessionOpResultValues.scala
│               │       │   ├── FlakyTests.scala
│               │       │   ├── ExtraAssertions.scala
│               │       │   ├── EmbeddedHttpServer.scala
│               │       │   ├── test_routes.scala
│               │       │   ├── FileLoader.scala
│               │       │   └── WsClientSpec.scala
│               │       └── kafka
│               │           └── wsproxy
│               │               ├── producer
│               │               │   └── package.scala
│               │               ├── jmx
│               │               │   ├── TestJmxQueries.scala
│               │               │   ├── mbeans
│               │               │   │   ├── MXBeanSpecLike.scala
│               │               │   │   ├── ConnectionStatsMXBeanSpec.scala
│               │               │   │   ├── ProxyStatusMXBeanSpec.scala
│               │               │   │   ├── ProducerClientStatsMXBeanSpec.scala
│               │               │   │   └── ConsumerClientStatsMXBeanSpec.scala
│               │               │   └── TestJmxManager.scala
│               │               ├── auth
│               │               │   ├── TokenRequestSpec.scala
│               │               │   ├── JwkSpec.scala
│               │               │   ├── KafkaLoginModulesSpec.scala
│               │               │   └── UrlJwkProviderSpec.scala
│               │               ├── codecs
│               │               │   └── BinarySpec.scala
│               │               ├── web
│               │               │   ├── HeadersSpec.scala
│               │               │   └── SchemaRoutesSpec.scala
│               │               ├── utils
│               │               │   ├── HostResolverSpec.scala
│               │               │   └── BlockingRetrySpec.scala
│               │               ├── models
│               │               │   └── FormatsSpec.scala
│               │               ├── config
│               │               │   ├── DynamicConfigTestDataGenerators.scala
│               │               │   └── DynamicConfigurationsSpec.scala
│               │               ├── logging
│               │               │   └── ColouredLevelSpec.scala
│               │               ├── session
│               │               │   └── ActiveSessionsSpec.scala
│               │               └── streams
│               │                   └── ProxyFlowExtrasSpec.scala
│               └── org
│                   └── scalatest
│                       └── CustomEitherValues.scala
├── .sbtopts
├── .git-blame-ignore-revs
├── .scalafix.conf
├── .gitlab
│   ├── issue_templates
│   │   ├── Feature.md
│   │   └── Bug.md
│   └── build_templates
│       ├── gitlab-ci-scala-docker.yml
│       ├── gitlab-ci-scala-pages.yml
│       └── gitlab-ci-scala-build.yml
├── docs
│   ├── apis.md
│   ├── getting-started.md
│   ├── index.md
│   ├── development.md
│   └── faq.md
├── .gitignore
├── .scalafmt.conf
├── gatling
│   └── src
│       └── test
│           ├── resources
│           │   └── logback-test.xml
│           └── scala
│               └── net
│                   └── scalytica
│                       └── kafka
│                           └── wsproxy
│                               └── gatling
│                                   ├── BaselineSimulation.scala
│                                   └── BaseConsumerSimulation.scala
├── README.md
└── .codeclimate.yml

--------------------------------------------------------------------------------
/website/static/.nojekyll:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/kafka-websocket-proxy-docs/.keep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/helm/.gitignore:
--------------------------------------------------------------------------------
*.tgz
myvalues.yaml
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
sbt.version=1.11.2
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
ThisBuild / version := "2.0.1-SNAPSHOT"
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
[submodule "docker"]
    path = docker
    url = git@gitlab.com:kpmeen/kafka-dev-sandbox.git
--------------------------------------------------------------------------------
/helm/kafka-websocket-proxy-chart/Chart.yaml:
--------------------------------------------------------------------------------
apiVersion: v2
name: kafka-websocket-proxy
version: 0.1.3
--------------------------------------------------------------------------------
/website/babel.config.js:
--------------------------------------------------------------------------------
module.exports = {
  presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
};
--------------------------------------------------------------------------------
/website/static/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kpmeen/kafka-websocket-proxy/HEAD/website/static/img/favicon.ico
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/Implicits.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.codecs

object Implicits extends Encoders with Decoders
--------------------------------------------------------------------------------
/server/src/test/resources/sasl/kafka/broker1.keystore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kpmeen/kafka-websocket-proxy/HEAD/server/src/test/resources/sasl/kafka/broker1.keystore.jks
--------------------------------------------------------------------------------
/server/src/test/resources/sasl/kafka/client.keystore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kpmeen/kafka-websocket-proxy/HEAD/server/src/test/resources/sasl/kafka/client.keystore.jks
--------------------------------------------------------------------------------
/server/src/test/resources/sasl/kafka/broker1.truststore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kpmeen/kafka-websocket-proxy/HEAD/server/src/test/resources/sasl/kafka/broker1.truststore.jks
--------------------------------------------------------------------------------
/server/src/test/resources/sasl/kafka/client.truststore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kpmeen/kafka-websocket-proxy/HEAD/server/src/test/resources/sasl/kafka/client.truststore.jks
--------------------------------------------------------------------------------
/helm/README.md:
--------------------------------------------------------------------------------
`helm package kafka-websocket-proxy-chart`

`helm upgrade --install kafka-websocket-proxy-chart kafka-websocket-proxy-chart-0.1.3.tgz [-f myvalues.yaml]`
--------------------------------------------------------------------------------
/.sbtopts:
--------------------------------------------------------------------------------
-mem 2048 -J-Xss2M
# -jvm-debug 5005
#-mem 2048 -J-XX:MaxDirectMemorySize=1024M
#-mem 2048 -J-XX:+UnlockExperimentalVMOptions -J-XX:+EnableJVMCI -J-XX:+UseJVMCICompiler
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/models/AclCredentials.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.models

case class AclCredentials(username: String, password: String)
--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
# Scala Steward: Reformat with scalafmt 3.8.2
68be4e9bd06489b43286be222a115d840eb0891f

# Scala Steward: Reformat with scalafmt 3.8.3
c910d3c821d62eaf780ffb5b0904967ca2c3f1f3
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/models/BrokerInfo.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.models

case class BrokerInfo(
    id: Int,
    host: String,
    port: Int,
    rack: Option[String]
)
--------------------------------------------------------------------------------
/server/src/test/resources/monitoring/prometheus_config.yml:
--------------------------------------------------------------------------------
---

ssl: false
startDelaySeconds: 0
#whitelistObjectNames: [
#  "net.scalytica.kafka.wsproxy:*",
#  "kafka.*"
#]
lowercaseOutputName: false
lowercaseOutputLabelNames: false
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/package.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy

package object codecs {

  implicit private[codecs] val cfg: io.circe.generic.extras.Configuration =
    io.circe.generic.extras.Configuration.default

}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/jmx/mbeans/WsProxyJmxBean.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.jmx.mbeans

import javax.management.ObjectName

trait WsProxyJmxBean {

  protected def register(): Unit
  protected def unregister(): Unit

  val objectName: ObjectName
}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/models/WsCommit.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.models

/**
 * Model for capturing inbound offset commit messages from clients.
 *
 * @param wsProxyMessageId
 *   the [[WsMessageId]] for the message.
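 *
 * As an illustrative sketch only (the exact wire format is determined by the
 * JSON codecs, which are not shown here), a commit message sent by a client
 * could look something like:
 * {{{
 *   { "wsProxyMessageId": "my-topic-2-1337-1554400000000" }
 * }}}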
 */
case class WsCommit(wsProxyMessageId: WsMessageId)
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/models/ValueDetails.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.models

import Formats.FormatType

object ValueDetails {

  case class InValueDetails[T](value: T, format: FormatType)

  case class OutValueDetails[T](value: T, format: Option[FormatType] = None)

}
--------------------------------------------------------------------------------
/website/.gitignore:
--------------------------------------------------------------------------------
yarn.lock
package-lock.json

# Dependencies
/node_modules

# Production
/build

# Generated files
.docusaurus
.cache-loader

# Misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local

npm-debug.log*
yarn-debug.log*
yarn-error.log*
--------------------------------------------------------------------------------
/helm/kafka-websocket-proxy-chart/templates/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Release.Name }}
  labels:
    app: kafka-websocket-proxy
spec:
  type: {{ .Values.service.type }}
  ports:
    - name: http
      port: {{ .Values.service.port }}
      targetPort: {{ .Values.service.port }}
  selector:
    app: kafka-websocket-proxy
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/test/SessionOpResultValues.scala:
--------------------------------------------------------------------------------
package net.scalytica.test

import net.scalytica.kafka.wsproxy.session._

trait SessionOpResultValues {

  implicit def convertSessionOpResultToValuable[T](
      res: SessionOpResult
  ): Valuable = new Valuable(res)

  class Valuable(res: SessionOpResult) {

    def value: Session = {
      res.session
    }
  }
}
--------------------------------------------------------------------------------
/.scalafix.conf:
--------------------------------------------------------------------------------
rules = [
  RedundantSyntax,
  OrganizeImports,
  RemoveUnused
]

RedundantSyntax {
  finalObject = true
  stringInterpolator = true
}

OrganizeImports {
  targetDialect = Scala2
  blankLines = Auto
  groups = [
    "re:javax?\\."
    "scala."
    "scala.meta."
    "net.scalytica.kafka."
    "net.scalytica.test"
    "*"
  ]
}

RemoveUnused {
  imports = false
}
--------------------------------------------------------------------------------
/.gitlab/issue_templates/Feature.md:
--------------------------------------------------------------------------------
### Problem to solve



### Further details



### Proposal



### What is the definition of completed?



### Links / references

/label ~Feature ~Improvement
--------------------------------------------------------------------------------
/docs/apis.md:
--------------------------------------------------------------------------------
---
id: apis
title: Endpoints and API
slug: /apis
---

The Kafka WebSocket Proxy provides two sets of APIs. Firstly, the proxy server API, which contains
the websocket endpoints and a health check endpoint. This is the main API for consuming messages
from, and producing messages to, Kafka topics. Secondly, the admin server API, which provides some
useful endpoints from an operational perspective.

* [Proxy server API](proxy-server-api.md)
* [Admin server API](admin-server-api.md)
--------------------------------------------------------------------------------
/server/src/test/resources/sasl/kafka/jaas.conf:
--------------------------------------------------------------------------------
KafkaServer {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="admin"
    password="admin"
    user_admin="admin"
    user_broker1="broker1"
    user_client="client";
};

Client {
    org.apache.zookeeper.server.auth.DigestLoginModule required
    username="kafka"
    password="kafka";
};

Server {
    org.apache.zookeeper.server.auth.DigestLoginModule required
    user_super="admin"
    user_kafka="kafka";
};
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/models/SimpleTopicDescription.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.models

import org.apache.kafka.clients.admin.TopicDescription

case class SimpleTopicDescription(
    name: TopicName,
    numPartitions: Int
)

object SimpleTopicDescription {

  def fromKafkaTopicDescription(
      td: TopicDescription
  ): SimpleTopicDescription = {
    SimpleTopicDescription(
      name = TopicName(td.name()),
      numPartitions = td.partitions().size()
    )
  }

}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/session/OldSessionTypes.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.session

import net.scalytica.kafka.wsproxy.models.WsClientId
import net.scalytica.kafka.wsproxy.models.WsGroupId
import net.scalytica.kafka.wsproxy.models.WsServerId

// TODO: This should eventually be removed from the source tree
final case class OldSession(
    consumerGroupId: WsGroupId,
    consumers: Set[OldConsumerInstance],
    consumerLimit: Int
)

final case class OldConsumerInstance(id: WsClientId, serverId: WsServerId)
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/Binary.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.codecs

import java.nio.charset.StandardCharsets
import java.util.Base64

import scala.util.Try

private[codecs] object Binary {

  private[this] val enc = Base64.getEncoder
  private[this] val dec = Base64.getDecoder

  def decodeBase64(base64: String): Try[Array[Byte]] = {
    Try(dec.decode(base64))
  }

  def encodeBase64(data: Array[Byte]): String = {
    new String(enc.encode(data), StandardCharsets.UTF_8)
  }

}
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/kafka/wsproxy/producer/package.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy

import org.apache.pekko.NotUsed
import org.apache.pekko.kafka.ProducerMessage.Envelope
import org.apache.pekko.stream.scaladsl.Flow
import net.scalytica.kafka.wsproxy.models.{WsProducerRecord, WsProducerResult}

package object producer {

  type ProducerRecord = WsProducerRecord[String, String]
  type ProducerEnvelope =
    Envelope[String, String, ProducerRecord]
  type ProducerFlow =
    Flow[ProducerEnvelope, WsProducerResult, NotUsed]

}
--------------------------------------------------------------------------------
/website/sidebars.js:
--------------------------------------------------------------------------------
module.exports = {
  docs: [
    {
      type: 'category',
      label: 'Kafka WebSocket Proxy',
      items: [
        'index',
        'getting-started',
        'configuration',
        'logging',
        'monitoring',
        {
          type: 'category',
          label: 'Endpoints and APIs',
          link: {
            type: 'doc',
            id: 'apis'
          },
          items: ['http', 'websockets']
        },
        'development',
        'faq'
      ],
    },
  ],
};
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/SessionIdSerde.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.codecs

import net.scalytica.kafka.wsproxy.session.SessionId

class SessionIdSerde extends StringBasedSerde[SessionId] {

  override def serialize(topic: String, data: SessionId): Array[Byte] =
    Option(data).map(d => ser.serialize(topic, d.value)).orNull

  override def deserialize(topic: String, data: Array[Byte]): SessionId = {
    Option(data).flatMap { d =>
      val str = des.deserialize(topic, d)
      Option(str).map(SessionId.apply)
    }.orNull
  }

}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/WsGroupIdSerde.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.codecs

import net.scalytica.kafka.wsproxy.models.WsGroupId

class WsGroupIdSerde extends StringBasedSerde[WsGroupId] {

  override def serialize(topic: String, data: WsGroupId): Array[Byte] =
    Option(data).map(d => ser.serialize(topic, d.value)).orNull

  override def deserialize(topic: String, data: Array[Byte]): WsGroupId = {
    Option(data).flatMap { d =>
      val str = des.deserialize(topic, d)
      Option(str).map(WsGroupId.apply)
    }.orNull
  }

}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/utils/JavaDurationConverters.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.utils

import java.time.{Duration => JDuration}

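// Illustrative usage of the extension methods defined below, assuming
// `import JavaDurationConverters._` is in scope at the call site:
//
//   val fd: FiniteDuration = JDuration.ofSeconds(5).asScala
//   val jd: JDuration      = fd.asJava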
import scala.concurrent.duration.Duration
import scala.concurrent.duration.FiniteDuration

object JavaDurationConverters {

  implicit final class JavaDurationOps(val self: JDuration) extends AnyVal {
    def asScala: FiniteDuration = Duration.fromNanos(self.toNanos)
  }

  implicit final class ScalaDurationOps(val self: Duration) extends AnyVal {
    def asJava: JDuration = JDuration.ofNanos(self.toNanos)
  }
}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/session/SessionHandlerRef.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.session

import org.apache.pekko.actor.typed.ActorRef
import org.apache.pekko.actor.typed.ActorSystem
import org.apache.pekko.kafka.scaladsl.Consumer
import org.apache.pekko.stream.scaladsl.RunnableGraph

case class SessionHandlerRef(
    stream: RunnableGraph[Consumer.Control],
    shRef: ActorRef[SessionHandlerProtocol.SessionProtocol]
) {
  def stop()(implicit system: ActorSystem[_]): Unit = {
    shRef.tell(
      SessionHandlerProtocol.StopSessionHandler(system.ignoreRef)
    )
  }
}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*.log
*.sc
*.swp
*.orig
*.class
*.iml
.run
.history
.idea
.idea_modules
.classpath
.project
.settings
.DS_Store
.java-version
.bloop
.metals
.bsp
project/project
project/target
project/metals.sbt
project/*-shim.sbt
target
ext/
server/src/test/resources/sasl/server/*.crt
server/src/test/resources/sasl/server/*.csr
server/src/test/resources/sasl/server/*.jks

gl-code-quality-report.json
gl-sast-report.json
gl-secret-detection-report.json
semgrep.sarif
.csslintrc
.eslintignore
.eslintrc.yml
.rubocop.yml
coffeelint.json
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/models/WsMessageId.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.models

/**
 * Identifies a consumed message by concatenating information found in the
 * Kafka consumer record.
 *
 * @param value
 *   the String which uniquely identifies a consumed message.
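 *
 * The value is composed by the [[WsMessageId.apply]] factory below as
 * `<topic>-<partition>-<offset>-<timestamp>`. A record on partition 2 of
 * topic `my-topic`, at offset 1337, would for example get an id of the form
 * `my-topic-2-1337-1554400000000`.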
 */
case class WsMessageId private (value: String) extends AnyVal

object WsMessageId {

  def apply(
      topic: TopicName,
      partition: Partition,
      offset: Offset,
      timestamp: Timestamp
  ): WsMessageId =
    WsMessageId(
      s"${topic.value}-${partition.value}-${offset.value}-${timestamp.value}"
    )

}
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/test/FlakyTests.scala:
--------------------------------------------------------------------------------
package net.scalytica.test

import org.scalatest._

trait FlakyTests extends Retries { self: TestSuite =>

  val maxRetries = 5 // scalastyle:ignore

  override def withFixture(test: NoArgTest) = {
    if (isRetryable(test)) withRetryableFixture(test, maxRetries)
    else test()
  }

  def withRetryableFixture(test: NoArgTest, retries: Int): Outcome = {
    val outcome = test()
    outcome match {
      case Failed(_) | Canceled(_) =>
        // Keep the last outcome once the retry budget is spent, otherwise
        // retry with a decremented budget.
        if (retries <= 1) outcome
        else withRetryableFixture(test, retries - 1)

      case other =>
        other
    }
  }
}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/utils/WsProxyEnvLoader.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.utils

object WsProxyEnvLoader {

  def keys: Set[String] = properties.keySet

  def hasKey(key: String): Boolean = keys.exists(_.equals(key))

  def properties: Map[String, String] = sys.env ++ sys.props

  def get(key: String): Option[String] = properties.get(key)

  def exists(key: String): Boolean =
    sys.env.exists(_._1.equals(key)) || sys.props.exists(_._1.equals(key))

  def envString(keyPrefix: String*): String =
    properties.view
      .filterKeys(k => keyPrefix.exists(k.startsWith))
      .toMap
      .mkString("\n")
}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/errors/KafkaFutureErrorHandler.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.errors

import java.util.concurrent.{ExecutionException => JExecException}

import org.apache.kafka.common.errors.TopicExistsException

object KafkaFutureErrorHandler {

  def handle[A](
      ok: => A
  )(
      ko: Throwable => A
  ): PartialFunction[Throwable, A] = {
    case juce: JExecException =>
      Option(juce.getCause)
        .map {
          case _: TopicExistsException => ok
          case _                       => ko(juce)
        }
        .getOrElse(ko(juce))

    case t =>
      ko(t)
  }

}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/session/package.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy

import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg
import net.scalytica.kafka.wsproxy.session.SessionHandlerProtocol._

import org.apache.pekko.kafka.scaladsl.Consumer.Control
import org.apache.pekko.stream.scaladsl.Source
// scalastyle:on line.size.limit

package object session {

  type SessionSource = Source[SessionProtocol, Control]

  val SessionConsumerGroupIdPrefix: String = "ws-proxy-session-consumer"

  private[session] def sessionConsumerGroupId(implicit cfg: AppCfg): String = {
    s"$SessionConsumerGroupIdPrefix-${cfg.server.serverId.value}"
  }
}
--------------------------------------------------------------------------------
/website/src/pages/styles.module.css:
--------------------------------------------------------------------------------
/* stylelint-disable docusaurus/copyright-header */

/**
 * CSS files with the .module.css suffix will be treated as CSS modules
 * and scoped locally.
 */

.heroBanner {
  padding: 4rem 0;
  text-align: center;
  position: relative;
  overflow: hidden;
}

@media screen and (max-width: 966px) {
  .heroBanner {
    padding: 2rem;
  }
}

.buttons {
  display: flex;
  align-items: center;
  justify-content: center;
}

.features {
  display: flex;
  align-items: center;
  padding: 2rem 0;
  width: 100%;
}

.featureImage {
  height: 200px;
  width: 200px;
}
--------------------------------------------------------------------------------
/helm/kafka-websocket-proxy-chart/values.yaml:
--------------------------------------------------------------------------------
replicaCount: 3
image:
  repository: kpmeen/kafka-websocket-proxy
  tag: latest
service:
  type: ClusterIP
  port: 8078
  ingress:
    enabled: "false"
    host: kafka-websocket-proxy.example.com
env:
  WSPROXY_SERVER_ID: "kafka-websocket-proxy"
  WSPROXY_KAFKA_BOOTSTRAP_HOSTS: "localhost:8082"
  WSPROXY_KAFKA_PRODUCER_COMPRESSION_TYPE: "lz4"
  WSPROXY_CH_MAX_STACK_SIZE: "100"
  WSPROXY_CH_AUTOCOMMIT_ENABLED: "false"
  WSPROXY_SECURE_HEALTHCHECK_ENDPOINT: "false"
  WSPROXY_BASIC_AUTH_ENABLED: "false"
  WSPROXY_BASIC_AUTH_USERNAME: "kafka"
  WSPROXY_BASIC_AUTH_PASSWORD: "kafka"
  WSPROXY_BASIC_AUTH_REALM: "Kafka Websocket Proxy"
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/test/ExtraAssertions.scala:
--------------------------------------------------------------------------------
package net.scalytica.test

import org.scalactic.source
import org.scalatest.Assertion
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}

trait ExtraAssertions { self: AnyWordSpecLike with Matchers =>

  def assertCause[T <: AnyRef](f: => Any)(
      implicit ct: ClassTag[T],
      pos: source.Position
  ): Assertion = Try(f) match {
    case Success(_) =>
      fail(s"Expected a ${ct.runtimeClass.getSimpleName} to be thrown")
    case Failure(exception) =>
      // Verify the _cause_ of the thrown exception against the expected type.
      exception.getCause mustBe a[T]
  }

}
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/kafka/wsproxy/jmx/TestJmxQueries.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.jmx

import net.scalytica.kafka.wsproxy.logging.WithProxyLogger

import javax.management.JMX

class TestJmxQueries extends WsProxyJmxQueries with WithProxyLogger {

  def getMxBean[B](beanName: String, clazz: Class[B]): B = {
    val on = asObjectName(beanName, mxBeanType(clazz))
    JMX.newMXBeanProxy(mbs, on, clazz)
  }

  def removeMxBean[B](beanName: String, clazz: Class[B]): Unit = {
    val on = asObjectName(beanName, mxBeanType(clazz))
    try {
      WsProxyJmxRegistrar.unregisterFromMBeanServer(on)
    } catch {
      case t: Throwable => log.info("Nothing to unregister", t)
    }
  }

}
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/kafka/wsproxy/auth/TokenRequestSpec.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.auth

import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class TokenRequestSpec extends AnyWordSpec with Matchers {

  val tokenRequest = TokenRequest(
    "client identifier",
    "super secret",
    "the audience",
    "client_credentials"
  )

  // scalastyle:off
  val expectedJsonString =
    """{"client_id":"client identifier","client_secret":"super secret","audience":"the audience","grant_type":"client_credentials"}"""
  // scalastyle:on

  "The TokenRequest" should {

    "render the correct JSON" in {
      tokenRequest.jsonString mustBe expectedJsonString
    }

  }

}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/producer/ExtendedProducerRecord.java:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.producer;

import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;

/**
 * Custom extension of {@link ProducerRecord} that allows creating an instance
 * with the correct types using the weaker type system in Java.
 *
 * @param <K> the key type
 * @param <V> the value type
 */
public class ExtendedProducerRecord<K, V> extends ProducerRecord<K, V> {

  public ExtendedProducerRecord(String topic, K key, V value, Iterable<Header> headers) {
    super(topic, null, null, key, value, headers);
  }

  public ExtendedProducerRecord(String topic, V value, Iterable<Header> headers) {
    super(topic, null, null, null, value, headers);
  }

}
--------------------------------------------------------------------------------
/website/README.md:
--------------------------------------------------------------------------------
# Website

This website is built using [Docusaurus 2](https://v2.docusaurus.io/), a modern static website generator.

## Installation

```console
yarn install
```

## Local Development

```console
yarn start
```

This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.

## Build

```console
yarn build
```

This command generates static content into the `build` directory and can be served using any static contents hosting service.

## Deployment

```console
GIT_USER=<Your GitHub username> USE_SSH=true yarn deploy
```

If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
version = "3.8.3"

runner.dialect = scala213

project.git = true

style = defaultWithAlign
maxColumn = 80

docstrings.style = Asterisk
docstrings.oneline = fold
docstrings.wrap = yes
docstrings.forceBlankLineBefore = false

danglingParentheses.preset = true

newlines.topLevelStatementBlankLines = []
newlines.penalizeSingleSelectMultiArgList = false
newlines.implicitParamListModifierPrefer = before

align.arrowEnumeratorGenerator = true
align.openParenCallSite = false
align.openParenDefnSite = false

includeCurlyBraceInSelectChains = true

optIn.breakChainOnFirstMethodDot = false

trailingCommas = never

rewrite.scala3.convertToNewSyntax = false

rewrite.rules = [
  sortimports,
  SortModifiers,
  RedundantParens
]
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/session/SessionId.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.session

import net.scalytica.kafka.wsproxy.models.WsClientId
import net.scalytica.kafka.wsproxy.models.WsGroupId
import net.scalytica.kafka.wsproxy.models.WsProducerId

case class SessionId(value: String)

object SessionId {
  def apply(producerId: WsProducerId): SessionId = SessionId(producerId.value)
  def apply(groupId: WsGroupId): SessionId = SessionId(groupId.value)

  def forProducer(mgid: Option[SessionId])(or: WsProducerId): SessionId = {
    mgid.getOrElse(SessionId(s"${or.value}-producer-session"))
  }

  def forConsumer(mgid: Option[WsGroupId])(or: WsClientId): SessionId = {
    mgid
      .map(SessionId.apply)
      .getOrElse(SessionId(s"${or.value}-consumer-session"))
  }

}
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/kafka/wsproxy/codecs/BinarySpec.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.codecs

import java.nio.charset.StandardCharsets

import org.scalatest.TryValues
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class BinarySpec extends AnyWordSpec with Matchers with TryValues {

  val unencodedString = "foobar"
  val unencodedBytes = unencodedString.getBytes(StandardCharsets.UTF_8)
  val encodedString = "Zm9vYmFy"

  "Binary" should {

    "encode a String with base64" in {
      Binary.encodeBase64(unencodedBytes) mustBe encodedString
    }

    "decode a base64 encoded String" in {
      Binary
        .decodeBase64(encodedString)
        .success
        .value must contain allElementsOf unencodedBytes
    }

  }

}
--------------------------------------------------------------------------------
/website/src/css/custom.css:
--------------------------------------------------------------------------------
/* stylelint-disable docusaurus/copyright-header */
/**
 * Any CSS included here will be global. The classic template
 * bundles Infima by default. Infima is a CSS framework designed to
 * work well for content-centric websites.
 */

/* You can override the default Infima variables here. */
:root {
  --ifm-color-primary: #25c2a0;
  --ifm-color-primary-dark: rgb(33, 175, 144);
  --ifm-color-primary-darker: rgb(31, 165, 136);
  --ifm-color-primary-darkest: rgb(26, 136, 112);
  --ifm-color-primary-light: rgb(70, 203, 174);
  --ifm-color-primary-lighter: rgb(102, 212, 189);
  --ifm-color-primary-lightest: rgb(146, 224, 208);
  --ifm-code-font-size: 95%;
}

.docusaurus-highlight-code-line {
  background-color: rgb(72, 77, 91);
  display: block;
  margin: 0 calc(-1 * var(--ifm-pre-padding));
  padding: 0 var(--ifm-pre-padding);
}
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/models/KafkaHeader.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.models

import java.nio.charset.StandardCharsets

import scala.jdk.CollectionConverters._

import org.apache.kafka.common.header.Headers
import org.apache.kafka.common.header.internals.RecordHeader

case class KafkaHeader(key: String, value: String) {

  def asRecordHeader: RecordHeader = {
    new RecordHeader(key, value.getBytes(StandardCharsets.UTF_8))
  }

}

object KafkaHeader {

  def fromKafkaRecordHeaders(headers: Headers): Option[Seq[KafkaHeader]] =
    Option(headers).map(_.iterator().asScala.toSeq).flatMap {
      case theHeaders if theHeaders.isEmpty => None
      case theHeaders =>
        Option(theHeaders.map { h =>
          KafkaHeader(h.key, new String(h.value, StandardCharsets.UTF_8))
        })
    }

}
--------------------------------------------------------------------------------
/gatling/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<configuration>

    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <withJansi>true</withJansi>
        <encoder>
            <pattern>%date{HH:mm:ss.SSS} %highlight(%-5level) %cyan(%logger{0}) - %msg%n</pattern>
        </encoder>
    </appender>

    <root level="WARN">
        <appender-ref ref="CONSOLE"/>
    </root>

</configuration>
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/kafka/wsproxy/jmx/mbeans/MXBeanSpecLike.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.jmx.mbeans

import net.scalytica.kafka.wsproxy.jmx.TestJmxQueries
import net.scalytica.test.WsProxySpec
import org.apache.pekko.actor.testkit.typed.scaladsl.ActorTestKit
import org.apache.pekko.actor.typed.scaladsl.adapter._
import org.scalatest.{BeforeAndAfterAll, OptionValues}
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

trait MXBeanSpecLike
    extends AnyWordSpecLike
    with Matchers
    with OptionValues
    with BeforeAndAfterAll
    with WsProxySpec {

  val tk: ActorTestKit = ActorTestKit(system.toTyped)
  val jmxq: TestJmxQueries = new TestJmxQueries

  override protected def beforeAll(): Unit = super.beforeAll()

  override protected def afterAll(): Unit = {
    tk.shutdownTestKit()
    super.afterAll()
  }

}
--------------------------------------------------------------------------------
/server/src/test/scala/net/scalytica/kafka/wsproxy/web/HeadersSpec.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.web

import org.apache.pekko.http.scaladsl.model.headers.BasicHttpCredentials
import net.scalytica.kafka.wsproxy.web.Headers.XKafkaAuthHeader
import org.scalatest.OptionValues
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class HeadersSpec extends AnyWordSpec with Matchers with OptionValues {

  "The X-Kafka-Auth header" should {

    "correctly parse a header String" in {
      val usr = "foo"
      val pwd = "bar"

      val basicCreds = BasicHttpCredentials(usr, pwd)
      val header = XKafkaAuthHeader(basicCreds)

      val parsed = XKafkaAuthHeader.parse(header.value()).toOption

      header mustBe parsed.value
      parsed.value.credentials.username mustBe usr
      parsed.value.credentials.password mustBe pwd
    }

  }

}
--------------------------------------------------------------------------------
/docs/getting-started.md:
--------------------------------------------------------------------------------
---
id: getting-started
title: Getting Started
slug: /getting-started
---

## Docker image

The `kafka-websocket-proxy` is available pre-installed in a docker image from
Docker Hub at [kpmeen/kafka-websocket-proxy](https://hub.docker.com/r/kpmeen/kafka-websocket-proxy/tags).

For configuration, please see [Configuration](configuration.md) for details on
which environment properties should be used.

### Healthcheck

To add a healthcheck to the docker container, a command calling the
`/healthcheck` endpoint can be configured. Below is an example using
docker-compose:

```
services:
  ...

  kafka-ws-proxy:
    image: kpmeen/kafka-websocket-proxy:latest
    ...
    healthcheck:
      test: ['CMD-SHELL', 'curl -f http://localhost:8078/healthcheck || exit 1']
      interval: 30s
      timeout: 3s
      retries: 40
    ...

  ...
```
--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/web/SocketProtocol.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.web

import net.scalytica.kafka.wsproxy.StringExtensions

object SocketProtocol {

  sealed trait SocketPayload { self =>

    lazy val name: String =
      self.niceClassSimpleName.stripSuffix("Payload").toSnakeCase

    def isSameName(s: String): Boolean = name.equalsIgnoreCase(s)
  }

  object SocketPayload {

    def fromString(string: String): Option[SocketPayload] =
      Option(string).flatMap {
        case s: String if JsonPayload.isSameName(s) => Some(JsonPayload)
        case _                                      => None
      }

    def unsafeFromString(s: String): SocketPayload =
      fromString(s).getOrElse {
        throw new IllegalArgumentException(
          s"'$s' is not a valid socket payload"
        )
      }
  }

  case object JsonPayload extends SocketPayload

}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
[![pipeline status](https://gitlab.com/kpmeen/kafka-websocket-proxy/badges/master/pipeline.svg)](https://gitlab.com/kpmeen/kafka-websocket-proxy/commits/master)
[![coverage report](https://gitlab.com/kpmeen/kafka-websocket-proxy/badges/master/coverage.svg)](https://gitlab.com/kpmeen/kafka-websocket-proxy/commits/master)

# Kafka WebSocket Proxy

Documentation is available at [https://kpmeen.gitlab.io/kafka-websocket-proxy/](https://kpmeen.gitlab.io/kafka-websocket-proxy/).

> ### Important note on compatibility
>
> Prior to version 1.x.x the internal session model only supported consumers.
> This model is not compatible with the session model used in version 1.x.x.
>
> If Kafka WebSocket Proxy has only ever been used for producer clients, then
> the upgrade should be unproblematic.
>
> When Kafka WebSocket Proxy has been used for consuming data it is best to
> ensure that the session topic is empty / recreated on the Kafka cluster before
> deploying version 1.x.x.
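>
> As an illustrative example only, the session topic can be removed with the
> standard Kafka CLI tooling before deploying the new version. The actual
> topic name is defined by the proxy configuration, so the placeholders below
> must be replaced with values from the target environment:
>
> ```
> kafka-topics --bootstrap-server <broker-host>:<port> \
>   --delete --topic <session-state-topic-name>
> ```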

--------------------------------------------------------------------------------
/server/src/main/scala/net/scalytica/kafka/wsproxy/web/RoutesPrereqs.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.web

import net.scalytica.kafka.wsproxy.models.WsServerId

import io.circe.Json
import org.apache.pekko.http.scaladsl.model.ContentTypes
import org.apache.pekko.http.scaladsl.model.HttpEntity
import org.apache.pekko.http.scaladsl.model.HttpResponse
import org.apache.pekko.http.scaladsl.model.StatusCode

trait RoutesPrereqs {
  protected val serverId: WsServerId

  implicit protected def jsonToString(json: Json): String = json.spaces2

  protected def jsonMessageFromString(msg: String): Json =
    Json.obj("message" -> Json.fromString(msg))

  protected def jsonResponseMsg(
      statusCode: StatusCode,
      message: String
  ): HttpResponse = {
    HttpResponse(
      status = statusCode,
      entity = HttpEntity(
        contentType = ContentTypes.`application/json`,
        string = jsonMessageFromString(message)
      )
    )
  }
}
--------------------------------------------------------------------------------
/gatling/src/test/scala/net/scalytica/kafka/wsproxy/gatling/BaselineSimulation.scala:
--------------------------------------------------------------------------------
package net.scalytica.kafka.wsproxy.gatling

import io.gatling.core.Predef._
import io.gatling.http.Predef._

import scala.concurrent.duration._

// scalastyle:off magic.number
class BaselineSimulation extends BaseConsumerSimulation {

  val topic = "test1"
  val payload = "json"
  val valTpe = "string"
  val autoCommit = "true"

  val scn = scenario("Baseline WebSocket")
    .exec { session =>
      session.setAll(
        "cid" -> ("gatling-consumer-" + session.userId),
        "gid" -> ("gatling-group-" + session.userId)
      )
    }
    .exec {
      ws("Connect WS")
        .connect(socketOutUri(topic, payload, valTpe))
        .await(30 seconds)(
          multiWsChecks(topic = topic, hasKey = false, num = 1): _*
        )
    }
    .exec(ws("Close WS").close)

  setUp(scn.inject(atOnceUsers(30))).protocols(httpProtocol)
}
// scalastyle:on magic.number
--------------------------------------------------------------------------------
/.codeclimate.yml:
--------------------------------------------------------------------------------
---
version: 2
prepare:
  fetch:
    - url: "https://gitlab.com/gitlab-org/ci-cd/codequality/-/raw/master/codeclimate_defaults/.codeclimate.yml.template"
      path: ".default-codeclimate.yml"

checks:
  # Disable checks that are handled by Scalastyle
  argument-count:
    enabled: false
  complex-logic:
    enabled: false
  file-lines:
    enabled: false
  method-complexity:
    enabled: false
  method-count:
    enabled: false
  method-lines:
    enabled: false
  return-statements:
    enabled: false
  identical-code:
    enabled: false
  similar-code:
    enabled: false
  nested-control-flow:
    enabled: false

plugins:
  scalastyle:
    enabled: true
    config:
      config: scalastyle-config.xml

exclude_patterns:
  - "docker/"
  - ".gitlab/"
  - ".idea/"
  - "gatling/"
  - "project/"
  - "target/"
  - "**/target/"
  - "script/"
  - "**/spec/"
  - "**/test/"
  - "docs/"
  - "website/"
--------------------------------------------------------------------------------
/.gitlab/issue_templates/Bug.md:
--------------------------------------------------------------------------------
### Summary



### Steps to reproduce



### Example Project



### What is the current bug behavior?



### What is the expected correct behavior?



### Relevant logs and/or screenshots



### Possible fixes



/label ~bug
/assign @kpmeen
--------------------------------------------------------------------------------
/project/DockerTasksPlugin.scala:
--------------------------------------------------------------------------------
package net.scalytica.sbt.plugin

import sbt._

import scala.sys.process._

object DockerTasksPlugin extends AutoPlugin {

  object autoImport {
    lazy val startKafka = taskKey[Unit]("Start single-node cluster.")
    lazy val stopKafka = taskKey[Unit]("Stop single-node cluster.")
    lazy val restartKafka = taskKey[Unit]("Restart single-node cluster.")
    lazy val statusKafka = taskKey[Unit]("Check single-node cluster status.")
    lazy val cleanKafka = settingKey[Boolean]("If true, will pass -c to init")

  }

  import autoImport._

  override def globalSettings: Seq[Setting[_]] = {
    def initScript(cmd: String, clean: Boolean) = {
      val args = if (clean) " -c" else ""
      s"./docker/plain/init.sh $cmd$args"
    }

    Seq(
      cleanKafka := true,
      startKafka := { initScript("start", cleanKafka.value) ! },
      stopKafka := { initScript("stop", cleanKafka.value) ! },
      restartKafka := { initScript("restart", cleanKafka.value) ! },
      statusKafka := { initScript("status", cleanKafka.value) ! }
    )
  }

}
--------------------------------------------------------------------------------
/server/src/test/resources/logback-config-testcase-1.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<configuration>

    <conversionRule conversionWord="colouredLevel"
                    converterClass="net.scalytica.kafka.wsproxy.logging.ColouredLevel"/>

    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <withJansi>true</withJansi>
        <encoder>
            <pattern>%d{HH:mm:ss.SSS} %colouredLevel %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>

    <root level="INFO">
        <appender-ref ref="STDOUT"/>
    </root>

</configuration>
--------------------------------------------------------------------------------
/website/package.json:
--------------------------------------------------------------------------------
{
  "name": "kafka-websocket-proxy-docs",
  "version": "0.0.0",
  "private": true,
  "workspaces": {
    "packages": [
      "docs"
    ],
    "nohoist": [
      "**/html-minifier-terser"
    ]
  },
  "scripts": {
    "docusaurus": "docusaurus",
    "start": "docusaurus start",
    "build": "docusaurus build",
    "swizzle": "docusaurus swizzle",
    "deploy": "docusaurus deploy",
    "clear": "docusaurus clear",
    "serve": "docusaurus serve",
    "write-translations": "docusaurus write-translations",
    "write-heading-ids": "docusaurus write-heading-ids"
  },
  "dependencies": {
    "@algolia/client-search": "^4.5.1",
    "@docusaurus/core": "^2.0.0-beta.0",
    "@docusaurus/plugin-content-docs": "^2.0.0-beta.0",
    "@docusaurus/preset-classic": "^2.0.0-beta.0",
    "@mdx-js/react": "^1.6.21",
    "@types/react": "^17.0.1",
    "clsx": "^1.1.1",
    "prism-react-renderer": "^1.1.1",
    "react": "^17.0.1",
    "react-dom": "^17.0.1"
  },
  "browserslist": {
    "production": [
      ">0.5%",
      "not dead",
      "not op_mini all"
    ],
    "development": [
      "last 1 chrome version",
      "last 1 firefox version",
      "last 1 safari version"
    ]
  }
}
--------------------------------------------------------------------------------
/.gitlab/build_templates/gitlab-ci-scala-docker.yml:
--------------------------------------------------------------------------------
################################################################################
# GitLab CI build script for Scala applications published as docker images    #
################################################################################
# scala-steward:off

variables:
  CI: "true"
  DOCKER_DRIVER: overlay2
  DOCKER_TLS_CERTDIR: ""
  # Setting specific folder for sbt-coursier to cache artifacts
  COURSIER_CACHE: "/root/cache/coursier"

cache:
  untracked: true
  paths:
    - cache

publish docker containers:
  stage: publish-docker
  image: registry.gitlab.com/kpmeen/docker-scala-sbt:scala_2.13.16_jdk21
  interruptible: false
  tags:
    - saas-linux-small-amd64
  services:
    - docker:24.0.6-dind
  before_script:
    - docker info
  script:
    - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN registry.gitlab.com
    - docker login -u kpmeen -p $DOCKERHUB_TOKEN
    - docker buildx create --name kwp-multiarch-builder-$CI_JOB_ID --driver docker-container --bootstrap --use
    - sbt docker:publish
  rules:
    - if: $CI_MERGE_REQUEST_ID
      when: never
    - if: $CI_COMMIT_TAG =~ /^v\d.*/
      when: on_success
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: on_success
--------------------------------------------------------------------------------
/helm/kafka-websocket-proxy-chart/templates/ingress.yaml: -------------------------------------------------------------------------------- 1 | {{- if eq .Values.service.ingress.enabled "true" }} 2 | apiVersion: networking.k8s.io/v1 3 | kind: Ingress 4 | metadata: 5 | annotations: 6 | kubernetes.io/ingress.class: nginx 7 | nginx.ingress.kubernetes.io/proxy-read-timeout: "3600" 8 | nginx.ingress.kubernetes.io/proxy-send-timeout: "3600" 9 | nginx.ingress.kubernetes.io/server-snippets: |- 10 | location /socket { 11 | proxy_set_header Upgrade $http_upgrade; 12 | proxy_http_version 1.1; 13 | proxy_set_header X-Forwarded-Host $http_host; 14 | proxy_set_header X-Forwarded-Proto $scheme; 15 | proxy_set_header X-Forwarded-For $remote_addr; 16 | proxy_set_header Host $host; 17 | proxy_set_header Connection "upgrade"; 18 | proxy_cache_bypass $http_upgrade; 19 | } 20 | name: kafka-websocket-proxy 21 | spec: 22 | rules: 23 | - host: {{ .Values.service.ingress.host }} 24 | http: 25 | paths: 26 | - backend: 27 | service: 28 | name: kafka-websocket-proxy 29 | port: 30 | number: 8078 31 | path: / 32 | pathType: Prefix 33 | {{- if eq .Values.service.ingress.tls.enabled "true" }} 34 | tls: 35 | - hosts: 36 | - {{ .Values.service.ingress.host }} 37 | secretName: {{ .Values.service.ingress.tls.secretName }} 38 | {{- end }} 39 | {{- end }} -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/web/StatusRoutes.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.web 2 | 3 | import net.scalytica.kafka.wsproxy.auth.OpenIdClient 4 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 5 | 6 | import org.apache.pekko.http.scaladsl.model.StatusCodes._ 7 | import org.apache.pekko.http.scaladsl.model._ 8 | import org.apache.pekko.http.scaladsl.server.Directives._ 9 | import org.apache.pekko.http.scaladsl.server._ 10 | 11 | /** Routes for verifying status and health for the proxy */ 12 | trait StatusRoutes { self: BaseRoutes => 13 | 14 | private[this] def serveHealthCheck = { 15 | complete { 16 | HttpResponse( 17 | status = OK, 18 | entity = HttpEntity( 19 | contentType = ContentTypes.`application/json`, 20 | string = """{ "response": "I'm healthy" }""" 21 | ) 22 | ) 23 | } 24 | } 25 | 26 | def statusRoutes( 27 | implicit cfg: AppCfg, 28 | maybeOidcClient: Option[OpenIdClient] 29 | ): Route = { 30 | extractMaterializer { implicit mat => 31 | path("healthcheck") { 32 | get { 33 | if (cfg.server.secureHealthCheckEndpoint) { 34 | maybeAuthenticate(cfg, maybeOidcClient, mat) { _ => 35 | serveHealthCheck 36 | } 37 | } else { 38 | serveHealthCheck 39 | } 40 | } 41 | } 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/models/ReadIsolationLevel.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.models 2 | 3 | import java.util.Locale 4 | 5 | import org.apache.kafka.common.IsolationLevel 6 | 7 | sealed trait ReadIsolationLevel { 8 | protected val level: IsolationLevel 9 | lazy val value: String = level.toString.toLowerCase(Locale.ROOT) 10 | def isSameName(str: String): Boolean = value.equalsIgnoreCase(str) 11 | } 12 | 13 | object ReadIsolationLevel { 14 | 15 | def fromString(str: String): Option[ReadIsolationLevel] = { 16 | Option(str).flatMap { 17 | case s if ReadUncommitted.isSameName(s) 
=> Some(ReadUncommitted) 18 | case s if ReadCommitted.isSameName(s) => Some(ReadCommitted) 19 | case _ => None 20 | } 21 | } 22 | 23 | def unsafeFromString(str: String): ReadIsolationLevel = 24 | fromString(str).getOrElse { 25 | throw new IllegalArgumentException( 26 | s"Read isolation '$str' is not a valid value. Please use one of" + 27 | s" '${ReadCommitted.value}' or '${ReadUncommitted.value}'." 28 | ) 29 | } 30 | 31 | } 32 | 33 | case object ReadUncommitted extends ReadIsolationLevel { 34 | override protected val level = IsolationLevel.READ_UNCOMMITTED 35 | } 36 | 37 | case object ReadCommitted extends ReadIsolationLevel { 38 | override protected val level = IsolationLevel.READ_COMMITTED 39 | } 40 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/StringBasedSerde.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.codecs 2 | 3 | import java.util.{Map => JMap} 4 | 5 | import org.apache.kafka.common.serialization.Deserializer 6 | import org.apache.kafka.common.serialization.Serde 7 | import org.apache.kafka.common.serialization.Serializer 8 | import org.apache.kafka.common.serialization.{Serdes => KSerdes} 9 | 10 | /** Scaffolding for Serdes based on the basic String Serde */ 11 | trait StringBasedSerializer[T] extends Serializer[T] { 12 | protected val ser: Serializer[String] = KSerdes.String().serializer() 13 | 14 | override def configure(configs: JMap[String, _], isKey: Boolean): Unit = {} 15 | 16 | override def close(): Unit = {} 17 | } 18 | 19 | trait StringBasedDeserializer[T] extends Deserializer[T] { 20 | protected val des: Deserializer[String] = KSerdes.String().deserializer() 21 | 22 | override def configure(configs: JMap[String, _], isKey: Boolean): Unit = {} 23 | 24 | override def close(): Unit = {} 25 | } 26 | 27 | trait StringBasedSerde[T] 28 | extends Serde[T] 29 | with StringBasedSerializer[T] 30 | with StringBasedDeserializer[T] { 31 | 32 | override def serializer(): StringBasedSerde[T] = this 33 | override def deserializer(): StringBasedSerde[T] = this 34 | 35 | override def configure(configs: JMap[String, _], isKey: Boolean): Unit = {} 36 | 37 | override def close(): Unit = {} 38 | } 39 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/web/Headers.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.web 2 | 3 | import scala.util.Try 4 | 5 | import net.scalytica.kafka.wsproxy.models.AclCredentials 6 | 7 | import org.apache.pekko.http.scaladsl.model.headers.BasicHttpCredentials 8 | import org.apache.pekko.http.scaladsl.model.headers.ModeledCustomHeader 9 | import org.apache.pekko.http.scaladsl.model.headers.ModeledCustomHeaderCompanion 10 | 11 | object Headers { 12 | 13 | val KafkaAuthHeaderName = "X-Kafka-Auth" 14 | 15 | object XKafkaAuthHeader 16 | extends ModeledCustomHeaderCompanion[XKafkaAuthHeader] { 17 | override def name: String = KafkaAuthHeaderName 18 | 19 | override def parse(value: String): Try[XKafkaAuthHeader] = 20 | Try { 21 | val creds = BasicHttpCredentials(value) 22 | XKafkaAuthHeader(creds) 23 | } 24 | } 25 | 26 | final case class XKafkaAuthHeader(credentials: BasicHttpCredentials) 27 | extends ModeledCustomHeader[XKafkaAuthHeader] { 28 | 29 | override def companion: ModeledCustomHeaderCompanion[XKafkaAuthHeader] = 30 | 
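// Required by ModeledCustomHeader; pekko-http uses the companion to parse and render this header.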
XKafkaAuthHeader 31 | 32 | override def renderInRequests(): Boolean = true 33 | 34 | override def renderInResponses(): Boolean = false 35 | 36 | override def value(): String = credentials.token() 37 | 38 | def aclCredentials: AclCredentials = 39 | AclCredentials(credentials.username, credentials.password) 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/DynamicCfgSerde.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.codecs 2 | 3 | import scala.util.Failure 4 | import scala.util.Success 5 | import scala.util.Try 6 | 7 | import net.scalytica.kafka.wsproxy.config.Configuration 8 | import net.scalytica.kafka.wsproxy.config.Configuration.DynamicCfg 9 | import net.scalytica.kafka.wsproxy.errors.InvalidDynamicCfg 10 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 11 | 12 | /** 13 | * Serde implementation for [[DynamicCfg]] types. Since these are config objects 14 | * derived from HOCON config objects, this implementation will serialize and 15 | * deserialize the object to/from a HOCON string. 16 | */ 17 | class DynamicCfgSerde 18 | extends StringBasedSerde[DynamicCfg] 19 | with WithProxyLogger { 20 | 21 | override def serialize(topic: String, data: DynamicCfg): Array[Byte] = 22 | Option(data).map(d => ser.serialize(topic, d.asHoconString())).orNull 23 | 24 | override def deserialize(topic: String, data: Array[Byte]): DynamicCfg = { 25 | Option(data).map { d => 26 | val str = des.deserialize(topic, d) 27 | 28 | log.trace(s"Deserialized dynamic config from topic $topic to:\n$str") 29 | 30 | Try(Configuration.loadDynamicCfgString(str)) match { 31 | case Success(value) => value 32 | case Failure(err) => 33 | throw InvalidDynamicCfg(err.getMessage, Option(err)) 34 | } 35 | }.orNull 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /helm/kafka-websocket-proxy-chart/templates/statefulset.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: StatefulSet 3 | metadata: 4 | name: {{ .Release.Name }} 5 | labels: 6 | app: kafka-websocket-proxy 7 | spec: 8 | serviceName: {{ .Release.Name }} 9 | replicas: {{ .Values.replicaCount }} 10 | selector: 11 | matchLabels: 12 | app: kafka-websocket-proxy 13 | template: 14 | metadata: 15 | labels: 16 | app: kafka-websocket-proxy 17 | spec: 18 | containers: 19 | - name: kafka-websocket-proxy 20 | image: {{ .Values.image.repository }}:{{ .Values.image.tag }} 21 | ports: 22 | - containerPort: {{ .Values.service.port }} 23 | env: 24 | {{- range $key, $value := .Values.env }} 25 | - name: {{ $key }} 26 | value: {{ $value | quote }} 27 | {{- end }} 28 | {{- if .Values.certsSecretName }} 29 | volumeMounts: 30 | - name: certs-volume 31 | mountPath: /certs 32 | {{- end }} 33 | livenessProbe: 34 | httpGet: 35 | path: /healthcheck 36 | port: {{ .Values.service.port }} 37 | initialDelaySeconds: 30 38 | periodSeconds: 10 39 | timeoutSeconds: 5 40 | failureThreshold: 3 41 | {{- if .Values.certsSecretName }} 42 | volumes: 43 | - name: certs-volume 44 | secret: 45 | secretName: {{ .Values.certsSecretName }} 46 | {{- end }} 47 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: index 3 | title: Kafka WebSocket Proxy 4 | 
slug: / 5 | keywords: 6 | - kafka 7 | - websockets 8 | - proxy 9 | - streaming 10 | - ingestion 11 | - consumption 12 | --- 13 | 14 | The Kafka WebSocket Proxy was created primarily as a more efficient 15 | alternative to HTTP-based REST proxies. 16 | 17 | With WebSockets, the overhead of the HTTP protocol is removed. Instead, a much 18 | "cheaper" TCP socket is set up between the client and proxy, through any 19 | intermediate networking components. 20 | 21 | Since WebSockets are bidirectional, they enable client-server 22 | implementations that are much closer to the way the regular Kafka client works. 23 | The WebSocket becomes more like an extension to the Kafka consumer/producer 24 | clients used internally in the proxy. 25 | 26 | 27 | > ### Caution 28 | > Using the Kafka WebSocket Proxy as a way to feed data directly to 29 | > browser-based clients _can_ cause serious problems for the user experience 30 | > when the data rates are high enough. It is therefore very important to consider 31 | > how often and when your web application re-renders the page. In bad cases, the 32 | > web browser may end up freezing the computer. 33 | > 34 | > For these use cases, consider having the WebSocket client receive data in web 35 | > workers, specifically a `SharedWorker`. This allows sending the same data to 36 | > each open tab containing the web application that needs a WebSocket connection. 37 | > **See [this SO answer](https://stackoverflow.com/a/61866896) for more details on this technique.** -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/jmx/package.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy 2 | 3 | import javax.management.ObjectName 4 | 5 | import net.scalytica.kafka.wsproxy.models.FullConsumerId 6 | import net.scalytica.kafka.wsproxy.models.FullProducerId 7 | 8 | package object jmx { 9 | 10 | private[jmx] val BasePackage = 11 | this.getClass.getPackageName.stripSuffix(".jmx") 12 | 13 | private[jmx] val MXBeanSuffix = "MXBean" 14 | 15 | private[this] def asObjectNameInternal( 16 | name: String, 17 | tpe: String = WildcardString 18 | ): ObjectName = { 19 | val attributes: Map[String, String] = Map("name" -> name, "type" -> tpe) 20 | new ObjectName( 21 | BasePackage, { 22 | import scala.jdk.CollectionConverters._ 23 | new java.util.Hashtable(attributes.asJava) 24 | } 25 | ) 26 | } 27 | 28 | def asObjectName(name: String, tpe: String): ObjectName = 29 | asObjectNameInternal(name, tpe) 30 | 31 | lazy val AllDomainMXBeansQuery: ObjectName = 32 | asObjectNameInternal(WildcardString) 33 | 34 | lazy val WildcardString: String = "*" 35 | 36 | def mxBeanType[T](clazz: Class[T]): String = { 37 | clazz.getInterfaces 38 | .find(_.getSimpleName.endsWith(MXBeanSuffix)) 39 | .map(_.getSimpleName) 40 | .getOrElse(clazz.getSimpleName) 41 | } 42 | 43 | def producerStatsName(fullProducerId: FullProducerId): String = 44 | s"wsproxy-producer-stats-${fullProducerId.value}" 45 | 46 | def consumerStatsName(fullConsumerId: FullConsumerId): String = 47 | s"wsproxy-consumer-stats-${fullConsumerId.value}" 48 | } 49 | -------------------------------------------------------------------------------- /.gitlab/build_templates/gitlab-ci-scala-pages.yml: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # GitLab CI pipeline for
publishing docs to pages # 3 | ################################################################################ 4 | # scala-steward:off 5 | 6 | variables: 7 | CI: "true" 8 | DOCKER_DRIVER: overlay2 9 | SBT_MDOCS_MODULE_DIR: '$CI_PROJECT_NAME-docs' 10 | 11 | .pages_shared_rules: 12 | rules: 13 | # Execute only when a tag is starting with v followed by a digit 14 | - if: $TEST_DOCKER_BUILD == "true" 15 | when: never 16 | - if: $DO_RELEASE_PREP == "true" 17 | when: never 18 | - if: $CI_COMMIT_TAG =~ /^v\d.*/ && $MANUAL_PUBLISH_PAGES != "true" 19 | when: on_success 20 | allow_failure: true 21 | - if: $MANUAL_PUBLISH_PAGES == "true" 22 | when: manual 23 | allow_failure: false 24 | 25 | pages:build: 26 | rules: 27 | - !reference [.pages_shared_rules, rules] 28 | stage: build 29 | image: registry.gitlab.com/kpmeen/docker-scala-sbt:scala_2.13.16_jdk21 30 | interruptible: false 31 | script: 32 | - sbt docs/mdoc 33 | artifacts: 34 | paths: 35 | - $SBT_MDOCS_MODULE_DIR/target 36 | - website 37 | 38 | pages: 39 | rules: 40 | - !reference [.pages_shared_rules, rules] 41 | stage: deploy 42 | image: node:18-alpine 43 | interruptible: false 44 | needs: 45 | - job: pages:build 46 | artifacts: true 47 | script: 48 | - cd website 49 | - yarn install 50 | - yarn build 51 | - mv ./build ../public 52 | artifacts: 53 | paths: 54 | - public 55 | -------------------------------------------------------------------------------- /website/docusaurus.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('@docusaurus/types').DocusaurusConfig} */ 2 | module.exports = { 3 | title: 'Kafka WebSocket Proxy', 4 | tagline: 'documentation site', 5 | url: 'https://kpmeen.gitlab.io', 6 | baseUrl: '/kafka-websocket-proxy/', 7 | onBrokenLinks: 'throw', 8 | onBrokenMarkdownLinks: 'warn', 9 | favicon: 'img/favicon.ico', 10 | organizationName: 'kpmeen', 11 | projectName: 'kafka-websocket-proxy', 12 | themeConfig: { 13 | docs: { 14 | sidebar: { 15 | hideable: true 16 | } 17 | }, 18 | navbar: { 19 | title: 'Kafka WebSocket Proxy', 20 | items: [ 21 | { 22 | to: '/', 23 | activeBasePath: 'docs/index', 24 | label: 'Docs', 25 | position: 'left', 26 | }, 27 | { 28 | href: 'https://gitlab.com/kpmeen/kafka-websocket-proxy', 29 | label: 'GitLab', 30 | position: 'right', 31 | }, 32 | ], 33 | }, 34 | footer: { 35 | style: 'dark', 36 | copyright: `Copyright © ${new Date().getFullYear()} Knut Petter Meen. Built with Docusaurus.`, 37 | }, 38 | }, 39 | presets: [ 40 | [ 41 | '@docusaurus/preset-classic', 42 | { 43 | docs: { 44 | path: '../kafka-websocket-proxy-docs/target/mdoc', 45 | sidebarPath: require.resolve('./sidebars.js'), 46 | editUrl: 'https://gitlab.com/kpmeen/kafka-websocket-proxy/edit/master/website/', 47 | routeBasePath: '/', 48 | }, 49 | theme: { 50 | customCss: require.resolve('./src/css/custom.css'), 51 | }, 52 | }, 53 | ], 54 | ], 55 | }; 56 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/logging/WithProxyLogger.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.logging 2 | 3 | import scala.reflect.ClassTag 4 | 5 | import net.scalytica.kafka.wsproxy.NiceClassNameExtensions 6 | 7 | import com.typesafe.scalalogging.Logger 8 | 9 | /** Convenience trait for providing loggers to different objects and classes. 
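 *
 * Minimal usage sketch (the class name here is hypothetical):
 * {{{
 * class TopicWatcher extends WithProxyLogger {
 *   def start(): Unit = log.info("starting topic watcher")
 * }
 * }}}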
*/ 10 | trait WithProxyLogger { self => 11 | 12 | protected lazy val loggerName: String = self.niceClassName 13 | 14 | final protected lazy val _log: Logger = Logger(loggerName) 15 | 16 | final protected lazy val log: Logger = _log 17 | 18 | } 19 | 20 | object WithProxyLogger { 21 | 22 | private[this] case class ClassProxyLogger(n: String) extends WithProxyLogger { 23 | override protected lazy val loggerName: String = n 24 | } 25 | 26 | def namedLoggerFor[T](implicit ct: ClassTag[T]): Logger = { 27 | val cpl = ClassProxyLogger(ct.runtimeClass.niceClassName) 28 | cpl.log 29 | } 30 | 31 | } 32 | 33 | object DefaultProxyLogger extends WithProxyLogger { 34 | 35 | override protected lazy val loggerName: String = 36 | this.packageName.stripSuffix("logging") + "Server" 37 | 38 | def trace(msg: String): Unit = log.trace(msg) 39 | def debug(msg: String): Unit = log.debug(msg) 40 | def info(msg: String): Unit = log.info(msg) 41 | def warn(msg: String): Unit = log.warn(msg) 42 | def warn(msg: String, cause: Throwable): Unit = log.warn(msg, cause) 43 | def error(msg: String): Unit = log.error(msg) 44 | def error(msg: String, cause: Throwable): Unit = log.error(msg, cause) 45 | 46 | } 47 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/auth/tokens.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.auth 2 | 3 | import io.circe.generic.extras.auto._ 4 | import io.circe.syntax._ 5 | import org.apache.pekko.http.scaladsl.model.ContentTypes 6 | import org.apache.pekko.http.scaladsl.model.HttpEntity 7 | import org.apache.pekko.http.scaladsl.model.HttpMethods 8 | import org.apache.pekko.http.scaladsl.model.HttpRequest 9 | import org.apache.pekko.http.scaladsl.model.headers.OAuth2BearerToken 10 | import org.apache.pekko.util.ByteString 11 | 12 | case class TokenRequest private ( 13 | clientId: String, 14 | clientSecret: String, 15 | audience: String, 16 | grantType: String 17 | ) { 18 | 19 | def jsonString: String = this.asJson.noSpaces 20 | 21 | /** 22 | * Creates a pekko-http [[HttpRequest]] instance from this [[TokenRequest]] 23 | * 24 | * @param url 25 | * The URL that the request will be executed against 26 | * @return 27 | * an instance of [[HttpRequest]] 28 | */ 29 | def request(url: String): HttpRequest = { 30 | val entity = HttpEntity.Strict( 31 | contentType = ContentTypes.`application/json`, 32 | data = ByteString(jsonString) 33 | ) 34 | HttpRequest( 35 | method = HttpMethods.POST, 36 | uri = url, 37 | entity = entity 38 | ) 39 | } 40 | } 41 | 42 | case class AccessToken( 43 | tokenType: String, 44 | accessToken: String, 45 | expiresIn: Long, 46 | refreshToken: Option[String] 47 | ) { 48 | 49 | /** 50 | * @return 51 | * An instance of an [[OAuth2BearerToken]] based on the {{accessToken}} 52 | */ 53 | def bearerToken: OAuth2BearerToken = OAuth2BearerToken(accessToken) 54 | 55 | } 56 | -------------------------------------------------------------------------------- /docs/development.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: development 3 | title: Development 4 | slug: /development 5 | --- 6 | 7 | ## Development 8 | 9 | tbd... 10 | 11 | ## Testing 12 | 13 | ### Connecting with `wscat` 14 | 15 | With a CLI tool called `wscat`, interacting with the `kafka-websocket-proxy` is 16 | relatively simple.
The tool is freely available on 17 | [github](https://github.com/websockets/wscat), and is highly recommended. 18 | 19 | 1. Install `wscat` 20 | ```bash 21 | npm install -g wscat 22 | ``` 23 | 24 | 2. Check `kafka-websocket-proxy` health 25 | ```bash 26 | curl http://<host>:<port>/healthcheck 27 | ``` 28 | 29 | Expected response: 30 | 31 | ```json 32 | { "response": "I'm healthy" } 33 | ``` 34 | 35 | If you receive an error response, verify the `kafka-websocket-proxy` config 36 | before going forward. 37 | 38 | 3. Open a consumer connection to `kafka-websocket-proxy` 39 | > NOTE: Ensure that there is an available topic to consume from in the Kafka cluster. 40 | 41 | ```bash 42 | wscat --connect "ws://<host>:<port>/socket/out?clientId=my-client&topic=my-topic&valType=json" 43 | Connected (press CTRL+C to quit) 44 | ``` 45 | 46 | 4. Open a producer connection to `kafka-websocket-proxy` 47 | ```bash 48 | wscat --connect "ws://<host>:<port>/socket/in?clientId=my-client&topic=my-topic&valType=json" 49 | Connected (press CTRL+C to quit) 50 | > 51 | ``` 52 | 53 | 5. Send a message through the producer socket 54 | ```json 55 | { "headers": [ { "key": "my_header", "value": "header_value" } ], "key": { "value": "foo", "format": "string" }, "value": { "value": "bar01", "format": "string" }, "messageId": "client_generated_id" } 56 | ``` 57 | 58 | -------------------------------------------------------------------------------- /project/PrometheusConfigPlugin.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.sbt.plugin 2 | 3 | import java.nio.charset.StandardCharsets 4 | 5 | import com.typesafe.sbt.packager.docker.DockerPlugin 6 | import sbt.Keys._ 7 | import sbt._ 8 | 9 | object PrometheusConfigPlugin extends AutoPlugin { 10 | 11 | override def requires = DockerPlugin 12 | 13 | override def trigger = AllRequirements 14 | 15 | object autoImport { 16 | 17 | val prometheusConfigFileName = 18 | settingKey[String]("The name of the prometheus config file") 19 | 20 | val prometheusConfigFileTargetDir = 21 | settingKey[File]("The directory to save the generated prometheus config.") 22 | 23 | val prometheusConfigFile = settingKey[File]("The generated config file") 24 | 25 | val prometheusGenerate = 26 | taskKey[Unit]("Generate the default prometheus-config.yml") 27 | 28 | } 29 | 30 | val DefaultConfig = 31 | """--- 32 | | 33 | |startDelaySeconds: 0 34 | |lowercaseOutputName: false 35 | |lowercaseOutputLabelNames: false 36 | |""".stripMargin 37 | 38 | import autoImport._ 39 | 40 | private[this] def writeConfig(destFile: File): Unit = { 41 | IO.write( 42 | file = destFile, 43 | content = DefaultConfig, 44 | charset = StandardCharsets.UTF_8, 45 | append = false 46 | ) 47 | } 48 | 49 | override def projectSettings = { 50 | Seq( 51 | prometheusConfigFileName := "prometheus-config.yml", 52 | prometheusConfigFileTargetDir := target.value, 53 | prometheusConfigFile := { 54 | prometheusConfigFileTargetDir.value / prometheusConfigFileName.value 55 | }, 56 | prometheusGenerate := writeConfig(prometheusConfigFile.value) 57 | ) 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/jmx/MXBeanActor.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.jmx 2 | 3 | import javax.management.ObjectName 4 | 5 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 6 | 7 | import org.apache.pekko.actor.typed.Behavior 8 | import
org.apache.pekko.actor.typed.PostStop 9 | import org.apache.pekko.actor.typed.PreRestart 10 | import org.apache.pekko.actor.typed.Signal 11 | import org.apache.pekko.actor.typed.scaladsl.AbstractBehavior 12 | import org.apache.pekko.actor.typed.scaladsl.ActorContext 13 | import org.apache.pekko.actor.typed.scaladsl.Behaviors 14 | 15 | abstract class MXBeanActor[T](ctx: ActorContext[T]) 16 | extends AbstractBehavior[T](ctx) 17 | with WithProxyLogger { self => 18 | 19 | import WsProxyJmxRegistrar._ 20 | 21 | val objectName: ObjectName = 22 | asObjectName(context.self.path.name, getMXTypeName) 23 | 24 | register() 25 | 26 | def getMXTypeName: String = mxBeanType(self.getClass) 27 | 28 | protected def register(): Unit = { 29 | registerToMBeanServer(this, objectName) 30 | log.trace(s"Registered MXBean ${objectName.getCanonicalName}") 31 | } 32 | 33 | protected def unregister(): Unit = { 34 | unregisterFromMBeanServer(objectName) 35 | log.trace(s"Unregistered MXBean ${objectName.getCanonicalName}") 36 | } 37 | 38 | override def onSignal: PartialFunction[Signal, Behavior[T]] = { 39 | case PostStop => 40 | unregister() 41 | Behaviors.unhandled 42 | 43 | case PreRestart => 44 | log.trace(s"Restarting MXBean ${objectName.getCanonicalName}...") 45 | unregister() 46 | register() 47 | Behaviors.unhandled 48 | } 49 | 50 | protected def doAndSame(op: () => Unit): Behavior[T] = { 51 | op() 52 | Behaviors.same 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/session/ClientInstance.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.session 2 | 3 | import net.scalytica.kafka.wsproxy.models.FullClientId 4 | import net.scalytica.kafka.wsproxy.models.FullConsumerId 5 | import net.scalytica.kafka.wsproxy.models.FullProducerId 6 | import net.scalytica.kafka.wsproxy.models.WsClientId 7 | import net.scalytica.kafka.wsproxy.models.WsGroupId 8 | import net.scalytica.kafka.wsproxy.models.WsProducerId 9 | import net.scalytica.kafka.wsproxy.models.WsProducerInstanceId 10 | import net.scalytica.kafka.wsproxy.models.WsServerId 11 | 12 | /** Helps to identify a Kafka client instance */ 13 | sealed trait ClientInstance { 14 | val id: FullClientId 15 | val serverId: WsServerId 16 | } 17 | 18 | /** 19 | * Wraps information about each instantiated producer within a 20 | * [[ProducerSession]]. 21 | * 22 | * @param id 23 | * The full client (or producer) ID and instance ID given. 24 | * @param serverId 25 | * The server ID where the producer instance is running. 26 | */ 27 | case class ProducerInstance( 28 | id: FullProducerId, 29 | serverId: WsServerId 30 | ) extends ClientInstance { 31 | val producerId: WsProducerId = id.producerId 32 | val instanceId: Option[WsProducerInstanceId] = id.instanceId 33 | } 34 | 35 | /** 36 | * Wraps information about each instantiated consumer within a 37 | * [[ConsumerSession]]. 38 | * 39 | * @param id 40 | * The full client (or consumer) ID and group ID given. 41 | * @param serverId 42 | * The server ID where the consumer instance is running. 
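 *
 * Construction sketch (identifier values are hypothetical):
 * {{{
 * ConsumerInstance(
 *   id = FullConsumerId(WsGroupId("my-group"), WsClientId("client-1")),
 *   serverId = WsServerId("node-1")
 * )
 * }}}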
43 | */ 44 | case class ConsumerInstance( 45 | id: FullConsumerId, 46 | serverId: WsServerId 47 | ) extends ClientInstance { 48 | val clientId: WsClientId = id.clientId 49 | val groupId: WsGroupId = id.groupId 50 | } 51 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/producer/InputJsonParser.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.producer 2 | 3 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 4 | import net.scalytica.kafka.wsproxy.models.ProducerEmptyMessage 5 | import net.scalytica.kafka.wsproxy.models.WsProducerRecord 6 | 7 | import io.circe.Decoder 8 | 9 | trait InputJsonParser { self: WithProxyLogger => 10 | 11 | /** 12 | * Parses an input message, in the form of a JSON String, into an instance of 13 | * [[WsProducerRecord]], which will be passed on to Kafka downstream. 14 | * 15 | * @param jsonStr 16 | * the String containing the JSON formatted message 17 | * @param keyDec 18 | * the JSON decoder to use for the message key 19 | * @param valDec 20 | * the JSON decoder to use for the message value 21 | * @tparam K 22 | * the message key type 23 | * @tparam V 24 | * the message value type 25 | * @return 26 | * an instance of [[WsProducerRecord]] 27 | */ 28 | @throws[Throwable] 29 | protected def parseInput[K, V]( 30 | jsonStr: String 31 | )(implicit keyDec: Decoder[K], valDec: Decoder[V]): WsProducerRecord[K, V] = { 32 | import io.circe._ 33 | import io.circe.parser._ 34 | import net.scalytica.kafka.wsproxy.codecs.Decoders._ 35 | 36 | if (jsonStr.isEmpty) ProducerEmptyMessage 37 | else { 38 | parse(jsonStr) match { 39 | case Left(ParsingFailure(message, err)) => 40 | log.error(s"Error parsing JSON string:\n$message") 41 | log.debug(s"JSON was: $jsonStr") 42 | throw err 43 | 44 | case Right(json) => 45 | json.as[WsProducerRecord[K, V]] match { 46 | case Left(err) => throw err 47 | case Right(wpr) => wpr 48 | } 49 | } 50 | } 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /server/src/test/resources/logback-config-testcase-2.xml: -------------------------------------------------------------------------------- 1 | [logback XML markup lost in extraction; surviving text nodes: `true`, `false`, the timestamp format `yyyy-MM-dd' 'HH:mm:ss.SSS`, and the log pattern `%d{HH:mm:ss.SSS} %colouredLevel %logger{36} - %msg%n`] -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/web/SchemaRoutes.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.web 2 | 3 | import net.scalytica.kafka.wsproxy.auth.OpenIdClient 4 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 5 | 6 | import org.apache.pekko.http.scaladsl.marshalling.ToResponseMarshallable 7 | import org.apache.pekko.http.scaladsl.model._ 8 | import org.apache.pekko.http.scaladsl.server.Directives._ 9 | import org.apache.pekko.http.scaladsl.server._ 10 | 11 | trait SchemaRoutes { self: BaseRoutes => 12 | 13 | private[this] def avroNotAvailable(): ToResponseMarshallable = { 14 | HttpEntity( 15 | contentType = ContentTypes.`application/json`, 16 | string = 17 | "Kafka WebSocket Proxy does not implement an Avro protocol any more."
18 | ) 19 | } 20 | 21 | def schemaRoutes( 22 | implicit cfg: AppCfg, 23 | maybeOpenIdClient: Option[OpenIdClient] 24 | ): Route = { 25 | extractMaterializer { implicit mat => 26 | maybeAuthenticate(cfg, maybeOpenIdClient, mat) { _ => 27 | pathPrefix("schemas") { 28 | pathPrefix("avro") { 29 | pathPrefix("producer") { 30 | path("record") { 31 | get { 32 | complete(avroNotAvailable()) 33 | } 34 | } ~ path("result") { 35 | get { 36 | complete(avroNotAvailable()) 37 | } 38 | } 39 | } ~ pathPrefix("consumer") { 40 | path("record") { 41 | get { 42 | complete(avroNotAvailable()) 43 | } 44 | } ~ path("commit") { 45 | get { 46 | complete(avroNotAvailable()) 47 | } 48 | } 49 | } 50 | } 51 | } 52 | } 53 | } 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/utils/HostResolverSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.utils 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.KafkaBootstrapHosts 4 | import net.scalytica.kafka.wsproxy.config.Configuration 5 | import net.scalytica.kafka.wsproxy.utils.HostResolver.resolveKafkaBootstrapHosts 6 | import net.scalytica.test.SharedAttributes.defaultTestAppCfg 7 | import net.scalytica.test.WsProxySpec 8 | 9 | import scala.concurrent.duration._ 10 | import org.scalatest.matchers.must.Matchers 11 | import org.scalatest.wordspec.AnyWordSpec 12 | 13 | class HostResolverSpec extends AnyWordSpec with Matchers with WsProxySpec { 14 | 15 | override protected val testTopicPrefix: String = "host-resolver-test-topic" 16 | 17 | implicit lazy val cfg: Configuration.AppCfg = defaultTestAppCfg.copy( 18 | kafkaClient = defaultTestAppCfg.kafkaClient.copy( 19 | brokerResolutionTimeout = 5 seconds, 20 | bootstrapHosts = KafkaBootstrapHosts(List("localhost")) 21 | ) 22 | ) 23 | 24 | "The HostResolver" should { 25 | 26 | "fail resolution after retrying an action for a given duration" in { 27 | val hosts = KafkaBootstrapHosts(List("foo")) 28 | resolveKafkaBootstrapHosts(hosts).hasErrors mustBe true 29 | } 30 | 31 | "successfully resolve a host" in { 32 | val hosts = KafkaBootstrapHosts(List("github.com")) 33 | resolveKafkaBootstrapHosts(hosts).hasErrors mustBe false 34 | } 35 | 36 | "resolve some hosts and fail others" in { 37 | val hosts = KafkaBootstrapHosts( 38 | List("github.com", "foo", "uggabugga", "gitlab.com", "google.com") 39 | ) 40 | val res = resolveKafkaBootstrapHosts(hosts) 41 | res.hasErrors mustBe true 42 | res.results.count(_.isError) mustBe 2 43 | res.results.count(_.isSuccess) mustBe 3 44 | } 45 | 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /.gitlab/build_templates/gitlab-ci-scala-build.yml: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # GitLab CI build script for Scala applications published as docker images # 3 | ################################################################################ 4 | # scala-steward:off 5 | 6 | variables: 7 | CI: "true" 8 | DOCKER_DRIVER: overlay2 9 | DOCKER_TLS_CERTDIR: "" 10 | # Setting specific folder for sbt-coursier to cache artifacts 11 | COURSIER_CACHE: "/root/cache/coursier" 12 | 13 | cache: 14 | untracked: true 15 | paths: 16 | - cache 17 | 18 | check style and formatting: 19 | stage: build 20 | image: 
registry.gitlab.com/kpmeen/docker-scala-sbt:scala_2.13.16_jdk21 21 | interruptible: true 22 | tags: 23 | - saas-linux-small-amd64 24 | script: 25 | # Ensure that the pipeline fails fast if there are issues with the 26 | # style or formatting 27 | - sbt clean scalastyle scalafmt 28 | - git diff --exit-code || (echo "ERROR Code formatting check failed, see differences above."; false) 29 | 30 | compile and run test suites: 31 | stage: test 32 | image: registry.gitlab.com/kpmeen/docker-scala-sbt:scala_2.13.16_jdk21 33 | interruptible: true 34 | tags: 35 | - saas-linux-small-amd64 36 | script: 37 | - sbt coverage test coverageReport 38 | - sbt coverageAggregate 39 | coverage: '/Statement coverage[A-Za-z\.*]\s*:\s*([^%]+)/' 40 | artifacts: 41 | access: 'developer' 42 | paths: 43 | - target/ 44 | - target/scala-2.13/scoverage-report 45 | - target/scala-2.13/coverage-report/cobertura.xml 46 | - ./**/target/ 47 | - ./**/target/test-reports/TEST-*.xml 48 | reports: 49 | junit: ./**/target/test-reports/TEST-*.xml 50 | coverage_report: 51 | coverage_format: cobertura 52 | path: target/scala-2.13/coverage-report/cobertura.xml 53 | -------------------------------------------------------------------------------- /server/src/test/scala/org/scalatest/CustomEitherValues.scala: -------------------------------------------------------------------------------- 1 | package org.scalatest 2 | 3 | import org.scalactic.source.Position 4 | import org.scalatest.Resources.{ 5 | eitherLeftValueNotDefined, 6 | eitherRightValueNotDefined 7 | } 8 | import org.scalatest.exceptions.{StackDepthException, TestFailedException} 9 | 10 | /** 11 | * Implementation of EitherValues that works around the need to use the 12 | * deprecated .right function on Either values. The ScalaTest community is 13 | * working on a solution. 
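 *
 * Usage sketch:
 * {{{
 * val res: Either[String, Int] = Right(42)
 * res.rightValue // 42; throws a TestFailedException if res were a Left
 * }}}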
14 | * 15 | * @see 16 | * https://github.com/scalatest/scalatest/issues/972 17 | */ 18 | trait CustomEitherValues { 19 | 20 | implicit def convertEitherToValuableEither[L, R](either: Either[L, R])( 21 | implicit pos: Position 22 | ): ValuableEither[L, R] = new ValuableEither(either, pos) 23 | 24 | private[this] def testFailedException( 25 | messageFun: StackDepthException => String, 26 | causeMessage: String, 27 | pos: Position 28 | ): TestFailedException = { 29 | new TestFailedException( 30 | messageFun = sde => Option(messageFun(sde)), 31 | cause = Some(new NoSuchElementException(causeMessage)), 32 | pos = pos 33 | ) 34 | } 35 | 36 | class ValuableEither[L, R](either: Either[L, R], pos: Position) { 37 | 38 | def rightValue: R = either match { 39 | case Right(r) => r 40 | case Left(_) => 41 | throw testFailedException( 42 | messageFun = sde => eitherRightValueNotDefined(sde), 43 | causeMessage = "Either.rightValue on Left", 44 | pos = pos 45 | ) 46 | } 47 | 48 | def leftValue: L = either match { 49 | case Left(l) => l 50 | case Right(_) => 51 | throw testFailedException( 52 | messageFun = sde => eitherLeftValueNotDefined(sde), 53 | causeMessage = "Either.leftValue on Right", 54 | pos = pos 55 | ) 56 | } 57 | } 58 | 59 | } 60 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/test/EmbeddedHttpServer.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.test 2 | 3 | import org.apache.pekko.actor.ActorSystem 4 | import org.apache.pekko.http.scaladsl.Http 5 | import org.apache.pekko.http.scaladsl.model._ 6 | import org.apache.pekko.http.scaladsl.server.Directives._ 7 | import org.apache.pekko.http.scaladsl.server.{ExceptionHandler, Route} 8 | 9 | import scala.concurrent.ExecutionContext 10 | import scala.concurrent.duration._ 11 | 12 | trait EmbeddedHttpServer { 13 | 14 | val shutdownDeadline: FiniteDuration = 20 seconds 15 | 16 | implicit private[this] def exceptionHandler: ExceptionHandler = 17 | ExceptionHandler { case t: Throwable => 18 | complete( 19 | HttpResponse( 20 | status = StatusCodes.InternalServerError, 21 | entity = t.getMessage 22 | ) 23 | ) 24 | } 25 | 26 | def withHttpServerForRoute[T]( 27 | host: String = "localhost", 28 | port: Int = availablePort 29 | )(routes: (String, Int) => Route)(block: (String, Int) => T)( 30 | implicit sys: ActorSystem, 31 | ec: ExecutionContext 32 | ): T = withEmbeddedServer(host, port, routes(host, port))(block) 33 | 34 | def withEmbeddedServer[T]( 35 | host: String = "localhost", 36 | port: Int = availablePort, 37 | routes: Route, 38 | completionWaitDuration: Option[FiniteDuration] = None 39 | )(block: (String, Int) => T)( 40 | implicit sys: ActorSystem, 41 | ec: ExecutionContext 42 | ): T = { 43 | val server = Http().newServerAt(host, port).bindFlow(Route.seal(routes)) 44 | try { 45 | val res = block(host, port) 46 | completionWaitDuration match { 47 | case Some(waitDuration) => Thread.sleep(waitDuration.toMillis) 48 | case None => () 49 | } 50 | res 51 | } finally { 52 | val _ = server.flatMap(_.terminate(shutdownDeadline)) 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /gatling/src/test/scala/net/scalytica/kafka/wsproxy/gatling/BaseConsumerSimulation.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.gatling 2 | 3 | import io.gatling.core.Predef._ 4 | import 
io.gatling.core.session.Expression 5 | import io.gatling.http.Predef._ 6 | import io.gatling.http.check.ws.WsFrameCheck 7 | 8 | trait BaseConsumerSimulation extends Simulation { 9 | 10 | val httpProtocol = http 11 | .baseUrl("http://localhost:8078") 12 | .acceptHeader("application/json;q=0.9,*/*;q=0.8") 13 | .doNotTrackHeader("1") 14 | .acceptEncodingHeader("gzip, deflate") 15 | .userAgentHeader("Gatling") 16 | .wsBaseUrl("ws://localhost:8078") 17 | 18 | def socketOutUri( 19 | topic: String, 20 | payload: String, 21 | valTpe: String, 22 | keyTpe: Option[String] = None, 23 | autoCommit: Boolean = true 24 | ): Expression[String] = { 25 | val uriStr = "/socket/out?" + 26 | s"clientId=client-${System.currentTimeMillis}&" + 27 | s"topic=$topic&" + 28 | s"socketPayload=$payload&" + 29 | s"valType=$valTpe&" + 30 | s"autoCommit=$autoCommit" 31 | 32 | val uri = keyTpe.map(k => s"$uriStr&keyType=$k").getOrElse(uriStr) 33 | uri 34 | } 35 | 36 | def wsCheck(topic: String, hasKey: Boolean = false): WsFrameCheck = { 37 | ws.checkTextMessage("Validate") 38 | .check(jsonPath("$.wsProxyMessageId").exists) 39 | .check(jsonPath("$.topic").is(topic)) 40 | .check(jsonPath("$.partition").ofType[Int].exists) 41 | .check(jsonPath("$.offset").ofType[Long].exists) 42 | .check(jsonPath("$.timestamp").ofType[Long].exists) 43 | .check(jsonPath("$.value").exists) 44 | .check { 45 | if (hasKey) jsonPath("$.key").exists 46 | else jsonPath("$.key").notExists 47 | } 48 | } 49 | 50 | def multiWsChecks( 51 | topic: String, 52 | hasKey: Boolean = false, 53 | num: Int = 1 54 | ): Seq[WsFrameCheck] = (0 to num).map(_ => wsCheck(topic, hasKey)) 55 | 56 | } 57 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | // scalastyle:off 2 | logLevel := Level.Warn 3 | 4 | resolvers ++= DefaultOptions.resolvers(snapshot = false) 5 | resolvers += Resolver.sonatypeCentralSnapshots 6 | resolvers ++= Seq( 7 | Resolver.mavenCentral, 8 | Resolver.jcenterRepo, 9 | Resolver.typesafeRepo("releases"), 10 | Resolver.sbtIvyRepo("releases"), 11 | // Remove the resolver below once the following issue has been resolved: 12 | // https://issues.jboss.org/projects/JBINTER/issues/JBINTER-21 13 | "JBoss" at "https://repository.jboss.org/" 14 | ) 15 | 16 | // sbt-git plugin to get access to git commands from the build scripts 17 | addSbtPlugin("com.github.sbt" % "sbt-git" % "2.1.0") 18 | 19 | // Dependency handling 20 | addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.4") 21 | 22 | // Formatting and style checking 23 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4") 24 | addSbtPlugin("com.beautiful-scala" % "sbt-scalastyle" % "1.5.1") 25 | 26 | // Code coverage 27 | addSbtPlugin("org.scoverage" %% "sbt-scoverage" % "2.3.1") 28 | 29 | // Native packaging plugin 30 | addSbtPlugin("com.github.sbt" %% "sbt-native-packager" % "1.11.1") 31 | 32 | // Release plugin 33 | addSbtPlugin("com.github.sbt" % "sbt-release" % "1.4.0") 34 | 35 | // Gatling plugin 36 | //addSbtPlugin("io.gatling" % "gatling-sbt" % "3.1.0") 37 | 38 | //classpathTypes += "maven-plugin" 39 | 40 | // Scalafix for automated code re-writes when updating dependencies 41 | addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.14.3") 42 | 43 | // documentation generator 44 | addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.7.1") 45 | 46 | // FIXME: Workaround to get project working due to scala-xml conflicts.
47 | // The culprit is a transitive dependency on scalariform_2.12, which relies 48 | // on scala-xml 1.2.0 while everything else depends on 2.1.0. 49 | // see: https://stackoverflow.com/a/74338012 50 | ThisBuild / libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always 51 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/models/FormatsSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.models 2 | 3 | import net.scalytica.kafka.wsproxy.models.Formats._ 4 | import net.scalytica.kafka.wsproxy.models.Formats.FormatType._ 5 | import org.scalatest.OptionValues 6 | import org.scalatest.matchers.must.Matchers 7 | import org.scalatest.wordspec.AnyWordSpec 8 | 9 | class FormatsSpec extends AnyWordSpec with Matchers with OptionValues { 10 | 11 | "Format types" should { 12 | 13 | "be possible to initialise a JsonType using a string value" in { 14 | fromString("json").value mustBe JsonType 15 | } 16 | 17 | "be possible to initialise a StringType using a string value" in { 18 | fromString("string").value mustBe StringType 19 | } 20 | 21 | "be possible to initialise an IntType using a string value" in { 22 | fromString("int").value mustBe IntType 23 | } 24 | 25 | "be possible to initialise a LongType using a string value" in { 26 | fromString("long").value mustBe LongType 27 | } 28 | 29 | "be possible to initialise a DoubleType using a string value" in { 30 | fromString("double").value mustBe DoubleType 31 | } 32 | 33 | "be possible to initialise a FloatType using a string value" in { 34 | fromString("float").value mustBe FloatType 35 | } 36 | 37 | "not care about case when initialising FormatType from string value" in { 38 | fromString("STRING").value mustBe StringType 39 | fromString("sTrInG").value mustBe StringType 40 | } 41 | 42 | "not initialise a FormatType from an unknown string value" in { 43 | fromString("bytearray") mustBe None 44 | } 45 | 46 | "throw exception when unsafely initialising from invalid string value" in { 47 | an[IllegalArgumentException] must be thrownBy unsafeFromString("foobar") 48 | } 49 | 50 | // "use snake_case for the name string" in { 51 | // ByteArrayType.name mustBe "byte_array" 52 | // } 53 | 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/auth/package.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy 2 | 3 | import scala.concurrent.Future 4 | 5 | import com.typesafe.scalalogging.Logger 6 | import io.circe.generic.extras.{Configuration => CirceConfiguration} 7 | import org.apache.pekko.stream.Materializer 8 | import org.apache.pekko.stream.scaladsl.Source 9 | import org.apache.pekko.util.ByteString 10 | import pdi.jwt.JwtBase64 11 | import pdi.jwt.JwtClaim 12 | 13 | package object auth { 14 | 15 | implicit private[auth] val circeConfig: CirceConfiguration = 16 | CirceConfiguration.default.withSnakeCaseMemberNames 17 | 18 | val PubKeyAlgo = "RSA" 19 | 20 | private[auth] def decodeToBigInt(value: Option[String]): BigInt = { 21 | value.map(v => BigInt(1, JwtBase64.decode(v))).getOrElse(BigInt(0)) 22 | } 23 | 24 | private[auth] def foldBody( 25 | body: Source[ByteString, Any] 26 | )( 27 | implicit mat: Materializer 28 | ): Future[Option[String]] = { 29 | body.runFold[Option[String]](None) {
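// Decode each incoming chunk and append it to the accumulated string,
// which stays None until the first chunk has been decoded.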
(maybeStr, bytes) => 30 | val decoded = bytes.decodeSafeString 31 | maybeStr match { 32 | case js @ Some(_) => js.map(s => decoded.map(s + _)).getOrElse(decoded) 33 | case None => decoded 34 | } 35 | } 36 | } 37 | 38 | implicit private[auth] class JwtClaimExtensions(jc: JwtClaim) { 39 | 40 | def logValidity( 41 | logger: Logger, 42 | isValid: Boolean, 43 | detailedLogging: Boolean = false 44 | ): Unit = { 45 | val jwtId = 46 | if (detailedLogging) jc.jwtId.map(jid => s" with jti: [$jid]") 47 | else None 48 | val aud = jc.audience.map(_.mkString(", ")).getOrElse("not set") 49 | val iss = jc.issuer.getOrElse("not set") 50 | val logMsg = "Jwt claim%s for audience [%s] from issuer [%s] is %s!" 51 | val validStr = if (isValid) "valid" else "NOT valid" 52 | 53 | logger.debug(logMsg.format(jwtId.getOrElse(""), aud, iss, validStr)) 54 | } 55 | 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/test/test_routes.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.test 2 | 3 | import org.apache.pekko.actor.ActorSystem 4 | import org.apache.pekko.actor.typed.ActorRef 5 | import org.apache.pekko.actor.typed.scaladsl.adapter._ 6 | import org.apache.pekko.http.scaladsl.server.Route 7 | import org.apache.pekko.http.scaladsl.server.Directives.handleExceptions 8 | import org.apache.pekko.stream.Materializer 9 | import net.scalytica.kafka.wsproxy.models.{FullClientId, WsServerId} 10 | import net.scalytica.kafka.wsproxy.session.{SessionHandlerProtocol, SessionId} 11 | import net.scalytica.kafka.wsproxy.web._ 12 | import net.scalytica.kafka.wsproxy.web.admin.AdminRoutes 13 | 14 | trait TestAdHocRoute extends RoutesPrereqs with RouteFailureHandlers { 15 | override val serverId: WsServerId = WsServerId("node-1") 16 | 17 | override def cleanupClient(sid: SessionId, fid: FullClientId)( 18 | implicit sh: ActorRef[SessionHandlerProtocol.SessionProtocol], 19 | mat: Materializer 20 | ): Unit = () 21 | 22 | def routeWithExceptionHandler(route: Route)( 23 | implicit as: ActorSystem, 24 | mat: Materializer 25 | ): Route = { 26 | implicit val shRef = 27 | as.toTyped.ignoreRef[SessionHandlerProtocol.SessionProtocol] 28 | handleExceptions(wsExceptionHandler)(route) 29 | } 30 | } 31 | 32 | /** Routes for testing [[SchemaRoutes]] */ 33 | trait TestSchemaRoutes extends SchemaRoutes with BaseRoutes { 34 | override val serverId: WsServerId = WsServerId("node-1") 35 | } 36 | 37 | /** Routes for testing [[StatusRoutes]] */ 38 | trait TestStatusRoutes extends StatusRoutes with BaseRoutes { 39 | override val serverId: WsServerId = WsServerId("node-1") 40 | } 41 | 42 | /** Routes for testing [[AdminRoutes]] */ 43 | trait TestAdminRoutes extends AdminRoutes with BaseRoutes { 44 | override val serverId: WsServerId = WsServerId("node-1") 45 | } 46 | 47 | /** Routes for testing [[ServerRoutes]] */ 48 | trait TestServerRoutes extends ServerRoutes { 49 | override val serverId: WsServerId = WsServerId("node-1") 50 | } 51 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/producer/WsTransactionalProducer.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.producer 2 | 3 | import org.apache.pekko.NotUsed 4 | import org.apache.pekko.kafka.ProducerMessage.Envelope 5 | import org.apache.pekko.kafka.ProducerMessage.Results 6 | import 
org.apache.pekko.kafka.ProducerSettings 7 | import org.apache.pekko.stream.ActorAttributes 8 | import org.apache.pekko.stream.scaladsl.Flow 9 | 10 | object WsTransactionalProducer { 11 | 12 | /** 13 | * This is a generic variation of 14 | * [[org.apache.pekko.kafka.scaladsl.Transactional.flow]]. Instead of 15 | * "forcing" the pass-through message to be an instance of 16 | * [[org.apache.pekko.kafka.ConsumerMessage.PartitionOffset]], it allows 17 | * sending any data type as the pass through element. 18 | * 19 | * @param settings 20 | * The Kafka [[ProducerSettings]] to use 21 | * @param transactionalId 22 | * The transactional ID to use for the Kafka producer 23 | * @tparam K 24 | * The type of the key element in the producer message 25 | * @tparam V 26 | * The type of the value element in the producer message 27 | * @tparam PassThrough 28 | * The type of the pass-through element of the result. 29 | * @return 30 | * 31 | * @see 32 | * [[org.apache.pekko.kafka.scaladsl.Transactional.flow]] 33 | */ 34 | def flexiFlow[K, V, PassThrough]( 35 | settings: ProducerSettings[K, V], 36 | transactionalId: String 37 | ): Flow[Envelope[K, V, PassThrough], Results[K, V, PassThrough], NotUsed] = { 38 | require( 39 | transactionalId != null && transactionalId.nonEmpty, 40 | "You must define a Transactional id." 41 | ) 42 | require( 43 | settings.producerFactorySync.isEmpty, 44 | "You cannot use a shared or external producer factory." 45 | ) 46 | 47 | val flow = Flow.fromGraph( 48 | new WsTransactionalProducerStage[K, V, PassThrough](settings) 49 | ) 50 | 51 | if (settings.dispatcher.isEmpty) flow 52 | else flow.withAttributes(ActorAttributes.dispatcher(settings.dispatcher)) 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/config/DynamicConfigHandlerRef.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.config 2 | 3 | import org.apache.pekko.actor.typed.ActorRef 4 | import org.apache.pekko.actor.typed.ActorSystem 5 | import org.apache.pekko.kafka.scaladsl.Consumer 6 | import org.apache.pekko.stream.scaladsl.RunnableGraph 7 | 8 | /** 9 | * Parent trait to provide access to the [[ActorRef]] referencing the 10 | * [[DynamicConfigHandler]] actor. 11 | */ 12 | trait DynamicConfigHandlerRef { 13 | val dchRef: ActorRef[DynamicConfigHandlerProtocol.DynamicConfigProtocol] 14 | } 15 | 16 | /** 17 | * Keeps a reference to the [[DynamicConfigHandler]] actor, and the Kafka 18 | * consumer stream that feeds its state changes from Kafka. 19 | * 20 | * @param stream 21 | * The runnable Kafka Consumer stream used by the [[DynamicConfigHandler]]. 22 | * @param dchRef 23 | * The [[ActorRef]] referencing the [[DynamicConfigHandler]] actor. 24 | */ 25 | final case class RunnableDynamicConfigHandlerRef( 26 | stream: RunnableGraph[Consumer.Control], 27 | dchRef: ActorRef[DynamicConfigHandlerProtocol.DynamicConfigProtocol] 28 | ) extends DynamicConfigHandlerRef { 29 | 30 | def asReadOnlyRef: ReadableDynamicConfigHandlerRef = 31 | ReadableDynamicConfigHandlerRef(dchRef) 32 | 33 | def stop()(implicit system: ActorSystem[_]): Unit = { 34 | dchRef.tell( 35 | DynamicConfigHandlerProtocol.StopConfigHandler(system.ignoreRef) 36 | ) 37 | } 38 | 39 | } 40 | 41 | /** 42 | * Keeps a read-only reference to the [[DynamicConfigHandler]] actor. This is to 43 | * be used by e.g. the standard endpoints when they need to find configs for 44 | * connecting clients. 
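 *
 * Obtaining the read-only handle from the runnable handler (sketch; the
 * value name is hypothetical):
 * {{{
 * val readOnly = runnableHandlerRef.asReadOnlyRef
 * }}}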
45 | * 46 | * @param dchRef 47 | * The [[ActorRef]] referencing the [[DynamicConfigHandler]] actor. 48 | */ 49 | final case class ReadableDynamicConfigHandlerRef( 50 | dchRef: ActorRef[DynamicConfigHandlerProtocol.DynamicConfigProtocol] 51 | ) extends DynamicConfigHandlerRef { 52 | 53 | def stop()(implicit system: ActorSystem[_]): Unit = { 54 | dchRef.tell( 55 | DynamicConfigHandlerProtocol.StopConfigHandler(system.ignoreRef) 56 | ) 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/config/DynamicConfigTestDataGenerators.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.config 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.DynamicCfg 4 | import net.scalytica.kafka.wsproxy.OptionExtensions 5 | 6 | // scalastyle:off magic.number 7 | trait DynamicConfigTestDataGenerators { 8 | 9 | private[config] def createProducerLimitTestCfg( 10 | pid: String, 11 | maxCons: Int, 12 | msgSec: Int 13 | ): DynamicCfg = { 14 | // scalastyle:off line.size.limit 15 | Configuration.loadDynamicCfgString( 16 | s"""{"max-connections":$maxCons,"messages-per-second":$msgSec,"producer-id":"$pid"}""" 17 | ) 18 | // scalastyle:on line.size.limit 19 | } 20 | 21 | private[config] def createConsumerLimitTestCfg( 22 | gid: String, 23 | maxCons: Int, 24 | msgSec: Int 25 | ): DynamicCfg = { 26 | // scalastyle:off line.size.limit 27 | Configuration.loadDynamicCfgString( 28 | s"""{"group-id":"$gid","max-connections":$maxCons,"messages-per-second":$msgSec}""" 29 | ) 30 | // scalastyle:on line.size.limit 31 | } 32 | 33 | private[config] lazy val cfg1 = createProducerLimitTestCfg("p-1", 1, 10) 34 | private[config] lazy val cfg2 = createConsumerLimitTestCfg("g-1", 1, 10) 35 | private[config] lazy val cfg3 = createProducerLimitTestCfg("p-2", 2, 20) 36 | private[config] lazy val cfg4 = createConsumerLimitTestCfg("g-2", 2, 20) 37 | private[config] lazy val cfg5 = createProducerLimitTestCfg("p-3", 3, 30) 38 | private[config] lazy val cfg6 = createConsumerLimitTestCfg("g-3", 3, 30) 39 | 40 | private[config] val expectedSeq: Seq[(String, DynamicCfg)] = Seq( 41 | dynamicCfgTopicKey(cfg1).getUnsafe -> cfg1, 42 | dynamicCfgTopicKey(cfg2).getUnsafe -> cfg2, 43 | dynamicCfgTopicKey(cfg3).getUnsafe -> cfg3, 44 | dynamicCfgTopicKey(cfg4).getUnsafe -> cfg4, 45 | dynamicCfgTopicKey(cfg5).getUnsafe -> cfg5, 46 | dynamicCfgTopicKey(cfg6).getUnsafe -> cfg6 47 | ) 48 | 49 | private[config] val expectedMap: Map[String, DynamicCfg] = expectedSeq.toMap 50 | 51 | private[config] val expectedKeys = expectedSeq.map(_._1) 52 | private[config] val expectedValues = expectedSeq.map(_._2) 53 | 54 | } 55 | // scalastyle:on magic.number 56 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/jmx/TestJmxManager.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.jmx 2 | 3 | import net.scalytica.kafka.wsproxy.admin.WsKafkaAdminClient 4 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 5 | import net.scalytica.kafka.wsproxy.jmx.mbeans.ConsumerClientStatsProtocol._ 6 | import net.scalytica.kafka.wsproxy.jmx.mbeans.ProducerClientStatsProtocol._ 7 | import net.scalytica.kafka.wsproxy.jmx.mbeans.{ 8 | ConnectionsStatsProtocol, 9 | ProxyStatusProtocol 10 | } 11 | import 
net.scalytica.kafka.wsproxy.logging.WithProxyLogger 12 | import org.apache.pekko.actor.typed.{ActorRef, ActorSystem} 13 | import org.apache.pekko.actor.typed.scaladsl.adapter._ 14 | import org.apache.pekko.actor.{ActorSystem => ClassicActorSystem} 15 | 16 | case class TestJmxManager(appCfg: AppCfg)(implicit system: ClassicActorSystem) 17 | extends BaseJmxManager 18 | with JmxProxyStatusOps 19 | with JmxConnectionStatsOps 20 | with JmxConsumerStatsOps 21 | with JmxProducerStatsOps 22 | with WithProxyLogger { 23 | 24 | val beanNamePrefix: String = "test-suite" 25 | 26 | override protected val connectionStatsActorName: String = 27 | s"$beanNamePrefix-wsproxy-connections" 28 | 29 | override protected val proxyStatusActorName: String = 30 | s"$beanNamePrefix-wsproxy-status" 31 | 32 | override protected val totalConsumerStatsPrefix: String = 33 | s"$beanNamePrefix-all" 34 | override protected val totalProducerStatsPrefix: String = 35 | s"$beanNamePrefix-all" 36 | 37 | override def adminClient: WsKafkaAdminClient = { 38 | log.info("The admin client is not available for this test suite.") 39 | throw new NotImplementedError("Not used in tests") 40 | } 41 | 42 | override def sys: ActorSystem[_] = system.toTyped 43 | 44 | override def close(): Unit = {} 45 | 46 | def proxyStatusActorRef: ActorRef[ProxyStatusProtocol.ProxyStatusCommand] = 47 | proxyStatusActor 48 | 49 | def connectionStatsActorRef 50 | : ActorRef[ConnectionsStatsProtocol.ConnectionsStatsCommand] = 51 | connectionStatsActor 52 | 53 | def totalConsumerStatsRef: ActorRef[ConsumerClientStatsCommand] = 54 | totalConsumerClientStatsActor 55 | 56 | def totalProducerStatsRef: ActorRef[ProducerClientStatsCommand] = 57 | totalProducerClientStatsActor 58 | } 59 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/jmx/mbeans/ConnectionStatsMXBeanSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.jmx.mbeans 2 | 3 | import net.scalytica.kafka.wsproxy.jmx.mbeans.ConnectionsStatsProtocol._ 4 | import org.apache.pekko.actor.testkit.typed.scaladsl.TestProbe 5 | import org.apache.pekko.actor.typed.ActorRef 6 | 7 | import scala.concurrent.duration._ 8 | 9 | // scalastyle:off magic.number 10 | class ConnectionStatsMXBeanSpec extends MXBeanSpecLike { 11 | 12 | override protected val testTopicPrefix: String = 13 | "connectionstats-mxbean-test-topic" 14 | 15 | val beanName = "bean-spec-wsproxy-connections" 16 | 17 | val ref: ActorRef[ConnectionsStatsCommand] = 18 | tk.spawn(ConnectionsStatsMXBeanActor(), beanName) 19 | 20 | val probe: TestProbe[ConnectionStatsResponse] = 21 | tk.createTestProbe[ConnectionStatsResponse]("bean-spec-stats-probe") 22 | 23 | val proxy: ConnectionsStatsMXBean = 24 | jmxq.getMxBean(beanName, classOf[ConnectionsStatsMXBean]) 25 | 26 | "The ConnectionStatsMXBean" should { 27 | 28 | "increment number of open producer connections" in { 29 | ref ! AddProducer(probe.ref) 30 | probe.expectMessage(1 second, ProducerAdded) 31 | proxy.getOpenWebSocketsProducers mustBe 1 32 | proxy.getOpenWebSocketsTotal mustBe 1 33 | } 34 | 35 | "increment number of open consumer connections" in { 36 | ref ! AddConsumer(probe.ref) 37 | probe.expectMessage(1 second, ConsumerAdded) 38 | proxy.getOpenWebSocketsConsumers mustBe 1 39 | proxy.getOpenWebSocketsTotal mustBe 2 40 | } 41 | 42 | "decrement number of open producer connections" in { 43 | ref ! 
RemoveProducer(probe.ref) 44 | probe.expectMessage(1 second, ProducerRemoved) 45 | proxy.getOpenWebSocketsProducers mustBe 0 46 | proxy.getOpenWebSocketsTotal mustBe 1 47 | } 48 | 49 | "decrement number of open consumer connections" in { 50 | ref ! RemoveConsumer(probe.ref) 51 | probe.expectMessage(1 second, ConsumerRemoved) 52 | proxy.getOpenWebSocketsConsumers mustBe 0 53 | proxy.getOpenWebSocketsTotal mustBe 0 54 | } 55 | 56 | "stop the MXBean" in { 57 | ref ! Stop 58 | probe.expectTerminated(ref) 59 | jmxq.findByTypeAndName[ConnectionsStatsMXBean](beanName) mustBe None 60 | } 61 | 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/config/package.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 4 | import net.scalytica.kafka.wsproxy.config.Configuration.ClientSpecificLimitCfg 5 | import net.scalytica.kafka.wsproxy.config.Configuration.ConsumerSpecificLimitCfg 6 | import net.scalytica.kafka.wsproxy.config.Configuration.DynamicCfg 7 | import net.scalytica.kafka.wsproxy.config.Configuration.ProducerSpecificLimitCfg 8 | import net.scalytica.kafka.wsproxy.models.WsGroupId 9 | import net.scalytica.kafka.wsproxy.models.WsProducerId 10 | 11 | package object config { 12 | 13 | val DynCfgConsumerGroupIdPrefix: String = 14 | "ws-proxy-dynamic-config-handler-consumer" 15 | 16 | private[config] def dynCfgConsumerGroupId(implicit cfg: AppCfg): String = { 17 | s"$DynCfgConsumerGroupIdPrefix-${cfg.server.serverId.value}" 18 | } 19 | 20 | /** 21 | * Build a Kafka key to use for consumer specific [[DynamicCfg]] 22 | * @param id 23 | * The [[WsGroupId]] to build a key from 24 | * @return 25 | * A String value to use as key 26 | */ 27 | def consumerCfgKey(id: WsGroupId): String = 28 | "consumer-%s".formatted(id.value) 29 | 30 | /** 31 | * Build a Kafka key to use for producer specific [[DynamicCfg]] 32 | * @param id 33 | * The [[WsProducerId]] to build a key from 34 | * @return 35 | * A String value to use as key 36 | */ 37 | def producerCfgKey(id: WsProducerId): String = 38 | "producer-%s".formatted(id.value) 39 | 40 | /** 41 | * Generates a valid key to be used on the dynamic config Kafka topic. The key 42 | * is composed to include the type of dynamic configuration associated with 43 | * it. 44 | * 45 | * @param dc 46 | * The [[DynamicCfg]] to derive a key from. 
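 * (e.g., following the key builders above, a [[ConsumerSpecificLimitCfg]] for group `g-1` yields the key `consumer-g-1`, and a [[ProducerSpecificLimitCfg]] for producer `p-1` yields `producer-p-1`; the IDs are hypothetical).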
47 | * @return 48 | * The key to use for the Kafka topic 49 | */ 50 | private[config] def dynamicCfgTopicKey(dc: DynamicCfg): Option[String] = { 51 | dc match { 52 | case csl: ClientSpecificLimitCfg => 53 | csl match { 54 | case cs: ConsumerSpecificLimitCfg => 55 | Option(consumerCfgKey(cs.groupId)) 56 | 57 | case ps: ProducerSpecificLimitCfg => 58 | Option(producerCfgKey(ps.producerId)) 59 | } 60 | 61 | case _ => None 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/models/value_types.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.models 2 | 3 | import java.util.UUID 4 | 5 | sealed trait WsIdentifier { 6 | val value: String 7 | } 8 | 9 | /** Identifier for this running instance of the proxy */ 10 | case class WsServerId(value: String) 11 | 12 | /** Identifier for the consumer group a consumer client belongs to */ 13 | case class WsGroupId(value: String) extends WsIdentifier 14 | 15 | object WsGroupId { 16 | 17 | def fromOption(mgid: Option[WsGroupId])(or: WsClientId): WsGroupId = 18 | mgid.getOrElse(WsGroupId(s"${or.value}-group")) 19 | 20 | } 21 | 22 | /** Identifies a connecting client */ 23 | case class WsClientId(value: String) extends WsIdentifier 24 | 25 | /** Identifier shared by multiple instances of a producer application */ 26 | case class WsProducerId(value: String) extends WsIdentifier 27 | 28 | /** Identifier for a single producer client application instance */ 29 | case class WsProducerInstanceId(value: String) extends WsIdentifier 30 | 31 | sealed trait FullClientId { 32 | def applicationIdentifier: String 33 | def instanceIdentifier: String 34 | def value: String = s"$applicationIdentifier-$instanceIdentifier" 35 | } 36 | 37 | case class FullConsumerId( 38 | groupId: WsGroupId, 39 | clientId: WsClientId 40 | ) extends FullClientId { 41 | override lazy val applicationIdentifier: String = groupId.value 42 | override lazy val instanceIdentifier: String = clientId.value 43 | } 44 | 45 | case class FullProducerId( 46 | producerId: WsProducerId, 47 | instanceId: Option[WsProducerInstanceId] 48 | ) extends FullClientId { 49 | val uuid: UUID = UUID.randomUUID() 50 | override lazy val applicationIdentifier: String = producerId.value 51 | override lazy val instanceIdentifier: String = 52 | instanceId.map(_.value).getOrElse(uuid.toString) 53 | } 54 | 55 | /** The name of a Kafka topic */ 56 | case class TopicName(value: String) extends AnyVal 57 | /** The partition number of a Kafka topic */ 58 | case class Partition(value: Int) extends AnyVal 59 | /** The consumer offset within a Kafka topic partition */ 60 | case class Offset(value: Long) extends AnyVal 61 | /** The timestamp of a given message at an offset in a Kafka topic partition */ 62 | case class Timestamp(value: Long) extends AnyVal 63 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/web/websockets/ClientSpecificCfgLoader.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.web.websockets 2 | 3 | import scala.concurrent.ExecutionContext 4 | import scala.concurrent.Future 5 | import scala.concurrent.duration._ 6 | 7 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 8 | import net.scalytica.kafka.wsproxy.config.DynamicConfigHandlerImplicits._ 9 | import
net.scalytica.kafka.wsproxy.config.ReadableDynamicConfigHandlerRef 10 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 11 | import net.scalytica.kafka.wsproxy.models.InSocketArgs 12 | import net.scalytica.kafka.wsproxy.models.OutSocketArgs 13 | import net.scalytica.kafka.wsproxy.models.SocketArgs 14 | import net.scalytica.kafka.wsproxy.utils.BlockingRetry 15 | 16 | import org.apache.pekko.actor.typed.Scheduler 17 | import org.apache.pekko.util.Timeout 18 | 19 | trait ClientSpecificCfgLoader extends WithProxyLogger { 20 | 21 | private[this] val dynCfgTimeout = 4 seconds 22 | private[this] val dynCfgInterval = 1 second 23 | private[this] val attempts = 5 24 | 25 | protected def applyDynamicConfigs(args: SocketArgs)(appCfg: AppCfg)( 26 | implicit 27 | maybeDynamicCfgHandlerRef: Option[ReadableDynamicConfigHandlerRef], 28 | ctx: ExecutionContext, 29 | scheduler: Scheduler 30 | ): AppCfg = { 31 | BlockingRetry.retryAwaitFuture(dynCfgTimeout, dynCfgInterval, attempts) { 32 | timeout => 33 | implicit val attemptTimeout: Timeout = timeout 34 | maybeDynamicCfgHandlerRef 35 | .map { ref => 36 | args match { 37 | case isa: InSocketArgs => 38 | ref.findConfigForProducer(isa.producerId).map { 39 | case None => appCfg 40 | case Some(cfg) => appCfg.addProducerCfg(cfg) 41 | } 42 | case osa: OutSocketArgs => 43 | ref.findConfigForConsumer(osa.groupId).map { 44 | case None => appCfg 45 | case Some(cfg) => appCfg.addConsumerCfg(cfg) 46 | } 47 | } 48 | } 49 | .getOrElse(Future.successful(appCfg)) 50 | } { ex => 51 | log.warn(s"Unable to load dynamic config for request args: $args", ex) 52 | Future.successful(appCfg) 53 | } 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/logging/ColouredLevel.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.logging 2 | 3 | import ch.qos.logback.classic._ 4 | import ch.qos.logback.classic.pattern._ 5 | import ch.qos.logback.classic.spi._ 6 | 7 | class ColouredLevel extends ClassicConverter { 8 | 9 | def convert(event: ILoggingEvent): String = event.getLevel match { 10 | case Level.TRACE => Colours.blue(Level.TRACE.levelStr) 11 | case Level.DEBUG => Colours.cyan(Level.DEBUG.levelStr) 12 | case Level.INFO => Colours.white(Level.INFO.levelStr) 13 | case Level.WARN => Colours.yellow(Level.WARN.levelStr) 14 | case Level.ERROR => Colours.red(Level.ERROR.levelStr) 15 | } 16 | 17 | } 18 | 19 | object Colours { 20 | 21 | import scala.Console._ 22 | 23 | private[this] val SbtLogNoFormat = "sbt.log.noformat" 24 | private[this] val WsProxyLogNoFormat = "wsproxy.log.noformat" 25 | private[this] val WsProxyLogNoFormatEnv = "WSPROXY_LOG_ANSI_OFF" 26 | 27 | /** 28 | * Check if coloured output is enabled and that the current environment 29 | * actually supports ANSI coloured output. 
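 * Colours can also be switched off explicitly, e.g. via a system property or environment variable: * {{{ * // java -Dwsproxy.log.noformat=true ... * // or: export WSPROXY_LOG_ANSI_OFF=true * }}}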
30 | * 31 | * @return 32 | * true if ANSI colours should be used, otherwise false 33 | */ 34 | def useANSI: Boolean = 35 | sys.props 36 | .get(SbtLogNoFormat) 37 | .orElse(sys.props.get(WsProxyLogNoFormat)) 38 | .orElse(sys.env.get(WsProxyLogNoFormatEnv)) 39 | .map(_ != "true") 40 | .orElse { 41 | sys.props 42 | .get("os.name") 43 | .map(_.toLowerCase(java.util.Locale.ENGLISH)) 44 | .filter(_.contains("windows")) 45 | .map(_ => false) 46 | } 47 | .getOrElse(true) 48 | 49 | // Functions for adding ANSI colour codes to a String 50 | def red(str: String): String = if (useANSI) s"$RED$str$RESET" else str 51 | def blue(str: String): String = if (useANSI) s"$BLUE$str$RESET" else str 52 | def cyan(str: String): String = if (useANSI) s"$CYAN$str$RESET" else str 53 | def green(str: String): String = if (useANSI) s"$GREEN$str$RESET" else str 54 | def magenta(str: String): String = if (useANSI) s"$MAGENTA$str$RESET" else str 55 | def white(str: String): String = if (useANSI) s"$WHITE$str$RESET" else str 56 | def black(str: String): String = if (useANSI) s"$BLACK$str$RESET" else str 57 | def yellow(str: String): String = if (useANSI) s"$YELLOW$str$RESET" else str 58 | 59 | } 60 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/auth/JwkSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.auth 2 | 3 | import java.security.KeyPairGenerator 4 | import java.security.interfaces.{RSAPrivateKey, RSAPublicKey} 5 | import java.util.UUID 6 | 7 | import net.scalytica.kafka.wsproxy.errors.InvalidPublicKeyError 8 | import org.scalatest.TryValues 9 | import org.scalatest.matchers.must.Matchers 10 | import org.scalatest.wordspec.AnyWordSpec 11 | import pdi.jwt.{JwtAlgorithm, JwtBase64} 12 | 13 | // scalastyle:off magic.number 14 | class JwkSpec extends AnyWordSpec with Matchers with TryValues { 15 | 16 | private[this] def base64UrlEncode(bytes: Array[Byte]): String = 17 | JwtBase64.encodeString(bytes) 18 | 19 | def generateJwk( 20 | keyType: String = PubKeyAlgo 21 | ): (Jwk, RSAPublicKey, RSAPrivateKey) = { 22 | val generator = KeyPairGenerator.getInstance(PubKeyAlgo) 23 | generator.initialize(2048) // scalastyle:ignore 24 | val keyPair = generator.generateKeyPair() 25 | val privateKey = keyPair.getPrivate.asInstanceOf[RSAPrivateKey] 26 | val publicKey = keyPair.getPublic.asInstanceOf[RSAPublicKey] 27 | val keyId = UUID.randomUUID().toString 28 | 29 | val modulus = publicKey.getModulus 30 | val exponent = publicKey.getPublicExponent 31 | 32 | val jwk = Jwk( 33 | kty = keyType, 34 | kid = Option(keyId), 35 | alg = Option(JwtAlgorithm.RS256.name), 36 | use = Option("sig"), 37 | key_ops = None, 38 | x5u = None, 39 | x5c = None, 40 | x5t = None, 41 | n = Option(base64UrlEncode(modulus.toByteArray)), 42 | e = Option(base64UrlEncode(exponent.toByteArray)) 43 | ) 44 | 45 | (jwk, publicKey, privateKey) 46 | } 47 | 48 | val (goodJwk, rsa256PubKey, rsa256PrivKey) = generateJwk() 49 | 50 | val bad = generateJwk("HMAC")._1 51 | 52 | "Jwk object" should { 53 | 54 | "generate a public key from a Jwk instance" in { 55 | val res = Jwk.generatePublicKey(goodJwk).success.value 56 | res.getAlgorithm mustBe rsa256PubKey.getAlgorithm 57 | res.getFormat mustBe rsa256PubKey.getFormat 58 | res.getEncoded must contain allElementsOf rsa256PubKey.getEncoded 59 | } 60 | 61 | "fail to generate a key if the algorithm isn't valid" in { 62 | Jwk 63 | .generatePublicKey(bad) 64 | .failure 
65 | .exception mustBe an[InvalidPublicKeyError] 66 | } 67 | 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/config/DynamicConfigurations.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.config 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.DynamicCfg 4 | import net.scalytica.kafka.wsproxy.config.DynamicConfigHandlerProtocol._ 5 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 6 | 7 | /** 8 | * Class containing active dynamic configurations. Used as the state object for 9 | * the [[DynamicConfigHandler]] actor. 10 | * 11 | * @param configs 12 | * A [[Map]] with String keys and [[DynamicCfg]] values. 13 | */ 14 | case class DynamicConfigurations( 15 | configs: Map[String, DynamicCfg] = Map.empty 16 | ) extends WithProxyLogger { 17 | 18 | def isEmpty: Boolean = configs.isEmpty 19 | 20 | def keys: Set[String] = configs.keySet 21 | 22 | def values: Seq[DynamicCfg] = configs.values.toSeq 23 | 24 | def hasKey(key: String): Boolean = configs.keySet.contains(key) 25 | 26 | def findByKey(key: String): Option[DynamicCfg] = configs.get(key) 27 | 28 | private[config] def update( 29 | rec: UpdateDynamicConfigRecord 30 | ): Option[DynamicConfigurations] = { 31 | findByKey(rec.key) match { 32 | case Some(existing) => 33 | if (existing == rec.value) { 34 | log.trace(s"Dynamic config for ${rec.key} did not change") 35 | None 36 | } else { 37 | log.trace(s"Updating dynamic config for ${rec.key}") 38 | Option(DynamicConfigurations(configs.updated(rec.key, rec.value))) 39 | } 40 | 41 | case None => 42 | log.trace(s"Adding dynamic config for ${rec.key}") 43 | Option(DynamicConfigurations(configs.updated(rec.key, rec.value))) 44 | } 45 | } 46 | 47 | private[config] def remove( 48 | rem: RemoveDynamicConfigRecord 49 | ): Option[DynamicConfigurations] = { 50 | if (hasKey(rem.key)) { 51 | log.trace(s"Removing dynamic config for ${rem.key}") 52 | Option(DynamicConfigurations(configs.removed(rem.key))) 53 | } else { 54 | log.trace(s"Dynamic config with key ${rem.key} could not be found.") 55 | None 56 | } 57 | } 58 | 59 | private[config] def removeAll(): DynamicConfigurations = 60 | DynamicConfigurations() 61 | } 62 | 63 | object DynamicConfigurations { 64 | 65 | def apply(configs: UpdateDynamicConfigRecord*): DynamicConfigurations = { 66 | DynamicConfigurations(configs.map(c => c.key -> c.value).toMap) 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/actor/ActorWithProtocolExtensions.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.actor 2 | 3 | import java.util.concurrent.TimeoutException 4 | 5 | import scala.concurrent.ExecutionContext 6 | import scala.concurrent.Future 7 | 8 | import net.scalytica.kafka.wsproxy._ 9 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 10 | 11 | import org.apache.pekko.actor.typed.ActorRef 12 | import org.apache.pekko.actor.typed.Scheduler 13 | import org.apache.pekko.actor.typed.scaladsl.AskPattern._ 14 | import org.apache.pekko.util.Timeout 15 | 16 | trait ActorWithProtocolExtensions[Proto, Res] extends WithProxyLogger { 17 | val ref: ActorRef[Proto] 18 | 19 | /** 20 | * Simple extension to perform an {{{ask}}} against a typed actor. 
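 * A usage sketch (the protocol message shown here is hypothetical): * {{{ * // given: case class GetState(replyTo: ActorRef[Res]) extends Proto * // doAsk(replyTo => GetState(replyTo)) * }}}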
21 | * 22 | * @param f 23 | * The function that will send the command to the actor. 24 | * @param timeout 25 | * The [[Timeout]] to use. 26 | * @param scheduler 27 | * The [[Scheduler]] to use. 28 | * @return 29 | * Eventually returns a result of type [[Res]]. 30 | */ 31 | def doAsk(f: ActorRef[Res] => Proto)( 32 | implicit timeout: Timeout, 33 | scheduler: Scheduler 34 | ): Future[Res] = ref.ask[Res](r => f(r)) 35 | 36 | /** 37 | * Same as [[doAsk()]], but adds the simple recovery behaviour that is most 38 | * commonly needed. 39 | * 40 | * @param f 41 | * The function that will send the command to the actor. 42 | * @param incomplete 43 | * The function to apply in case of a [[TimeoutException]]. Typically this 44 | * will be an {{{IncompleteOp}}} instance from the given protocol [[Proto]]. 45 | * @param ec 46 | * The [[ExecutionContext]] to use. 47 | * @param timeout 48 | * The [[Timeout]] to use. 49 | * @param scheduler 50 | * The [[Scheduler]] to use. 51 | * @return 52 | * Eventually returns a result of type [[Res]]. 53 | */ 54 | def doAskWithStandardRecovery( 55 | f: ActorRef[Res] => Proto 56 | )(incomplete: (String, Throwable) => Res)( 57 | implicit ec: ExecutionContext, 58 | timeout: Timeout, 59 | scheduler: Scheduler 60 | ): Future[Res] = doAsk(f).recoverWith { 61 | case t: TimeoutException => 62 | val msg = s"Timeout calling ${t.operationName}." 63 | log.debug(msg, t) 64 | Future.successful(incomplete(msg, t)) 65 | 66 | case t: Throwable => 67 | log.warn(s"Unhandled error calling ${t.operationName}.", t) 68 | throw t 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/logging/ColouredLevelSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.logging 2 | 3 | import ch.qos.logback.classic.Level 4 | import ch.qos.logback.classic.spi.LoggingEvent 5 | import org.scalatest.BeforeAndAfterEach 6 | import org.scalatest.matchers.must.Matchers 7 | import org.scalatest.wordspec.AnyWordSpec 8 | 9 | import scala.io.AnsiColor 10 | 11 | class ColouredLevelSpec 12 | extends AnyWordSpec 13 | with Matchers 14 | with BeforeAndAfterEach { 15 | 16 | override def afterEach(): Unit = { 17 | super.afterEach() 18 | val _ = sys.props -= "wsproxy.log.noformat" 19 | } 20 | 21 | val instance = new ColouredLevel 22 | 23 | private[this] def createLogEvent(level: Level): LoggingEvent = { 24 | val le = new LoggingEvent() 25 | le.setLoggerName(classOf[ColouredLevelSpec].getCanonicalName) 26 | le.setLevel(level) 27 | le.setMessage(s"Testing ${level.levelStr.toLowerCase} output") 28 | le.setTimeStamp(System.currentTimeMillis()) 29 | le 30 | } 31 | 32 | "The ColouredLevel" should { 33 | 34 | "not add any colour if disabled via system property" in { 35 | sys.props += "wsproxy.log.noformat" -> "true" 36 | 37 | val le = createLogEvent(Level.TRACE) 38 | val res = instance.convert(le) 39 | 40 | res mustBe Level.TRACE.levelStr 41 | } 42 | 43 | "add colour to error level " in { 44 | val le = createLogEvent(Level.ERROR) 45 | val res = instance.convert(le) 46 | 47 | res mustBe AnsiColor.RED + Level.ERROR.levelStr + AnsiColor.RESET 48 | } 49 | 50 | "add colour to warn level " in { 51 | val le = createLogEvent(Level.WARN) 52 | val res = instance.convert(le) 53 | 54 | res mustBe AnsiColor.YELLOW + Level.WARN.levelStr + AnsiColor.RESET 55 | } 56 | 57 | "add colour to info level " in { 58 | val le = createLogEvent(Level.INFO) 59 | val res = instance.convert(le) 60
| 61 | res mustBe AnsiColor.WHITE + Level.INFO.levelStr + AnsiColor.RESET 62 | } 63 | 64 | "add colour to debug level " in { 65 | val le = createLogEvent(Level.DEBUG) 66 | val res = instance.convert(le) 67 | 68 | res mustBe AnsiColor.CYAN + Level.DEBUG.levelStr + AnsiColor.RESET 69 | } 70 | 71 | "add colour to trace level " in { 72 | val le = createLogEvent(Level.TRACE) 73 | val res = instance.convert(le) 74 | 75 | res mustBe AnsiColor.BLUE + Level.TRACE.levelStr + AnsiColor.RESET 76 | } 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/streams/ProxyFlowExtras.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.streams 2 | 3 | import scala.concurrent.ExecutionContext 4 | import scala.concurrent.duration._ 5 | 6 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 7 | 8 | import org.apache.pekko.NotUsed 9 | import org.apache.pekko.http.scaladsl.model.ws.BinaryMessage 10 | import org.apache.pekko.http.scaladsl.model.ws.Message 11 | import org.apache.pekko.http.scaladsl.model.ws.TextMessage 12 | import org.apache.pekko.stream.Materializer 13 | import org.apache.pekko.stream.scaladsl.Flow 14 | import org.apache.pekko.stream.scaladsl.Sink 15 | import org.apache.pekko.util.ByteString 16 | 17 | trait ProxyFlowExtras { self: WithProxyLogger => 18 | 19 | /** 20 | * Helper function to transform a [[TextMessage]] to a String. 21 | * 22 | * @param mat 23 | * The implicit [[Materializer]] to use 24 | * @param ec 25 | * The implicit [[ExecutionContext]] to use 26 | * @return 27 | * A [[Flow]] with [[Message]] input and [[String]] output 28 | */ 29 | def wsMessageToStringFlow( 30 | implicit mat: Materializer, 31 | ec: ExecutionContext 32 | ): Flow[Message, String, NotUsed] = 33 | Flow[Message] 34 | .mapConcat { 35 | case tm: TextMessage => TextMessage(tm.textStream) :: Nil 36 | case bm: BinaryMessage => bm.dataStream.runWith(Sink.ignore); Nil 37 | } 38 | .mapAsync(1)(_.toStrict(5 seconds).map(_.text)) 39 | 40 | /** 41 | * Helper function to transform a [[BinaryMessage]] to a [[ByteString]]. 
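 * Note that incoming [[TextMessage]] elements are drained and discarded, so only binary traffic reaches the downstream flow.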
42 | * 43 | * @param mat 44 | * The implicit [[Materializer]] to use 45 | * @param ec 46 | * The implicit [[ExecutionContext]] to use 47 | * @return 48 | * A [[Flow]] with [[Message]] input and [[ByteString]] output 49 | */ 50 | def wsMessageToByteStringFlow( 51 | implicit mat: Materializer, 52 | ec: ExecutionContext 53 | ): Flow[Message, ByteString, NotUsed] = Flow[Message] 54 | .log("wsMessageToByteStringFlow", _ => "Concatenating incoming bytes...") 55 | .mapConcat { 56 | case tm: TextMessage => 57 | log.trace("Received TextMessage through socket") 58 | tm.textStream.runWith(Sink.ignore); Nil 59 | 60 | case bm: BinaryMessage => 61 | log.trace("Received BinaryMessage through socket") 62 | BinaryMessage(bm.dataStream) :: Nil 63 | } 64 | .log("wsMessageToByteStringFlow", m => s"Aggregated message: $m") 65 | .mapAsync(1)(_.toStrict(5 seconds).map(_.data)) 66 | 67 | } 68 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/session/ActiveSessionsSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.session 2 | 3 | import net.scalytica.kafka.wsproxy.models.WsGroupId 4 | import org.scalatest.matchers.must.Matchers 5 | import org.scalatest.wordspec.AnyWordSpec 6 | import org.scalatest.{CustomEitherValues, OptionValues} 7 | 8 | // scalastyle:off magic.number 9 | class ActiveSessionsSpec 10 | extends AnyWordSpec 11 | with Matchers 12 | with OptionValues 13 | with CustomEitherValues { 14 | 15 | val s1 = ConsumerSession(SessionId("s1"), WsGroupId("c1")) 16 | val s2 = ProducerSession(SessionId("s2")) 17 | val s3 = ConsumerSession(SessionId("s3"), WsGroupId("c3")) 18 | 19 | val expected = Map( 20 | s1.sessionId -> s1, 21 | s2.sessionId -> s2, 22 | s3.sessionId -> s3 23 | ) 24 | 25 | val as = ActiveSessions(expected) 26 | 27 | "An ActiveSessions" should { 28 | 29 | "be initialised with an empty session map" in { 30 | ActiveSessions().sessions mustBe empty 31 | } 32 | 33 | "be initialised with a non-empty session map" in { 34 | as.sessions must contain allElementsOf expected 35 | } 36 | 37 | "be initialised with a list of sessions" in { 38 | ActiveSessions(s1, s2, s3).sessions must contain allElementsOf expected 39 | } 40 | 41 | "return none if a session is not found" in { 42 | as.find(SessionId("s10")) mustBe None 43 | } 44 | 45 | "add a new session" in { 46 | val ns = ConsumerSession(SessionId("s4"), WsGroupId("c4"), 4) 47 | val as2 = as.add(ns) 48 | 49 | val res = as2.rightValue.sessions 50 | res must contain allElementsOf (expected + (ns.sessionId -> ns)) 51 | } 52 | 53 | "update an existing session with a new session object" in { 54 | val ns = ConsumerSession(SessionId("s2"), WsGroupId("c2"), 4) 55 | val as2 = as.updateSession(SessionId("s2"), ns) 56 | 57 | as2.rightValue must not be as 58 | as2.rightValue.find(SessionId("s2")).value mustBe ns 59 | } 60 | 61 | "add a new session if it didn't previously exist" in { 62 | val ns = ConsumerSession(SessionId("s4"), WsGroupId("c4"), 4) 63 | val as2 = as.updateSession(SessionId("s4"), ns) 64 | 65 | val res = as2.rightValue.sessions 66 | res must contain allElementsOf (expected + (ns.sessionId -> ns)) 67 | } 68 | 69 | "remove a session" in { 70 | val as2 = as.removeSession(SessionId("s2")) 71 | 72 | as2.rightValue.sessions must contain allElementsOf ( 73 | expected - SessionId("s2") 74 | ) 75 | } 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- 
/server/src/main/scala/net/scalytica/kafka/wsproxy/session/ActiveSessions.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.session 2 | 3 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 4 | import net.scalytica.kafka.wsproxy.models.WsServerId 5 | 6 | /** 7 | * Data type for keeping track of active sessions. Each active session is 8 | * represented as a key-value pair, where the key is the [[SessionId]] of the 9 | * session. 10 | * @param sessions 11 | * a [[Map]] with [[SessionId]] keys and [[Session]] values. 12 | * @see 13 | * [[Session]] 14 | */ 15 | case class ActiveSessions( 16 | sessions: Map[SessionId, Session] = Map.empty 17 | ) extends WithProxyLogger { 18 | 19 | def find(sessionId: SessionId): Option[Session] = sessions.get(sessionId) 20 | 21 | def removeInstancesFromServerId(serverId: WsServerId): ActiveSessions = { 22 | log.trace(s"Removing all sessions for $serverId...") 23 | val s = sessions.map { case (sid, session) => 24 | sid -> session.removeInstancesFromServerId(serverId) 25 | } 26 | copy(sessions = s) 27 | } 28 | 29 | def add(session: Session): Either[String, ActiveSessions] = { 30 | sessions.find(_._1 == session.sessionId) match { 31 | case Some(_) => 32 | val msg = s"Session for ${session.sessionId} already exists" 33 | log.trace(msg) 34 | Left(msg) 35 | 36 | case None => 37 | log.trace(s"Adding session $session...") 38 | val ns = session.sessionId -> session 39 | Right(copy(sessions = sessions + ns)) 40 | } 41 | } 42 | 43 | def updateSession( 44 | sessionId: SessionId, 45 | session: Session 46 | ): Either[String, ActiveSessions] = { 47 | log.trace(s"Updating session for $sessionId...") 48 | sessions.find(_._1 == sessionId) match { 49 | case None => 50 | log.trace(s"Session for $sessionId was not found...") 51 | add(session) 52 | 53 | case Some((gid, _)) => 54 | Right(copy(sessions = sessions.updated(gid, session))) 55 | } 56 | } 57 | 58 | def removeSession(sessionId: SessionId): Either[String, ActiveSessions] = { 59 | sessions.find(_._1 == sessionId) match { 60 | case None => 61 | val msg = s"Session for $sessionId does not exist" 62 | log.trace(msg) 63 | Left(msg) 64 | case Some(_) => 65 | log.trace(s"Removing session for $sessionId") 66 | Right(copy(sessions = sessions - sessionId)) 67 | } 68 | } 69 | } 70 | 71 | object ActiveSessions { 72 | 73 | def apply(session: Session*): ActiveSessions = 74 | ActiveSessions(session.map(s => s.sessionId -> s).toMap) 75 | 76 | } 77 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/test/FileLoader.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.test 2 | 3 | import java.net.URI 4 | import java.nio.charset.Charset 5 | import java.nio.file.FileSystems.{getFileSystem, newFileSystem} 6 | import java.nio.file.{Path, Paths} 7 | 8 | import org.apache.pekko.stream.{IOResult, Materializer} 9 | import org.apache.pekko.stream.scaladsl.{FileIO, Source} 10 | import org.apache.pekko.util.ByteString 11 | import com.typesafe.config.{Config, ConfigFactory} 12 | 13 | import scala.jdk.CollectionConverters._ 14 | import scala.concurrent.{Await, Future} 15 | import scala.concurrent.duration._ 16 | import scala.util.Try 17 | 18 | trait FileLoader { self => 19 | 20 | def fileSource(p: Path): Source[ByteString, Future[IOResult]] = 21 | FileIO.fromPath(p) 22 | 23 | def loadFile(f: String): Path = Paths.get(self.getClass.getResource(f).toURI)
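// Note: loadFile resolves via getResource directly, so unlike filePath // further down it cannot resolve resources packaged inside a JAR file.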
24 | 25 | def loadConfig(f: String = "/application.conf"): Config = 26 | ConfigFactory.parseFile(filePath(f).toFile).resolve() 27 | 28 | def testConfigPath: Path = filePath("/application-test.conf") 29 | 30 | private[this] def loadLogbackTestConfig( 31 | fileName: String 32 | )(implicit mat: Materializer): String = 33 | Await.result( 34 | FileIO.fromPath(filePath(s"/$fileName")).runFold("") { case (str, bs) => 35 | str + bs.decodeString(Charset.forName("UTF-8")) 36 | }, 37 | 2 seconds 38 | ) 39 | 40 | def testLogbackConfig(implicit mat: Materializer): String = 41 | loadLogbackTestConfig("logback-test.xml") 42 | 43 | def logbackConfigForTestcase(testCase: Int = 1)( 44 | implicit mat: Materializer 45 | ): String = 46 | loadLogbackTestConfig(s"logback-config-testcase-$testCase.xml") 47 | 48 | def filePath(f: String): Path = { 49 | val fileUrl = self.getClass.getResource(f) 50 | val thePath = { 51 | // Check if we're referencing a file inside a JAR file. 52 | // If yes, we need to handle that explicitly by creating a FileSystem 53 | // for the jar: URI, since Paths.get cannot resolve classpath 54 | // resources inside a JAR file directly. 55 | if (fileUrl.toString.contains("!")) { 56 | val arr = fileUrl.toString.split("!") 57 | val uri = URI.create(arr(0)) 58 | val fileSystem = Try(getFileSystem(uri)).toEither match { 59 | case Left(_) => newFileSystem(uri, Map.empty[String, String].asJava) 60 | case Right(fs) => fs 61 | } 62 | fileSystem.getPath(arr(1)) 63 | } else { 64 | Paths.get(fileUrl.toURI) 65 | } 66 | } 67 | thePath 68 | } 69 | } 70 | 71 | object FileLoader extends FileLoader 72 | -------------------------------------------------------------------------------- /docs/faq.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: faq 3 | title: FAQ 4 | slug: /faq 5 | --- 6 | 7 | ## FAQ 8 | 9 | ##### Can I use Kafka WebSocket Proxy from a frontend client? 10 | 11 | In general, yes. But there are a couple of important things to consider first. 12 | 13 | * A WebSocket is a long-lived connection. If you open one connection per 14 | browser tab, you will have to use a different `groupId` for each tab. 15 | 16 | * Consider the amount of data being sent to your browser and how frequently each 17 | tab re-renders components based on the data. In a worst-case scenario, 18 | browser performance will degrade to the point where the browser locks up completely. 19 | 20 | * Consider using one or more Service Workers to connect to the WebSocket Proxy. 21 | This will limit the number of connections required within the same browser. 22 | 23 | 24 | ##### My consumer client fails to reconnect because the `clientId` is in use 25 | 26 | If the WebSocket client gets disconnected, and your code attempts to reconnect 27 | automatically, you might find that the connection is rejected because the 28 | `clientId` is already in use. This is typically a transient state. A `clientId` 29 | identifies a specific client and is used to manage some internal 30 | state tied to it. Whenever a consumer client disconnects, the 31 | `kafka-websocket-proxy` needs to perform some cleanup tasks related to this 32 | internal `clientId` state. This can, in some cases, take a few milliseconds 33 | longer than desired. CPU usage, memory availability, GC cycles, etc. can all 34 | affect the cleanup duration.
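A minimal reconnect sketch (assuming a Pekko `ActorSystem`; the `connect()` function that re-establishes the WebSocket is hypothetical):

```scala
import scala.concurrent.duration._

import org.apache.pekko.actor.typed.ActorSystem

// Delay the reconnect attempt so the proxy has time to clean up the
// internal state tied to the previous clientId.
def scheduleReconnect(connect: () => Unit)(implicit system: ActorSystem[_]): Unit = {
  val _ = system.scheduler.scheduleOnce(
    2.seconds,
    new Runnable { def run(): Unit = connect() }
  )(system.executionContext)
}
```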
35 | 36 | To ensure your client is able to reuse the same `clientId`, it is recommended 37 | to wait a couple of seconds, as in the sketch above, before attempting to reconnect. 38 | 39 | If the problem persists, please open a ticket in the 40 | [issue tracker](https://gitlab.com/kpmeen/kafka-websocket-proxy/-/issues) and 41 | describe the issue in as much detail as possible to help us understand 42 | the problem. 43 | 44 | 45 | ##### The WebSocket connection between my client and the proxy terminates at regular intervals 46 | 47 | If there is a firewall (such as an F5) between your client and the Kafka WebSocket Proxy, it may 48 | enforce rules that prevent a connection from staying open for more than a given amount of time. 49 | Usually this happens because rules intended for regular HTTP connections are triggered. To address this, 50 | ensure the firewall allows WebSocket connections and permits them to 51 | remain open, so that it does not terminate the connection. 52 | 53 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/jmx/mbeans/ProxyStatusMXBeanSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.jmx.mbeans 2 | 3 | import net.scalytica.kafka.wsproxy.jmx.mbeans.ProxyStatusProtocol._ 4 | import net.scalytica.kafka.wsproxy.models.BrokerInfo 5 | import org.apache.pekko.actor.testkit.typed.scaladsl.TestProbe 6 | import org.apache.pekko.actor.typed.ActorRef 7 | 8 | import scala.concurrent.duration._ 9 | 10 | // scalastyle:off magic.number 11 | class ProxyStatusMXBeanSpec extends MXBeanSpecLike { 12 | 13 | override protected val testTopicPrefix: String = "mxbean-test-topic" 14 | 15 | val beanName = "bean-spec-wsproxy-status-a" 16 | 17 | val brokerInfoList: List[BrokerInfo] = List( 18 | BrokerInfo(1, "host1", 1234, None), 19 | BrokerInfo(2, "host2", 5678, Some("rack2")) 20 | ) 21 | 22 | def jmxContext[T]( 23 | body: ( 24 | ActorRef[ProxyStatusCommand], 25 | TestProbe[ProxyStatusResponse] 26 | ) => T 27 | ): T = { 28 | val appCfg = plainTestConfig() 29 | val ref = tk.spawn(ProxyStatusMXBeanActor(appCfg), beanName) 30 | val probe = tk.createTestProbe[ProxyStatusResponse]() 31 | 32 | val res = body(ref, probe) 33 | 34 | ref ! Stop 35 | probe.expectTerminated(ref) 36 | 37 | jmxq.findByTypeAndName[ProxyStatusMXBean](beanName) mustBe None 38 | 39 | res 40 | } 41 | 42 | "The ProxyStatusMXBean" should { 43 | 44 | "contain all relevant configuration data" in jmxContext { (psaRef, probe) => 45 | psaRef !
UpdateKafkaClusterInfo(brokerInfoList, probe.ref) 46 | 47 | // Ensure the actor is up and running and registered in the MBean registry 48 | // before continuing with the test 49 | probe.expectMessage(5 seconds, KafkaClusterInfoUpdated) 50 | 51 | val psb = jmxq.getMxBean(beanName, classOf[ProxyStatusMXBean]) 52 | 53 | psb.isHttpEnabled mustBe true 54 | psb.isHttpsEnabled mustBe false 55 | psb.isBasicAuthEnabled mustBe false 56 | psb.isOpenIDConnectEnabled mustBe false 57 | psb.getHttpPort mustBe 8078 58 | psb.getHttpsPort mustBe 0 59 | psb.getSessionStateTopicName mustBe "_wsproxy.session.state" 60 | psb.getUpSince must not be empty 61 | psb.getUptimeMillis mustBe >(0L) 62 | psb.getBrokerInfoListMXView.brokers must have size 2 63 | val b1 = psb.getBrokerInfoListMXView.brokers.headOption.value 64 | b1.id mustBe 1 65 | b1.host mustBe "host1" 66 | b1.port mustBe 1234 67 | b1.rack mustBe null // scalastyle:ignore 68 | 69 | val b2 = psb.getBrokerInfoListMXView.brokers.lastOption.value 70 | b2.id mustBe 2 71 | b2.host mustBe "host2" 72 | b2.port mustBe 5678 73 | b2.rack mustBe "rack2" 74 | } 75 | 76 | } 77 | 78 | } 79 | // scalastyle:on 80 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/codecs/SessionSerde.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.codecs 2 | 3 | import net.scalytica.kafka.wsproxy.codecs.Implicits.oldSessionDecoder 4 | import net.scalytica.kafka.wsproxy.codecs.Implicits.sessionDecoder 5 | import net.scalytica.kafka.wsproxy.codecs.Implicits.sessionEncoder 6 | import net.scalytica.kafka.wsproxy.errors.InvalidSessionStateFormat 7 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 8 | import net.scalytica.kafka.wsproxy.models.FullConsumerId 9 | import net.scalytica.kafka.wsproxy.session.ConsumerInstance 10 | import net.scalytica.kafka.wsproxy.session.ConsumerSession 11 | import net.scalytica.kafka.wsproxy.session.OldSession 12 | import net.scalytica.kafka.wsproxy.session.Session 13 | import net.scalytica.kafka.wsproxy.session.SessionId 14 | 15 | import io.circe.parser.parse 16 | import io.circe.syntax._ 17 | 18 | class SessionSerde extends StringBasedSerde[Session] with WithProxyLogger { 19 | 20 | override def serialize(topic: String, data: Session): Array[Byte] = 21 | Option(data).map(d => ser.serialize(topic, d.asJson.noSpaces)).orNull 22 | 23 | override def deserialize(topic: String, data: Array[Byte]): Session = { 24 | Option(data).map { d => 25 | val str = des.deserialize(topic, d) 26 | 27 | log.trace(s"Deserialized session message from topic $topic to:\n$str") 28 | 29 | parse(str).flatMap(_.as[Session]) match { 30 | case Left(err) => 31 | log.warn( 32 | "Session data could not be deserialized to latest format, " + 33 | "falling back to old format" 34 | ) 35 | parse(str).flatMap(_.as[OldSession]) match { 36 | case Left(oldErr) => 37 | log.error( 38 | s"Exception deserializing session from topic $topic:" + 39 | s"\n$str" + 40 | s"\nfirst error: ${err.getMessage}" + 41 | s"\nsecond error: ${oldErr.getMessage}" 42 | ) 43 | throw InvalidSessionStateFormat( 44 | s"Exception deserializing session data from topic $topic" 45 | ) 46 | 47 | case Right(old) => 48 | ConsumerSession( 49 | sessionId = SessionId(old.consumerGroupId), 50 | groupId = old.consumerGroupId, 51 | maxConnections = old.consumerLimit, 52 | instances = old.consumers.map { o => 53 | ConsumerInstance( 54 | id =
FullConsumerId(old.consumerGroupId, o.id), 55 | serverId = o.serverId 56 | ) 57 | } 58 | ) 59 | } 60 | 61 | case Right(session) => session 62 | } 63 | }.orNull 64 | } 65 | 66 | } 67 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/session/SessionOpResult.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.session 2 | 3 | /** Result type used for operations to alter the session state. */ 4 | sealed trait SessionOpResult { self => 5 | def session: Session 6 | 7 | def asString: String = { 8 | val tn = self.getClass.getTypeName 9 | val clean = 10 | if (tn.contains('$')) tn.substring(tn.lastIndexOf("$")).stripPrefix("$") 11 | else tn 12 | 13 | clean 14 | .foldLeft("") { (str, in) => 15 | if (in.isUpper) str + " " + in.toLower 16 | else str + in 17 | } 18 | .trim 19 | } 20 | } 21 | 22 | case class SessionStateRestored() extends SessionOpResult { 23 | override def session = 24 | throw new NoSuchElementException( 25 | "No access to session instance when restoring global session state" 26 | ) 27 | } 28 | 29 | case class RestoringSessionState() extends SessionOpResult { 30 | override def session = 31 | throw new NoSuchElementException( 32 | "No access to session instance when restoring global session state" 33 | ) 34 | } 35 | 36 | case object ProducerSessionsDisabled extends SessionOpResult { 37 | override def session = 38 | throw new NoSuchElementException( 39 | "Producer sessions have been disabled in the configuration." 40 | ) 41 | } 42 | 43 | case class SessionInitialised(session: Session) extends SessionOpResult 44 | 45 | case class SessionUpdated(session: Session) extends SessionOpResult 46 | 47 | case class InstanceAdded(session: Session) extends SessionOpResult 48 | 49 | case class InstanceRemoved(session: Session) extends SessionOpResult 50 | 51 | case class InstanceExists(session: Session) extends SessionOpResult 52 | 53 | case class InstanceTypeForSessionIncorrect(session: Session) 54 | extends SessionOpResult 55 | 56 | case class InstanceLimitReached(session: Session) extends SessionOpResult 57 | 58 | case class InstanceDoesNotExists(session: Session) extends SessionOpResult 59 | 60 | case class ProducerInstanceMissingId(session: Session) extends SessionOpResult 61 | 62 | case class InstancesForServerRemoved() extends SessionOpResult { 63 | 64 | override def session = 65 | throw new NoSuchElementException( 66 | "Cannot access session value when it's not found" 67 | ) 68 | } 69 | 70 | case class SessionNotFound(sessionId: SessionId) extends SessionOpResult { 71 | 72 | override def session = 73 | throw new NoSuchElementException( 74 | "Cannot access session value when it's not found" 75 | ) 76 | } 77 | 78 | case class IncompleteOperation(reason: String) extends SessionOpResult { 79 | 80 | override def session = 81 | throw new NoSuchElementException( 82 | "Cannot access session value when it's not found" 83 | ) 84 | } 85 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/auth/KafkaLoginModulesSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.auth 2 | 3 | import net.scalytica.kafka.wsproxy.models.AclCredentials 4 | import org.apache.kafka.clients.admin.ScramMechanism 5 | import org.apache.kafka.common.security.plain.internals.PlainSaslServer 6 | import 
org.scalatest.OptionValues 7 | import org.scalatest.matchers.must.Matchers 8 | import org.scalatest.wordspec.AnyWordSpec 9 | 10 | class KafkaLoginModulesSpec 11 | extends AnyWordSpec 12 | with Matchers 13 | with OptionValues { 14 | 15 | val DummyCredentials = AclCredentials("foo", "bar") 16 | 17 | val PlainLoginModuleString = 18 | "org.apache.kafka.common.security.plain.PlainLoginModule" 19 | 20 | val ScramLoginModuleString = 21 | "org.apache.kafka.common.security.scram.ScramLoginModule" 22 | 23 | "The KafkaLoginModules" should { 24 | 25 | "return correct login module for sasl.mechanism PLAIN auth" in { 26 | val res = KafkaLoginModules.fromSaslMechanism( 27 | maybeSaslMechanism = Option(PlainSaslServer.PLAIN_MECHANISM), 28 | maybeAclCredentials = Option(DummyCredentials) 29 | ) 30 | res.value.moduleClassName mustBe PlainLoginModuleString 31 | } 32 | 33 | "return correct login module for sasl.mechanism SCRAM-SHA-512 auth" in { 34 | val res = KafkaLoginModules.fromSaslMechanism( 35 | maybeSaslMechanism = 36 | Option(ScramMechanism.SCRAM_SHA_512.mechanismName()), 37 | maybeAclCredentials = Option(DummyCredentials) 38 | ) 39 | res.value.moduleClassName mustBe ScramLoginModuleString 40 | } 41 | 42 | "return correct login module for sasl.mechanism SCRAM-SHA-256 auth" in { 43 | val res = KafkaLoginModules.fromSaslMechanism( 44 | maybeSaslMechanism = 45 | Option(ScramMechanism.SCRAM_SHA_256.mechanismName()), 46 | maybeAclCredentials = Option(DummyCredentials) 47 | ) 48 | res.value.moduleClassName mustBe ScramLoginModuleString 49 | } 50 | 51 | "return no login module for unsupported sasl.mechanism" in { 52 | KafkaLoginModules.fromSaslMechanism( 53 | maybeSaslMechanism = Option("SCRAM-SHA-258"), 54 | maybeAclCredentials = Option(DummyCredentials) 55 | ) mustBe None 56 | 57 | KafkaLoginModules.fromSaslMechanism( 58 | maybeSaslMechanism = Option("OAUTHBEARER"), 59 | maybeAclCredentials = Option(DummyCredentials) 60 | ) mustBe None 61 | 62 | KafkaLoginModules.fromSaslMechanism( 63 | maybeSaslMechanism = Option("GSSAPI"), 64 | maybeAclCredentials = Option(DummyCredentials) 65 | ) mustBe None 66 | 67 | KafkaLoginModules.fromSaslMechanism( 68 | maybeSaslMechanism = Option("LDAP"), 69 | maybeAclCredentials = Option(DummyCredentials) 70 | ) mustBe None 71 | } 72 | 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/utils/BlockingRetrySpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.utils 2 | 3 | import net.scalytica.kafka.wsproxy.NiceClassNameExtensions 4 | import org.scalatest.concurrent.ScalaFutures 5 | import org.scalatest.matchers.must.Matchers 6 | import org.scalatest.time.{Millis, Seconds, Span} 7 | import org.scalatest.wordspec.AnyWordSpec 8 | 9 | import scala.concurrent.TimeoutException 10 | import scala.concurrent.duration._ 11 | import scala.util.control.NoStackTrace 12 | 13 | // scalastyle:off magic.number 14 | class BlockingRetrySpec extends AnyWordSpec with Matchers with ScalaFutures { 15 | 16 | case class RetryTestException() 17 | extends Exception("retry test exception") 18 | with NoStackTrace 19 | 20 | implicit override val patienceConfig: PatienceConfig = PatienceConfig( 21 | timeout = scaled(Span(10, Seconds)), 22 | interval = scaled(Span(15, Millis)) 23 | ) 24 | 25 | private[this] val completed = "completed" 26 | 27 | // Function for tests that should fail. 
For example, when testing retries. 28 | private[this] def failFastFuture: Throwable = throw RetryTestException() 29 | 30 | // Function that will run for the duration specified in the delay 31 | private[this] def testOp[T]( 32 | value: T, 33 | delay: FiniteDuration = 0 seconds 34 | ): T = { 35 | Thread.sleep(delay.toMillis) 36 | value 37 | } 38 | 39 | // Function that will fail with a TimeoutException 40 | private[this] def timeoutOp[T](delay: FiniteDuration): T = { 41 | Thread.sleep(delay.toMillis) 42 | throw new TimeoutException("Timing out from test") 43 | } 44 | 45 | s"The ${BlockingRetry.niceClassNameShort}" should { 46 | 47 | "successfully complete a future" in { 48 | BlockingRetry.retry(1 second, 10 millis, 3)(testOp("completed")) { 49 | fail("Expected successful Future execution", _) 50 | } mustBe completed 51 | } 52 | 53 | "successfully complete a future before retry count reaches limit" in { 54 | BlockingRetry.retry(4 seconds, 10 millis, 5)( 55 | testOp("completed", 500 millis) 56 | ) { err => 57 | fail("Expected successful Future execution", err) 58 | } mustBe completed 59 | } 60 | 61 | "allow the Future to fail when the timeout expires" in { 62 | assertThrows[TimeoutException] { 63 | BlockingRetry.retry( 64 | timeout = 3 seconds, 65 | interval = 10 millis, 66 | numRetries = 3 67 | )(timeoutOp(2 seconds))(err => throw err) 68 | } 69 | } 70 | 71 | "allow the Future to fail when all retries have been used" in { 72 | assertThrows[RetryTestException] { 73 | BlockingRetry.retry( 74 | timeout = 3 seconds, 75 | interval = 10 millis, 76 | numRetries = 3 77 | )(failFastFuture)(throw _) 78 | } 79 | } 80 | 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /server/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | <configuration> 2 | <conversionRule conversionWord="colouredLevel" 3 | converterClass="net.scalytica.kafka.wsproxy.logging.ColouredLevel"/> 4 | <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> 5 | <withJansi>true</withJansi> 6 | <encoder> 7 | <pattern>%d{HH:mm:ss.SSS} %colouredLevel %logger{36} [%F:%L] - %msg%n</pattern> 8 | </encoder> 9 | </appender> 10 | <root level="WARN"> 11 | <appender-ref ref="STDOUT"/> 12 | </root> 13 | </configuration> -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/config/DynamicConfigConsumer.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.config 2 | 3 | import net.scalytica.kafka.wsproxy._ 4 | import net.scalytica.kafka.wsproxy.codecs.BasicSerdes 5 | import net.scalytica.kafka.wsproxy.codecs.DynamicCfgSerde 6 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 7 | import net.scalytica.kafka.wsproxy.config.DynamicConfigHandlerProtocol._ 8 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 9 | 10 | import org.apache.kafka.clients.consumer.ConsumerConfig._ 11 | import org.apache.kafka.clients.consumer.OffsetResetStrategy.EARLIEST 12 | import org.apache.pekko.actor.typed.ActorSystem 13 | import org.apache.pekko.kafka.ConsumerSettings 14 | import org.apache.pekko.kafka.Subscriptions 15 | import org.apache.pekko.kafka.scaladsl.Consumer 16 | import org.apache.pekko.stream.scaladsl.Source 17 | 18 | /** 19 | * Consumer implementation for reading 20 | * [[net.scalytica.kafka.wsproxy.config.Configuration.DynamicCfg]] messages from 21 | * Kafka. 22 | * 23 | * @param cfg 24 | * The [[AppCfg]] to use. 25 | * @param sys 26 | * The typed [[ActorSystem]] to use.
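 * 
 * Records with a non-null value are mapped to [[UpdateDynamicConfigRecord]] messages, while tombstone records (null values) are mapped to [[RemoveDynamicConfigRecord]] messages.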
27 | */ 28 | private[config] class DynamicConfigConsumer( 29 | implicit cfg: AppCfg, 30 | sys: ActorSystem[_] 31 | ) extends WithProxyLogger { 32 | 33 | private[this] val kDes = BasicSerdes.StringDeserializer 34 | private[this] val vDes = new DynamicCfgSerde().deserializer() 35 | 36 | private[this] val kafkaUrl = cfg.kafkaClient.bootstrapHosts.mkString() 37 | private[this] val dynCfgTopic = cfg.dynamicConfigHandler.topicName.value 38 | 39 | private[this] lazy val cid = dynCfgConsumerGroupId 40 | 41 | private[this] lazy val consumerProps = { 42 | ConsumerSettings(sys, kDes, vDes) 43 | .withBootstrapServers(kafkaUrl) 44 | .withProperties(cfg.consumer.kafkaClientProperties) 45 | .withProperties(consumerMetricsProperties) 46 | .withGroupId(cid) 47 | .withClientId(cid) 48 | .withProperties( 49 | // Always begin at the start of the topic 50 | AUTO_OFFSET_RESET_CONFIG -> EARLIEST.name.toLowerCase, 51 | ENABLE_AUTO_COMMIT_CONFIG -> "false" 52 | ) 53 | } 54 | 55 | /** 56 | * The pekko-stream Source consuming messages from the dynamic config topic. 57 | */ 58 | lazy val dynamicCfgSource: Source[InternalCommand, Consumer.Control] = { 59 | val subscription = Subscriptions.topics(Set(dynCfgTopic)) 60 | 61 | Consumer.plainSource(consumerProps, subscription).map { cr => 62 | Option(cr.value()) 63 | .map { value => 64 | log.trace(s"Received UpdateDynamicConfigRecord for key ${cr.key()}") 65 | UpdateDynamicConfigRecord(cr.key(), value, cr.offset()) 66 | } 67 | .getOrElse { 68 | log.trace(s"Received RemoveDynamicConfigRecord for key ${cr.key()}") 69 | RemoveDynamicConfigRecord(cr.key(), cr.offset()) 70 | } 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/models/WsProxyAuthResult.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.models 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 4 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 5 | 6 | import io.circe.Json 7 | import io.circe.parser._ 8 | import org.apache.pekko.http.scaladsl.model.headers.OAuth2BearerToken 9 | import pdi.jwt.JwtClaim 10 | 11 | sealed trait WsProxyAuthResult { 12 | 13 | def aclCredentials: Option[AclCredentials] = None 14 | def maybeBearerToken: Option[OAuth2BearerToken] = None 15 | 16 | } 17 | 18 | case class JwtAuthResult( 19 | bearerToken: OAuth2BearerToken, 20 | claim: JwtClaim 21 | )(implicit cfg: AppCfg) 22 | extends WsProxyAuthResult 23 | with WithProxyLogger { 24 | 25 | private[this] val maybeCreds = cfg.server.customJwtKafkaCredsKeys 26 | 27 | private[this] def mapClaim[T]( 28 | userKey: String, 29 | passwordKey: String 30 | )( 31 | valid: (String, String) => T 32 | )( 33 | invalid: => T 34 | ): T = { 35 | parse(claim.toJson).toOption 36 | .flatMap { js => 37 | for { 38 | user <- parseKey(userKey, js) 39 | pass <- parseKey(passwordKey, js) 40 | } yield { 41 | log.trace("Correctly parsed custom JWT attributes") 42 | valid(user, pass) 43 | } 44 | } 45 | .getOrElse { 46 | // This should _really_ not happen, since the claim must have been 47 | // correctly parsed before this function is executed.
48 | log.warn("JWT Claim has an invalid format.") 49 | invalid 50 | } 51 | } 52 | 53 | override def maybeBearerToken: Option[OAuth2BearerToken] = Option(bearerToken) 54 | 55 | def isValid: Boolean = { 56 | maybeCreds 57 | .map { case (uk, pk) => 58 | mapClaim(uk, pk)((_, _) => true)(false) 59 | } 60 | .getOrElse { 61 | // Without custom creds, the token is only invalid when Kafka token 62 | // auth is the sole permitted mechanism. 63 | !cfg.server.isKafkaTokenAuthOnlyEnabled 64 | } 65 | } 66 | 67 | override def aclCredentials: Option[AclCredentials] = { 68 | maybeCreds.flatMap { case (uk, pk) => 69 | mapClaim[Option[AclCredentials]](uk, pk) { (user, pass) => 70 | Option(AclCredentials(user, pass)) 71 | }(None) 72 | } 73 | } 74 | 75 | private[this] def parseKey( 76 | key: String, 77 | json: Json 78 | ): Option[String] = { 79 | json.hcursor.downField(key).as[String] match { 80 | case Right(value) => 81 | log.trace(s"Found JWT Kafka credentials key $key with value $value") 82 | Some(value) 83 | case Left(err) => 84 | val msg = "Could not find expected Kafka credentials key " + 85 | s"'$key' in OAuth Bearer token." 86 | log.warn(msg, err) 87 | None 88 | } 89 | } 90 | 91 | } 92 | 93 | case class BasicAuthResult(id: String) extends WsProxyAuthResult 94 | 95 | case object AuthDisabled extends WsProxyAuthResult 96 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/session/SessionDataConsumer.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.session 2 | 3 | import net.scalytica.kafka.wsproxy._ 4 | import net.scalytica.kafka.wsproxy.codecs.BasicSerdes 5 | import net.scalytica.kafka.wsproxy.codecs.SessionSerde 6 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 7 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 8 | 9 | import org.apache.kafka.clients.consumer.ConsumerConfig._ 10 | import org.apache.kafka.clients.consumer.OffsetResetStrategy.EARLIEST 11 | import org.apache.pekko.actor.typed.ActorSystem 12 | import org.apache.pekko.actor.typed.scaladsl.adapter._ 13 | import org.apache.pekko.kafka.ConsumerSettings 14 | import org.apache.pekko.kafka.Subscriptions 15 | import org.apache.pekko.kafka.scaladsl.Consumer 16 | 17 | /** 18 | * Consumer for processing all updates to the session state topic, transforming 19 | * them into [[SessionHandlerProtocol.SessionProtocol]] messages that can be 20 | * sent to the [[SessionHandler]]. 21 | * 22 | * @param cfg 23 | * the [[AppCfg]] to use 24 | * @param sys 25 | * the [[ActorSystem]] to use 26 | */ 27 | private[session] class SessionDataConsumer( 28 | implicit cfg: AppCfg, 29 | sys: ActorSystem[_] 30 | ) extends WithProxyLogger { 31 | 32 | private[this] val kDes = BasicSerdes.StringDeserializer 33 | private[this] val vDes = new SessionSerde().deserializer() 34 | 35 | private[this] val kafkaUrl = cfg.kafkaClient.bootstrapHosts.mkString() 36 | 37 | private[this] lazy val cid = sessionConsumerGroupId 38 | 39 | private[this] val sessionStateTopic = 40 | cfg.sessionHandler.topicName.value 41 | 42 | private[this] lazy val consumerProps = { 43 | ConsumerSettings(sys.toClassic, kDes, vDes) 44 | .withBootstrapServers(kafkaUrl) 45 | .withGroupId(cid) 46 | .withClientId(cid) 47 | .withProperties( 48 | // Always begin at the start of the topic 49 | AUTO_OFFSET_RESET_CONFIG -> EARLIEST.name.toLowerCase, 50 | ENABLE_AUTO_COMMIT_CONFIG -> "false" 51 | ) 52 | .withProperties(cfg.consumer.kafkaClientProperties) 53 | .withProperties(consumerMetricsProperties) 54 | } 55 | 56 | /** 57 | *
The pekko-stream Source consuming messages from the session state topic. 58 | */ 59 | lazy val sessionStateSource: SessionSource = { 60 | val subscription = Subscriptions.topics(Set(sessionStateTopic)) 61 | 62 | Consumer.plainSource(consumerProps, subscription).map { cr => 63 | Option(cr.value) 64 | .map { v => 65 | log.trace(s"Received UpdateSession event for key ${cr.key()}") 66 | SessionHandlerProtocol.UpdateSession( 67 | sessionId = SessionId(cr.key), 68 | s = v, 69 | offset = cr.offset() 70 | ) 71 | } 72 | .getOrElse { 73 | log.trace(s"Received RemoveSession event for key ${cr.key()}") 74 | SessionHandlerProtocol.RemoveSession(SessionId(cr.key), cr.offset()) 75 | } 76 | } 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/models/WsProducerResult.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.models 2 | 3 | import net.scalytica.kafka.wsproxy.errors.ImpossibleError 4 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 5 | 6 | import org.apache.pekko.kafka.ProducerMessage 7 | 8 | /** 9 | * Type that wraps metadata about messages committed to Kafka. 10 | * 11 | * @param topic 12 | * the topic the message was written to. 13 | * @param partition 14 | * the partition the message was written to. 15 | * @param offset 16 | * the partition offset for the written message. 17 | * @param timestamp 18 | * the timestamp the message was received by Kafka. 19 | * @param clientMessageId 20 | * message identifier given by the client to uniquely identify the message. 21 | */ 22 | case class WsProducerResult( 23 | topic: String, 24 | partition: Int = 0, 25 | offset: Long = 0, 26 | timestamp: Long = 0, 27 | clientMessageId: Option[String] = None 28 | ) 29 | 30 | object WsProducerResult extends WithProxyLogger { 31 | 32 | private[this] def fromResult[K, V, PassThrough]( 33 | res: ProducerMessage.Results[K, V, PassThrough] 34 | )( 35 | clientMessageId: PassThrough => Option[String] 36 | ): Seq[WsProducerResult] = { 37 | res match { 38 | case ProducerMessage.Result(md, ProducerMessage.Message(_, pt)) => 39 | log.trace( 40 | "Mapping ProducerMessage.Result to a single WsProducerResult instance" 41 | ) 42 | val pr = WsProducerResult( 43 | topic = md.topic, 44 | partition = md.partition, 45 | offset = md.offset, 46 | timestamp = md.timestamp, 47 | clientMessageId = clientMessageId(pt) 48 | ) 49 | Seq(pr) 50 | 51 | case ProducerMessage.MultiResult(_, _) => 52 | // This should not happen, since the WsProducer does not use the 53 | // ProducerMessage.MultiMessage type when publishing messages to Kafka. 54 | val msg = 55 | "The proxy does not currently support ProducerMessage.MultiResult." 56 | log.error(msg) 57 | throw ImpossibleError(msg) 58 | 59 | case ProducerMessage.PassThroughResult(_) => 60 | log.trace( 61 | "Mapping ProducerMessage.PassThroughResult to WsProducerResult " + 62 | "will be ignored." 63 | ) 64 | Seq.empty 65 | } 66 | } 67 | 68 | /** 69 | * Function to transform an instance of 70 | * [[org.apache.pekko.kafka.ProducerMessage.Results]] to a collection of 71 | * [[WsProducerResult]] instances. 
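 * Note that only single-message results are mapped. The proxy never uses {{{ProducerMessage.MultiMessage}}}, so multi-results raise an error, and pass-through results yield an empty sequence.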
72 | * 73 | * @param res 74 | * The Kafka producer result to transform 75 | * @tparam K 76 | * The type of the key element of the Kafka producer result 77 | * @tparam V 78 | * The type of the value element of the Kafka producer result 79 | * @return 80 | * a collection containing one-to-one transformations of Kafka producer 81 | * results to [[WsProducerResult]]s 82 | */ 83 | def fromProducerResult[K, V]( 84 | res: ProducerMessage.Results[K, V, WsProducerRecord[K, V]] 85 | ): Seq[WsProducerResult] = fromResult(res)(pt => pt.clientMessageId) 86 | 87 | } 88 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/utils/HostResolver.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.utils 2 | 3 | import java.net.InetAddress 4 | 5 | import scala.util.Try 6 | 7 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 8 | import net.scalytica.kafka.wsproxy.config.Configuration.KafkaBootstrapHosts 9 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 10 | import net.scalytica.kafka.wsproxy.utils.BlockingRetry.retry 11 | 12 | object HostResolver extends WithProxyLogger { 13 | 14 | sealed trait HostResolutionResult { 15 | def isSuccess: Boolean 16 | def isError: Boolean = !isSuccess 17 | } 18 | 19 | case class HostResolved(host: InetAddress) extends HostResolutionResult { 20 | override def isSuccess = true 21 | } 22 | 23 | case class HostResolutionError(reason: String) extends HostResolutionResult { 24 | override def isSuccess = false 25 | } 26 | 27 | case class HostResolutionStatus(results: List[HostResolutionResult]) { 28 | 29 | def hasErrors: Boolean = results.exists(_.isError) 30 | 31 | } 32 | 33 | def resolveHost( 34 | host: String, 35 | errMsg: Option[String] = None 36 | ): HostResolutionResult = 37 | Try(InetAddress.getByName(host)) 38 | .map { addr => 39 | log.info(s"Successfully resolved host $host") 40 | HostResolved(addr) 41 | } 42 | .getOrElse { 43 | val msg = errMsg.getOrElse(s"$host could not be resolved") 44 | HostResolutionError(msg) 45 | } 46 | 47 | /** 48 | * Will try to resolve all the Kafka hosts configured in the 49 | * {{{kafka.ws.proxy.kafka-client.bootstrap-hosts}}} configuration property. 50 | * 51 | * This is a workaround for https://github.com/apache/kafka/pull/6305. 52 | * 53 | * @param bootstrapHosts 54 | * the Kafka hosts to resolve 55 | * @param cfg 56 | * the application configuration to use 57 | * @return 58 | * an instance of [[HostResolutionStatus]] containing all 59 | * [[HostResolutionResult]]s for the configured Kafka bootstrap hosts. 60 | */ 61 | def resolveKafkaBootstrapHosts( 62 | bootstrapHosts: KafkaBootstrapHosts 63 | )(implicit cfg: AppCfg): HostResolutionStatus = { 64 | val timeout = cfg.kafkaClient.brokerResolutionTimeout 65 | 66 | val resErrStr = (host: String) => 67 | s"$host could not be resolved within time limit of $timeout." 68 | 69 | if (bootstrapHosts.hosts.nonEmpty) { 70 | val resolutions = bootstrapHosts.hostStrings.map { host => 71 | retry[HostResolutionResult]( 72 | timeout = timeout, 73 | interval = cfg.server.brokerResolutionRetryInterval, 74 | numRetries = cfg.server.brokerResolutionRetries 75 | ) { 76 | resolveHost(host, Some(resErrStr(host))) 77 | } { _ => 78 | log.warn(s"Resolution of host $host failed, no more retries.") 79 | HostResolutionError( 80 | resErrStr(host)
81 | ) 82 | } 83 | } 84 | HostResolutionStatus(resolutions) 85 | } else { 86 | HostResolutionStatus(List(HostResolutionError("NOT DEFINED"))) 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/auth/UrlJwkProviderSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.auth 2 | 3 | import org.apache.pekko.util.Timeout 4 | import net.scalytica.test.WsProxySpec 5 | import org.scalatest.Inspectors.forAll 6 | import org.scalatest.concurrent.ScalaFutures 7 | import org.scalatest.matchers.must.Matchers 8 | import org.scalatest.time.{Minute, Span} 9 | import org.scalatest.wordspec.AnyWordSpec 10 | import org.scalatest.{Assertion, OptionValues, TryValues} 11 | import pdi.jwt.JwtAlgorithm 12 | 13 | import scala.concurrent.duration._ 14 | 15 | class UrlJwkProviderSpec 16 | extends AnyWordSpec 17 | with WsProxySpec 18 | with Matchers 19 | with ScalaFutures 20 | with OptionValues 21 | with TryValues { 22 | 23 | override protected val testTopicPrefix: String = "jwk-test-topic" 24 | 25 | implicit override val patienceConfig: PatienceConfig = 26 | PatienceConfig(timeout = Span(1, Minute)) 27 | 28 | implicit val timeout: Timeout = 3 seconds 29 | 30 | val validHost = "samples.auth0.com" 31 | val validCfgUri = ".well-known/jwks.json" 32 | val validJwksUrl1 = s"https://$validHost/$validCfgUri" 33 | val validJwksUrl2 = "https://www.googleapis.com/oauth2/v3/certs" 34 | 35 | val unresolvableHost = "foo.bar.baz" 36 | val unresolvableCfgUrl = s"https://$validHost/wrong/location" 37 | 38 | val auth0ValidKid = "UW65QUcG827BofOEU7QXY" 39 | 40 | private[this] def validateJwk(jwk: Jwk): Assertion = { 41 | jwk.kty mustBe Jwk.PubKeyAlgo 42 | jwk.use.value mustBe "sig" 43 | jwk.alg.value mustBe JwtAlgorithm.RS256.name 44 | jwk.kid must not be empty 45 | jwk.n must not be empty 46 | jwk.e must not be empty 47 | } 48 | 49 | "The UrlJwkProvider" should { 50 | 51 | "successfully initialize" in { 52 | new UrlJwkProvider(validJwksUrl1) 53 | } 54 | 55 | "fail when initializing with a URL with missing https:// prefix" in { 56 | assertThrows[IllegalArgumentException] { 57 | new UrlJwkProvider(s"$validHost") 58 | } 59 | } 60 | 61 | "fail when initializing with a URL with unresolvable host" in { 62 | assertThrows[IllegalArgumentException] { 63 | new UrlJwkProvider(s"https://$unresolvableHost") 64 | } 65 | } 66 | 67 | s"successfully load jwks configuration from $validJwksUrl1" in { 68 | val res1 = new UrlJwkProvider(validJwksUrl1).load().futureValue 69 | 70 | res1 must not be empty 71 | forAll(res1)(validateJwk) 72 | } 73 | s"successfully load jwks configuration from $validJwksUrl2" in { 74 | val res2 = new UrlJwkProvider(validJwksUrl2).load().futureValue 75 | 76 | res2 must not be empty 77 | forAll(res2)(validateJwk) 78 | } 79 | 80 | "successfully get a key from the jwks configuration" in { 81 | val jwk = new UrlJwkProvider(validJwksUrl1) 82 | .get(auth0ValidKid) 83 | .futureValue 84 | .success 85 | .value 86 | 87 | validateJwk(jwk) 88 | jwk.kid.value mustBe auth0ValidKid 89 | } 90 | 91 | "return an empty list when the config cannot be found" in { 92 | new UrlJwkProvider(unresolvableCfgUrl).load().futureValue mustBe empty 93 | } 94 | } 95 | 96 | } 97 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/logging/WsProxyEnvLoggerConfigurator.scala: 
-------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.logging 2 | 3 | import java.io.ByteArrayInputStream 4 | 5 | import net.scalytica.kafka.wsproxy.StringExtensions 6 | import net.scalytica.kafka.wsproxy.utils.{WsProxyEnvLoader => env} 7 | 8 | import ch.qos.logback.classic.Level 9 | import ch.qos.logback.classic.joran.JoranConfigurator 10 | import ch.qos.logback.classic.{Logger => LogbackLogger} 11 | import org.slf4j.ILoggerFactory 12 | import org.slf4j.LoggerFactory 13 | 14 | trait WsProxyEnvLoggerConfigurator { 15 | 16 | val RootLogger = "ROOT" 17 | val PropPrefix = "logger." 18 | val LogbackCfgEnv = "WSPROXY_LOGBACK_XML_CONFIG" 19 | 20 | private[this] lazy val loggerFactory: ILoggerFactory = 21 | LoggerFactory.getILoggerFactory 22 | 23 | private[this] val log = 24 | loggerFactory.getLogger(getClass.getCanonicalName) 25 | 26 | private[this] lazy val ctx = loggerFactory 27 | .getLogger(RootLogger) 28 | .asInstanceOf[LogbackLogger] 29 | .getLoggerContext 30 | 31 | private[this] def createJoranCfg(): JoranConfigurator = { 32 | val c = new JoranConfigurator 33 | c.setContext(ctx) 34 | currentConfigurator = Some(c) 35 | c 36 | } 37 | 38 | private var currentConfigurator: Option[JoranConfigurator] = None 39 | 40 | private[this] def configurator(reuse: Boolean): JoranConfigurator = { 41 | log.trace("Initialising logback configurator...") 42 | currentConfigurator 43 | .map(curr => if (!reuse) createJoranCfg() else curr) 44 | .getOrElse(createJoranCfg()) 45 | } 46 | 47 | def load(): Unit = { 48 | log.trace("Checking environment for logback configurations...") 49 | if (!env.hasKey(LogbackCfgEnv)) loadLoggersFromEnv() 50 | else loadConfigStringFromEnv() 51 | } 52 | 53 | def reload(): Unit = { 54 | log.trace("Reloading logback...") 55 | reset() 56 | load() 57 | } 58 | 59 | private[logging] def reset(): Unit = { 60 | log.trace("Resetting logback context...") 61 | configurator(true).setContext(ctx) 62 | } 63 | 64 | private[this] def loadLoggersFromEnv(): Unit = { 65 | log.trace("Loading loggers from environment...") 66 | val loggers = env.properties.view.filterKeys(_.startsWith(PropPrefix)).toMap 67 | loggers 68 | .map(kv => kv._1.stripPrefix(PropPrefix) -> Level.toLevel(kv._2)) 69 | .foreach(kv => setLoggerLevel(kv._1, kv._2)) 70 | } 71 | 72 | private[this] def loadConfigStringFromEnv(): Unit = { 73 | env.properties 74 | .find(p => p._1.equals(LogbackCfgEnv) && p._2.safeNonEmpty) 75 | .foreach(kv => loadConfigString(kv._2)) 76 | } 77 | 78 | private[this] def setLoggerLevel(loggerName: String, level: Level): Unit = 79 | loggerFactory.synchronized { 80 | loggerFactory 81 | .getLogger(loggerName) 82 | .asInstanceOf[LogbackLogger] 83 | .setLevel(level) 84 | } 85 | 86 | private[logging] def loadConfigString(cfg: String): Unit = 87 | loggerFactory.synchronized { 88 | ctx.reset() 89 | val is = new ByteArrayInputStream(cfg.getBytes) 90 | configurator(false).doConfigure(is) 91 | } 92 | 93 | } 94 | 95 | object WsProxyEnvLoggerConfigurator extends WsProxyEnvLoggerConfigurator 96 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/producer/ProducerFlowExtras.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.producer 2 | 3 | import scala.concurrent.ExecutionContext 4 | import scala.concurrent.duration._ 5 | 6 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 7 | import 
net.scalytica.kafka.wsproxy.config.Configuration.ClientSpecificLimitCfg 8 | import net.scalytica.kafka.wsproxy.logging.WithProxyLogger 9 | import net.scalytica.kafka.wsproxy.models.InSocketArgs 10 | import net.scalytica.kafka.wsproxy.models.ProducerEmptyMessage 11 | import net.scalytica.kafka.wsproxy.models.WsProducerId 12 | import net.scalytica.kafka.wsproxy.models.WsProducerRecord 13 | import net.scalytica.kafka.wsproxy.streams.ProxyFlowExtras 14 | 15 | import io.circe.Decoder 16 | import org.apache.pekko.NotUsed 17 | import org.apache.pekko.http.scaladsl.model.ws.Message 18 | import org.apache.pekko.stream.Materializer 19 | import org.apache.pekko.stream.OverflowStrategy 20 | import org.apache.pekko.stream.scaladsl.Flow 21 | 22 | private[producer] trait ProducerFlowExtras 23 | extends ProxyFlowExtras 24 | with InputJsonParser { 25 | self: WithProxyLogger => 26 | 27 | /** Convenience function for logging an error and emitting a fallback value in a Flow */ 28 | def logAndEmpty[T](msg: String, t: Throwable)(empty: T): T = { 29 | log.error(msg, t) 30 | empty 31 | } 32 | 33 | def rateLimitedJsonToWsProducerRecordFlow[K, V]( 34 | args: InSocketArgs 35 | )( 36 | implicit cfg: AppCfg, 37 | kd: Decoder[K], 38 | vd: Decoder[V], 39 | ec: ExecutionContext, 40 | mat: Materializer 41 | ): Flow[Message, WsProducerRecord[K, V], NotUsed] = 42 | (rateLimiter(args) via wsMessageToStringFlow) 43 | .recover { case t: Exception => 44 | logAndEmpty("There was an error processing a JSON message", t)("") 45 | } 46 | .map(str => parseInput[K, V](str)) 47 | .recover { case t: Exception => 48 | logAndEmpty("JSON message could not be parsed", t)( 49 | ProducerEmptyMessage 50 | ) 51 | } 52 | .filter(_.nonEmpty) 53 | 54 | def rateLimiter(args: InSocketArgs)( 55 | implicit cfg: AppCfg 56 | ): Flow[Message, Message, NotUsed] = { 57 | val defaultMps = cfg.producer.limits.defaultMessagesPerSecond 58 | rateLimitFlow( 59 | args.producerId, 60 | defaultMps, 61 | cfg.producer.limits.clientSpecificLimits 62 | ) 63 | } 64 | 65 | def rateLimitFlow( 66 | producerId: WsProducerId, 67 | defaultMessagesPerSecond: Int, 68 | clientLimits: Seq[ClientSpecificLimitCfg] 69 | ): Flow[Message, Message, NotUsed] = { 70 | val mps = clientLimits 71 | .find(_.id.equals(producerId.value)) 72 | .flatMap(_.messagesPerSecond) 73 | .getOrElse(defaultMessagesPerSecond) 74 | 75 | if (mps == 0) { 76 | Flow[Message].log("rateLimiterFlow", _ => "no rate limiting") 77 | } else { 78 | Flow[Message] 79 | .log("rateLimiterFlow", _ => s"Limiting to $mps messages per second") 80 | .buffer(mps, OverflowStrategy.backpressure) 81 | .throttle(mps, 1 second) 82 | } 83 | } 84 | 85 | } 86 | 87 | private[producer] object ProducerFlowExtras 88 | extends ProducerFlowExtras 89 | with WithProxyLogger 90 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/consumer/CommitStackHandler.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.consumer 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 4 | import net.scalytica.kafka.wsproxy.consumer.CommitStackTypes._ 5 | import net.scalytica.kafka.wsproxy.models.WsCommit 6 | import net.scalytica.kafka.wsproxy.models.WsConsumerRecord 7 | 8 | import org.apache.pekko.actor.typed.ActorRef 9 | import org.apache.pekko.actor.typed.Behavior 10 | import org.apache.pekko.actor.typed.scaladsl.Behaviors 11 | import org.apache.pekko.kafka.CommitterSettings 12 | import 
org.apache.pekko.stream.Materializer 13 | 14 | object CommitStackHandler { 15 | 16 | /** ADT defining the valid protocol for the [[CommitStackHandler]] */ 17 | sealed trait CommitProtocol 18 | 19 | case class Stash(record: WsConsumerRecord[_, _]) extends CommitProtocol 20 | case class Commit(commit: WsCommit) extends CommitProtocol 21 | case class GetStack(sender: ActorRef[CommitStack]) extends CommitProtocol 22 | case object Continue extends CommitProtocol 23 | case object Stop extends CommitProtocol 24 | case class Terminate(cause: Throwable) extends CommitProtocol 25 | 26 | /** Behaviour initialising the message commit stack */ 27 | def commitStack( 28 | implicit cfg: AppCfg, 29 | mat: Materializer 30 | ): Behavior[CommitProtocol] = { 31 | implicit val cs: CommitterSettings = CommitterSettings.create(mat.system) 32 | committableStack() 33 | } 34 | 35 | /** 36 | * Behavior implementation for the [[CommitStackHandler]]. New [[Uncommitted]] 37 | * messages are appended at the end of the [[CommitStack]]. Whenever a [[Commit]] is 38 | * received, the stack is flushed of all messages that are older than the 39 | * message being committed. This way the number of messages in the stack can 40 | * be somewhat controlled. 41 | * 42 | * @param stack 43 | * the message stack of [[Uncommitted]] messages 44 | * @return 45 | * a Behavior describing the [[CommitStackHandler]]. 46 | */ 47 | private[this] def committableStack( 48 | stack: CommitStack = CommitStack.empty 49 | )( 50 | implicit cfg: AppCfg, 51 | cs: CommitterSettings, 52 | mat: Materializer 53 | ): Behavior[CommitProtocol] = 54 | Behaviors.setup { implicit ctx => 55 | Behaviors.receiveMessage { 56 | case Stash(record) => 57 | stack.stash(record).map(committableStack).getOrElse(Behaviors.same) 58 | 59 | case Commit(wsc) => 60 | stack 61 | .commit(wsc.wsProxyMessageId) 62 | .map(committableStack) 63 | .getOrElse(Behaviors.same) 64 | 65 | case Continue => 66 | Behaviors.same 67 | 68 | case GetStack(to) => 69 | ctx.log.debug("Current stack is:\n" + stack.mkString("\n")) 70 | to ! 
stack 71 | Behaviors.same 72 | 73 | case Stop => 74 | ctx.log.debug("Received stop message") 75 | Behaviors.stopped 76 | 77 | case Terminate(cause) => 78 | ctx.log.error("Terminating commit stack handler due to error", cause) 79 | Behaviors.stopped 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/web/SchemaRoutesSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.web 2 | 3 | import org.apache.pekko.http.scaladsl.model.StatusCodes._ 4 | import org.apache.pekko.http.scaladsl.server._ 5 | import org.apache.pekko.http.scaladsl.testkit.RouteTestTimeout 6 | import io.circe.Json 7 | import net.scalytica.kafka.wsproxy.auth.OpenIdClient 8 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 9 | import net.scalytica.test._ 10 | import org.scalatest.concurrent.ScalaFutures 11 | import org.scalatest.time.{Minutes, Span} 12 | import org.scalatest.wordspec.AnyWordSpec 13 | import org.scalatest.{EitherValues, OptionValues} 14 | 15 | import scala.concurrent.duration._ 16 | 17 | class SchemaRoutesSpec 18 | extends AnyWordSpec 19 | with TestSchemaRoutes 20 | with EitherValues 21 | with OptionValues 22 | with ScalaFutures 23 | with WsProxySpec { 24 | 25 | override protected val testTopicPrefix: String = "schema-routes-test-topic" 26 | 27 | implicit override val patienceConfig: PatienceConfig = 28 | PatienceConfig(timeout = Span(2, Minutes)) 29 | 30 | implicit val timeout: RouteTestTimeout = RouteTestTimeout(20 seconds) 31 | 32 | implicit val oidClient: Option[OpenIdClient] = None 33 | 34 | val NoAvroSupportStr = 35 | "Kafka WebSocket Proxy does not implement an Avro protocol any more." 
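// A hedged sketch (not the actual SchemaRoutes implementation) of how every
// legacy Avro schema path could be answered with the notice above, assuming
// only standard pekko-http directives are in scope; avroRemovedRoute is a
// hypothetical name used purely for illustration:
//
//   import org.apache.pekko.http.scaladsl.server.Directives._
//   val avroRemovedRoute: Route =
//     pathPrefix("schemas" / "avro") {
//       complete(NoAvroSupportStr)
//     }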
36 | 37 | "The schema routes" should { 38 | "return HTTP 404 when requesting an invalid resource" in { 39 | implicit val cfg: AppCfg = plainTestConfig() 40 | 41 | val expected = Json 42 | .obj( 43 | "message" -> Json 44 | .fromString("This is not the resource you are looking for.") 45 | ) 46 | .spaces2 47 | 48 | Get() ~> Route.seal(schemaRoutes) ~> check { 49 | status mustBe NotFound 50 | responseAs[String] mustBe expected 51 | } 52 | } 53 | 54 | "return the Avro removal notice for producer records" in { 55 | implicit val cfg: AppCfg = plainTestConfig() 56 | 57 | Get("/schemas/avro/producer/record") ~> 58 | Route.seal(schemaRoutes) ~> 59 | check { 60 | status mustBe OK 61 | responseAs[String] mustBe NoAvroSupportStr 62 | } 63 | } 64 | 65 | "return the Avro removal notice for producer results" in { 66 | implicit val cfg: AppCfg = plainTestConfig() 67 | 68 | Get("/schemas/avro/producer/result") ~> 69 | Route.seal(schemaRoutes) ~> 70 | check { 71 | status mustBe OK 72 | responseAs[String] mustBe NoAvroSupportStr 73 | } 74 | } 75 | 76 | "return the Avro removal notice for consumer record" in { 77 | implicit val cfg: AppCfg = plainTestConfig() 78 | 79 | Get("/schemas/avro/consumer/record") ~> 80 | Route.seal(schemaRoutes) ~> 81 | check { 82 | status mustBe OK 83 | responseAs[String] mustBe NoAvroSupportStr 84 | } 85 | } 86 | 87 | "return the Avro removal notice for consumer commit" in { 88 | implicit val cfg: AppCfg = plainTestConfig() 89 | 90 | Get("/schemas/avro/consumer/commit") ~> 91 | Route.seal(schemaRoutes) ~> 92 | check { 93 | status mustBe OK 94 | responseAs[String] mustBe NoAvroSupportStr 95 | } 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/test/WsClientSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.test 2 | 3 | import org.apache.pekko.http.scaladsl.model.headers.{ 4 | Authorization, 5 | BasicHttpCredentials, 6 | HttpCredentials 7 | } 8 | import org.apache.pekko.http.scaladsl.server.Route 9 | import org.apache.pekko.http.scaladsl.testkit.{ 10 | RouteTestTimeout, 11 | ScalatestRouteTest, 12 | WSProbe 13 | } 14 | import org.apache.pekko.testkit.TestDuration 15 | import net.scalytica.kafka.wsproxy.auth.OpenIdClient 16 | import net.scalytica.kafka.wsproxy.config.Configuration.AppCfg 17 | import net.scalytica.kafka.wsproxy.config.ReadableDynamicConfigHandlerRef 18 | import net.scalytica.kafka.wsproxy.session.SessionHandlerRef 19 | import net.scalytica.kafka.wsproxy.web.Headers.XKafkaAuthHeader 20 | import org.scalatest.Suite 21 | import org.scalatest.matchers.must.Matchers 22 | 23 | import scala.concurrent.duration._ 24 | 25 | trait WsClientSpec 26 | extends ScalatestRouteTest 27 | with TestServerRoutes 28 | with Matchers { self: Suite => 29 | 30 | implicit private[this] val routeTestTimeout: RouteTestTimeout = 31 | RouteTestTimeout((20 seconds).dilated) 32 | 33 | protected def wsRouteFrom( 34 | implicit cfg: AppCfg, 35 | shRef: SessionHandlerRef, 36 | optReadCfgRef: Option[ReadableDynamicConfigHandlerRef], 37 | optOidc: Option[OpenIdClient] 38 | ): Route = wsProxyRoutes 39 | 40 | /** Verify the server routes using an unsecured Kafka cluster */ 41 | private[this] def defaultRouteCheck[T]( 42 | uri: String, 43 | routes: Route, 44 | creds: Option[HttpCredentials] = None 45 | )( 46 | body: => T 47 | )( 48 | implicit probe: WSProbe 49 | ): T = { 50 | creds match { 51 | case None => WS(uri, probe.flow) ~> routes ~> check(body) 52 | case Some(c) => 53 | val 
authHeader = addHeader(Authorization(c)) 54 | WS(uri, probe.flow) ~> authHeader ~> routes ~> check(body) 55 | } 56 | } 57 | 58 | /** Verify the server routes using a secured Kafka cluster */ 59 | private[this] def secureKafkaRouteCheck[T]( 60 | uri: String, 61 | routes: Route, 62 | kafkaCreds: XKafkaAuthHeader, 63 | creds: Option[HttpCredentials] = None 64 | )( 65 | body: => T 66 | )( 67 | implicit wsClient: WSProbe 68 | ): T = { 69 | val headers = creds match { 70 | case Some(c) => addHeaders(Authorization(c), kafkaCreds) 71 | case None => addHeader(kafkaCreds) 72 | } 73 | WS(uri, wsClient.flow) ~> headers ~> routes ~> check(body) 74 | } 75 | 76 | /** Set the X-Kafka-Auth header */ 77 | def addKafkaCreds(creds: BasicHttpCredentials): RequestTransformer = { 78 | val kaHeader = XKafkaAuthHeader(creds) 79 | addHeader(kaHeader) 80 | } 81 | 82 | /** Open a WebSocket against the given routes and run the assertions in the body */ 83 | def inspectWebSocket[T]( 84 | uri: String, 85 | routes: Route, 86 | kafkaCreds: Option[BasicHttpCredentials] = None, 87 | creds: Option[HttpCredentials] = None 88 | )(body: => T)(implicit wsClient: WSProbe): T = { 89 | val u = creds.map(c => uri + s"&access_token=${c.token}").getOrElse(uri) 90 | 91 | kafkaCreds 92 | .map(c => secureKafkaRouteCheck(u, routes, XKafkaAuthHeader(c))(body)) 93 | .getOrElse(defaultRouteCheck(u, routes)(body)) 94 | } 95 | 96 | } 97 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/errors/errors.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.errors 2 | 3 | import scala.util.control.NoStackTrace 4 | 5 | case class ConfigurationError(message: String) extends RuntimeException(message) 6 | 7 | case class FatalProxyServerError( 8 | message: String, 9 | cause: Option[Throwable] = None 10 | ) extends RuntimeException(message, cause.orNull) 11 | 12 | abstract class ProxyRequestError( 13 | msg: String, 14 | cause: Option[Throwable] = None 15 | ) extends Exception(msg, cause.orNull) 16 | 17 | case class RequestValidationError( 18 | msg: String, 19 | cause: Option[Throwable] = None 20 | ) extends ProxyRequestError(msg, cause) 21 | with NoStackTrace 22 | 23 | abstract class ProxyError( 24 | msg: String, 25 | cause: Option[Throwable] = None 26 | ) extends Exception(msg, cause.orNull) 27 | 28 | case class ImpossibleError( 29 | msg: String, 30 | cause: Option[Throwable] = None 31 | ) extends ProxyError(msg, cause) 32 | with NoStackTrace 33 | 34 | case class UnexpectedError( 35 | msg: String, 36 | cause: Option[Throwable] = None 37 | ) extends ProxyError(msg, cause) 38 | with NoStackTrace 39 | 40 | case class TrivialError( 41 | msg: String, 42 | cause: Option[Throwable] = None 43 | ) extends ProxyError(msg, cause) 44 | with NoStackTrace 45 | 46 | case class RetryFailedError( 47 | msg: String, 48 | cause: Option[Throwable] = None 49 | ) extends ProxyError(msg, cause) 50 | with NoStackTrace 51 | 52 | case class InvalidSessionStateFormat( 53 | msg: String, 54 | cause: Option[Throwable] = None 55 | ) extends ProxyError(msg, cause) 56 | with NoStackTrace 57 | 58 | case class InvalidDynamicCfg( 59 | msg: String, 60 | cause: Option[Throwable] = None 61 | ) extends ProxyError(msg, cause) 62 | with NoStackTrace 63 | 64 | case class OpenIdConnectError(message: String, cause: Option[Throwable] = None) 65 | extends ProxyError(message, cause) 66 | with NoStackTrace 67 | 68 | case class TopicNotFoundError(message: String) 69 | extends 
ProxyError(message) 70 | with NoStackTrace 71 | 72 | case class IllegalFormatTypeError(message: String) extends ProxyError(message) 73 | 74 | abstract class ProxyAuthError(msg: String, cause: Option[Throwable] = None) 75 | extends ProxyError(msg, cause) 76 | 77 | case class AuthenticationError(message: String, cause: Option[Throwable] = None) 78 | extends ProxyAuthError(message, cause) 79 | with NoStackTrace 80 | 81 | case class AuthorisationError(message: String, cause: Option[Throwable] = None) 82 | extends ProxyAuthError(message, cause) 83 | with NoStackTrace 84 | 85 | case class InvalidTokenError(message: String, cause: Option[Throwable] = None) 86 | extends ProxyAuthError(message, cause) 87 | with NoStackTrace 88 | 89 | sealed abstract class JwkError(msg: String, throwable: Option[Throwable]) 90 | extends ProxyAuthError(msg, throwable) 91 | 92 | case class InvalidPublicKeyError( 93 | message: String, 94 | cause: Option[Throwable] = None 95 | ) extends JwkError(message, cause) 96 | 97 | case class SigningKeyNotFoundError( 98 | message: String, 99 | cause: Option[Throwable] = None 100 | ) extends JwkError(message, cause) 101 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/jmx/mbeans/ProducerClientStatsMXBeanSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.jmx.mbeans 2 | 3 | import net.scalytica.kafka.wsproxy.jmx.mbeans.ProducerClientStatsProtocol._ 4 | import net.scalytica.kafka.wsproxy.jmx.producerStatsName 5 | import net.scalytica.kafka.wsproxy.models.{ 6 | FullProducerId, 7 | WsProducerId, 8 | WsProducerInstanceId 9 | } 10 | import org.apache.pekko.actor.typed.Behavior 11 | import org.scalatest.Assertion 12 | 13 | import scala.concurrent.duration._ 14 | 15 | // scalastyle:off magic.number 16 | class ProducerClientStatsMXBeanSpec extends MXBeanSpecLike { 17 | 18 | override protected val testTopicPrefix: String = 19 | "producerstats-mxbean-test-topic" 20 | 21 | val pid: WsProducerId = WsProducerId("bean-spec") 22 | val iid: WsProducerInstanceId = WsProducerInstanceId("bean-spec-instance") 23 | val fid: FullProducerId = FullProducerId(pid, Option(iid)) 24 | val fidNoInst: FullProducerId = FullProducerId(pid, None) 25 | 26 | val beanName1: String = producerStatsName(fid) 27 | val beanName2: String = producerStatsName(fidNoInst) 28 | 29 | val behavior1: Behavior[ProducerClientStatsCommand] = 30 | ProducerClientStatsMXBeanActor( 31 | fullProducerId = fid, 32 | useAutoAggregation = false 33 | ) 34 | 35 | val behavior2: Behavior[ProducerClientStatsCommand] = 36 | ProducerClientStatsMXBeanActor( 37 | fullProducerId = fidNoInst, 38 | useAutoAggregation = false 39 | ) 40 | 41 | def jmxContext( 42 | beanName: String, 43 | behavior: Behavior[ProducerClientStatsCommand] 44 | ): Assertion = { 45 | val ref = tk.spawn(behavior, beanName) 46 | val probe = 47 | tk.createTestProbe[ProducerClientStatsResponse](s"$beanName-probe") 48 | 49 | val proxy = jmxq.getMxBean(beanName, classOf[ProducerClientStatsMXBean]) 50 | 51 | ref ! IncrementRecordsReceived(probe.ref) 52 | probe.expectMessage(1 second, RecordsReceivedIncremented) 53 | ref ! UpdateRecordsPerHour(1, probe.ref) 54 | probe.expectMessage(1 second, RecordsPerHourUpdated) 55 | ref ! 
UpdateRecordsPerMinute(1, probe.ref) 56 | probe.expectMessage(1 second, RecordsPerMinuteUpdated) 57 | 58 | proxy.getNumRecordsReceivedTotal mustBe 1 59 | proxy.getNumRecordsReceivedLastMinute mustBe 1 60 | proxy.getNumRecordsReceivedLastHour mustBe 1 61 | 62 | ref ! IncrementAcksSent(probe.ref) 63 | probe.expectMessage(1 second, AcksSentIncremented) 64 | ref ! UpdateAcksPerHour(1, probe.ref) 65 | probe.expectMessage(1 second, AcksPerHourUpdated) 66 | ref ! UpdateAcksPerMinute(1, probe.ref) 67 | probe.expectMessage(1 second, AcksPerMinuteUpdated) 68 | 69 | proxy.getNumAcksSentTotal mustBe 1 70 | proxy.getNumAcksSentLastMinute mustBe 1 71 | proxy.getNumAcksSentLastHour mustBe 1 72 | 73 | ref ! Stop 74 | probe.expectTerminated(ref) 75 | 76 | jmxq.findByTypeAndName[ProducerClientStatsMXBean](beanName) mustBe None 77 | } 78 | 79 | "The ProducerClientStatsMxBean" when { 80 | 81 | "provided with both producerId and instanceId" should { 82 | "increment the bean counters" in jmxContext(beanName1, behavior1) 83 | } 84 | 85 | "provided with only producerId" should { 86 | "increment the bean counters" in jmxContext(beanName2, behavior2) 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/streams/ProxyFlowExtrasSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.streams 2 | 3 | import net.scalytica.test.SharedAttributes.defaultTypesafeConfig 4 | import net.scalytica.test.WsProxySpec 5 | import org.apache.pekko.actor.testkit.typed.scaladsl._ 6 | import org.apache.pekko.actor.typed.ActorSystem 7 | import org.apache.pekko.http.scaladsl.model.ws.{BinaryMessage, TextMessage} 8 | import org.apache.pekko.stream.Materializer 9 | import org.apache.pekko.stream.scaladsl._ 10 | import org.apache.pekko.util.ByteString 11 | import org.scalatest.concurrent.{Eventually, ScalaFutures} 12 | import org.scalatest.matchers.must.Matchers 13 | import org.scalatest.time.{Minute, Span} 14 | import org.scalatest.wordspec.AnyWordSpec 15 | import org.scalatest.{BeforeAndAfterAll, OptionValues} 16 | 17 | import java.nio.charset.StandardCharsets 18 | 19 | class ProxyFlowExtrasSpec 20 | extends AnyWordSpec 21 | with ProxyFlowExtras 22 | with WsProxySpec 23 | with BeforeAndAfterAll 24 | with Matchers 25 | with OptionValues 26 | with Eventually 27 | with ScalaFutures { 28 | 29 | override protected val testTopicPrefix: String = 30 | "proxy-flow-extras-test-topic" 31 | 32 | implicit override val patienceConfig: PatienceConfig = 33 | PatienceConfig(timeout = Span(1, Minute)) 34 | 35 | val atk: ActorTestKit = 36 | ActorTestKit("proxy-flow-extras-spec", defaultTypesafeConfig) 37 | 38 | implicit val as: ActorSystem[Nothing] = atk.system 39 | implicit val mat: Materializer = Materializer.matFromSystem 40 | 41 | override def afterAll(): Unit = { 42 | mat.shutdown() 43 | as.terminate() 44 | atk.shutdownTestKit() 45 | 46 | super.afterAll() 47 | } 48 | 49 | private[this] val theString = "This is a test" 50 | 51 | private[this] val binaryData = 52 | BinaryMessage(ByteString(theString, StandardCharsets.UTF_8)) 53 | 54 | private[this] val textData = TextMessage(theString) 55 | 56 | "ProxyFlowExtras" when { 57 | "expecting a text message" should { 58 | "not convert binary messages" in { 59 | Source 60 | .single(binaryData) 61 | .via(wsMessageToStringFlow) 62 | .runWith(Sink.seq[String]) 63 | .futureValue must have size 0 64 | } 65 | 66 | "successfully convert a 
text message" in { 67 | val result = Source 68 | .single(textData) 69 | .via(wsMessageToStringFlow) 70 | .runWith(Sink.seq[String]) 71 | .futureValue 72 | 73 | result must have size 1 74 | result.headOption.value mustBe theString 75 | } 76 | } 77 | 78 | "expecting a binary message" should { 79 | "not convert text messages" in { 80 | Source 81 | .single(textData) 82 | .via(wsMessageToByteStringFlow) 83 | .runWith(Sink.seq[ByteString]) 84 | .futureValue must have size 0 85 | } 86 | "successfully convert a binary message" in { 87 | val result = Source 88 | .single(binaryData) 89 | .via(wsMessageToByteStringFlow) 90 | .runWith(Sink.seq[ByteString]) 91 | .futureValue 92 | 93 | result must have size 1 94 | result.headOption.value mustBe ByteString(theString) 95 | } 96 | } 97 | } 98 | 99 | } 100 | -------------------------------------------------------------------------------- /server/src/main/scala/net/scalytica/kafka/wsproxy/config/DynamicConfigHandlerProtocol.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.config 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.DynamicCfg 4 | 5 | import org.apache.pekko.actor.typed.ActorRef 6 | 7 | object DynamicConfigHandlerProtocol { 8 | 9 | /* 10 | Response types from the DynamicConfigHandler 11 | */ 12 | 13 | sealed trait DynamicConfigOpResult 14 | 15 | final case class ConfigsRestored() extends DynamicConfigOpResult 16 | final case class RestoringConfigs() extends DynamicConfigOpResult 17 | final case class ConfigSaved(cfg: DynamicCfg) extends DynamicConfigOpResult 18 | final case class ConfigRemoved(topicKey: String) extends DynamicConfigOpResult 19 | final case class RemovedAllConfigs() extends DynamicConfigOpResult 20 | final case class Stopped() extends DynamicConfigOpResult 21 | final case class ConfigNotFound(key: String) extends DynamicConfigOpResult 22 | final case class FoundActiveConfigs(dc: DynamicConfigurations) 23 | extends DynamicConfigOpResult 24 | final case class FoundConfig(key: String, cfg: DynamicCfg) 25 | extends DynamicConfigOpResult 26 | final case class IncompleteOp(reason: String, t: Option[Throwable] = None) 27 | extends DynamicConfigOpResult 28 | final case class InvalidKey(maybeKey: Option[String] = None) 29 | extends DynamicConfigOpResult 30 | 31 | /** Main protocol to be used by the dynamic config handler */ 32 | sealed trait DynamicConfigProtocol 33 | 34 | /* 35 | Operational commands 36 | */ 37 | 38 | sealed trait OpCommand extends DynamicConfigProtocol 39 | 40 | final case class CheckIfReady(replyTo: ActorRef[DynamicConfigOpResult]) 41 | extends OpCommand 42 | 43 | final case class StopConfigHandler(replyTo: ActorRef[DynamicConfigOpResult]) 44 | extends OpCommand 45 | 46 | /* 47 | Allowed commands for the DynamicConfigHandler 48 | */ 49 | 50 | sealed trait Command extends DynamicConfigProtocol 51 | 52 | final case class Save( 53 | cfg: DynamicCfg, 54 | replyTo: ActorRef[DynamicConfigOpResult] 55 | ) extends Command { 56 | 57 | def topicKey: Option[String] = dynamicCfgTopicKey(cfg) 58 | 59 | } 60 | 61 | final case class Remove( 62 | topicKey: String, 63 | replyTo: ActorRef[DynamicConfigOpResult] 64 | ) extends Command 65 | 66 | final case class RemoveAll( 67 | replyTo: ActorRef[DynamicConfigOpResult] 68 | ) extends Command 69 | 70 | final case class GetAll( 71 | replyTo: ActorRef[DynamicConfigOpResult] 72 | ) extends Command 73 | 74 | final case class FindConfig( 75 | key: String, 76 | replyTo: 
ActorRef[DynamicConfigOpResult] 77 | ) extends Command 78 | 79 | /* 80 | Messages received from [[DynamicConfigConsumer]] 81 | */ 82 | 83 | sealed private[config] trait InternalCommand extends DynamicConfigProtocol { 84 | val key: String 85 | val offset: Long 86 | } 87 | 88 | final private[config] case class UpdateDynamicConfigRecord( 89 | key: String, 90 | value: DynamicCfg, 91 | offset: Long 92 | ) extends InternalCommand 93 | 94 | final private[config] case class RemoveDynamicConfigRecord( 95 | key: String, 96 | offset: Long 97 | ) extends InternalCommand 98 | 99 | } 100 | -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/jmx/mbeans/ConsumerClientStatsMXBeanSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.jmx.mbeans 2 | 3 | import net.scalytica.kafka.wsproxy.jmx.consumerStatsName 4 | import net.scalytica.kafka.wsproxy.jmx.mbeans.ConsumerClientStatsProtocol._ 5 | import net.scalytica.kafka.wsproxy.models.{ 6 | FullConsumerId, 7 | WsClientId, 8 | WsGroupId 9 | } 10 | import org.apache.pekko.actor.testkit.typed.scaladsl.TestProbe 11 | import org.apache.pekko.actor.typed.{ActorRef, Behavior} 12 | 13 | import scala.concurrent.duration._ 14 | 15 | // scalastyle:off magic.number 16 | class ConsumerClientStatsMXBeanSpec extends MXBeanSpecLike { 17 | 18 | override protected val testTopicPrefix: String = 19 | "consumerstats-mxbean-test-topic" 20 | 21 | val cid: WsClientId = WsClientId("bean-spec") 22 | val gid: WsGroupId = WsGroupId("bean-spec") 23 | val fid: FullConsumerId = FullConsumerId(gid, cid) 24 | val beanName: String = consumerStatsName(fid) 25 | 26 | val behavior: Behavior[ConsumerClientStatsCommand] = 27 | ConsumerClientStatsMXBeanActor( 28 | fullConsumerId = fid, 29 | useAutoAggregation = false 30 | ) 31 | 32 | val ref: ActorRef[ConsumerClientStatsCommand] = 33 | tk.spawn(behavior, beanName) 34 | 35 | val probe: TestProbe[ConsumerClientStatsResponse] = 36 | tk.createTestProbe[ConsumerClientStatsResponse]("bean-spec-ccsr-probe") 37 | 38 | val proxy: ConsumerClientStatsMXBean = 39 | jmxq.getMxBean(beanName, classOf[ConsumerClientStatsMXBean]) 40 | 41 | override protected def afterAll(): Unit = { 42 | super.afterAll() 43 | } 44 | 45 | "The ConsumerClientStatsMXBean" should { 46 | 47 | "increment number of records sent" in { 48 | ref ! IncrementRecordSent(probe.ref) 49 | probe.expectMessage(1 second, RecordsSentIncremented) 50 | proxy.getNumRecordsSentTotal mustBe 1 51 | proxy.getNumUncommittedRecords mustBe 1 52 | } 53 | 54 | "increment number of commits received" in { 55 | ref ! IncrementCommitsReceived(probe.ref) 56 | probe.expectMessage(1 second, CommitsReceivedIncremented) 57 | proxy.getNumCommitsReceivedTotal mustBe 1 58 | proxy.getNumUncommittedRecords mustBe 0 59 | } 60 | 61 | "aggregate number of records for the last minute" in { 62 | ref ! UpdateRecordsPerMinute(proxy.getNumRecordsSentTotal, probe.ref) 63 | probe.expectMessage(1 second, RecordsPerMinuteUpdated) 64 | proxy.getNumRecordsSentTotal mustBe 1 65 | proxy.getNumRecordsSentLastMinute mustBe 1 66 | } 67 | 68 | "aggregate number of commits for the last minute" in { 69 | ref ! 
UpdateCommitsPerMinute(proxy.getNumCommitsReceivedTotal, probe.ref) 70 | probe.expectMessage(1 second, CommitsPerMinuteUpdated) 71 | proxy.getNumCommitsReceivedTotal mustBe 1 72 | proxy.getNumCommitsReceivedLastMinute mustBe 1 73 | } 74 | 75 | "aggregate number of records for the last hour" in { 76 | ref ! UpdateRecordsPerHour(1, probe.ref) 77 | probe.expectMessage(1 second, RecordsPerHourUpdated) 78 | proxy.getNumRecordsSentLastHour mustBe 1 79 | } 80 | 81 | "aggregate number of commits for the last hour" in { 82 | ref ! UpdateCommitsPerHour(1, probe.ref) 83 | probe.expectMessage(1 second, CommitsPerHourUpdated) 84 | proxy.getNumCommitsReceivedLastHour mustBe 1 85 | } 86 | 87 | "stop the MXBean" in { 88 | ref ! Stop 89 | probe.expectTerminated(ref) 90 | jmxq.findByTypeAndName[ConsumerClientStatsMXBean]( 91 | beanName 92 | ) mustBe None 93 | } 94 | 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /server/src/test/resources/sasl/server/create-certs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o nounset \ 4 | -o errexit \ 5 | -o verbose \ 6 | -o xtrace 7 | 8 | base_name="wsproxytest" 9 | base_name_upper=`echo $base_name | tr '[:lower:]' '[:upper:]'` 10 | base_hostname="scalytica.net" 11 | wsproxy_hostname="$base_name.$base_hostname" 12 | 13 | PW="scalytica" 14 | 15 | ## 16 | # Generate self-signed CA files 17 | ## 18 | 19 | # Create a self-signed key pair root CA certificate. 20 | keytool -genkeypair -v \ 21 | -alias "$base_name.ca" \ 22 | -dname "CN=ca.$base_hostname,OU=Development,O=Scalytica,L=Heggedal,S=Akershus,C=NO" \ 23 | -keystore "$base_name.ca.jks" \ 24 | -keypass $PW \ 25 | -storepass $PW \ 26 | -keyalg RSA \ 27 | -keysize 4096 \ 28 | -ext KeyUsage:critical="keyCertSign" \ 29 | -ext BasicConstraints:critical="ca:true" \ 30 | -validity 9999 31 | 32 | # Export the CA public certificate as $base_name.ca.crt so that it can be used in trust stores. 33 | keytool -export -v \ 34 | -alias "$base_name.ca" \ 35 | -file "$base_name.ca.crt" \ 36 | -keypass $PW \ 37 | -storepass $PW \ 38 | -keystore "$base_name.ca.jks" \ 39 | -rfc 40 | 41 | ## 42 | # Generate self-signed server files 43 | ## 44 | 45 | # Create a server certificate, tied to host url 46 | keytool -genkeypair -v \ 47 | -alias "$wsproxy_hostname" \ 48 | -dname "CN=$wsproxy_hostname,OU=Development,O=Scalytica,L=Heggedal,S=Akershus,C=NO" \ 49 | -keystore "$wsproxy_hostname.jks" \ 50 | -keypass $PW \ 51 | -storepass $PW \ 52 | -keyalg RSA \ 53 | -keysize 2048 \ 54 | -validity 385 55 | 56 | # Create a certificate signing request for the host url 57 | keytool -certreq -v \ 58 | -alias "$wsproxy_hostname" \ 59 | -keypass $PW \ 60 | -storepass $PW \ 61 | -keystore "$wsproxy_hostname.jks" \ 62 | -file "$wsproxy_hostname.csr" 63 | 64 | # Tell the CA to sign the server certificate. Note that the extension is on the 65 | # request, and not the original certificate. 66 | # Technically, keyUsage should be: 67 | # - digitalSignature for DHE or ECDHE, 68 | # - keyEncipherment for RSA. 69 | keytool -gencert -v \ 70 | -alias "$base_name.ca" \ 71 | -keypass $PW \ 72 | -storepass $PW \ 73 | -keystore "$base_name.ca.jks" \ 74 | -infile "$wsproxy_hostname.csr" \ 75 | -outfile "$wsproxy_hostname.crt" \ 76 | -ext KeyUsage:critical="digitalSignature,keyEncipherment" \ 77 | -ext EKU="serverAuth" \ 78 | -ext SAN="DNS:$wsproxy_hostname" \ 79 | -rfc 80 | 81 | # Tell jks it can trust the self-signed CA as a signer. 
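# (Hedged aside, not part of the original flow: the import below can be
#  verified afterwards with standard keytool flags, e.g.
#    keytool -list -v \
#      -keystore "$wsproxy_hostname.jks" \
#      -storepass $PW \
#      -alias "$base_name.ca"
#  which prints the trusted CA entry when the import succeeded.)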
82 | keytool -import -v \ 83 | -alias "$base_name.ca" \ 84 | -file "$base_name.ca.crt" \ 85 | -keystore "$wsproxy_hostname.jks" \ 86 | -storetype JKS \ 87 | -storepass $PW << EOF 88 | yes 89 | EOF 90 | 91 | # Import the signed certificate back into the server jks 92 | keytool -import -v \ 93 | -alias "$wsproxy_hostname" \ 94 | -file "$wsproxy_hostname.crt" \ 95 | -keystore "$wsproxy_hostname.jks" \ 96 | -storetype JKS \ 97 | -storepass $PW 98 | 99 | # List out the contents of the server jks just to confirm it. 100 | # This is the key store you should present as the server. 101 | #keytool -list -v \ 102 | # -keystore "$wsproxy_hostname.jks" \ 103 | # -storepass $PW 104 | 105 | ## 106 | # Generate trust store for clients 107 | ## 108 | 109 | # Create a JKS keystore that trusts the self-signed CA. 110 | keytool -import -v \ 111 | -alias "$base_name.ca" \ 112 | -file "$base_name.ca.crt" \ 113 | -keypass $PW \ 114 | -storepass $PW \ 115 | -keystore "$wsproxy_hostname.truststore.jks" << EOF 116 | yes 117 | EOF 118 | 119 | # List out the contents of the trust store to confirm the CA was imported. 120 | keytool -list -v \ 121 | -keystore "$wsproxy_hostname.truststore.jks" \ 122 | -storepass $PW 123 | 124 | ########## 125 | echo "All done..." -------------------------------------------------------------------------------- /server/src/test/scala/net/scalytica/kafka/wsproxy/config/DynamicConfigurationsSpec.scala: -------------------------------------------------------------------------------- 1 | package net.scalytica.kafka.wsproxy.config 2 | 3 | import net.scalytica.kafka.wsproxy.config.Configuration.{ 4 | ConsumerSpecificLimitCfg, 5 | ProducerSpecificLimitCfg 6 | } 7 | import net.scalytica.kafka.wsproxy.config.DynamicConfigHandlerProtocol._ 8 | import org.scalatest.Inspectors.forAll 9 | import org.scalatest.OptionValues 10 | import org.scalatest.matchers.must.Matchers 11 | import org.scalatest.wordspec.AnyWordSpec 12 | 13 | // scalastyle:off magic.number 14 | class DynamicConfigurationsSpec 15 | extends AnyWordSpec 16 | with Matchers 17 | with OptionValues 18 | with DynamicConfigTestDataGenerators { 19 | 20 | val expected: DynamicConfigurations = DynamicConfigurations(expectedMap) 21 | 22 | private[this] val clsNameProdSpecLimitCfg = 23 | classOf[ProducerSpecificLimitCfg] 24 | 25 | private[this] val clsNameConsSpecLimitCfg = 26 | classOf[ConsumerSpecificLimitCfg] 27 | 28 | "DynamicConfigurations" should { 29 | 30 | "be initialised with an empty configs map" in { 31 | DynamicConfigurations().configs mustBe empty 32 | } 33 | 34 | "be initialised with a non-empty configs map" in { 35 | expected.configs must contain allElementsOf expectedMap 36 | } 37 | 38 | "be initialised with a list of UpdateDynamicConfigRecord instances" in { 39 | val records = expectedMap.zipWithIndex.map { case ((k, v), i) => 40 | UpdateDynamicConfigRecord(k, v, i.toLong) 41 | }.toSeq 42 | val res = DynamicConfigurations(records: _*).configs 43 | res must contain allElementsOf expectedMap 44 | } 45 | 46 | "return None if config for a key is not found" in { 47 | expected.findByKey("foobar") mustBe None 48 | } 49 | 50 | "return the config if config for a key is found" in { 51 | forAll(expectedMap) { case (k, v) => 52 | expected.findByKey(k).value mustBe v 53 | } 54 | } 55 | 56 | "return false if config map does not contain key" in { 57 | expected.hasKey("foobar") mustBe false 58 | } 59 | 60 | "return true if config map contains the key" in { 61 | forAll(expectedKeys) { key => 62 | expected.hasKey(key) mustBe true 63 | } 64 | } 65 | 66 | "add a new dynamic 
configuration" in { 67 | val cfg = createConsumerLimitTestCfg("g-4", 4, 40) 68 | val key = dynamicCfgTopicKey(cfg).value 69 | val add = UpdateDynamicConfigRecord(key, cfg, 4) 70 | 71 | val res = expected.update(add).value 72 | 73 | res.hasKey(key) mustBe true 74 | res.findByKey(key).value mustBe cfg 75 | } 76 | 77 | "update an existing dynamic configuration" in { 78 | val (key, value) = expectedSeq(2) 79 | 80 | value match { 81 | case orig: ProducerSpecificLimitCfg => 82 | val cfg = orig.copy(messagesPerSecond = Some(200)) 83 | val upd = UpdateDynamicConfigRecord(key, cfg, 10) 84 | 85 | val res = expected.update(upd).value 86 | 87 | res.hasKey(key) mustBe true 88 | res.findByKey(key).value mustBe cfg 89 | 90 | case _ => 91 | fail( 92 | s"Expected a $clsNameProdSpecLimitCfg, " + 93 | s"but got a $clsNameConsSpecLimitCfg" 94 | ) 95 | } 96 | } 97 | 98 | "remove a dynamic configuration" in { 99 | val remKey = expectedKeys.lastOption.value 100 | 101 | val res = expected.remove(RemoveDynamicConfigRecord(remKey, 20)).value 102 | 103 | res.hasKey(remKey) mustBe false 104 | res.findByKey(remKey) mustBe None 105 | } 106 | 107 | "remove all dynamic configurations" in { 108 | expected.removeAll().isEmpty mustBe true 109 | } 110 | } 111 | } 112 | --------------------------------------------------------------------------------