├── .gitignore ├── LICENSE ├── README.md ├── activator.properties ├── build.sbt ├── project └── build.properties ├── src ├── main │ └── scala │ │ └── com │ │ └── example │ │ ├── PingActor.scala │ │ ├── PongActor.scala │ │ ├── consumer │ │ ├── BatchCommittableSourceConsumerMain.scala │ │ ├── CommittableSourceConsumerMain.scala │ │ └── PlainSourceConsumerMain.scala │ │ └── producer │ │ ├── CommitConsumerToFlowProducerMain.scala │ │ ├── ConsumerToCommitableSinkProducerMain.scala │ │ ├── FlowProducerMain.scala │ │ └── PlainSinkProducerMain.scala └── test │ └── scala │ └── com │ └── example │ └── PingPongActorSpec.scala └── tutorial ├── index.html └── index.md /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/scala 3 | 4 | ### Scala ### 5 | *.class 6 | *.log 7 | 8 | # sbt specific 9 | .cache 10 | .history 11 | .lib/ 12 | dist/* 13 | target/ 14 | lib_managed/ 15 | src_managed/ 16 | project/boot/ 17 | project/plugins/project/ 18 | 19 | # Scala-IDE specific 20 | .scala_dependencies 21 | .worksheet 22 | 23 | /.idea 24 | 25 | # ENSIME specific 26 | .ensime_cache/ 27 | .ensime 28 | .idea/libraries/SBT__org_scala_lang_modules_scala_xml_2_11_1_0_5_jar.xml 29 | .idea/libraries/SBT__org_scala_lang_scala_reflect_2_11_8_jar.xml 30 | .idea/libraries/SBT__org_scalactic_scalactic_2_11_3_0_0_jar.xml 31 | .idea/libraries/SBT__org_scalatest_scalatest_2_11_3_0_0_jar.xml 32 | .idea/modules/reactive-kafka-example0830-build.iml 33 | .idea/modules/reactive-kafka-example0830.iml 34 | .idea/vcs.xml 35 | 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2014 Typesafe, Inc. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | reactive-kafka-scala-example 2 | ========================= 3 | reactive-kafka examples in Scala 4 | 5 | Akka Streams Kafka: http://doc.akka.io/docs/akka-stream-kafka/current/home.html 6 | 7 | ### Install Kafka on OSX 8 | ``` 9 | brew install kafka 10 | ``` 11 | 12 | ### Start Kafka 13 | ``` 14 | zkserver start 15 | kafka-server-start /usr/local/etc/kafka/server.properties 16 | ``` 17 | 18 | ### Start a console consumer for topic1/topic2 19 | ``` 20 | kafka-console-consumer --zookeeper localhost:2181 --topic topic1 --from-beginning 21 | kafka-console-consumer --zookeeper localhost:2181 --topic topic2 --from-beginning 22 | ``` 23 | 24 | ### Start a console producer for topic1 25 | ``` 26 | kafka-console-producer --broker-list localhost:9092 --topic topic1 27 | ``` 28 | --- 29 | ### Start the reactive-kafka examples 30 | ``` 31 | git clone https://github.com/makersu/reactive-kafka-scala-example.git 32 | cd reactive-kafka-scala-example 33 | sbt run 34 | ``` 35 | ``` 36 | Multiple main classes detected, select one to run: 37 | 38 | [1] com.example.consumer.BatchCommittableSourceConsumerMain 39 | [2] com.example.consumer.CommittableSourceConsumerMain 40 | [3] com.example.consumer.PlainSourceConsumerMain 41 | [4] com.example.producer.CommitConsumerToFlowProducerMain 42 | [5] com.example.producer.ConsumerToCommitableSinkProducerMain 43 | [6] com.example.producer.FlowProducerMain 44 | [7] com.example.producer.PlainSinkProducerMain 45 | 46 | Enter number: 47 | ``` 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /activator.properties: -------------------------------------------------------------------------------- 1 | name=reactive-kafka-scala-example 2 | title=Apache Kafka examples using reactive-kafka in Scala 3 | description=Apache Kafka examples using reactive-kafka in Scala.
4 | tags=kafka,reactive-kafka,akka-stream,scala,producer,consumer 5 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | name := """reactive-kafka-example""" 2 | 3 | version := "1.0" 4 | 5 | scalaVersion := "2.11.8" 6 | 7 | val akkaVersion = "2.4.9" 8 | 9 | libraryDependencies ++= Seq( 10 | "com.typesafe.akka" %% "akka-actor" % akkaVersion, 11 | "com.typesafe.akka" %% "akka-testkit" % akkaVersion % "test", 12 | "org.scalatest" %% "scalatest" % "3.0.0" % "test") 13 | 14 | libraryDependencies ++= Seq( 15 | "com.typesafe.akka" %% "akka-stream-kafka" % "0.11-RC1" 16 | ) 17 | 18 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | #Activator-generated Properties 2 | #Wed Aug 31 11:50:59 CST 2016 3 | template.uuid=c331da07-ace3-45a5-bc6d-bf11e5d6385c 4 | sbt.version=0.13.8 5 | -------------------------------------------------------------------------------- /src/main/scala/com/example/PingActor.scala: -------------------------------------------------------------------------------- 1 | package com.example 2 | 3 | import akka.actor.{Actor, ActorLogging, Props} 4 | 5 | class PingActor extends Actor with ActorLogging { 6 | import PingActor._ 7 | 8 | var counter = 0 9 | val pongActor = context.actorOf(PongActor.props, "pongActor") 10 | 11 | def receive = { 12 | case Initialize => 13 | log.info("In PingActor - starting ping-pong") 14 | pongActor ! PingMessage("ping") 15 | case PongActor.PongMessage(text) => 16 | log.info("In PingActor - received message: {}", text) 17 | counter += 1 18 | if (counter == 3) context.system.shutdown() 19 | else sender() ! PingMessage("ping") 20 | } 21 | } 22 | 23 | object PingActor { 24 | val props = Props[PingActor] 25 | case object Initialize 26 | case class PingMessage(text: String) 27 | } -------------------------------------------------------------------------------- /src/main/scala/com/example/PongActor.scala: -------------------------------------------------------------------------------- 1 | package com.example 2 | 3 | import akka.actor.{Actor, ActorLogging, Props} 4 | 5 | class PongActor extends Actor with ActorLogging { 6 | import PongActor._ 7 | 8 | def receive = { 9 | case PingActor.PingMessage(text) => 10 | log.info("In PongActor - received message: {}", text) 11 | sender() ! PongMessage("pong") 12 | } 13 | } 14 | 15 | object PongActor { 16 | val props = Props[PongActor] 17 | case class PongMessage(text: String) 18 | } 19 | -------------------------------------------------------------------------------- /src/main/scala/com/example/consumer/BatchCommittableSourceConsumerMain.scala: -------------------------------------------------------------------------------- 1 | package com.example.consumer 2 | 3 | import akka.Done 4 | import akka.actor.ActorSystem 5 | import akka.kafka.ConsumerMessage.CommittableOffsetBatch 6 | import akka.kafka.scaladsl.Consumer 7 | import akka.kafka.{ConsumerSettings, Subscriptions} 8 | import akka.stream.ActorMaterializer 9 | import akka.stream.scaladsl.Sink 10 | import org.apache.kafka.clients.consumer.ConsumerConfig 11 | import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer} 12 | 13 | import scala.concurrent.ExecutionContext.Implicits.global 14 | import scala.concurrent.Future 15 | /** 16 | * Created by marksu on 8/31/16. 
17 | */ 18 | object BatchCommittableSourceConsumerMain extends App { 19 | implicit val system = ActorSystem("BatchCommittableSourceConsumerMain") 20 | implicit val materializer = ActorMaterializer() 21 | 22 | //TODO: move to configuration application.conf 23 | val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer) 24 | .withBootstrapServers("localhost:9092") 25 | .withGroupId("BatchCommittableSourceConsumer") 26 | .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") 27 | 28 | val done = 29 | Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1")) 30 | .mapAsync(1) { msg => 31 | println(s"BatchCommittableConsumer consume: $msg") 32 | Future.successful(Done).map(_ => msg.committableOffset) 33 | } 34 | .batch(max = 20, first => CommittableOffsetBatch.empty.updated(first)) { (batch, elem) => 35 | batch.updated(elem) 36 | } 37 | .mapAsync(3)(_.commitScaladsl()) 38 | .runWith(Sink.ignore) 39 | 40 | } 41 | -------------------------------------------------------------------------------- /src/main/scala/com/example/consumer/CommittableSourceConsumerMain.scala: -------------------------------------------------------------------------------- 1 | package com.example.consumer 2 | 3 | import akka.actor.ActorSystem 4 | import akka.kafka.scaladsl.Consumer 5 | import akka.kafka.{ConsumerSettings, Subscriptions} 6 | import akka.stream.ActorMaterializer 7 | import akka.stream.scaladsl.Sink 8 | import org.apache.kafka.clients.consumer.ConsumerConfig 9 | import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer} 10 | 11 | /** 12 | * Created by marksu on 8/31/16. 13 | */ 14 | object CommittableSourceConsumerMain extends App { 15 | 16 | implicit val system = ActorSystem("CommittableSourceConsumerMain") 17 | implicit val materializer = ActorMaterializer() 18 | 19 | //TODO: move to configuration application.conf 20 | val consumerSettings = 21 | ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer) 22 | .withBootstrapServers("localhost:9092") 23 | .withGroupId("CommittableSourceConsumer") 24 | .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") 25 | 26 | val done = 27 | Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1")) 28 | .mapAsync(1) { msg => 29 | println(s"CommittableSourceConsumer consume: $msg") 30 | msg.committableOffset.commitScaladsl() 31 | } 32 | .runWith(Sink.ignore) 33 | 34 | } 35 | -------------------------------------------------------------------------------- /src/main/scala/com/example/consumer/PlainSourceConsumerMain.scala: -------------------------------------------------------------------------------- 1 | package com.example.consumer 2 | 3 | import java.util.concurrent.atomic.AtomicLong 4 | 5 | import akka.Done 6 | import akka.actor.ActorSystem 7 | import akka.kafka.scaladsl.Consumer 8 | import akka.kafka.{ConsumerSettings, Subscriptions} 9 | import akka.stream.ActorMaterializer 10 | import akka.stream.scaladsl.Sink 11 | import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord} 12 | import org.apache.kafka.common.TopicPartition 13 | import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer} 14 | 15 | import scala.concurrent.ExecutionContext.Implicits.global 16 | import scala.concurrent.Future 17 | 18 | /** 19 | * Created by marksu on 8/31/16. 
20 | */ 21 | object PlainSourceConsumerMain extends App { 22 | implicit val system = ActorSystem("PlainSourceConsumerMain") 23 | implicit val materializer = ActorMaterializer() 24 | 25 | //TODO: move to configuration application.conf 26 | val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer) 27 | .withBootstrapServers("localhost:9092") 28 | .withGroupId("PlainSourceConsumer") 29 | .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") 30 | 31 | val db = new DB 32 | db.loadOffset().foreach { fromOffset => 33 | val partition = 0 34 | val subscription = Subscriptions.assignmentWithOffset( 35 | new TopicPartition("topic1", partition) -> fromOffset 36 | ) 37 | val done = 38 | Consumer.plainSource(consumerSettings, subscription) 39 | .mapAsync(1)(db.save) 40 | .runWith(Sink.ignore) 41 | } 42 | 43 | } 44 | 45 | //External Offset Storage 46 | class DB { 47 | 48 | private val offset = new AtomicLong(2) 49 | 50 | def save(record: ConsumerRecord[Array[Byte], String]): Future[Done] = { 51 | println(s"DB.save: ${record.value}") 52 | offset.set(record.offset) 53 | Future.successful(Done) 54 | } 55 | 56 | def loadOffset(): Future[Long] = 57 | Future.successful(offset.get) 58 | 59 | def update(data: String): Future[Done] = { 60 | println(s"DB.update: $data") 61 | Future.successful(Done) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/main/scala/com/example/producer/CommitConsumerToFlowProducerMain.scala: -------------------------------------------------------------------------------- 1 | package com.example.producer 2 | 3 | import akka.actor.ActorSystem 4 | import akka.kafka.scaladsl.{Consumer, Producer} 5 | import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions} 6 | import akka.stream.ActorMaterializer 7 | import akka.stream.scaladsl.Sink 8 | import org.apache.kafka.clients.consumer.ConsumerConfig 9 | import org.apache.kafka.clients.producer.ProducerRecord 10 | import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer} 11 | 12 | /** 13 | * Created by marksu on 9/7/16. 
14 | */ 15 | object CommitConsumerToFlowProducerMain extends App { 16 | implicit val system = ActorSystem("CommitConsumerToFlowProducerMain") 17 | implicit val materializer = ActorMaterializer() 18 | 19 | val consumerSettings = 20 | ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer) 21 | .withBootstrapServers("localhost:9092") 22 | .withGroupId("CommitConsumerToFlowProducer") 23 | .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") 24 | 25 | val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer) 26 | .withBootstrapServers("localhost:9092") 27 | 28 | val done = 29 | Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1")) 30 | .map { msg => 31 | println(s"topic1 -> topic2: $msg") 32 | ProducerMessage.Message(new ProducerRecord[Array[Byte], String]( 33 | "topic2", 34 | msg.record.value 35 | ), msg.committableOffset) 36 | } 37 | .via(Producer.flow(producerSettings)) 38 | .mapAsync(producerSettings.parallelism) { result => 39 | result.message.passThrough.commitScaladsl() 40 | } 41 | .runWith(Sink.ignore) 42 | } 43 | -------------------------------------------------------------------------------- /src/main/scala/com/example/producer/ConsumerToCommitableSinkProducerMain.scala: -------------------------------------------------------------------------------- 1 | package com.example.producer 2 | 3 | import akka.actor.ActorSystem 4 | import akka.kafka.ConsumerMessage.CommittableOffsetBatch 5 | import akka.kafka.scaladsl.{Consumer, Producer} 6 | import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions} 7 | import akka.stream.ActorMaterializer 8 | import akka.stream.scaladsl.Sink 9 | import org.apache.kafka.clients.consumer.ConsumerConfig 10 | import org.apache.kafka.clients.producer.ProducerRecord 11 | import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer} 12 | 13 | /** 14 | * Created by marksu on 8/31/16. 
15 | */ 16 | object ConsumerToCommitableSinkProducerMain extends App { 17 | 18 | implicit val system = ActorSystem("Consumer2ProducerMain") 19 | implicit val materializer = ActorMaterializer() 20 | 21 | //TODO: move to configuration application.conf 22 | val consumerSettings = 23 | ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer) 24 | .withBootstrapServers("localhost:9092") 25 | .withGroupId("Consumer2Producer") 26 | .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") 27 | 28 | //TODO: move to configuration application.conf 29 | val producerSettings = 30 | ProducerSettings(system, new ByteArraySerializer, new StringSerializer) 31 | .withBootstrapServers("localhost:9092") 32 | 33 | Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1")) 34 | .map { msg => 35 | println(s"topic1 -> topic2: $msg") 36 | ProducerMessage.Message(new ProducerRecord[Array[Byte], String]( 37 | "topic2", 38 | msg.record.value 39 | ), msg.committableOffset) 40 | } 41 | .runWith(Producer.commitableSink(producerSettings)) 42 | 43 | } 44 | -------------------------------------------------------------------------------- /src/main/scala/com/example/producer/FlowProducerMain.scala: -------------------------------------------------------------------------------- 1 | package com.example.producer 2 | 3 | import akka.actor.ActorSystem 4 | import akka.kafka.scaladsl.Producer 5 | import akka.kafka.{ProducerMessage, ProducerSettings} 6 | import akka.stream.ActorMaterializer 7 | import akka.stream.scaladsl.{Sink, Source} 8 | import org.apache.kafka.clients.producer.ProducerRecord 9 | import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer} 10 | 11 | /** 12 | * Created by marksu on 9/6/16. 13 | */ 14 | object FlowProducerMain extends App { 15 | implicit val system = ActorSystem("FlowProducerMain") 16 | implicit val materializer = ActorMaterializer() 17 | 18 | val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer) 19 | .withBootstrapServers("localhost:9092") 20 | 21 | val done = Source(1 to 10) 22 | .map { n => 23 | // val partition = math.abs(n) % 2 24 | val partition = 0 25 | ProducerMessage.Message(new ProducerRecord[Array[Byte], String]( 26 | "topic1", partition, null, n.toString 27 | ), n) 28 | } 29 | .via(Producer.flow(producerSettings)) 30 | .map { result => 31 | val record = result.message.record 32 | println(s"${record.topic}/${record.partition} ${result.offset}: ${record.value}" + 33 | s"(${result.message.passThrough})") 34 | result 35 | } 36 | .runWith(Sink.ignore) 37 | } 38 | -------------------------------------------------------------------------------- /src/main/scala/com/example/producer/PlainSinkProducerMain.scala: -------------------------------------------------------------------------------- 1 | package com.example.producer 2 | 3 | import akka.actor.ActorSystem 4 | import akka.kafka.ProducerSettings 5 | import akka.kafka.scaladsl.Producer 6 | import akka.stream.ActorMaterializer 7 | import akka.stream.scaladsl.Source 8 | import org.apache.kafka.clients.producer.ProducerRecord 9 | import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer} 10 | 11 | /** 12 | * Created by marksu on 8/31/16. 
13 | */ 14 | object PlainSinkProducerMain extends App { 15 | 16 | implicit val system = ActorSystem("PlainSinkProducerMain") 17 | implicit val materializer = ActorMaterializer() 18 | 19 | val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer) 20 | .withBootstrapServers("localhost:9092") 21 | 22 | val done = Source(1 to 10) 23 | .map(_.toString) 24 | .map { elem => 25 | println(s"PlainSinkProducer produce: ${elem}") 26 | new ProducerRecord[Array[Byte], String]("topic1", elem) 27 | } 28 | .runWith(Producer.plainSink(producerSettings)) 29 | 30 | } 31 | -------------------------------------------------------------------------------- /src/test/scala/com/example/PingPongActorSpec.scala: -------------------------------------------------------------------------------- 1 | package com.example 2 | 3 | import akka.actor.ActorSystem 4 | import akka.actor.Actor 5 | import akka.actor.Props 6 | import akka.testkit.{ TestActors, TestKit, ImplicitSender } 7 | import org.scalatest.WordSpecLike 8 | import org.scalatest.Matchers 9 | import org.scalatest.BeforeAndAfterAll 10 | 11 | class PingPongActorSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender 12 | with WordSpecLike with Matchers with BeforeAndAfterAll { 13 | 14 | def this() = this(ActorSystem("MySpec")) 15 | 16 | override def afterAll { 17 | TestKit.shutdownActorSystem(system) 18 | } 19 | 20 | "A Ping actor" must { 21 | "send back a ping on a pong" in { 22 | val pingActor = system.actorOf(PingActor.props) 23 | pingActor ! PongActor.PongMessage("pong") 24 | expectMsg(PingActor.PingMessage("ping")) 25 | } 26 | } 27 | 28 | "A Pong actor" must { 29 | "send back a pong on a ping" in { 30 | val pongActor = system.actorOf(PongActor.props) 31 | pongActor ! PingActor.PingMessage("ping") 32 | expectMsg(PongActor.PongMessage("pong")) 33 | } 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /tutorial/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 | 5 | 6 |
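
Several of the main classes above hard-code their Kafka settings and carry a `//TODO: move to configuration application.conf` note. The sketch below shows one way that TODO could be addressed. It is not part of this repository: the `kafka-example` config block, its `bootstrap-servers`/`group-id` keys, and the `ConfiguredConsumerSettings` helper are all assumed names chosen for illustration.

```
// Sketch only, not part of this repository: builds ConsumerSettings from
// application.conf instead of hard-coding the broker address and group id.
// Assumes a hypothetical config block such as:
//
//   kafka-example {
//     bootstrap-servers = "localhost:9092"
//     group-id          = "CommittableSourceConsumer"
//   }
package com.example

import akka.actor.ActorSystem
import akka.kafka.ConsumerSettings
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer}

object ConfiguredConsumerSettings {

  def apply(system: ActorSystem): ConsumerSettings[Array[Byte], String] = {
    // system.settings.config is the Typesafe Config already loaded by Akka,
    // so application.conf values are available without extra dependencies.
    val cfg = system.settings.config.getConfig("kafka-example")
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers(cfg.getString("bootstrap-servers"))
      .withGroupId(cfg.getString("group-id"))
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
  }
}
```

A main class could then replace its inline settings builder with `ConfiguredConsumerSettings(system)`; the same pattern would apply to `ProducerSettings` in the producer examples.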