├── .github
│   └── workflows
│       └── main.yml
├── .gitignore
├── README.md
├── build.sbt
├── project
│   ├── Dependencies.scala
│   ├── build.properties
│   └── plugins.sbt
└── src
    ├── main
    │   ├── resources
    │   │   ├── consumer.conf
    │   │   ├── dataset.csv
    │   │   ├── extract.csv
    │   │   ├── logback.xml
    │   │   └── producer.conf
    │   └── scala
    │       └── fr
    │           └── ps
    │               └── eng
    │                   └── kafka
    │                       └── app4s
    │                           ├── client
    │                           │   ├── Conf.scala
    │                           │   ├── ConsumingApp.scala
    │                           │   └── ProducingApp.scala
    │                           └── common
    │                               ├── Dataset.scala
    │                               ├── HelperFunctions.scala
    │                               ├── HelperSerdes.scala
    │                               ├── Key.scala
    │                               ├── Platform.scala
    │                               ├── Rating.scala
    │                               ├── TvShow.scala
    │                               └── package.scala
    └── test
        ├── resources
        │   ├── application.conf
        │   └── logback-test.xml
        └── scala
            └── fr
                └── ps
                    └── eng
                        └── kafka
                            └── app4s
                                ├── DemoTestProvider.scala
                                ├── client
                                │   ├── ConsumingAppSpec.scala
                                │   └── ProducingAppSpec.scala
                                └── common
                                    ├── DatasetSpec.scala
                                    ├── HelperFunctionsSpec.scala
                                    └── HelperSerdesSpec.scala
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Master Workflow
2 |
3 | on:
4 |   push:
5 |     branches:
6 |       - 'master'
7 |
8 | jobs:
9 |   main:
10 |     name: Master Workflow Job
11 |     runs-on: ubuntu-latest
12 |
13 |     steps:
14 |       - name: Checkout Produce/Consume Scala example
15 |         uses: actions/checkout@v1
16 |
17 |       - name: Install SDKMAN
18 |         run: curl -s "https://get.sdkman.io" | bash
19 |
20 |       - name: Install SBT
21 |         run: source "$HOME/.sdkman/bin/sdkman-init.sh" && sdk install sbt
22 |
23 |       - name: Compile project
24 |         run: sbt compile
25 |
26 |       - name: Run test
27 |         run: sbt test
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | *.iml
3 |
4 | dist/*
5 | target/
6 | lib_managed/
7 | src_managed/
8 | project/boot/
9 | project/plugins/project/
10 | .history
11 | .cache
12 | .lib/
13 |
14 | *.class
15 | *.log
16 | .desktop/
17 |
18 | .bsp/
19 |
20 | .java-version
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kafka App for Scala
2 |
3 | 
4 |
5 | This module is the attached source code from the blog post
6 | [Getting Started with Scala and Apache Kafka](https://www.confluent.io/blog/kafka-scala-tutorial-for-beginners/).
7 | It discusses how to use the basic Kafka Clients in a Scala application.
8 | Originally inspired by the first
9 | [scala example](https://github.com/confluentinc/examples/tree/6.0.0-post/clients/cloud/scala),
10 | it goes further by showing multiple ways to produce, consume and configure the clients.
11 |
12 | 1. [Try it](#try-it)
13 | 2. [Produce](#produce)
14 | 3. [Consume](#consume)
15 | 4. [Read More](#read-more)
16 |
17 | ## Try it
18 | ```bash
19 | git clone https://github.com/DivLoic/kafka-application4s.git
20 | cd kafka-application4s
21 | sbt compile
22 | ```
23 |
24 | ### Local
25 |
26 | You first need to run Kafka and the
27 | [Schema Registry](https://docs.confluent.io/platform/current/schema-registry/index.html).
28 | Any recent installation of Kafka or the Confluent platform can be used.
29 | Many installation methods can be found on the [CP Download Page](https://www.confluent.io/download).
30 |
31 | e.g. the Confluent CLI on macOS:
32 | ```shell script
33 | curl -sL https://cnfl.io/cli | sh -s -- latest -b /usr/local/bin
34 | export CONFLUENT_HOME=...
35 | export PATH=$PATH:$CONFLUENT_HOME/bin
36 | confluent local services schema-registry start
37 | ```
38 |
39 |
40 | ### Cloud
41 | The module also works with a cluster hosted on Confluent Cloud.
42 | The cloud-related settings are commented out in [consumer.conf](src/main/resources/consumer.conf)
43 | and [producer.conf](src/main/resources/producer.conf).
44 | To use them, either edit these files or define the following variables:
45 |
46 | ```shell script
47 | export BOOTSTRAP_SERVERS="...:9092"
48 | export CLUSTER_API_KEY="..."
49 | export CLUSTER_API_SECRET="..."
50 | export SCHEMA_REGISTRY_URL="https://..."
51 | export SR_API_KEY="..."
52 | export SR_API_SECRET="..."
53 | ```
54 |
55 | For more on Confluent Cloud login, see the
56 | [documentation](https://docs.confluent.io/current/cloud/access-management/index.html).
57 |
58 | ## Produce
59 | Run:
60 | ```shell script
61 | sbt produce "-Djline.terminal=none" --error
62 | ```
63 |
64 | [](https://asciinema.org/a/weOD3XpAVawpZQrVW0Ic8pN8q)
65 |
66 | ## Consume
67 | Run:
68 | ```shell script
69 | sbt consume "-Djline.terminal=none" --error
70 | ```
71 |
72 | [](https://asciinema.org/a/DjI20wqNnU470hcXkb0uKpe2C)
73 |
74 | ## Read more
75 | - The code is detailed in the [blog post](https://www.confluent.io/blog/kafka-scala-tutorial-for-beginners/)
76 | - For a step-by-step approach, including tests, check out this [Kafka Tutorial](https://kafka-tutorials.confluent.io/produce-consume-lang/scala.html)
--------------------------------------------------------------------------------
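
The cloud variables above work through HOCON's optional substitutions: each connection setting in the `.conf` files is declared twice, a local default first, then a `${?VAR}` override that only takes effect when the environment variable is set. For example, from [consumer.conf](src/main/resources/consumer.conf):

```hocon
bootstrap.servers = "localhost:9092"      # local default
bootstrap.servers = ${?BOOTSTRAP_SERVERS} # used only when the variable is defined
```
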
/build.sbt:
--------------------------------------------------------------------------------
1 | import sbt.fullRunTask
2 |
3 | ThisBuild / scalaVersion := "2.13.3"
4 | ThisBuild / version := "0.1.0-SNAPSHOT"
5 | ThisBuild / organization := "fr.ps.eng"
6 | ThisBuild / organizationName := "ps-engineering"
7 | ThisBuild / javacOptions ++= Seq("-source", "1.8", "-target", "1.8", "-Xlint")
8 | ThisBuild / scalacOptions ++= Seq("-language:postfixOps")
9 |
10 | lazy val produce: TaskKey[Unit] = taskKey[Unit]("Message Production")
11 | lazy val consume: TaskKey[Unit] = taskKey[Unit]("Message Consumption")
12 |
13 | lazy val root = (project in file("."))
14 | .settings(
15 | name := "kafka-application4s",
16 | resolvers += "Confluent Repo" at "https://packages.confluent.io/maven",
17 | libraryDependencies ++= (Dependencies.rootDependencies ++ Dependencies.kafkaClientsDeps),
18 | libraryDependencies ++= (Dependencies.testDependencies map(_ % Test)),
19 | fullRunTask(produce, Compile, "fr.ps.eng.kafka.app4s.client.ProducingApp"),
20 | fullRunTask(consume, Compile, "fr.ps.eng.kafka.app4s.client.ConsumingApp")
21 | )
22 |
--------------------------------------------------------------------------------
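
The `produce` and `consume` tasks above are thin `fullRunTask` wrappers around the two main classes, so the following plain invocations should be equivalent (a sketch of the standard sbt `runMain` task, not an extra feature of this build):

```shell script
sbt "runMain fr.ps.eng.kafka.app4s.client.ProducingApp"
sbt "runMain fr.ps.eng.kafka.app4s.client.ConsumingApp"
```
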
/project/Dependencies.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 |
3 | object Dependencies {
4 |
5 | lazy val rootDependencies: List[ModuleID] =
6 | "org.typelevel" %% "cats-core" % "2.1.1" ::
7 | "ch.qos.logback" % "logback-classic" % "1.2.3" ::
8 | "com.github.pureconfig" %% "pureconfig" % "0.13.0" ::
9 | "com.sksamuel.avro4s" %% "avro4s-core" % "3.1.1" ::
10 | "com.nrinaudo" %% "kantan.csv" % "0.6.1" ::
11 | "com.nrinaudo" %% "kantan.csv-enumeratum" % "0.6.1" :: Nil
12 |
13 | lazy val kafkaClientsDeps: List[ModuleID] =
14 | "org.apache.kafka" % "kafka-clients" % "2.6.0" ::
15 | "io.confluent" % "kafka-avro-serializer" % "6.0.0" :: Nil
16 |
17 | lazy val testDependencies: List[ModuleID] =
18 | "org.scalatest" %% "scalatest" % "3.2.3" ::
19 | "org.scalactic" %% "scalactic" % "3.2.3" ::
20 | "org.scalacheck" %% "scalacheck" % "1.15.1" ::
21 | "org.typelevel" %% "cats-core" % "2.3.0" :: Nil
22 | }
23 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.4.0
2 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1")
--------------------------------------------------------------------------------
/src/main/resources/consumer.conf:
--------------------------------------------------------------------------------
1 | deserializer-config {
2 | schema.registry.url = "http://localhost:8081"
3 | schema.registry.url = ${?SCHEMA_REGISTRY_URL}
4 | # See https://docs.confluent.io/5.0.0/cloud/using/config-client.html#configure-ccloud-clients for ccloud settings
5 | # basic.auth.credentials.source = USER_INFO
6 | # schema.registry.basic.auth.user.info = ${SR_API_KEY}":"${SR_API_SECRET}
7 | }
8 |
9 | consumer-config {
10 | # See https://kafka.apache.org/documentation/#consumerconfigs for more consumer configs
11 | group.id = null
12 | auto.offset.reset = latest
13 | enable.auto.commit = true
14 | bootstrap.servers = "localhost:9092"
15 | bootstrap.servers = ${?BOOTSTRAP_SERVERS}
16 | # See https://docs.confluent.io/5.0.0/cloud/using/config-client.html#configure-ccloud-clients for ccloud settings
17 | # sasl.mechanism = PLAIN
18 | # security.protocol = SASL_SSL
19 | # ssl.endpoint.identification.algorithm = https
20 | # sasl.jaas.config = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""${CLUSTER_API_KEY}"\" password=\""${CLUSTER_API_SECRET}"\";"
21 | }
22 |
23 | tv-show-topic-name: "DEV_TV_SHOW"
24 | tv-show-topic-name: ${?TV_SHOW_TOPIC}
25 |
26 | rating-topic-name: "DEV_RATING"
27 | rating-topic-name: ${?RATING_TOPIC}
28 |
29 | polling-timeout: 5 seconds
30 |
--------------------------------------------------------------------------------
/src/main/resources/extract.csv:
--------------------------------------------------------------------------------
1 | Netflix,Breaking Bad,2008,9.5
2 | Hulu,Destiny,2014,9.6
3 | Hulu,Hungry Henry,2014,9.5
4 | PrimeVideo,Malgudi Days,1987,9.5
5 | Hulu,The Joy of Painting,1983,9.4
6 | PrimeVideo,Band of Brothers,2001,9.4
7 | Netflix,Our Planet,2019,9.3
8 | Netflix,Ramayan,1987,9.3
9 | PrimeVideo,The Wire,2002,9.3
10 | PrimeVideo,Green Paradise,2011,9.3
11 | Netflix,Avatar: The Last Airbender,2005,9.2
12 | Netflix,Yeh Meri Family,2018,9.2
13 | Hulu,Rick and Morty,2013,9.2
14 | PrimeVideo,The Sopranos,1999,9.2
15 | PrimeVideo,Baseball,1994,9.2
16 | PrimeVideo,The Bay,2010,9.2
17 | PrimeVideo,Harmony with A R Rahman,2018,9.2
18 | PrimeVideo,Everyday Driver,2017,9.2
19 | PrimeVideo,Single and Anxious,2016,9.2
20 | Netflix,Sherlock,2010,9.1
21 | Netflix,Fullmetal Alchemist: Brotherhood,2009,9.1
22 | Netflix,The Vietnam War,2017,9.1
23 | Hulu,Leah Remini: Scientology and the Aftermath,2016,9.1
24 | PrimeVideo,The Planets,2019,9.1
25 | Disney+,The Imagineering Story,2019,9.1
26 | Netflix,The Office,2005,8.9
27 | Netflix,When They See Us,2019,8.9
28 | Netflix,Hunter x Hunter,2011,8.9
29 | Netflix,Still Game,2002,8.9
30 | Netflix,The Untamed,2019,8.9
31 | Netflix,Car Masters: Rust to Riches,2018,8.9
32 | Netflix,Stories by Rabindranath Tagore,2015,8.9
33 | Netflix,Peasants Rebellion,2018,8.9
34 | Netflix,An Hour to Save Your Life,2014,8.9
35 | Hulu,Fargo,2014,8.9
36 | Hulu,Gravity Falls,2012,8.9
37 | Hulu,Pride and Prejudice,1995,8.9
38 | Hulu,Cowboy Bebop,1998,8.9
39 | Hulu,Nightwatch,2015,8.9
40 | Hulu,BuzzFeed Unsolved - True Crime,2016,8.9
41 | Hulu,The Wine Show,2016,8.9
42 | PrimeVideo,Undercover,2011,8.9
43 | PrimeVideo,Vinland Saga,2019,8.9
44 | PrimeVideo,Nature,1982,8.9
45 | PrimeVideo,Alice in Paris,2016,8.9
46 | PrimeVideo,Worricker,2011,8.9
47 | Netflix,Stranger Things,2016,8.8
48 | Netflix,Black Mirror,2011,8.8
49 | Netflix,Peaky Blinders,2013,8.8
50 | Netflix,Attack on Titan,2013,8.8
51 | Netflix,Narcos,2015,8.8
52 | Netflix,Twin Peaks,1990,8.8
53 | Netflix,One-Punch Man,2015,8.8
54 | Netflix,The West Wing,1999,8.8
55 | Netflix,Monty Python's Flying Circus,1969,8.8
56 | Netflix,Crash Landing on You,2019,8.8
57 | Netflix,The Universe,2007,8.8
58 | Netflix,30 for 30,2009,8.8
59 | Netflix,Mr. Sunshine,2018,8.8
60 | Netflix,Forensic Files,1996,8.8
61 | Netflix,Signal,2016,8.8
62 | Netflix,Raja Rasoi Aur Anya Kahaniyan,2015,8.8
63 | Netflix,Monkey Life,2007,8.8
64 | Hulu,Seinfeld,1989,8.8
65 | Hulu,It's Always Sunny in Philadelphia,2005,8.8
66 | Hulu,Normal People,2020,8.8
67 | Hulu,Steins,Gate,2011,8.8
68 | Hulu,Over the Garden Wall,2014,8.8
69 | Hulu,Nathan For You,2013,8.8
70 | Hulu,Demon Slayer: Kimetsu no Yaiba,2019,8.8
71 | Hulu,The Promised Neverland,2019,8.8
72 | Hulu,Homicide Hunter: Lt Joe Kenda,2011,8.8
73 | Hulu,Horrible Histories,2009,8.8
74 | Hulu,Good Eats: Reloaded,2018,8.8
75 | Hulu,Sea Rescue,2012,8.8
76 | Hulu,UFC Fight Flashback,2014,8.8
77 | PrimeVideo,The Carol Burnett Show,1967,8.8
78 | PrimeVideo,The Slave Hunters,2010,8.8
79 | PrimeVideo,King of the Road,2016,8.8
80 | PrimeVideo,Xploration DIY Sci,2016,8.8
81 | PrimeVideo,Stacey David's GearZ,2010,8.8
82 | PrimeVideo,Fresh Tracks,2016,8.8
83 | Disney+,One Strange Rock,2018,8.8
84 | Netflix,Better Call Saul,2015,8.7
85 | Netflix,Dark,2017,8.7
86 | Netflix,House of Cards,2013,8.7
87 | Netflix,The Haunting,2018,8.7
88 | Netflix,Arrested Development,2003,8.7
89 | Netflix,The Crown,2016,8.7
90 | Netflix,BoJack Horseman,2014,8.7
91 | Netflix,Sacred Games,2018,8.7
92 | Netflix,Gomorrah,2014,8.7
93 | Netflix,Middleditch & Schwartz,2020,8.7
94 | Netflix,Haikyu!!,2014,8.7
95 | Netflix,NOVA,1974,8.7
96 | Netflix,Shtisel,2013,8.7
97 | Netflix,Justice League Unlimited,2004,8.7
98 | Netflix,Norm Macdonald Has a Show,2018,8.7
99 | Netflix,World War II In HD Colour,2009,8.7
100 | Netflix,The Supervet,2014,8.7
101 | Netflix,Puffin Rock,2015,8.7
102 | Netflix,Moving Art,2007,8.7
103 | Netflix,The Bionic Vet,2010,8.7
104 | Hulu,The Simpsons,1989,8.7
105 | Hulu,South Park,1997,8.7
106 | Hulu,This Is Us,2016,8.7
107 | Hulu,Friday Night Lights,2006,8.7
108 | Hulu,The Shield,2002,8.7
109 | Hulu,One Piece,1999,8.7
110 | Hulu,Letterkenny,2016,8.7
111 | Hulu,Buzzfeed Unsolved: Supernatural,2016,8.7
112 | PrimeVideo,House,2004,8.7
113 | PrimeVideo,The Boys,2019,8.7
114 | PrimeVideo,Fleabag,2016,8.7
115 | PrimeVideo,Rome,2005,8.7
116 | PrimeVideo,The Marvelous Mrs. Maisel,2017,8.7
117 | PrimeVideo,Curb Your Enthusiasm,2000,8.7
118 | PrimeVideo,Six Feet Under,2001,8.7
119 | PrimeVideo,Oz,1997,8.7
120 | PrimeVideo,The Grand Tour,2016,8.7
121 | PrimeVideo,Downton Abbey,2010,8.7
122 | PrimeVideo,The Thick of It,2005,8.7
123 | PrimeVideo,The Brain with Dr. David Eagleman,2015,8.7
124 | PrimeVideo,Tumble Leaf,2013,8.7
125 | PrimeVideo,The Fabric of the Cosmos,2011,8.7
126 | PrimeVideo,Handcrafted America,2016,8.7
127 | PrimeVideo,Owlegories,2016,8.7
128 | PrimeVideo,Enchantimals,2018,8.7
129 | PrimeVideo,Delayed Gratification Series,2017,8.7
130 | Disney+,The Mandalorian,2019,8.7
131 | Netflix,Mindhunter,2017,8.6
132 | Netflix,Parks and Recreation,2009,8.6
133 | Netflix,Dexter,2006,8.6
134 | Netflix,Marvel's Daredevil,2015,8.6
135 | Netflix,Shameless,2011,8.6
136 | Netflix,Mad Men,2007,8.6
137 | Netflix,Star Trek: The Next Generation,1987,8.6
138 | Netflix,Making a Murderer,2015,8.6
139 | Netflix,Anne with an E,2017,8.6
140 | Netflix,Pose,2018,8.6
141 | Netflix,Chef's Table,2015,8.6
142 | Netflix,Hilda,2018,8.6
143 | Netflix,Ezel,2009,8.6
144 | Netflix,North & South,2004,8.6
145 | Netflix,Code Geass: Lelouch of the Rebellion,2006,8.6
146 | Netflix,Fullmetal Alchemist,2003,8.6
147 | Netflix,Your Lie in April,2014,8.6
148 | Netflix,Wentworth,2013,8.6
149 | Netflix,Merlí,2015,8.6
150 | Netflix,Rake,2010,8.6
151 | Netflix,Formula 1: Drive to Survive,2019,8.6
152 | Netflix,The Great British Bake Off,2010,8.6
153 | Netflix,Oliver Stone's Untold History of the United States,2012,8.6
154 | Netflix,The Rise of Phoenixes,2018,8.6
155 | Netflix,Stranger,2017,8.6
156 | Netflix,Daniel Sloss: Live Shows,2018,8.6
157 | Netflix,The Repair Shop,2017,8.6
158 | Netflix,La Niña,2016,8.6
159 | Netflix,Incomplete Life,2014,8.6
160 | Netflix,Fake or Fortune?,2011,8.6
161 | Netflix,Ice Road Rescue,2015,8.6
162 | Netflix,Age of Rebellion,2018,8.6
163 | Hulu,Sons of Anarchy,2008,8.6
164 | Hulu,Archer,2009,8.6
165 | Hulu,Justified,2010,8.6
166 | Hulu,The X-Files,1993,8.6
167 | Hulu,Adventure Time,2010,8.6
168 | Hulu,Atlanta,2016,8.6
169 | Hulu,Naruto Shippūden,2007,8.6
170 | Hulu,Line of Duty,2012,8.6
171 | Hulu,Peep show,2003,8.6
172 | Hulu,Battlestar Galactica,2004,8.6
173 | Hulu,Spaced,1999,8.6
174 | Hulu,The Bridge,2011,8.6
175 | Hulu,RuPaul's Drag Race All Stars,2012,8.6
176 | Hulu,The Eric Andre Show,2012,8.6
177 | Hulu,The Three Stooges,1934,8.6
178 | Hulu,The Incredible Dr. Pol,2011,8.6
179 | Hulu,Clannad After Story,2009,8.6
180 | Hulu,Worth It,2016,8.6
181 | Hulu,Jeopardy! The Greatest of All Time,2020,8.6
182 | Hulu,Snake City,2014,8.6
183 | Hulu,Part of Me,2014,8.6
184 | Hulu,Los Cowboys,2015,8.6
185 | PrimeVideo,Deadwood,2004,8.6
186 | PrimeVideo,The Newsroom,2012,8.6
187 | PrimeVideo,Father Ted,1995,8.6
188 | PrimeVideo,The Family Man,2019,8.6
189 | PrimeVideo,Endeavour,2012,8.6
190 | PrimeVideo,Detectorists,2014,8.6
191 | PrimeVideo,Romanzo Criminale,2008,8.6
192 | PrimeVideo,Mister Rogers' Neighborhood,1968,8.6
193 | PrimeVideo,The Men Who Built America,2012,8.6
194 | PrimeVideo,All or Nothing,2016,8.6
195 | PrimeVideo,Macross,1982,8.6
196 | PrimeVideo,The National Parks: America's Best Idea,2009,8.6
197 | PrimeVideo,Globe Trekker,2005,8.6
198 | PrimeVideo,How We Got to Now,2014,8.6
199 | PrimeVideo,Boundless,2013,8.6
200 | PrimeVideo,Lucky Dog,2013,8.6
--------------------------------------------------------------------------------
/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%-6relative %-5level %logger {35} - %msg \n</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="INFO">
10 |         <appender-ref ref="STDOUT"/>
11 |     </root>
12 |
13 | </configuration>
--------------------------------------------------------------------------------
/src/main/resources/producer.conf:
--------------------------------------------------------------------------------
1 | serializer-config {
2 | schema.registry.url = "http://localhost:8081"
3 | schema.registry.url = ${?SCHEMA_REGISTRY_URL}
4 | # See https://docs.confluent.io/5.0.0/cloud/using/config-client.html#configure-ccloud-clients for ccloud settings
5 | # basic.auth.credentials.source = USER_INFO
6 | # schema.registry.basic.auth.user.info = ${SR_API_KEY}":"${SR_API_SECRET}
7 | }
8 |
9 | producer-config {
10 | # See https://kafka.apache.org/documentation/#producerconfigs for more producer configs
11 | acks = all
12 | client.id = null
13 | compression.type = zstd
14 | bootstrap.servers = "localhost:9092"
15 | bootstrap.servers = ${?BOOTSTRAP_SERVERS}
16 | max.in.flight.requests.per.connection = 1
17 | # See https://docs.confluent.io/5.0.0/cloud/using/config-client.html#configure-ccloud-clients for ccloud settings
18 | # sasl.mechanism = PLAIN
19 | # security.protocol = SASL_SSL
20 | # ssl.endpoint.identification.algorithm = https
21 | # sasl.jaas.config = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""${CLUSTER_API_KEY}"\" password=\""${CLUSTER_API_SECRET}"\";"
22 | }
23 |
24 | tv-show-topic-name: "DEV_TV_SHOW"
25 | tv-show-topic-name: ${?TV_SHOW_TOPIC}
26 |
27 | rating-topic-name: "DEV_RATING"
28 | rating-topic-name: ${?RATING_TOPIC}
29 |
30 | generator-period = 1.5 seconds
31 | generator-parallelism-level = 3
32 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/client/Conf.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.client
2 |
3 | import com.typesafe.config.Config
4 |
5 | import scala.concurrent.duration.FiniteDuration
6 |
7 | /**
8 | * Created by loicmdivad.
9 | */
10 | object Conf {
11 |
12 | case class ProducerAppConfig(producerConfig: Config,
13 | serializerConfig: Config,
14 | tvShowTopicName: String,
15 | ratingTopicName: String,
16 | generatorPeriod: FiniteDuration,
17 | generatorParallelismLevel: Int = 1)
18 |
19 | case class ConsumerAppConfig(consumerConfig: Config,
20 | deserializerConfig: Config,
21 | tvShowTopicName: String,
22 | ratingTopicName: String,
23 | pollingTimeout: FiniteDuration)
24 | }
25 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/client/ConsumingApp.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.client
2 |
3 | import cats.instances.map._
4 | import cats.instances.int._
5 | import cats.syntax.either._
6 | import cats.syntax.monoid._
7 |
8 | import java.util
9 | import java.util.concurrent.{Executors, ScheduledExecutorService, TimeUnit}
10 |
11 | import fr.ps.eng.kafka.app4s.client.Conf.ConsumerAppConfig
12 | import fr.ps.eng.kafka.app4s.common._
13 | import org.apache.kafka.clients.consumer.{ConsumerRebalanceListener, ConsumerRecords, KafkaConsumer}
14 | import org.apache.kafka.common.TopicPartition
15 | import org.apache.kafka.common.serialization.Deserializer
16 | import org.slf4j.{Logger, LoggerFactory}
17 | import pureconfig.ConfigSource
18 | import pureconfig.generic.auto._
19 |
20 | import scala.jdk.CollectionConverters._
21 | import scala.jdk.DurationConverters._
22 | import scala.util.Try
23 |
24 | /**
25 | * Created by loicmdivad.
26 | */
27 | object ConsumingApp extends App with HelperFunctions with HelperSerdes {
28 |
29 | lazy val logger: Logger = LoggerFactory.getLogger(getClass)
30 |
31 | val configFile = sys.props.get("config.file").getOrElse(getClass.getResource("/consumer.conf").getPath)
32 |
33 | ConfigSource.file(configFile).load[ConsumerAppConfig].map { config =>
34 | // (0) configure serializers and producers
35 | val keyDeserializer: Deserializer[Key] = reflectionAvroDeserializer4S[Key]
36 | val tvShowDeserializer: Deserializer[TvShow] = reflectionAvroDeserializer4S[TvShow]
37 | val ratingDeserializer: Deserializer[Rating] = reflectionAvroDeserializer4S[Rating]
38 |
39 | keyDeserializer.configure(config.deserializerConfig.toMap.asJava, true)
40 | tvShowDeserializer :: ratingDeserializer :: Nil foreach (_.configure(config.deserializerConfig.toMap.asJava, false))
41 |
42 | val baseConfig: Map[String, AnyRef] = config.consumerConfig.toMap
43 |
44 | val consumerConfig1 = baseConfig ++ Map("group.id" -> "group1", "fetch.max.bytes" -> "50") asJava
45 | val consumer1 = new KafkaConsumer[Key, TvShow](consumerConfig1, keyDeserializer, tvShowDeserializer)
46 |
47 | val consumerConfig2 = baseConfig ++ Map("group.id" -> "group2", "enable.auto.commit" -> "true") asJava
48 |
49 | // (1) backtracking from the beginning
50 | val tvShowPartition: Vector[TopicPartition] = consumer1
51 | .partitionsFor(config.tvShowTopicName)
52 | .asScala
53 | .toVector
54 | .map(info => new TopicPartition(info.topic(), info.partition()))
55 |
56 | consumer1.assign(tvShowPartition asJava)
57 | consumer1.seekToBeginning(consumer1.assignment())
58 |
59 | val records: ConsumerRecords[Key, TvShow] = consumer1.poll(config.pollingTimeout.toJava)
60 |
61 | logger info s"Just polled ${records.count()} TV shows."
62 | logger warn s"Closing consumer n°1 now!"
63 | Try(consumer1.close())
64 | .recover { case error => logger.error("Failed to close the kafka consumer", error) }
65 |
66 | // (2) consuming the latest messages
67 | val scheduler: ScheduledExecutorService = Executors.newSingleThreadScheduledExecutor()
68 | scheduler.schedule(() => {
69 | var map = Map.empty[String, Int]
70 | val listener = new ConsumerRebalanceListener {
71 | override def onPartitionsRevoked(partitions: util.Collection[TopicPartition]): Unit =
72 | logger info s"The following partitions are revoked: ${partitions.asScala.mkString(", ")}"
73 |
74 | override def onPartitionsAssigned(partitions: util.Collection[TopicPartition]): Unit =
75 | logger info s"The following partitions are assigned: ${partitions.asScala.mkString(", ")}"
76 | }
77 |
78 | val consumer2 = new KafkaConsumer[Key, Rating](consumerConfig2, keyDeserializer, ratingDeserializer)
79 | consumer2.subscribe(config.ratingTopicName :: Nil asJava, listener)
80 |
81 | while (!scheduler.isShutdown) {
82 | Thread.sleep(2000)
83 | Try {
84 | val records = consumer2.poll(config.pollingTimeout.toJava)
85 | records.iterator().asScala.toVector
86 | }.map {
87 | _.groupBy(_.value().user)
88 | .view
89 | .mapValues(_.size)
90 | .toMap
91 |
92 | }.recover { case error =>
93 | logger.error("something wrong happened", error)
94 | Map.empty[String, Int]
95 |
96 | }.foreach { recordMap =>
97 | map = map |+| recordMap
98 | if(map.nonEmpty) print(s"\rPolled ${printRating(map)}.")
99 | }
100 | }
101 |
102 | println()
103 | logger warn s"Closing consumer n°2 now!"
104 | Try(consumer2.close())
105 | .recover { case error => logger.error("Failed to close the kafka consumer", error) }
106 |
107 | }, 1, TimeUnit.SECONDS)
108 |
109 | sys.addShutdownHook {
110 | scheduler.shutdown()
111 | scheduler.awaitTermination(10, TimeUnit.SECONDS)
112 | }
113 |
114 | }.recover {
115 | case failures =>
116 |
117 | logger error "Failed to parse the configuration of the consumer application."
118 |
119 | failures.toList.foreach(err => logger.error(err.description))
120 |
121 | sys.exit(-1)
122 | }
123 |
124 | def printRating(map: Map[String, Int]): String = map
125 | .map { case (user, events) => s"$user: $events ⭐️" }
126 | .toList
127 | .sorted
128 | .mkString(", ")
129 | }
130 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/client/ProducingApp.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.client
2 |
3 | import cats.syntax.either._
4 |
5 | import java.time.Instant
6 | import java.util.TimerTask
7 | import java.util.UUID.randomUUID
8 | import java.util.concurrent.Future
9 |
10 | import fr.ps.eng.kafka.app4s.client.Conf.ProducerAppConfig
11 | import fr.ps.eng.kafka.app4s.common._
12 | import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata}
13 | import org.apache.kafka.common.header.Header
14 | import org.apache.kafka.common.header.internals.{RecordHeader, RecordHeaders}
15 | import org.apache.kafka.common.serialization.Serializer
16 | import org.slf4j.{Logger, LoggerFactory}
17 | import pureconfig.ConfigSource
18 | import pureconfig.generic.auto._
19 |
20 | import scala.concurrent.duration._
21 | import scala.jdk.CollectionConverters._
22 | import scala.util.{Random, Success, Try}
23 |
24 | /**
25 | * Created by loicmdivad.
26 | */
27 | object ProducingApp extends App with HelperFunctions with HelperSerdes {
28 |
29 | lazy val logger: Logger = LoggerFactory.getLogger(getClass)
30 |
31 | val configFile = sys.props.get("config.file").getOrElse(getClass.getResource("/producer.conf").getPath)
32 |
33 | ConfigSource.file(configFile).load[ProducerAppConfig].map { config =>
34 | // (0) configure serializers and producers
35 | val keySerializer: Serializer[Key] = reflectionAvroSerializer4S[Key]
36 | val tvShowSerializer: Serializer[TvShow] = reflectionAvroSerializer4S[TvShow]
37 | val ratingSerializer: Serializer[Rating] = reflectionAvroSerializer4S[Rating]
38 |
39 | keySerializer.configure(config.serializerConfig.toMap.asJava, true)
40 | tvShowSerializer :: ratingSerializer :: Nil foreach (_.configure(config.serializerConfig.toMap.asJava, false))
41 |
42 | val baseConfig: Map[String, AnyRef] = config.producerConfig.toMap
43 |
44 | val producerConfig1 = baseConfig ++ Map("client.id" -> "client1", "linger.ms" -> s"${(1 minute) toMillis}") asJava
45 | val producer1 = new KafkaProducer[Key, TvShow](producerConfig1, keySerializer, tvShowSerializer)
46 |
47 | val producerConfig2 = baseConfig ++ Map("client.id" -> "client2", "retries" -> "0") asJava
48 | val producer2 = new KafkaProducer[Key, Rating](producerConfig2, keySerializer, ratingSerializer)
49 |
50 | val producerConfig3 = baseConfig ++ Map("client.id" -> "client3", "transactional.id" -> "client3") asJava
51 | val producer3 = new KafkaProducer[Key, Rating](producerConfig3, keySerializer, ratingSerializer)
52 |
53 | // (1) batching the complete tv show collection
54 | logger info "Batching the tv show referential dataset now ..."
55 | val _: Vector[Future[RecordMetadata]] = Dataset.AllTvShows.toVector.map { case (showKey, showValue) =>
56 | val record = new ProducerRecord[Key, TvShow](config.tvShowTopicName, showKey, showValue)
57 | producer1 send record
58 | }
59 |
60 | Try {
61 | producer1.flush()
62 | producer1.close()
63 | logger info "Successfully produced the complete TV show collection."
64 | logger info s"${Dataset.AllTvShows size} TV shows from the " +
65 | "Netflix, Hulu and Disney+ catalogs are available to consumers."
66 | }.recover {
67 | case error: InterruptedException => logger.error("failed to flush and close the producer", error)
68 | case error => logger.error("An unexpected error occurs while producing the show collection", error)
69 | }
70 |
71 | // (2) continuously send new records
72 | val generator = new java.util.Timer()
73 | val tasks: Seq[TimerTask] = (0 until config.generatorParallelismLevel) map { genId =>
74 | val userId = s"user-${randomUUID().toString.take(8)}"
75 | new java.util.TimerTask {
76 | override def run(): Unit = {
77 | val rating: Short = Random.nextInt(5).toShort
78 | val eventTime: Long = Instant.now.toEpochMilli
79 | val (showKey, showValue) = getRandomTvShow(Dataset.AllTvShows)
80 | val genHeader = new RecordHeader("generator-id", s"GENERATOR-$genId" getBytes)
81 | val showHeader =
82 | new RecordHeader("details", s"Rating for ${showValue name} (on ${showValue platform})" getBytes)
83 |
84 | val randomRecord: ProducerRecord[Key, Rating] = new ProducerRecord[Key, Rating](
85 | config.ratingTopicName,
86 | null, // let the defaultPartitioner do its job
87 | eventTime,
88 | showKey,
89 | Rating(userId, rating),
90 | new RecordHeaders(Iterable[Header](genHeader, showHeader) asJava)
91 | )
92 |
93 | producer2 send randomRecord
94 | }
95 | }
96 | }
97 |
98 | logger info s"Starting the rating generator with ${tasks.length} threads."
99 | tasks foreach (generator.schedule(_, randomDelay(2 second), randomPeriod(config.generatorPeriod)))
100 |
101 | // (3) open, perform and close a transaction
102 | val producerCallback: Callback = new Callback {
103 | override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit = Option(exception)
104 |
105 | .map(error => logger.error("failed to produce a record due to: ", error))
106 |
107 | .getOrElse(logger info s"Successfully produced a new record to kafka: ${
108 | s"topic: ${metadata.topic()}, partition: ${metadata.partition()}, offset: ${metadata.offset()}"
109 | }")
110 | }
111 |
112 | producer3.initTransactions()
113 | var exitFlag: Either[String, Try[_]] = Right(Success(()))
114 |
115 | sys.addShutdownHook {
116 | logger info "Stopping all the generator threads ..."
117 | generator.cancel()
118 | Try {
119 | producer2 :: producer3 :: Nil foreach { producer =>
120 | producer.flush()
121 | producer.close()
122 | }
123 | }
124 | logger info "Closing the producer app now!"
125 | }
126 |
127 | while (!exitFlag.swap.contains("exit")) {
128 | logger info s"Asking for 3 ratings to perform a transaction:"
129 |
130 | exitFlag = for {
131 | record1 <- getConsoleRating(config)
132 | record2 <- getConsoleRating(config)
133 | record3 <- getConsoleRating(config)
134 | } yield {
135 | Try {
136 | producer3.beginTransaction()
137 | producer3.send(record1, producerCallback)
138 | producer3.send(record2, producerCallback)
139 | producer3.send(record3, producerCallback)
140 | producer3.commitTransaction()
141 | logger info "A transaction of 3 records has been completed."
142 | }.recover {
143 | case _ =>
144 | logger error "A failure occurred during the transaction."
145 | producer3.abortTransaction()
146 | }
147 | }
148 |
149 | exitFlag
150 | .left
151 | .filterToOption(_ != "exit")
152 | .foreach { input =>
153 | logger warn s"$input was unexpected: type [0-5] or 'pass' or 'exit'."
154 | }
155 |
156 | }
157 |
158 | }.recover {
159 | case failures =>
160 |
161 | logger error "Failed to parse the configuration of the producer application."
162 |
163 | failures.toList.foreach(err => logger.error(err.description))
164 |
165 | sys.exit(-1)
166 | }
167 |
168 | def randomDelay(max: FiniteDuration): Long = Math.abs(Random.nextLong(max toMillis))
169 |
170 | def randomPeriod(max: FiniteDuration): Long = Math.abs(max toMillis)
171 |
172 | def getRandomTvShow(tvShowMap: Map[Key, TvShow]): (Key, TvShow) =
173 | tvShowMap.toList(Random.nextInt(tvShowMap.size))
174 |
175 | def getConsoleRating(config: ProducerAppConfig): Either[String, ProducerRecord[Key, Rating]] = {
176 | def wrap(key: Key, short: Short) =
177 | new ProducerRecord(config.ratingTopicName, key, Rating("console", short))
178 |
179 | var input = "pass"
180 | var showId: Key = null
181 | var show: TvShow = null
182 | while (input equals "pass") {
183 | val pair = getRandomTvShow(Dataset.`200TvShows`)
184 | showId = pair._1
185 | show = pair._2
186 | print(s"""Have you watched "${show name}" on ${show platform}? how was it (from 0 to 5)?\n> """)
187 | input = scala.io.StdIn.readLine()
188 | }
189 |
190 | Try(input.toShort)
191 | .filter(_ <= 5)
192 | .map(wrap(showId, _))
193 | .toEither
194 | .left
195 | .map(_ => input)
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/common/Dataset.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import kantan.csv._
4 | import kantan.csv.ops._
5 | /**
6 | * Created by loicmdivad.
7 | */
8 | object Dataset {
9 |
10 | val AllTvShows: Map[Key, TvShow] = load("/dataset.csv")
11 |
12 | val `200TvShows`: Map[Key, TvShow] = load("/extract.csv")
13 |
14 | private def load(filePath: String) = getClass.getResource(filePath).toURI
15 | .asCsvReader[TvShow](rfc.withoutHeader)
16 | .filter(_.isRight)
17 | .collect(_.toTry.get)
18 | .map(show => (Key(sha1(show.name)), show))
19 | .toVector.toMap
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/common/HelperFunctions.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import java.util.Properties
4 |
5 | import com.typesafe.config.Config
6 |
7 | import scala.jdk.CollectionConverters._
8 |
9 | /**
10 | * Created by loicmdivad.
11 | */
12 | trait HelperFunctions {
13 |
14 | implicit class configMapperOps(config: Config) {
15 |
16 | def toMap: Map[String, AnyRef] = config
17 | .entrySet()
18 | .asScala
19 | .map(pair => (pair.getKey, config.getAnyRef(pair.getKey)))
20 | .toMap
21 |
22 | def toProperties: Properties = {
23 | val properties = new Properties()
24 | properties.putAll(config.toMap.asJava)
25 | properties
26 | }
27 | }
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/common/HelperSerdes.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import com.sksamuel.avro4s.RecordFormat
4 | import io.confluent.kafka.serializers.KafkaAvroSerializer
5 | import io.confluent.kafka.serializers.KafkaAvroDeserializer
6 | import org.apache.avro.generic.IndexedRecord
7 | import org.apache.kafka.common.serialization.{Deserializer, Serializer}
8 | /**
9 | * Created by loicmdivad.
10 | */
11 | trait HelperSerdes {
12 |
13 | implicit lazy val keyFormat: RecordFormat[Key] = RecordFormat[Key]
14 | implicit lazy val tvShowFormat: RecordFormat[TvShow] = RecordFormat[TvShow]
15 | implicit lazy val ratingFormat: RecordFormat[Rating] = RecordFormat[Rating]
16 |
17 | def reflectionAvroSerializer4S[T: RecordFormat]: Serializer[T] = new Serializer[T] {
18 | val inner = new KafkaAvroSerializer()
19 |
20 | override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit =
21 | inner.configure(configs, isKey)
22 |
23 | override def serialize(topic: String, maybeData: T): Array[Byte] = Option(maybeData)
24 | .map(data => inner.serialize(topic, implicitly[RecordFormat[T]].to(data)))
25 | .getOrElse(Array.emptyByteArray)
26 |
27 | override def close(): Unit = inner.close()
28 | }
29 |
30 | def reflectionAvroDeserializer4S[T: RecordFormat]: Deserializer[T] = new Deserializer[T] {
31 | val inner = new KafkaAvroDeserializer()
32 |
33 | override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit =
34 | inner.configure(configs, isKey)
35 |
36 | override def deserialize(topic: String, maybeData: Array[Byte]): T = Option(maybeData)
37 | .filter(_.nonEmpty)
38 | .map { data =>
39 | implicitly[RecordFormat[T]]
40 | .from(inner.deserialize(topic, data).asInstanceOf[IndexedRecord])
41 | }
42 | .getOrElse(null.asInstanceOf[T])
43 |
44 | override def close(): Unit = inner.close()
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
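
For reference, a minimal round-trip sketch of these reflection-based serdes (an illustration, not a file of the repo; it assumes the `mock://` schema-registry URL scheme provided by `kafka-avro-serializer`, the same one `HelperSerdesSpec` below relies on):

```scala
import scala.jdk.CollectionConverters._
import com.sksamuel.avro4s.RecordFormat
import fr.ps.eng.kafka.app4s.common._

object SerdeRoundTrip extends App with HelperSerdes {
  // a hypothetical record type, just for this sketch
  case class Sample(a: String, b: Int)
  implicit val sampleFormat: RecordFormat[Sample] = RecordFormat[Sample]

  // both clients point at the same in-memory mock registry scope
  val conf = Map("schema.registry.url" -> "mock://local").asJava
  val serializer = reflectionAvroSerializer4S[Sample]
  val deserializer = reflectionAvroDeserializer4S[Sample]
  serializer.configure(conf, false)
  deserializer.configure(conf, false)

  // serialize to Avro bytes, then decode back to the case class
  val bytes = serializer.serialize("demo-topic", Sample("hello", 1))
  println(deserializer.deserialize("demo-topic", bytes)) // Sample(hello,1)
}
```
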
/src/main/scala/fr/ps/eng/kafka/app4s/common/Key.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import com.sksamuel.avro4s.AvroName
4 |
5 | /**
6 | * Created by loicmdivad.
7 | */
8 | case class Key(@AvroName("show_id") showId: String)
9 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/common/Platform.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import enumeratum._
4 |
5 | /**
6 | * Created by loicmdivad.
7 | */
8 | sealed trait Platform extends EnumEntry
9 |
10 | object Platform extends Enum[Platform] {
11 |
12 | case object Netflix extends Platform
13 | case object Hulu extends Platform
14 | case object PrimeVideo extends Platform
15 | case object DisneyPlus extends Platform
16 |
17 | override def values: IndexedSeq[Platform] = Vector(Netflix, Hulu, PrimeVideo, DisneyPlus)
18 | }
19 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/common/Rating.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | /**
4 | * Created by loicmdivad.
5 | */
6 | case class Rating(user: String, value: Short)
7 |
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/common/TvShow.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import kantan.csv._
4 | import kantan.csv.enumeratum._
5 |
6 | /**
7 | * Created by loicmdivad.
8 | */
9 | case class TvShow(platform: Platform, name: String, releaseYear: Int, imdb: Option[Double])
10 |
11 | object TvShow {
12 |
13 | implicit val tvShowRowDecoder: RowDecoder[TvShow] = RowDecoder.ordered {
14 | (platform: Platform, name: String, releaseYear: Int, imdb: Option[Double]) =>
15 | TvShow(platform, name, releaseYear, imdb)
16 | }
17 | }
--------------------------------------------------------------------------------
/src/main/scala/fr/ps/eng/kafka/app4s/common/package.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s
2 |
3 | import java.nio.charset.StandardCharsets
4 | import java.security.MessageDigest
5 | import java.util.Base64
6 |
7 | /**
8 | * Created by loicmdivad.
9 | */
10 | package object common {
11 |
12 | val Base64Encoder: Base64.Encoder = java.util.Base64.getEncoder
13 | val SHA1Digest: MessageDigest = java.security.MessageDigest.getInstance("SHA-1")
14 |
15 | def sha1(name: String): String =
16 | new String(Base64Encoder.encode(SHA1Digest.digest(name.getBytes)), StandardCharsets.UTF_8)
17 | }
18 |
--------------------------------------------------------------------------------
/src/test/resources/application.conf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DivLoic/kafka-application4s/c0b72a03f0ad085e3e811d825706005a673838d2/src/test/resources/application.conf
--------------------------------------------------------------------------------
/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%-6relative %-5level %logger {35} - %msg \n</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="INFO">
10 |         <appender-ref ref="STDOUT"/>
11 |     </root>
12 |
13 | </configuration>
--------------------------------------------------------------------------------
/src/test/scala/fr/ps/eng/kafka/app4s/DemoTestProvider.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s
2 |
3 | import java.time.{LocalDate, ZoneId}
4 |
5 | import org.scalacheck.{Arbitrary, Gen}
6 | import org.scalacheck.Arbitrary.arbDate
7 | import org.scalacheck.rng.Seed
8 |
9 | trait DemoTestProvider {
10 | implicit val arbLocalDate: Arbitrary[LocalDate] = Arbitrary
11 | .apply(arbDate.arbitrary.map(_.toInstant.atZone(ZoneId.systemDefault()).toLocalDate()))
12 | def parameter: Gen.Parameters = Gen.Parameters.default
13 | def sampleSize: Int = 50
14 | def retries: Int = 150
15 | def seed: Seed = Seed.random()
16 | }
17 |
--------------------------------------------------------------------------------
/src/test/scala/fr/ps/eng/kafka/app4s/client/ConsumingAppSpec.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.client
2 |
3 | /**
4 | * Created by loicmdivad.
5 | */
6 | class ConsumingAppSpec {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/src/test/scala/fr/ps/eng/kafka/app4s/client/ProducingAppSpec.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.client
2 |
3 | /**
4 | * Created by loicmdivad.
5 | */
6 | class ProducingAppSpec {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/src/test/scala/fr/ps/eng/kafka/app4s/common/DatasetSpec.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import org.scalatest.flatspec.AnyFlatSpec
4 |
5 | class DatasetSpec extends AnyFlatSpec {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/src/test/scala/fr/ps/eng/kafka/app4s/common/HelperFunctionsSpec.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import com.typesafe.config.{Config, ConfigFactory}
4 | import fr.ps.eng.kafka.app4s.DemoTestProvider
5 | import org.scalatest.GivenWhenThen
6 | import org.scalatest.flatspec.AnyFlatSpec
7 | import org.scalatest.matchers.should.Matchers
8 |
9 | class HelperFunctionsSpec extends AnyFlatSpec
10 | with Matchers
11 | with GivenWhenThen
12 | with DemoTestProvider
13 | with HelperSerdes
14 | with HelperFunctions {
15 |
16 | val configFile: String = s"""
17 | |config1 = true
18 | |config2 = 42
19 | |config3 = 24 hours
20 | |config4 = {
21 | | config41 = "bla"
22 | | config42 = "di"
23 | | config43 = "bla"
24 | |}
25 | |config5 = ["foo", "bar", "mut", "mut"]
26 | |config6 = {
27 | | config7 {
28 | | config71 = localhost
29 | | config71 = $${?FAKE_VARIABLE}
30 | | config72 = 80
31 | | }
32 | |}
33 | |""".stripMargin
34 |
35 |
36 | val config: Config = ConfigFactory.parseString(configFile).resolve()
37 |
38 | "configMapperOps#toMap" should "convert a config into a map" in {
39 | Given("a typesafe config object")
40 | When("when the typesafe configuration is converted to map")
41 | val result = config.toMap
42 |
43 | Then("the configuration can be read as a map")
44 | result.get("config1") should contain(true)
45 | result.get("config2") should contain(42)
46 | result.get("config3") should contain("24 hours")
47 | result.get("config4") shouldBe empty
48 | result.get("config4.config41") should contain("bla")
49 | result.get("config4.config42") should contain("di")
50 | result.get("config4.config43") should contain("bla")
51 | result.get("config5") should contain (
52 | java.util.Arrays.asList("foo", "bar", "mut", "mut"))
53 | result.get("config6") shouldBe empty
54 | result.get("config6.config7") shouldBe empty
55 | result.get("config6.config7") shouldBe empty
56 | result.get("config6.config7.config71") should contain("localhost")
57 | result.get("config6.config7.config72") should contain(80)
58 | }
59 |
60 |
61 | "configMapperOps#toProperties" should "convert a config into a property" in {
62 | Given("a typesafe config object")
63 | When("when the typesafe configuration is converted to property")
64 | val result = config.toProperties
65 |
66 | Then("the configuration can be read as properties")
67 | result.get("config1") shouldBe true
68 | result.get("config2") shouldBe 42
69 | result.get("config3") shouldBe "24 hours"
70 | result.get("config4") should be (null)
71 | result.get("config4.config41") shouldBe "bla"
72 | result.get("config4.config42") shouldBe "di"
73 | result.get("config4.config43") shouldBe "bla"
74 | result.get("config5") shouldBe java.util.Arrays
75 | .asList("foo", "bar", "mut", "mut")
76 | result.get("config6") should be (null)
77 | result.get("config6.config7") should be (null)
78 | result.get("config6.config7") should be (null)
79 | result.get("config6.config7.config71") shouldBe "localhost"
80 | result.get("config6.config7.config72") shouldBe 80
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
/src/test/scala/fr/ps/eng/kafka/app4s/common/HelperSerdesSpec.scala:
--------------------------------------------------------------------------------
1 | package fr.ps.eng.kafka.app4s.common
2 |
3 | import java.time.LocalDate
4 |
5 | import com.sksamuel.avro4s.RecordFormat
6 | import fr.ps.eng.kafka.app4s.DemoTestProvider
7 | import org.apache.kafka.common.header.Header
8 | import org.apache.kafka.common.header.internals.RecordHeaders
9 | import org.apache.kafka.common.serialization.{Deserializer, Serializer}
10 | import org.scalacheck.Gen
11 | import org.scalatest.GivenWhenThen
12 | import org.scalatest.flatspec.AnyFlatSpec
13 | import org.scalatest.matchers.should.Matchers
14 |
15 | import scala.jdk.CollectionConverters._
16 | import scala.util.{Success, Try}
17 |
18 | class HelperSerdesSpec extends AnyFlatSpec
19 | with Matchers
20 | with GivenWhenThen
21 | with DemoTestProvider
22 | with HelperSerdes {
23 |
24 | val testTopic = "TEST-TOPIC"
25 | val testHeaders = new RecordHeaders(Array.empty[Header])
26 | val testSerdeConf = Map("schema.registry.url" -> "mock://notused/")
27 |
28 | "reflectionAvroSerializer4S" should "create a working serializer" in {
29 | Given("a scala case class")
30 | case class TestClassFoo(a: String, b: Int, c: Boolean)
31 |
32 | And("its corresponding avro4s formatter")
33 | implicit val formatter: RecordFormat[TestClassFoo] = RecordFormat[TestClassFoo]
34 |
35 | And("arbitrary instances of the given case class")
36 | val instances: List[TestClassFoo] = Gen
37 | .listOfN(sampleSize, Gen.resultOf(TestClassFoo))
38 | .pureApply(parameter, seed, retries)
39 |
40 | When("the instances are serialized")
41 | val serializer: Serializer[TestClassFoo] = reflectionAvroSerializer4S[TestClassFoo]
42 | serializer.configure(testSerdeConf.asJava, false)
43 |
44 | val result = instances.map(serializer.serialize(testTopic, testHeaders, _))
45 |
46 | Then("byte arrays are successfully generated")
47 | result.foreach(datum => datum should not be empty)
48 | }
49 |
50 | it should "handle null values" in {
51 | Given("a scala case class")
52 | case class TestClassBaz(a: String, b: Double, c: LocalDate)
53 |
54 | And("its corresponding avro4s formatter")
55 | implicit val formatter: RecordFormat[TestClassBaz] = RecordFormat[TestClassBaz]
56 |
57 | And("arbitrary instances of the given case class")
58 | val serializer: Serializer[TestClassBaz] = reflectionAvroSerializer4S[TestClassBaz]
59 | serializer.configure(testSerdeConf.asJava, false)
60 |
61 | When("a null value instance is serialized")
62 | val nullRecord: TestClassBaz = null
63 | val result: Array[Byte] = serializer.serialize(testTopic, testHeaders, nullRecord)
64 |
65 | Then("an empty byte array is returned")
66 | result shouldBe empty
67 | }
68 |
69 | "reflectionAvroDeserializer4S" should "create a working deserializer" in {
70 | Given("a scala case class")
71 | case class TestClassBar(a: String, b: Double, c: LocalDate)
72 |
73 | And("its corresponding avro4s formatter")
74 | implicit val formatter: RecordFormat[TestClassBar] = RecordFormat[TestClassBar]
75 | val serializer: Serializer[TestClassBar] = reflectionAvroSerializer4S[TestClassBar]
76 | serializer.configure(testSerdeConf.asJava, false)
77 |
78 | And("a collection of instances serialized")
79 | val instances = Gen
80 | .listOfN(sampleSize, Gen.resultOf(TestClassBar))
81 | .pureApply(parameter, seed, retries)
82 | .map(serializer.serialize(testTopic, testHeaders, _))
83 |
84 | When("the collection elements are deserialized")
85 | val deserializer: Deserializer[TestClassBar] = reflectionAvroDeserializer4S[TestClassBar]
86 | deserializer.configure(testSerdeConf.asJava, false)
87 |
88 | val result: List[Try[TestClassBar]] = instances
89 | .map(instance => Try(deserializer.deserialize(testTopic, instance)))
90 |
91 | Then("scala instances are successfully generated")
92 | result.foreach(datum => datum shouldBe a[Success[_]])
93 | result.map(_.get.c).foreach(datum => datum.isBefore(LocalDate.MAX) shouldBe true)
94 | }
95 |
96 | it should "handle empty arrays" in {
97 | Given("a scala case class")
98 | case class TestClassBaz(a: String, b: Double, c: LocalDate)
99 |
100 | And("its corresponding avro4s formatter")
101 | implicit val formatter: RecordFormat[TestClassBaz] = RecordFormat[TestClassBaz]
102 |
103 | And("its corresponding deserializer")
104 | val deserializer: Deserializer[TestClassBaz] = reflectionAvroDeserializer4S[TestClassBaz]
105 | deserializer.configure(testSerdeConf.asJava, false)
106 |
107 | When("an empty byte array is deserialized")
108 | val result: TestClassBaz = deserializer.deserialize(testTopic, Array.empty[Byte])
109 |
110 | Then("a null value is returned")
111 | result should be (null)
112 | }
113 | }
114 |
--------------------------------------------------------------------------------