├── .gitignore ├── LICENSE ├── README.adoc ├── build.gradle ├── data-generator ├── .gitignore ├── build.gradle └── src │ └── main │ ├── java │ └── io │ │ └── confluent │ │ └── developer │ │ └── ccloud │ │ └── demo │ │ └── kstream │ │ ├── DataGeneratorApplication.java │ │ ├── data │ │ └── domain │ │ │ ├── account │ │ │ ├── Account.java │ │ │ ├── AccountRepository.java │ │ │ ├── AccountService.java │ │ │ └── AccountServiceImpl.java │ │ │ └── transaction │ │ │ ├── Transaction.java │ │ │ ├── TransactionController.java │ │ │ ├── TransactionRequest.java │ │ │ ├── TransactionService.java │ │ │ └── TransactionServiceImpl.java │ │ └── topic │ │ ├── AccountTopicConfig.java │ │ ├── TopicConfig.java │ │ └── TransactionRequestTopicConfig.java │ └── resources │ ├── application-ccloud.properties │ ├── application-postgresql.properties │ ├── application.properties │ ├── db │ └── migration │ │ └── postgresql │ │ └── V0_1_0__Account.sql │ ├── lombol.config │ └── topics-defaults.properties ├── docker-compose └── local │ └── docker-compose.yml ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── images ├── ccloud-stack-resources.png ├── cloud-ui-messages.jpg ├── data-generator.png ├── jdbc-source-connector.png ├── transaction-failed.png ├── transaction-service.png ├── transaction-statement-overview.png ├── transaction-statements.png └── transaction-success.png ├── kstreams-demo-kotlin ├── .gitignore ├── build.gradle.kts ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle.kts └── src │ ├── main │ ├── kotlin │ │ └── io │ │ │ └── confluent │ │ │ └── developer │ │ │ └── ccloud │ │ │ └── demo │ │ │ └── kstreams │ │ │ ├── DemoApplication.kt │ │ │ ├── KStreamsConfig.kt │ │ │ ├── TransactionTransformer.kt │ │ │ ├── Util.kt │ │ │ ├── domain │ │ │ ├── Funds.kt │ │ │ ├── Transaction.kt │ │ │ └── TransactionResult.kt │ │ │ └── topic │ │ │ ├── FundsStoreConfig.kt │ │ │ ├── TopicConfig.kt │ │ │ ├── TransactionFailedTopicConfig.kt │ │ │ ├── TransactionRequestTopicConfig.kt │ │ │ └── TransactionSuccessTopicConfig.kt │ └── resources │ │ ├── application-ccloud.properties │ │ ├── application.properties │ │ └── topics-defaults.properties │ └── test │ ├── kotlin │ └── io │ │ └── confluent │ │ └── developer │ │ └── ccloud │ │ └── demo │ │ └── kstreams │ │ ├── DemoApplicationTests.kt │ │ ├── KStreamConfigTest.kt │ │ └── TransactionTransformerTest.kt │ └── resources │ ├── application-default.properties │ └── logback-test.xml ├── kstreams-demo ├── .gitignore ├── build.gradle └── src │ ├── main │ ├── java │ │ └── io │ │ │ └── confluent │ │ │ └── developer │ │ │ └── ccloud │ │ │ └── demo │ │ │ └── kstream │ │ │ ├── KStreamConfig.java │ │ │ ├── KStreamDemoApplication.java │ │ │ ├── TransactionTransformer.java │ │ │ ├── domain │ │ │ ├── Funds.java │ │ │ ├── Transaction.java │ │ │ └── TransactionResult.java │ │ │ └── topic │ │ │ ├── FundsStoreConfig.java │ │ │ ├── TopicConfig.java │ │ │ ├── TransactionFailedTopicConfig.java │ │ │ ├── TransactionRequestTopicConfig.java │ │ │ └── TransactionSuccessTopicConfig.java │ └── resources │ │ ├── application-ccloud.properties │ │ ├── application.properties │ │ ├── lombol.config │ │ └── topics-defaults.properties │ └── test │ ├── java │ └── io │ │ └── confluent │ │ └── developer │ │ └── ccloud │ │ └── demo │ │ └── kstream │ │ ├── KStreamConfigTest.java │ │ └── TransactionTransformerTest.java │ └── resources │ ├── application-default.properties │ └── 
logback-test.xml ├── scripts ├── ccloud │ ├── ccloud-generate-cp-config.sh │ ├── ccloud_library.sh │ ├── ccloud_stack_create.sh │ ├── ccloud_stack_destroy.sh │ ├── docker-compose.yaml │ └── start_connect.sh ├── common │ ├── colors.sh │ └── helper.sh ├── connect │ ├── connector-status.sh │ └── deploy-jdbc-connector.sh └── generate-transaction.sh └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | .classpath 2 | .DS_Store 3 | .project 4 | .settings 5 | .vscode 6 | target 7 | tmp 8 | delta_configs 9 | stack-configs 10 | # Created by https://www.toptal.com/developers/gitignore/api/gradle 11 | # Edit at https://www.toptal.com/developers/gitignore?templates=gradle 12 | 13 | ### Gradle ### 14 | .gradle 15 | build/ 16 | 17 | # Ignore Gradle GUI config 18 | gradle-app.setting 19 | 20 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 21 | !gradle-wrapper.jar 22 | 23 | # Cache of project 24 | .gradletasknamecache 25 | 26 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 27 | # gradle/wrapper/gradle-wrapper.properties 28 | 29 | ### Gradle Patch ### 30 | **/build/ 31 | 32 | # End of https://www.toptal.com/developers/gitignore/api/gradle 33 | 34 | pom.xml -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Viktor Gamov 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.adoc: -------------------------------------------------------------------------------- 1 | = Workshop — Developing Event-driven Microservices with Spring Boot, Confluent Cloud, Kotlin, and Java. 2 | Viktor Gamov , © 2020 Confluent, Inc. 
2020-08-25
:revdate: 2020-08-25 19:22:48 -0600
:linkattrs:
:ast: *
:y: ✓
:n: ✘
:y: icon:check-sign[role="green"]
:n: icon:check-minus[role="red"]
:c: icon:file-text-alt[role="blue"]
:toc: auto
:toc-placement: auto
:toc-position: auto
:toc-title: Table of contents
:toclevels: 3
:idprefix:
:idseparator: -
:sectanchors:
:icons: font
:source-highlighter: highlight.js
:highlightjs-theme: idea
:experimental:
:imagesdir: ./images
ifndef::awestruct[]
:imagesdir: ../images
:awestruct-draft: false
:awestruct-layout: post
:awestruct-tags: []
:idprefix:
:idseparator: -
endif::awestruct[]

Developing Event-driven Microservices with Spring Boot, Confluent Cloud, Kotlin, and Java.

toc::[]

== Workshop prerequisites and setup

=== Prerequisites

Ensure you have the following toolset installed on your computer:

* https://confluent.cloud[Confluent Cloud]
+

TIP: You should have your login and password information handy after you sign up for Confluent Cloud.
The `ccloud` init script will ask you for your login information.

* https://docs.confluent.io/current/cloud/cli/install.html[Confluent Cloud CLI]
* Docker
** https://docs.docker.com/docker-for-mac/install/[Install Docker Desktop for macOS]
** Docker Compose (installed with Docker Desktop)
* Git
* https://jdk.dev[Java 11 (or later)]
* Your favorite Java IDE or even a text editor
** Personally, I recommend https://www.jetbrains.com/idea/[IntelliJ IDEA].


=== Setup

Before you proceed, be sure to complete the following steps:

.Getting the code
[source,shell script]
----
git clone https://github.com/gAmUssA/springone2020-workshop #<1>
cd springone2020-workshop #<2>
----
<1> Clone the repository
<2> Change directory to the workshop folder

== 0️⃣ Provisioning a Confluent Cloud cluster

[source,shell script]
----
$ cd scripts/ccloud
$ ccloud login --save #<1>
$ ./ccloud_stack_create.sh #<2>

This demo uses real, Confluent Cloud resources.
To avoid unexpected charges, carefully evaluate the cost of resources before launching the script and ensure all resources are destroyed after you are done running it.
Do you still want to run this script? [y/n] y
Do you also want to create a Confluent Cloud ksqlDB app (hourly charges may apply)? [y/n] y

Creating Confluent Cloud stack for service account demo-app-3067, ID: 103469.
Set Kafka cluster "lkc-oz98y" as the active cluster for environment "env-nx57d".

Waiting up to 720 seconds for Confluent Cloud cluster to be ready and for credentials to propagate
.....
Sleeping an additional 80 seconds to ensure propagation of all metadata
----
<1> Log in to your Confluent Cloud account.
<2> The ccloud-stack script provisions the Confluent Cloud resources for the workshop.

The script automatically provisions a Kafka cluster and a ksqlDB application.

Among other things, it generates a configuration file that we will pass to the `docker-compose` start command so the Kafka Connect container can connect to the cloud Kafka cluster.

When ready, move to the next section, where you will generate some referential data.

== 1️⃣ Loading referential data with Kafka Connect

To leverage the full power of stream processing, it is best to preload the required data into topics.
Kafka Streams and ksqlDB allow you to join and look up data from your events with any other topic.

This section of the workshop will set up a https://www.confluent.io/hub/confluentinc/kafka-connect-jdbc[Kafka Connect JDBC Source connector] instance that will synchronize any data from a PostgreSQL instance to an `account` topic in Kafka.

This exercise simulates a https://en.wikipedia.org/wiki/Change_data_capture[Change Data Capture pattern] where we bridge an existing data source to Kafka in real time.

image::jdbc-source-connector.png[JDBC Source Connector]

=== Start the JDBC connector in Docker

[source,shell script]
----
./start_connect.sh stack-configs/java-service-account-103523.config #<1>
----
<1> Replace the file name with the one matching the actual service account ID you got during the «Provisioning a Confluent Cloud cluster» step.

==== Start the Data Generator application

Within the workshop project, you will find a `data-generator` folder containing an application designed to generate random accounts in our PostgreSQL `Account` database.
This utility application generates `1000` test accounts by default.
The Data Generator also exposes a REST endpoint that will help us submit transaction requests to Kafka later during the workshop.

image::data-generator.png[Data Generator]

NOTE: Open a new terminal window in the workshop project folder.

.The data generator can be launched by running the following commands:
[source,shell script]
----
$ source ./scripts/ccloud/delta_configs/env.delta
$ ./gradlew :data-generator:build #<1>
$ java -jar data-generator/build/libs/data-generator-0.0.1-SNAPSHOT.jar #<2>
----
<1> To build.
<2> To run after the build.

NOTE: To run the Data Generator application in your IDE, launch the main method from
`src/main/java/io/confluent/developer/ccloud/demo/kstream/DataGeneratorApplication.java`.
Make sure you have environment variables set according to the `delta_configs/env.delta` file.

After the dataset is generated, you should see the following output:

----
2020-08-26 22:58:44.507  INFO 15959 --- [unt-Generator-1] Account Service : Generated account number 1000.
----

=== Start the connector

Open a new terminal window and run the following command from the root of the workshop project folder:

[source,shell script]
----
./scripts/connect/deploy-jdbc-connector.sh #<1>
----
<1> This command will start a connector instance.

[NOTE]
====
To validate the status of the connector, you can run

[source,shell script]
----
./scripts/connect/connector-status.sh
----
====

=== Monitor the account data flowing into Kafka from the Confluent Cloud user interface

. Access the Confluent Cloud user interface from https://confluent.cloud.
. From the main screen, navigate to the environment whose name starts with `demo-env-`.
. Inside this environment, you should see a cluster whose name starts with `demo-kafka-cluster-`.
On the left side, click on `Topics`.
. Click on the `account` topic and access the `messages` tab.
. Click on the `offset` textbox, type `0`, and press Enter to load all messages from partition `0` starting at offset `0`.

With the connector running, you should see `account` events in the user interface.

.Messages explorer in the Confluent Cloud user interface
image::cloud-ui-messages.jpg[c3-messages]

In the next section, we will implement a highly scalable stream processing application using Kafka Streams.

== 2️⃣ Implementing a Stream Processor with Kafka Streams

Now is the time to get into the heart of the action.
We will implement a Kafka Streams topology that atomically processes every request submitted to the `transaction-request` topic.

Within the *workshop* project folder, you will find a `kstreams-demo` subfolder representing a Kafka Streams application.
Spring Boot and the `spring-kafka` project handle the boilerplate code required to connect to Kafka.
This workshop will focus on writing a Kafka Streams topology with the processing logic for our use case.

[WARNING]
."Help me! I can't figure out what code to modify!"
====
If you are lost during the exercise, you can reset your codebase and switch to the `solution` branch to run the Stream Processor without coding the solution yourself.

Be careful before running the next command, as you will lose any uncommitted changes in your local git repository:

----
git reset --hard origin/master && git checkout solution
----
====

=== Atomic transaction processing with Kafka Streams

Our business requirement states that, for every request received, we must check whether the funds are sufficient before updating the balance of the account being processed.
We should never have two transactions being processed at the same time for the same account: that would create a race condition in which we have no guarantee the balance check happens before the funds are withdrawn.

_The Data Generator_ writes transaction requests to the Kafka topic with a key equal to the transaction's account number.
Therefore, we can be sure all messages of an account will be processed by a single thread of our Transaction Service, no matter how many instances are running concurrently.
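This per-key ordering guarantee comes from Kafka's partitioning model: records with the same key always land on the same partition, and each partition is consumed by exactly one stream thread at a time.
As a minimal illustration (not part of the workshop exercises), the following sketch shows the kind of keyed `send` the Data Generator performs in `TransactionServiceImpl`, using the account number as the record key:

[source,java]
----
import org.springframework.kafka.core.KafkaTemplate;

import io.confluent.developer.ccloud.demo.kstream.data.domain.transaction.Transaction;

public class KeyedSendSketch {

  private final KafkaTemplate<String, Transaction> template;

  public KeyedSendSketch(KafkaTemplate<String, Transaction> template) {
    this.template = template;
  }

  public void submit(Transaction transaction) {
    // The record key is the account number, so every transaction for a given
    // account hashes to the same partition and is processed strictly in order.
    template.send("transaction-request", transaction.getAccount(), transaction);
  }
}
----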
Kafka Streams won't commit any message offset until it completes our business logic for a transaction request.

image::transaction-service.png[Transaction Service]

==== Implement the Transaction Transformer

Because of the transactional nature of our stream processor, we need a specific Kafka Streams component named a `Transformer`.
This utility allows us to process events one by one while interacting with a `State Store`, another Kafka Streams component that helps us persist our account balances in a local instance of an embedded database (RocksDB).

Open the `io.confluent.developer.ccloud.demo.kstream.TransactionTransformer`
Java class and implement the `transform` function to return a `TransactionResult` based on the validity of the transaction request.
The `TransactionResult` contains a `success` flag set to `true` if the funds were successfully updated.

The `transform` method also updates the `store` State Store.
The class already has utility functions to help you execute our business logic.
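If you want a mental model before writing your own version, here is a minimal sketch of what a `transform` implementation can look like.
It is an illustration under assumptions, not the workshop solution: the `hasEnoughFunds`, `getFunds`, `depositFunds`, and `withdrawFunds` helpers are hypothetical stand-ins for the utility functions mentioned above, and the `TransactionResult` constructor shape is assumed from its fields.

[source,java]
----
// A minimal sketch, not the reference solution. Helper names are hypothetical.
public TransactionResult transform(Transaction transaction) {
  if (transaction.getType() == Transaction.Type.WITHDRAW
      && !hasEnoughFunds(transaction)) {
    // Insufficient funds: report a failed result and leave the store untouched.
    return new TransactionResult(transaction, getFunds(transaction.getAccount()),
                                 false, "INSUFFICIENT_FUNDS");
  }

  // Apply the movement and persist the new balance in the state store.
  Funds updatedFunds = transaction.getType() == Transaction.Type.DEPOSIT
      ? depositFunds(transaction)
      : withdrawFunds(transaction);

  return new TransactionResult(transaction, updatedFunds, true, null);
}
----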
[WARNING]
."Help me! I can't figure out what code to modify!"
====
If you are stuck on this exercise, you can switch to the `solution-transformer` branch:

[source,shell script]
----
git reset --hard origin/master && git checkout solution-transformer #<1>
----
<1> All of your local code changes will be lost.

====

==== Implement the Streaming Topology

In Kafka Streams, a `Topology` is the definition of your data flow.
It's a manifest of all operations and transformations to be applied to your data.

To start a stream processor, Kafka Streams only requires you to build a `Topology` and hand it over.
Kafka Streams will take care of managing the underlying consumers and producers.

The `io.confluent.developer.ccloud.demo.kstream.KStreamConfig` Java class already contains all the boilerplate code required by Kafka Streams to start our processor.
In this exercise, we will leverage a `StreamsBuilder` to define and instantiate a `Topology` that will handle our transaction processing.

Open the `io.confluent.developer.ccloud.demo.kstream.KStreamConfig.defineStreams` method and get ready to write your first Kafka Streams Topology.

==== Create a KStream from the source topic

Use the `stream` method of `streamsBuilder` to turn a topic into a `KStream`.

[source,java]
----
KStream<String, Transaction> transactionStream = streamsBuilder.stream("transaction-request");
----

==== Leverage the Transformer to process our requests

To inform Kafka Streams that we want to update the `funds` State Store atomically for every incoming request, we can leverage the `transformValues` operator to plug in our `TransactionTransformer`.
This operator requires us to specify the `funds` State Store that the `Transformer` will use.
This also instructs Kafka Streams to keep track of events from `transaction-request`, since they will result in a change of state for our store.

[source,java]
----
KStream<String, TransactionResult> resultStream =
    transactionStream.transformValues(this::transactionTransformer, "funds");
----

==== Redirect the transaction result to the appropriate topic

With a new derived stream containing `TransactionResult`, we can now use the information contained in the payload to feed a success or a failure topic.

We will achieve this by deriving two streams from our `resultStream`.
Each stream is built by applying the `filter` or `filterNot` operator with a predicate on the `success` flag from our `TransactionResult` payload.
With the two derived streams, we can explicitly call the `to` operator to instruct Kafka Streams to write the mutated events to their respective topics.

[source,java]
----
resultStream
    .filter(this::success)
    .to("transaction-success");

resultStream
    .filterNot(this::success)
    .to("transaction-failed");
----
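The snippets above reference two small helpers on the configuration class: a supplier that instantiates the `TransactionTransformer`, and a predicate that reads the `success` flag.
Their exact bodies live in the workshop code; the following hedged sketch only shows plausible implementations so the method references are easier to follow (the `TransactionTransformer` constructor argument is an assumption):

[source,java]
----
// Plausible helper implementations (sketch); the workshop code may differ.
private TransactionTransformer transactionTransformer() {
  // transformValues invokes this supplier to create a transformer per stream task.
  return new TransactionTransformer("funds");
}

private boolean success(String account, TransactionResult result) {
  // Predicate used by filter/filterNot to route results to the right topic.
  return result.isSuccess();
}
----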

==== The implemented `defineStreams` method

Use this reference implementation to validate that you have the right stream definition.

[source,java]
----
private void defineStreams(StreamsBuilder streamsBuilder) {
  KStream<String, Transaction> transactionStream =
      streamsBuilder.stream("transaction-request");

  KStream<String, TransactionResult> resultStream = transactionStream
      .transformValues(this::transactionTransformer, "funds");

  resultStream
      .filter(this::success)
      .to("transaction-success");

  resultStream
      .filterNot(this::success)
      .to("transaction-failed");
}
----

=== Running the Kafka Streams application

NOTE: If you are running the application from your Java IDE, launch the main method from `io.confluent.developer.ccloud.demo.kstream.KStreamDemoApplication`.

If you want to run with the CLI, you must build the application before launching it.

.To build the application, run the following command:
----
./gradlew :kstreams-demo:build
----

.To run the application, run the following command:
----
java -jar kstreams-demo/build/libs/kstreams-demo-0.0.1-SNAPSHOT.jar
----

=== Generate some transactions using the Data Generator endpoint

Ensure your Data Generator application is still running from the previous section.

The utility script `scripts/generate-transaction.sh` will let you generate transactions.
Generate a few transactions using the following commands:

----
scripts/generate-transaction.sh 1 DEPOSIT 100 CAD
scripts/generate-transaction.sh 1 DEPOSIT 200 CAD
scripts/generate-transaction.sh 1 DEPOSIT 300 CAD
scripts/generate-transaction.sh 1 WITHDRAW 300 CAD
scripts/generate-transaction.sh 1 WITHDRAW 10000 CAD

scripts/generate-transaction.sh 2 DEPOSIT 100 CAD
scripts/generate-transaction.sh 2 DEPOSIT 50 CAD
scripts/generate-transaction.sh 2 DEPOSIT 300 CAD
scripts/generate-transaction.sh 2 WITHDRAW 300 CAD
----

The script passes the following arguments, in order:

* The account number
* The type of operation (`DEPOSIT` or `WITHDRAW`)
* The amount
* The currency
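Under the hood, the script simply submits a transaction request to the Data Generator's `/transaction` REST endpoint (see `TransactionController` later in this repository), which then publishes it to the `transaction-request` topic.
As a rough, hedged illustration, here is a plain-Java equivalent of one script invocation; the `localhost:8080` host and port are an assumption based on Spring Boot defaults, not something the workshop text specifies:

[source,java]
----
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class GenerateTransactionSketch {

  public static void main(String[] args) throws Exception {
    // Same payload shape as TransactionRequest: account, amount, type, and
    // currency (the optional country field is omitted here).
    String json = "{\"account\":\"1\",\"amount\":100,"
                  + "\"type\":\"DEPOSIT\",\"currency\":\"CAD\"}";

    // Assumes the Data Generator runs on the default Spring Boot port 8080.
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/transaction"))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(json))
        .build();

    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println("HTTP " + response.statusCode());
  }
}
----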

=== Monitor the successful transaction results

. Access the Confluent Cloud user interface from https://confluent.cloud.
. From the main screen, navigate to the environment whose name starts with `demo-env-`.
. Inside the environment, you should see a cluster whose name starts with `demo-kafka-cluster-`.
On the left side, click on `Topics`.
. Click on the `transaction-success` topic and access the `messages` tab.
. Click on the `offset` textbox, type `0`, and press Enter to load all messages from partition 0 starting at offset 0.

You should see `transaction-success` events in the user interface.
If you don't see any messages, try your luck with partition 1 starting from offset 0.

// TODO
//image::transaction-success.png[transaction-success]

=== Monitor the failed transaction results

. Click on the `Topics` tab from the cluster navigation menu.

. Select the `transaction-failed` topic and access the `messages` tab.

. Click on the `offset` textbox, type `0`, and press Enter to load all messages from partition 0 starting at offset 0.

You should see `transaction-failed` events in the user interface.
If you don't see any messages, try your luck with partition 1 starting from offset 0.

// TODO
// image::transaction-failed.png[transaction-failed]

In the next section, we will explore how writing a stream processor can be simplified with ksqlDB.

== 3️⃣ Enrich transaction results with ksqlDB

In the first section of this workshop, we configured a JDBC Source connector to load all account details into an `account` topic.
In the next exercise, we will write a second stream processor to generate a detailed transaction statement enriched with account details.

Rather than implementing this new service as another Kafka Streams application, we will leverage ksqlDB to declare a stream processor that enriches our transaction data in real time with the referential data coming from the `account` topic.
The objective of this section is to show how you can use a SQL-like query language to create stream processors, comparable to Kafka Streams applications, without having to compile and run any custom software.

image::transaction-statement-overview.png[Transaction Statements]

[TIP]
.Connect to ksqlDB with the CLI
====
In this exercise, we're going to use the ksqlDB editor in the Confluent Cloud user interface, but you can also run the CLI using Docker.

[source,shell script]
----
docker run -it confluentinc/ksqldb-cli:0.11.0 ksql -u $KSQL_API_KEY -p $KSQL_API_SECRET $KSQLDB_ENDPOINT
----

====

=== Create the account table

// ksql config

// ccloud ksql app list
// ccloud kafka cluster list
// ccloud ksql app configure-acls lksqlc-7k6dj account --cluster lkc-nro63

ksqlDB is built on top of Kafka Streams.
As such, the `KStream` and `KTable` are both key constructs for defining stream processors.

The first step requires us to instruct ksqlDB that we wish to turn the `account` topic into a `Table`.
This table will allow us to join each `transaction-success` event with the latest `account` event of the underlying topic.
The sketch below shows a rough Kafka Streams equivalent of the statements we are about to run.
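Since ksqlDB compiles its statements into Kafka Streams topologies, it can help to see approximately what this section corresponds to in Java.
This is a loose sketch for intuition only, with value types simplified to `String`; it is not the code ksqlDB actually generates:

[source,java]
----
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;

public class KsqlEquivalentSketch {

  public static void main(String[] args) {
    StreamsBuilder builder = new StreamsBuilder();

    // CREATE TABLE ACCOUNT ...: a changelog-backed table keyed by account number.
    KTable<String, String> account = builder.table("account");

    // CREATE STREAM TRANSACTION_SUCCESS ...: an event stream over the topic.
    KStream<String, String> transactionSuccess = builder.stream("transaction-success");

    // CREATE STREAM TRANSACTION_STATEMENT AS SELECT ... LEFT JOIN ... EMIT CHANGES:
    // join each success event with the latest account row sharing the same key.
    transactionSuccess
        .leftJoin(account, (transaction, acc) -> transaction + " | " + acc)
        .to("TRANSACTION_STATEMENT");
  }
}
----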
Run the following command in your ksqlDB CLI terminal:

[source,sql]
----
CREATE TABLE ACCOUNT (
    numkey string PRIMARY KEY,
    number INT,
    cityAddress STRING,
    countryAddress STRING,
    creationDate BIGINT,
    firstName STRING,
    lastName STRING,
    numberAddress STRING,
    streetAddress STRING,
    updateDate BIGINT
) WITH (
    KAFKA_TOPIC = 'account',
    VALUE_FORMAT = 'JSON'
);
----

=== Create the transaction-success stream

Before we create the `Transaction Statement` stream processor, we must also inform ksqlDB that we wish to turn the `transaction-success` topic into a `Stream`.
Run the following command in your ksqlDB CLI terminal:

[source,sql]
----
CREATE STREAM TRANSACTION_SUCCESS (
    numkey string KEY,
    transaction STRUCT<guid STRING, account STRING, amount DOUBLE, type STRING, currency STRING, country STRING>,
    funds STRUCT<account STRING, balance DOUBLE>,
    success boolean,
    errorType STRING
) WITH (
    kafka_topic = 'transaction-success',
    value_format = 'json'
);
----

=== Create the transaction statement stream

Now that we have all the ingredients of our `Transaction Statement` stream processor, we can create a new stream derived from our `transaction-success` events paired with the latest data from the `account` topic.
We will instruct ksqlDB to create this new stream as a query.
By default, ksqlDB will publish any output to a new `TRANSACTION_STATEMENT` topic.
The select query specifies which events to subscribe to and which table to join to each notification.
The output of this new stream processor will be a mix of the transaction details coupled with all the matching account details.
The keys from `transaction-success` and `account` are used as the matching criteria for the `LEFT JOIN` clause.
`EMIT CHANGES` informs ksqlDB that the query is long-running and should be kept alive, just like a Kafka Streams application that stays up to process all incoming events.
Run the following command in your ksqlDB CLI prompt:

[source,sql]
----
CREATE STREAM TRANSACTION_STATEMENT AS
    SELECT *
    FROM TRANSACTION_SUCCESS
    LEFT JOIN ACCOUNT ON TRANSACTION_SUCCESS.numkey = ACCOUNT.numkey
    EMIT CHANGES;
----

=== Monitor the Transaction Statements in the Cloud user interface

. Access the Confluent Cloud user interface from https://confluent.cloud.

. From the main screen, navigate to the environment whose name starts with `demo-env-`.

. Inside this environment, you should see a cluster whose name starts with `demo-kafka-cluster-`.
. On the left side, click on `Topics`.
. Click on the `TRANSACTION_STATEMENT` topic and access the `messages` tab.
. Click on the `offset` textbox, type `0`, and press Enter to load all messages from partition 0 starting at offset `0`.

image::transaction-statements.png[c3-transaction-statements]

== ✅ It's a wrap!

Congratulations! Now you know how to build event-driven microservices using Spring Boot, Kafka Streams, and ksqlDB.

[WARNING]
.Don't forget to clean up
====

[source,shell script]
----
$ cd scripts/ccloud
$ docker-compose down -v #<1>
$ ./ccloud_stack_destroy.sh stack-configs/java-service-account-103523.config #<2>
----
<1> Stop the connector and the database.
<2> Destroy the ccloud stack to avoid unexpected charges.

====

== Special Thanks!

This workshop is based on the work of https://github.com/daniellavoie[Daniel Lavoie].
Much ♥️!

--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
buildscript {
  ext {
    springBootVersion = '2.3.3.RELEASE'
  }
  repositories {
    jcenter()
    maven { url "https://repo.spring.io/milestone" }
    maven { url "https://plugins.gradle.org/m2/" }
  }

  dependencies {
    classpath "org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}"
  }
}

allprojects {
  group = 'io.confluent.developer'
  version = '0.0.1-SNAPSHOT'
}

subprojects {
  apply plugin: 'java'
  apply plugin: 'maven-publish'
  apply plugin: 'org.springframework.boot'
  apply plugin: 'io.spring.dependency-management'

  repositories {
    jcenter()
  }

  sourceCompatibility = '11'

  publishing {
    publications {
      maven(MavenPublication) {
        from(components.java)
      }
    }
  }

  tasks.withType(JavaCompile) {
    options.compilerArgs = [
        '-Xlint:deprecation',
        '-Werror'
    ]
  }

  tasks.withType(Test) {
    testLogging {
      outputs.upToDateWhen { false }
      events 'standard_out'
      exceptionFormat = "full"
    }
  }
}

--------------------------------------------------------------------------------
/data-generator/.gitignore:
--------------------------------------------------------------------------------
.classpath
.DS_Store
.project
.settings
.vscode

target
--------------------------------------------------------------------------------
/data-generator/build.gradle:
--------------------------------------------------------------------------------
/*
 * This file was generated by the Gradle 'init' task.
 */

dependencies {
  implementation 'com.github.javafaker:javafaker:1.0.1'
  implementation 'org.springframework.boot:spring-boot-starter-data-jpa:2.3.0.RELEASE'
  implementation 'org.springframework.boot:spring-boot-starter-webflux:2.3.0.RELEASE'
  implementation 'org.springframework.kafka:spring-kafka:2.5.0.RELEASE'
  implementation 'org.apache.kafka:kafka-streams:2.5.0'
  implementation 'io.projectreactor:reactor-core:3.3.5.RELEASE'
  implementation 'org.postgresql:postgresql:42.2.12'
  implementation 'org.flywaydb:flyway-core:6.4.1'
  // Lombok aligned on a single version for compilation and annotation processing.
  compileOnly 'org.projectlombok:lombok:1.18.12'
  annotationProcessor 'org.projectlombok:lombok:1.18.12'
}

--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/DataGeneratorApplication.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream;

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.TopicBuilder;

import io.confluent.developer.ccloud.demo.kstream.data.domain.account.AccountService;
import io.confluent.developer.ccloud.demo.kstream.topic.AccountTopicConfig;
import io.confluent.developer.ccloud.demo.kstream.topic.TopicConfig;
import io.confluent.developer.ccloud.demo.kstream.topic.TransactionRequestTopicConfig;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@EnableKafka
@SpringBootApplication
@Slf4j(topic = "Data Generator")
@RequiredArgsConstructor
public class DataGeneratorApplication implements CommandLineRunner {

  private final AccountService accountService;

  public static void main(String[] args) {
    SpringApplication.run(DataGeneratorApplication.class, args);
  }

  // TOPICS
  @Bean
  NewTopic accountTopic(AccountTopicConfig topicConfig) {
    return createTopic(topicConfig);
  }

  @Bean
  NewTopic transactionRequestTopic(TransactionRequestTopicConfig topicConfig) {
    return createTopic(topicConfig);
  }

  private NewTopic createTopic(TopicConfig topicConfig) {
    log.info("Creating topic {}...", topicConfig.getName());
    return TopicBuilder.name(topicConfig.getName())
        .partitions(topicConfig.getPartitions())
        .replicas(topicConfig.getReplicationFactor())
        .compact()
        .build();
  }
  // end TOPIC

  @Override
  public void run(String... args) {
    accountService.generateAccounts();
  }

}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/account/Account.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.account;

import java.time.LocalDateTime;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Entity
@Data
@NoArgsConstructor
@AllArgsConstructor
public class Account {

  private int number;
  private String firstName;
  private String lastName;
  private String streetAddress;
  private String numberAddress;
  private String cityAddress;
  private String countryAddress;
  private LocalDateTime creationDate;
  private LocalDateTime updateDate;

  @Id
  public int getNumber() {
    return number;
  }

  @Column(nullable = false)
  public LocalDateTime getUpdateDate() {
    return updateDate;
  }

}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/account/AccountRepository.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.account;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;

public interface AccountRepository extends JpaRepository<Account, Integer> {

  Page<Account> findByNumberBetweenOrderByNumberDesc(int from, int to, Pageable pageable);
}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/account/AccountService.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.account;

public interface AccountService {

  void generateAccounts();
}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/account/AccountServiceImpl.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.account;

import com.github.javafaker.Faker;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;

import java.time.LocalDateTime;

import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

import static java.time.Duration.ofSeconds;
import static reactor.util.retry.Retry.backoff;

@Service
@Slf4j(topic = "Account Service")
public class AccountServiceImpl implements AccountService {

  private final AccountRepository accountRepository;
  private final int startingAccountNumber;
  private final int lastAccountNumber;

  public AccountServiceImpl(AccountRepository accountRepository,
                            @Value("${datafaker.account.starting-account-number:0}") int startingAccountNumber,
                            @Value("${datafaker.account.numbers:1000}") int numbersOfAccount) {
    this.accountRepository = accountRepository;

    this.startingAccountNumber = startingAccountNumber;
    this.lastAccountNumber = startingAccountNumber + numbersOfAccount;
  }

  @Override
  public void generateAccounts() {
    Faker faker = new Faker();

    Mono.<Page<Account>>create(
        sink -> sink.success(accountRepository.findByNumberBetweenOrderByNumberDesc(startingAccountNumber,
                                                                                    lastAccountNumber + 1,
                                                                                    PageRequest.of(0, 1))))
        // Resume from the highest existing account number, or start from scratch.
        .map(page -> page.getContent().size() == 0 ? startingAccountNumber
                                                   : page.getContent().get(0).getNumber())
        .filter(latestAccount -> latestAccount < lastAccountNumber)
        // Flux.range takes (start, count): generate only the missing account numbers.
        .flatMapMany(latestAccount -> Flux.range(latestAccount + 1, lastAccountNumber - latestAccount))
        .map(index -> new Account(index, faker.name().firstName(), faker.name().lastName(),
                                  faker.address().streetName(), faker.address().buildingNumber(),
                                  faker.address().city(),
                                  faker.address().country(), LocalDateTime.now(), LocalDateTime.now()))
        .buffer(10000)
        .doOnNext(accountRepository::saveAll)
        .doOnNext(accounts -> log.info("Generated account number {}.",
                                       accounts.get(accounts.size() - 1).getNumber()))
        .retryWhen(backoff(10, ofSeconds(1)))
        .doOnError(ex -> log.error("Failed to retry", ex))
        .subscribeOn(Schedulers.newSingle("Account-Generator"))
        .subscribe();
  }
}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/transaction/Transaction.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.transaction;

import java.math.BigDecimal;

import lombok.AllArgsConstructor;
import lombok.Data;

@Data
@AllArgsConstructor
public class Transaction {

  public enum Type {
    DEPOSIT, WITHDRAW
  }

  private final String guid;
  private final String account;
  private final BigDecimal amount;
  private final Type type;
  private final String currency;
  private final String country;

}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/transaction/TransactionController.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.transaction;

import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import reactor.core.publisher.Mono;

@RestController
@RequestMapping("/transaction")
public class TransactionController {

  private final TransactionService transactionService;

  public TransactionController(TransactionService transactionService) {
    this.transactionService = transactionService;
  }

  @PostMapping
  public Mono<Void> publishTransaction(@RequestBody TransactionRequest transactionRequest) {
    return transactionService.publishTransaction(transactionRequest);
  }
}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/transaction/TransactionRequest.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.transaction;

import java.math.BigDecimal;

import io.confluent.developer.ccloud.demo.kstream.data.domain.transaction.Transaction.Type;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class TransactionRequest {

  String account;
  BigDecimal amount;
  Type type;
  String currency;
  String country;
}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/transaction/TransactionService.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.transaction;

import reactor.core.publisher.Mono;

public interface TransactionService {

  Mono<Void> publishTransaction(TransactionRequest transactionRequest);
}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/data/domain/transaction/TransactionServiceImpl.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.data.domain.transaction;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.util.UUID;

import io.confluent.developer.ccloud.demo.kstream.topic.TransactionRequestTopicConfig;
import lombok.RequiredArgsConstructor;
import reactor.core.publisher.Mono;

@Service
@RequiredArgsConstructor
public class TransactionServiceImpl implements TransactionService {

  private final KafkaTemplate<String, Transaction> transactionTemplate;
  private final TransactionRequestTopicConfig transactionRequestTopicConfig;

  @Override
  public Mono<Void> publishTransaction(TransactionRequest transactionRequest) {

    // Publish the transaction keyed by account number so all events for one
    // account land on the same partition.
    return Mono
        .fromFuture(transactionTemplate
                        .send(transactionRequestTopicConfig.getName(), transactionRequest.getAccount(),
                              new Transaction(UUID.randomUUID().toString(), transactionRequest.getAccount(),
                                              transactionRequest.getAmount(), transactionRequest.getType(),
                                              transactionRequest.getCurrency(), transactionRequest.getCountry()))
                        .completable())
        .then();
  }

}
--------------------------------------------------------------------------------
/data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/AccountTopicConfig.java:
--------------------------------------------------------------------------------
package io.confluent.developer.ccloud.demo.kstream.topic;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

@Configuration
@ConfigurationProperties("topics.account")
public class AccountTopicConfig extends TopicConfig { 9 | 10 | } 11 | -------------------------------------------------------------------------------- /data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/TopicConfig.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.topic; 2 | 3 | import org.springframework.context.annotation.Configuration; 4 | import org.springframework.context.annotation.PropertySource; 5 | 6 | import lombok.Data; 7 | 8 | @Configuration 9 | @PropertySource(value = "classpath:topics-defaults.properties") 10 | @Data 11 | public abstract class TopicConfig { 12 | 13 | private String name; 14 | private boolean compacted = false; 15 | private int partitions = 1; 16 | private short replicationFactor = 1; 17 | 18 | } 19 | -------------------------------------------------------------------------------- /data-generator/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/TransactionRequestTopicConfig.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.topic; 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties; 4 | import org.springframework.context.annotation.Configuration; 5 | 6 | @Configuration 7 | @ConfigurationProperties("topics.transaction-request") 8 | public class TransactionRequestTopicConfig extends TopicConfig { 9 | 10 | } 11 | -------------------------------------------------------------------------------- /data-generator/src/main/resources/application-ccloud.properties: -------------------------------------------------------------------------------- 1 | # CCloud 2 | # Kafka 3 | # common configs 4 | spring.kafka.properties.ssl.endpoint.identification.algorithm=https 5 | spring.kafka.properties.sasl.mechanism=PLAIN 6 | spring.kafka.properties.request.timeout.ms=20000 7 | spring.kafka.properties.bootstrap.servers=${BOOTSTRAP_SERVERS} 8 | spring.kafka.properties.retry.backoff.ms=500 9 | spring.kafka.properties.sasl.jaas.config=${SASL_JAAS_CONFIG_PROPERTY_FORMAT} 10 | spring.kafka.properties.security.protocol=SASL_SSL 11 | 12 | # Cloud SR Config 13 | spring.kafka.properties.basic.auth.credentials.source=USER_INFO 14 | spring.kafka.properties.schema.registry.basic.auth.user.info=${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO} 15 | spring.kafka.properties.schema.registry.url=${SCHEMA_REGISTRY_URL} 16 | # producer configuration 17 | #spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer 18 | #spring.kafka.producer.value-serializer=io.confluent.kafka.serializers.KafkaAvroSerializer 19 | 20 | # consumer configuration 21 | #spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer 22 | #spring.kafka.consumer.value-deserializer=io.confluent.kafka.serializers.KafkaAvroDeserializer -------------------------------------------------------------------------------- /data-generator/src/main/resources/application-postgresql.properties: -------------------------------------------------------------------------------- 1 | spring.datasource.url=jdbc:postgresql://localhost:5432/account-store 2 | 3 | spring.flyway.locations=classpath:db/migration/postgresql -------------------------------------------------------------------------------- /data-generator/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | 
spring.application.name=data-faker 2 | spring.profiles.active=postgresql,ccloud 3 | 4 | # JDBC Datasource 5 | #spring.datasource.driver-class-name=org.h2.Driver 6 | spring.datasource.username=account-store 7 | spring.datasource.password=account-store 8 | 9 | # Jpa 10 | #spring.jpa.hibernate.ddl-auto=create-drop 11 | spring.jpa.hibernate.ddl-auto=validate 12 | spring.jpa.properties.javax.persistence.schema-generation.scripts.action=create 13 | spring.jpa.properties.javax.persistence.schema-generation.scripts.create-target=target/create.sql 14 | 15 | # Flyway 16 | spring.flyway.enabled=true 17 | 18 | # Kafka 19 | spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer 20 | spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer 21 | spring.kafka.consumer.properties.spring.json.type.mapping=transaction:io.confluent.developer.ccloud.demo.kstream.data.domain.transaction.Transaction 22 | 23 | spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer 24 | spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer 25 | spring.kafka.producer.properties.spring.json.type.mapping=transaction:io.confluent.developer.ccloud.demo.kstream.data.domain.transaction.Transaction 26 | 27 | # Kafka Stream 28 | spring.kafka.streams.properties.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde 29 | spring.kafka.streams.properties.default.value.serde=org.springframework.kafka.support.serializer.JsonSerde 30 | 31 | spring.kafka.streams.properties.spring.json.type.mapping=transaction:io.confluent.developer.ccloud.demo.kstream.data.domain.transaction.Transaction 32 | spring.kafka.streams.properties.spring.json.trusted.packages=* -------------------------------------------------------------------------------- /data-generator/src/main/resources/db/migration/postgresql/V0_1_0__Account.sql: -------------------------------------------------------------------------------- 1 | create table account (number int4 not null, city_address varchar(255), country_address varchar(255), creation_date timestamp not null, first_name varchar(255), last_name varchar(255), number_address varchar(255), street_address varchar(255), update_date timestamp not null, primary key (number)); -------------------------------------------------------------------------------- /data-generator/src/main/resources/lombol.config: -------------------------------------------------------------------------------- 1 | lombok.anyConstructor.addConstructorProperties=true -------------------------------------------------------------------------------- /data-generator/src/main/resources/topics-defaults.properties: -------------------------------------------------------------------------------- 1 | # Topics initialization 2 | topics.account.name=account 3 | topics.account.partitions=10 4 | topics.account.replication-factor=3 5 | 6 | topics.transaction-request.name=transaction-request 7 | topics.transaction-request.partitions=10 8 | topics.transaction-request.replication-factor=3 9 | 10 | topics.transaction-failed.name=transaction-failed 11 | topics.transaction-failed.partitions=10 12 | topics.transaction-failed.replication-factor=3 13 | 14 | topics.transaction-success.name=transaction-success 15 | topics.transaction-success.partitions=10 16 | topics.transaction-success.replication-factor=3 17 | 18 | -------------------------------------------------------------------------------- 
/docker-compose/local/docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: '3.7' 3 | services: 4 | zookeeper: 5 | image: confluentinc/cp-zookeeper:5.5.1 6 | container_name: kstreams-demo-zookeeper 7 | ports: 8 | - 2181:2181 9 | environment: 10 | ZOOKEEPER_CLIENT_PORT: 2181 11 | ZOOKEEPER_TICK_TIME: 2000 12 | 13 | kafka: 14 | image: confluentinc/cp-enterprise-kafka:5.5.1 15 | container_name: kstreams-demo-kafka 16 | hostname: kafka 17 | depends_on: 18 | - zookeeper 19 | ports: 20 | - 9092:9092 21 | - 19092:19092 22 | environment: 23 | CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: "kafka:19092" 24 | CONFLUENT_METRICS_REPORTER_MAX_REQUEST_SIZE: 10485760 25 | CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: "false" 26 | CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1 27 | KAFKA_ADVERTISED_LISTENERS: LOCALHOST://localhost:9092,DOCKER://kafka:19092 28 | KAFKA_BROKER_ID: 1 29 | KAFKA_BROKER_RACK: "r1" 30 | KAFKA_INTER_BROKER_LISTENER_NAME: DOCKER 31 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: DOCKER:PLAINTEXT,LOCALHOST:PLAINTEXT 32 | KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter 33 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 34 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 35 | 36 | ksqldb: 37 | image: confluentinc/ksqldb-server:0.6.0 38 | hostname: ksqldb 39 | container_name: kstreams-demo-ksqldb 40 | ports: 41 | - "8088:8088" 42 | environment: 43 | KSQL_LISTENERS: http://0.0.0.0:8088 44 | KSQL_BOOTSTRAP_SERVERS: kafka:19092 45 | KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schemaregistry:8085" 46 | KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true" 47 | KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true" 48 | KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: earliest 49 | 50 | ksqldb-cli: 51 | image: confluentinc/ksqldb-cli:0.6.0 52 | container_name: kstreams-demo-ksqldb-cli 53 | depends_on: 54 | - ksqldb 55 | entrypoint: /bin/sh 56 | tty: true 57 | 58 | schemaregistry: 59 | image: confluentinc/cp-schema-registry:5.5.1 60 | container_name: kstreams-demo-sr 61 | restart: always 62 | depends_on: 63 | - zookeeper 64 | - kafka 65 | environment: 66 | SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: "kafka:19092" 67 | SCHEMA_REGISTRY_HOST_NAME: microservices-dev-sr 68 | SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:8085" 69 | SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO 70 | ports: 71 | - 8085:8085 72 | 73 | connect: 74 | image: confluentinc/cp-kafka-connect:5.5.1 75 | container_name: kstreams-demo-connect 76 | restart: always 77 | ports: 78 | - "8083:8083" 79 | depends_on: 80 | - zookeeper 81 | - kafka 82 | environment: 83 | CONNECT_BOOTSTRAP_SERVERS: "kafka:19092" 84 | CONNECT_REST_PORT: 8083 85 | CONNECT_LISTENERS: "http://0.0.0.0:8083" 86 | CONNECT_GROUP_ID: "connect" 87 | CONNECT_PRODUCER_CLIENT_ID: "connect-worker-producer" 88 | CONNECT_CONFIG_STORAGE_TOPIC: connect-configs 89 | CONNECT_OFFSET_STORAGE_TOPIC: connect-offsets 90 | CONNECT_STATUS_STORAGE_TOPIC: connect-statuses 91 | CONNECT_REPLICATION_FACTOR: 1 92 | CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1 93 | CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1 94 | CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1 95 | CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.storage.StringConverter" 96 | CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" 97 | CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" 98 | CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" 99 | CONNECT_REST_ADVERTISED_HOST_NAME: "connect" 100 | 
CONNECT_PLUGIN_PATH: "/usr/share/java,/connect-plugins" 101 | CONNECT_LOG4J_ROOT_LOGLEVEL: INFO 102 | CONNECT_LOG4J_LOGGERS: org.reflections=ERROR 103 | CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.5.1.jar 104 | AWS_ACCESS_KEY_ID: minio 105 | AWS_SECRET_ACCESS_KEY: minio123 106 | 107 | control-center: 108 | image: confluentinc/cp-enterprise-control-center:5.5.1 109 | container_name: kstreams-demo-control-center 110 | restart: always 111 | depends_on: 112 | - zookeeper 113 | - kafka 114 | - connect 115 | ports: 116 | - "9021:9021" 117 | environment: 118 | CONTROL_CENTER_BOOTSTRAP_SERVERS: "kafka:19092" 119 | CONTROL_CENTER_ZOOKEEPER_CONNECT: "zookeeper:2181" 120 | CONTROL_CENTER_REPLICATION_FACTOR: 1 121 | CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_REPLICATION: 1 122 | CONTROL_CENTER_INTERNAL_TOPICS_REPLICATION: 1 123 | CONTROL_CENTER_COMMAND_TOPIC_REPLICATION: 1 124 | CONTROL_CENTER_METRICS_TOPIC_REPLICATION: 1 125 | CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1 126 | CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1 127 | CONTROL_CENTER_METRICS_TOPIC_PARTITIONS: 1 128 | CONTROL_CENTER_STREAMS_NUM_STREAM_THREADS: 1 129 | # Amount of heap to use for internal caches. Increase for better thoughput 130 | CONTROL_CENTER_STREAMS_CACHE_MAX_BYTES_BUFFERING: 100000000 131 | CONTROL_CENTER_CONNECT_CLUSTER: "http://connect:8083" 132 | CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081" 133 | CONTROL_CENTER_DEPRECATED_VIEWS_ENABLE: "true" 134 | CONTROL_CENTER_STREAMS_CONSUMER_REQUEST_TIMEOUT_MS: "960032" 135 | # HTTP and HTTPS to Control Center UI 136 | CONTROL_CENTER_REST_LISTENERS: "http://0.0.0.0:9021" 137 | CONTROL_CENTER_KSQL_URL: "http://ksqldb:8088" 138 | CONTROL_CENTER_KSQL_ADVERTISED_URL: "http://0.0.0.0:8088" 139 | 140 | account-db: 141 | container_name: kstreams-demo-account-db 142 | image: postgres 143 | restart: always 144 | ports: 145 | - 5432:5432 146 | environment: 147 | POSTGRES_USER: account-store 148 | POSTGRES_PASSWORD: account-store -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! -x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? 
-ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i + 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /images/cloud-ui-messages.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/cloud-ui-messages.jpg -------------------------------------------------------------------------------- /images/data-generator.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/data-generator.png -------------------------------------------------------------------------------- /images/jdbc-source-connector.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/jdbc-source-connector.png -------------------------------------------------------------------------------- /images/transaction-failed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/transaction-failed.png -------------------------------------------------------------------------------- /images/transaction-service.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/transaction-service.png -------------------------------------------------------------------------------- /images/transaction-statement-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/transaction-statement-overview.png -------------------------------------------------------------------------------- /images/transaction-statements.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/transaction-statements.png -------------------------------------------------------------------------------- /images/transaction-success.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/images/transaction-success.png -------------------------------------------------------------------------------- /kstreams-demo-kotlin/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | 17 | ### IntelliJ IDEA ### 18 | .idea 19 | *.iws 20 | *.iml 21 | *.ipr 22 | out/ 23 | !**/src/main/**/out/ 24 | !**/src/test/**/out/ 25 | 26 | ### NetBeans ### 27 | /nbproject/private/ 28 | /nbbuild/ 29 | /dist/ 30 | /nbdist/ 31 | /.nb-gradle/ 32 | 33 | ### VS Code ### 34 
| .vscode/ 35 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") 5 | id("io.spring.dependency-management") 6 | kotlin("jvm") version "1.3.72" 7 | kotlin("plugin.spring") version "1.3.72" 8 | } 9 | 10 | group = "io.confluent.developer" 11 | version = "0.0.1-SNAPSHOT" 12 | java.sourceCompatibility = JavaVersion.VERSION_11 13 | 14 | repositories { 15 | mavenCentral() 16 | } 17 | 18 | dependencies { 19 | implementation("org.springframework.boot:spring-boot-starter") 20 | implementation("org.apache.kafka:kafka-streams") 21 | implementation("org.jetbrains.kotlin:kotlin-reflect") 22 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 23 | implementation("org.springframework.kafka:spring-kafka") 24 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 25 | 26 | testImplementation("org.springframework.boot:spring-boot-starter-test") { 27 | exclude(group = "org.junit.vintage", module = "junit-vintage-engine") 28 | } 29 | testImplementation("org.springframework.kafka:spring-kafka-test") 30 | testImplementation("org.apache.kafka:kafka-streams-test-utils") 31 | //testImplementation("junit:junit:4.13") 32 | } 33 | 34 | tasks.withType<Test> { 35 | useJUnitPlatform() 36 | // Show test results. 37 | testLogging { 38 | events("passed", "skipped", "failed") 39 | } 40 | } 41 | 42 | tasks.withType<KotlinCompile> { 43 | kotlinOptions { 44 | freeCompilerArgs = listOf("-Xjsr305=strict") 45 | jvmTarget = "11" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gAmUssA/springone2020-workshop/53cb7e4ff322d0cb51f9904582fdd45d5200012e/kstreams-demo-kotlin/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /kstreams-demo-kotlin/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.4.1-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License.
17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! -x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? 
-ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i + 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto init 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto init 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :init 68 | @rem Get command-line arguments, handling Windows variants 69 | 70 | if not "%OS%" == "Windows_NT" goto win9xME_args 71 | 72 | :win9xME_args 73 | @rem Slurp the command line arguments. 74 | set CMD_LINE_ARGS= 75 | set _SKIP=2 76 | 77 | :win9xME_args_slurp 78 | if "x%~1" == "x" goto execute 79 | 80 | set CMD_LINE_ARGS=%* 81 | 82 | :execute 83 | @rem Setup the command line 84 | 85 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 86 | 87 | 88 | @rem Execute Gradle 89 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 90 | 91 | :end 92 | @rem End local scope for the variables with windows NT shell 93 | if "%ERRORLEVEL%"=="0" goto mainEnd 94 | 95 | :fail 96 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 97 | rem the _cmd.exe /c_ return code! 
98 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 99 | exit /b 1 100 | 101 | :mainEnd 102 | if "%OS%"=="Windows_NT" endlocal 103 | 104 | :omega 105 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "kstreams-demo-kotlin" 2 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/DemoApplication.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.runApplication 5 | 6 | @SpringBootApplication 7 | class DemoApplication 8 | 9 | fun main(args: Array<String>) { 10 | runApplication<DemoApplication>(*args) 11 | } 12 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/KStreamsConfig.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper 4 | import io.confluent.developer.ccloud.demo.kstreams.domain.Funds 5 | import io.confluent.developer.ccloud.demo.kstreams.domain.Transaction 6 | import io.confluent.developer.ccloud.demo.kstreams.domain.TransactionResult 7 | import io.confluent.developer.ccloud.demo.kstreams.topic.* 8 | import org.apache.kafka.clients.admin.NewTopic 9 | import org.apache.kafka.common.serialization.Serdes 10 | import org.apache.kafka.streams.StreamsBuilder 11 | import org.apache.kafka.streams.Topology 12 | import org.apache.kafka.streams.kstream.KStream 13 | import org.apache.kafka.streams.kstream.ValueTransformerSupplier 14 | import org.apache.kafka.streams.state.Stores 15 | import org.springframework.context.annotation.Bean 16 | import org.springframework.context.annotation.Configuration 17 | import org.springframework.kafka.annotation.EnableKafkaStreams 18 | import org.springframework.kafka.config.TopicBuilder 19 | import org.springframework.kafka.support.serializer.JsonSerde 20 | 21 | @Configuration 22 | @EnableKafkaStreams 23 | class KStreamConfig( 24 | private val transactionRequestConfiguration: TransactionRequestTopicConfig, 25 | private val transactionSuccessConfiguration: TransactionSuccessTopicConfig, 26 | private val transactionFailedConfiguration: TransactionFailedTopicConfig, 27 | private val fundsStoreConfig: FundsStoreConfig) { 28 | 29 | val log = logger<KStreamConfig>() 30 | 31 | @Bean 32 | fun transactionFailed(topicConfig: TransactionFailedTopicConfig): NewTopic { 33 | return createTopic(topicConfig) 34 | } 35 | 36 | @Bean 37 | fun transactionSuccess(topicConfig: TransactionSuccessTopicConfig): NewTopic { 38 | return createTopic(topicConfig) 39 | } 40 | 41 | private fun createTopic(topicConfig: TopicConfig): NewTopic { 42 | log.info("Creating topic {}...", topicConfig.name) 43 | return TopicBuilder.name(topicConfig.name) 44 | .partitions(topicConfig.partitions) 45 | .replicas(topicConfig.replicationFactor) 46 | .compact() 47 | .build() 48 | } 49 | 50 | @Bean 51 | fun topology(streamsBuilder: StreamsBuilder): Topology { 52 | streamsBuilder.addStateStore( 53 | Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(fundsStoreConfig.name), 54 | Serdes.String(),
JsonSerde(Funds::class.java, OBJECT_MAPPER))) 55 | defineStreams(streamsBuilder) 56 | val topology = streamsBuilder.build() 57 | log.trace("Topology description : {}", topology.describe()) 58 | return topology 59 | } 60 | 61 | protected fun defineStreams(streamsBuilder: StreamsBuilder) { 62 | val storeName: String = fundsStoreConfig.name 63 | 64 | val transactionStream: KStream<String, Transaction> = streamsBuilder.stream(transactionRequestConfiguration.name) 65 | 66 | val resultStream: KStream<String, TransactionResult> = transactionStream 67 | .transformValues(ValueTransformerSupplier<Transaction, TransactionResult> { TransactionTransformer(storeName) }, storeName) 68 | 69 | resultStream 70 | .filter { account: String, result: TransactionResult -> success(account, result) } 71 | .to(transactionSuccessConfiguration.name) 72 | 73 | resultStream 74 | .filterNot { account: String, result: TransactionResult -> success(account, result) } 75 | .to(transactionFailedConfiguration.name) 76 | } 77 | 78 | private fun success(account: String, result: TransactionResult): Boolean { 79 | return result.success 80 | } 81 | 82 | companion object { 83 | private val OBJECT_MAPPER = ObjectMapper().findAndRegisterModules() 84 | } 85 | } -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/TransactionTransformer.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams 2 | 3 | import io.confluent.developer.ccloud.demo.kstreams.domain.Funds 4 | import io.confluent.developer.ccloud.demo.kstreams.domain.Transaction 5 | import io.confluent.developer.ccloud.demo.kstreams.domain.TransactionResult 6 | import org.apache.kafka.streams.kstream.ValueTransformer 7 | import org.apache.kafka.streams.processor.ProcessorContext 8 | import org.apache.kafka.streams.state.KeyValueStore 9 | import java.math.BigDecimal 10 | 11 | class TransactionTransformer(private val stateStoreName: String) 12 | : ValueTransformer<Transaction, TransactionResult> { 13 | 14 | private val log = logger<TransactionTransformer>() 15 | private lateinit var store: KeyValueStore<String, Funds> 16 | 17 | override fun close() {} 18 | 19 | private fun createEmptyFunds(account: String): Funds { 20 | val funds = Funds(account, BigDecimal.ZERO) 21 | store.put(account, funds) 22 | return funds 23 | } 24 | 25 | private fun depositFunds(transaction: Transaction): Funds { 26 | return updateFunds(transaction.account, transaction.amount) 27 | } 28 | 29 | private fun getFunds(account: String?): Funds { 30 | return store.get(account) ?: createEmptyFunds(account!!) 31 | } 32 | 33 | private fun hasEnoughFunds(transaction: Transaction): Boolean { 34 | return getFunds(transaction.account!!).balance.compareTo(transaction.amount) != -1 35 | } 36 | 37 | override fun init(context: ProcessorContext) { 38 | store = context.getStateStore(stateStoreName) as KeyValueStore<String, Funds> 39 | } 40 | 41 | override fun transform(transaction: Transaction): TransactionResult { 42 | if (transaction.type == Transaction.Type.DEPOSIT)
{ 43 | return TransactionResult(transaction, 44 | depositFunds(transaction), 45 | true, 46 | null) 47 | } 48 | if (hasEnoughFunds(transaction)) { 49 | return TransactionResult(transaction, withdrawFunds(transaction), true, null) 50 | } 51 | log.info("Not enough funds for account {}.", transaction.account) 52 | return TransactionResult(transaction, 53 | getFunds(transaction.account), 54 | false, 55 | TransactionResult.ErrorType.INSUFFICIENT_FUNDS) 56 | } 57 | 58 | private fun updateFunds(account: String?, amount: BigDecimal?): Funds { 59 | val funds = Funds(account, getFunds(account).balance.add(amount)) 60 | 61 | log.info("Updating funds for account {} with {}. Current balance is {}.", account, amount, funds.balance) 62 | store.put(account, funds) 63 | return funds 64 | } 65 | 66 | private fun withdrawFunds(transaction: Transaction): Funds { 67 | return updateFunds(transaction.account, transaction.amount?.negate()) 68 | } 69 | } -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/Util.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams 2 | 3 | import org.slf4j.Logger 4 | import org.slf4j.LoggerFactory 5 | 6 | inline fun <reified T> logger(): Logger { 7 | return LoggerFactory.getLogger(T::class.java) 8 | } -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/domain/Funds.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.domain 2 | 3 | import java.math.BigDecimal 4 | 5 | data class Funds(val account: String?, val balance: BigDecimal) 6 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/domain/Transaction.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.domain 2 | 3 | import java.math.BigDecimal 4 | 5 | data class Transaction( 6 | var guid: String? = null, 7 | var account: String? = null, 8 | var amount: BigDecimal? = null, 9 | var type: Type? = null, 10 | var currency: String? = null, 11 | var country: String? = null 12 | ) { 13 | enum class Type { 14 | DEPOSIT, WITHDRAW 15 | } 16 | } 17 | 18 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/domain/TransactionResult.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.domain 2 | 3 | data class TransactionResult( 4 | var transaction: Transaction, 5 | var funds: Funds, 6 | var success: Boolean, 7 | var errorType: ErrorType?
8 | ) { 9 | enum class ErrorType { 10 | INSUFFICIENT_FUNDS 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/topic/FundsStoreConfig.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.topic 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties 4 | import org.springframework.context.annotation.Configuration 5 | 6 | @Configuration 7 | @ConfigurationProperties("stores.funds") 8 | data class FundsStoreConfig(var name: String = "funds") -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/topic/TopicConfig.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.topic 2 | 3 | import org.springframework.context.annotation.Configuration 4 | import org.springframework.context.annotation.PropertySource 5 | 6 | @Configuration 7 | @PropertySource(value = ["classpath:topics-defaults.properties"]) 8 | abstract class TopicConfig { 9 | var name: String = "" 10 | var compacted: Boolean = false 11 | var partitions: Int = 1 12 | var replicationFactor: Int = 1 13 | } -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/topic/TransactionFailedTopicConfig.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.topic 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties 4 | import org.springframework.context.annotation.Configuration 5 | 6 | @Configuration 7 | @ConfigurationProperties("topics.transaction-failed") 8 | class TransactionFailedTopicConfig : TopicConfig() -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/topic/TransactionRequestTopicConfig.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.topic 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties 4 | import org.springframework.context.annotation.Configuration 5 | 6 | @Configuration 7 | @ConfigurationProperties("topics.transaction-request") 8 | class TransactionRequestTopicConfig : TopicConfig() -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/kotlin/io/confluent/developer/ccloud/demo/kstreams/topic/TransactionSuccessTopicConfig.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams.topic 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties 4 | import org.springframework.context.annotation.Configuration 5 | 6 | @Configuration 7 | @ConfigurationProperties("topics.transaction-success") 8 | class TransactionSuccessTopicConfig : TopicConfig() -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/resources/application-ccloud.properties: -------------------------------------------------------------------------------- 1 | # CCloud 2 | 
spring.kafka.properties.ssl.endpoint.identification.algorithm=https 3 | spring.kafka.properties.sasl.mechanism=PLAIN 4 | spring.kafka.properties.request.timeout.ms=20000 5 | spring.kafka.properties.bootstrap.servers=${BOOTSTRAP_SERVERS} 6 | spring.kafka.properties.retry.backoff.ms=500 7 | spring.kafka.properties.sasl.jaas.config=${SASL_JAAS_CONFIG_PROPERTY_FORMAT} 8 | spring.kafka.properties.security.protocol=SASL_SSL 9 | spring.kafka.streams.replication-factor=3 10 | 11 | # Cloud SR Config 12 | spring.kafka.properties.basic.auth.credentials.source=USER_INFO 13 | spring.kafka.properties.schema.registry.basic.auth.user.info=${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO} 14 | spring.kafka.properties.schema.registry.url=${SCHEMA_REGISTRY_URL} 15 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | spring.application.name=transaction-kotlin 2 | spring.profiles.active=ccloud 3 | spring.jackson.serialization.write-dates-as-timestamps=false 4 | spring.kafka.properties.auto.offset.reset=earliest 5 | spring.kafka.streams.properties.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde 6 | spring.kafka.streams.properties.default.value.serde=org.springframework.kafka.support.serializer.JsonSerde 7 | spring.kafka.streams.properties.spring.json.type.mapping=transaction:io.confluent.developer.ccloud.demo.kstreams.domain.Transaction 8 | spring.kafka.streams.properties.spring.json.trusted.packages=* -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/main/resources/topics-defaults.properties: -------------------------------------------------------------------------------- 1 | # Topics initialization 2 | topics.transaction-failed.name=transaction-failed 3 | topics.transaction-failed.partitions=10 4 | topics.transaction-failed.replication-factor=3 5 | 6 | topics.transaction-request.name=transaction-request 7 | topics.transaction-request.partitions=10 8 | topics.transaction-request.replication-factor=3 9 | 10 | topics.transaction-success.name=transaction-success 11 | topics.transaction-success.partitions=10 12 | topics.transaction-success.replication-factor=3 -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/test/kotlin/io/confluent/developer/ccloud/demo/kstreams/DemoApplicationTests.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams 2 | 3 | 4 | import org.junit.jupiter.api.Test 5 | import org.springframework.boot.test.context.SpringBootTest 6 | 7 | @SpringBootTest 8 | class DemoApplicationTests { 9 | 10 | @Test 11 | fun contextLoads() { 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/test/kotlin/io/confluent/developer/ccloud/demo/kstreams/KStreamConfigTest.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper 4 | import io.confluent.developer.ccloud.demo.kstreams.domain.Funds 5 | import io.confluent.developer.ccloud.demo.kstreams.domain.Transaction 6 | import io.confluent.developer.ccloud.demo.kstreams.domain.TransactionResult 7 | import
io.confluent.developer.ccloud.demo.kstreams.topic.FundsStoreConfig 8 | import io.confluent.developer.ccloud.demo.kstreams.topic.TransactionFailedTopicConfig 9 | import io.confluent.developer.ccloud.demo.kstreams.topic.TransactionRequestTopicConfig 10 | import io.confluent.developer.ccloud.demo.kstreams.topic.TransactionSuccessTopicConfig 11 | import org.apache.kafka.common.serialization.Serde 12 | import org.apache.kafka.common.serialization.Serdes 13 | import org.apache.kafka.streams.* 14 | import org.apache.kafka.streams.state.KeyValueStore 15 | import org.hamcrest.MatcherAssert.assertThat 16 | import org.hamcrest.Matchers.* 17 | import org.junit.jupiter.api.BeforeEach 18 | import org.junit.jupiter.api.Test 19 | import org.springframework.kafka.support.serializer.JsonSerde 20 | import java.math.BigDecimal 21 | import java.util.* 22 | 23 | class KStreamConfigTest { 24 | var txRequestConfig: TransactionRequestTopicConfig? = null 25 | var txSuccessConfig: TransactionSuccessTopicConfig? = null 26 | var txFailedTopicConfig: TransactionFailedTopicConfig? = null 27 | 28 | private lateinit var transactionSerde: JsonSerde<Transaction> 29 | private lateinit var transactionResultSerde: JsonSerde<TransactionResult> 30 | private lateinit var stringSerde: Serde<String> 31 | 32 | private lateinit var deposit100: Transaction 33 | private lateinit var withdraw100: Transaction 34 | private lateinit var withdraw200: Transaction 35 | 36 | private var fundsStoreConfig: FundsStoreConfig? = null 37 | private lateinit var properties: Properties 38 | private lateinit var topology: Topology 39 | 40 | @BeforeEach 41 | fun setUp() { 42 | 43 | // TODO use spring test and test configs 44 | txRequestConfig = TransactionRequestTopicConfig() 45 | txRequestConfig!!.name = "transaction-request" 46 | txSuccessConfig = TransactionSuccessTopicConfig() 47 | txSuccessConfig!!.name = "transaction-success" 48 | txFailedTopicConfig = TransactionFailedTopicConfig() 49 | txFailedTopicConfig!!.name = "transaction-failed" 50 | fundsStoreConfig = FundsStoreConfig() 51 | fundsStoreConfig!!.name = "funds-store" 52 | val streamsBuilder = StreamsBuilder() 53 | 54 | topology = KStreamConfig(txRequestConfig!!, txSuccessConfig!!, txFailedTopicConfig!!, fundsStoreConfig!!)
55 | .topology(streamsBuilder) 56 | 57 | // serdes 58 | transactionSerde = JsonSerde(Transaction::class.java, OBJECT_MAPPER) 59 | transactionResultSerde = JsonSerde(TransactionResult::class.java, OBJECT_MAPPER) 60 | stringSerde = Serdes.String() 61 | 62 | // TopologyTestDriver config 63 | properties = Properties() 64 | properties.putAll(TransactionTransformerTest.testConfig) 65 | deposit100 = Transaction(UUID.randomUUID().toString(), 66 | "1", 67 | BigDecimal(100), 68 | Transaction.Type.DEPOSIT, 69 | "USD", 70 | "USA") 71 | withdraw100 = Transaction(UUID.randomUUID().toString(), 72 | "1", 73 | BigDecimal(100), 74 | Transaction.Type.WITHDRAW, 75 | "USD", 76 | "USA") 77 | withdraw200 = Transaction(UUID.randomUUID().toString(), 78 | "1", 79 | BigDecimal(200), 80 | Transaction.Type.WITHDRAW, 81 | "USD", 82 | "USA") 83 | } 84 | 85 | @Test 86 | fun testDriverShouldNotBeNull() { 87 | TopologyTestDriver(topology, properties).use { testDriver -> 88 | assertThat(testDriver, not(nullValue()) 89 | ) 90 | } 91 | } 92 | 93 | @Test 94 | fun shouldCreateSuccessfulTransaction() { 95 | TopologyTestDriver(topology, properties).use { testDriver -> 96 | val inputTopic: TestInputTopic<String, Transaction> = testDriver 97 | .createInputTopic(txRequestConfig?.name, stringSerde.serializer(), transactionSerde.serializer()) 98 | inputTopic.pipeInput(deposit100.account, deposit100) 99 | inputTopic.pipeInput(withdraw100.account, withdraw100) 100 | val outputTopic: TestOutputTopic<String, TransactionResult> = 101 | testDriver.createOutputTopic(txSuccessConfig?.name, 102 | stringSerde.deserializer(), 103 | transactionResultSerde.deserializer()) 104 | val successfulTransactions: List<TransactionResult> = outputTopic.readValuesToList() 105 | // balance should be 0 106 | val transactionResult: TransactionResult = successfulTransactions[1] 107 | assertThat(transactionResult.funds.balance, `is`(BigDecimal(0))) 108 | } 109 | } 110 | 111 | @Test 112 | fun shouldBeInsufficientFunds() { 113 | TopologyTestDriver(topology, properties).use { testDriver -> 114 | val inputTopic: TestInputTopic<String, Transaction> = testDriver 115 | .createInputTopic(txRequestConfig?.name, stringSerde.serializer(), transactionSerde.serializer()) 116 | inputTopic.pipeInput(deposit100.account, deposit100) 117 | inputTopic.pipeInput(withdraw200.account, withdraw200) 118 | val failedResultOutputTopic: TestOutputTopic<String, TransactionResult> = testDriver.createOutputTopic(txFailedTopicConfig?.name, stringSerde.deserializer(), 119 | transactionResultSerde.deserializer()) 120 | val successResultOutputTopic: TestOutputTopic<String, TransactionResult> = testDriver.createOutputTopic(txSuccessConfig?.name, stringSerde.deserializer(), 121 | transactionResultSerde.deserializer()) 122 | val successfulDeposit100Result: TransactionResult = successResultOutputTopic.readValuesToList()[0] 123 | assertThat(successfulDeposit100Result.funds.balance, `is`(BigDecimal(100))) 124 | 125 | val failedTransactions: List<TransactionResult> = failedResultOutputTopic.readValuesToList() 126 | // the withdraw200 request should have been routed to the failed topic 127 | val transactionResult: TransactionResult = failedTransactions[0] 128 | assertThat(transactionResult.errorType, `is`(TransactionResult.ErrorType.INSUFFICIENT_FUNDS)) 129 | } 130 | } 131 | 132 | @Test 133 | fun balanceShouldBe300() { 134 | TopologyTestDriver(topology, properties).use { testDriver -> 135 | val inputTopic: TestInputTopic<String, Transaction> = 136 | testDriver.createInputTopic(txRequestConfig?.name, stringSerde.serializer(), transactionSerde.serializer()) 137 | inputTopic.pipeInput(deposit100.account, deposit100) 138 | inputTopic.pipeInput(deposit100.account, deposit100) 139 | inputTopic.pipeInput(deposit100.account, deposit100) 140 | val store:
KeyValueStore<String, Funds> = testDriver.getKeyValueStore(fundsStoreConfig?.name) 141 | assertThat(store["1"].balance, `is`(BigDecimal(300))) 142 | } 143 | } 144 | 145 | companion object { 146 | private val OBJECT_MAPPER = ObjectMapper().findAndRegisterModules() 147 | } 148 | } -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/test/kotlin/io/confluent/developer/ccloud/demo/kstreams/TransactionTransformerTest.kt: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstreams 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper 4 | import io.confluent.developer.ccloud.demo.kstreams.domain.Funds 5 | import io.confluent.developer.ccloud.demo.kstreams.domain.Transaction 6 | import io.confluent.developer.ccloud.demo.kstreams.domain.TransactionResult 7 | import io.confluent.developer.ccloud.demo.kstreams.domain.TransactionResult.ErrorType.INSUFFICIENT_FUNDS 8 | import org.apache.kafka.common.serialization.Serdes 9 | import org.apache.kafka.streams.StreamsConfig 10 | import org.apache.kafka.streams.processor.MockProcessorContext 11 | import org.apache.kafka.streams.state.KeyValueStore 12 | import org.apache.kafka.streams.state.Stores 13 | import org.hamcrest.MatcherAssert.assertThat 14 | import org.hamcrest.Matchers.`is` 15 | import org.hamcrest.Matchers.nullValue 16 | import org.junit.jupiter.api.BeforeEach 17 | import org.junit.jupiter.api.Test 18 | import org.springframework.kafka.support.serializer.JsonDeserializer 19 | import org.springframework.kafka.support.serializer.JsonSerde 20 | import java.math.BigDecimal 21 | import java.util.* 22 | 23 | class TransactionTransformerTest { 24 | private var fundsStore: KeyValueStore<String, Funds>? = null 25 | private var mockContext: MockProcessorContext? = null 26 | private var transactionTransformer: TransactionTransformer? = null 27 | 28 | @BeforeEach 29 | fun setup() { 30 | val properties = Properties() 31 | 32 | properties.putAll(testConfig) 33 | mockContext = MockProcessorContext(properties) 34 | fundsStore = Stores.keyValueStoreBuilder( 35 | Stores.inMemoryKeyValueStore("fundsStore"), 36 | Serdes.String(), 37 | JsonSerde(Funds::class.java, OBJECT_MAPPER)) 38 | .withLoggingDisabled() // Changelog is not supported by MockProcessorContext. 39 | .build() 40 | fundsStore?.init(mockContext, fundsStore) 41 | mockContext!!.register(fundsStore, null) 42 | transactionTransformer = TransactionTransformer(fundsStore?.name()!!) 43 | transactionTransformer!!.init(mockContext!!) 44 | } 45 | 46 | @Test 47 | fun shouldStoreTransaction() { 48 | val transaction = Transaction(UUID.randomUUID().toString(), 49 | "1", 50 | BigDecimal(100), 51 | Transaction.Type.DEPOSIT, 52 | "USD", 53 | "USA") 54 | val transactionResult: TransactionResult = transactionTransformer?.transform(transaction)!! 55 | 56 | assertThat(transactionResult.success, `is`(true)) 57 | } 58 | 59 | @Test 60 | fun shouldHaveInsufficientFunds() { 61 | val transaction = Transaction(UUID.randomUUID().toString(), "1", BigDecimal("100"), Transaction.Type.WITHDRAW, "RUR", 62 | "Russia") 63 | val result: TransactionResult = transactionTransformer?.transform(transaction)!!
64 | assertThat(result.success, `is`(false)) 65 | assertThat(result.errorType, `is`(INSUFFICIENT_FUNDS)) 66 | } 67 | 68 | @Test 69 | fun shouldHaveEnoughFunds() { 70 | val transaction1 = Transaction(UUID.randomUUID().toString(), "1", BigDecimal("300"), Transaction.Type.DEPOSIT, "RUR", 71 | "Russia") 72 | val transaction2 = Transaction(UUID.randomUUID().toString(), "1", BigDecimal("200"), Transaction.Type.WITHDRAW, "RUR", 73 | "Russia") 74 | transactionTransformer?.transform(transaction1) 75 | val result: TransactionResult = transactionTransformer?.transform(transaction2)!! 76 | assertThat(result.success, `is`(true)) 77 | assertThat(result.errorType, `is`(nullValue())) 78 | } 79 | 80 | companion object { 81 | private val OBJECT_MAPPER = ObjectMapper().findAndRegisterModules() 82 | val testConfig = mapOf( 83 | StreamsConfig.BOOTSTRAP_SERVERS_CONFIG to "localhost:8080", 84 | StreamsConfig.APPLICATION_ID_CONFIG to "mytest", 85 | StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG to "org.apache.kafka.common.serialization.Serdes\$StringSerde", 86 | StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG to "org.springframework.kafka.support.serializer.JsonSerde", 87 | JsonDeserializer.TYPE_MAPPINGS to "transaction:io.confluent.developer.ccloud.demo.kstreams.domain.Transaction", 88 | JsonDeserializer.TRUSTED_PACKAGES to "*" 89 | ) 90 | } 91 | } -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/test/resources/application-default.properties: -------------------------------------------------------------------------------- 1 | kafka.streams.enabled=false -------------------------------------------------------------------------------- /kstreams-demo-kotlin/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | <configuration> 2 | <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> 3 | <encoder> 4 | <pattern>%yellow(%d{yyyy-MM-dd HH:mm:ss}) %cyan(%logger{50}) %highlight([%p]) %green((%file:%line\)) - %msg%n</pattern> 5 | </encoder> 6 | </appender> 7 | <appender name="STDOUT_SHORT" class="ch.qos.logback.core.ConsoleAppender"> 8 | <encoder> 9 | <pattern>%yellow(%d{yyyy-MM-dd HH:mm:ss}) %highlight([%p]) %magenta((%file:%line\)) - %msg%n</pattern> 10 | </encoder> 11 | </appender> 12 | <root level="INFO"> 13 | <appender-ref ref="STDOUT"/> 14 | </root> 15 | </configuration> -------------------------------------------------------------------------------- /kstreams-demo/.gitignore: -------------------------------------------------------------------------------- 1 | .classpath 2 | .DS_Store 3 | .project 4 | .settings 5 | .vscode 6 | 7 | target -------------------------------------------------------------------------------- /kstreams-demo/build.gradle: -------------------------------------------------------------------------------- 1 | dependencies { 2 | implementation 'org.springframework.boot:spring-boot-starter-json:2.3.0.RELEASE' 3 | implementation 'org.springframework.kafka:spring-kafka:2.5.0.RELEASE' 4 | implementation 'org.apache.kafka:kafka-streams:2.5.0' 5 | implementation 'com.github.javafaker:javafaker:1.0.1' 6 | compileOnly 'org.projectlombok:lombok:1.18.8' 7 | annotationProcessor 'org.projectlombok:lombok:1.18.8' 8 | testImplementation 'org.apache.kafka:kafka-streams-test-utils:2.5.0' 9 | testImplementation 'org.springframework.kafka:spring-kafka-test:2.5.0.RELEASE' 10 | testImplementation 'org.springframework.boot:spring-boot-starter-test:2.3.0.RELEASE' 11 | testImplementation 'junit:junit:4.13' 12 | } 13 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/KStreamConfig.java:
-------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | 5 | import org.apache.kafka.clients.admin.NewTopic; 6 | import org.apache.kafka.common.serialization.Serdes; 7 | import org.apache.kafka.streams.StreamsBuilder; 8 | import org.apache.kafka.streams.Topology; 9 | import org.apache.kafka.streams.kstream.KStream; 10 | import org.apache.kafka.streams.state.Stores; 11 | import org.springframework.context.annotation.Bean; 12 | import org.springframework.context.annotation.Configuration; 13 | import org.springframework.kafka.annotation.EnableKafkaStreams; 14 | import org.springframework.kafka.config.TopicBuilder; 15 | import org.springframework.kafka.support.serializer.JsonSerde; 16 | 17 | import io.confluent.developer.ccloud.demo.kstream.domain.Funds; 18 | import io.confluent.developer.ccloud.demo.kstream.domain.Transaction; 19 | import io.confluent.developer.ccloud.demo.kstream.domain.TransactionResult; 20 | import io.confluent.developer.ccloud.demo.kstream.topic.FundsStoreConfig; 21 | import io.confluent.developer.ccloud.demo.kstream.topic.TopicConfig; 22 | import io.confluent.developer.ccloud.demo.kstream.topic.TransactionFailedTopicConfig; 23 | import io.confluent.developer.ccloud.demo.kstream.topic.TransactionRequestTopicConfig; 24 | import io.confluent.developer.ccloud.demo.kstream.topic.TransactionSuccessTopicConfig; 25 | import lombok.RequiredArgsConstructor; 26 | import lombok.extern.slf4j.Slf4j; 27 | 28 | @Configuration 29 | @EnableKafkaStreams 30 | @Slf4j 31 | @RequiredArgsConstructor 32 | public class KStreamConfig { 33 | 34 | private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().findAndRegisterModules(); 35 | 36 | private final TransactionRequestTopicConfig transactionRequestConfiguration; 37 | private final TransactionSuccessTopicConfig transactionSuccessConfiguration; 38 | private final TransactionFailedTopicConfig transactionFailedConfiguration; 39 | private final FundsStoreConfig fundsStoreConfig; 40 | 41 | @Bean 42 | NewTopic transactionFailed(TransactionFailedTopicConfig topicConfig) { 43 | return createTopic(topicConfig); 44 | } 45 | 46 | @Bean 47 | NewTopic transactionSuccess(TransactionSuccessTopicConfig topicConfig) { 48 | return createTopic(topicConfig); 49 | } 50 | 51 | private NewTopic createTopic(TopicConfig topicConfig) { 52 | log.info("Creating topic {}...", topicConfig.getName()); 53 | return TopicBuilder.name(topicConfig.getName()) 54 | .partitions(topicConfig.getPartitions()) 55 | .replicas(topicConfig.getReplicationFactor()) 56 | .compact() 57 | .build(); 58 | } 59 | 60 | @Bean 61 | public Topology topology(StreamsBuilder streamsBuilder) { 62 | streamsBuilder.addStateStore( 63 | Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(fundsStoreConfig.getName()), 64 | Serdes.String(), new JsonSerde<>(Funds.class, OBJECT_MAPPER))); 65 | 66 | defineStreams(streamsBuilder); 67 | 68 | Topology topology = streamsBuilder.build(); 69 | 70 | log.trace("Topology description : {}", topology.describe()); 71 | 72 | return topology; 73 | } 74 | 75 | protected void defineStreams(StreamsBuilder streamsBuilder) { 76 | 77 | // TODO: implement me!!! 
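// A possible completion sketch, mirroring the finished Kotlin module's KStreamsConfig.defineStreams
// (kept as comments so the exercise above stays open): consume the transaction-request topic,
// run each Transaction through TransactionTransformer against the funds state store, and route
// each TransactionResult to the success or failed topic depending on its success flag.
//
// KStream<String, Transaction> transactionStream =
//     streamsBuilder.stream(transactionRequestConfiguration.getName());
//
// KStream<String, TransactionResult> resultStream = transactionStream.transformValues(
//     () -> new TransactionTransformer(fundsStoreConfig.getName()), fundsStoreConfig.getName());
//
// resultStream.filter(this::success).to(transactionSuccessConfiguration.getName());
// resultStream.filterNot(this::success).to(transactionFailedConfiguration.getName());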
78 | } 79 | 80 | private boolean success(String account, TransactionResult result) { 81 | return result.isSuccess(); 82 | } 83 | 84 | 85 | } 86 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/KStreamDemoApplication.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | import org.springframework.kafka.annotation.EnableKafkaStreams; 6 | 7 | @EnableKafkaStreams 8 | @SpringBootApplication 9 | public class KStreamDemoApplication { 10 | 11 | public static void main(String[] args) { 12 | SpringApplication.run(KStreamDemoApplication.class, args); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/TransactionTransformer.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream; 2 | 3 | import org.apache.kafka.streams.kstream.ValueTransformer; 4 | import org.apache.kafka.streams.processor.ProcessorContext; 5 | import org.apache.kafka.streams.state.KeyValueStore; 6 | 7 | import java.math.BigDecimal; 8 | import java.util.Optional; 9 | 10 | import io.confluent.developer.ccloud.demo.kstream.domain.Funds; 11 | import io.confluent.developer.ccloud.demo.kstream.domain.Transaction; 12 | import io.confluent.developer.ccloud.demo.kstream.domain.TransactionResult; 13 | import lombok.RequiredArgsConstructor; 14 | import lombok.extern.slf4j.Slf4j; 15 | 16 | @Slf4j(topic = "Transaction Transformer") 17 | @RequiredArgsConstructor 18 | public class TransactionTransformer 19 | implements ValueTransformer<Transaction, TransactionResult> { 20 | 21 | private final String stateStoreName; 22 | 23 | private KeyValueStore<String, Funds> store; 24 | 25 | @Override 26 | public void close() { 27 | } 28 | 29 | private Funds createEmptyFunds(String account) { 30 | Funds funds = new Funds(account, BigDecimal.ZERO); 31 | store.put(account, funds); 32 | return funds; 33 | } 34 | 35 | private Funds depositFunds(Transaction transaction) { 36 | return updateFunds(transaction.getAccount(), transaction.getAmount()); 37 | } 38 | 39 | private Funds getFunds(String account) { 40 | return Optional.ofNullable(store.get(account)) 41 | .orElseGet(() -> createEmptyFunds(account)); 42 | } 43 | 44 | private boolean hasEnoughFunds(Transaction transaction) { 45 | return getFunds(transaction.getAccount()).getBalance().compareTo(transaction.getAmount()) != -1; 46 | } 47 | 48 | @Override 49 | @SuppressWarnings("unchecked") 50 | public void init(ProcessorContext context) { 51 | store = (KeyValueStore<String, Funds>) context.getStateStore(stateStoreName); 52 | } 53 | 54 | @Override 55 | public TransactionResult transform(Transaction transaction) { 56 | // TODO: implement me!!! 57 | return null; 58 | } 59 | 60 | private Funds updateFunds(String account, BigDecimal amount) { 61 | Funds funds = new Funds(account, getFunds(account).getBalance().add(amount)); 62 | log.info("Updating funds for account {} with {}.
Current balance is {}.", account, amount, funds.getBalance()); 63 | store.put(account, funds); 64 | return funds; 65 | } 66 | 67 | private Funds withdrawFunds(Transaction transaction) { 68 | return updateFunds(transaction.getAccount(), transaction.getAmount().negate()); 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/domain/Funds.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.domain; 2 | 3 | import java.math.BigDecimal; 4 | 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | @Data 10 | @AllArgsConstructor 11 | @NoArgsConstructor 12 | public class Funds { 13 | 14 | String account; 15 | BigDecimal balance; 16 | 17 | } 18 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/domain/Transaction.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.domain; 2 | 3 | import java.math.BigDecimal; 4 | 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | @Data 10 | @AllArgsConstructor 11 | @NoArgsConstructor 12 | public class Transaction { 13 | 14 | public enum Type { 15 | DEPOSIT, WITHDRAW 16 | } 17 | 18 | String guid; 19 | String account; 20 | BigDecimal amount; 21 | Type type; 22 | String currency; 23 | String country; 24 | } 25 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/domain/TransactionResult.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.domain; 2 | 3 | import lombok.Value; 4 | 5 | @Value 6 | public class TransactionResult { 7 | 8 | public enum ErrorType { 9 | INSUFFICIENT_FUNDS 10 | } 11 | 12 | Transaction transaction; 13 | Funds funds; 14 | boolean success; 15 | ErrorType errorType; 16 | } 17 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/FundsStoreConfig.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.topic; 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties; 4 | import org.springframework.context.annotation.Configuration; 5 | 6 | @Configuration 7 | @ConfigurationProperties("stores.funds") 8 | public class FundsStoreConfig { 9 | 10 | private String name = "funds"; 11 | 12 | public String getName() { 13 | return name; 14 | } 15 | 16 | public void setName(String name) { 17 | this.name = name; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/TopicConfig.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.topic; 2 | 3 | import org.springframework.context.annotation.Configuration; 4 | import org.springframework.context.annotation.PropertySource; 5 | 6 | import lombok.Data; 7 | 8 | @Configuration 9 | @PropertySource(value = 
"classpath:topics-defaults.properties") 10 | @Data 11 | public abstract class TopicConfig { 12 | 13 | private String name; 14 | private boolean compacted; 15 | private int partitions; 16 | private short replicationFactor; 17 | 18 | } 19 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/TransactionFailedTopicConfig.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.topic; 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties; 4 | import org.springframework.context.annotation.Configuration; 5 | 6 | @Configuration 7 | @ConfigurationProperties("topics.transaction-failed") 8 | public class TransactionFailedTopicConfig extends TopicConfig { 9 | 10 | } 11 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/TransactionRequestTopicConfig.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.topic; 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties; 4 | import org.springframework.context.annotation.Configuration; 5 | 6 | @Configuration 7 | @ConfigurationProperties("topics.transaction-request") 8 | public class TransactionRequestTopicConfig extends TopicConfig { 9 | 10 | } 11 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/java/io/confluent/developer/ccloud/demo/kstream/topic/TransactionSuccessTopicConfig.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream.topic; 2 | 3 | import org.springframework.boot.context.properties.ConfigurationProperties; 4 | import org.springframework.context.annotation.Configuration; 5 | 6 | @Configuration 7 | @ConfigurationProperties("topics.transaction-success") 8 | public class TransactionSuccessTopicConfig extends TopicConfig { 9 | 10 | } 11 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/resources/application-ccloud.properties: -------------------------------------------------------------------------------- 1 | # CCloud 2 | spring.kafka.properties.ssl.endpoint.identification.algorithm=https 3 | spring.kafka.properties.sasl.mechanism=PLAIN 4 | spring.kafka.properties.request.timeout.ms=20000 5 | spring.kafka.properties.bootstrap.servers=${BOOTSTRAP_SERVERS} 6 | spring.kafka.properties.retry.backoff.ms=500 7 | spring.kafka.properties.sasl.jaas.config=${SASL_JAAS_CONFIG_PROPERTY_FORMAT} 8 | spring.kafka.properties.security.protocol=SASL_SSL 9 | spring.kafka.streams.replication-factor=3 10 | 11 | # Cloud SR Config 12 | spring.kafka.properties.basic.auth.credentials.source=USER_INFO 13 | spring.kafka.properties.schema.registry.basic.auth.user.info=${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO} 14 | spring.kafka.properties.schema.registry.url=${SCHEMA_REGISTRY_URL} 15 | -------------------------------------------------------------------------------- /kstreams-demo/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | spring.application.name=transaction 2 | spring.profiles.active=ccloud 3 | spring.jackson.serialization.write-dates-as-timestamps=false 4 | 
spring.kafka.properties.auto.offset.reset=earliest 5 | spring.kafka.streams.properties.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde 6 | spring.kafka.streams.properties.default.value.serde=org.springframework.kafka.support.serializer.JsonSerde 7 | spring.kafka.streams.properties.spring.json.type.mapping=transaction:io.confluent.developer.ccloud.demo.kstream.domain.Transaction 8 | spring.kafka.streams.properties.spring.json.trusted.packages=* -------------------------------------------------------------------------------- /kstreams-demo/src/main/resources/lombol.config: -------------------------------------------------------------------------------- 1 | lombok.anyConstructor.addConstructorProperties=true -------------------------------------------------------------------------------- /kstreams-demo/src/main/resources/topics-defaults.properties: -------------------------------------------------------------------------------- 1 | # Topics initialization 2 | topics.transaction-failed.name=transaction-failed 3 | topics.transaction-failed.partitions=10 4 | topics.transaction-failed.replication-factor=3 5 | 6 | topics.transaction-request.name=transaction-request 7 | topics.transaction-request.partitions=10 8 | topics.transaction-request.replication-factor=3 9 | 10 | topics.transaction-success.name=transaction-success 11 | topics.transaction-success.partitions=10 12 | topics.transaction-success.replication-factor=3 -------------------------------------------------------------------------------- /kstreams-demo/src/test/java/io/confluent/developer/ccloud/demo/kstream/KStreamConfigTest.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | 5 | import org.apache.kafka.common.serialization.Serde; 6 | import org.apache.kafka.common.serialization.Serdes; 7 | import org.apache.kafka.streams.StreamsBuilder; 8 | import org.apache.kafka.streams.TestInputTopic; 9 | import org.apache.kafka.streams.TestOutputTopic; 10 | import org.apache.kafka.streams.Topology; 11 | import org.apache.kafka.streams.TopologyTestDriver; 12 | import org.apache.kafka.streams.state.KeyValueStore; 13 | import org.junit.Before; 14 | import org.junit.Test; 15 | import org.springframework.kafka.support.serializer.JsonDeserializer; 16 | import org.springframework.kafka.support.serializer.JsonSerde; 17 | 18 | import java.math.BigDecimal; 19 | import java.util.List; 20 | import java.util.Map; 21 | import java.util.Properties; 22 | import java.util.UUID; 23 | 24 | import io.confluent.developer.ccloud.demo.kstream.domain.Funds; 25 | import io.confluent.developer.ccloud.demo.kstream.domain.Transaction; 26 | import io.confluent.developer.ccloud.demo.kstream.domain.TransactionResult; 27 | import io.confluent.developer.ccloud.demo.kstream.topic.FundsStoreConfig; 28 | import io.confluent.developer.ccloud.demo.kstream.topic.TransactionFailedTopicConfig; 29 | import io.confluent.developer.ccloud.demo.kstream.topic.TransactionRequestTopicConfig; 30 | import io.confluent.developer.ccloud.demo.kstream.topic.TransactionSuccessTopicConfig; 31 | 32 | import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG; 33 | import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG; 34 | import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG; 35 | import static
org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG; 36 | import static org.hamcrest.MatcherAssert.assertThat; 37 | import static org.hamcrest.Matchers.is; 38 | import static org.hamcrest.Matchers.not; 39 | import static org.hamcrest.Matchers.nullValue; 40 | 41 | public class KStreamConfigTest { 42 | 43 | TopologyTestDriver testDriver; 44 | TransactionRequestTopicConfig txRequestConfig; 45 | TransactionSuccessTopicConfig txSuccessConfig; 46 | TransactionFailedTopicConfig txFailedTopicConfig; 47 | 48 | private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().findAndRegisterModules(); 49 | private JsonSerde<Transaction> transactionSerde; 50 | private JsonSerde<TransactionResult> transactionResultSerde; 51 | private Serde<String> stringSerde; 52 | private Transaction deposit100; 53 | private Transaction withdraw100; 54 | private Transaction withdraw200; 55 | private FundsStoreConfig fundsStoreConfig; 56 | 57 | final static Map<String, String> testConfig = Map.of( 58 | BOOTSTRAP_SERVERS_CONFIG, "localhost:8080", 59 | APPLICATION_ID_CONFIG, "mytest", 60 | DEFAULT_KEY_SERDE_CLASS_CONFIG, "org.apache.kafka.common.serialization.Serdes$StringSerde", 61 | DEFAULT_VALUE_SERDE_CLASS_CONFIG, "org.springframework.kafka.support.serializer.JsonSerde", 62 | JsonDeserializer.TYPE_MAPPINGS, "transaction:io.confluent.developer.ccloud.demo.kstream.domain.Transaction", 63 | JsonDeserializer.TRUSTED_PACKAGES, "*" 64 | ); 65 | private Properties properties; 66 | private Topology topology; 67 | 68 | @Before 69 | public void setUp() { 70 | 71 | // TODO use spring test and test configs 72 | txRequestConfig = new TransactionRequestTopicConfig(); 73 | txRequestConfig.setName("transaction-request"); 74 | 75 | txSuccessConfig = new TransactionSuccessTopicConfig(); 76 | txSuccessConfig.setName("transaction-success"); 77 | 78 | txFailedTopicConfig = new TransactionFailedTopicConfig(); 79 | txFailedTopicConfig.setName("transaction-failed"); 80 | 81 | fundsStoreConfig = new FundsStoreConfig(); 82 | fundsStoreConfig.setName("funds-store"); 83 | 84 | final StreamsBuilder streamsBuilder = new StreamsBuilder(); 85 | topology = new KStreamConfig(txRequestConfig, txSuccessConfig, txFailedTopicConfig, fundsStoreConfig) 86 | .topology(streamsBuilder); 87 | 88 | // serdes 89 | transactionSerde = new JsonSerde<>(Transaction.class, OBJECT_MAPPER); 90 | transactionResultSerde = new JsonSerde<>(TransactionResult.class, OBJECT_MAPPER); 91 | stringSerde = Serdes.String(); 92 | 93 | // ttd 94 | properties = new Properties(); 95 | properties.putAll(testConfig); 96 | 97 | deposit100 = new Transaction(UUID.randomUUID().toString(), 98 | "1", 99 | new BigDecimal(100), 100 | Transaction.Type.DEPOSIT, 101 | "USD", 102 | "USA"); 103 | 104 | withdraw100 = new Transaction(UUID.randomUUID().toString(), 105 | "1", 106 | new BigDecimal(100), 107 | Transaction.Type.WITHDRAW, 108 | "USD", 109 | "USA"); 110 | 111 | withdraw200 = new Transaction(UUID.randomUUID().toString(), 112 | "1", 113 | new BigDecimal(200), 114 | Transaction.Type.WITHDRAW, 115 | "USD", 116 | "USA"); 117 | } 118 | 119 | @Test 120 | public void testDriverShouldNotBeNull() { 121 | try (TopologyTestDriver testDriver = new TopologyTestDriver(topology, properties)) { 122 | assertThat(testDriver, not(nullValue()) 123 | ); 124 | } 125 | } 126 | 127 | @Test 128 | public void shouldCreateSuccessfulTransaction() { 129 | 130 | try (TopologyTestDriver testDriver = new TopologyTestDriver(topology, properties)) { 131 | 132 | final TestInputTopic<String, Transaction> inputTopic = testDriver 133 | .createInputTopic(txRequestConfig.getName(),
stringSerde.serializer(), transactionSerde.serializer()); 134 | 135 | inputTopic.pipeInput(deposit100.getAccount(), deposit100); 136 | inputTopic.pipeInput(withdraw100.getAccount(), withdraw100); 137 | 138 | final TestOutputTopic<String, TransactionResult> 139 | outputTopic = 140 | testDriver.createOutputTopic(txSuccessConfig.getName(), stringSerde.deserializer(), 141 | transactionResultSerde.deserializer()); 142 | 143 | final List<TransactionResult> successfulTransactions = outputTopic.readValuesToList(); 144 | // balance should be 0 145 | final TransactionResult transactionResult = successfulTransactions.get(1); 146 | assertThat(transactionResult.getFunds().getBalance(), is(new BigDecimal(0))); 147 | } 148 | } 149 | 150 | @Test 151 | public void shouldBeInsufficientFunds() { 152 | 153 | try (TopologyTestDriver testDriver = new TopologyTestDriver(topology, properties)) { 154 | final TestInputTopic<String, Transaction> inputTopic = testDriver 155 | .createInputTopic(txRequestConfig.getName(), stringSerde.serializer(), transactionSerde.serializer()); 156 | 157 | inputTopic.pipeInput(deposit100.getAccount(), deposit100); 158 | inputTopic.pipeInput(withdraw200.getAccount(), withdraw200); 159 | 160 | final TestOutputTopic<String, TransactionResult> 161 | failedResultOutputTopic = 162 | testDriver.createOutputTopic(txFailedTopicConfig.getName(), stringSerde.deserializer(), 163 | transactionResultSerde.deserializer()); 164 | 165 | final TestOutputTopic<String, TransactionResult> 166 | successResultOutputTopic = 167 | testDriver.createOutputTopic(txSuccessConfig.getName(), stringSerde.deserializer(), 168 | transactionResultSerde.deserializer()); 169 | 170 | final TransactionResult successfulDeposit100Result = successResultOutputTopic.readValuesToList().get(0); 171 | assertThat(successfulDeposit100Result.getFunds().getBalance(), is(new BigDecimal(100))); 172 | 173 | final List<TransactionResult> failedTransactions = failedResultOutputTopic.readValuesToList(); 174 | // the withdrawal should have been rejected for insufficient funds 175 | final TransactionResult transactionResult = failedTransactions.get(0); 176 | assertThat(transactionResult.getErrorType(), is(TransactionResult.ErrorType.INSUFFICIENT_FUNDS)); 177 | } 178 | } 179 | 180 | @Test 181 | public void balanceShouldBe300() { 182 | try (TopologyTestDriver testDriver = new TopologyTestDriver(topology, properties)) { 183 | final TestInputTopic<String, Transaction> inputTopic = testDriver 184 | .createInputTopic(txRequestConfig.getName(), stringSerde.serializer(), transactionSerde.serializer()); 185 | 186 | inputTopic.pipeInput(deposit100.getAccount(), deposit100); 187 | inputTopic.pipeInput(deposit100.getAccount(), deposit100); 188 | inputTopic.pipeInput(deposit100.getAccount(), deposit100); 189 | 190 | final KeyValueStore<String, Funds> store = testDriver.getKeyValueStore(fundsStoreConfig.getName()); 191 | 192 | assertThat(store.get("1").getBalance(), is(new BigDecimal(300))); 193 | } 194 | } 195 | } -------------------------------------------------------------------------------- /kstreams-demo/src/test/java/io/confluent/developer/ccloud/demo/kstream/TransactionTransformerTest.java: -------------------------------------------------------------------------------- 1 | package io.confluent.developer.ccloud.demo.kstream; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | 5 | import org.apache.kafka.common.serialization.Serdes; 6 | import org.apache.kafka.streams.processor.MockProcessorContext; 7 | import org.apache.kafka.streams.state.KeyValueStore; 8 | import org.apache.kafka.streams.state.Stores; 9 | import org.junit.Before; 10 | import org.junit.Test; 11 | import org.springframework.kafka.support.serializer.JsonSerde; 12 | 13 | import
java.math.BigDecimal; 14 | import java.util.Properties; 15 | import java.util.UUID; 16 | 17 | import io.confluent.developer.ccloud.demo.kstream.domain.Funds; 18 | import io.confluent.developer.ccloud.demo.kstream.domain.Transaction; 19 | import io.confluent.developer.ccloud.demo.kstream.domain.TransactionResult; 20 | 21 | import static io.confluent.developer.ccloud.demo.kstream.KStreamConfigTest.testConfig; 22 | import static io.confluent.developer.ccloud.demo.kstream.domain.TransactionResult.ErrorType.INSUFFICIENT_FUNDS; 23 | import static org.hamcrest.MatcherAssert.assertThat; 24 | import static org.hamcrest.Matchers.is; 25 | import static org.hamcrest.Matchers.nullValue; 26 | 27 | public class TransactionTransformerTest { 28 | 29 | private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().findAndRegisterModules(); 30 | private KeyValueStore<String, Funds> fundsStore; 31 | private MockProcessorContext mockContext; 32 | private TransactionTransformer transactionTransformer; 33 | 34 | 35 | @Before 36 | public void setup() { 37 | final Properties properties = new Properties(); 38 | properties.putAll(testConfig); 39 | mockContext = new MockProcessorContext(properties); 40 | 41 | fundsStore = Stores.keyValueStoreBuilder( 42 | Stores.inMemoryKeyValueStore("fundsStore"), 43 | Serdes.String(), 44 | new JsonSerde<>(Funds.class, OBJECT_MAPPER)) 45 | .withLoggingDisabled() // Changelog is not supported by MockProcessorContext. 46 | .build(); 47 | 48 | fundsStore.init(mockContext, fundsStore); 49 | mockContext.register(fundsStore, null); 50 | 51 | transactionTransformer = new TransactionTransformer(fundsStore.name()); 52 | transactionTransformer.init(mockContext); 53 | } 54 | 55 | @Test 56 | public void shouldStoreTransaction() { 57 | final Transaction 58 | transaction = 59 | new Transaction(UUID.randomUUID().toString(), "1", new BigDecimal(100), Transaction.Type.DEPOSIT, "USD", "USA"); 60 | final TransactionResult transactionResult = transactionTransformer.transform(transaction); 61 | 62 | assertThat(transactionResult.isSuccess(), is(true)); 63 | } 64 | 65 | @Test 66 | public void shouldHaveInsufficientFunds() { 67 | final Transaction 68 | transaction = 69 | new Transaction(UUID.randomUUID().toString(), "1", new BigDecimal("100"), Transaction.Type.WITHDRAW, "RUR", 70 | "Russia"); 71 | final TransactionResult result = transactionTransformer.transform(transaction); 72 | 73 | assertThat(result.isSuccess(), is(false)); 74 | assertThat(result.getErrorType(), is(INSUFFICIENT_FUNDS)); 75 | } 76 | 77 | @Test 78 | public void shouldHaveEnoughFunds() { 79 | final Transaction transaction1 = 80 | new Transaction(UUID.randomUUID().toString(), "1", new BigDecimal("300"), Transaction.Type.DEPOSIT, "RUR", 81 | "Russia"); 82 | 83 | final Transaction transaction2 = 84 | new Transaction(UUID.randomUUID().toString(), "1", new BigDecimal("200"), Transaction.Type.WITHDRAW, "RUR", 85 | "Russia"); 86 | transactionTransformer.transform(transaction1); 87 | final TransactionResult result = transactionTransformer.transform(transaction2); 88 | 89 | assertThat(result.isSuccess(), is(true)); 90 | assertThat(result.getErrorType(), is(nullValue())); 91 | } 92 | } -------------------------------------------------------------------------------- /kstreams-demo/src/test/resources/application-default.properties: -------------------------------------------------------------------------------- 1 | kafka.streams.enabled=false --------------------------------------------------------------------------------
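Note: TransactionTransformer.transform(...) above is deliberately left as a workshop TODO. A minimal sketch that satisfies TransactionTransformerTest and KStreamConfigTest (one possible solution, using only the helper methods already defined in the class) could look like:

    @Override
    public TransactionResult transform(Transaction transaction) {
      // Deposits always succeed and simply add to the balance.
      if (transaction.getType() == Transaction.Type.DEPOSIT) {
        return new TransactionResult(transaction, depositFunds(transaction), true, null);
      }
      // A withdrawal is applied only if the current balance covers the amount.
      if (hasEnoughFunds(transaction)) {
        return new TransactionResult(transaction, withdrawFunds(transaction), true, null);
      }
      log.info("Not enough funds for account {}.", transaction.getAccount());
      return new TransactionResult(transaction, getFunds(transaction.getAccount()), false,
                                   TransactionResult.ErrorType.INSUFFICIENT_FUNDS);
    }

The funds state store carries the running balance between transactions, which is why the tests can deposit in one call and withdraw in the next.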
/kstreams-demo/src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | %yellow(%d{yyyy-MM-dd HH:mm:ss}) %cyan(%logger{50}) %highlight([%p]) %green((%file:%line\)) - %msg%n 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | %yellow(%d{yyyy-MM-dd HH:mm:ss}) %highlight([%p]) %magenta((%file:%line\)) - %msg%n 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /scripts/ccloud/ccloud-generate-cp-config.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright 2020 Confluent Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | 18 | ############################################################################### 19 | # Overview: 20 | # 21 | # This code reads a local Confluent Cloud configuration file 22 | # and writes delta configuration files into ./delta_configs for 23 | # Confluent Platform components and clients connecting to Confluent Cloud. 24 | # 25 | # Confluent Platform Components: 26 | # - Confluent Schema Registry 27 | # - KSQL Data Generator 28 | # - ksqlDB server 29 | # - Confluent Replicator (executable) 30 | # - Confluent Control Center 31 | # - Confluent Metrics Reporter 32 | # - Confluent REST Proxy 33 | # - Kafka Connect 34 | # - Kafka connector 35 | # - Kafka command line tools 36 | # 37 | # Kafka Clients: 38 | # - Java (Producer/Consumer) 39 | # - Java (Streams) 40 | # - Python 41 | # - .NET 42 | # - Go 43 | # - Node.js (https://github.com/Blizzard/node-rdkafka) 44 | # - C++ 45 | # 46 | # Documentation for using this script: 47 | # 48 | # https://docs.confluent.io/current/cloud/connect/auto-generate-configs.html 49 | # 50 | # Arguments: 51 | # 52 | # 1 (optional) - CONFIG_FILE, defaults to ~/.ccloud/config, (required if specifying SR_CONFIG_FILE) 53 | # 2 (optional) - SR_CONFIG_FILE, defaults to CONFIG_FILE 54 | # 55 | # Example CONFIG_FILE at ~/.ccloud/config 56 | # 57 | # $ cat $HOME/.ccloud/config 58 | # 59 | # bootstrap.servers= 60 | # ssl.endpoint.identification.algorithm=https 61 | # security.protocol=SASL_SSL 62 | # sasl.mechanism=PLAIN 63 | # sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username\="" password\=""; 64 | # 65 | # If you are using Confluent Cloud Schema Registry, add the following configuration parameters 66 | # either to file above (arg 1 CONFIG_FILE) or to a separate file (arg 2 SR_CONFIG_FILE) 67 | # 68 | # basic.auth.credentials.source=USER_INFO 69 | # schema.registry.basic.auth.user.info=: 70 | # schema.registry.url=https:// 71 | # 72 | # If you are using Confluent Cloud ksqlDB, add the following configuration parameters 73 | # to file above (arg 1 CONFIG_FILE) 74 | # 75 | # ksql.endpoint= 76 | # ksql.basic.auth.user.info=: 77 | # 78 | ################################################################################ 79 | CONFIG_FILE=$1 80 | if [[ 
-z "$CONFIG_FILE" ]]; then 81 | CONFIG_FILE=~/.ccloud/config 82 | fi 83 | if [[ ! -f "$CONFIG_FILE" ]]; then 84 | echo "File $CONFIG_FILE is not found. Please create this properties file to connect to your Confluent Cloud cluster and then try again" 85 | echo "See https://docs.confluent.io/current/cloud/connect/auto-generate-configs.html for more information" 86 | exit 1 87 | fi 88 | 89 | SR_CONFIG_FILE=$2 90 | if [[ -z "$SR_CONFIG_FILE" ]]; then 91 | SR_CONFIG_FILE=$CONFIG_FILE 92 | fi 93 | if [[ ! -f "$SR_CONFIG_FILE" ]]; then 94 | echo "File $SR_CONFIG_FILE is not found. Please create this properties file to connect to your Schema Registry and then try again" 95 | echo "See https://docs.confluent.io/current/cloud/connect/auto-generate-configs.html for more information" 96 | exit 1 97 | fi 98 | 99 | echo -e "\nGenerating component configurations from $CONFIG_FILE and Schema Registry configurations from $SR_CONFIG_FILE" 100 | echo -e "\n(If you want to run any of these components to talk to Confluent Cloud, these are the configurations to add to the properties file for each component)" 101 | 102 | # Set permissions 103 | PERM=600 104 | if ls --version 2>/dev/null | grep -q 'coreutils' ; then 105 | # GNU binutils 106 | PERM=$(stat -c "%a" $CONFIG_FILE) 107 | else 108 | # BSD 109 | PERM=$(stat -f "%OLp" $CONFIG_FILE) 110 | fi 111 | 112 | # Make destination 113 | DEST="delta_configs" 114 | mkdir -p $DEST 115 | 116 | ################################################################################ 117 | # Glean parameters from the Confluent Cloud configuration file 118 | ################################################################################ 119 | 120 | # Kafka cluster 121 | BOOTSTRAP_SERVERS=$( grep "^bootstrap.server" $CONFIG_FILE | awk -F'=' '{print $2;}' ) 122 | BOOTSTRAP_SERVERS=${BOOTSTRAP_SERVERS/\\/} 123 | SASL_JAAS_CONFIG=$( grep "^sasl.jaas.config" $CONFIG_FILE | cut -d'=' -f2- ) 124 | SASL_JAAS_CONFIG_PROPERTY_FORMAT=${SASL_JAAS_CONFIG/username\\=/username=} 125 | SASL_JAAS_CONFIG_PROPERTY_FORMAT=${SASL_JAAS_CONFIG_PROPERTY_FORMAT/password\\=/password=} 126 | CLOUD_KEY=$( echo $SASL_JAAS_CONFIG | awk '{print $3}' | awk -F'"' '$0=$2' ) 127 | CLOUD_SECRET=$( echo $SASL_JAAS_CONFIG | awk '{print $4}' | awk -F'"' '$0=$2' ) 128 | 129 | # Schema Registry 130 | BASIC_AUTH_CREDENTIALS_SOURCE=$( grep "^basic.auth.credentials.source" $SR_CONFIG_FILE | awk -F'=' '{print $2;}' ) 131 | SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO=$( grep "^schema.registry.basic.auth.user.info" $SR_CONFIG_FILE | awk -F'=' '{print $2;}' ) 132 | SCHEMA_REGISTRY_URL=$( grep "^schema.registry.url" $SR_CONFIG_FILE | awk -F'=' '{print $2;}' ) 133 | 134 | # ksqlDB 135 | KSQLDB_ENDPOINT=$( grep "^ksql.endpoint" $CONFIG_FILE | awk -F'=' '{print $2;}' ) 136 | KSQLDB_BASIC_AUTH_USER_INFO=$( grep "^ksql.basic.auth.user.info" $CONFIG_FILE | awk -F'=' '{print $2;}' ) 137 | 138 | ################################################################################ 139 | # Build configuration file with CCloud connection parameters and 140 | # Confluent Monitoring Interceptors for Streams Monitoring in Confluent Control Center 141 | ################################################################################ 142 | INTERCEPTORS_CONFIG_FILE=$DEST/interceptors-ccloud.config 143 | rm -f $INTERCEPTORS_CONFIG_FILE 144 | echo "# Configuration derived from $CONFIG_FILE" > $INTERCEPTORS_CONFIG_FILE 145 | while read -r line 146 | do 147 | # Skip lines that are commented out 148 | if [[ ! 
-z $line && ${line:0:1} == '#' ]]; then 149 | continue 150 | fi 151 | # Skip lines that contain just whitespace 152 | if [[ -z "${line// }" ]]; then 153 | continue 154 | fi 155 | if [[ ${line:0:9} == 'bootstrap' ]]; then 156 | line=${line/\\/} 157 | fi 158 | echo $line >> $INTERCEPTORS_CONFIG_FILE 159 | done < "$CONFIG_FILE" 160 | echo -e "\n# Confluent Monitoring Interceptor specific configuration" >> $INTERCEPTORS_CONFIG_FILE 161 | while read -r line 162 | do 163 | # Skip lines that are commented out 164 | if [[ ! -z $line && ${line:0:1} == '#' ]]; then 165 | continue 166 | fi 167 | # Skip lines that contain just whitespace 168 | if [[ -z "${line// }" ]]; then 169 | continue 170 | fi 171 | if [[ ${line:0:9} == 'bootstrap' ]]; then 172 | line=${line/\\/} 173 | fi 174 | if [[ ${line:0:4} == 'sasl' || 175 | ${line:0:3} == 'ssl' || 176 | ${line:0:8} == 'security' || 177 | ${line:0:9} == 'bootstrap' ]]; then 178 | echo "confluent.monitoring.interceptor.$line" >> $INTERCEPTORS_CONFIG_FILE 179 | fi 180 | done < "$CONFIG_FILE" 181 | chmod $PERM $INTERCEPTORS_CONFIG_FILE 182 | 183 | echo -e "\nConfluent Platform Components:" 184 | 185 | ################################################################################ 186 | # Confluent Schema Registry instance (local) for Confluent Cloud 187 | ################################################################################ 188 | SR_CONFIG_DELTA=$DEST/schema-registry-ccloud.delta 189 | echo "$SR_CONFIG_DELTA" 190 | rm -f $SR_CONFIG_DELTA 191 | while read -r line 192 | do 193 | if [[ ! -z $line && ${line:0:1} != '#' ]]; then 194 | if [[ ${line:0:29} != 'basic.auth.credentials.source' && ${line:0:15} != 'schema.registry' ]]; then 195 | echo "kafkastore.$line" >> $SR_CONFIG_DELTA 196 | fi 197 | fi 198 | done < "$CONFIG_FILE" 199 | chmod $PERM $SR_CONFIG_DELTA 200 | 201 | ################################################################################ 202 | # Confluent Replicator (executable) for Confluent Cloud 203 | ################################################################################ 204 | REPLICATOR_PRODUCER_DELTA=$DEST/replicator-to-ccloud-producer.delta 205 | echo "$REPLICATOR_PRODUCER_DELTA" 206 | rm -f $REPLICATOR_PRODUCER_DELTA 207 | cp $INTERCEPTORS_CONFIG_FILE $REPLICATOR_PRODUCER_DELTA 208 | echo -e "\n# Confluent Replicator (executable) specific configuration" >> $REPLICATOR_PRODUCER_DELTA 209 | echo "interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" >> $REPLICATOR_PRODUCER_DELTA 210 | REPLICATOR_SASL_JAAS_CONFIG=$SASL_JAAS_CONFIG 211 | REPLICATOR_SASL_JAAS_CONFIG=${REPLICATOR_SASL_JAAS_CONFIG//\\=/=} 212 | REPLICATOR_SASL_JAAS_CONFIG=${REPLICATOR_SASL_JAAS_CONFIG//\"/\\\"} 213 | chmod $PERM $REPLICATOR_PRODUCER_DELTA 214 | 215 | ################################################################################ 216 | # ksqlDB Server runs locally and connects to Confluent Cloud 217 | ################################################################################ 218 | KSQLDB_SERVER_DELTA=$DEST/ksqldb-server-ccloud.delta 219 | echo "$KSQLDB_SERVER_DELTA" 220 | cp $INTERCEPTORS_CONFIG_FILE $KSQLDB_SERVER_DELTA 221 | echo -e "\n# ksqlDB Server specific configuration" >> $KSQLDB_SERVER_DELTA 222 | echo "producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" >> $KSQLDB_SERVER_DELTA 223 | echo "consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" >> $KSQLDB_SERVER_DELTA 224 | echo 
"ksql.streams.producer.retries=2147483647" >> $KSQLDB_SERVER_DELTA 225 | echo "ksql.streams.producer.confluent.batch.expiry.ms=9223372036854775807" >> $KSQLDB_SERVER_DELTA 226 | echo "ksql.streams.producer.request.timeout.ms=300000" >> $KSQLDB_SERVER_DELTA 227 | echo "ksql.streams.producer.max.block.ms=9223372036854775807" >> $KSQLDB_SERVER_DELTA 228 | echo "ksql.streams.replication.factor=3" >> $KSQLDB_SERVER_DELTA 229 | echo "ksql.internal.topic.replicas=3" >> $KSQLDB_SERVER_DELTA 230 | echo "ksql.sink.replicas=3" >> $KSQLDB_SERVER_DELTA 231 | echo -e "\n# Confluent Schema Registry configuration for ksqlDB Server" >> $KSQLDB_SERVER_DELTA 232 | while read -r line 233 | do 234 | if [[ ${line:0:29} == 'basic.auth.credentials.source' ]]; then 235 | echo "ksql.schema.registry.$line" >> $KSQLDB_SERVER_DELTA 236 | elif [[ ${line:0:15} == 'schema.registry' ]]; then 237 | echo "ksql.$line" >> $KSQLDB_SERVER_DELTA 238 | fi 239 | done < $SR_CONFIG_FILE 240 | chmod $PERM $KSQLDB_SERVER_DELTA 241 | 242 | ################################################################################ 243 | # KSQL DataGen for Confluent Cloud 244 | ################################################################################ 245 | KSQL_DATAGEN_DELTA=$DEST/ksql-datagen.delta 246 | echo "$KSQL_DATAGEN_DELTA" 247 | rm -f $KSQL_DATAGEN_DELTA 248 | cp $INTERCEPTORS_CONFIG_FILE $KSQL_DATAGEN_DELTA 249 | echo -e "\n# KSQL DataGen specific configuration" >> $KSQL_DATAGEN_DELTA 250 | echo "interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" >> $KSQL_DATAGEN_DELTA 251 | echo -e "\n# Confluent Schema Registry configuration for KSQL DataGen" >> $KSQL_DATAGEN_DELTA 252 | while read -r line 253 | do 254 | if [[ ${line:0:29} == 'basic.auth.credentials.source' ]]; then 255 | echo "ksql.schema.registry.$line" >> $KSQL_DATAGEN_DELTA 256 | elif [[ ${line:0:15} == 'schema.registry' ]]; then 257 | echo "ksql.$line" >> $KSQL_DATAGEN_DELTA 258 | fi 259 | done < $SR_CONFIG_FILE 260 | chmod $PERM $KSQL_DATAGEN_DELTA 261 | 262 | ################################################################################ 263 | # Confluent Control Center runs locally, monitors Confluent Cloud, and uses Confluent Cloud cluster as the backstore 264 | ################################################################################ 265 | C3_DELTA=$DEST/control-center-ccloud.delta 266 | echo "$C3_DELTA" 267 | rm -f $C3_DELTA 268 | echo -e "\n# Confluent Control Center specific configuration" >> $C3_DELTA 269 | while read -r line 270 | do 271 | if [[ ! 
-z $line && ${line:0:1} != '#' ]]; then 272 | if [[ ${line:0:9} == 'bootstrap' ]]; then 273 | line=${line/\\/} 274 | echo "$line" >> $C3_DELTA 275 | fi 276 | if [[ ${line:0:4} == 'sasl' || ${line:0:3} == 'ssl' || ${line:0:8} == 'security' ]]; then 277 | echo "confluent.controlcenter.streams.$line" >> $C3_DELTA 278 | fi 279 | fi 280 | done < "$CONFIG_FILE" 281 | # max.message.bytes is enforced to 8MB in Confluent Cloud 282 | echo "confluent.metrics.topic.max.message.bytes=8388608" >> $C3_DELTA 283 | echo -e "\n# Confluent Schema Registry configuration for Confluent Control Center" >> $C3_DELTA 284 | while read -r line 285 | do 286 | if [[ ${line:0:29} == 'basic.auth.credentials.source' ]]; then 287 | echo "confluent.controlcenter.schema.registry.$line" >> $C3_DELTA 288 | elif [[ ${line:0:15} == 'schema.registry' ]]; then 289 | echo "confluent.controlcenter.$line" >> $C3_DELTA 290 | fi 291 | done < $SR_CONFIG_FILE 292 | chmod $PERM $C3_DELTA 293 | 294 | ################################################################################ 295 | # Confluent Metrics Reporter to Confluent Cloud 296 | ################################################################################ 297 | METRICS_REPORTER_DELTA=$DEST/metrics-reporter.delta 298 | echo "$METRICS_REPORTER_DELTA" 299 | rm -f $METRICS_REPORTER_DELTA 300 | echo "metric.reporters=io.confluent.metrics.reporter.ConfluentMetricsReporter" >> $METRICS_REPORTER_DELTA 301 | echo "confluent.metrics.reporter.topic.replicas=3" >> $METRICS_REPORTER_DELTA 302 | while read -r line 303 | do 304 | if [[ ! -z $line && ${line:0:1} != '#' ]]; then 305 | if [[ ${line:0:9} == 'bootstrap' || ${line:0:4} == 'sasl' || ${line:0:3} == 'ssl' || ${line:0:8} == 'security' ]]; then 306 | echo "confluent.metrics.reporter.$line" >> $METRICS_REPORTER_DELTA 307 | fi 308 | fi 309 | done < "$CONFIG_FILE" 310 | chmod $PERM $METRICS_REPORTER_DELTA 311 | 312 | ################################################################################ 313 | # Confluent REST Proxy to Confluent Cloud 314 | ################################################################################ 315 | REST_PROXY_DELTA=$DEST/rest-proxy.delta 316 | echo "$REST_PROXY_DELTA" 317 | rm -f $REST_PROXY_DELTA 318 | while read -r line 319 | do 320 | if [[ ! 
-z $line && ${line:0:1} != '#' ]]; then 321 | if [[ ${line:0:9} == 'bootstrap' || ${line:0:4} == 'sasl' || ${line:0:3} == 'ssl' || ${line:0:8} == 'security' ]]; then 322 | echo "$line" >> $REST_PROXY_DELTA 323 | echo "client.$line" >> $REST_PROXY_DELTA 324 | fi 325 | fi 326 | done < "$CONFIG_FILE" 327 | echo -e "\n# Confluent Schema Registry configuration for REST Proxy" >> $REST_PROXY_DELTA 328 | while read -r line 329 | do 330 | if [[ ${line:0:29} == 'basic.auth.credentials.source' || ${line:0:36} == 'schema.registry.basic.auth.user.info' ]]; then 331 | echo "client.$line" >> $REST_PROXY_DELTA 332 | elif [[ ${line:0:19} == 'schema.registry.url' ]]; then 333 | echo "$line" >> $REST_PROXY_DELTA 334 | fi 335 | done < $SR_CONFIG_FILE 336 | chmod $PERM $REST_PROXY_DELTA 337 | 338 | ################################################################################ 339 | # Kafka Connect runs locally and connects to Confluent Cloud 340 | ################################################################################ 341 | CONNECT_DELTA=$DEST/connect-ccloud.delta 342 | echo "$CONNECT_DELTA" 343 | rm -f $CONNECT_DELTA 344 | cat <<EOF > $CONNECT_DELTA 345 | # Configuration for embedded admin client 346 | replication.factor=3 347 | config.storage.replication.factor=3 348 | offset.storage.replication.factor=3 349 | status.storage.replication.factor=3 350 | 351 | EOF 352 | while read -r line 353 | do 354 | if [[ ! -z $line && ${line:0:1} != '#' ]]; then 355 | if [[ ${line:0:9} == 'bootstrap' ]]; then 356 | line=${line/\\/} 357 | echo "$line" >> $CONNECT_DELTA 358 | fi 359 | if [[ ${line:0:4} == 'sasl' || ${line:0:3} == 'ssl' || ${line:0:8} == 'security' ]]; then 360 | echo "$line" >> $CONNECT_DELTA 361 | fi 362 | fi 363 | done < "$CONFIG_FILE" 364 | 365 | for prefix in "producer" "consumer" "producer.confluent.monitoring.interceptor" "consumer.confluent.monitoring.interceptor" ; do 366 | 367 | echo -e "\n# Configuration for embedded $prefix" >> $CONNECT_DELTA 368 | while read -r line 369 | do 370 | if [[ !
-z $line && ${line:0:1} != '#' ]]; then 371 | if [[ ${line:0:9} == 'bootstrap' ]]; then 372 | line=${line/\\/} 373 | fi 374 | if [[ ${line:0:4} == 'sasl' || ${line:0:3} == 'ssl' || ${line:0:8} == 'security' ]]; then 375 | echo "${prefix}.$line" >> $CONNECT_DELTA 376 | fi 377 | fi 378 | done < "$CONFIG_FILE" 379 | 380 | done 381 | 382 | 383 | cat <<EOF >> $CONNECT_DELTA 384 | 385 | # Confluent Schema Registry for Kafka Connect 386 | value.converter=io.confluent.connect.avro.AvroConverter 387 | value.converter.basic.auth.credentials.source=$BASIC_AUTH_CREDENTIALS_SOURCE 388 | value.converter.schema.registry.basic.auth.user.info=$SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO 389 | value.converter.schema.registry.url=$SCHEMA_REGISTRY_URL 390 | EOF 391 | chmod $PERM $CONNECT_DELTA 392 | 393 | ################################################################################ 394 | # Kafka connector 395 | ################################################################################ 396 | CONNECTOR_DELTA=$DEST/connector-ccloud.delta 397 | echo "$CONNECTOR_DELTA" 398 | rm -f $CONNECTOR_DELTA 399 | cat <<EOF >> $CONNECTOR_DELTA 400 | // Confluent Schema Registry for Kafka connectors 401 | value.converter=io.confluent.connect.avro.AvroConverter 402 | value.converter.basic.auth.credentials.source=$BASIC_AUTH_CREDENTIALS_SOURCE 403 | value.converter.schema.registry.basic.auth.user.info=$SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO 404 | value.converter.schema.registry.url=$SCHEMA_REGISTRY_URL 405 | EOF 406 | chmod $PERM $CONNECTOR_DELTA 407 | 408 | ################################################################################ 409 | # AK command line tools 410 | ################################################################################ 411 | AK_TOOLS_DELTA=$DEST/ak-tools-ccloud.delta 412 | echo "$AK_TOOLS_DELTA" 413 | rm -f $AK_TOOLS_DELTA 414 | cp $CONFIG_FILE $AK_TOOLS_DELTA 415 | chmod $PERM $AK_TOOLS_DELTA 416 | 417 | ################################################################################ 418 | # ENV 419 | ################################################################################ 420 | ENV_CONFIG=$DEST/env.delta 421 | echo "$ENV_CONFIG" 422 | rm -f $ENV_CONFIG 423 | 424 | cat <<EOF >> $ENV_CONFIG 425 | export BOOTSTRAP_SERVERS=$BOOTSTRAP_SERVERS 426 | export SASL_JAAS_CONFIG='$SASL_JAAS_CONFIG_PROPERTY_FORMAT' 427 | export SASL_JAAS_CONFIG_PROPERTY_FORMAT='$SASL_JAAS_CONFIG_PROPERTY_FORMAT' 428 | export REPLICATOR_SASL_JAAS_CONFIG='$REPLICATOR_SASL_JAAS_CONFIG' 429 | export BASIC_AUTH_CREDENTIALS_SOURCE=$BASIC_AUTH_CREDENTIALS_SOURCE 430 | export SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO=$SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO 431 | export SCHEMA_REGISTRY_URL=$SCHEMA_REGISTRY_URL 432 | export CLOUD_KEY=$CLOUD_KEY 433 | export CLOUD_SECRET=$CLOUD_SECRET 434 | export KSQLDB_ENDPOINT=$KSQLDB_ENDPOINT 435 | export KSQLDB_BASIC_AUTH_USER_INFO=$KSQLDB_BASIC_AUTH_USER_INFO 436 | EOF 437 | chmod $PERM $ENV_CONFIG -------------------------------------------------------------------------------- /scripts/ccloud/ccloud_library.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ################################################################ 4 | # ccloud_library.sh 5 | # -------------------------------------------------------------- 6 | # This library of functions automates common tasks with Confluent Cloud https://confluent.cloud/ 7 | # 8 | # Example usage in https://github.com/confluentinc/examples 9 | # 10 | # Get the library: 11 | # 12 | # wget -O ccloud_library.sh
https://raw.githubusercontent.com/confluentinc/examples/latest/utils/ccloud_library.sh 13 | # 14 | # Use the library from your script: 15 | # 16 | # source ./ccloud_library.sh 17 | # 18 | # Support: 19 | # 20 | # 1. Community support via https://github.com/confluentinc/examples/issues 21 | # 2. There are no guarantees for backwards compatibility 22 | # 3. PRs welcome ;) 23 | ################################################################ 24 | 25 | RED='\033[0;31m' 26 | NC='\033[0m' # No Color 27 | GREEN='\033[0;32m' 28 | BLUE='\033[0;34m' 29 | YELLOW='\033[1;33m' 30 | 31 | # -------------------------------------------------------------- 32 | # Initialize 33 | # -------------------------------------------------------------- 34 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" 35 | 36 | # -------------------------------------------------------------- 37 | # Library 38 | # -------------------------------------------------------------- 39 | 40 | function ccloud::prompt_continue_ccloud_demo() { 41 | echo "This demo uses real Confluent Cloud resources." 42 | echo "To avoid unexpected charges, carefully evaluate the cost of resources before launching the script and ensure all resources are destroyed after you are done running it." 43 | read -p "Do you still want to run this script? [y/n] " -n 1 -r 44 | echo 45 | if [[ ! $REPLY =~ ^[Yy]$ ]] 46 | then 47 | exit 1 48 | fi 49 | 50 | return 0 51 | } 52 | function ccloud::validate_expect_installed() { 53 | if [[ $(type expect 2>&1) =~ "not found" ]]; then 54 | echo "'expect' is not found. Install 'expect' and try again" 55 | exit 1 56 | fi 57 | 58 | return 0 59 | } 60 | function ccloud::validate_ccloud_cli_installed() { 61 | if [[ $(type ccloud 2>&1) =~ "not found" ]]; then 62 | echo "'ccloud' is not found. Install Confluent Cloud CLI (https://docs.confluent.io/current/quickstart/cloud-quickstart/index.html#step-2-install-the-ccloud-cli) and try again" 63 | exit 1 64 | fi 65 | } 66 | 67 | function ccloud::validate_ccloud_cli_v2() { 68 | ccloud::validate_ccloud_cli_installed || exit 1 69 | 70 | if [[ -z $(ccloud version 2>&1 | grep "Go") ]]; then 71 | echo "This demo requires the new Confluent Cloud CLI. Please update your version and try again." 72 | exit 1 73 | fi 74 | 75 | return 0 76 | } 77 | 78 | function ccloud::validate_logged_in_ccloud_cli() { 79 | ccloud::validate_ccloud_cli_v2 || exit 1 80 | 81 | if [[ "$(ccloud kafka cluster list 2>&1)" == "Error: You must log in to run that command." ]]; then 82 | echo "ERROR: Log into Confluent Cloud with the command 'ccloud login --save' before running the demo. The '--save' argument saves your Confluent Cloud user login credentials or refresh token (in the case of SSO) to the local netrc file." 83 | exit 1 84 | fi 85 | 86 | return 0 87 | } 88 | 89 | function ccloud::get_version_ccloud_cli() { 90 | ccloud version | grep "^Version:" | cut -d':' -f2 | cut -d'v' -f2 91 | } 92 | 93 | function ccloud::validate_version_ccloud_cli() { 94 | 95 | ccloud::validate_ccloud_cli_installed || exit 1 96 | 97 | REQUIRED_CCLOUD_VER=${1:-"1.7.0"} 98 | CCLOUD_VER=$(ccloud::get_version_ccloud_cli) 99 | 100 | if ccloud::version_gt $REQUIRED_CCLOUD_VER $CCLOUD_VER; then 101 | echo "ccloud version ${REQUIRED_CCLOUD_VER} or greater is required. Current reported version: ${CCLOUD_VER}" 102 | echo 'To update run: ccloud update' 103 | exit 1 104 | fi 105 | } 106 | 107 | function ccloud::validate_psql_installed() { 108 | if [[ $(type psql 2>&1) =~ "not found" ]]; then 109 | echo "psql is not found. 
Install psql and try again" 110 | exit 1 111 | fi 112 | 113 | return 0 114 | } 115 | 116 | function ccloud::validate_aws_cli_installed() { 117 | if [[ $(type aws 2>&1) =~ "not found" ]]; then 118 | echo "AWS CLI is not found. Install AWS CLI and try again" 119 | exit 1 120 | fi 121 | 122 | return 0 123 | } 124 | 125 | function ccloud::get_version_aws_cli() { 126 | version_major=$(aws --version 2>&1 | awk -F/ '{print $2;}' | head -c 1) 127 | if [[ "$version_major" -eq 2 ]]; then 128 | echo "2" 129 | else 130 | echo "1" 131 | fi 132 | return 0 133 | } 134 | 135 | function ccloud::validate_gsutil_installed() { 136 | if [[ $(type gsutil 2>&1) =~ "not found" ]]; then 137 | echo "Google Cloud gsutil is not found. Install Google Cloud gsutil and try again" 138 | exit 1 139 | fi 140 | 141 | return 0 142 | } 143 | 144 | function ccloud::validate_az_installed() { 145 | if [[ $(type az 2>&1) =~ "not found" ]]; then 146 | echo "Azure CLI is not found. Install Azure CLI and try again" 147 | exit 1 148 | fi 149 | 150 | return 0 151 | } 152 | 153 | function ccloud::validate_cloud_source() { 154 | config=$1 155 | 156 | source $config 157 | 158 | if [[ "$DATA_SOURCE" == "kinesis" ]]; then 159 | ccloud::validate_aws_cli_installed || exit 1 160 | if [[ -z "$KINESIS_REGION" || -z "$AWS_PROFILE" ]]; then 161 | echo "ERROR: DATA_SOURCE=kinesis, but KINESIS_REGION or AWS_PROFILE is not set. Please set these parameters in config/demo.cfg and try again." 162 | exit 1 163 | fi 164 | aws kinesis list-streams --profile $AWS_PROFILE --region $KINESIS_REGION > /dev/null \ 165 | || { echo "Could not run 'aws kinesis list-streams'. Check credentials and run again." ; exit 1; } 166 | elif [[ "$DATA_SOURCE" == "rds" ]]; then 167 | ccloud::validate_aws_cli_installed || exit 1 168 | if [[ -z "$RDS_REGION" || -z "$AWS_PROFILE" ]]; then 169 | echo "ERROR: DATA_SOURCE=rds, but RDS_REGION or AWS_PROFILE is not set. Please set these parameters in config/demo.cfg and try again." 170 | exit 1 171 | fi 172 | aws rds describe-db-instances --profile $AWS_PROFILE --region $RDS_REGION > /dev/null \ 173 | || { echo "Could not run 'aws rds describe-db-instances'. Check credentials and run again." ; exit 1; } 174 | else 175 | echo "Cloud source $cloudsource is not valid. Must be one of [kinesis|rds]." 176 | exit 1 177 | fi 178 | 179 | return 0 180 | } 181 | 182 | function ccloud::validate_cloud_storage() { 183 | config=$1 184 | 185 | source $config 186 | storage=$DESTINATION_STORAGE 187 | 188 | if [[ "$storage" == "s3" ]]; then 189 | ccloud::validate_aws_cli_installed || exit 1 190 | ccloud::validate_credentials_s3 $S3_PROFILE $S3_BUCKET || exit 1 191 | aws s3api list-buckets --profile $S3_PROFILE --region $STORAGE_REGION > /dev/null \ 192 | || { echo "Could not run 'aws s3api list-buckets'. Check credentials and run again." ; exit 1; } 193 | elif [[ "$storage" == "gcs" ]]; then 194 | ccloud::validate_gsutil_installed || exit 1 195 | ccloud::validate_credentials_gcp $GCS_CREDENTIALS_FILE $GCS_BUCKET || exit 1 196 | elif [[ "$storage" == "az" ]]; then 197 | ccloud::validate_az_installed || exit 1 198 | ccloud::validate_credentials_az $AZBLOB_STORAGE_ACCOUNT $AZBLOB_CONTAINER || exit 1 199 | else 200 | echo "Storage destination $storage is not valid. Must be one of [s3|gcs|az]." 
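    # Illustrative example (assumed variable values, matching the checks above):
    # $storage comes from DESTINATION_STORAGE in the config file sourced as $1,
    # so a demo config that reaches the S3 branch would contain something like
    #     DESTINATION_STORAGE=s3
    #     S3_PROFILE=default
    #     S3_BUCKET=my-demo-bucket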
201 | exit 1 202 | fi 203 | 204 | return 0 205 | } 206 | 207 | function ccloud::validate_credentials_gcp() { 208 | GCS_CREDENTIALS_FILE=$1 209 | GCS_BUCKET=$2 210 | 211 | if [[ -z "$GCS_CREDENTIALS_FILE" || -z "$GCS_BUCKET" ]]; then 212 | echo "ERROR: DESTINATION_STORAGE=gcs, but GCS_CREDENTIALS_FILE or GCS_BUCKET is not set. Please set these parameters in config/demo.cfg and try again." 213 | exit 1 214 | fi 215 | 216 | gcloud auth activate-service-account --key-file $GCS_CREDENTIALS_FILE || { 217 | echo "ERROR: Cannot activate service account with key file $GCS_CREDENTIALS_FILE. Verify your credentials and try again." 218 | exit 1 219 | } 220 | 221 | # Create JSON-formatted string of the GCS credentials 222 | export GCS_CREDENTIALS=$(python ./stringify-gcp-credentials.py $GCS_CREDENTIALS_FILE) 223 | # Remove leading and trailing double quotes, otherwise connector creation from CLI fails 224 | GCS_CREDENTIALS=$(echo "${GCS_CREDENTIALS:1:${#GCS_CREDENTIALS}-2}") 225 | 226 | return 0 227 | } 228 | 229 | function ccloud::validate_credentials_az() { 230 | AZBLOB_STORAGE_ACCOUNT=$1 231 | AZBLOB_CONTAINER=$2 232 | 233 | if [[ -z "$AZBLOB_STORAGE_ACCOUNT" || -z "$AZBLOB_CONTAINER" ]]; then 234 | echo "ERROR: DESTINATION_STORAGE=az, but AZBLOB_STORAGE_ACCOUNT or AZBLOB_CONTAINER is not set. Please set these parameters in config/demo.cfg and try again." 235 | exit 1 236 | fi 237 | 238 | if [[ "$AZBLOB_STORAGE_ACCOUNT" == "default" ]]; then 239 | echo "ERROR: Azure Blob storage account name cannot be 'default'. Verify the value of the storage account name (did you create one?) in config/demo.cfg, as specified by the parameter AZBLOB_STORAGE_ACCOUNT, and try again." 240 | exit 1 241 | fi 242 | 243 | exists=$(az storage account check-name --name $AZBLOB_STORAGE_ACCOUNT | jq -r .reason) 244 | if [[ "$exists" != "AlreadyExists" ]]; then 245 | echo "ERROR: Azure Blob storage account name $AZBLOB_STORAGE_ACCOUNT does not exist. Check the value of AZBLOB_STORAGE_ACCOUNT in config/demo.cfg and try again." 246 | exit 1 247 | fi 248 | export AZBLOB_ACCOUNT_KEY=$(az storage account keys list --account-name $AZBLOB_STORAGE_ACCOUNT | jq -r '.[0].value') 249 | if [[ "$AZBLOB_ACCOUNT_KEY" == "" ]]; then 250 | echo "ERROR: Cannot get the key for Azure Blob storage account name $AZBLOB_STORAGE_ACCOUNT. Check the value of AZBLOB_STORAGE_ACCOUNT in config/demo.cfg, and your key, and try again." 251 | exit 1 252 | fi 253 | 254 | return 0 255 | } 256 | 257 | function ccloud::validate_credentials_s3() { 258 | S3_PROFILE=$1 259 | S3_BUCKET=$2 260 | 261 | if [[ -z "$S3_PROFILE" || -z "$S3_BUCKET" ]]; then 262 | echo "ERROR: DESTINATION_STORAGE=s3, but S3_PROFILE or S3_BUCKET is not set. Please set these parameters in config/demo.cfg and try again." 263 | exit 1 264 | fi 265 | 266 | aws configure get aws_access_key_id --profile $S3_PROFILE 1>/dev/null || { 267 | echo "ERROR: Cannot determine aws_access_key_id from S3_PROFILE=$S3_PROFILE. Verify your credentials and try again." 268 | exit 1 269 | } 270 | aws configure get aws_secret_access_key --profile $S3_PROFILE 1>/dev/null || { 271 | echo "ERROR: Cannot determine aws_secret_access_key from S3_PROFILE=$S3_PROFILE. Verify your credentials and try again." 272 | exit 1 273 | } 274 | return 0 275 | } 276 | 277 | function ccloud::validate_schema_registry_up() { 278 | auth=$1 279 | sr_endpoint=$2 280 | 281 | curl --silent -u $auth $sr_endpoint > /dev/null || { 282 | echo "ERROR: Could not validate credentials to Confluent Cloud Schema Registry. 
Please troubleshoot" 283 | exit 1 284 | } 285 | 286 | echo "Validated credentials to Confluent Cloud Schema Registry at $sr_endpoint" 287 | return 0 288 | } 289 | 290 | 291 | function ccloud::create_and_use_environment() { 292 | ENVIRONMENT_NAME=$1 293 | 294 | OUTPUT=$(ccloud environment create $ENVIRONMENT_NAME -o json) 295 | if [[ $? != 0 ]]; then 296 | echo "ERROR: Failed to create environment $ENVIRONMENT_NAME. Please troubleshoot (maybe run ./clean.sh) and run again" 297 | exit 1 298 | fi 299 | ENVIRONMENT=$(echo "$OUTPUT" | jq -r ".id") 300 | ccloud environment use $ENVIRONMENT &>/dev/null 301 | 302 | echo $ENVIRONMENT 303 | 304 | return 0 305 | } 306 | 307 | function ccloud::find_cluster() { 308 | CLUSTER_NAME=$1 309 | CLUSTER_CLOUD=$2 310 | CLUSTER_REGION=$3 311 | 312 | local FOUND_CLUSTER=$(ccloud kafka cluster list -o json | jq -c -r '.[] | select((.name == "'"$CLUSTER_NAME"'") and (.provider == "'"$CLUSTER_CLOUD"'") and (.region == "'"$CLUSTER_REGION"'"))') 313 | [[ ! -z "$FOUND_CLUSTER" ]] && { 314 | echo "$FOUND_CLUSTER" | jq -r .id 315 | return 0 316 | } || { 317 | return 1 318 | } 319 | } 320 | 321 | function ccloud::create_and_use_cluster() { 322 | CLUSTER_NAME=$1 323 | CLUSTER_CLOUD=$2 324 | CLUSTER_REGION=$3 325 | 326 | OUTPUT=$(ccloud kafka cluster create "$CLUSTER_NAME" --cloud $CLUSTER_CLOUD --region $CLUSTER_REGION 2>&1) 327 | if [ $? -eq 0 ]; then 328 | CLUSTER=$(echo "$OUTPUT" | grep '| Id' | awk '{print $4;}') 329 | ccloud kafka cluster use $CLUSTER 330 | echo $CLUSTER 331 | else 332 | echo "Error creating cluster: $OUTPUT. Troubleshoot and try again" 333 | exit 1 334 | fi 335 | 336 | return 0 337 | } 338 | 339 | function ccloud::maybe_create_and_use_cluster() { 340 | CLUSTER_NAME=$1 341 | CLUSTER_CLOUD=$2 342 | CLUSTER_REGION=$3 343 | CLUSTER_ID=$(ccloud::find_cluster $CLUSTER_NAME $CLUSTER_CLOUD $CLUSTER_REGION) 344 | if [ $? 
-eq 0 ]
345 | then
346 | ccloud kafka cluster use $CLUSTER_ID
347 | echo $CLUSTER_ID
348 | else
349 | ccloud::create_and_use_cluster "$CLUSTER_NAME" "$CLUSTER_CLOUD" "$CLUSTER_REGION"
350 | fi
351 | 
352 | return 0
353 | }
354 | 
355 | function ccloud::create_service_account() {
356 | SERVICE_NAME=$1
357 | 
358 | OUTPUT=$(ccloud service-account create $SERVICE_NAME --description $SERVICE_NAME -o json)
359 | SERVICE_ACCOUNT_ID=$(echo "$OUTPUT" | jq -r ".id")
360 | 
361 | echo $SERVICE_ACCOUNT_ID
362 | 
363 | return 0
364 | }
365 | 
366 | function ccloud::enable_schema_registry() {
367 | SCHEMA_REGISTRY_CLOUD=$1
368 | SCHEMA_REGISTRY_GEO=$2
369 | 
370 | OUTPUT=$(ccloud schema-registry cluster enable --cloud $SCHEMA_REGISTRY_CLOUD --geo $SCHEMA_REGISTRY_GEO -o json)
371 | SCHEMA_REGISTRY=$(echo "$OUTPUT" | jq -r ".id")
372 | 
373 | echo $SCHEMA_REGISTRY
374 | 
375 | return 0
376 | }
377 | 
378 | function ccloud::find_credentials_resource() {
379 | SERVICE_ACCOUNT_ID=$1
380 | RESOURCE=$2
381 | local FOUND_CRED=$(ccloud api-key list -o json | jq -c -r 'map(select((.resource_id == "'"$RESOURCE"'") and (.owner == "'"$SERVICE_ACCOUNT_ID"'")))')
382 | local FOUND_COUNT=$(echo "$FOUND_CRED" | jq 'length')
383 | [[ $FOUND_COUNT -ne 0 ]] && {
384 | echo "$FOUND_CRED" | jq -r '.[0].key'
385 | return 0
386 | } || {
387 | return 1
388 | }
389 | }
390 | function ccloud::create_credentials_resource() {
391 | SERVICE_ACCOUNT_ID=$1
392 | RESOURCE=$2
393 | 
394 | OUTPUT=$(ccloud api-key create --service-account $SERVICE_ACCOUNT_ID --resource $RESOURCE -o json)
395 | API_KEY_SA=$(echo "$OUTPUT" | jq -r ".key")
396 | API_SECRET_SA=$(echo "$OUTPUT" | jq -r ".secret")
397 | 
398 | echo "${API_KEY_SA}:${API_SECRET_SA}"
399 | 
400 | return 0
401 | }
402 | #####################################################################
403 | # The return value of this function is a colon ':' delimited
404 | # list. If the api-key is created, the second element of the
405 | # list will be the secret. If the api-key is being reused,
406 | # the second element of the list will be empty.
407 | #####################################################################
408 | function ccloud::maybe_create_credentials_resource() {
409 | SERVICE_ACCOUNT_ID=$1
410 | RESOURCE=$2
411 | 
412 | local KEY=$(ccloud::find_credentials_resource $SERVICE_ACCOUNT_ID $RESOURCE)
413 | [[ -z $KEY ]] && {
414 | ccloud::create_credentials_resource $SERVICE_ACCOUNT_ID $RESOURCE
415 | } || {
416 | echo "$KEY:"; # the secret cannot be retrieved from a found key; the caller needs to handle this
417 | return 0
418 | }
419 | }
420 | 
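# Example (illustrative, not part of the library itself): consuming the
# key:secret pair returned by ccloud::maybe_create_credentials_resource,
# treating an empty second field as "existing key reused, secret unknown":
#   CLUSTER_CREDS=$(ccloud::maybe_create_credentials_resource $SERVICE_ACCOUNT_ID $CLUSTER)
#   API_KEY=$(echo $CLUSTER_CREDS | awk -F: '{print $1}')
#   API_SECRET=$(echo $CLUSTER_CREDS | awk -F: '{print $2}')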
421 | function ccloud::find_ksqldb_app() {
422 | KSQLDB_NAME=$1
423 | CLUSTER=$2
424 | 
425 | local FOUND_APP=$(ccloud ksql app list -o json | jq -c -r 'map(select((.name == "'"$KSQLDB_NAME"'") and (.kafka == "'"$CLUSTER"'")))')
426 | local FOUND_COUNT=$(echo "$FOUND_APP" | jq 'length')
427 | [[ $FOUND_COUNT -ne 0 ]] && {
428 | echo "$FOUND_APP" | jq -r '.[].id'
429 | return 0
430 | } || {
431 | return 1
432 | }
433 | }
434 | 
435 | function ccloud::create_ksqldb_app() {
436 | KSQLDB_NAME=$1
437 | CLUSTER=$2
438 | 
439 | KSQLDB=$(ccloud ksql app create --cluster $CLUSTER -o json "$KSQLDB_NAME" | jq -r ".id")
440 | echo $KSQLDB
441 | 
442 | return 0
443 | }
444 | function ccloud::maybe_create_ksqldb_app() {
445 | KSQLDB_NAME=$1
446 | CLUSTER=$2
447 | 
448 | APP_ID=$(ccloud::find_ksqldb_app $KSQLDB_NAME $CLUSTER)
449 | if [ $? -eq 0 ]
450 | then
451 | echo $APP_ID
452 | else
453 | ccloud::create_ksqldb_app "$KSQLDB_NAME" "$CLUSTER"
454 | fi
455 | 
456 | return 0
457 | }
458 | 
459 | function ccloud::create_acls_all_resources_full_access() {
460 | SERVICE_ACCOUNT_ID=$1
461 | [[ $QUIET == "true" ]] &&
462 | local REDIRECT_TO="/dev/null" ||
463 | local REDIRECT_TO="/dev/stdout"
464 | 
465 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation CREATE --topic '*' &>"$REDIRECT_TO"
466 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation WRITE --topic '*' &>"$REDIRECT_TO"
467 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation READ --topic '*' &>"$REDIRECT_TO"
468 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation DESCRIBE --topic '*' &>"$REDIRECT_TO"
469 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation DESCRIBE_CONFIGS --topic '*' &>"$REDIRECT_TO"
470 | 
471 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation READ --consumer-group '*' &>"$REDIRECT_TO"
472 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation WRITE --consumer-group '*' &>"$REDIRECT_TO"
473 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation CREATE --consumer-group '*' &>"$REDIRECT_TO"
474 | 
475 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation DESCRIBE --transactional-id '*' &>"$REDIRECT_TO"
476 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation WRITE --transactional-id '*' &>"$REDIRECT_TO"
477 | 
478 | ccloud kafka acl create --allow --service-account $SERVICE_ACCOUNT_ID --operation IDEMPOTENT-WRITE --cluster-scope &>"$REDIRECT_TO"
479 | 
480 | return 0
481 | }
482 | 
483 | function ccloud::delete_acls_ccloud_stack() {
484 | SERVICE_ACCOUNT_ID=$1
485 | 
486 | [[ $QUIET == "true" ]] &&
487 | local REDIRECT_TO="/dev/null" ||
488 | local REDIRECT_TO="/dev/stdout"
489 | 
490 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation CREATE --topic '*' &>"$REDIRECT_TO"
491 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation WRITE --topic '*' &>"$REDIRECT_TO"
492 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation READ --topic '*' &>"$REDIRECT_TO"
493 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation DESCRIBE --topic '*' &>"$REDIRECT_TO"
494 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation DESCRIBE_CONFIGS --topic '*' &>"$REDIRECT_TO"
495 | 
496 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation READ --consumer-group '*' &>"$REDIRECT_TO"
497 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation WRITE --consumer-group '*' &>"$REDIRECT_TO"
498 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation CREATE --consumer-group '*' &>"$REDIRECT_TO"
499 | 
500 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation DESCRIBE --transactional-id '*' &>"$REDIRECT_TO"
501 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation WRITE --transactional-id '*' &>"$REDIRECT_TO"
502 | ccloud kafka acl delete --allow --service-account $SERVICE_ACCOUNT_ID --operation IDEMPOTENT-WRITE --cluster-scope &>"$REDIRECT_TO"
503 | 
504 | return 0
505 | }
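# Example (illustrative): the create and delete functions above are symmetric,
# so a stack teardown can revoke exactly what was granted:
#   ccloud::create_acls_all_resources_full_access $SERVICE_ACCOUNT_ID
#   ccloud kafka acl list
#   ccloud::delete_acls_ccloud_stack $SERVICE_ACCOUNT_ID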
-f "$expected_configfile" ]]; then 510 | echo "Confluent Cloud configuration file does not exist at $expected_configfile. Please create the configuration file with properties set to your Confluent Cloud cluster and try again." 511 | exit 1 512 | else 513 | cat "$CONFIG_FILE" | jq . &> /dev/null 514 | status=$? 515 | if [[ $status == 0 ]]; then 516 | echo "ERROR: File $CONFIG_FILE is not properly formatted as key=value pairs (did you accidentally point to the Confluent Cloud CLI 'config.json' file?--this will not work). Manually create the required properties file to connect to your Confluent Cloud cluster and then try again." 517 | echo "See https://docs.confluent.io/current/cloud/connect/auto-generate-configs.html for more information" 518 | exit 1 519 | elif ! [[ $(grep "^\s*bootstrap.server" $expected_configfile) ]]; then 520 | echo "Missing 'bootstrap.server' in $expected_configfile. Please modify the configuration file with properties set to your Confluent Cloud cluster and try again." 521 | exit 1 522 | fi 523 | fi 524 | 525 | return 0 526 | } 527 | 528 | function ccloud::validate_ksqldb_up() { 529 | ksqldb_endpoint=$1 530 | ccloud_config_file=$2 531 | credentials=$3 532 | 533 | ccloud::validate_logged_in_ccloud_cli || exit 1 534 | 535 | if [[ "$ksqldb_endpoint" == "" ]]; then 536 | echo "ERROR: Provision a ksqlDB cluster via the Confluent Cloud UI and add the configuration parameter ksql.endpoint and ksql.basic.auth.user.info into your Confluent Cloud configuration file at $ccloud_config_file and try again." 537 | exit 1 538 | fi 539 | ksqlDBAppId=$(ccloud ksql app list | grep "$ksqldb_endpoint" | awk '{print $1}') 540 | if [[ "$ksqlDBAppId" == "" ]]; then 541 | echo "ERROR: Confluent Cloud ksqlDB endpoint $ksqldb_endpoint is not found. Provision a ksqlDB cluster via the Confluent Cloud UI and add the configuration parameter ksql.endpoint and ksql.basic.auth.user.info into your Confluent Cloud configuration file at $ccloud_config_file and try again." 542 | exit 1 543 | fi 544 | STATUS=$(ccloud ksql app describe $ksqlDBAppId | grep "Status" | grep UP) 545 | if [[ "$STATUS" == "" ]]; then 546 | echo "ERROR: Confluent Cloud ksqlDB endpoint $ksqldb_endpoint with id $ksqlDBAppId is not in UP state. Troubleshoot and try again." 547 | exit 1 548 | fi 549 | 550 | ccloud::validate_credentials_ksqldb "$ksqldb_endpoint" "$ccloud_config_file" "$credentials" || exit 1 551 | 552 | return 0 553 | } 554 | 555 | function ccloud::validate_azure_account() { 556 | AZBLOB_STORAGE_ACCOUNT=$1 557 | 558 | if [[ "$AZBLOB_STORAGE_ACCOUNT" == "default" ]]; then 559 | echo "ERROR: Azure Blob storage account name cannot be 'default'. Verify the value of the storage account name (did you create one?) in config/demo.cfg, as specified by the parameter AZBLOB_STORAGE_ACCOUNT, and try again." 560 | exit 1 561 | fi 562 | 563 | exists=$(az storage account check-name --name $AZBLOB_STORAGE_ACCOUNT | jq -r .reason) 564 | if [[ "$exists" != "AlreadyExists" ]]; then 565 | echo "ERROR: Azure Blob storage account name $AZBLOB_STORAGE_ACCOUNT does not exist. Check the value of STORAGE_PROFILE in config/demo.cfg and try again." 566 | exit 1 567 | fi 568 | export AZBLOB_ACCOUNT_KEY=$(az storage account keys list --account-name $AZBLOB_STORAGE_ACCOUNT | jq -r '.[0].value') 569 | if [[ "$AZBLOB_ACCOUNT_KEY" == "" ]]; then 570 | echo "ERROR: Cannot get the key for Azure Blob storage account name $AZBLOB_STORAGE_ACCOUNT. Check the value of STORAGE_PROFILE in config/demo.cfg, and your key, and try again." 
528 | function ccloud::validate_ksqldb_up() {
529 | ksqldb_endpoint=$1
530 | ccloud_config_file=$2
531 | credentials=$3
532 | 
533 | ccloud::validate_logged_in_ccloud_cli || exit 1
534 | 
535 | if [[ "$ksqldb_endpoint" == "" ]]; then
536 | echo "ERROR: Provision a ksqlDB cluster via the Confluent Cloud UI, add the configuration parameters ksql.endpoint and ksql.basic.auth.user.info to your Confluent Cloud configuration file at $ccloud_config_file, and try again."
537 | exit 1
538 | fi
539 | ksqlDBAppId=$(ccloud ksql app list | grep "$ksqldb_endpoint" | awk '{print $1}')
540 | if [[ "$ksqlDBAppId" == "" ]]; then
541 | echo "ERROR: Confluent Cloud ksqlDB endpoint $ksqldb_endpoint was not found. Provision a ksqlDB cluster via the Confluent Cloud UI, add the configuration parameters ksql.endpoint and ksql.basic.auth.user.info to your Confluent Cloud configuration file at $ccloud_config_file, and try again."
542 | exit 1
543 | fi
544 | STATUS=$(ccloud ksql app describe $ksqlDBAppId | grep "Status" | grep UP)
545 | if [[ "$STATUS" == "" ]]; then
546 | echo "ERROR: Confluent Cloud ksqlDB endpoint $ksqldb_endpoint with id $ksqlDBAppId is not in the UP state. Troubleshoot and try again."
547 | exit 1
548 | fi
549 | 
550 | ccloud::validate_credentials_ksqldb "$ksqldb_endpoint" "$ccloud_config_file" "$credentials" || exit 1
551 | 
552 | return 0
553 | }
554 | 
555 | function ccloud::validate_azure_account() {
556 | AZBLOB_STORAGE_ACCOUNT=$1
557 | 
558 | if [[ "$AZBLOB_STORAGE_ACCOUNT" == "default" ]]; then
559 | echo "ERROR: Azure Blob storage account name cannot be 'default'. Verify the value of the storage account name (did you create one?) in config/demo.cfg, as specified by the parameter AZBLOB_STORAGE_ACCOUNT, and try again."
560 | exit 1
561 | fi
562 | 
563 | exists=$(az storage account check-name --name $AZBLOB_STORAGE_ACCOUNT | jq -r .reason)
564 | if [[ "$exists" != "AlreadyExists" ]]; then
565 | echo "ERROR: Azure Blob storage account name $AZBLOB_STORAGE_ACCOUNT does not exist. Check the value of AZBLOB_STORAGE_ACCOUNT in config/demo.cfg and try again."
566 | exit 1
567 | fi
568 | export AZBLOB_ACCOUNT_KEY=$(az storage account keys list --account-name $AZBLOB_STORAGE_ACCOUNT | jq -r '.[0].value')
569 | if [[ "$AZBLOB_ACCOUNT_KEY" == "" ]]; then
570 | echo "ERROR: Cannot get the key for Azure Blob storage account name $AZBLOB_STORAGE_ACCOUNT. Check the value of AZBLOB_STORAGE_ACCOUNT in config/demo.cfg, and your key, and try again."
571 | exit 1
572 | fi
573 | 
574 | return 0
575 | }
576 | 
577 | function ccloud::validate_credentials_ksqldb() {
578 | ksqldb_endpoint=$1
579 | ccloud_config_file=$2
580 | credentials=$3
581 | 
582 | response=$(curl ${ksqldb_endpoint}/info \
583 | -H "Content-Type: application/vnd.ksql.v1+json; charset=utf-8" \
584 | --silent \
585 | -u $credentials)
586 | if [[ "$response" =~ "Unauthorized" ]]; then
587 | echo "ERROR: Authorization failed to the ksqlDB cluster. Check your ksqlDB credentials set in the configuration parameter ksql.basic.auth.user.info in your Confluent Cloud configuration file at $ccloud_config_file and try again."
588 | exit 1
589 | fi
590 | 
591 | echo "Validated credentials to Confluent Cloud ksqlDB at $ksqldb_endpoint"
592 | return 0
593 | }
594 | 
595 | function ccloud::create_connector() {
596 | file=$1
597 | 
598 | echo -e "\nCreating connector from $file\n"
599 | 
600 | # About the Confluent Cloud CLI command 'ccloud connector create':
601 | # - Typical usage of this CLI would be 'ccloud connector create --config <filename>'
602 | # - However, in this demo, the connector's configuration file contains parameters that need to be substituted first,
603 | # so the CLI command includes eval and a heredoc.
604 | # - The '-vvv' is added for verbose output
605 | ccloud connector create -vvv --config <(eval "cat <<EOF
606 | $(cat $file)
607 | EOF
608 | ")
609 | if [[ $? != 0 ]]; then
610 | echo "ERROR: Exit status was not 0 while creating connector from $file. Please troubleshoot and try again"
611 | exit 1
612 | fi
613 | 
614 | return 0
615 | }
616 | 
617 | # Succeeds (exit status 0) only when the named connector reports state RUNNING.
618 | function ccloud::validate_connector_up() {
619 | ccloud connector list -o json | jq -e 'map(select(.name == "'"$1"'" and .status == "RUNNING")) | .[]' &>/dev/null
620 | return $?
621 | }
622 | 
623 | # Polls ccloud::validate_connector_up for up to maxWait seconds.
624 | function ccloud::wait_for_connector_up() {
625 | filename=$1
626 | maxWait=$2
627 | 
628 | connectorName=$(cat $filename | jq -r .name)
629 | echo "Waiting up to $maxWait seconds for connector $filename ($connectorName) to be RUNNING"
630 | ccloud::retry $maxWait ccloud::validate_connector_up $connectorName || exit 1
631 | echo "Connector $filename ($connectorName) is RUNNING"
632 | 
633 | return 0
634 | }
635 | 
636 | # Polling helper used while waiting for a newly provisioned ksqlDB cluster to be UP.
637 | function ccloud::validate_ccloud_ksqldb_endpoint_ready() {
638 | KSQLDB_ENDPOINT=$1
639 | 
640 | ksqlDBAppId=$(ccloud ksql app list | grep "$KSQLDB_ENDPOINT" | awk '{print $1}')
641 | if [[ "$ksqlDBAppId" == "" ]]; then
642 | return 1
643 | fi
644 | STATUS=$(ccloud ksql app describe $ksqlDBAppId | grep "Status" | grep UP)
645 | if [[ "$STATUS" == "" ]]; then
646 | return 1
647 | fi
648 | 
649 | return 0
650 | }
651 | 
652 | # Polling helper used while waiting for a newly created Kafka cluster to be
653 | # ready and for its credentials to propagate; listing topics succeeds only
654 | # once the cluster is usable.
655 | function ccloud::validate_ccloud_cluster_ready() {
656 | ccloud kafka topic list &>/dev/null
657 | return $?
658 | }
659 | 
660 | function ccloud::validate_topic_exists() {
661 | topic=$1
662 | 
663 | ccloud kafka topic describe $topic &>/dev/null
664 | return $?
665 | }
666 | 
667 | function ccloud::validate_subject_exists() {
668 | subject=$1
669 | sr_url=$2
670 | sr_credentials=$3
671 | 
672 | curl --silent -u $sr_credentials $sr_url/subjects/$subject/versions/latest | jq -r ".subject" | grep $subject > /dev/null
673 | return $?
674 | }
675 | 
676 | function ccloud::login_ccloud_cli(){
677 | 
678 | URL=$1
679 | EMAIL=$2
680 | PASSWORD=$3
681 | 
682 | ccloud::validate_expect_installed
683 | 
684 | echo -e "\n# Login"
685 | OUTPUT=$(
686 | expect <<END
687 | log_user 1
688 | spawn ccloud login --url $URL
689 | expect "Email: "
690 | send "$EMAIL\r";
691 | expect "Password: "
692 | send "$PASSWORD\r";
693 | expect "Logged in as "
694 | set result \$expect_out(buffer)
695 | END
696 | )
697 | echo "$OUTPUT"
698 | if [[ ! "$OUTPUT" =~ "Logged in as" ]]; then
699 | echo "Failed to log in to your cluster. Please check all parameters and run again"
700 | fi
701 | 
702 | return 0
703 | }
704 | 
705 | function ccloud::validate_expect_installed() {
706 | if [[ $(type expect 2>&1) =~ "not found" ]]; then
707 | echo "'expect' is not found. Install 'expect' and try again"
708 | exit 1
709 | fi
710 | 
711 | return 0
712 | }
713 | 
714 | # Resolves the service account that owns the given API key. Used by
715 | # ccloud_stack_destroy.sh to determine which stack a client configuration
716 | # file belongs to.
717 | function ccloud::get_service_account() {
718 | CLOUD_KEY=$1
719 | CONFIG_FILE=$2
720 | 
721 | if [[ "$CLOUD_KEY" == "" ]]; then
722 | echo "ERROR: could not parse the broker credentials from $CONFIG_FILE. Verify your credentials and try again."
723 | exit 1
724 | fi
725 | serviceAccount=$(ccloud api-key list -o json | jq -r -c 'map(select((.key == "'"$CLOUD_KEY"'"))) | .[].owner')
726 | if [[ "$serviceAccount" == "" ]]; then
727 | echo "ERROR: Could not associate key $CLOUD_KEY to a service account. Verify your credentials and try again."
728 | exit 1
729 | fi
730 | if ! [[ "$serviceAccount" =~ ^-?[0-9]+$ ]]; then
731 | echo "ERROR: $serviceAccount is not a valid service account id. Verify your credentials and try again."
732 | exit 1
733 | fi
734 | 
735 | echo "$serviceAccount"
736 | 
737 | return 0
738 | }
739 | 
740 | # ACLs required by a self-managed connector that uses dlq/success/error topics.
741 | function ccloud::create_acls_connector() {
742 | serviceAccount=$1
743 | 
744 | ccloud kafka acl create --allow --service-account $serviceAccount --operation DESCRIBE --cluster-scope
745 | ccloud kafka acl create --allow --service-account $serviceAccount --operation CREATE --topic dlq --prefix
746 | ccloud kafka acl create --allow --service-account $serviceAccount --operation WRITE --topic dlq --prefix
747 | ccloud kafka acl create --allow --service-account $serviceAccount --operation CREATE --topic success --prefix
748 | ccloud kafka acl create --allow --service-account $serviceAccount --operation WRITE --topic success --prefix
749 | ccloud kafka acl create --allow --service-account $serviceAccount --operation CREATE --topic error --prefix
750 | ccloud kafka acl create --allow --service-account $serviceAccount --operation WRITE --topic error --prefix
751 | ccloud kafka acl create --allow --service-account $serviceAccount --operation READ --consumer-group connect --prefix
752 | 
753 | return 0
754 | }
755 | 
756 | # ACLs required by Confluent Control Center for its _confluent-prefixed topics.
757 | function ccloud::create_acls_control_center() {
758 | serviceAccount=$1
759 | 
760 | echo "Confluent Control Center: creating ACLs for service account $serviceAccount"
761 | ccloud kafka acl create --allow --service-account $serviceAccount --operation READ --topic _confluent --prefix
762 | ccloud kafka acl create --allow --service-account $serviceAccount --operation WRITE --topic _confluent --prefix
763 | ccloud kafka acl create --allow --service-account $serviceAccount --operation CREATE --topic _confluent --prefix
764 | ccloud kafka acl create --allow --service-account $serviceAccount --operation READ --consumer-group _confluent --prefix
765 | ccloud kafka acl create --allow --service-account $serviceAccount --operation WRITE --consumer-group _confluent --prefix
766 | ccloud kafka acl create --allow --service-account $serviceAccount --operation CREATE --consumer-group _confluent --prefix
767 | 
768 | return 0
769 | }
770 | 
771 | # ACLs required by Confluent Replicator when copying data into this cluster.
772 | function ccloud::create_acls_replicator() {
773 | serviceAccount=$1
774 | 
775 | ccloud kafka acl create --allow --service-account $serviceAccount --operation CREATE --topic '*'
776 | ccloud kafka acl create --allow --service-account $serviceAccount --operation WRITE --topic '*'
777 | ccloud kafka acl create --allow --service-account $serviceAccount --operation READ --topic '*'
778 | ccloud kafka acl create --allow --service-account $serviceAccount --operation DESCRIBE --topic '*'
779 | ccloud kafka acl create --allow --service-account $serviceAccount --operation DESCRIBE-CONFIGS --topic '*'
780 | ccloud kafka acl create --allow --service-account $serviceAccount --operation ALTER-CONFIGS --topic '*'
781 | ccloud kafka acl create --allow --service-account $serviceAccount --operation DESCRIBE --cluster-scope
782 | ccloud kafka acl create --allow --service-account $serviceAccount --operation IDEMPOTENT-WRITE --cluster-scope
783 | 
784 | return 0
785 | }
786 | 
787 | # Creates the Kafka Connect internal topics (configs, offsets, statuses) plus
788 | # the monitoring topic, and grants the worker's service account access to them.
789 | function ccloud::create_connect_topics_and_acls() {
790 | serviceAccount=$1
791 | 
792 | echo "Connect: creating topics and ACLs for service account $serviceAccount"
793 | for TOPIC in connect-demo-configs connect-demo-offsets connect-demo-statuses _confluent-monitoring ; do
794 | ccloud kafka topic create $TOPIC &>/dev/null
795 | ccloud kafka acl create --allow --service-account $serviceAccount --operation WRITE --topic $TOPIC --prefix
796 | ccloud kafka acl create --allow --service-account $serviceAccount --operation READ --topic $TOPIC --prefix
797 | done
798 | 
799 | ccloud kafka acl create --allow --service-account $serviceAccount --operation READ --consumer-group connect-cloud
800 | 
801 | echo "Connectors: creating topics and ACLs for service account $serviceAccount"
802 | ccloud kafka acl create --allow --service-account $serviceAccount --operation READ --consumer-group connect-replicator
803 | ccloud kafka acl create --allow --service-account $serviceAccount --operation describe --cluster-scope
804 | 
805 | return 0
806 | }
807 | 
808 | function ccloud::validate_ccloud_stack_up() {
809 | CLOUD_KEY=$1
810 | CONFIG_FILE=$2
811 | enable_ksqldb=$3
812 | 
813 | if [ -z "$enable_ksqldb" ]; then
814 | enable_ksqldb=true
815 | fi
816 | 
817 | ccloud::validate_environment_set || exit 1
818 | ccloud::set_kafka_cluster_use "$CLOUD_KEY" "$CONFIG_FILE" || exit 1
819 | ccloud::validate_schema_registry_up "$SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO" "$SCHEMA_REGISTRY_URL" || exit 1
820 | if $enable_ksqldb ; then
821 | ccloud::validate_ksqldb_up "$KSQLDB_ENDPOINT" "$CONFIG_FILE" "$KSQLDB_BASIC_AUTH_USER_INFO" || exit 1
822 | fi
823 | }
824 | 
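# Example (illustrative): validate an entire stack before running the demo,
# assuming delta_configs/env.delta has been sourced so that CLOUD_KEY,
# SCHEMA_REGISTRY_* and KSQLDB_* variables are populated:
#   ccloud::validate_ccloud_stack_up $CLOUD_KEY $CONFIG_FILE true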
825 | function ccloud::validate_environment_set() {
826 | ccloud environment list | grep '*' &>/dev/null || {
827 | echo "ERROR: could not determine whether an environment is set. Run 'ccloud environment list', select one with 'ccloud environment use', and try again"
828 | exit 1
829 | }
830 | 
831 | return 0
832 | 
833 | }
834 | 
835 | function ccloud::set_kafka_cluster_use() {
836 | CLOUD_KEY=$1
837 | CONFIG_FILE=$2
838 | 
839 | if [[ "$CLOUD_KEY" == "" ]]; then
840 | echo "ERROR: could not parse the broker credentials from $CONFIG_FILE. Verify your credentials and try again."
841 | exit 1
842 | fi
843 | kafkaCluster=$(ccloud api-key list | grep "$CLOUD_KEY" | awk '{print $8;}')
844 | if [[ "$kafkaCluster" == "" ]]; then
845 | echo "ERROR: Could not associate key $CLOUD_KEY to a Confluent Cloud Kafka cluster. Verify your credentials, ensure the API key has a set resource type, and try again."
846 | exit 1
847 | fi
848 | ccloud kafka cluster use $kafkaCluster
849 | endpoint=$(ccloud kafka cluster describe $kafkaCluster -o json | jq -r ".endpoint" | cut -c 12-) # strip the leading "SASL_SSL://" scheme
850 | echo -e "\nAssociated key $CLOUD_KEY to Confluent Cloud Kafka cluster $kafkaCluster at $endpoint"
851 | 
852 | return 0
853 | }
854 | 
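# Example (illustrative; the service account number is a placeholder): point
# the ccloud CLI at the cluster that a saved stack configuration belongs to:
#   CONFIG_FILE=stack-configs/java-service-account-123456.config
#   source delta_configs/env.delta   # defines CLOUD_KEY, generated by ccloud-generate-cp-config.sh
#   ccloud::set_kafka_cluster_use $CLOUD_KEY $CONFIG_FILE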
Please troubleshoot" 890 | exit 1 891 | fi 892 | BOOTSTRAP_SERVERS=$(ccloud kafka cluster describe $CLUSTER -o json | jq -r ".endpoint" | cut -c 12-) 893 | CLUSTER_CREDS=$(ccloud::maybe_create_credentials_resource $SERVICE_ACCOUNT_ID $CLUSTER) 894 | 895 | MAX_WAIT=720 896 | echo "" 897 | echo "Waiting up to $MAX_WAIT seconds for Confluent Cloud cluster to be ready and for credentials to propagate" 898 | ccloud::retry $MAX_WAIT ccloud::validate_ccloud_cluster_ready || exit 1 899 | 900 | # Estimating another 80s wait still sometimes required 901 | WARMUP_TIME=${WARMUP_TIME:-80} 902 | echo "Sleeping an additional ${WARMUP_TIME} seconds to ensure propagation of all metadata" 903 | sleep $WARMUP_TIME 904 | 905 | SCHEMA_REGISTRY_GEO="${SCHEMA_REGISTRY_GEO:-us}" 906 | SCHEMA_REGISTRY=$(ccloud::enable_schema_registry $CLUSTER_CLOUD $SCHEMA_REGISTRY_GEO) 907 | SCHEMA_REGISTRY_ENDPOINT=$(ccloud schema-registry cluster describe -o json | jq -r ".endpoint_url") 908 | SCHEMA_REGISTRY_CREDS=$(ccloud::maybe_create_credentials_resource $SERVICE_ACCOUNT_ID $SCHEMA_REGISTRY) 909 | 910 | if $enable_ksqldb ; then 911 | KSQLDB_NAME=${KSQLDB_NAME:-"demo-ksqldb-$SERVICE_ACCOUNT_ID"} 912 | KSQLDB=$(ccloud::maybe_create_ksqldb_app "$KSQLDB_NAME" $CLUSTER) 913 | KSQLDB_ENDPOINT=$(ccloud ksql app describe $KSQLDB -o json | jq -r ".endpoint") 914 | KSQLDB_CREDS=$(ccloud::maybe_create_credentials_resource $SERVICE_ACCOUNT_ID $KSQLDB) 915 | KSQLDB_SERVICE_ACCOUNT_ID=$(ccloud service-account list -o json 2>/dev/null | jq -r "map(select(.name == \"KSQL.$KSQLDB\")) | .[0].id") 916 | ccloud ksql app configure-acls $KSQLDB 917 | fi 918 | 919 | ccloud::create_acls_all_resources_full_access $SERVICE_ACCOUNT_ID 920 | 921 | CLOUD_API_KEY=`echo $CLUSTER_CREDS | awk -F: '{print $1}'` 922 | CLOUD_API_SECRET=`echo $CLUSTER_CREDS | awk -F: '{print $2}'` 923 | ccloud api-key use $CLOUD_API_KEY --resource ${CLUSTER} 924 | 925 | if [[ -z "$SKIP_CONFIG_FILE_WRITE" ]]; then 926 | if [[ -z "$CLIENT_CONFIG" ]]; then 927 | mkdir -p stack-configs 928 | CLIENT_CONFIG="stack-configs/java-service-account-$SERVICE_ACCOUNT_ID.config" 929 | fi 930 | 931 | cat < $CLIENT_CONFIG 932 | # -------------------------------------- 933 | # Confluent Cloud connection information 934 | # -------------------------------------- 935 | # ENVIRONMENT ID: ${ENVIRONMENT} 936 | # SERVICE ACCOUNT ID: ${SERVICE_ACCOUNT_ID} 937 | # KAFKA CLUSTER ID: ${CLUSTER} 938 | # SCHEMA REGISTRY CLUSTER ID: ${SCHEMA_REGISTRY} 939 | EOF 940 | if $enable_ksqldb ; then 941 | cat <> $CLIENT_CONFIG 942 | # KSQLDB APP ID: ${KSQLDB} 943 | EOF 944 | fi 945 | cat <> $CLIENT_CONFIG 946 | # -------------------------------------- 947 | ssl.endpoint.identification.algorithm=https 948 | sasl.mechanism=PLAIN 949 | security.protocol=SASL_SSL 950 | bootstrap.servers=${BOOTSTRAP_SERVERS} 951 | sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username\="${CLOUD_API_KEY}" password\="${CLOUD_API_SECRET}"; 952 | basic.auth.credentials.source=USER_INFO 953 | schema.registry.url=${SCHEMA_REGISTRY_ENDPOINT} 954 | schema.registry.basic.auth.user.info=`echo $SCHEMA_REGISTRY_CREDS | awk -F: '{print $1}'`:`echo $SCHEMA_REGISTRY_CREDS | awk -F: '{print $2}'` 955 | replication.factor=${REPLICATION_FACTOR} 956 | EOF 957 | if $enable_ksqldb ; then 958 | cat <> $CLIENT_CONFIG 959 | ksql.endpoint=${KSQLDB_ENDPOINT} 960 | ksql.basic.auth.user.info=`echo $KSQLDB_CREDS | awk -F: '{print $1}'`:`echo $KSQLDB_CREDS | awk -F: '{print $2}'` 961 | EOF 962 | fi 963 | 964 | echo 965 | echo -e 
"${GREEN}Client configuration file saved to: ${BLUE}$CLIENT_CONFIG${NC}" 966 | fi 967 | 968 | return 0 969 | } 970 | 971 | function ccloud::destroy_ccloud_stack() { 972 | SERVICE_ACCOUNT_ID=$1 973 | 974 | ENVIRONMENT_NAME=${ENVIRONMENT_NAME:-"demo-env-$SERVICE_ACCOUNT_ID"} 975 | CLUSTER_NAME=${CLUSTER_NAME:-"demo-kafka-cluster-$SERVICE_ACCOUNT_ID"} 976 | CLIENT_CONFIG=${CLIENT_CONFIG:-"stack-configs/java-service-account-$SERVICE_ACCOUNT_ID.config"} 977 | KSQLDB_NAME=${KSQLDB_NAME:-"demo-ksqldb-$SERVICE_ACCOUNT_ID"} 978 | 979 | QUIET="${QUIET:-true}" 980 | [[ $QUIET == "true" ]] && 981 | local REDIRECT_TO="/dev/null" || 982 | local REDIRECT_TO="/dev/stdout" 983 | 984 | echo "Destroying Confluent Cloud stack associated to service account id $SERVICE_ACCOUNT_ID" 985 | 986 | if [[ $KSQLDB_ENDPOINT != "" ]]; then 987 | KSQLDB=$(ccloud ksql app list | grep $KSQLDB_NAME | awk '{print $1;}') 988 | echo "KSQLDB: $KSQLDB" 989 | ccloud ksql app delete $KSQLDB &>"$REDIRECT_TO" 990 | fi 991 | 992 | ccloud::delete_acls_ccloud_stack $SERVICE_ACCOUNT_ID 993 | ccloud service-account delete $SERVICE_ACCOUNT_ID &>"$REDIRECT_TO" 994 | 995 | CLUSTER=$(ccloud kafka cluster list | grep $CLUSTER_NAME | tr -d '\*' | awk '{print $1;}') 996 | echo "CLUSTER: $CLUSTER" 997 | ccloud kafka cluster delete $CLUSTER &> "$REDIRECT_TO" 998 | 999 | ENVIRONMENT=$(ccloud environment list | grep $ENVIRONMENT_NAME | tr -d '\*' | awk '{print $1;}') 1000 | echo "ENVIRONMENT: $ENVIRONMENT" 1001 | ccloud environment delete $ENVIRONMENT &> "$REDIRECT_TO" 1002 | 1003 | rm -f $CLIENT_CONFIG 1004 | 1005 | return 0 1006 | } 1007 | 1008 | ############################################## 1009 | # These are some duplicate functions from 1010 | # helper.sh to decouple the script files. In 1011 | # the future we can work to remove this 1012 | # duplication if necessary 1013 | ############################################## 1014 | function ccloud::retry() { 1015 | local -r -i max_wait="$1"; shift 1016 | local -r cmd="$@" 1017 | 1018 | local -i sleep_interval=5 1019 | local -i curr_wait=0 1020 | 1021 | until $cmd 1022 | do 1023 | if (( curr_wait >= max_wait )) 1024 | then 1025 | echo "ERROR: Failed after $curr_wait seconds. Please troubleshoot and run again." 1026 | return 1 1027 | else 1028 | printf "." 1029 | curr_wait=$((curr_wait+sleep_interval)) 1030 | sleep $sleep_interval 1031 | fi 1032 | done 1033 | printf "\n" 1034 | } 1035 | function ccloud::version_gt() { 1036 | test "$(printf '%s\n' "$@" | sort -V | head -n 1)" != "$1"; 1037 | } 1038 | -------------------------------------------------------------------------------- /scripts/ccloud/ccloud_stack_create.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ######################################### 4 | # This script uses real Confluent Cloud resources. 5 | # To avoid unexpected charges, carefully evaluate the cost of resources before launching the script and ensure all resources are destroyed after you are done running it. 
6 | #########################################
7 | 
8 | export CLUSTER_CLOUD=gcp
9 | # Use http://www.gcping.com/ to find the region closest to you
10 | export CLUSTER_REGION=us-east4
11 | #CLUSTER_CLOUD=aws
12 | 
13 | # Source library
14 | source ../common/colors.sh
15 | source ../common/helper.sh
16 | source ./ccloud_library.sh
17 | 
18 | ccloud::validate_version_ccloud_cli 1.7.0 || exit 1
19 | check_jq || exit 1
20 | ccloud::validate_logged_in_ccloud_cli || exit 1
21 | 
22 | ccloud::prompt_continue_ccloud_demo || exit 1
23 | 
24 | enable_ksqldb=false
25 | read -p "Do you also want to create a Confluent Cloud 🚀 ksqlDB app (hourly charges may apply)? [y/n] " -n 1 -r
26 | echo
27 | if [[ $REPLY =~ ^[Yy]$ ]]
28 | then
29 | enable_ksqldb=true
30 | fi
31 | 
32 | echo
33 | ccloud::create_ccloud_stack $enable_ksqldb || exit 1
34 | 
35 | echo
36 | echo -e "${BLUE}Validating...${NC}"
37 | SERVICE_ACCOUNT_ID=$(ccloud kafka cluster list -o json | jq -r '.[0].name' | awk -F'-' '{print $4;}')
38 | CONFIG_FILE=stack-configs/java-service-account-$SERVICE_ACCOUNT_ID.config
39 | ccloud::validate_ccloud_config $CONFIG_FILE || exit 1
40 | ./ccloud-generate-cp-config.sh $CONFIG_FILE > /dev/null
41 | source delta_configs/env.delta
42 | 
43 | if $enable_ksqldb ; then
44 | MAX_WAIT=500
45 | echo -e "${GREEN}Waiting up to $MAX_WAIT seconds for Confluent Cloud ksqlDB cluster to be UP${NC}"
46 | retry $MAX_WAIT ccloud::validate_ccloud_ksqldb_endpoint_ready $KSQLDB_ENDPOINT || exit 1
47 | fi
48 | 
49 | ccloud::validate_ccloud_stack_up $CLOUD_KEY $CONFIG_FILE $enable_ksqldb || exit 1
50 | 
51 | echo
52 | echo "ACLs in this cluster:"
53 | ccloud kafka acl list
54 | 
55 | echo
56 | echo -e "${GREEN}Local client configuration file written to ${BOLD}${BLUE}${CONFIG_FILE}${NC}"
57 | echo
58 | 
59 | echo
60 | echo -e "${YELLOW}To destroy this Confluent Cloud stack run ->${NC}"
61 | echo -e " ./ccloud_stack_destroy.sh $CONFIG_FILE"
62 | echo
63 | 
64 | echo
65 | ENVIRONMENT=$(ccloud environment list | grep demo-env-$SERVICE_ACCOUNT_ID | tr -d '\*' | awk '{print $1;}')
66 | echo -e "${BLUE}Tip:${NC} 'ccloud' CLI has been set to the new environment ${GREEN}${ENVIRONMENT}${NC}"
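# Example session (illustrative; the service account number is a placeholder):
#   ./ccloud_stack_create.sh
#   # ...answer the prompts and wait for provisioning...
#   ./ccloud_stack_destroy.sh stack-configs/java-service-account-123456.config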
-------------------------------------------------------------------------------- /scripts/ccloud/ccloud_stack_destroy.sh: --------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
4 | 
5 | # Source library
6 | source $DIR/../common/colors.sh
7 | source $DIR/../common/helper.sh
8 | source $DIR/ccloud_library.sh
9 | 
10 | ccloud::validate_version_ccloud_cli 1.7.0 || exit 1
11 | ccloud::validate_logged_in_ccloud_cli || exit 1
12 | check_jq || exit 1
13 | 
14 | if [ -z "$1" ]; then
15 | echo -e "${RED}ERROR: Must supply an argument that is the client configuration file created by './ccloud_stack_create.sh'. (Is it in the stack-configs/ folder?) ${NC}"
16 | exit 1
17 | else
18 | CONFIG_FILE=$1
19 | fi
20 | 
21 | read -p "This script will destroy the entire environment specified in $CONFIG_FILE. Do you want to proceed? [y/n] " -n 1 -r
22 | echo
23 | if [[ ! $REPLY =~ ^[Yy]$ ]]
24 | then
25 | exit 1
26 | fi
27 | 
28 | ccloud::validate_ccloud_config $CONFIG_FILE || exit 1
29 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
30 | ./ccloud-generate-cp-config.sh $CONFIG_FILE > /dev/null
31 | source delta_configs/env.delta
32 | SERVICE_ACCOUNT_ID=$(ccloud::get_service_account $CLOUD_KEY $CONFIG_FILE) || exit 1
33 | 
34 | echo
35 | ccloud::destroy_ccloud_stack $SERVICE_ACCOUNT_ID
36 | 
37 | echo
38 | echo -e "${BOLD}${BLUE}Tip:${NC} 'ccloud' CLI currently has no environment set"
39 | 
-------------------------------------------------------------------------------- /scripts/ccloud/docker-compose.yaml: --------------------------------------------------------------------------------
1 | # docker-compose supports environment variable substitution with the ${VARIABLE-NAME} syntax.
2 | # Environment variables can be sourced in a variety of ways.
3 | # One of those ways is through a well-known '.env' file located in the same folder as the docker-compose.yml file.
4 | # See the Docker documentation for details: https://docs.docker.com/compose/environment-variables/#the-env-file
5 | #
6 | # This feature is being used to parameterize some values within this file.
7 | # In this directory is also a .env file, which is actually a symbolic link to /utils/config.env.
8 | # That file contains values which get substituted here when docker-compose parses this file.
9 | #
10 | # If you'd like to view the docker-compose.yml file rendered with its environment variable substitutions,
11 | # you can execute the `docker-compose config` command. Take note that some demos provide additional
12 | # environment variable values by exporting them in a script prior to running `docker-compose up`.
13 | ---
14 | version: '3.7'
15 | services:
16 | connect-cloud:
17 | image: confluentinc/cp-kafka-connect:5.5.1
18 | hostname: connect-cloud
19 | container_name: connect-cloud
20 | ports:
21 | - "8083:8083"
22 | volumes:
23 | - mi2:/usr/share/java/monitoring-interceptors/
24 | - $PWD/delta_configs/ak-tools-ccloud.delta:/tmp/ak-tools-ccloud.delta
25 | environment:
26 | CONNECT_BOOTSTRAP_SERVERS: $BOOTSTRAP_SERVERS
27 | CONNECT_REST_ADVERTISED_HOST_NAME: connect-cloud
28 | CONNECT_PRODUCER_CLIENT_ID: "connect-cloud-worker-producer"
29 | CONNECT_REST_PORT: 8083
30 | CONNECT_GROUP_ID: "connect-cloud"
31 | 
32 | CONNECT_CONFIG_STORAGE_TOPIC: connect-demo-configs
33 | CONNECT_OFFSET_STORAGE_TOPIC: connect-demo-offsets
34 | CONNECT_STATUS_STORAGE_TOPIC: connect-demo-statuses
35 | 
36 | CONNECT_REPLICATION_FACTOR: 3
37 | CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 3
38 | CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 3
39 | CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 3
40 | 
41 | CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.storage.StringConverter"
42 | CONNECT_VALUE_CONVERTER: "io.confluent.connect.avro.AvroConverter"
43 | CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: $SCHEMA_REGISTRY_URL
44 | CONNECT_VALUE_CONVERTER_BASIC_AUTH_CREDENTIALS_SOURCE: $BASIC_AUTH_CREDENTIALS_SOURCE
45 | CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO: $SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO
46 | CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
47 | CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
48 | 
49 | CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
50 | CONNECT_LOG4J_ROOT_LOGLEVEL: INFO
51 | CONNECT_LOG4J_LOGGERS: org.reflections=ERROR
52 | # CLASSPATH required due to CC-2422
53 | CLASSPATH:
"/usr/share/java/monitoring-interceptors/monitoring-interceptors-${CONFLUENT}.jar" 54 | 55 | # Connect worker 56 | CONNECT_SECURITY_PROTOCOL: SASL_SSL 57 | CONNECT_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG 58 | CONNECT_SASL_MECHANISM: PLAIN 59 | CONNECT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: "HTTPS" 60 | 61 | # Connect embedded producer 62 | CONNECT_PRODUCER_SECURITY_PROTOCOL: SASL_SSL 63 | CONNECT_PRODUCER_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG 64 | CONNECT_PRODUCER_SASL_MECHANISM: PLAIN 65 | CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" 66 | CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_BOOTSTRAP_SERVERS: $BOOTSTRAP_SERVERS 67 | CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SECURITY_PROTOCOL: SASL_SSL 68 | CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG 69 | CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: PLAIN 70 | 71 | # Connect embedded consumer 72 | CONNECT_CONSUMER_SECURITY_PROTOCOL: SASL_SSL 73 | CONNECT_CONSUMER_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG 74 | CONNECT_CONSUMER_SASL_MECHANISM: PLAIN 75 | CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" 76 | CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_BOOTSTRAP_SERVERS: $BOOTSTRAP_SERVERS 77 | CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SECURITY_PROTOCOL: SASL_SSL 78 | CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG 79 | CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: PLAIN 80 | 81 | account-db: 82 | container_name: kstreams-demo-account-db 83 | image: postgres 84 | restart: always 85 | ports: 86 | - 5432:5432 87 | environment: 88 | POSTGRES_USER: account-store 89 | POSTGRES_PASSWORD: account-store 90 | volumes: 91 | mi2: {} -------------------------------------------------------------------------------- /scripts/ccloud/start_connect.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Source library 4 | . ../common/colors.sh 5 | . ../common/helper.sh 6 | 7 | check_jq || exit 8 | # using CP 5.5.1 9 | export CONFLUENT=5.5.1 10 | 11 | #export CONFIG_FILE=~/.ccloud/config 12 | 13 | if [ -z "$1" ]; then 14 | echo -e "${RED}ERROR: Must supply argument that is the client configuration file created from './ccloud_stack_create.sh'. (Is it in stack-configs/ folder?) 
${NC}" 15 | exit 1 16 | else 17 | CONFIG_FILE=$1 18 | fi 19 | 20 | export USE_CONFLUENT_CLOUD_SCHEMA_REGISTRY=true 21 | export USE_CONFLUENT_CLOUD_KSQL=true 22 | 23 | check_ccloud_config $CONFIG_FILE || exit 24 | 25 | if [[ "${USE_CONFLUENT_CLOUD_SCHEMA_REGISTRY}" == true ]]; then 26 | SCHEMA_REGISTRY_CONFIG_FILE=$CONFIG_FILE 27 | else 28 | SCHEMA_REGISTRY_CONFIG_FILE=schema_registry_docker.config 29 | fi 30 | ./ccloud-generate-cp-config.sh $CONFIG_FILE $SCHEMA_REGISTRY_CONFIG_FILE 31 | 32 | DELTA_CONFIGS_DIR=delta_configs 33 | source $DELTA_CONFIGS_DIR/env.delta 34 | 35 | if [[ "$USE_CONFLUENT_CLOUD_SCHEMA_REGISTRY" == true ]]; then 36 | validate_confluent_cloud_schema_registry $SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO $SCHEMA_REGISTRY_URL || exit 1 37 | fi 38 | 39 | docker-compose up -d 40 | 41 | echo -e "${BLUE}☁️ ${BOLD}Kafka Connect${NC} is starting up...${NC}" 42 | 43 | # ---- Set up Replicator source connector --- 44 | export CONNECT_HOST=connect-cloud 45 | echo -e "\n--\n\n$(date) Waiting for Kafka Connect to start on ${GREEN}$CONNECT_HOST ${NC}… ⏳" 46 | grep -q "Kafka Connect started" <(docker-compose logs -f $CONNECT_HOST) 47 | echo -e "\n--\n$(date) 🛢 Creating JDBC to CCloud connector" 48 | #. ./scripts/submit_replicator_docker_config.sh -------------------------------------------------------------------------------- /scripts/common/colors.sh: -------------------------------------------------------------------------------- 1 | # ---------------------------------- 2 | # Colors 3 | # ---------------------------------- 4 | NOCOLOR='\033[0m' 5 | NC='\033[0m' 6 | RED='\033[0;31m' 7 | GREEN='\033[0;32m' 8 | ORANGE='\033[0;33m' 9 | BLUE='\033[0;34m' 10 | PURPLE='\033[0;35m' 11 | CYAN='\033[0;36m' 12 | LIGHTGRAY='\033[0;37m' 13 | DARKGRAY='\033[1;30m' 14 | LIGHTRED='\033[1;31m' 15 | LIGHTGREEN='\033[1;32m' 16 | YELLOW='\033[1;33m' 17 | LIGHTBLUE='\033[1;34m' 18 | LIGHTPURPLE='\033[1;35m' 19 | LIGHTCYAN='\033[1;36m' 20 | WHITE='\033[1;37m' 21 | BOLD='\033[1m' 22 | -------------------------------------------------------------------------------- /scripts/common/helper.sh: -------------------------------------------------------------------------------- 1 | function check_jq() { 2 | if [[ $(type jq 2>&1) =~ "not found" ]]; then 3 | echo "'jq' is not found. Install 'jq' and try again" 4 | exit 1 5 | fi 6 | 7 | return 0 8 | } 9 | 10 | function check_ccloud_config() { 11 | expected_configfile=$1 12 | 13 | if [[ ! -f "$expected_configfile" ]]; then 14 | echo "Confluent Cloud configuration file does not exist at $expected_configfile. Please create the configuration file with properties set to your Confluent Cloud cluster and try again." 15 | exit 1 16 | elif ! [[ $(grep "^\s*bootstrap.server" $expected_configfile) ]]; then 17 | echo "Missing 'bootstrap.server' in $expected_configfile. Please modify the configuration file with properties set to your Confluent Cloud cluster and try again." 18 | exit 1 19 | fi 20 | 21 | return 0 22 | } 23 | 24 | function validate_confluent_cloud_schema_registry() { 25 | auth=$1 26 | sr_endpoint=$2 27 | 28 | curl --silent -u $auth $sr_endpoint 29 | if [[ "$?" -ne 0 ]]; then 30 | echo "ERROR: Could not validate credentials to Confluent Cloud Schema Registry. Please troubleshoot" 31 | exit 1 32 | fi 33 | return 0 34 | } 35 | 36 | function check_docker() { 37 | if ! docker ps -q &>/dev/null; then 38 | echo "This demo requires Docker but it doesn't appear to be running. Please start Docker and try again." 
39 | exit 1 40 | fi 41 | 42 | return 0 43 | } 44 | 45 | 46 | retry() { 47 | local -r -i max_wait="$1"; shift 48 | local -r cmd="$@" 49 | 50 | local -i sleep_interval=5 51 | local -i curr_wait=0 52 | 53 | until $cmd 54 | do 55 | if (( curr_wait >= max_wait )) 56 | then 57 | echo "ERROR: Failed after $curr_wait seconds. Please troubleshoot and run again." 58 | return 1 59 | else 60 | printf "." 61 | curr_wait=$((curr_wait+sleep_interval)) 62 | sleep $sleep_interval 63 | fi 64 | done 65 | printf "\n" 66 | } -------------------------------------------------------------------------------- /scripts/connect/connector-status.sh: -------------------------------------------------------------------------------- 1 | curl -s "http://localhost:8083/connectors"| jq '.[]'| xargs -I{connector_name} curl -s "http://localhost:8083/connectors/"{connector_name}"/status"| jq -c -M '[.name,.connector.state,.tasks[].state]|join(":|:")' -------------------------------------------------------------------------------- /scripts/connect/deploy-jdbc-connector.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -X POST http://localhost:8083/connectors \ 4 | -H 'Content-Type: application/json' \ 5 | -d @- << EOF 6 | { 7 | "name": "account-jdbc-source", 8 | "config": { 9 | "connector.class": "io.confluent.connect.jdbc.JdbcSourceConnector", 10 | "value.converter": "org.apache.kafka.connect.json.JsonConverter", 11 | "value.converter.schemas.enable": false, 12 | "tasks.max": "1", 13 | "connection.url": "jdbc:postgresql://account-db:5432/account-store", 14 | "connection.user": "account-store", 15 | "connection.password": "account-store", 16 | "mode": "timestamp", 17 | "timestamp.column.name": "update_date", 18 | "table.whitelist": "account", 19 | "topic.prefix": "", 20 | "name": "account-jdbc-source", 21 | "transforms": "createKey,extractString,renameFields", 22 | "transforms.createKey.type":"org.apache.kafka.connect.transforms.ValueToKey", 23 | "transforms.createKey.fields":"number", 24 | "transforms.extractString.type":"org.apache.kafka.connect.transforms.ExtractField\$Key", 25 | "transforms.extractString.field":"number", 26 | "transforms.renameFields.type": "org.apache.kafka.connect.transforms.ReplaceField\$Value", 27 | "transforms.renameFields.renames": "city_address:cityAddress,country_address:countryAddress,creation_date:creationDate,first_name:firstName,last_name:lastName,number_address:numberAddress,street_address:streetAddress,update_date:updateDate" 28 | } 29 | } 30 | EOF -------------------------------------------------------------------------------- /scripts/generate-transaction.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ACCOUNT=$1 4 | TYPE=$2 5 | AMOUNT=$3 6 | CURRENCY=$4 7 | 8 | curl -X POST -H "Content-Type: application/json" http://localhost:8080/transaction --data "{\"account\": \"$ACCOUNT\", \"amount\": $AMOUNT, \"type\": \"$TYPE\", \"currency\": \"$CURRENCY\"}" -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'kstreams-ksqldb-workshop' 2 | include(':data-generator') 3 | include(':kstreams-demo') 4 | include(':kstreams-demo-kotlin') 5 | --------------------------------------------------------------------------------