├── CONTRIBUTING.md ├── hello-kafka-api ├── kotlin │ ├── gradle.properties │ ├── settings.gradle.kts │ ├── gradle │ │ └── wrapper │ │ │ ├── gradle-wrapper.jar │ │ │ └── gradle-wrapper.properties │ ├── src │ │ └── main │ │ │ └── kotlin │ │ │ └── hellokafka │ │ │ ├── Consumer.kt │ │ │ ├── App.kt │ │ │ └── Producer.kt │ ├── .gitignore │ ├── readme.md │ ├── build.gradle.kts │ ├── gradlew.bat │ └── gradlew ├── java │ ├── .mvn │ │ └── wrapper │ │ │ ├── maven-wrapper.jar │ │ │ ├── maven-wrapper.properties │ │ │ └── MavenWrapperDownloader.java │ ├── src │ │ └── main │ │ │ └── java │ │ │ └── hellokafka │ │ │ ├── App.java │ │ │ ├── Consumer.java │ │ │ └── Producer.java │ ├── .gitignore │ ├── readme.md │ └── pom.xml └── readme.md ├── hello-kafka-streams ├── kotlin │ ├── gradle.properties │ ├── settings.gradle.kts │ ├── gradle │ │ └── wrapper │ │ │ ├── gradle-wrapper.jar │ │ │ └── gradle-wrapper.properties │ ├── src │ │ └── main │ │ │ ├── resources │ │ │ └── application.yml │ │ │ └── kotlin │ │ │ └── hellostreams │ │ │ ├── App.kt │ │ │ ├── Producer.kt │ │ │ ├── Consumer.kt │ │ │ └── Processor.kt │ ├── .gitignore │ ├── readme.md │ ├── build.gradle.kts │ └── gradlew.bat └── java │ ├── .mvn │ └── wrapper │ │ ├── maven-wrapper.jar │ │ ├── maven-wrapper.properties │ │ └── MavenWrapperDownloader.java │ ├── src │ └── main │ │ ├── resources │ │ └── application.yml │ │ └── java │ │ └── hellokafka │ │ ├── App.java │ │ ├── Consumer.java │ │ ├── Producer.java │ │ └── Processor.java │ ├── .gitignore │ ├── readme.md │ └── pom.xml ├── hello-kafka-unit-test ├── kotlin │ ├── gradle.properties │ ├── settings.gradle.kts │ ├── gradle │ │ └── wrapper │ │ │ ├── gradle-wrapper.jar │ │ │ └── gradle-wrapper.properties │ ├── src │ │ ├── main │ │ │ └── kotlin │ │ │ │ └── hellokafka │ │ │ │ ├── App.kt │ │ │ │ ├── Producer.kt │ │ │ │ └── Consumer.kt │ │ └── test │ │ │ └── kotlin │ │ │ └── hellokafka │ │ │ ├── EmbeddedKafkaTest.kt │ │ │ └── ContainerKafkaTest.kt │ ├── readme.md │ ├── .gitignore │ ├── build.gradle.kts │ └── gradlew.bat ├── java │ ├── .mvn │ │ └── wrapper │ │ │ ├── maven-wrapper.jar │ │ │ ├── maven-wrapper.properties │ │ │ └── MavenWrapperDownloader.java │ ├── src │ │ ├── main │ │ │ └── java │ │ │ │ └── hellokafka │ │ │ │ ├── App.java │ │ │ │ ├── Producer.java │ │ │ │ └── Consumer.java │ │ └── test │ │ │ └── java │ │ │ └── hellokafka │ │ │ ├── EmbeddedKafkaTest.java │ │ │ └── ContainerKafkaTest.java │ ├── readme.md │ ├── .gitignore │ └── pom.xml └── readme.md ├── docs ├── kafka.png ├── kafka.pptx ├── kafka-api.png ├── kafka-cqrs.png ├── export │ └── kafka.png ├── kafka-basic.png ├── kafka-cqrs-es.png ├── social-intro.png ├── social-intro2.png ├── kafka-unit-test.png ├── kafka-data-streaming.png ├── kafka-event-sourcing.png ├── kafka-microservices.png ├── kafka-streams-sample.png ├── __kafka-data-streaming.png ├── kafka-publish-subscribe.png ├── kafka-publish-unit-test.png ├── kafka-streams-topology.png ├── __kafka-publish-subscribe.png ├── kafka-event-sourcing-groupid.png ├── kafka-event-sourcing-sequence.png └── _kafka-publish-subscribe-simple.png ├── hello-kafka-cqrs ├── settings.gradle.kts ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── order-command-service │ ├── src │ │ └── main │ │ │ ├── kotlin │ │ │ └── microkafka │ │ │ │ └── escqrs │ │ │ │ └── orders │ │ │ │ └── command │ │ │ │ ├── model │ │ │ │ ├── OrderCommands.kt │ │ │ │ └── OrderEvents.kt │ │ │ │ ├── App.kt │ │ │ │ ├── OrderController.kt │ │ │ │ └── OrderCommandHandler.kt │ │ │ └── resources │ │ │ └── 
application.yml │ └── build.gradle.kts ├── order-query-service │ ├── src │ │ └── main │ │ │ ├── kotlin │ │ │ └── microkafka │ │ │ │ └── escqrs │ │ │ │ └── orders │ │ │ │ └── query │ │ │ │ ├── OrderRepository.kt │ │ │ │ ├── model │ │ │ │ ├── OrderDto.kt │ │ │ │ ├── OrderEvents.kt │ │ │ │ ├── OrderEntity.kt │ │ │ │ └── Mapping.kt │ │ │ │ ├── App.kt │ │ │ │ ├── OrderController.kt │ │ │ │ └── OrderListener.kt │ │ │ └── resources │ │ │ └── application.yml │ └── build.gradle.kts ├── .gitignore ├── build.gradle.kts ├── gradlew.bat ├── readme.md └── gradlew ├── hello-kafka-es ├── settings.gradle.kts ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── order-query-service │ ├── src │ │ └── main │ │ │ ├── kotlin │ │ │ └── kafka │ │ │ │ └── es │ │ │ │ └── query │ │ │ │ ├── model │ │ │ │ ├── Order.kt │ │ │ │ └── OrderEvent.kt │ │ │ │ ├── App.kt │ │ │ │ ├── OrderController.kt │ │ │ │ ├── OrderRepository.kt │ │ │ │ └── OrderListener.kt │ │ │ └── resources │ │ │ └── application.yml │ └── build.gradle.kts ├── order-command-service │ ├── src │ │ └── main │ │ │ ├── kotlin │ │ │ └── kafka │ │ │ │ └── es │ │ │ │ └── command │ │ │ │ ├── OrderEvent.kt │ │ │ │ ├── App.kt │ │ │ │ └── Producer.kt │ │ │ └── resources │ │ │ └── application.yml │ └── build.gradle.kts ├── .gitignore ├── build.gradle.kts ├── gradlew.bat └── gradlew ├── hello-kafka-microservices ├── settings.gradle.kts ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── order-service │ ├── src │ │ └── main │ │ │ ├── kotlin │ │ │ └── microkafka │ │ │ │ └── orders │ │ │ │ ├── OrderRepository.kt │ │ │ │ ├── model │ │ │ │ ├── OrderDto.kt │ │ │ │ ├── Order.kt │ │ │ │ └── Mapping.kt │ │ │ │ ├── App.kt │ │ │ │ ├── OrderController.kt │ │ │ │ └── OrderService.kt │ │ │ └── resources │ │ │ └── application.yml │ └── build.gradle.kts ├── invoice-service │ ├── src │ │ └── main │ │ │ ├── kotlin │ │ │ └── microkafka │ │ │ │ └── invoices │ │ │ │ ├── InvoiceRepository.kt │ │ │ │ ├── model │ │ │ │ ├── Order.kt │ │ │ │ ├── InvoicesDto.kt │ │ │ │ ├── Invoice.kt │ │ │ │ └── Mapping.kt │ │ │ │ ├── App.kt │ │ │ │ ├── InvoiceController.kt │ │ │ │ ├── OrderListener.kt │ │ │ │ └── InvoiceService.kt │ │ │ └── resources │ │ │ └── application.yml │ └── build.gradle.kts ├── .gitignore ├── build.gradle.kts ├── gradlew.bat ├── readme.md └── gradlew ├── hello-kafka-es-streamsapi ├── kotlin │ ├── settings.gradle.kts │ ├── gradle │ │ └── wrapper │ │ │ ├── gradle-wrapper.jar │ │ │ └── gradle-wrapper.properties │ ├── order-query-service │ │ ├── src │ │ │ └── main │ │ │ │ ├── resources │ │ │ │ └── application.yml │ │ │ │ └── kotlin │ │ │ │ └── kafka │ │ │ │ └── es │ │ │ │ └── query │ │ │ │ ├── model │ │ │ │ ├── Order.kt │ │ │ │ ├── Mapping.kt │ │ │ │ └── OrderEvent.kt │ │ │ │ ├── App.kt │ │ │ │ ├── OrderController.kt │ │ │ │ ├── OrderRepository.kt │ │ │ │ └── OrderListener.kt │ │ └── build.gradle.kts │ ├── order-command-service │ │ ├── src │ │ │ └── main │ │ │ │ ├── kotlin │ │ │ │ └── kafka │ │ │ │ │ └── es │ │ │ │ │ └── command │ │ │ │ │ ├── OrderEvent.kt │ │ │ │ │ ├── App.kt │ │ │ │ │ └── Producer.kt │ │ │ │ └── resources │ │ │ │ └── application.yml │ │ └── build.gradle.kts │ ├── .gitignore │ ├── readme.md │ ├── build.gradle.kts │ └── gradlew.bat └── readme.md ├── .github ├── ISSUE_TEMPLATE │ ├── custom.md │ ├── feature_request.md │ └── bug_report.md └── pull_request_template.md ├── hello-kafka ├── docker-compose.yml └── docker-compose-withui.yml ├── .gitignore ├── LICENSE ├── docker-compose.yml └── 
CODE_OF_CONDUCT.md /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # your contribution is welcome 👍🚀 2 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/gradle.properties: -------------------------------------------------------------------------------- 1 | kotlin.code.style=official 2 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/gradle.properties: -------------------------------------------------------------------------------- 1 | kotlin.code.style=official 2 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/gradle.properties: -------------------------------------------------------------------------------- 1 | kotlin.code.style=official 2 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | 2 | rootProject.name = "hello-kafka" 3 | 4 | -------------------------------------------------------------------------------- /docs/kafka.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka.png -------------------------------------------------------------------------------- /docs/kafka.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka.pptx -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | 2 | rootProject.name = "hello-kafka-streams" 3 | 4 | -------------------------------------------------------------------------------- /docs/kafka-api.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-api.png -------------------------------------------------------------------------------- /docs/kafka-cqrs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-cqrs.png -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | 2 | rootProject.name = "hello-kafka-unit-test" 3 | 4 | -------------------------------------------------------------------------------- /docs/export/kafka.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/export/kafka.png -------------------------------------------------------------------------------- /docs/kafka-basic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-basic.png -------------------------------------------------------------------------------- /docs/kafka-cqrs-es.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-cqrs-es.png -------------------------------------------------------------------------------- /docs/social-intro.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/social-intro.png -------------------------------------------------------------------------------- /docs/social-intro2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/social-intro2.png -------------------------------------------------------------------------------- /docs/kafka-unit-test.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-unit-test.png -------------------------------------------------------------------------------- /docs/kafka-data-streaming.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-data-streaming.png -------------------------------------------------------------------------------- /docs/kafka-event-sourcing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-event-sourcing.png -------------------------------------------------------------------------------- /docs/kafka-microservices.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-microservices.png -------------------------------------------------------------------------------- /docs/kafka-streams-sample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-streams-sample.png -------------------------------------------------------------------------------- /docs/__kafka-data-streaming.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/__kafka-data-streaming.png -------------------------------------------------------------------------------- /docs/kafka-publish-subscribe.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-publish-subscribe.png -------------------------------------------------------------------------------- /docs/kafka-publish-unit-test.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-publish-unit-test.png -------------------------------------------------------------------------------- /docs/kafka-streams-topology.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-streams-topology.png -------------------------------------------------------------------------------- /docs/__kafka-publish-subscribe.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/__kafka-publish-subscribe.png -------------------------------------------------------------------------------- /hello-kafka-cqrs/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "hello-kafka-cqrs" 2 | include("order-command-service", "order-query-service") 3 | -------------------------------------------------------------------------------- /hello-kafka-es/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "hello-kafka-es" 2 | include("order-command-service", "order-query-service") 3 | -------------------------------------------------------------------------------- /docs/kafka-event-sourcing-groupid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-event-sourcing-groupid.png -------------------------------------------------------------------------------- /docs/kafka-event-sourcing-sequence.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/kafka-event-sourcing-sequence.png -------------------------------------------------------------------------------- /hello-kafka-microservices/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "hello-kafka-microservices" 2 | include("order-service", "invoice-service") 3 | -------------------------------------------------------------------------------- /docs/_kafka-publish-subscribe-simple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/docs/_kafka-publish-subscribe-simple.png -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "hello-kafka-es-streamsapi" 2 | include("order-command-service", "order-query-service") 3 | -------------------------------------------------------------------------------- /hello-kafka-es/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-es/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-api/java/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-api/java/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-cqrs/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-cqrs/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-api/kotlin/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-streams/java/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-streams/java/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-unit-test/java/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-microservices/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-microservices/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-streams/kotlin/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-unit-test/kotlin/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thecodemonkey/kafka-microservices/HEAD/hello-kafka-es-streamsapi/kotlin/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/custom.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Custom issue template 3 | about: Describe this issue template's purpose here. 
4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 8887 3 | spring: 4 | kafka: 5 | streams: 6 | application-id: hello-es-streams 7 | 8 | 9 | -------------------------------------------------------------------------------- /hello-kafka-es/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.7-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /hello-kafka-api/java/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip 2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 3 | -------------------------------------------------------------------------------- /hello-kafka-microservices/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip 2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 3 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.7-bin.zip 4 | 
zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip 2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar 3 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.7-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/kotlin/microkafka/orders/OrderRepository.kt: -------------------------------------------------------------------------------- 1 | package microkafka.orders 2 | 3 | import microkafka.orders.model.Order 4 | import org.springframework.data.jpa.repository.JpaRepository 5 | 6 | interface OrderRepository : JpaRepository { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/src/main/kotlin/kafka/es/query/model/Order.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query.model 2 | 3 | data class Order( 4 | val id: String, 5 | var userId: String? = null, 6 | var productId: String? = null, 7 | var amount: Int? = null, 8 | var price: Int? 
= null 9 | ) 10 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/InvoiceRepository.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices 2 | 3 | import microkafka.invoices.model.Invoice 4 | import org.springframework.data.jpa.repository.JpaRepository 5 | 6 | interface InvoiceRepository : JpaRepository { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-command-service/src/main/kotlin/microkafka/escqrs/orders/command/model/OrderCommands.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.command.model 2 | 3 | data class OrderCreateCommand(val userId: String, val productId: String, val amount: Int) 4 | data class OrderAmountUpdateCommand(val orderId: String, val amount: Int) 5 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/kotlin/kafka/es/query/model/Order.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query.model 2 | 3 | data class Order( 4 | val id: String, 5 | var userId: String? = null, 6 | var productId: String? = null, 7 | var amount: Int? = null, 8 | var price: Int? = null 9 | ) 10 | -------------------------------------------------------------------------------- /hello-kafka-es/order-command-service/src/main/kotlin/kafka/es/command/OrderEvent.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.command 2 | 3 | data class OrderEvent ( 4 | var id: String? = null, 5 | 6 | var userId: String? = null, 7 | var productId: String? = null, 8 | var amount: Int? = null, 9 | var price: Int? = null 10 | ) 11 | -------------------------------------------------------------------------------- /hello-kafka-es/order-command-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | producer: 4 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 5 | properties: 6 | spring.json.trusted.packages: "*" 7 | spring.json.type.mapping: order:kafka.es.command.OrderEvent 8 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/src/main/kotlin/kafka/es/query/model/OrderEvent.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query.model 2 | 3 | data class OrderEvent ( 4 | var id: String? = null, 5 | 6 | var userId: String? = null, 7 | var productId: String? = null, 8 | var amount: Int? = null, 9 | var price: Int? = null 10 | ) 11 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/model/Order.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices.model 2 | 3 | class Order { 4 | var id: String? = null 5 | //var status: String? = null 6 | 7 | var userId: String? = null 8 | var productId: String? = null 9 | var amount: Int? 
= null 10 | } 11 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | streams: 4 | application-id: hello-streams 5 | properties: 6 | default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde 7 | default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde 8 | 9 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-command-service/src/main/kotlin/kafka/es/command/OrderEvent.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.command 2 | 3 | data class OrderEvent ( 4 | var id: String? = null, 5 | 6 | var userId: String? = null, 7 | var productId: String? = null, 8 | var amount: Int? = null, 9 | var price: Int? = null 10 | ) 11 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/kotlin/microkafka/orders/model/OrderDto.kt: -------------------------------------------------------------------------------- 1 | package microkafka.orders.model 2 | 3 | data class OrderDto( 4 | var id: String? = null, 5 | var status: String? = null, 6 | 7 | var userId: String? = null, 8 | var productId: String? = null, 9 | var amount: Int? = null 10 | ) 11 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/OrderRepository.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query 2 | 3 | import microkafka.escqrs.orders.query.model.OrderEntity 4 | import org.springframework.data.jpa.repository.JpaRepository 5 | 6 | interface OrderRepository : JpaRepository { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-command-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | producer: 4 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 5 | properties: 6 | spring.json.trusted.packages: "*" 7 | spring.json.type.mapping: order:kafka.es.command.OrderEvent 8 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/model/InvoicesDto.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices.model 2 | 3 | import java.math.BigDecimal 4 | 5 | data class InvoicesDto( 6 | var id: String? = null, 7 | var userId: String? = null, 8 | var orderId: String? = null, 9 | var amount: Double? 
= null 10 | ) 11 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | kafka: 3 | streams: 4 | application-id: hello-streams 5 | properties: 6 | default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde 7 | default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/model/OrderDto.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query.model 2 | 3 | data class OrderDto( 4 | var id: String? = null, 5 | var status: String? = null, 6 | 7 | var userId: String? = null, 8 | var productId: String? = null, 9 | var amount: Int? = null 10 | ) 11 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/src/main/kotlin/hellokafka/App.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | 4 | import org.springframework.boot.autoconfigure.SpringBootApplication 5 | import org.springframework.boot.runApplication 6 | 7 | @SpringBootApplication 8 | class App 9 | 10 | fun main(args: Array) { 11 | runApplication(*args) 12 | print("start") 13 | } 14 | 15 | -------------------------------------------------------------------------------- /hello-kafka-api/java/src/main/java/hellokafka/App.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class App { 8 | public static void main(String[] args) { 9 | SpringApplication.run(App.class, args); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /hello-kafka/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | zookeeper: 3 | image: zookeeper:3.7.0 4 | ports: ["2181:2181"] 5 | 6 | kafka: 7 | image: wurstmeister/kafka:2.12-2.5.0 8 | container_name: kafka 9 | ports: ["9092:9092"] 10 | depends_on: [zookeeper] 11 | environment: 12 | KAFKA_ADVERTISED_HOST_NAME: kafka 13 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/src/main/java/hellokafka/App.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class App { 8 | public static void main(String[] args) { 9 | SpringApplication.run(App.class, args); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-command-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 8883 3 | spring: 4 | kafka: 5 | producer: 6 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 7 | properties: 8 | 
spring.json.trusted.packages: "*" 9 | spring.json.type.mapping: order:microkafka.escqrs.orders.command.model.OrderCreatedEvent 10 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/src/main/kotlin/hellostreams/App.kt: -------------------------------------------------------------------------------- 1 | package hellostreams 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.runApplication 5 | 6 | @SpringBootApplication 7 | class App 8 | 9 | fun main(args: Array) { 10 | runApplication(*args) 11 | println("start stream processing..") 12 | } 13 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/model/Invoice.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices.model 2 | 3 | import javax.persistence.* 4 | 5 | @Entity 6 | @Table(name = "invoices") 7 | class Invoice { 8 | 9 | @Id 10 | var id: String? = null 11 | 12 | var userId: String? = null 13 | var orderId: String? = null 14 | 15 | var amount: Double? = null 16 | } 17 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/src/main/kotlin/kafka/es/query/App.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.runApplication 5 | 6 | 7 | @SpringBootApplication 8 | class App 9 | 10 | fun main(args: Array) { 11 | runApplication(*args) 12 | print("start order query microservice...") 13 | } 14 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/kotlin/microkafka/orders/App.kt: -------------------------------------------------------------------------------- 1 | package microkafka.orders 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.runApplication 5 | 6 | @SpringBootApplication 7 | class App 8 | 9 | fun main(args: Array) { 10 | runApplication(*args) 11 | print("start orders microservice...") 12 | } 13 | 14 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/src/main/kotlin/hellokafka/Consumer.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | import org.springframework.kafka.annotation.KafkaListener 4 | import org.springframework.stereotype.Service 5 | 6 | @Service 7 | class Consumer { 8 | 9 | @KafkaListener(topics= ["hello-topic"], groupId = "kafka_kotlin_id") 10 | fun consume(message: String) { 11 | println("RECEIVE MESSAGE : $message") 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/kotlin/microkafka/orders/model/Order.kt: -------------------------------------------------------------------------------- 1 | package microkafka.orders.model 2 | 3 | import javax.persistence.* 4 | 5 | @Entity 6 | @Table(name = "orders") 7 | class Order { 8 | 9 | @Id 10 | var id: String? = null 11 | var status: String? = null 12 | 13 | var userId: String? = null 14 | var productId: String? = null 15 | var amount: Int? 
= null 16 | } 17 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/App.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.runApplication 5 | 6 | 7 | @SpringBootApplication 8 | class App 9 | 10 | fun main(args: Array) { 11 | runApplication(*args) 12 | print("start invoice microservice...") 13 | } 14 | 15 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/kotlin/kafka/es/query/App.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.runApplication 5 | 6 | 7 | @SpringBootApplication 8 | class App 9 | 10 | fun main(args: Array) { 11 | runApplication(*args) 12 | println("### start order query microservice...") 13 | } 14 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/App.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.runApplication 5 | 6 | 7 | @SpringBootApplication 8 | class App 9 | 10 | fun main(args: Array) { 11 | runApplication(*args) 12 | print("start order query microservice...") 13 | } 14 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 8885 3 | spring: 4 | kafka: 5 | consumer: 6 | value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer 7 | group-id: ${random.uuid} 8 | auto-offset-reset: earliest 9 | properties: 10 | spring.json.trusted.packages: "*" 11 | spring.json.type.mapping: order:kafka.es.query.model.OrderEvent 12 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/src/main/java/hellokafka/App.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class App { 8 | public static void main(String[] args) { 9 | SpringApplication.run(App.class, args); 10 | System.out.println("start stream processing.."); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/src/main/kotlin/kafka/es/query/OrderController.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import org.springframework.http.ResponseEntity.ok 4 | import org.springframework.web.bind.annotation.* 5 | 6 | @RestController 7 | @RequestMapping("/orders") 8 | class OrderController(val repository: OrderRepository) { 9 | 10 | @GetMapping 11 | fun getAllOrders() = 12 | ok().body(repository.findAll()) 13 | 14 | } 15 | 
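The consumer settings above (JsonDeserializer with the type mapping `order:kafka.es.query.model.OrderEvent`, a random `group-id` and `auto-offset-reset: earliest`) mean that every instance of the order-query-service joins Kafka as a fresh consumer group and replays the whole order topic on startup to rebuild its in-memory read model. Roughly, the projection step for this module could look like the sketch below. This is an illustrative sketch only: the module's actual `OrderListener.kt` is not included in this excerpt, and the class name and the topic name `orders` are assumptions.

```kotlin
package kafka.es.query

import kafka.es.query.model.Order
import kafka.es.query.model.OrderEvent
import org.springframework.kafka.annotation.KafkaListener
import org.springframework.stereotype.Service

// Hypothetical sketch -- class and topic names are assumptions,
// not the module's real OrderListener.kt.
@Service
class OrderEventProjector(val repository: OrderRepository) {

    // application.yml maps the "order" JSON type id to OrderEvent,
    // so the payload arrives here as a typed object.
    @KafkaListener(topics = ["orders"])
    fun on(event: OrderEvent) {
        // Upsert into the in-memory read model: create the order on first sight,
        // then apply only the fields carried by this event.
        val order = repository.findById(event.id!!) ?: Order(event.id!!)

        event.userId?.let { order.userId = it }
        event.productId?.let { order.productId = it }
        event.amount?.let { order.amount = it }
        event.price?.let { order.price = it }

        repository.save(order)
    }
}
```

Because the group id is random per instance, restarting the service re-reads the topic from the first offset and reconstructs the same state -- the central event-sourcing property this sample is built around.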
-------------------------------------------------------------------------------- /hello-kafka-api/java/src/main/java/hellokafka/Consumer.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.kafka.annotation.KafkaListener; 4 | import org.springframework.stereotype.Service; 5 | 6 | @Service 7 | public class Consumer { 8 | 9 | @KafkaListener(topics= { "hello-topic" }, groupId = "kafka_kotlin_id") 10 | public void consume(String message) { 11 | System.out.println("RECEIVE MESSAGE : " + message); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/src/main/kotlin/hellokafka/App.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.context.properties.ConfigurationPropertiesScan 5 | import org.springframework.boot.runApplication 6 | 7 | @SpringBootApplication 8 | class App 9 | 10 | fun main(args: Array) { 11 | runApplication(*args) 12 | print("start pub/sub process...") 13 | } 14 | 15 | -------------------------------------------------------------------------------- /hello-kafka-es/order-command-service/src/main/kotlin/kafka/es/command/App.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.command 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.context.properties.ConfigurationPropertiesScan 5 | import org.springframework.boot.runApplication 6 | 7 | 8 | @SpringBootApplication 9 | class App 10 | 11 | fun main(args: Array) { 12 | runApplication(*args) 13 | print("start order command producer...") 14 | } 15 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-command-service/src/main/kotlin/kafka/es/command/App.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.command 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.context.properties.ConfigurationPropertiesScan 5 | import org.springframework.boot.runApplication 6 | 7 | 8 | @SpringBootApplication 9 | class App 10 | 11 | fun main(args: Array) { 12 | runApplication(*args) 13 | print("start order command producer...") 14 | } 15 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/model/OrderEvents.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query.model 2 | 3 | abstract class OrderEvent ( 4 | open val id: String? = null, 5 | 6 | open var userId: String? = null, 7 | open var productId: String? = null, 8 | open var amount: Int? = null 9 | ) 10 | 11 | data class OrderCreatedEvent(override val id: String?) : OrderEvent(id) { } 12 | data class OrderUpdatedEvent(override val id: String?) 
: OrderEvent(id) { } 13 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/model/OrderEntity.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query.model 2 | 3 | import javax.persistence.Entity 4 | import javax.persistence.Id 5 | import javax.persistence.Table 6 | 7 | @Entity 8 | @Table(name = "orders") 9 | class OrderEntity { 10 | 11 | @Id 12 | var id: String? = null 13 | var status: String? = null 14 | 15 | var userId: String? = null 16 | var productId: String? = null 17 | var amount: Int? = null 18 | } 19 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-command-service/src/main/kotlin/microkafka/escqrs/orders/command/App.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.command 2 | 3 | import org.springframework.boot.autoconfigure.SpringBootApplication 4 | import org.springframework.boot.context.properties.ConfigurationPropertiesScan 5 | import org.springframework.boot.runApplication 6 | 7 | 8 | @SpringBootApplication 9 | class App 10 | 11 | fun main(args: Array) { 12 | runApplication(*args) 13 | print("start order command microservice...") 14 | } 15 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/src/main/kotlin/kafka/es/query/OrderRepository.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import kafka.es.query.model.Order 4 | import org.springframework.stereotype.Component 5 | 6 | 7 | @Component 8 | class OrderRepository { 9 | private val records = HashMap() 10 | 11 | fun findById(id: String) = records[id] 12 | fun findAll() = records.values 13 | 14 | fun save(order: Order) = records.put(order.id, order) 15 | 16 | fun exists(id: String) = records.containsKey(id) 17 | } 18 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/InvoiceController.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices 2 | 3 | import microkafka.invoices.model.toDTO 4 | import org.springframework.http.ResponseEntity.ok 5 | import org.springframework.web.bind.annotation.* 6 | 7 | @RestController 8 | @RequestMapping("/invoices") 9 | class InvoiceController(val invoiceRepository: InvoiceRepository) { 10 | 11 | @GetMapping 12 | fun getAllInvoices() = 13 | ok().body(invoiceRepository.findAll().toDTO()) 14 | 15 | } 16 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/OrderController.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query 2 | 3 | import microkafka.escqrs.orders.query.model.toDTO 4 | import org.springframework.http.ResponseEntity.ok 5 | import org.springframework.web.bind.annotation.* 6 | 7 | @RestController 8 | @RequestMapping("/orders") 9 | class OrderController(val orderRepository: OrderRepository) { 10 | 11 | @GetMapping 12 | fun getAllOrders() = 13 | ok().body(orderRepository.findAll().toDTO()) 14 | 15 | } 16 | 
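To connect the CQRS pieces shown above: the command side publishes `OrderCreatedEvent`/`OrderUpdatedEvent` as JSON (its `application.yml` sets `spring.json.type.mapping`), and the query side materializes those events into the JPA-backed `OrderEntity` that `OrderController` returns as DTOs. A rough sketch of that projection step follows. It is illustrative only: the module's real `OrderListener.kt` is not part of this excerpt, the query side's consumer configuration is not shown here, and the class name, topic name and `status` value are assumptions.

```kotlin
package microkafka.escqrs.orders.query

import microkafka.escqrs.orders.query.model.OrderCreatedEvent
import microkafka.escqrs.orders.query.model.OrderEntity
import org.springframework.kafka.annotation.KafkaListener
import org.springframework.stereotype.Service

// Hypothetical sketch -- class name, topic and status value are assumptions.
@Service
class OrderProjection(val orderRepository: OrderRepository) {

    @KafkaListener(topics = ["order-events"])
    fun onCreated(event: OrderCreatedEvent) {
        // Map the event onto the JPA read model that OrderController queries.
        val entity = OrderEntity().apply {
            id = event.id
            status = "CREATED"          // assumed status value
            userId = event.userId
            productId = event.productId
            amount = event.amount
        }
        orderRepository.save(entity)
    }
}
```

A second handler for `OrderUpdatedEvent` would load the existing row by id and apply the changed fields in the same way.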
-------------------------------------------------------------------------------- /hello-kafka-unit-test/java/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka Unit-Test 2 | a simple Unit-Test using Kafka + Spring + JUnit5 + Maven + Java 3 | 4 |

5 | ## prerequisites 6 | 7 | - maven/mvn 8 | - java sdk 1.8 9 | 10 |

11 | 12 | ## quickstart 13 | 14 | ### 1. get the source code 15 | 16 | ```shell 17 | git clone https://github.com/thecodemonkey/kafka-microservices.git 18 | ``` 19 | 20 | ### 2. run unit tests 21 | 22 | ```shell 23 | cd hello-kafka-unit-test/java 24 | mvn test 25 | ``` 26 | 27 | ### 3. tests should be green 28 | 29 |

30 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-command-service/src/main/kotlin/microkafka/escqrs/orders/command/model/OrderEvents.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.command.model 2 | 3 | abstract class OrderEvent ( 4 | open val id: String? = null, 5 | 6 | open var orderId: String? = null, 7 | open var userId: String? = null, 8 | open var productId: String? = null, 9 | open var amount: Int? = null 10 | ) 11 | 12 | data class OrderCreatedEvent(override val id: String?) : OrderEvent(id) { } 13 | data class OrderUpdatedEvent(override val id: String?) : OrderEvent(id) { } 14 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/kotlin/kafka/es/query/model/Mapping.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query.model 2 | 3 | import java.lang.reflect.Field 4 | import java.lang.reflect.InvocationTargetException 5 | import kotlin.reflect.full.declaredMemberProperties 6 | import kotlin.reflect.full.primaryConstructor 7 | 8 | 9 | fun List<OrderEvent>.toOrders() = this.map { it.toOrder() } 10 | 11 | fun OrderEvent.toOrder() = Order( 12 | this.id!!, 13 | this.userId, 14 | this.productId, 15 | this.amount, 16 | this.price 17 | ) 18 | 19 | -------------------------------------------------------------------------------- /hello-kafka-api/java/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka Unit-Test 2 | a simple Unit-Test using Kafka + Spring + JUnit5 + Gradle + Kotlin 3 | 4 |

5 | ## prerequisites 6 | 7 | - Gradle 8 | - java sdk 1.8 9 | 10 |

11 | 12 | ## quickstart 13 | 14 | ### 1. get the source code 15 | 16 | ```shell 17 | git clone https://github.com/thecodemonkey/kafka-microservices.git 18 | ``` 19 | 20 | ### 2. run unit tests 21 | 22 | ```shell 23 | cd hello-kafka-unit-test/kotlin 24 | gradle test -i 25 | ``` 26 | 27 | ### 3. tests should be green 28 | 29 |

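To make step 3 concrete: the tests that `gradle test` runs here (see `EmbeddedKafkaTest.kt` and `ContainerKafkaTest.kt` in the module) exercise the `Producer` and `Consumer` beans against a broker started for the test. The sketch below shows roughly what such a test can look like with spring-kafka-test's `@EmbeddedKafka`; it is an illustrative sketch under assumed property overrides, not the repository's actual test class.

```kotlin
package hellokafka

import org.junit.jupiter.api.Assertions.assertTrue
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.kafka.test.context.EmbeddedKafka
import org.springframework.test.context.TestPropertySource
import java.util.concurrent.TimeUnit

// Illustrative sketch -- the module's own EmbeddedKafkaTest.kt may be structured differently.
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = ["hello-topic-ut"])
@TestPropertySource(properties = [
    "spring.kafka.bootstrap-servers=\${spring.embedded.kafka.brokers}",
    "spring.kafka.consumer.auto-offset-reset=earliest"
])
class EmbeddedKafkaTestSketch {

    @Autowired
    lateinit var producer: Producer

    @Autowired
    lateinit var consumer: Consumer

    @Test
    fun `messages published by Producer reach Consumer`() {
        producer.send("hello 1")
        producer.send("hello 2")

        // Consumer counts its latch down once per received record (see Consumer.kt).
        val received = consumer.latch.await(10, TimeUnit.SECONDS)

        assertTrue(received)
        assertTrue(consumer.data.contains("hello 1"))
    }
}
```

Judging by its name, `ContainerKafkaTest.kt` follows the same idea but runs against a real broker started in Docker (e.g. via Testcontainers) instead of the embedded one.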
30 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/model/Mapping.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices.model 2 | 3 | fun List<Invoice?>.toDTO() = this.map { it?.toDTO() } 4 | 5 | fun Invoice.toDTO() = InvoicesDto( 6 | this.id, 7 | this.userId, 8 | this.orderId, 9 | this.amount 10 | ) 11 | 12 | fun InvoicesDto.fromDTO(): Invoice { 13 | val self = this 14 | 15 | return Invoice().apply { 16 | id = self.id 17 | 18 | userId = self.userId 19 | orderId = self.orderId 20 | amount = self.amount 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/src/main/java/hellokafka/Producer.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.kafka.core.KafkaTemplate; 5 | import org.springframework.stereotype.Service; 6 | 7 | 8 | @Service 9 | public class Producer { 10 | 11 | @Autowired 12 | public KafkaTemplate<String, String> kafka; 13 | 14 | void send(String message) { 15 | System.out.println("PUBLISH message: " + message); 16 | this.kafka.send("hello-topic-ut", message); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/src/main/kotlin/hellokafka/Producer.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | import org.springframework.kafka.core.KafkaTemplate 4 | import org.springframework.scheduling.annotation.EnableScheduling 5 | import org.springframework.scheduling.annotation.Scheduled 6 | import org.springframework.stereotype.Service 7 | import java.time.LocalDateTime 8 | 9 | @Service 10 | class Producer(val kafka: KafkaTemplate<String, String>) { 11 | 12 | fun send(message: String) { 13 |
println("PUBLISH message: $message") 14 | this.kafka.send("hello-topic-ut", message) 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 8881 3 | spring: 4 | kafka: 5 | producer: 6 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 7 | properties: 8 | spring.json.type.mapping: order:microkafka.orders.model.Order 9 | datasource: 10 | url: jdbc:h2:mem:ms-orders-db 11 | driverClassName: org.h2.Driver 12 | username: sa 13 | password: 14 | jpa: 15 | show-sql: true 16 | hibernate: 17 | ddl-auto: create 18 | generate-ddl: true 19 | h2: 20 | console: 21 | enabled: true 22 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/kotlin/microkafka/orders/model/Mapping.kt: -------------------------------------------------------------------------------- 1 | package microkafka.orders.model 2 | 3 | fun List.toDTO() = this.map { it?.toDTO() } 4 | 5 | fun Order.toDTO() = OrderDto( 6 | this.id, 7 | this.status, 8 | 9 | this.userId, 10 | this.productId, 11 | this.amount 12 | ) 13 | 14 | fun OrderDto.fromDTO(): Order { 15 | val self = this 16 | 17 | return Order().apply { 18 | id = self.id 19 | status = self.status 20 | 21 | userId = self.userId 22 | productId = self.productId 23 | amount = self.amount 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/src/main/kotlin/hellokafka/Consumer.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | import org.springframework.kafka.annotation.KafkaListener 4 | import org.springframework.stereotype.Service 5 | import java.util.concurrent.CountDownLatch 6 | 7 | @Service 8 | class Consumer { 9 | val data: MutableList = ArrayList() 10 | var latch = CountDownLatch(2) 11 | 12 | @KafkaListener(topics= ["hello-topic-ut"], groupId = "kafka_kotlin_id") 13 | fun consume(message: String) { 14 | println("RECEIVE message: $message") 15 | data.add(message) 16 | latch.countDown() 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /hello-kafka/docker-compose-withui.yml: -------------------------------------------------------------------------------- 1 | services: 2 | zookeeper: 3 | image: zookeeper:3.7.0 4 | ports: ["2181:2181"] 5 | 6 | kafka: 7 | image: wurstmeister/kafka:2.12-2.5.0 8 | ports: ["9092:9092"] 9 | depends_on: [zookeeper] 10 | environment: 11 | KAFKA_ADVERTISED_HOST_NAME: kafka 12 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 13 | 14 | kafka-web-ui: 15 | image: tchiotludo/akhq:0.17.0 16 | ports: ["8081:8080"] 17 | depends_on: [kafka] 18 | environment: 19 | AKHQ_CONFIGURATION: | 20 | akhq.connections.docker-kafka-server.properties.bootstrap.servers: kafka:9092 21 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | bin/ 17 | !**/src/main/**/bin/ 18 | 
!**/src/test/**/bin/ 19 | 20 | ### IntelliJ IDEA ### 21 | .idea 22 | *.iws 23 | *.iml 24 | *.ipr 25 | out/ 26 | !**/src/main/**/out/ 27 | !**/src/test/**/out/ 28 | 29 | ### NetBeans ### 30 | /nbproject/private/ 31 | /nbbuild/ 32 | /dist/ 33 | /nbdist/ 34 | /.nb-gradle/ 35 | 36 | ### VS Code ### 37 | .vscode/ 38 | -------------------------------------------------------------------------------- /hello-kafka-es/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | bin/ 17 | !**/src/main/**/bin/ 18 | !**/src/test/**/bin/ 19 | 20 | ### IntelliJ IDEA ### 21 | .idea 22 | *.iws 23 | *.iml 24 | *.ipr 25 | out/ 26 | !**/src/main/**/out/ 27 | !**/src/test/**/out/ 28 | 29 | ### NetBeans ### 30 | /nbproject/private/ 31 | /nbbuild/ 32 | /dist/ 33 | /nbdist/ 34 | /.nb-gradle/ 35 | 36 | ### VS Code ### 37 | .vscode/ 38 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | bin/ 17 | !**/src/main/**/bin/ 18 | !**/src/test/**/bin/ 19 | 20 | ### IntelliJ IDEA ### 21 | .idea 22 | *.iws 23 | *.iml 24 | *.ipr 25 | out/ 26 | !**/src/main/**/out/ 27 | !**/src/test/**/out/ 28 | 29 | ### NetBeans ### 30 | /nbproject/private/ 31 | /nbbuild/ 32 | /dist/ 33 | /nbdist/ 34 | /.nb-gradle/ 35 | 36 | ### VS Code ### 37 | .vscode/ 38 | -------------------------------------------------------------------------------- /hello-kafka-microservices/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | bin/ 17 | !**/src/main/**/bin/ 18 | !**/src/test/**/bin/ 19 | 20 | ### IntelliJ IDEA ### 21 | .idea 22 | *.iws 23 | *.iml 24 | *.ipr 25 | out/ 26 | !**/src/main/**/out/ 27 | !**/src/test/**/out/ 28 | 29 | ### NetBeans ### 30 | /nbproject/private/ 31 | /nbbuild/ 32 | /dist/ 33 | /nbdist/ 34 | /.nb-gradle/ 35 | 36 | ### VS Code ### 37 | .vscode/ 38 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | bin/ 17 | !**/src/main/**/bin/ 18 | !**/src/test/**/bin/ 19 | 20 | ### IntelliJ IDEA ### 21 | .idea 22 | *.iws 23 | *.iml 24 | *.ipr 25 | out/ 26 | !**/src/main/**/out/ 27 | !**/src/test/**/out/ 28 | 29 | ### NetBeans ### 30 | /nbproject/private/ 31 | /nbbuild/ 32 | /dist/ 33 | /nbdist/ 34 | /.nb-gradle/ 35 | 36 | 
### VS Code ### 37 | .vscode/ 38 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | bin/ 17 | !**/src/main/**/bin/ 18 | !**/src/test/**/bin/ 19 | 20 | ### IntelliJ IDEA ### 21 | .idea 22 | *.iws 23 | *.iml 24 | *.ipr 25 | out/ 26 | !**/src/main/**/out/ 27 | !**/src/test/**/out/ 28 | 29 | ### NetBeans ### 30 | /nbproject/private/ 31 | /nbbuild/ 32 | /dist/ 33 | /nbdist/ 34 | /.nb-gradle/ 35 | 36 | ### VS Code ### 37 | .vscode/ 38 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | .gradle 3 | build/ 4 | !gradle/wrapper/gradle-wrapper.jar 5 | !**/src/main/**/build/ 6 | !**/src/test/**/build/ 7 | 8 | ### STS ### 9 | .apt_generated 10 | .classpath 11 | .factorypath 12 | .project 13 | .settings 14 | .springBeans 15 | .sts4-cache 16 | bin/ 17 | !**/src/main/**/bin/ 18 | !**/src/test/**/bin/ 19 | 20 | ### IntelliJ IDEA ### 21 | .idea 22 | *.iws 23 | *.iml 24 | *.ipr 25 | out/ 26 | !**/src/main/**/out/ 27 | !**/src/test/**/out/ 28 | 29 | ### NetBeans ### 30 | /nbproject/private/ 31 | /nbbuild/ 32 | /dist/ 33 | /nbdist/ 34 | /.nb-gradle/ 35 | 36 | ### VS Code ### 37 | .vscode/ 38 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/src/main/kotlin/hellostreams/Producer.kt: -------------------------------------------------------------------------------- 1 | package hellostreams 2 | 3 | 4 | import org.springframework.kafka.core.KafkaTemplate 5 | import org.springframework.scheduling.annotation.EnableScheduling 6 | import org.springframework.scheduling.annotation.Scheduled 7 | import org.springframework.stereotype.Service 8 | 9 | 10 | @Service 11 | @EnableScheduling 12 | class Producer(val kafka: KafkaTemplate) { 13 | 14 | @Scheduled(fixedRate = 1000) 15 | fun send() { 16 | val message = "hello kafka streams"; 17 | println("publish MESSAGE : $message") 18 | this.kafka.send("input-topic", message) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/src/main/java/hellokafka/Consumer.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.kafka.annotation.KafkaListener; 4 | import org.springframework.stereotype.Service; 5 | 6 | import java.util.ArrayList; 7 | import java.util.List; 8 | import java.util.concurrent.CountDownLatch; 9 | 10 | @Service 11 | public class Consumer { 12 | 13 | List data = new ArrayList<>(); 14 | CountDownLatch latch = new CountDownLatch(2); 15 | 16 | @KafkaListener(topics= {"hello-topic-ut"}, groupId = "kafka_java_id") 17 | void consume(String message) { 18 | System.out.println("RECEIVE message: $message"); 19 | data.add(message); 20 | latch.countDown(); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/src/main/kotlin/hellokafka/Producer.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | 4 | import org.springframework.kafka.core.KafkaTemplate 5 | import org.springframework.scheduling.annotation.EnableScheduling 6 | import org.springframework.scheduling.annotation.Scheduled 7 | import org.springframework.stereotype.Service 8 | import java.time.LocalDateTime 9 | 10 | @Service 11 | @EnableScheduling 12 | class Producer(val kafka: KafkaTemplate) { 13 | 14 | @Scheduled(fixedRate = 1000) 15 | fun send() { 16 | val message: String = "hello kafka " + LocalDateTime.now().toString() 17 | println("SEND MESSAGE : $message") 18 | 19 | this.kafka.send("hello-topic", message) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/kotlin/kafka/es/query/OrderController.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import org.springframework.http.ResponseEntity.ok 4 | import org.springframework.web.bind.annotation.GetMapping 5 | import org.springframework.web.bind.annotation.PathVariable 6 | import org.springframework.web.bind.annotation.RequestMapping 7 | import org.springframework.web.bind.annotation.RestController 8 | 9 | @RestController 10 | @RequestMapping("/orders") 11 | class OrderController(val repository: OrderRepository) { 12 | 13 | @GetMapping 14 | fun getAllOrders() = ok().body(repository.findAll()) 15 | 16 | @GetMapping("/{id}") 17 | fun getOrderById(@PathVariable id: String) = ok().body(repository.findById(id)) 18 | 19 | } 20 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/kotlin/microkafka/orders/OrderController.kt: -------------------------------------------------------------------------------- 1 | package microkafka.orders 2 | 3 | import microkafka.orders.model.OrderDto 4 | import microkafka.orders.model.toDTO 5 | import org.springframework.http.ResponseEntity.ok 6 | import org.springframework.web.bind.annotation.* 7 | 8 | @RestController 9 | @RequestMapping("/orders") 10 | class OrderController( 11 | val orderRepository: OrderRepository, 12 | val orderService: OrderService){ 13 | 14 | @GetMapping 15 | fun getAllOrders() = 16 | ok().body(orderRepository.findAll().toDTO()) 17 | 18 | @PostMapping 19 | fun newOrder(@RequestBody order: OrderDto) = 20 | ok().body(orderService.createOrder(order.userId!!, 
order.productId!!, order.amount!!)) 21 | } 22 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-command-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | /*annotationProcessor("org.springframework.boot:spring-boot-configuration-processor")*/ 10 | /*implementation("org.springframework.boot:spring-boot-starter-data-rest")*/ 11 | implementation("org.springframework.boot:spring-boot-starter-web") 12 | 13 | implementation("org.springframework.kafka:spring-kafka") 14 | implementation("org.apache.kafka:kafka-streams") 15 | 16 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 17 | implementation("org.jetbrains.kotlin:kotlin-reflect") 18 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 19 | 20 | } 21 | -------------------------------------------------------------------------------- /hello-kafka-es/order-command-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | /*annotationProcessor("org.springframework.boot:spring-boot-configuration-processor")*/ 10 | /*implementation("org.springframework.boot:spring-boot-starter-data-rest")*/ 11 | implementation("org.springframework.boot:spring-boot-starter-web") 12 | 13 | implementation("org.springframework.kafka:spring-kafka") 14 | implementation("org.apache.kafka:kafka-streams") 15 | 16 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 17 | implementation("org.jetbrains.kotlin:kotlin-reflect") 18 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 19 | 20 | } 21 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/src/main/kotlin/hellostreams/Consumer.kt: -------------------------------------------------------------------------------- 1 | package hellostreams 2 | 3 | import org.apache.kafka.clients.admin.NewTopic 4 | import org.springframework.context.annotation.Bean 5 | import org.springframework.kafka.annotation.KafkaListener 6 | import org.springframework.kafka.config.TopicBuilder 7 | import org.springframework.stereotype.Service 8 | import java.time.LocalDateTime 9 | 10 | @Service 11 | class Consumer() { 12 | 13 | @Bean 14 | fun outputTopic(): NewTopic { 15 | return TopicBuilder.name("output-topic").build() 16 | } 17 | 18 | @KafkaListener(topics= ["output-topic"], groupId = "kafka_kotlin_id") 19 | fun consume(message:String) : Unit { 20 | println("receive MESSAGE : $message at ${LocalDateTime.now()}"); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-command-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | /*annotationProcessor("org.springframework.boot:spring-boot-configuration-processor")*/ 10 | /*implementation("org.springframework.boot:spring-boot-starter-data-rest")*/ 11 | implementation("org.springframework.boot:spring-boot-starter") 12 | 13 | implementation("org.springframework.kafka:spring-kafka") 14 | 
implementation("org.apache.kafka:kafka-streams") 15 | 16 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 17 | implementation("org.jetbrains.kotlin:kotlin-reflect") 18 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 19 | 20 | } 21 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/src/main/java/hellokafka/Consumer.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.apache.kafka.clients.admin.NewTopic; 4 | import org.springframework.context.annotation.Bean; 5 | import org.springframework.kafka.annotation.KafkaListener; 6 | import org.springframework.kafka.config.TopicBuilder; 7 | import org.springframework.stereotype.Service; 8 | 9 | import java.time.LocalDateTime; 10 | 11 | @Service 12 | public class Consumer { 13 | 14 | @Bean 15 | public NewTopic outputTopic() { 16 | return TopicBuilder.name("output-topic").build(); 17 | } 18 | 19 | @KafkaListener(topics= { "output-topic" }, groupId = "kafka_java_id") 20 | public void consume(String message) { 21 | System.out.println("receive MESSAGE : " + message + " at " + LocalDateTime.now()); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 8884 3 | spring: 4 | kafka: 5 | listener: 6 | missing-topics-fatal: false 7 | consumer: 8 | group-id: microkafka-order-query-service 9 | value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer 10 | auto-offset-reset: latest 11 | properties: 12 | spring.json.trusted.packages: "*" 13 | spring.json.type.mapping: order:microkafka.escqrs.orders.query.model.OrderCreatedEvent 14 | datasource: 15 | url: jdbc:h2:mem:ms-escqrs-db 16 | driverClassName: org.h2.Driver 17 | username: sa 18 | password: 19 | jpa: 20 | show-sql: true 21 | hibernate: 22 | ddl-auto: create 23 | generate-ddl: true 24 | h2: 25 | console: 26 | enabled: true 27 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-command-service/src/main/kotlin/microkafka/escqrs/orders/command/OrderController.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.command 2 | 3 | import microkafka.escqrs.orders.command.model.OrderAmountUpdateCommand 4 | import microkafka.escqrs.orders.command.model.OrderCreateCommand 5 | import org.springframework.http.ResponseEntity.ok 6 | import org.springframework.web.bind.annotation.* 7 | 8 | @RestController 9 | @RequestMapping("/orders") 10 | class OrderController( 11 | val orderCommandHandler: OrderCommandHandler 12 | ) { 13 | @PostMapping 14 | fun newOrder(@RequestBody command: OrderCreateCommand) = 15 | ok().body(orderCommandHandler.handleCreate(command)) 16 | 17 | @PutMapping 18 | fun updateOrder(@RequestBody command: OrderAmountUpdateCommand) = 19 | ok().body(orderCommandHandler.handleUpdate(command)) 20 | } 21 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/src/main/java/hellokafka/Producer.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.kafka.core.KafkaTemplate; 5 | 
import org.springframework.scheduling.annotation.EnableScheduling; 6 | import org.springframework.scheduling.annotation.Scheduled; 7 | import org.springframework.stereotype.Service; 8 | 9 | import java.time.LocalDateTime; 10 | 11 | @Service 12 | @EnableScheduling 13 | public class Producer { 14 | 15 | @Autowired 16 | public final KafkaTemplate kafka = null; 17 | 18 | @Scheduled(fixedRate = 1000) 19 | public void send() { 20 | String message = "hello kafka streams"; 21 | System.out.println("publish MESSAGE : " + message ); 22 | 23 | this.kafka.send("input-topic", message); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /hello-kafka-api/java/src/main/java/hellokafka/Producer.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.kafka.core.KafkaTemplate; 5 | import org.springframework.scheduling.annotation.EnableScheduling; 6 | import org.springframework.scheduling.annotation.Scheduled; 7 | import org.springframework.stereotype.Service; 8 | 9 | import java.time.LocalDateTime; 10 | 11 | @Service 12 | @EnableScheduling 13 | public class Producer { 14 | 15 | @Autowired 16 | public final KafkaTemplate kafka = null; 17 | 18 | @Scheduled(fixedRate = 1000) 19 | public void send() { 20 | String message = "hello kafka " + LocalDateTime.now().toString(); 21 | System.out.println("SEND MESSAGE : " + message ); 22 | 23 | this.kafka.send("hello-topic", message); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See http://help.github.com/ignore-files/ for more about ignoring files. 
2 | 3 | # compiled output 4 | **/dist/** 5 | dist 6 | /tmp 7 | /out-tsc 8 | /data 9 | 10 | # Only exists if Bazel was run 11 | /bazel-out 12 | 13 | # dependencies 14 | node_modules 15 | 16 | # profiling files 17 | chrome-profiler-events*.json 18 | speed-measure-plugin*.json 19 | 20 | # IDEs and editors 21 | .idea/** 22 | .idea 23 | **/.idea/** 24 | .project 25 | .classpath 26 | .c9/ 27 | *.launch 28 | .settings/ 29 | *.sublime-workspace 30 | 31 | # IDE - VSCode 32 | .vscode/* 33 | !.vscode/settings.json 34 | !.vscode/tasks.json 35 | !.vscode/launch.json 36 | !.vscode/extensions.json 37 | .history/* 38 | 39 | # misc 40 | /.sass-cache 41 | /connect.lock 42 | /coverage 43 | /libpeerconnection.log 44 | npm-debug.log 45 | yarn-error.log 46 | testem.log 47 | /typings 48 | 49 | # System Files 50 | .DS_Store 51 | Thumbs.db 52 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") 10 | implementation("org.springframework.boot:spring-boot-starter-data-jpa") 11 | implementation("org.springframework.boot:spring-boot-starter-data-rest") 12 | implementation("org.springframework.boot:spring-boot-starter-web") 13 | 14 | implementation("org.springframework.kafka:spring-kafka") 15 | implementation("org.apache.kafka:kafka-streams") 16 | 17 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 18 | implementation("org.jetbrains.kotlin:kotlin-reflect") 19 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 20 | runtimeOnly("com.h2database:h2") 21 | } 22 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") 10 | implementation("org.springframework.boot:spring-boot-starter-data-jpa") 11 | implementation("org.springframework.boot:spring-boot-starter-data-rest") 12 | implementation("org.springframework.boot:spring-boot-starter-web") 13 | 14 | implementation("org.springframework.kafka:spring-kafka") 15 | implementation("org.apache.kafka:kafka-streams") 16 | 17 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 18 | implementation("org.jetbrains.kotlin:kotlin-reflect") 19 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 20 | runtimeOnly("com.h2database:h2") 21 | } 22 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 8882 3 | spring: 4 | kafka: 5 | producer: 6 | value-serializer: org.springframework.kafka.support.serializer.JsonSerializer 7 | listener: 8 | missing-topics-fatal: false 9 | consumer: 10 | group-id: microkafka-sample 11 | value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer 12 | auto-offset-reset: latest 13 | properties: 14 | 
spring.json.trusted.packages: "*" 15 | spring.json.type.mapping: order:microkafka.invoices.model.Order 16 | datasource: 17 | url: jdbc:h2:mem:ms-invoices-db 18 | driverClassName: org.h2.Driver 19 | username: sa 20 | password: 21 | jpa: 22 | show-sql: true 23 | hibernate: 24 | ddl-auto: create 25 | generate-ddl: true 26 | h2: 27 | console: 28 | enabled: true 29 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") 10 | implementation("org.springframework.boot:spring-boot-starter-data-jpa") 11 | implementation("org.springframework.boot:spring-boot-starter-data-rest") 12 | implementation("org.springframework.boot:spring-boot-starter-web") 13 | 14 | implementation("org.springframework.kafka:spring-kafka") 15 | implementation("org.apache.kafka:kafka-streams") 16 | 17 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 18 | implementation("org.jetbrains.kotlin:kotlin-reflect") 19 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 20 | runtimeOnly("com.h2database:h2") 21 | 22 | 23 | 24 | 25 | } 26 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") 10 | implementation("org.springframework.boot:spring-boot-starter-data-jpa") 11 | implementation("org.springframework.boot:spring-boot-starter-data-rest") 12 | implementation("org.springframework.boot:spring-boot-starter-web") 13 | 14 | implementation("org.springframework.kafka:spring-kafka") 15 | implementation("org.apache.kafka:kafka-streams") 16 | 17 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 18 | implementation("org.jetbrains.kotlin:kotlin-reflect") 19 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 20 | runtimeOnly("com.h2database:h2") 21 | 22 | 23 | 24 | 25 | } 26 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/OrderListener.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices 2 | 3 | import microkafka.invoices.model.Order 4 | import org.springframework.kafka.annotation.KafkaListener 5 | import org.springframework.stereotype.Service 6 | 7 | @Service 8 | class OrderListener(val invoiceService: InvoiceService) { 9 | 10 | // subscribes to the kafka topic "orders" 11 | // this function is called for every record of the stream that has not yet been processed 12 | @KafkaListener(topics= ["orders"]) 13 | fun consume(order: Order) : Unit { 14 | println(" order received from orders topic : $order"); 15 | 16 | // create a new invoice for each order received 17 | invoiceService.createInvoice(order); 18 | 19 | // received orders can also be saved in the local service DB for further processing 20 | // orderRepository.save(oder) 21 | } 22 | } 23 | 
-------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("org.springframework.boot") 3 | 4 | kotlin("jvm") 5 | kotlin("plugin.spring") 6 | } 7 | 8 | dependencies { 9 | annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") 10 | implementation("org.springframework.boot:spring-boot-starter-data-jpa") 11 | implementation("org.springframework.boot:spring-boot-starter-data-rest") 12 | implementation("org.springframework.boot:spring-boot-starter-web") 13 | 14 | implementation("org.springframework.kafka:spring-kafka") 15 | implementation("org.apache.kafka:kafka-streams") 16 | 17 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 18 | implementation("org.jetbrains.kotlin:kotlin-reflect") 19 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 20 | runtimeOnly("com.h2database:h2") 21 | 22 | 23 | 24 | 25 | } 26 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/model/Mapping.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query.model 2 | 3 | fun List.toDTO() = this.map { it?.toDTO() } 4 | 5 | fun OrderEntity.toDTO() = OrderDto( 6 | this.id, 7 | this.status, 8 | 9 | this.userId, 10 | this.productId, 11 | this.amount 12 | ) 13 | 14 | fun OrderDto.fromDTO(): OrderEntity { 15 | val self = this 16 | 17 | return OrderEntity().apply { 18 | id = self.id 19 | status = self.status 20 | 21 | userId = self.userId 22 | productId = self.productId 23 | amount = self.amount 24 | } 25 | } 26 | 27 | fun OrderEvent.fromEvent(): OrderEntity { 28 | val self = this 29 | 30 | return OrderEntity().apply { 31 | id = self.id 32 | 33 | userId = self.userId 34 | productId = self.productId 35 | amount = self.amount 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-query-service/src/main/kotlin/microkafka/escqrs/orders/query/OrderListener.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.query 2 | 3 | import microkafka.escqrs.orders.query.model.OrderEntity 4 | import microkafka.escqrs.orders.query.model.OrderEvent 5 | import microkafka.escqrs.orders.query.model.fromEvent 6 | import org.springframework.kafka.annotation.KafkaListener 7 | import org.springframework.stereotype.Service 8 | 9 | @Service 10 | class OrderListener(val orderRepository: OrderRepository) { 11 | 12 | // subscribes to the kafka topic "orders" 13 | // this function is called for every event(record) of specified topic that has not yet been processed 14 | @KafkaListener(topics= ["cqrs-orders"]) 15 | fun consume(event: OrderEvent) : Unit { 16 | println(" order received from orders topic : $event"); 17 | 18 | val order = event.fromEvent(); 19 | 20 | // persist the received order 21 | orderRepository.save(order) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /hello-kafka-api/java/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka API 2 | a tiny publish/subscribe example using Kafka + Spring + Maven + Java 3 | 4 |

5 | ## prerequisites 6 | 7 | - docker/docker-compose 8 | - local dns mapping: 127.0.0.1 kafka 9 | - maven/mvn 10 | - java sdk 1.8 11 | 12 |

13 | 14 | ## quickstart 15 | 16 | ### 1. get the source code 17 | 18 | ```shell 19 | git clone https://github.com/thecodemonkey/kafka-microservices.git 20 | ``` 21 | 22 | ### 2. start local Kafka Environment 23 | 24 | ```shell 25 | docker-compose -f docker-compose.yml up -d 26 | 27 | #stop kafka: docker-compose down 28 | ``` 29 | 30 | local dns setup => etc/hosts: 31 | 32 | 127.0.0.1 kafka 33 | 34 | ### 3. run sample application 35 | 36 | ```shell 37 | cd hello-kafka-api/java 38 | mvn spring-boot:run 39 | ``` 40 | 41 | ### 4. see console output 42 | 43 | ```bash 44 | SEND MESSAGE : hello kafka 2021-05-24T13:32:49.086 45 | RECEIVE MESSAGE : hello kafka 2021-05-24T13:32:49.086 46 | ``` 47 | 48 |

49 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka API 2 | a tiny publish/subscribe example using Kafka + Spring + Gradle + Kotlin 3 | 4 |

5 | 6 | ## prerequisites 7 | 8 | - docker/docker-compose 9 | - local dns mapping: 127.0.0.1 kafka 10 | - Gradle 11 | - java sdk 1.8 12 | - Kotlin 13 | 14 |

15 | 16 | ## quickstart 17 | 18 | ### 1. get the source code 19 | 20 | ```shell 21 | git clone https://github.com/thecodemonkey/kafka-microservices.git 22 | ``` 23 | 24 | ### 2. start local Kafka Environment 25 | 26 | ```shell 27 | docker-compose -f docker-compose.yml up -d 28 | 29 | #stop kafka: docker-compose down 30 | ``` 31 | 32 | local dns setup => etc/hosts: 33 | 34 | 127.0.0.1 kafka 35 | 36 | ### 3. run sample application 37 | 38 | ```shell 39 | cd hello-kafka-api/kotlin 40 | gradle bootRun 41 | ``` 42 | 43 | ### 4. see console output 44 | 45 | ```bash 46 | SEND MESSAGE : hello kafka 2021-05-24T13:32:49.086 47 | RECEIVE MESSAGE : hello kafka 2021-05-24T13:32:49.086 48 | ``` 49 | 50 |
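The `SEND MESSAGE` lines come from `Producer.kt`, which publishes to `hello-topic` once per second; the `RECEIVE MESSAGE` lines come from a plain `@KafkaListener`. A minimal sketch of such a listener (the group id here is illustrative; the module's actual `Consumer.kt` may differ):

```kotlin
package hellokafka

import org.springframework.kafka.annotation.KafkaListener
import org.springframework.stereotype.Service

@Service
class Consumer {

    // subscribes to the topic the producer publishes to and logs every record
    @KafkaListener(topics = ["hello-topic"], groupId = "hello-kafka-api")
    fun consume(message: String) {
        println("RECEIVE MESSAGE : $message")
    }
}
```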

51 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/src/main/kotlin/hellostreams/Processor.kt: -------------------------------------------------------------------------------- 1 | package hellostreams 2 | 3 | import org.apache.kafka.streams.StreamsBuilder 4 | import org.apache.kafka.streams.kstream.KStream 5 | import org.springframework.context.annotation.Bean 6 | import org.springframework.kafka.annotation.EnableKafkaStreams 7 | import org.springframework.stereotype.Service 8 | 9 | @Service 10 | @EnableKafkaStreams 11 | class Processor { 12 | 13 | @Bean 14 | fun processSimple(builder: StreamsBuilder): KStream<String, String> { 15 | val stream: KStream<String, String> = builder.stream("input-topic") 16 | 17 | stream.mapValues { value -> "##=> $value <=##" } // transform each record of the stream 18 | .peek { _, i -> println("process MESSAGE in stream: $i") } // outputs something in the console. debugging only 19 | .to("output-topic"); // publishes the transformed records to another topic 20 | 21 | return stream 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka Streams 2 | 3 | a tiny **Stream Processing** example using Kafka + Spring + Java + Maven 4 | 5 |

6 | 7 | ## prerequisites 8 | 9 | - docker/docker-compose 10 | - maven/mvn 11 | - java sdk 1.8 12 | 13 | 14 |

15 | 16 | ## quickstart 17 | 18 | ### 1. get the source code 19 | 20 | ```shell 21 | git clone https://github.com/thecodemonkey/kafka-microservices.git 22 | ``` 23 | 24 | ### 2. start local Kafka Environment 25 | 26 | ```shell 27 | docker-compose up -d 28 | #stop kafka: docker-compose down 29 | ``` 30 | 31 | ### 3. run sample application 32 | 33 | ```shell 34 | cd hello-kafka-streams/java 35 | mvn spring-boot:run 36 | ``` 37 | 38 | ### 4. see console output 39 | 40 | ```bash 41 | publish MESSAGE : hello kafka streams 42 | process MESSAGE in stream: ##=> hello kafka streams <=## 43 | receive MESSAGE : ##=> hello kafka streams <=## 44 | ``` 45 | 46 | Each of the 3 components (producer, processor and consumer) generates 47 | the corresponding console output. 48 | 49 |

50 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/src/main/java/hellokafka/Processor.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | import org.apache.kafka.streams.StreamsBuilder; 4 | import org.apache.kafka.streams.kstream.KStream; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.kafka.annotation.EnableKafkaStreams; 7 | import org.springframework.stereotype.Service; 8 | 9 | @Service 10 | @EnableKafkaStreams 11 | public class Processor { 12 | 13 | @Bean 14 | public KStream<String, String> processSimple(StreamsBuilder builder) { 15 | KStream<String, String> stream = builder.stream("input-topic"); 16 | 17 | stream.mapValues( value -> "##=> " + value + " <=##" ) // transform each record of the stream 18 | .peek( (k, i) -> System.out.println("process MESSAGE in stream: " + i) ) // outputs something in the console. debugging only 19 | .to("output-topic"); // publishes the transformed records to another topic 20 | 21 | return stream; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka Event Sourcing with the Kafka Streams API 2 | 3 | a tiny **Event Sourcing** example using Kafka + Spring + Kotlin + Gradle 4 | 5 |

6 | 7 | ## prerequisites 8 | 9 | - docker/docker-compose 10 | - gradle 11 | - java sdk 1.8 12 | - kotlin 13 | - local dns mapping: 127.0.0.1 kafka 14 | 15 |

16 | 17 | ## quickstart 18 | 19 | ### 1. get the source code 20 | 21 | ```shell 22 | git clone https://github.com/thecodemonkey/kafka-microservices.git 23 | ``` 24 | 25 | ### 2. start local Kafka Environment 26 | 27 | ```shell 28 | docker-compose -f docker-compose.yml up -d 29 | #stop kafka: docker-compose down 30 | ``` 31 | 32 | local dns setup => etc/hosts: 33 | 34 | 127.0.0.1 kafka 35 | 36 | ### 3. run sample application 37 | 38 | ```shell 39 | cd hello-kafka-es-streamsapi/kotlin 40 | gradle bootRun 41 | ``` 42 | 43 | ### 4. see console output 44 | 45 | ```bash 46 | produce event: OrderEvent(id=2, userId=1, ...) 47 | produce event: OrderEvent(id=3, userId=1, ...) 48 | ``` 49 | 50 |
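The interesting part of this sample is the query side: instead of writing each event to a database, the service lets Kafka Streams fold the `es-orders-streams` events into a key-value state store (`es-orders-store`), which `OrderRepository` then queries. The sketch below only illustrates that idea; the module's actual topology code may differ, and it assumes a JSON serde for `OrderEvent` is configured as the default value serde and that Kafka Streams is enabled (e.g. via `@EnableKafkaStreams`):

```kotlin
package kafka.es.query

import kafka.es.query.model.OrderEvent
import kafka.es.query.model.merge
import org.apache.kafka.common.utils.Bytes
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.kstream.KTable
import org.apache.kafka.streams.kstream.Materialized
import org.apache.kafka.streams.state.KeyValueStore
import org.springframework.context.annotation.Bean
import org.springframework.stereotype.Component

@Component
class OrderStateTopology {

    // folds every partial OrderEvent into the previous state of the same order id
    // and materializes the result as the "es-orders-store" read by OrderRepository
    @Bean
    fun orderState(builder: StreamsBuilder): KTable<String, OrderEvent> {
        val events: KStream<String, OrderEvent> = builder.stream("es-orders-streams")

        return events
            .groupByKey()
            .reduce(
                { state, update -> state merge update },   // `merge` is the infix helper defined next to OrderEvent
                Materialized.`as`<String, OrderEvent, KeyValueStore<Bytes, ByteArray>>("es-orders-store")
            )
    }
}
```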

51 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka Streams 2 | 3 | a tiny **Stream Processing** example using Kafka + Spring + Kotlin + Gradle 4 | 5 |

6 | 7 | ## prerequisites 8 | 9 | - docker/docker-compose 10 | - gradle 11 | - java sdk 1.8 12 | - kotlin 13 | 14 |

15 | 16 | ## quickstart 17 | 18 | ### 1. get the source code 19 | 20 | ```shell 21 | git clone https://github.com/thecodemonkey/kafka-microservices.git 22 | ``` 23 | 24 | ### 2. start local Kafka Environment 25 | 26 | ```shell 27 | docker-compose -f docker-compose.yml up -d 28 | #stop kafka: docker-compose down 29 | ``` 30 | 31 | ### 3. run sample application 32 | 33 | ```shell 34 | cd hello-kafka-streams/kotlin 35 | gradle bootRun 36 | ``` 37 | 38 | ### 4. see console output 39 | 40 | ```bash 41 | publish MESSAGE : hello kafka streams 42 | process MESSAGE in stream: ##=> hello kafka streams <=## 43 | receive MESSAGE : ##=> hello kafka streams <=## 44 | ``` 45 | 46 | Each of the 3 components (producer, processor and consumer) generates 47 | the corresponding console output. 48 | 49 |
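The transformation itself (see `Processor.kt`) can also be exercised without a running broker by using Kafka Streams' `TopologyTestDriver`. The sketch below rebuilds the same one-step topology; it assumes the `org.apache.kafka:kafka-streams-test-utils` test dependency (not declared in this module) and a kafka-streams version that ships `TestInputTopic`/`TestOutputTopic` (2.4 or newer):

```kotlin
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.StreamsConfig
import org.apache.kafka.streams.TopologyTestDriver
import java.util.Properties

fun main() {
    // rebuild the same topology as Processor.kt: wrap every value in "##=> ... <=##"
    val builder = StreamsBuilder()
    builder.stream<String, String>("input-topic")
        .mapValues { value -> "##=> $value <=##" }
        .to("output-topic")

    val props = Properties().apply {
        put(StreamsConfig.APPLICATION_ID_CONFIG, "topology-test")
        put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234")   // never contacted by the test driver
        put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde::class.java)
        put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde::class.java)
    }

    TopologyTestDriver(builder.build(), props).use { driver ->
        val input = driver.createInputTopic(
            "input-topic", Serdes.String().serializer(), Serdes.String().serializer())
        val output = driver.createOutputTopic(
            "output-topic", Serdes.String().deserializer(), Serdes.String().deserializer())

        input.pipeInput("key", "hello kafka streams")
        println(output.readValue())   // prints: ##=> hello kafka streams <=##
    }
}
```

Because the driver pushes records through the topology synchronously, the transformed value can be asserted immediately, which makes it a handy complement to the broker-based quickstart above.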

50 | -------------------------------------------------------------------------------- /hello-kafka-microservices/order-service/src/main/kotlin/microkafka/orders/OrderService.kt: -------------------------------------------------------------------------------- 1 | package microkafka.orders 2 | 3 | import microkafka.orders.model.Order 4 | import org.springframework.kafka.core.KafkaTemplate 5 | import org.springframework.stereotype.Service 6 | import java.util.* 7 | 8 | @Service 9 | class OrderService(val orderRepository: OrderRepository, 10 | val kafkaTemplate: KafkaTemplate) { 11 | 12 | fun createOrder(userId: String, productId: String, amount: Int) { 13 | 14 | val order = Order().also { 15 | it.id = UUID.randomUUID().toString() 16 | it.status = "NEW" 17 | it.userId = userId 18 | it.productId = productId 19 | it.amount = amount 20 | } 21 | 22 | // In this example, NO CQRS and NO event sourcing are implemented, 23 | // the order is immediately persisted in the local DB when it is created, 24 | // instead of receiving the corresponding event ! 25 | orderRepository.save(order); 26 | 27 | kafkaTemplate.send("orders", "create", order) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/kotlin/kafka/es/query/model/OrderEvent.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query.model 2 | 3 | import kotlin.reflect.full.declaredMemberProperties 4 | import kotlin.reflect.full.primaryConstructor 5 | 6 | data class OrderEvent ( 7 | var id: String? = null, 8 | 9 | var userId: String? = null, 10 | var productId: String? = null, 11 | var amount: Int? = null, 12 | var price: Int? = null 13 | ) 14 | 15 | inline infix fun T.merge(other: T): T { 16 | val nameToProperty = T::class.declaredMemberProperties.associateBy { it.name } 17 | val primaryConstructor = T::class.primaryConstructor!! 18 | val args = primaryConstructor.parameters.associateWith { parameter -> 19 | val property = nameToProperty[parameter.name]!! 20 | val valueOther = property.get(other) 21 | val valueThis = property.get(this) 22 | 23 | if (valueOther is Double && valueOther == 0.0) { 24 | valueThis 25 | } else { 26 | valueOther ?: valueThis 27 | } 28 | } 29 | return primaryConstructor.callBy(args) 30 | } 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Ilja leyberman 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") version "2.2.0.RELEASE" apply false 5 | id("io.spring.dependency-management") version "1.0.8.RELEASE" apply false 6 | kotlin("jvm") version "1.3.50" apply false 7 | kotlin("plugin.spring") version "1.3.50" apply false 8 | } 9 | 10 | allprojects { 11 | group = "com.tss.kafka.samples" 12 | version = "1.0.0" 13 | 14 | tasks.withType { 15 | sourceCompatibility = "1.8" 16 | targetCompatibility = "1.8" 17 | } 18 | 19 | tasks.withType { 20 | kotlinOptions { 21 | freeCompilerArgs = listOf("-Xjsr305=strict") 22 | jvmTarget = "1.8" 23 | } 24 | } 25 | } 26 | 27 | subprojects { 28 | repositories { 29 | mavenCentral() 30 | } 31 | 32 | apply { 33 | plugin("io.spring.dependency-management") 34 | } 35 | } 36 | 37 | task("start-kafka") { 38 | commandLine("docker-compose", "-f", "../docker-compose.yml", "up", "-d") 39 | } 40 | 41 | task("stop-kafka") { 42 | commandLine("docker-compose", "down") 43 | } 44 | -------------------------------------------------------------------------------- /hello-kafka-es/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") version "2.2.0.RELEASE" apply false 5 | id("io.spring.dependency-management") version "1.0.8.RELEASE" apply false 6 | kotlin("jvm") version "1.3.50" apply false 7 | kotlin("plugin.spring") version "1.3.50" apply false 8 | } 9 | 10 | allprojects { 11 | group = "com.tss.kafka.samples" 12 | version = "1.0.0" 13 | 14 | tasks.withType { 15 | sourceCompatibility = "1.8" 16 | targetCompatibility = "1.8" 17 | } 18 | 19 | tasks.withType { 20 | kotlinOptions { 21 | freeCompilerArgs = listOf("-Xjsr305=strict") 22 | jvmTarget = "1.8" 23 | } 24 | } 25 | } 26 | 27 | subprojects { 28 | repositories { 29 | mavenCentral() 30 | } 31 | 32 | apply { 33 | plugin("io.spring.dependency-management") 34 | } 35 | } 36 | 37 | task("start-kafka") { 38 | commandLine("docker-compose", "-f", "../docker-compose.yml", "up", "-d") 39 | } 40 | 41 | task("stop-kafka") { 42 | commandLine("docker-compose", "down") 43 | } 44 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/order-command-service/src/main/kotlin/microkafka/escqrs/orders/command/OrderCommandHandler.kt: -------------------------------------------------------------------------------- 1 | package microkafka.escqrs.orders.command 2 | 3 | import microkafka.escqrs.orders.command.model.* 4 | import org.springframework.kafka.core.KafkaTemplate 5 | import org.springframework.stereotype.Service 6 | import java.util.* 7 | 8 | @Service 9 | class OrderCommandHandler(val kafkaTemplate: KafkaTemplate) { 10 | 11 | fun handleCreate(command: OrderCreateCommand) { 12 | val event = OrderCreatedEvent( 13 | id = UUID.randomUUID().toString() 14 | ).apply { 15 | userId = command.userId 16 | productId = command.productId 17 | amount = command.amount 18 | } 19 | 20 | 
kafkaTemplate.send("cqrs-orders", "OrderCreated", event) 21 | } 22 | 23 | fun handleUpdate(command: OrderAmountUpdateCommand) { 24 | val event = OrderUpdatedEvent( 25 | id = UUID.randomUUID().toString() 26 | ).apply { 27 | orderId = command.orderId 28 | amount = command.amount 29 | } 30 | 31 | kafkaTemplate.send("cqrs-orders", "AmountUpdated", event) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/kotlin/kafka/es/query/OrderRepository.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import kafka.es.query.model.Order 4 | import kafka.es.query.model.OrderEvent 5 | import kafka.es.query.model.toOrder 6 | import org.apache.kafka.streams.state.QueryableStoreTypes 7 | import org.apache.kafka.streams.state.ReadOnlyKeyValueStore 8 | import org.springframework.kafka.config.StreamsBuilderFactoryBean 9 | import org.springframework.stereotype.Component 10 | 11 | @Component 12 | class OrderRepository(val streamsBuilderFactoryBean: StreamsBuilderFactoryBean) { 13 | 14 | // create store instance to access the current state of the orders 15 | fun store(): ReadOnlyKeyValueStore = 16 | streamsBuilderFactoryBean.kafkaStreams 17 | .store("es-orders-store", QueryableStoreTypes.keyValueStore()) 18 | 19 | fun findAll() : List { 20 | return store().all() 21 | .iterator() 22 | .asSequence() 23 | .toMutableList() 24 | .map { i -> i.value.toOrder() } 25 | } 26 | 27 | fun findById(id: String): Order { 28 | return store().get(id).toOrder() 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /hello-kafka-microservices/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") version "2.2.0.RELEASE" apply false 5 | id("io.spring.dependency-management") version "1.0.8.RELEASE" apply false 6 | kotlin("jvm") version "1.3.50" apply false 7 | kotlin("plugin.spring") version "1.3.50" apply false 8 | } 9 | 10 | allprojects { 11 | group = "com.tss.kafka.samples" 12 | version = "1.0.0" 13 | 14 | tasks.withType { 15 | sourceCompatibility = "1.8" 16 | targetCompatibility = "1.8" 17 | } 18 | 19 | tasks.withType { 20 | kotlinOptions { 21 | freeCompilerArgs = listOf("-Xjsr305=strict") 22 | jvmTarget = "1.8" 23 | } 24 | } 25 | } 26 | 27 | subprojects { 28 | repositories { 29 | mavenCentral() 30 | } 31 | 32 | apply { 33 | plugin("io.spring.dependency-management") 34 | } 35 | } 36 | 37 | task("start-kafka") { 38 | commandLine("docker-compose", "-f", "../docker-compose.yml", "up", "-d") 39 | } 40 | 41 | task("stop-kafka") { 42 | commandLine("docker-compose", "down") 43 | } 44 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") version "2.2.0.RELEASE" apply false 5 | id("io.spring.dependency-management") version "1.0.8.RELEASE" apply false 6 | kotlin("jvm") version "1.3.50" apply false 7 | kotlin("plugin.spring") version "1.3.50" apply false 8 | } 9 | 10 | allprojects { 11 | group = "com.tss.kafka.samples" 12 | version = "1.0.0" 13 | 14 | tasks.withType { 15 | sourceCompatibility = "1.8" 
16 | targetCompatibility = "1.8" 17 | } 18 | 19 | tasks.withType { 20 | kotlinOptions { 21 | freeCompilerArgs = listOf("-Xjsr305=strict") 22 | jvmTarget = "1.8" 23 | } 24 | } 25 | } 26 | 27 | subprojects { 28 | repositories { 29 | mavenCentral() 30 | } 31 | 32 | apply { 33 | plugin("io.spring.dependency-management") 34 | } 35 | } 36 | 37 | task("start-kafka") { 38 | commandLine("docker-compose", "-f", "../docker-compose.yml", "up", "-d") 39 | } 40 | 41 | task("stop-kafka") { 42 | commandLine("docker-compose", "down") 43 | } 44 | -------------------------------------------------------------------------------- /hello-kafka-es/order-command-service/src/main/kotlin/kafka/es/command/Producer.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.command 2 | 3 | 4 | import org.springframework.kafka.core.KafkaTemplate 5 | import org.springframework.scheduling.annotation.EnableScheduling 6 | import org.springframework.scheduling.annotation.Scheduled 7 | import org.springframework.stereotype.Service 8 | import kotlin.random.Random.Default.nextInt as rint 9 | 10 | 11 | @Service 12 | @EnableScheduling 13 | class Producer(val kafka: KafkaTemplate) { 14 | 15 | @Scheduled(fixedRate = 1000) 16 | fun send() { 17 | 18 | // simulate different orders 19 | val event = OrderEvent(rint(1, 4).toString(), "1"); 20 | 21 | // create random updates 22 | when(rint(0, 3)) { 23 | 0 -> event.amount = rint(1, 99) // update amount with random value (from 1 to 99) 24 | 1 -> event.price = rint(1, 99) // update price with random value (from 1 to 99) 25 | 2 -> event.productId = rint(1,99).toString() // update product with random value (from 1 to 99) 26 | } 27 | 28 | println("produce event: $event"); 29 | this.kafka.send("es-orders", event) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-command-service/src/main/kotlin/kafka/es/command/Producer.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.command 2 | 3 | 4 | import org.springframework.kafka.core.KafkaTemplate 5 | import org.springframework.scheduling.annotation.EnableScheduling 6 | import org.springframework.scheduling.annotation.Scheduled 7 | import org.springframework.stereotype.Service 8 | import kotlin.random.Random.Default.nextInt as rint 9 | 10 | 11 | @Service 12 | @EnableScheduling 13 | class Producer(val kafka: KafkaTemplate) { 14 | 15 | @Scheduled(fixedRate = 1000) 16 | fun send() { 17 | 18 | // simulate different orders 19 | val event = OrderEvent(rint(1, 4).toString(), "1"); 20 | 21 | // create random updates 22 | when(rint(0, 3)) { 23 | 0 -> event.amount = rint(1, 99) // update amount with random value (from 1 to 99) 24 | 1 -> event.price = rint(1, 99) // update price with random value (from 1 to 99) 25 | 2 -> event.productId = rint(1,99).toString() // update product with random value (from 1 to 99) 26 | } 27 | 28 | println("produce event: $event"); 29 | this.kafka.send("es-orders-streams", event.id!!, event) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /hello-kafka-es/order-query-service/src/main/kotlin/kafka/es/query/OrderListener.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import kafka.es.query.model.Order 4 | import kafka.es.query.model.OrderEvent 5 | import org.springframework.kafka.annotation.KafkaListener 6 | import 
org.springframework.stereotype.Service 7 | 8 | @Service 9 | class OrderListener(val repository: OrderRepository) { 10 | 11 | @KafkaListener(topics= ["es-orders"]) 12 | fun consume(event: OrderEvent) { 13 | 14 | // 1. load entity from local storage by id 15 | var order = repository.findById(event.id!!) 16 | 17 | // 2. create new order entity if not exists within local storage 18 | if (order == null) 19 | order = Order(event.id!!) 20 | 21 | // 3. partially update the entity 22 | order.apply { 23 | if (event.userId != null) this?.userId = event.userId 24 | if (event.productId != null) this?.productId = event.productId 25 | if (event.amount != null) this?.amount = event.amount 26 | if (event.price != null) this?.price = event.price 27 | } 28 | 29 | // 4. updates local storage 30 | repository.save(order!!); 31 | 32 | println("RECEIVE MESSAGE : $order") 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Description 4 | 5 | 6 | ## Motivation and Context 7 | 8 | 9 | 10 | ## Types of changes 11 | 12 | - [ ] Bug fix (non-breaking change which fixes an issue) 13 | - [ ] New feature (non-breaking change which adds functionality) 14 | - [ ] Breaking change (fix or feature that would cause compatibility issues) 15 | - [ ] DevOps change (fix or alteration in a build pipeline or config file) 16 | 17 | ## Checklist: 18 | 19 | 20 | - [ ] My code follows the code style of this project. 21 | - [ ] My change requires a change to the documentation. 22 | - [ ] I have updated the documentation accordingly. 23 | - [ ] I have added tests to cover my changes. 24 | - [ ] All new and existing tests passed. 25 | - [ ] I have added the upcoming release notes accordingly. 26 | -------------------------------------------------------------------------------- /hello-kafka-microservices/invoice-service/src/main/kotlin/microkafka/invoices/InvoiceService.kt: -------------------------------------------------------------------------------- 1 | package microkafka.invoices 2 | 3 | import microkafka.invoices.model.Invoice 4 | import microkafka.invoices.model.Order 5 | import org.springframework.kafka.core.KafkaTemplate 6 | import org.springframework.stereotype.Service 7 | import java.util.* 8 | 9 | @Service 10 | class InvoiceService(val invoiceRepository: InvoiceRepository, 11 | val kafkaTemplate: KafkaTemplate) { 12 | 13 | fun createInvoice(order: Order): Invoice { 14 | 15 | val invoice = Invoice().also { 16 | it.id = UUID.randomUUID().toString() 17 | it.orderId = order.id 18 | it.userId = order.userId 19 | it.amount = order.amount?.times(0.5) // possible implementation sum of (productRepository.getProducts(order.productId's)) 20 | } 21 | 22 | // In this example NO CQRS and NO event sourcing are implemented, 23 | // the invoice is immediately persisted in the local DB when it is created, 24 | // instead of receiving the corresponding event ! 
25 | invoiceRepository.save(invoice); 26 | 27 | kafkaTemplate.send("invoices", "create", invoice) 28 | 29 | return invoice; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.4" 2 | 3 | services: 4 | zookeeper: 5 | image: zookeeper:3.7.0 6 | ports: 7 | - "2181:2181" 8 | 9 | kafka: 10 | container_name: kafka 11 | image: wurstmeister/kafka:2.12-2.5.0 12 | ports: 13 | - "9092:9092" 14 | depends_on: 15 | - zookeeper 16 | environment: 17 | KAFKA_ADVERTISED_HOST_NAME: kafka 18 | KAFKA_ADVERTISED_PORT: "9092" 19 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 20 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 21 | KAFKA_MESSAGE_MAX_BYTES: 104858800 22 | volumes: 23 | - /var/run/docker.sock:/var/run/docker.sock 24 | 25 | kafka-rest: 26 | image: confluentinc/cp-kafka-rest:4.1.1 27 | hostname: kafka-rest 28 | ports: 29 | - "38082:38082" 30 | depends_on: 31 | - kafka 32 | environment: 33 | KAFKA_REST_ZOOKEEPER_CONNECT: zookeeper:2181 34 | KAFKA_REST_HOST_NAME: kafka-rest 35 | KAFKA_REST_LISTENERS: http://kafka-rest:38082 36 | 37 | kafka-web-gui: 38 | image: tchiotludo/akhq:0.17.0 39 | ports: 40 | - "8081:8080" 41 | environment: 42 | AKHQ_CONFIGURATION: | 43 | akhq: 44 | connections: 45 | docker-kafka-server: 46 | properties: 47 | bootstrap.servers: "kafka:9092" 48 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") version "2.2.7.RELEASE" 5 | id("io.spring.dependency-management") version "1.0.9.RELEASE" 6 | kotlin("jvm") version "1.3.72" 7 | kotlin("plugin.spring") version "1.3.72" 8 | } 9 | 10 | group = "com.tss.kafka.samples" 11 | version = "0.0.1-SNAPSHOT" 12 | java.sourceCompatibility = JavaVersion.VERSION_1_8 13 | 14 | repositories { 15 | mavenCentral() 16 | } 17 | 18 | dependencies { 19 | annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") 20 | implementation("org.springframework.boot:spring-boot-starter") 21 | 22 | 23 | implementation("org.springframework.kafka:spring-kafka") 24 | 25 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 26 | implementation("org.jetbrains.kotlin:kotlin-reflect") 27 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 28 | 29 | } 30 | 31 | tasks.withType { 32 | kotlinOptions { 33 | freeCompilerArgs = listOf("-Xjsr305=strict") 34 | jvmTarget = "1.8" 35 | } 36 | } 37 | 38 | task("start-kafka") { 39 | commandLine("docker-compose", "-f", "../../docker-compose.yml", "up", "-d") 40 | } 41 | 42 | task("stop-kafka") { 43 | commandLine("docker-compose", "down") 44 | } 45 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") version "2.2.7.RELEASE" 5 | id("io.spring.dependency-management") version "1.0.9.RELEASE" 6 | kotlin("jvm") version "1.3.72" 7 | kotlin("plugin.spring") version "1.3.72" 8 | } 9 | 10 | group = "com.tss.kafka.samples" 11 | version = "0.0.1-SNAPSHOT" 12 | java.sourceCompatibility = JavaVersion.VERSION_1_8 13 | 14 | 
repositories { 15 | mavenCentral() 16 | } 17 | 18 | dependencies { 19 | annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") 20 | implementation("org.springframework.boot:spring-boot-starter") 21 | 22 | implementation("org.springframework.kafka:spring-kafka") 23 | implementation("org.apache.kafka:kafka-streams") 24 | 25 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 26 | implementation("org.jetbrains.kotlin:kotlin-reflect") 27 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 28 | } 29 | 30 | tasks.withType { 31 | kotlinOptions { 32 | freeCompilerArgs = listOf("-Xjsr305=strict") 33 | jvmTarget = "1.8" 34 | } 35 | } 36 | 37 | task("start-kafka") { 38 | commandLine("docker-compose", "-f", "../docker-compose.yml", "up", "-d") 39 | } 40 | 41 | task("stop-kafka") { 42 | commandLine("docker-compose", "down") 43 | } 44 | -------------------------------------------------------------------------------- /hello-kafka-api/java/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.5.1 9 | 10 | 11 | il.samples 12 | hello-kafka-api 13 | 0.0.1-SNAPSHOT 14 | hello-kafka-api 15 | Demo project for Kafka Spring Java 16 | 17 | 1.8 18 | 19 | 20 | 21 | org.springframework.boot 22 | spring-boot-starter 23 | 24 | 25 | org.springframework.kafka 26 | spring-kafka 27 | 28 | 29 | 30 | 31 | 32 | org.springframework.boot 33 | spring-boot-maven-plugin 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | plugins { 4 | id("org.springframework.boot") version "2.2.7.RELEASE" 5 | id("io.spring.dependency-management") version "1.0.9.RELEASE" 6 | kotlin("jvm") version "1.3.72" 7 | kotlin("plugin.spring") version "1.3.72" 8 | } 9 | 10 | group = "com.tss.kafka.samples" 11 | version = "0.0.1-SNAPSHOT" 12 | java.sourceCompatibility = JavaVersion.VERSION_1_8 13 | 14 | repositories { 15 | mavenCentral() 16 | } 17 | 18 | dependencies { 19 | implementation("org.springframework.boot:spring-boot-starter") 20 | testImplementation("org.springframework.boot:spring-boot-starter-test") 21 | 22 | implementation("org.springframework.kafka:spring-kafka") 23 | implementation("org.springframework.kafka:spring-kafka-test") 24 | implementation("org.testcontainers:kafka:1.15.0") 25 | implementation("org.testcontainers:junit-jupiter:1.15.0") 26 | 27 | 28 | implementation("com.fasterxml.jackson.module:jackson-module-kotlin") 29 | implementation("org.jetbrains.kotlin:kotlin-reflect") 30 | implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") 31 | } 32 | 33 | tasks.withType { 34 | kotlinOptions { 35 | freeCompilerArgs = listOf("-Xjsr305=strict") 36 | jvmTarget = "1.8" 37 | } 38 | } 39 | 40 | tasks.withType { 41 | useJUnitPlatform() 42 | } 43 | 44 | task("start-kafka") { 45 | commandLine("docker-compose", "-f", "../docker-compose.yml", "up", "-d") 46 | } 47 | 48 | task("stop-kafka") { 49 | commandLine("docker-compose", "down") 50 | } 51 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/src/test/kotlin/hellokafka/EmbeddedKafkaTest.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | import 
org.assertj.core.api.Assertions.assertThat 4 | import org.junit.jupiter.api.Test 5 | import org.springframework.beans.factory.annotation.Autowired 6 | import org.springframework.boot.test.context.SpringBootTest 7 | import org.springframework.kafka.test.context.EmbeddedKafka 8 | import org.springframework.test.annotation.DirtiesContext 9 | import java.util.concurrent.CountDownLatch 10 | import java.util.concurrent.TimeUnit 11 | 12 | 13 | @SpringBootTest(properties = [ 14 | "spring.kafka.consumer.auto-offset-reset=earliest", 15 | "spring.kafka.consumer.group-id=unit-test-client", 16 | "spring.kafka.bootstrap-servers=localhost:9099" 17 | ]) 18 | @DirtiesContext // resets the context between tests 19 | @EmbeddedKafka(brokerProperties = ["listeners=PLAINTEXT://localhost:9099", "port=9099"]) 20 | class EmbeddedKafkaTest( 21 | @Autowired 22 | private val consumer: Consumer, 23 | @Autowired 24 | private val producer: Producer 25 | ) { 26 | 27 | @Test 28 | fun `should publish and receive 2 messages`() { 29 | val msg = "helllllo"; 30 | 31 | consumer.latch = CountDownLatch(2) 32 | 33 | producer.send(msg); 34 | producer.send(msg); 35 | 36 | // wait max 2 sec. 37 | consumer.latch.await(2000, TimeUnit.MILLISECONDS) 38 | 39 | assertThat(consumer.latch.count).isEqualTo(0) 40 | assertThat(consumer.data).isNotNull 41 | assertThat(consumer.data.count()).isEqualTo(2) 42 | assertThat(consumer.data[0]).isEqualTo(msg) 43 | assertThat(consumer.data[1]).isEqualTo(msg) 44 | } 45 | } 46 | 47 | 48 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.5.1 9 | 10 | 11 | il.samples 12 | hello-kafka-streams 13 | 0.0.1-SNAPSHOT 14 | hello-kafka-streams 15 | Demo project for Kafka Streams API in Java 16 | 17 | 1.8 18 | 19 | 20 | 21 | org.springframework.boot 22 | spring-boot-starter 23 | 24 | 25 | org.springframework.kafka 26 | spring-kafka 27 | 28 | 29 | org.apache.kafka 30 | kafka-streams 31 | 32 | 33 | 34 | 35 | 36 | org.springframework.boot 37 | spring-boot-maven-plugin 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/src/test/java/hellokafka/EmbeddedKafkaTest.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | 4 | import org.assertj.core.api.Assertions; 5 | import org.junit.jupiter.api.Test; 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.boot.test.context.SpringBootTest; 8 | import org.springframework.kafka.test.context.EmbeddedKafka; 9 | import org.springframework.test.annotation.DirtiesContext; 10 | 11 | import java.util.concurrent.CountDownLatch; 12 | import java.util.concurrent.TimeUnit; 13 | 14 | @SpringBootTest(properties = { 15 | "spring.kafka.consumer.auto-offset-reset=earliest", 16 | "spring.kafka.consumer.group-id=unit-test-client", 17 | "spring.kafka.bootstrap-servers=localhost:9099" 18 | }) 19 | @DirtiesContext // resets the context between tests 20 | @EmbeddedKafka(brokerProperties = {"listeners=PLAINTEXT://localhost:9099", "port=9099"}) 21 | class EmbeddedKafkaTest { 22 | @Autowired 23 | public Consumer consumer; 24 | @Autowired 25 | public Producer producer; 26 | 27 | @Test 28 | public void should_publish_and_receive_2_messages() throws InterruptedException { 29 | String msg 
= "helllllo"; 30 | 31 | consumer.latch = new CountDownLatch(2); 32 | 33 | producer.send(msg); 34 | producer.send(msg); 35 | 36 | // wait max 2 sec. 37 | consumer.latch.await(2000, TimeUnit.MILLISECONDS); 38 | 39 | Assertions.assertThat(consumer.latch.getCount()).isEqualTo(0); 40 | Assertions.assertThat(consumer.data).isNotNull(); 41 | Assertions.assertThat(consumer.data.size()).isEqualTo(2); 42 | Assertions.assertThat(consumer.data.get(0)).isEqualTo(msg); 43 | Assertions.assertThat(consumer.data.get(1)).isEqualTo(msg); 44 | } 45 | } 46 | 47 | 48 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/order-query-service/src/main/kotlin/kafka/es/query/OrderListener.kt: -------------------------------------------------------------------------------- 1 | package kafka.es.query 2 | 3 | import kafka.es.query.model.OrderEvent 4 | import kafka.es.query.model.merge 5 | import org.apache.kafka.common.serialization.Serde 6 | import org.apache.kafka.common.serialization.Serdes 7 | import org.apache.kafka.streams.StreamsBuilder 8 | import org.apache.kafka.streams.kstream.* 9 | import org.springframework.beans.factory.annotation.Autowired 10 | import org.springframework.context.annotation.Bean 11 | import org.springframework.kafka.annotation.EnableKafkaStreams 12 | import org.springframework.kafka.support.serializer.JsonDeserializer 13 | import org.springframework.kafka.support.serializer.JsonSerializer 14 | import org.springframework.stereotype.Service 15 | 16 | 17 | @Service 18 | @EnableKafkaStreams 19 | class OrderListener() { 20 | 21 | @Bean 22 | fun getSerde(): Serde { 23 | // create serde to serialize and deserialize OrderEvent 24 | val orderSer: JsonSerializer = JsonSerializer() 25 | // param false disables exact type mapping while deserializing 26 | val orderDes = JsonDeserializer(OrderEvent::class.java, false) 27 | return Serdes.serdeFrom(orderSer, orderDes) 28 | } 29 | 30 | @Autowired 31 | fun eventProcessor(builder: StreamsBuilder, serde: Serde) { 32 | 33 | builder.stream("es-orders-streams", Consumed.with(Serdes.String(), serde)) // subscribe to stream with serde 34 | .peek { key, value -> println("process item: $key - $value")} // just debug output 35 | .groupByKey(Grouped.with(Serdes.String(), serde)) // groupByKey with serde! 36 | .reduce( 37 | { order, order2 -> // current state orderEvent and a new one 38 | order.merge(order2) // update current state partially 39 | }, 40 | Materialized.`as`("es-orders-store")) // persists current order state to store. 
41 | } 42 | } 43 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.5.1 9 | 10 | 11 | il.samples 12 | hello-kafka-api 13 | 0.0.1-SNAPSHOT 14 | hello-kafka-api 15 | Demo project for Kafka Spring Java 16 | 17 | 1.8 18 | 19 | 20 | 21 | org.springframework.boot 22 | spring-boot-starter 23 | 24 | 25 | org.springframework.boot 26 | spring-boot-starter-test 27 | 28 | 29 | 30 | 31 | org.springframework.kafka 32 | spring-kafka 33 | 34 | 35 | org.springframework.kafka 36 | spring-kafka-test 37 | 38 | 39 | org.testcontainers 40 | kafka 41 | 1.15.0 42 | 43 | 44 | org.testcontainers 45 | junit-jupiter 46 | 1.15.0 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | org.springframework.boot 55 | spring-boot-maven-plugin 56 | 57 | 58 | 59 | 60 | -------------------------------------------------------------------------------- /hello-kafka-api/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka API 2 | a tiny publish/subscribe example using Kafka + Spring + Kotlin + Gradle 3 | 4 | ![alt text](../docs/kafka-publish-subscribe.png) 5 | 6 | 7 | # Example 8 | 9 | This example demonstrates a very simple use of publish/subscribe in Kafka. 10 | There are only 2 components, Producer and Consumer. Both are implemented as Spring @Services and run completely asynchronously. 11 | The two components could also be outsourced to separate applications or 2 separate processes. 12 | 13 |

14 | 15 | ### Producer 16 | 17 | The producer sends a new message (a simple string) every second to a Kafka topic called **hello-topic**. 18 | For checking purposes, the timestamp is appended: 19 | 20 | The implementation of the producer is really easy, especially thanks to Spring: 21 | 22 | ```kotlin 23 | @Scheduled(fixedRate = 1000) 24 | fun send() { 25 | val message: String = "hello kafka " + LocalDateTime.now().toString() 26 | println("SEND MESSAGE : $message") 27 | 28 | this.kafka.send("hello-topic", message) // this.kafka: KafkaTemplate, a generic injectable high-level Spring component 29 | } 30 | ``` 31 | 32 | console output: 33 | 34 | ```bash 35 | SEND MESSAGE : hello kafka 2021-05-24T13:32:49.086 36 | ``` 37 | 38 | 

39 | 40 | ### Consumer 41 | 42 | The consumer subscribes to the topic **hello-topic** and receives a new message (a string) 43 | every time it arrives in the Kafka topic. 44 | 45 | The implementation of the consumer is also very simple: 46 | 47 | ```kotlin 48 | @KafkaListener(topics= ["hello-topic"], groupId = "kafka_kotlin_id") 49 | fun consume(message: String) { 50 | println("RECEIVE MESSAGE : $message"); 51 | } 52 | ``` 53 | 54 | console output: 55 | 56 | ```bash 57 | RECEIVE MESSAGE : hello kafka 2021-05-24T13:32:49.086 58 | ``` 59 | 60 | 

61 | 62 | ### alternative implementation 63 | 64 | In the example, **KafkaTemplate** is used to send messages. 65 | This is a preconfigured high-level component provided by the Spring Framework. 66 | 67 | Alternatively, you can also use the low-level component **KafkaProducer** to send messages. 68 | However, this requires some parameters that have to be preconfigured before messages can be sent to Kafka: 69 | 70 | ```kotlin 71 | 72 | val map = mutableMapOf<String, Any>() 73 | map["key.serializer"] = "org.apache.kafka.common.serialization.StringSerializer" 74 | map["value.serializer"] = "org.apache.kafka.common.serialization.StringSerializer" 75 | map["bootstrap.servers"] = "localhost:9092" 76 | 77 | val producer = KafkaProducer<String, String>(map) 78 | producer.send(ProducerRecord("hello-topic", message)) 79 | 80 | ``` 81 | 82 | The same applies to the consumer. There is also the low-level component **KafkaConsumer**, with which you can receive messages; a minimal sketch follows below. 83 | 84 | 
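A minimal sketch of such a low-level consumer could look like this (the property values and the endless poll loop are illustrative and not taken from this project):

```kotlin
import org.apache.kafka.clients.consumer.KafkaConsumer
import java.time.Duration

val props = mutableMapOf<String, Any>()
props["key.deserializer"] = "org.apache.kafka.common.serialization.StringDeserializer"
props["value.deserializer"] = "org.apache.kafka.common.serialization.StringDeserializer"
props["bootstrap.servers"] = "localhost:9092"
props["group.id"] = "kafka_kotlin_id"

val consumer = KafkaConsumer<String, String>(props)
consumer.subscribe(listOf("hello-topic"))

// each poll returns the records published since the last poll
while (true) {
    val records = consumer.poll(Duration.ofMillis(500))
    records.forEach { println("RECEIVE MESSAGE : ${it.value()}") }
}
```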

85 | 86 | 87 | ----- 88 | 89 | ## About Kafka API 90 | 91 |
92 | ![kafka api](../docs/kafka-api.png) 93 | 
94 | 95 | ... 96 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /hello-kafka-es/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 
3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /hello-kafka-microservices/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /hello-kafka-streams/kotlin/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/kotlin/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /hello-kafka-es-streamsapi/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka Event Sourcing with Streams 2 | 3 | a minimal Event Sourcing implementation 4 | using Kafka Streams API + Spring + Kotlin + Gradle 5 | 6 | ![event sourcing](../docs/kafka-event-sourcing.png) 7 | 8 | ## Example 9 | 10 | xxx 11 | 12 |

13 | 14 | ### Order Command Service 15 | 16 | The OrderCommandService creates a new order event every second and publishes it to the Kafka topic **"es-orders-streams"**. 17 | This is intended to simulate a continuous flow of data/events. 18 | 19 | Different order updates are simulated: price changes, quantity changes and product changes. 20 | 21 | The Command Service is really very simple and does not contain any 22 | event sourcing magic other than creating events. 23 | 24 | This is where the Kafka system does most of the work: all order events published to the "es-orders-streams" topic are stored in chronological order. 25 | 26 | > The command service itself does not contain its own database, nor does it call the query service directly. 27 | > The Kafka topic "es-orders-streams" is the only link between these two services. 28 | 29 | 
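The one detail that matters for the streams-based query side is that the Producer (shown earlier in this repository) sends every event with the order id as the message key, so all events of one order end up in the same partition and can later be grouped and reduced by key. A trimmed-down sketch of that send:

```kotlin
@Scheduled(fixedRate = 1000)
fun send() {
    // simulate an update for one of the orders 1..3
    val event = OrderEvent(rint(1, 4).toString(), "1")

    // key = order id, value = the (partial) order event
    this.kafka.send("es-orders-streams", event.id!!, event)
}
```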

30 | 31 | ### Order Query Service 32 | 33 | xxx 34 | 35 | > The Query Service itself neither accesses a database nor does it call the Command Service. 36 | > The Kafka topic "es-orders-streams" is the only link between these two services. 37 | > The current state of the orders lives in the state store "es-orders-store", which is itself backed by a Kafka changelog topic. 38 | > When the service is started, the current state is restored and then updated continuously 39 | > as long as the service is running. 40 | 41 | The Kafka Streams processor only needs the application-id: 42 | 43 | ```yaml 44 | 45 | spring: 46 | kafka: 47 | streams: 48 | application-id: hello-es-streams 49 | 50 | ``` 51 | 52 | 
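The query service exposes the reduced state via an interactive query. A minimal sketch of such a lookup could look like this (the controller name, the mapping and the injection of `StreamsBuilderFactoryBean` are assumptions for illustration, not necessarily the exact classes used in `order-query-service`; only the store name "es-orders-store" comes from the listener shown in this repository):

```kotlin
import org.apache.kafka.streams.state.QueryableStoreTypes
import org.springframework.kafka.config.StreamsBuilderFactoryBean
import org.springframework.web.bind.annotation.*

@RestController
class OrderQueryController(val streams: StreamsBuilderFactoryBean) {

    @GetMapping("/orders/{id}")
    fun order(@PathVariable id: String): OrderEvent? {
        // look up the state store materialized by the reduce() in the OrderListener
        val store = streams.kafkaStreams.store(
            "es-orders-store",
            QueryableStoreTypes.keyValueStore<String, OrderEvent>()
        )
        return store.get(id)
    }
}
```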

53 | 54 | ## start application 55 | 56 | ## prerequisites 57 | 58 | - docker/docker-compose 59 | - gradle 60 | - java sdk 1.8 61 | - kotlin 62 | - local dns mapping: 127.0.0.1 kafka 63 | 64 |

65 | 66 | ```shell 67 | # 1. get project sources from git 68 | git clone https://github.com/thecodemonkey/kafka-microservices.git 69 | 70 | # 2. local dns setup => etc/hosts => 127.0.0.1 kafka 71 | 72 | cd hello-kafka-es-streamsapi/kotlin # 3. go to project root folder 73 | 74 | gradle start-kafka # 4. start kafka infrastructure (zookeeper, kafka, web gui) as docker containers. 75 | # see docker-compose.yml for more details 76 | 77 | gradle :order-command-service:bootRun # 5. start command service (publishes random order events to the Kafka topic) 78 | gradle :order-query-service:bootRun # 6. start query service (listens to the Kafka topic for order events, restores the order state in the Kafka state store and provides a REST endpoint to query (interactive query) the current order state) 79 | 80 | ``` 81 | 82 | ## event sourcing in action 83 | 84 | ```shell 85 | 86 | # 1. start the producer/command service to create random order events 87 | gradle :order-command-service:bootRun 88 | 89 | # 2. start the query service to build the current state of the orders 90 | gradle :order-query-service:bootRun 91 | 92 | # 3. get the current state of the orders 93 | curl 'http://localhost:8887/orders' 94 | curl 'http://localhost:8887/orders/1' 95 | 96 | 97 | ``` 98 | 99 | 

100 | 101 | --- 102 | 103 | ## About Event Sourcing with Streams API 104 | 105 | xxx 106 | 107 |

108 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka CQRS 2 | 3 | a minimal CQRS microservice implementation 4 | using Kafka + Spring + Kotlin + Gradle + H2 5 | 6 | ![alt text](../docs/kafka-cqrs.png) 7 | 8 | 9 | ## Example 10 | 11 | in this example the CQRS pattern is implemented. 12 | The purpose of CQRS (Command-Query-Responsibility-Segregation) lies in the decoupling of 13 | read and write operations. To demonstrate this decoupling, 14 | 2 separate services **order-query-service** and **order-command-service** were implemented in this example. 15 | 16 | In the example, an order can be sent via the command service and queried via the query service. 17 | 18 |

19 | 20 | ### Order-Command-Service 21 | Order-Command-Service provides a REST API for creating and changing orders: 22 | 23 | POST: /orders 24 | PUT: /orders 25 | 26 | The order commands (simple DTOs) sent to the REST endpoint are transformed into order events in the **CommandHandler**. 27 | The order events are then published to the Kafka topic **cqrs-orders**. 28 | 29 | >The order command service runs on its own port (8883), has no access to any database 30 | >and does not call the query service either. The service also exclusively uses its own data models. 31 | > 32 | > **This service is decoupled.** 33 | > 34 | > The only existing link is the connection to the Kafka topic **cqrs-orders**. 35 | 36 | 37 | ### Order-Query-Service 38 | 39 | The Order Query Service consists of 2 components: the asynchronous OrderListener, which subscribes to the Kafka topic **cqrs-orders** 40 | and processes all received OrderEvents, and a REST endpoint for querying orders. The corresponding changes (order events) are persisted in the local DB (H2), as sketched below. 41 | The local DB therefore always contains the current status of all orders. The change history, on the other hand, lives in the Kafka topic "cqrs-orders". 42 | 
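A minimal sketch of that listener (simplified; the real classes live in `order-query-service`, and the entity and repository shapes here are assumptions, including that `OrderRepository` is a Spring Data repository):

```kotlin
import org.springframework.kafka.annotation.KafkaListener
import org.springframework.stereotype.Service

@Service
class OrderListener(val repository: OrderRepository) {

    @KafkaListener(topics = ["cqrs-orders"])
    fun consume(event: OrderEvent) {
        // load the current entity, or create it for the first event of this order
        val order = repository.findById(event.id!!).orElseGet { OrderEntity(event.id!!) }

        // apply only the fields carried by this event (partial update)
        event.userId?.let { order.userId = it }
        event.productId?.let { order.productId = it }
        event.amount?.let { order.amount = it }

        // the local H2 database now holds the latest state of the order
        repository.save(order)
    }
}
```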

43 | 44 | >The order query service also runs on its own port: 8884 and has access to its own local DB. 45 | >The Query Service does not call the Command Service either. The service also exclusively uses its own data models 46 | > 47 | > **This service is decoupled.** 48 | > 49 | >The only existing link is the connection to the Kafka Topic "cqrs-orders" 50 | 51 | ## start application 52 | 53 | ## prerequisites 54 | 55 | - docker/docker-compose 56 | - gradle 57 | - java sdk 1.8 58 | - kotlin 59 | - local dns mapping: 127.0.0.1 kafka 60 | 61 |

62 | 63 | ```shell 64 | # 1. get project sources from git 65 | git clone https://github.com/thecodemonkey/kafka-microservices.git 66 | 67 | # 2. local dns setup => etc/hosts => 127.0.0.1 kafka 68 | 69 | cd hello-kafka-cqrs # 3. go to project root folder 70 | 71 | gradle start-kafka # 4. start kafka infrastructure (zookeeper, kafka, web gui) as docker containers. 72 | # see docker-compose.yml for more details 73 | 74 | gradle :order-command-service:bootRun # 5. start command service (provides a REST endpoint to receive OrderCommands and publishes OrderEvents to the Kafka topic) 75 | gradle :order-query-service:bootRun # 6. start query service (listens to the Kafka topic for new order events, persists order changes to the local db and provides a REST endpoint to query persisted orders) 76 | 77 | 78 | ``` 79 | 80 | ## make an order and see what happens 81 | 82 | ```shell 83 | 84 | # 1. create a new order (execute command) 85 | curl --request POST 'http://localhost:8883/orders' --header 'Content-Type: application/json' --data-raw '{ "userId" : "0123", "productId": "777", "amount": 1 }' 86 | 87 | # 2. get all available orders (execute query) 88 | curl 'http://localhost:8884/orders' 89 | 90 | ``` 91 | 92 | 

93 | 94 | --- 95 | 96 | ## About CQRS 97 | 98 | xxx 99 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/kotlin/src/test/kotlin/hellokafka/ContainerKafkaTest.kt: -------------------------------------------------------------------------------- 1 | package hellokafka 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerConfig 4 | import org.apache.kafka.clients.producer.ProducerConfig 5 | import org.apache.kafka.common.serialization.StringDeserializer 6 | import org.apache.kafka.common.serialization.StringSerializer 7 | import org.assertj.core.api.Assertions 8 | import org.junit.jupiter.api.Test 9 | import org.springframework.beans.factory.annotation.Autowired 10 | import org.springframework.boot.test.context.SpringBootTest 11 | import org.springframework.boot.test.context.TestConfiguration 12 | import org.springframework.context.annotation.Bean 13 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory 14 | import org.springframework.kafka.core.* 15 | import org.springframework.test.annotation.DirtiesContext 16 | import org.testcontainers.containers.KafkaContainer 17 | import org.testcontainers.containers.wait.strategy.WaitStrategy 18 | import org.testcontainers.junit.jupiter.Container 19 | import org.testcontainers.junit.jupiter.Testcontainers 20 | import org.testcontainers.utility.DockerImageName 21 | import java.util.concurrent.CountDownLatch 22 | import java.util.concurrent.TimeUnit 23 | 24 | 25 | @SpringBootTest 26 | @DirtiesContext // resets the context between tests 27 | @Testcontainers 28 | class ContainerKafkaTest( 29 | @Autowired 30 | private val consumer: Consumer, 31 | @Autowired 32 | private val producer: Producer) 33 | { 34 | 35 | companion object{ 36 | @Container 37 | var kafka = KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3")); 38 | } 39 | 40 | 41 | @Test 42 | fun `should publish and receive 2 messages`() { 43 | val msg = "helllllo"; 44 | 45 | consumer.latch = CountDownLatch(2) 46 | 47 | producer.send(msg); 48 | producer.send(msg); 49 | 50 | // wait max 2 sec. 
51 | consumer.latch.await(2000, TimeUnit.MILLISECONDS) 52 | 53 | Assertions.assertThat(consumer.latch.count).isEqualTo(0) 54 | Assertions.assertThat(consumer.data).isNotNull 55 | Assertions.assertThat(consumer.data.count()).isEqualTo(2) 56 | Assertions.assertThat(consumer.data[0]).isEqualTo(msg) 57 | Assertions.assertThat(consumer.data[1]).isEqualTo(msg) 58 | } 59 | 60 | @TestConfiguration 61 | internal class KafkaTestContainersConfiguration { 62 | @Bean 63 | fun kafkaListenerContainerFactory(): ConcurrentKafkaListenerContainerFactory { 64 | val factory = ConcurrentKafkaListenerContainerFactory() 65 | factory.setConsumerFactory(consumerFactory()) 66 | 67 | 68 | 69 | return factory 70 | } 71 | 72 | @Bean 73 | fun consumerFactory(): ConsumerFactory { 74 | return DefaultKafkaConsumerFactory(consumerConfigs()) 75 | } 76 | 77 | @Bean 78 | fun consumerConfigs(): Map { 79 | val props: MutableMap = HashMap() 80 | props[ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG] = kafka.getBootstrapServers() 81 | props[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = "earliest"; 82 | props[ConsumerConfig.GROUP_ID_CONFIG] = "unit-test-c-client"; 83 | props[ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG] = StringDeserializer::class.java 84 | props[ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG] = StringDeserializer::class.java 85 | return props 86 | } 87 | 88 | @Bean 89 | fun producerFactory(): ProducerFactory { 90 | val configProps: MutableMap = HashMap() 91 | configProps[ProducerConfig.BOOTSTRAP_SERVERS_CONFIG] = kafka.getBootstrapServers() 92 | configProps[ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG] = StringSerializer::class.java 93 | configProps[ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG] = StringSerializer::class.java 94 | return DefaultKafkaProducerFactory(configProps) 95 | } 96 | 97 | @Bean 98 | fun kafkaTemplate(): KafkaTemplate { 99 | return KafkaTemplate(producerFactory()) 100 | } 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/src/test/java/hellokafka/ContainerKafkaTest.java: -------------------------------------------------------------------------------- 1 | package hellokafka; 2 | 3 | 4 | import org.apache.kafka.clients.consumer.ConsumerConfig; 5 | import org.apache.kafka.clients.producer.ProducerConfig; 6 | import org.apache.kafka.common.serialization.StringDeserializer; 7 | import org.apache.kafka.common.serialization.StringSerializer; 8 | import org.assertj.core.api.Assertions; 9 | import org.junit.jupiter.api.Test; 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.boot.test.context.SpringBootTest; 12 | import org.springframework.boot.test.context.TestConfiguration; 13 | import org.springframework.context.annotation.Bean; 14 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 15 | import org.springframework.kafka.core.*; 16 | import org.springframework.test.annotation.DirtiesContext; 17 | import org.testcontainers.containers.KafkaContainer; 18 | import org.testcontainers.junit.jupiter.Container; 19 | import org.testcontainers.junit.jupiter.Testcontainers; 20 | import org.testcontainers.utility.DockerImageName; 21 | 22 | import java.util.HashMap; 23 | import java.util.Map; 24 | import java.util.concurrent.CountDownLatch; 25 | import java.util.concurrent.TimeUnit; 26 | 27 | @SpringBootTest 28 | @DirtiesContext // resets the context between tests 29 | @Testcontainers 30 | class ContainerKafkaTest { 31 | 32 | @Autowired 33 | public Consumer consumer; 
34 | @Autowired 35 | public Producer producer; 36 | 37 | @Container 38 | public static KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3")); 39 | 40 | @Test 41 | public void should_publish_and_receive_2_messages() throws InterruptedException { 42 | String msg = "helllllo"; 43 | 44 | consumer.latch = new CountDownLatch(2); 45 | 46 | producer.send(msg); 47 | producer.send(msg); 48 | 49 | // wait max 2 sec. 50 | consumer.latch.await(2000, TimeUnit.MILLISECONDS); 51 | 52 | Assertions.assertThat(consumer.latch.getCount()).isEqualTo(0); 53 | Assertions.assertThat(consumer.data).isNotNull(); 54 | Assertions.assertThat(consumer.data.size()).isEqualTo(2); 55 | Assertions.assertThat(consumer.data.get(0)).isEqualTo(msg); 56 | Assertions.assertThat(consumer.data.get(1)).isEqualTo(msg); 57 | } 58 | 59 | @TestConfiguration 60 | static class KafkaTestContainersConfiguration { 61 | 62 | @Bean 63 | ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory() { 64 | ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory(); 65 | factory.setConsumerFactory(consumerFactory()); 66 | return factory; 67 | } 68 | 69 | @Bean 70 | ConsumerFactory consumerFactory() { 71 | return new DefaultKafkaConsumerFactory(consumerConfigs()); 72 | } 73 | 74 | @Bean 75 | Map consumerConfigs() { 76 | Map props = new HashMap<>(); 77 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); 78 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); 79 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "unit-test-c-client"); 80 | props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 81 | props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 82 | return props; 83 | } 84 | 85 | @Bean 86 | ProducerFactory producerFactory() { 87 | Map configProps = new HashMap<>(); 88 | configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); 89 | configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 90 | configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 91 | return new DefaultKafkaProducerFactory(configProps); 92 | } 93 | 94 | @Bean 95 | KafkaTemplate kafkaTemplate() { 96 | return new KafkaTemplate(producerFactory()); 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/readme.md: -------------------------------------------------------------------------------- 1 | # Hello Kafka Unit-Test 2 | a simple Unit-Test using Kafka + Spring + Kotlin + JUnit5 3 | 4 | ![alt text](../docs/kafka-unit-test.png) 5 | 6 | 7 | # Example 8 | 9 | The example consists of 2 components: the runtime code and the corresponding unit tests. 10 | The producer sends simple text messages to a Kafka Topic "hello-topic-ut". 11 | The consumer subscribes to the topic "hello-topic-ut" and receives automatically all messages 12 | sent to that topic. 13 | 14 | > The magic of this example is that you don't need a running Kafka infrastructure. 15 | > The unit tests provide Kafka Environment! 16 | 17 |

18 | 19 | ### Producer 20 | 21 | The producer sends the text message to the topic "hello-topic-ut". 22 | 23 | ```kotlin 24 | @Service 25 | class Producer(val kafka: KafkaTemplate<String, String>) { 26 | fun send(message: String) { 27 | println("PUBLISH message: $message") 28 | this.kafka.send("hello-topic-ut", message) 29 | } 30 | } 31 | ``` 32 | 33 | 

34 | 35 | ### Consumer 36 | 37 | The consumer subscribes to the topic "hello-topic-ut" and thus automatically receives the messages. 38 | The received messages are stored in memory so that they can be queried later in unit tests. 39 | 40 | Furthermore, the consumer contains a CountDownLatch, which helps to cleanly test asynchronous 41 | processing in the unit tests. 42 | 43 | ```kotlin 44 | @Service 45 | class Consumer { 46 | val data: MutableList<String> = ArrayList() 47 | var latch = CountDownLatch(2) 48 | 49 | @KafkaListener(topics= ["hello-topic-ut"], groupId = "kafka_kotlin_id") 50 | fun consume(message: String) { 51 | println("RECEIVE message: $message") 52 | data.add(message) 53 | latch.countDown() 54 | } 55 | } 56 | ``` 57 | 58 | 

59 | 
60 | ### Unit-Test
61 | 
62 | This example demonstrates 2 different ways
63 | to implement Kafka unit tests:
64 | 
65 | - EmbeddedKafka
66 | - Testcontainers/KafkaContainer
67 | 
68 | ### dependencies
69 | 
70 | The following packages are used for this purpose.
71 | 
72 | ```kotlin
73 | 
74 | // build.gradle.kts
75 | 
76 | implementation("org.springframework.boot:spring-boot-starter")
77 | testImplementation("org.springframework.boot:spring-boot-starter-test")
78 | 
79 | // embedded kafka
80 | implementation("org.springframework.kafka:spring-kafka")
81 | implementation("org.springframework.kafka:spring-kafka-test")
82 | 
83 | // kafka as container
84 | implementation("org.testcontainers:kafka:1.15.0")
85 | implementation("org.testcontainers:junit-jupiter:1.15.0")
86 | 
87 | ```
88 | 
89 | ### EmbeddedKafkaTest
90 | 
91 | This is probably the easiest way to test Kafka. Just add the **@EmbeddedKafka**
92 | annotation to the test class and set the brokerProperties. Make sure that the
93 | chosen port is not already in use.
94 | 
95 | ```kotlin
96 | 
97 | @SpringBootTest(properties = [
98 | "spring.kafka.consumer.auto-offset-reset=earliest",
99 | "spring.kafka.consumer.group-id=unit-test-client",
100 | "spring.kafka.bootstrap-servers=localhost:9099"
101 | ])
102 | @DirtiesContext // resets the context between tests
103 | @EmbeddedKafka(brokerProperties = ["listeners=PLAINTEXT://localhost:9099", "port=9099"])
104 | class EmbeddedKafkaTest(
105 | @Autowired
106 | private val consumer: Consumer,
107 | @Autowired
108 | private val producer: Producer
109 | ) {
110 | 
111 | @Test
112 | fun `should publish and receive 2 messages`() {
113 | val msg = "helllllo"
114 | 
115 | consumer.latch = CountDownLatch(2)
116 | 
117 | producer.send(msg)
118 | producer.send(msg)
119 | 
120 | // wait at most 2 seconds for both messages to arrive
121 | consumer.latch.await(2000, TimeUnit.MILLISECONDS)
122 | 
123 | assertThat(consumer.latch.count).isEqualTo(0)
124 | ...
125 | }
126 | }
127 | 
128 | ```
129 | 
130 | ### ContainerKafkaTest
131 | 
132 | This approach allows unit testing with a real Kafka container.
133 | Thus, the test environment can match the production environment as closely as possible.
134 | 
135 | more details: https://www.testcontainers.org/modules/kafka/
136 | 
137 | 
138 | ```kotlin
139 | 
140 | @SpringBootTest
141 | @DirtiesContext // resets the context between tests
142 | @Testcontainers
143 | class ContainerKafkaTest(
144 | @Autowired
145 | private val consumer: Consumer,
146 | @Autowired
147 | private val producer: Producer)
148 | {
149 | 
150 | companion object {
151 | @Container
152 | var kafka = KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3"))
153 | }
154 | 
155 | 
156 | @Test
157 | fun `should publish and receive 2 messages`() { ... }
158 | 
159 | @TestConfiguration
160 | internal class KafkaTestContainersConfiguration { ... }
161 | }
162 | 
163 | ```
164 | 
165 |
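The elided `KafkaTestContainersConfiguration` simply points the consumer and producer factories at the started container. A minimal Kotlin sketch of what such a nested configuration typically contains, mirroring the Java variant of this example (the group id and bean layout here are illustrative, not the exact code of `ContainerKafkaTest.kt`; `kafka` refers to the container from the test's companion object):

```kotlin
@TestConfiguration
internal class KafkaTestContainersConfiguration {

    @Bean
    fun consumerConfigs(): Map<String, Any> = mapOf(
        ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to kafka.bootstrapServers, // address of the started container
        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG to "earliest",
        ConsumerConfig.GROUP_ID_CONFIG to "unit-test-client",
        ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java,
        ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java
    )

    @Bean
    fun consumerFactory(): ConsumerFactory<String, String> =
        DefaultKafkaConsumerFactory<String, String>(consumerConfigs())

    @Bean
    fun kafkaListenerContainerFactory(): ConcurrentKafkaListenerContainerFactory<String, String> {
        // listener factory used by @KafkaListener beans in the test context
        val factory = ConcurrentKafkaListenerContainerFactory<String, String>()
        factory.setConsumerFactory(consumerFactory())
        return factory
    }

    @Bean
    fun kafkaTemplate(): KafkaTemplate<String, String> =
        KafkaTemplate(DefaultKafkaProducerFactory<String, String>(mapOf(
            ProducerConfig.BOOTSTRAP_SERVERS_CONFIG to kafka.bootstrapServers,
            ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG to StringSerializer::class.java,
            ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG to StringSerializer::class.java
        )))
}
```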

166 | 
167 | 
168 | ## references
169 | 
170 | 
171 | - https://www.baeldung.com/spring-boot-kafka-testing
172 | - https://www.testcontainers.org/
173 | - https://www.baeldung.com/java-countdown-latch
174 | 
-------------------------------------------------------------------------------- /hello-kafka-microservices/readme.md: --------------------------------------------------------------------------------
1 | # Hello Kafka Microservices
2 | a minimal microservice implementation
3 | using Kafka + Spring + Kotlin + Gradle + H2
4 | 
5 | ![alt text](../docs/kafka-microservices.png)
6 | 
7 | ## Example
8 | 
9 | In this example, 2 microservices are implemented that communicate with each other using events.
10 | 
11 | > The services do not call each other and do not use a common database.
12 | > They only communicate through events.
13 | 
14 | The Kafka system takes care of publishing and subscribing to such events.
15 | 
16 | In this example, event-based service communication is implemented.
17 | However, there is no event sourcing. **Event history** plays an essential role in **event sourcing**.
18 | In this example, the event history is irrelevant.
19 | Only the one-time notification of the respective event is necessary.
20 | 
21 | 
22 | ### Order-Service
23 | 
24 | New orders can be created or queried via the order service.
25 | A corresponding REST API is provided for this:
26 | 
27 | POST /orders
28 | GET /orders
29 | 
30 | The received order is first persisted in the local database.
31 | An event is then published to the Kafka topic.
32 | Thus, all subscribers of this topic are informed about this event.
33 | 
34 |
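Conceptually, the order flow boils down to "save first, then publish". A minimal sketch of such a service (illustrative only; the repository, topic and field names are assumptions, not the exact code of `OrderService.kt`):

```kotlin
@Service
class OrderService(
    val orders: OrderRepository,             // JPA repository backed by the service-local H2 database
    val kafka: KafkaTemplate<String, Order>  // assumed to be configured with a JSON serializer
) {
    fun create(order: Order): Order {
        // 1. persist the new order in the order service's own database
        val saved = orders.save(order)

        // 2. then publish an order event so that all subscribers of the topic are notified
        kafka.send("order-topic", saved)

        return saved
    }
}
```

The relevant design choice is the order of operations: the service publishes the event only after its local write has succeeded.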

35 | 
36 | ### Invoice-Service
37 | 
38 | The invoice service subscribes to the relevant Kafka topic and receives all events published to it.
39 | There is a corresponding OrderListener in the invoice service, which takes care of processing order events.
40 | For each received order, a new invoice entity is created and persisted in the local DB of the invoice service.
41 | 
42 | In addition, a new "invoice event" is created and published to the corresponding Kafka topic.
43 | This in turn notifies all relevant services about the creation of a new invoice.
44 | You could now implement another service, for example a **mail service**, which subscribes to the **"invoice" topic** and
45 | takes care of sending the e-mails.
46 | 
47 | The invoice service also provides its own REST API. For example, frontends can use this to query all invoices.
48 |
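The listener side mirrors the same pattern: consume an order event, persist locally, publish a follow-up event. A minimal sketch (again illustrative; the topic names, group id and `Invoice` fields are assumptions, not the exact code of `OrderListener.kt`):

```kotlin
@Service
class OrderListener(
    val invoices: InvoiceRepository,           // local DB of the invoice service
    val kafka: KafkaTemplate<String, Invoice>  // used to publish the invoice event
) {
    // assumes a JSON deserializer is configured for the Order payload
    @KafkaListener(topics = ["order-topic"], groupId = "invoice-service")
    fun onOrderEvent(order: Order) {
        // 1. create and persist a new invoice for the received order
        val invoice = invoices.save(Invoice(orderId = order.id, amount = order.amount))

        // 2. publish an invoice event so that other services (e.g. a mail service) can react
        kafka.send("invoice-topic", invoice)
    }
}
```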

49 | 50 | 51 | ## start application 52 | 53 | ## prerequisites 54 | 55 | - docker/docker-compose 56 | - gradle 57 | - java sdk 1.8 58 | - kotlin 59 | - local dns mapping: 127.0.0.1 kafka 60 | 61 |

62 | 
63 | ```shell
64 | # 1. get project sources from git
65 | git clone https://github.com/thecodemonkey/kafka-microservices.git
66 | 
67 | # 2. local dns setup => etc/hosts => 127.0.0.1 kafka
68 | 
69 | cd hello-kafka-microservices/kotlin # 3. go to project root folder
70 | 
71 | gradle start-kafka # 4. start kafka infrastructure (zookeeper, kafka, web gui) as docker containers.
72 | # see docker-compose.yml for more details
73 | 
74 | gradle :order-service:bootRun # 5. start order service (provides a REST endpoint to create new orders or query existing ones; publishes OrderEvents to the Kafka topic)
75 | gradle :invoice-service:bootRun # 6. start invoice service (listens to the Kafka topic for new order events, creates new invoices and persists them to its local DB, provides a REST endpoint to query persisted invoices)
76 | 
77 | 
78 | ```
79 |

80 | 81 | ## make an order and see what happens 82 | 83 | ```shell 84 | 85 | # 1. create a new order (execute command) 86 | curl --request POST 'http://localhost:8881/orders' --header 'Content-Type: application/json' --data-raw '{ "userId" : "0123", "productId": "777", "amount": 1 }' 87 | 88 | # 2. get all available orders 89 | curl 'http://localhost:8881/orders' 90 | 91 | # 3. get all available invoices 92 | curl 'http://localhost:8882/invoices' 93 | 94 | # 4. check topic with Kafka UI 95 | # open in browser: http://localhost:8081/ 96 | 97 | ``` 98 | 99 |

100 | 
101 | ----
102 | 
103 | ## About event-driven microservices
104 | 
105 | The complexity of an application does not necessarily grow with the number of lines of code,
106 | but with its dependencies. Such dependencies arise, for example, through the use of a shared data model
107 | or a shared database. And even if the code is split into several smaller services,
108 | dependencies can still arise from the services calling each other.
109 | The event-driven approach enables maximum decoupling: the services communicate with each other exclusively by means of events.
110 | 
111 | There are 3 essential points that make up event-driven microservices:
112 | 
113 | - Each service has its own database
114 | - The services do not call each other via REST or HTTP; instead, they only communicate with each other via events.
115 | - The services do not use common data models
116 | 
117 | This guarantees maximum decoupling. This decoupling/autonomy makes an event-driven microservice more maintainable. Kafka is ideally suited as an event bus to handle the communication between the microservices.
118 |

119 | -------------------------------------------------------------------------------- /hello-kafka-api/java/.mvn/wrapper/MavenWrapperDownloader.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2007-present the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * https://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import java.net.*; 18 | import java.io.*; 19 | import java.nio.channels.*; 20 | import java.util.Properties; 21 | 22 | public class MavenWrapperDownloader { 23 | 24 | private static final String WRAPPER_VERSION = "0.5.6"; 25 | /** 26 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. 27 | */ 28 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" 29 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; 30 | 31 | /** 32 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to 33 | * use instead of the default one. 34 | */ 35 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH = 36 | ".mvn/wrapper/maven-wrapper.properties"; 37 | 38 | /** 39 | * Path where the maven-wrapper.jar will be saved to. 40 | */ 41 | private static final String MAVEN_WRAPPER_JAR_PATH = 42 | ".mvn/wrapper/maven-wrapper.jar"; 43 | 44 | /** 45 | * Name of the property which should be used to override the default download url for the wrapper. 46 | */ 47 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; 48 | 49 | public static void main(String args[]) { 50 | System.out.println("- Downloader started"); 51 | File baseDirectory = new File(args[0]); 52 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); 53 | 54 | // If the maven-wrapper.properties exists, read it and check if it contains a custom 55 | // wrapperUrl parameter. 56 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); 57 | String url = DEFAULT_DOWNLOAD_URL; 58 | if (mavenWrapperPropertyFile.exists()) { 59 | FileInputStream mavenWrapperPropertyFileInputStream = null; 60 | try { 61 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); 62 | Properties mavenWrapperProperties = new Properties(); 63 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); 64 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); 65 | } catch (IOException e) { 66 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); 67 | } finally { 68 | try { 69 | if (mavenWrapperPropertyFileInputStream != null) { 70 | mavenWrapperPropertyFileInputStream.close(); 71 | } 72 | } catch (IOException e) { 73 | // Ignore ... 
74 | } 75 | } 76 | } 77 | System.out.println("- Downloading from: " + url); 78 | 79 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); 80 | if (!outputFile.getParentFile().exists()) { 81 | if (!outputFile.getParentFile().mkdirs()) { 82 | System.out.println( 83 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); 84 | } 85 | } 86 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); 87 | try { 88 | downloadFileFromURL(url, outputFile); 89 | System.out.println("Done"); 90 | System.exit(0); 91 | } catch (Throwable e) { 92 | System.out.println("- Error downloading"); 93 | e.printStackTrace(); 94 | System.exit(1); 95 | } 96 | } 97 | 98 | private static void downloadFileFromURL(String urlString, File destination) throws Exception { 99 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { 100 | String username = System.getenv("MVNW_USERNAME"); 101 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); 102 | Authenticator.setDefault(new Authenticator() { 103 | @Override 104 | protected PasswordAuthentication getPasswordAuthentication() { 105 | return new PasswordAuthentication(username, password); 106 | } 107 | }); 108 | } 109 | URL website = new URL(urlString); 110 | ReadableByteChannel rbc; 111 | rbc = Channels.newChannel(website.openStream()); 112 | FileOutputStream fos = new FileOutputStream(destination); 113 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); 114 | fos.close(); 115 | rbc.close(); 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /hello-kafka-streams/java/.mvn/wrapper/MavenWrapperDownloader.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2007-present the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * https://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import java.net.*; 18 | import java.io.*; 19 | import java.nio.channels.*; 20 | import java.util.Properties; 21 | 22 | public class MavenWrapperDownloader { 23 | 24 | private static final String WRAPPER_VERSION = "0.5.6"; 25 | /** 26 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. 27 | */ 28 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" 29 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; 30 | 31 | /** 32 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to 33 | * use instead of the default one. 34 | */ 35 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH = 36 | ".mvn/wrapper/maven-wrapper.properties"; 37 | 38 | /** 39 | * Path where the maven-wrapper.jar will be saved to. 
40 | */ 41 | private static final String MAVEN_WRAPPER_JAR_PATH = 42 | ".mvn/wrapper/maven-wrapper.jar"; 43 | 44 | /** 45 | * Name of the property which should be used to override the default download url for the wrapper. 46 | */ 47 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; 48 | 49 | public static void main(String args[]) { 50 | System.out.println("- Downloader started"); 51 | File baseDirectory = new File(args[0]); 52 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); 53 | 54 | // If the maven-wrapper.properties exists, read it and check if it contains a custom 55 | // wrapperUrl parameter. 56 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); 57 | String url = DEFAULT_DOWNLOAD_URL; 58 | if (mavenWrapperPropertyFile.exists()) { 59 | FileInputStream mavenWrapperPropertyFileInputStream = null; 60 | try { 61 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); 62 | Properties mavenWrapperProperties = new Properties(); 63 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); 64 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); 65 | } catch (IOException e) { 66 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); 67 | } finally { 68 | try { 69 | if (mavenWrapperPropertyFileInputStream != null) { 70 | mavenWrapperPropertyFileInputStream.close(); 71 | } 72 | } catch (IOException e) { 73 | // Ignore ... 74 | } 75 | } 76 | } 77 | System.out.println("- Downloading from: " + url); 78 | 79 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); 80 | if (!outputFile.getParentFile().exists()) { 81 | if (!outputFile.getParentFile().mkdirs()) { 82 | System.out.println( 83 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); 84 | } 85 | } 86 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); 87 | try { 88 | downloadFileFromURL(url, outputFile); 89 | System.out.println("Done"); 90 | System.exit(0); 91 | } catch (Throwable e) { 92 | System.out.println("- Error downloading"); 93 | e.printStackTrace(); 94 | System.exit(1); 95 | } 96 | } 97 | 98 | private static void downloadFileFromURL(String urlString, File destination) throws Exception { 99 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { 100 | String username = System.getenv("MVNW_USERNAME"); 101 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); 102 | Authenticator.setDefault(new Authenticator() { 103 | @Override 104 | protected PasswordAuthentication getPasswordAuthentication() { 105 | return new PasswordAuthentication(username, password); 106 | } 107 | }); 108 | } 109 | URL website = new URL(urlString); 110 | ReadableByteChannel rbc; 111 | rbc = Channels.newChannel(website.openStream()); 112 | FileOutputStream fos = new FileOutputStream(destination); 113 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); 114 | fos.close(); 115 | rbc.close(); 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /hello-kafka-unit-test/java/.mvn/wrapper/MavenWrapperDownloader.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2007-present the original author or authors. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * https://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import java.net.*; 18 | import java.io.*; 19 | import java.nio.channels.*; 20 | import java.util.Properties; 21 | 22 | public class MavenWrapperDownloader { 23 | 24 | private static final String WRAPPER_VERSION = "0.5.6"; 25 | /** 26 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. 27 | */ 28 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" 29 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; 30 | 31 | /** 32 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to 33 | * use instead of the default one. 34 | */ 35 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH = 36 | ".mvn/wrapper/maven-wrapper.properties"; 37 | 38 | /** 39 | * Path where the maven-wrapper.jar will be saved to. 40 | */ 41 | private static final String MAVEN_WRAPPER_JAR_PATH = 42 | ".mvn/wrapper/maven-wrapper.jar"; 43 | 44 | /** 45 | * Name of the property which should be used to override the default download url for the wrapper. 46 | */ 47 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; 48 | 49 | public static void main(String args[]) { 50 | System.out.println("- Downloader started"); 51 | File baseDirectory = new File(args[0]); 52 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); 53 | 54 | // If the maven-wrapper.properties exists, read it and check if it contains a custom 55 | // wrapperUrl parameter. 56 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); 57 | String url = DEFAULT_DOWNLOAD_URL; 58 | if (mavenWrapperPropertyFile.exists()) { 59 | FileInputStream mavenWrapperPropertyFileInputStream = null; 60 | try { 61 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); 62 | Properties mavenWrapperProperties = new Properties(); 63 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); 64 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); 65 | } catch (IOException e) { 66 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); 67 | } finally { 68 | try { 69 | if (mavenWrapperPropertyFileInputStream != null) { 70 | mavenWrapperPropertyFileInputStream.close(); 71 | } 72 | } catch (IOException e) { 73 | // Ignore ... 
74 | } 75 | } 76 | } 77 | System.out.println("- Downloading from: " + url); 78 | 79 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); 80 | if (!outputFile.getParentFile().exists()) { 81 | if (!outputFile.getParentFile().mkdirs()) { 82 | System.out.println( 83 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); 84 | } 85 | } 86 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); 87 | try { 88 | downloadFileFromURL(url, outputFile); 89 | System.out.println("Done"); 90 | System.exit(0); 91 | } catch (Throwable e) { 92 | System.out.println("- Error downloading"); 93 | e.printStackTrace(); 94 | System.exit(1); 95 | } 96 | } 97 | 98 | private static void downloadFileFromURL(String urlString, File destination) throws Exception { 99 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { 100 | String username = System.getenv("MVNW_USERNAME"); 101 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); 102 | Authenticator.setDefault(new Authenticator() { 103 | @Override 104 | protected PasswordAuthentication getPasswordAuthentication() { 105 | return new PasswordAuthentication(username, password); 106 | } 107 | }); 108 | } 109 | URL website = new URL(urlString); 110 | ReadableByteChannel rbc; 111 | rbc = Channels.newChannel(website.openStream()); 112 | FileOutputStream fos = new FileOutputStream(destination); 113 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); 114 | fos.close(); 115 | rbc.close(); 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | github. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. 
Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /hello-kafka-cqrs/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 
50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! -x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? -ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i + 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" 
;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | -------------------------------------------------------------------------------- /hello-kafka-es/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 
87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! -x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? -ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i + 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 
180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | -------------------------------------------------------------------------------- /hello-kafka-api/kotlin/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! -x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? -ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i + 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | -------------------------------------------------------------------------------- /hello-kafka-microservices/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original 
author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! -x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? 
-ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i + 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | --------------------------------------------------------------------------------