├── .github
├── PULL_REQUEST_TEMPLATE.md
└── workflows
│ ├── apache-pulsar-workflow.yml
│ ├── auditlog-workflow.yml
│ ├── cache-invalidation-workflow.yml
│ ├── camel-component.yml
│ ├── camel-kafka-connect.yml
│ ├── cloudevents.yml
│ ├── debezium-server-name-mapper-workflow.yml
│ ├── distributed-caching-workflow.yml
│ ├── end-to-end-demo-workflow.yml
│ ├── graphql-workflow.yml
│ ├── jpa-aggregations-workflow.yml
│ ├── kinesis-workflow.yml
│ ├── kstreams-fk-join-workflow.yml
│ ├── kstreams-live-update-workflow.yml
│ ├── kstreams-workflow.yml
│ ├── outbox-workflow.yml
│ ├── postgres-toast-workflow.yml
│ ├── quarkus-native-workflow.yml
│ ├── saga-workflow.yml
│ └── testcontainers-workflow.yml
├── .gitignore
├── LICENSE.txt
├── README.md
├── ai-rag
├── README.md
├── client
│ ├── .dockerignore
│ ├── .gitignore
│ ├── .mvn
│ │ └── wrapper
│ │ │ ├── .gitignore
│ │ │ ├── MavenWrapperDownloader.java
│ │ │ └── maven-wrapper.properties
│ ├── mvnw
│ ├── mvnw.cmd
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ ├── docker
│ │ │ ├── Dockerfile.jvm
│ │ │ ├── Dockerfile.legacy-jar
│ │ │ ├── Dockerfile.native
│ │ │ └── Dockerfile.native-micro
│ │ ├── java
│ │ │ └── io
│ │ │ │ └── debezium
│ │ │ │ └── examples
│ │ │ │ └── airag
│ │ │ │ ├── Chat.java
│ │ │ │ ├── DocumentDatabase.java
│ │ │ │ ├── MilvusRetrievalAugmentor.java
│ │ │ │ ├── MilvusStore.java
│ │ │ │ └── RagCommand.java
│ │ └── resources
│ │ │ ├── application.properties
│ │ │ └── log4j.properties
│ │ └── test
│ │ └── java
│ │ └── io
│ │ └── debezium
│ │ └── examples
│ │ └── airag
│ │ └── RagCommandTest.java
├── config-postgres
│ └── ai-sample-data.sql
├── config-server
│ └── application.properties
├── docker-compose.yml
└── image-server
│ └── Dockerfile
├── apache-pulsar
├── README.md
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── examples
│ │ └── apache
│ │ └── pulsar
│ │ ├── PulsarProducer.java
│ │ └── config
│ │ └── PropertyLoader.java
│ └── resources
│ ├── config.properties
│ └── log4j.properties
├── auditlog
├── README.md
├── admin-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── demos
│ │ │ └── auditing
│ │ │ └── admin
│ │ │ ├── AuditData.java
│ │ │ ├── SourceData.java
│ │ │ ├── TransactionData.java
│ │ │ ├── TransactionEvent.java
│ │ │ ├── VegetableData.java
│ │ │ ├── VegetableEvent.java
│ │ │ ├── service
│ │ │ └── AdminService.java
│ │ │ └── wih
│ │ │ └── SendTransactionEventWIHandler.java
│ │ └── resources
│ │ ├── META-INF
│ │ └── resources
│ │ │ └── index.html
│ │ ├── application.properties
│ │ └── io
│ │ └── debezium
│ │ └── demos
│ │ └── auditing
│ │ └── admin
│ │ ├── matching-rules.drl
│ │ ├── transactions.bpmn2
│ │ └── vegetables.bpmn2
├── docker-compose.yaml
├── log-enricher
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── demos
│ │ │ └── auditing
│ │ │ └── enricher
│ │ │ ├── BufferOffsets.java
│ │ │ ├── ChangeEventEnricher.java
│ │ │ ├── JsonObjectSerde.java
│ │ │ └── TopologyProducer.java
│ │ └── resources
│ │ └── application.properties
├── pom.xml
├── register-postgres.json
└── vegetables-service
│ ├── jwt
│ ├── jwt-token.json
│ ├── jwtenizr-0.0.4-SNAPSHOT.jar
│ ├── jwtenizr-config.json
│ ├── microprofile-config.properties
│ └── token.jwt
│ ├── pom.xml
│ └── src
│ └── main
│ ├── docker
│ ├── Dockerfile.jvm
│ └── Dockerfile.native
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── demos
│ │ └── auditing
│ │ └── vegetables
│ │ ├── model
│ │ └── Vegetable.java
│ │ ├── rest
│ │ ├── VegetableResource.java
│ │ └── util
│ │ │ └── ErrorMapper.java
│ │ ├── service
│ │ └── VegetableService.java
│ │ └── transactioncontext
│ │ ├── Audited.java
│ │ ├── TransactionContextData.java
│ │ └── TransactionInterceptor.java
│ └── resources
│ ├── application.properties
│ └── import.sql
├── cache-invalidation
├── README.md
├── pom.xml
├── resources
│ └── data
│ │ ├── create-order-request.json
│ │ └── update-item-request.json
└── src
│ ├── main
│ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── cacheinvalidation
│ │ │ ├── model
│ │ │ ├── Item.java
│ │ │ └── PurchaseOrder.java
│ │ │ ├── persistence
│ │ │ ├── DatabaseChangeEventListener.java
│ │ │ ├── KnownTransactions.java
│ │ │ ├── TransactionRegistrationIntegrator.java
│ │ │ └── TransactionRegistrationListener.java
│ │ │ └── rest
│ │ │ ├── CacheResource.java
│ │ │ ├── CreateOrderRequest.java
│ │ │ ├── CreateOrderResponse.java
│ │ │ ├── ItemResource.java
│ │ │ ├── OrderResource.java
│ │ │ ├── RestApplication.java
│ │ │ ├── UpdateItemRequest.java
│ │ │ └── UpdateItemResponse.java
│ └── resources
│ │ ├── META-INF
│ │ ├── data.sql
│ │ └── services
│ │ │ └── org.hibernate.integrator.spi.Integrator
│ │ └── application.properties
│ └── test
│ └── java
│ └── io
│ └── debezium
│ └── examples
│ └── cacheinvalidation
│ └── CacheInvalidationTest.java
├── camel-component
├── README.md
├── docker-compose.png
├── docker-compose.yaml
├── pom.xml
├── qa-app
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ └── Dockerfile.jvm
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── camel
│ │ │ └── qaapp
│ │ │ ├── Answer.java
│ │ │ ├── Question.java
│ │ │ ├── Vote.java
│ │ │ └── rest
│ │ │ └── QuestionService.java
│ │ └── resources
│ │ ├── application.properties
│ │ └── log4j.properties
├── qa-camel
│ ├── Dockerfile
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── camel
│ │ │ └── pipeline
│ │ │ ├── AggregateStore.java
│ │ │ ├── Answer.java
│ │ │ ├── Converters.java
│ │ │ ├── QaDatabaseUserNotifier.java
│ │ │ ├── Question.java
│ │ │ └── Runner.java
│ │ └── resources
│ │ └── log4j.properties
└── src
│ ├── main
│ └── resources
│ │ └── log4j.properties
│ └── test
│ └── resources
│ └── messages
│ ├── create-answer1.json
│ ├── create-answer2.json
│ ├── create-answer3.json
│ └── create-question.json
├── camel-kafka-connect
├── Dockerfile
├── README.md
├── coap-server
│ ├── Dockerfile
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── camel
│ │ │ └── coap
│ │ │ ├── CoapServer.java
│ │ │ └── Runner.java
│ │ └── resources
│ │ └── log4j.properties
├── docker-compose.png
├── docker-compose.yaml
├── pom.xml
├── sink.json
└── source.json
├── cloudevents
├── README.md
├── avro-data-extractor
│ ├── Dockerfile
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── cloudevents
│ │ │ └── dataextractor
│ │ │ ├── StreamsPipelineManager.java
│ │ │ └── model
│ │ │ └── CloudEvent.java
│ │ └── resources
│ │ ├── application.properties
│ │ └── log4j.properties
├── docker-compose.yaml
├── register-postgres-avro-avro.json
├── register-postgres-json-avro.json
└── register-postgres-json-json.json
├── db-activity-monitoring
├── README.md
├── activity-monitoring-dashboard.png
├── debezium-grafana
│ ├── Dockerfile
│ ├── activity-monitoring-dashboard.json
│ ├── alerting.yaml
│ ├── dashboard.yml
│ ├── datasource.yml
│ ├── debezium-dashboard.json
│ └── grafana.ini
├── debezium-jmx-exporter
│ ├── Dockerfile
│ └── config.yml
├── debezium-prometheus
│ └── Dockerfile
├── docker-compose.yml
├── inventory.sql
├── order-service
│ ├── .dockerignore
│ ├── .gitignore
│ ├── .mvn
│ │ └── wrapper
│ │ │ ├── .gitignore
│ │ │ ├── MavenWrapperDownloader.java
│ │ │ └── maven-wrapper.properties
│ ├── README.md
│ ├── mvnw
│ ├── mvnw.cmd
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.jvm
│ │ ├── Dockerfile.legacy-jar
│ │ ├── Dockerfile.native
│ │ └── Dockerfile.native-micro
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ ├── AppLifecycleBean.java
│ │ │ ├── Endpoint.java
│ │ │ ├── LoadSimulator.java
│ │ │ ├── Order.java
│ │ │ └── OrderRepository.java
│ │ └── resources
│ │ ├── application.properties
│ │ └── import.sql
└── postgres-activity-monitoring.json
├── debezium-platform
└── postgresql-kafka-example
│ ├── README.md
│ ├── clean-up.sh
│ ├── create-environment.sh
│ ├── destination-kafka
│ └── 001_kafka.yml
│ ├── env.sh
│ ├── resources
│ ├── destination.png
│ ├── pipeline_configuration.png
│ ├── pipeline_designer.png
│ ├── source.png
│ └── transform.png
│ ├── setup-infra.sh
│ └── source-database
│ └── 001_postgresql.yml
├── debezium-server-name-mapper
├── README.md
├── docker-compose.yaml
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── example
│ │ └── server
│ │ └── mapper
│ │ └── PrefixingNameMapper.java
│ └── resources
│ └── application.properties
├── debezium-server
├── debezium-server-mysql-redis-pubsub
│ ├── Dockerfile
│ ├── README.md
│ ├── config-mysql
│ │ └── application.properties
│ └── docker-compose.yml
└── debezium-server-sink-pubsub
│ ├── README.md
│ ├── config-mongodb
│ └── application.properties
│ ├── config-mysql
│ └── application.properties
│ ├── config-postgres
│ └── application.properties
│ └── docker-compose.yml
├── distributed-caching
├── README.md
├── architecture-overview.excalidraw
├── architecture-overview.png
├── cache-update-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── caching
│ │ │ └── cacheupdater
│ │ │ ├── facade
│ │ │ └── KafkaEventConsumer.java
│ │ │ ├── streams
│ │ │ ├── TopologyProducer.java
│ │ │ └── model
│ │ │ │ ├── OrderLine.java
│ │ │ │ ├── OrderLineAndPurchaseOrder.java
│ │ │ │ ├── OrderLineStatus.java
│ │ │ │ ├── OrderWithLines.java
│ │ │ │ └── PurchaseOrder.java
│ │ │ └── util
│ │ │ └── PurchaseOrdersContextInitializer.java
│ │ └── resources
│ │ └── application.properties
├── demo.md
├── docker-compose.yaml
├── infinispan-cen.xml
├── infinispan-lon.xml
├── infinispan-nyc.xml
├── order-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── caching
│ │ │ ├── commons
│ │ │ ├── BigDecimalAdapter.java
│ │ │ ├── EntityNotFoundException.java
│ │ │ ├── LocalDateTimeAdapter.java
│ │ │ └── PurchaseOrdersContextInitializer.java
│ │ │ ├── model
│ │ │ ├── OrderLine.java
│ │ │ ├── OrderLineStatus.java
│ │ │ └── PurchaseOrder.java
│ │ │ └── order
│ │ │ ├── rest
│ │ │ ├── CreateOrderRequest.java
│ │ │ ├── OrderLineDto.java
│ │ │ ├── OrderOperationResponse.java
│ │ │ ├── OrderResource.java
│ │ │ ├── RestApplication.java
│ │ │ ├── UpdateOrderLineRequest.java
│ │ │ └── util
│ │ │ │ ├── EntityNotFoundMapper.java
│ │ │ │ ├── JacksonProducer.java
│ │ │ │ └── OptimisticLockMapper.java
│ │ │ └── service
│ │ │ └── OrderService.java
│ │ └── resources
│ │ ├── META-INF
│ │ └── resources
│ │ │ └── index.html
│ │ └── application.properties
├── pom.xml
├── register-postgres.json
├── resources
│ └── data
│ │ ├── cancel-order-line-request.json
│ │ └── create-order-request.json
└── servers.json
├── end-to-end-demo
├── README.md
├── debezium-hiking-demo
│ ├── Dockerfile
│ ├── README.md
│ ├── pom.xml
│ ├── resources
│ │ └── wildfly
│ │ │ └── customization
│ │ │ ├── commands.cli
│ │ │ ├── execute.sh
│ │ │ └── mysql-connector-java-5.1.46.jar
│ └── src
│ │ ├── main
│ │ ├── java
│ │ │ └── io
│ │ │ │ └── debezium
│ │ │ │ └── examples
│ │ │ │ └── hikr
│ │ │ │ ├── model
│ │ │ │ ├── Hike.java
│ │ │ │ ├── Person.java
│ │ │ │ ├── Section.java
│ │ │ │ └── Trip.java
│ │ │ │ ├── repository
│ │ │ │ ├── HikeRepository.java
│ │ │ │ └── TripRepository.java
│ │ │ │ └── rest
│ │ │ │ ├── AdminResource.java
│ │ │ │ ├── HikeResource.java
│ │ │ │ ├── HikingApplication.java
│ │ │ │ ├── TripResource.java
│ │ │ │ └── model
│ │ │ │ ├── ExternalHike.java
│ │ │ │ ├── ExternalHikeSummary.java
│ │ │ │ ├── ExternalTrip.java
│ │ │ │ └── ExternalTripWithHikes.java
│ │ ├── resources
│ │ │ ├── META-INF
│ │ │ │ └── persistence.xml
│ │ │ └── valdr-bean-validation.json
│ │ └── webapp
│ │ │ ├── WEB-INF
│ │ │ ├── beans.xml
│ │ │ └── web.xml
│ │ │ ├── css
│ │ │ ├── bootstrap-theme.min.css
│ │ │ └── bootstrap.min.css
│ │ │ ├── detail.html
│ │ │ ├── hikes.html
│ │ │ ├── images
│ │ │ └── jbosscorp_logo.png
│ │ │ ├── js
│ │ │ ├── hikes.js
│ │ │ └── lib
│ │ │ │ ├── angular-route.js
│ │ │ │ ├── angular.js
│ │ │ │ ├── bootstrap.js
│ │ │ │ ├── lodash.compat.min.js
│ │ │ │ ├── restangular.js
│ │ │ │ └── valdr.js
│ │ │ ├── list.html
│ │ │ └── trips-detail.html
│ │ └── script
│ │ ├── docker-service.sh
│ │ ├── mongo.sh
│ │ └── orderCreation.json
├── debezium-jdbc
│ └── Dockerfile
├── debezium-thorntail-demo
│ ├── Dockerfile
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── java
│ │ └── com
│ │ │ └── example
│ │ │ └── dbzdemo
│ │ │ ├── SysoutChangeEventHandler.java
│ │ │ └── ws
│ │ │ ├── ChangeEventsWebsocketEndpoint.java
│ │ │ └── WebSocketChangeEventHandler.java
│ │ ├── resources
│ │ └── META-INF
│ │ │ └── beans.xml
│ │ └── webapp
│ │ └── index.html
├── debezium-vertx-demo
│ ├── Dockerfile
│ ├── README.md
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── example
│ │ │ └── endtoend
│ │ │ └── vertx
│ │ │ └── Verticle.java
│ │ └── resources
│ │ ├── html
│ │ └── index.html
│ │ └── log4j.properties
├── docker-compose.yaml
├── jdbc-sink.json
├── register-hiking-connector-json.json
└── register-hiking-connector.json
├── engine-wasm
├── README.md
├── build-wasm.sh
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── examples
│ │ └── wasm
│ │ └── ChangeDataSender.java
│ └── resources
│ ├── compiled
│ └── cdc.wasm
│ ├── go
│ └── cdc.go
│ └── logback.xml
├── failover
├── README.md
├── docker-compose.yaml
├── haproxy
│ ├── Dockerfile
│ └── haproxy.cfg
├── mysql1
│ ├── Dockerfile
│ ├── inventory.sql
│ └── mysql.cnf
├── mysql2
│ ├── Dockerfile
│ ├── inventory.sql
│ └── mysql.cnf
└── register-mysql.json
├── graphql
├── README.md
├── aggregator
│ ├── Dockerfile
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── META-INF
│ │ └── beans.xml
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── graphql
│ │ │ ├── GraphQLConfiguration.java
│ │ │ ├── OrderPublisher.java
│ │ │ ├── RootResolver.java
│ │ │ ├── SubscriptionResolver.java
│ │ │ ├── endpoint
│ │ │ ├── GraphQLServlet.java
│ │ │ ├── GraphQLWSEndpoint.java
│ │ │ └── GraphQLWSEndpointConfigurer.java
│ │ │ ├── model
│ │ │ └── Order.java
│ │ │ └── serdes
│ │ │ └── ChangeEventAwareJsonSerde.java
│ │ ├── resources
│ │ ├── graphql
│ │ │ └── schema.graphqls
│ │ └── log4j.properties
│ │ └── webapp
│ │ └── graphiql
│ │ ├── es6-promise.auto.min.js
│ │ ├── fetch.min.js
│ │ ├── graphiql-subscriptions-fetcher-0.0.2.js
│ │ ├── graphiql.min.css
│ │ ├── graphiql.min.js
│ │ ├── index.html
│ │ ├── react-dom.min.js
│ │ ├── react.min.js
│ │ └── subscriptions-transport-ws-0.8.3.js
├── docker-compose.yaml
├── event-source
│ ├── Dockerfile
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── graphql
│ │ │ └── eventsource
│ │ │ ├── EventSource.java
│ │ │ ├── Main.java
│ │ │ └── Order.java
│ │ └── resources
│ │ ├── META-INF
│ │ └── persistence.xml
│ │ └── log4j.properties
├── example-db
│ ├── Dockerfile
│ └── schema-update.sql
├── graphiql-screenshot.png
├── mysql-source.json
└── ws-client
│ ├── pom.xml
│ └── src
│ └── main
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── examples
│ │ └── graphql
│ │ └── wsclient
│ │ ├── GraphQLQueryRequest.java
│ │ ├── GraphQLQueryResponse.java
│ │ ├── GraphQLResponseReceiver.java
│ │ ├── GraphQLSubscriptionClient.java
│ │ ├── JSONConverter.java
│ │ ├── Main.java
│ │ └── SimpleWebSocketClient.java
│ └── resources
│ └── log4j.properties
├── http-signaling-notification
├── README.md
├── docker-compose.yaml
├── initializerJson.json
├── pom.xml
├── register-postgres.json
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── examples
│ │ ├── notification
│ │ └── HttpNotificationChannel.java
│ │ └── signal
│ │ └── HttpSignalChannel.java
│ └── resources
│ └── META-INF
│ └── services
│ ├── io.debezium.pipeline.notification.channels.NotificationChannel
│ └── io.debezium.pipeline.signal.channels.SignalChannelReader
├── infinispan-standalone
├── README.md
├── debezium-with-oracle-jdbc
│ ├── Dockerfile
│ └── init
│ │ └── inventory.sql
├── docker-compose.yml
└── register-oracle.json
├── jpa-aggregations
├── README.md
├── docker-compose.yaml
├── es-sink-aggregates.json
├── example-db
│ ├── Dockerfile
│ └── schema-update.sql
├── jpa-test
│ ├── pom.xml
│ ├── src
│ │ ├── main
│ │ │ ├── java
│ │ │ │ └── io
│ │ │ │ │ └── debezium
│ │ │ │ │ └── aggregation
│ │ │ │ │ ├── connect
│ │ │ │ │ ├── KafkaConnectArraySchemaSerializer.java
│ │ │ │ │ ├── KafkaConnectObjectSchemaSerializer.java
│ │ │ │ │ ├── KafkaConnectSchemaFactoryWrapper.java
│ │ │ │ │ └── KafkaConnectSchemaSerializer.java
│ │ │ │ │ └── hibernate
│ │ │ │ │ ├── Aggregate.java
│ │ │ │ │ ├── AggregationBuilderIntegrator.java
│ │ │ │ │ ├── AggregationBuilderListener.java
│ │ │ │ │ ├── MaterializeAggregate.java
│ │ │ │ │ └── ObjectMapperFactory.java
│ │ │ └── resources
│ │ │ │ └── META-INF
│ │ │ │ ├── persistence.xml
│ │ │ │ └── services
│ │ │ │ └── org.hibernate.integrator.spi.Integrator
│ │ └── test
│ │ │ ├── java
│ │ │ ├── com
│ │ │ │ └── example
│ │ │ │ │ └── domain
│ │ │ │ │ ├── Address.java
│ │ │ │ │ ├── AddressType.java
│ │ │ │ │ ├── Category.java
│ │ │ │ │ └── Customer.java
│ │ │ └── io
│ │ │ │ └── debezium
│ │ │ │ └── example
│ │ │ │ └── jpaaggregation
│ │ │ │ └── test
│ │ │ │ ├── JpaAggregationTest.java
│ │ │ │ └── SerializerTest.java
│ │ │ └── resources
│ │ │ └── log4j.properties
│ └── transaction.log
├── json-smt-es
│ ├── Dockerfile
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── io
│ │ └── debezium
│ │ └── aggregation
│ │ └── smt
│ │ └── ExpandJsonSmt.java
└── source.json
├── json-logging
├── README.md
├── debezium-log4j-json
│ ├── Dockerfile
│ └── log4j.properties
├── docker-compose.yaml
└── register-mysql.json
├── kafka-ssl
├── README.md
├── docker-compose.yaml
├── register-postgres.json
└── resources
│ ├── kafka-ssl-keystore.p12
│ └── kafka-ssl-truststore.p12
├── kinesis
├── README.md
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── examples
│ │ └── kinesis
│ │ └── ChangeDataSender.java
│ └── resources
│ └── log4j.properties
├── ksql
├── README.md
├── docker-compose.yaml
└── register-mysql.json
├── kstreams-fk-join
├── README.md
├── aggregator
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── kstreams
│ │ │ └── fkjoin
│ │ │ ├── model
│ │ │ ├── Address.java
│ │ │ ├── AddressAndCustomer.java
│ │ │ ├── Customer.java
│ │ │ └── CustomerWithAddresses.java
│ │ │ ├── streams
│ │ │ └── TopologyProducer.java
│ │ │ └── util
│ │ │ └── JsonObjectSerde.java
│ │ └── resources
│ │ └── application.properties
├── docker-compose.yaml
├── inventory-addresses.sql
└── register-postgres.json
├── kstreams-live-update
├── README.md
├── aggregator
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ └── Dockerfile.jvm
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── kstreams
│ │ │ └── liveupdate
│ │ │ └── aggregator
│ │ │ ├── StreamsPipelineManager.java
│ │ │ ├── health
│ │ │ └── AggregatorStarted.java
│ │ │ ├── model
│ │ │ ├── Category.java
│ │ │ ├── Order.java
│ │ │ └── ValueAggregator.java
│ │ │ ├── serdes
│ │ │ └── StringWindowedSerde.java
│ │ │ └── ws
│ │ │ └── ChangeEventsWebsocketEndpoint.java
│ │ └── resources
│ │ ├── META-INF
│ │ ├── beans.xml
│ │ └── resources
│ │ │ ├── index.html
│ │ │ └── js
│ │ │ └── chart.js
│ │ │ ├── Chart.bundle.js
│ │ │ ├── Chart.bundle.min.js
│ │ │ ├── Chart.js
│ │ │ ├── Chart.min.js
│ │ │ ├── LICENSE.md
│ │ │ └── samples
│ │ │ └── utils.js
│ │ ├── application.properties
│ │ └── log4j.properties
├── debezium-es
│ └── Dockerfile
├── demo-os.md
├── demo.md
├── docker-compose-mysql.yaml
├── docker-compose-postgres.yaml
├── docker-compose.png
├── es-sink.json
├── event-source
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ └── Dockerfile.jvm
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── kstreams
│ │ │ └── liveupdate
│ │ │ └── eventsource
│ │ │ ├── Category.java
│ │ │ ├── Customer.java
│ │ │ ├── EventSource.java
│ │ │ ├── Main.java
│ │ │ ├── Order.java
│ │ │ └── Product.java
│ │ └── resources
│ │ ├── META-INF
│ │ └── persistence.xml
│ │ └── log4j.properties
├── mysql-db
│ ├── Dockerfile
│ └── schema-update.sql
├── mysql-source.json
├── os-setup.sh
├── pgsql-source.json
└── postgres-db
│ ├── Dockerfile
│ └── schema-update.sql
├── kstreams
├── README.md
├── debezium-mongodb
│ └── Dockerfile
├── docker-compose.yaml
├── mongodb-sink.json
├── mysql-source.json
└── poc-ddd-aggregates
│ ├── Dockerfile
│ ├── pom.xml
│ ├── run-aggregator.sh
│ └── src
│ └── main
│ └── java
│ └── io
│ └── debezium
│ └── examples
│ └── aggregation
│ ├── StreamingAggregatesDDD.java
│ ├── model
│ ├── Address.java
│ ├── Addresses.java
│ ├── Customer.java
│ ├── CustomerAddressAggregate.java
│ ├── DefaultId.java
│ ├── EventType.java
│ └── LatestAddress.java
│ └── serdes
│ ├── JsonHybridDeserializer.java
│ ├── JsonPojoSerializer.java
│ └── SerdeFactory.java
├── machine-learning
├── flink-spark-iris
│ ├── README.adoc
│ ├── docker-compose-flink.yaml
│ ├── docker-compose-spark.yaml
│ ├── iris-flink
│ │ ├── Dockerfile
│ │ ├── pom.xml
│ │ └── src
│ │ │ └── main
│ │ │ └── java
│ │ │ └── io
│ │ │ └── github
│ │ │ └── vjuranek
│ │ │ └── FlinkKafkaKmeans.java
│ ├── iris-spark
│ │ ├── Dockerfile
│ │ ├── pom.xml
│ │ └── src
│ │ │ └── main
│ │ │ └── java
│ │ │ └── io
│ │ │ └── github
│ │ │ └── vjuranek
│ │ │ └── SparkKafkaStreamingKmeans.java
│ ├── iris2sql.py
│ ├── postgres
│ │ └── Dockerfile
│ ├── register-postgres-flink.json
│ └── register-postgres-spark.json
└── tensorflow-mnist
│ ├── README.adoc
│ ├── connect
│ ├── Dockerfile
│ └── mnist-smt
│ │ ├── pom.xml
│ │ └── src
│ │ └── main
│ │ └── java
│ │ └── io
│ │ └── debezium
│ │ └── transforms
│ │ └── MnistToCsv.java
│ ├── docker-compose.yaml
│ ├── mnist2sql.py
│ ├── mnist_kafka.ipynb
│ ├── postgres
│ └── Dockerfile
│ ├── register-postgres.json
│ └── tensorflow
│ ├── Dockerfile
│ └── requirements.txt
├── mongodb-outbox
├── README.md
├── debezium-strimzi
│ └── Dockerfile
├── docker-compose.yaml
└── register-mongodb.json
├── monitoring
├── README.md
├── dashboard.png
├── debezium-grafana
│ ├── Dockerfile
│ ├── dashboard.yml
│ ├── datasource.yml
│ ├── debezium-dashboard.json
│ └── debezium-mysql-connector-dashboard.json
├── debezium-jmx-exporter
│ ├── Dockerfile
│ └── config.yml
├── debezium-prometheus
│ └── Dockerfile
├── docker-compose.yml
├── inventory.sql
└── register-sqlserver.json
├── mysql-replication
├── README.md
├── connector-config-master.json
├── connector-config-replica.json
├── docker-compose.yaml
└── register-mysql.json
├── offset-editor
├── README.md
├── pom.xml
└── src
│ └── main
│ └── java
│ └── io
│ └── debezium
│ └── examples
│ └── offset
│ └── editor
│ ├── CommandLineInterface.java
│ ├── Main.java
│ ├── OffsetEditorApp.java
│ └── OffsetFileController.java
├── openshift
├── README.md
├── kafka-connect.yaml
├── kafka-connector.yaml
├── kafka.yaml
├── mysql.yaml
├── role-binding.yaml
├── role.yaml
├── secret.yaml
└── strimzi-kafka-operator.yaml
├── operator
├── tutorial-postgresql-kafka
│ ├── .gitignore
│ ├── README.md
│ ├── create-environment.sh
│ ├── destroy-environment.sh
│ ├── env.sh
│ └── infra
│ │ ├── 001_postgresql.yml
│ │ ├── 002_kafka-ephemeral.yml
│ │ ├── 010_debezium-subscription.yml
│ │ └── 011_debezium-server-ephemeral.yml
└── tutorial-pubsub
│ ├── .gitignore
│ ├── README.md
│ ├── create-environment.sh
│ ├── destroy-environment.sh
│ ├── env.sh
│ ├── k8s
│ ├── database
│ │ └── 001_postgresql.yml
│ ├── debezium
│ │ └── 002_debezium-server.yml
│ └── operator
│ │ └── 001_subscription.yml
│ ├── olm
│ └── install.sh
│ ├── pubsub.sh
│ └── pull.sh
├── outbox
├── README.md
├── debezium-strimzi
│ └── Dockerfile
├── docker-compose.yaml
├── jaeger.png
├── order-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── outbox
│ │ │ └── order
│ │ │ ├── event
│ │ │ ├── InvoiceCreatedEvent.java
│ │ │ ├── OrderCreatedEvent.java
│ │ │ └── OrderLineUpdatedEvent.java
│ │ │ ├── model
│ │ │ ├── EntityNotFoundException.java
│ │ │ ├── OrderLine.java
│ │ │ ├── OrderLineStatus.java
│ │ │ └── PurchaseOrder.java
│ │ │ ├── rest
│ │ │ ├── CreateOrderRequest.java
│ │ │ ├── OrderLineDto.java
│ │ │ ├── OrderOperationResponse.java
│ │ │ ├── OrderResource.java
│ │ │ ├── RestApplication.java
│ │ │ ├── UpdateOrderLineRequest.java
│ │ │ ├── UpdateOrderLineResponse.java
│ │ │ └── util
│ │ │ │ ├── EntityNotFoundMapper.java
│ │ │ │ └── JacksonProducer.java
│ │ │ └── service
│ │ │ └── OrderService.java
│ │ └── resources
│ │ ├── META-INF
│ │ └── resources
│ │ │ └── index.html
│ │ └── application.properties
├── outbox-overview.png
├── outbox.excalidraw
├── pom.xml
├── register-postgres.json
├── resources
│ └── data
│ │ ├── cancel-order-line-request.json
│ │ └── create-order-request.json
├── servers.json
├── service-overview.png
└── shipment-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ └── main
│ ├── docker
│ ├── Dockerfile.jvm
│ └── Dockerfile.native
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── examples
│ │ └── outbox
│ │ └── shipment
│ │ ├── facade
│ │ ├── KafkaEventConsumer.java
│ │ └── OrderEventHandler.java
│ │ ├── messagelog
│ │ ├── ConsumedMessage.java
│ │ └── MessageLog.java
│ │ ├── model
│ │ └── Shipment.java
│ │ └── service
│ │ └── ShipmentService.java
│ └── resources
│ └── application.properties
├── postgres-failover-slots
├── 00_init.sql
├── README.md
├── docker-compose.yaml
└── inventory-source.json
├── postgres-kafka-signal
├── README.md
├── connector.json
└── docker-compose.yml
├── postgres-toast
├── README.md
├── debezium-jdbc
│ └── Dockerfile
├── debezium-source.json
├── docker-compose.yaml
├── jdbc-sink.json
├── sink-db
│ ├── Dockerfile
│ └── schema-update.sql
├── source-db
│ ├── Dockerfile
│ └── schema-update.sql
└── toast-value-store
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ └── main
│ ├── docker
│ ├── Dockerfile.jvm
│ └── Dockerfile.native
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── demos
│ │ └── pgtoast
│ │ ├── JsonObjectSerde.java
│ │ ├── ToastColumnValueProvider.java
│ │ └── TopologyProducer.java
│ └── resources
│ └── application.properties
├── quarkus-native
├── README.md
├── docker-compose.yaml
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── example
│ │ └── quarkus
│ │ ├── DatabaseChangeEventListener.java
│ │ ├── DebeziumConfiguration.java
│ │ └── ReflectingConfig.java
│ └── resources
│ └── application.properties
├── saga
├── .mvn
│ └── wrapper
│ │ ├── MavenWrapperDownloader.java
│ │ └── maven-wrapper.properties
├── README.md
├── customer-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.fast-jar
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── saga
│ │ │ └── customer
│ │ │ ├── event
│ │ │ └── CreditEvent.java
│ │ │ ├── facade
│ │ │ ├── CreditDeserializer.java
│ │ │ ├── CreditEventHandler.java
│ │ │ └── KafkaEventConsumer.java
│ │ │ ├── messagelog
│ │ │ ├── ConsumedMessage.java
│ │ │ └── MessageLog.java
│ │ │ └── model
│ │ │ ├── CreditLimitEvent.java
│ │ │ ├── CreditRequestType.java
│ │ │ ├── CreditStatus.java
│ │ │ └── Customer.java
│ │ └── resources
│ │ └── application.properties
├── debezium-strimzi
│ └── Dockerfile
├── delete-connectors.sh
├── docker-compose.yaml
├── init-customer.sql
├── init-order.sql
├── init-payment.sql
├── order-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.fast-jar
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── saga
│ │ │ ├── framework
│ │ │ ├── Saga.java
│ │ │ ├── SagaBase.java
│ │ │ ├── SagaManager.java
│ │ │ ├── SagaStatus.java
│ │ │ ├── SagaStepMessage.java
│ │ │ └── internal
│ │ │ │ ├── ConsumedMessage.java
│ │ │ │ ├── SagaState.java
│ │ │ │ ├── SagaStepState.java
│ │ │ │ └── SagaStepStatus.java
│ │ │ └── order
│ │ │ ├── event
│ │ │ ├── CreditApprovalEvent.java
│ │ │ ├── CreditApprovalEventPayload.java
│ │ │ ├── CreditApprovalStatus.java
│ │ │ ├── PaymentEvent.java
│ │ │ ├── PaymentEventPayload.java
│ │ │ └── PaymentStatus.java
│ │ │ ├── facade
│ │ │ ├── KafkaEventConsumer.java
│ │ │ └── serdes
│ │ │ │ ├── CreditDeserializer.java
│ │ │ │ ├── CreditEventConverter.java
│ │ │ │ ├── PaymentDeserializer.java
│ │ │ │ └── PaymentEventConverter.java
│ │ │ ├── model
│ │ │ ├── PurchaseOrder.java
│ │ │ └── PurchaseOrderStatus.java
│ │ │ ├── rest
│ │ │ ├── MyObjectMapperCustomizer.java
│ │ │ ├── PlaceOrderRequest.java
│ │ │ ├── PlaceOrderResponse.java
│ │ │ └── PurchaseOrderResource.java
│ │ │ └── saga
│ │ │ ├── OrderPlacementEventHandler.java
│ │ │ ├── OrderPlacementSaga.java
│ │ │ └── SagaEvent.java
│ │ └── resources
│ │ └── application.properties
├── payment-service
│ ├── .dockerignore
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ ├── docker
│ │ ├── Dockerfile.fast-jar
│ │ ├── Dockerfile.jvm
│ │ └── Dockerfile.native
│ │ ├── java
│ │ └── io
│ │ │ └── debezium
│ │ │ └── examples
│ │ │ └── saga
│ │ │ └── payment
│ │ │ ├── event
│ │ │ └── PaymentEvent.java
│ │ │ ├── facade
│ │ │ ├── KafkaEventConsumer.java
│ │ │ ├── PaymentDeserializer.java
│ │ │ └── PaymentEventHandler.java
│ │ │ ├── messagelog
│ │ │ ├── ConsumedMessage.java
│ │ │ └── MessageLog.java
│ │ │ └── model
│ │ │ ├── Payment.java
│ │ │ ├── PaymentRequestType.java
│ │ │ └── PaymentStatus.java
│ │ └── resources
│ │ └── application.properties
├── pom.xml
├── register-connectors.sh
├── register-credit-connector.json
├── register-order-connector.json
├── register-payment-connector.json
├── register-sagastate-connector.json
├── requests
│ ├── place-invalid-order1.json
│ ├── place-invalid-order2.json
│ └── place-order.json
├── saga.json
├── servers.json
├── solution-overview.excalidraw
└── solution-overview.png
├── sql-server-read-replica
├── README.md
├── debezium-sqlserver-init
│ ├── Dockerfile
│ ├── debezium.cer
│ ├── debezium.key
│ ├── inventory.sql
│ ├── setup-primary.sql
│ └── setup-secondary.sql
├── docker-compose.yaml
└── register-sqlserver.json
├── testcontainers
├── README.md
├── pom.xml
└── src
│ └── test
│ ├── java
│ └── io
│ │ └── debezium
│ │ └── examples
│ │ └── testcontainers
│ │ └── DebeziumContainerTest.java
│ └── resources
│ └── log4j.properties
├── topic-auto-create
├── README.md
├── connector.json
└── docker-compose.yml
├── tutorial
├── README.md
├── db2data
│ └── .donotdelete
├── debezium-cassandra-init
│ ├── Dockerfile
│ ├── cassandra.yaml
│ ├── config.properties
│ ├── inventory.cql
│ ├── log4j.properties
│ └── startup-script.sh
├── debezium-db2-init
│ ├── db2connect
│ │ └── Dockerfile
│ └── db2server
│ │ ├── Dockerfile
│ │ ├── asncdc.c
│ │ ├── asncdc_UDF.sql
│ │ ├── asncdcaddremove.sql
│ │ ├── asncdctables.sql
│ │ ├── cdcsetup.sh
│ │ ├── custom-init
│ │ └── cleanup_storage.sh
│ │ ├── dbsetup.sh
│ │ ├── inventory.sql
│ │ ├── openshift_entrypoint.sh
│ │ ├── startup-agent.sql
│ │ └── startup-cdc-demo.sql
├── debezium-ifx-init
│ ├── ifxconnect
│ │ └── Dockerfile
│ └── ifxserver
│ │ ├── Dockerfile
│ │ ├── informix_post_init.sh
│ │ └── inventory.sql
├── debezium-sqlserver-init
│ └── inventory.sql
├── debezium-vitess-init
│ ├── Dockerfile
│ ├── install_local_dependencies.sh
│ └── local
│ │ ├── create_tables_sharded_inventory.sql
│ │ ├── create_tables_unsharded_customer.sql
│ │ ├── env.sh
│ │ ├── grpc_static_auth.json
│ │ ├── grpc_static_client_auth.json
│ │ ├── initial_cluster.sh
│ │ ├── insert_customer_data.sql
│ │ ├── insert_inventory_data.sql
│ │ ├── scripts
│ │ ├── etcd-up.sh
│ │ ├── mysqlctl-up.sh
│ │ ├── vtctld-up.sh
│ │ ├── vtgate-up.sh
│ │ └── vttablet-up.sh
│ │ ├── select_customer0_data.sql
│ │ ├── select_inventory-80_data.sql
│ │ ├── select_inventory80-_data.sql
│ │ ├── vschema_tables_sharded_inventory.json
│ │ └── vschema_tables_unsharded_customer.json
├── debezium-with-oracle-jdbc
│ ├── Dockerfile
│ └── init
│ │ └── inventory.sql
├── docker-compose-cassandra.yaml
├── docker-compose-db2.yaml
├── docker-compose-ifx.yaml
├── docker-compose-mariadb.yaml
├── docker-compose-mongodb.yaml
├── docker-compose-mysql-apicurio.png
├── docker-compose-mysql-apicurio.yaml
├── docker-compose-mysql-avro-connector.yaml
├── docker-compose-mysql-avro-worker.yaml
├── docker-compose-mysql-ext-secrets.yaml
├── docker-compose-mysql.yaml
├── docker-compose-oracle.yaml
├── docker-compose-postgres.yaml
├── docker-compose-sqlserver.yaml
├── docker-compose-timescaledb.yaml
├── docker-compose-vitess.yaml
├── docker-compose-zookeeperless-kafka-combined.yaml
├── docker-compose-zookeeperless-kafka.yaml
├── register-db2.json
├── register-ifx.json
├── register-mariadb.json
├── register-mongodb.json
├── register-mysql-apicurio-compatibile-converter-avro.json
├── register-mysql-apicurio-converter-avro.json
├── register-mysql-apicurio-converter-json.json
├── register-mysql-apicurio.json
├── register-mysql-avro.json
├── register-mysql-ext-secrets.json
├── register-mysql.json
├── register-oracle-logminer.json
├── register-postgres.json
├── register-sqlserver.json
├── register-timescaledb.yaml
├── register-vitess.json
├── secrets
│ └── mysql.properties
├── timescaledb
│ ├── 002_enable_replication.sh
│ └── Dockerfile
└── vitess-sharding-setup.png
├── unwrap-mongodb-smt
├── README.md
├── debezium-jdbc
│ └── Dockerfile
├── docker-compose.yaml
├── jdbc-sink.json
└── mongodb-source.json
└── unwrap-smt
├── README.md
├── debezium-jdbc-es
└── Dockerfile
├── docker-compose-es.yaml
├── docker-compose-jdbc.yaml
├── docker-compose.yaml
├── es-sink-aggregates.json
├── es-sink.json
├── jdbc-sink.json
└── source.json
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Description
2 |
7 |
8 | ## Checklist
9 |
10 | - [ ] If the changes include a new example, I added it to the list of examples in the [README.md](https://github.com/debezium/debezium-examples/blob/main/README.md) file
--------------------------------------------------------------------------------
/.github/workflows/saga-workflow.yml:
--------------------------------------------------------------------------------
1 | name: Build [saga]
2 |
3 | on:
4 | push:
5 | paths:
6 | - 'saga/**'
7 | - '.github/workflows/saga-workflow.yml'
8 | pull_request:
9 | paths:
10 | - 'saga/**'
11 | - '.github/workflows/saga-workflow.yml'
12 |
13 | jobs:
14 | build:
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v4
18 | - name: Cache local Maven repository
19 | uses: actions/cache@v2
20 | with:
21 | path: ~/.m2/repository
22 | key: ${{ runner.os }}-maven-${{ hashFiles('saga/**/pom.xml') }}
23 | restore-keys: |
24 | ${{ runner.os }}-maven-
25 | - name: Check changes in [saga] example
26 | run: cd saga && mvn clean verify -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120
27 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | activemq-data/
2 | .idea/
3 | *.iml
4 | *.ipr
5 | *.iws
6 | .metadata/
7 | .recommenders/
8 | .classpath
9 | .factorypath
10 | .project
11 | .cache
12 | .settings/
13 | .checkstyle
14 | .gradle/
15 | .vscode/
16 | bin/
17 | build/
18 | deploy/
19 | target/
20 | mods/
21 | *.swp
22 | epom
23 | log
24 | npm-debug.log
25 | .DS_Store
26 | phantomjsdriver.log
27 |
28 | generated-sources/
29 |
30 | /state/
31 |
32 | oracle_instantclient/
33 | db2data/
34 |
--------------------------------------------------------------------------------
/ai-rag/client/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/ai-rag/client/.gitignore:
--------------------------------------------------------------------------------
1 | #Maven
2 | target/
3 | pom.xml.tag
4 | pom.xml.releaseBackup
5 | pom.xml.versionsBackup
6 | release.properties
7 | .flattened-pom.xml
8 |
9 | # Eclipse
10 | .project
11 | .classpath
12 | .settings/
13 | bin/
14 |
15 | # IntelliJ
16 | .idea
17 | *.ipr
18 | *.iml
19 | *.iws
20 |
21 | # NetBeans
22 | nb-configuration.xml
23 |
24 | # Visual Studio Code
25 | .vscode
26 | .factorypath
27 |
28 | # OSX
29 | .DS_Store
30 |
31 | # Vim
32 | *.swp
33 | *.swo
34 |
35 | # patch
36 | *.orig
37 | *.rej
38 |
39 | # Local environment
40 | .env
41 |
42 | # Plugin directory
43 | /.quarkus/cli/plugins/
44 | # TLS Certificates
45 | .certs/
46 |
--------------------------------------------------------------------------------
/ai-rag/client/.mvn/wrapper/.gitignore:
--------------------------------------------------------------------------------
1 | maven-wrapper.jar
2 |
--------------------------------------------------------------------------------
/ai-rag/client/src/main/java/io/debezium/examples/airag/Chat.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.airag;
2 |
3 | import dev.langchain4j.service.SystemMessage;
4 | import dev.langchain4j.service.UserMessage;
5 | import io.quarkiverse.langchain4j.RegisterAiService;
6 |
7 | @RegisterAiService(retrievalAugmentor = MilvusRetrievalAugmentor.class)
8 | public interface Chat {
9 |
10 | @SystemMessage("You are an expert that provides short summaries.")
11 | String chat(@UserMessage String message);
12 | }
--------------------------------------------------------------------------------
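
The @RegisterAiService interface above is exposed as an injectable CDI bean by quarkus-langchain4j. A minimal, hypothetical sketch of calling it from a picocli command (the module's actual RagCommand may be structured differently):

    // Hypothetical command, not the module's RagCommand: inject the Chat AI service
    // and forward a user question. The MilvusRetrievalAugmentor configured on the
    // interface enriches the prompt with matching documents before the model call.
    package io.debezium.examples.airag;

    import jakarta.inject.Inject;
    import picocli.CommandLine.Command;
    import picocli.CommandLine.Parameters;

    @Command(name = "ask")
    public class AskCommand implements Runnable {

        @Inject
        Chat chat;

        @Parameters(description = "Question to send to the model")
        String question;

        @Override
        public void run() {
            System.out.println(chat.chat(question));
        }
    }
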
/ai-rag/client/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=WARN, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 |
--------------------------------------------------------------------------------
/ai-rag/config-postgres/ai-sample-data.sql:
--------------------------------------------------------------------------------
1 | CREATE SCHEMA ai;
2 | SET search_path TO ai;
3 |
4 | CREATE TABLE documents (
5 | id VARCHAR(64) PRIMARY KEY,
6 | metadata JSON,
7 | text TEXT
8 | );
9 | ALTER TABLE documents REPLICA IDENTITY FULL;
10 |
--------------------------------------------------------------------------------
/ai-rag/image-server/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/server:3.2.0.Alpha1 AS builder
2 |
3 | USER root
4 | RUN microdnf -y install gzip && \
5 | microdnf clean all
6 |
7 | USER jboss
8 | RUN mkdir /tmp/ai && \
9 | curl -kL 'https://repo1.maven.org/maven2/io/debezium/debezium-ai-embeddings-ollama/3.2.0.Alpha1/debezium-ai-embeddings-ollama-3.2.0.Alpha1-embeddings.tar.gz' | tar xvzf - -C /tmp/ai --strip-components=1
10 |
11 | FROM quay.io/debezium/server:3.2.0.Alpha1
12 |
13 | USER root
14 | RUN microdnf clean all
15 |
16 | USER jboss
17 | COPY --from=builder /tmp/ai $SERVER_HOME/lib
18 |
19 |
--------------------------------------------------------------------------------
/apache-pulsar/src/main/resources/config.properties:
--------------------------------------------------------------------------------
1 | connector.class=io.debezium.connector.postgresql.PostgresConnector
2 | offset.storage=org.apache.kafka.connect.storage.FileOffsetBackingStore
3 | offset.storage.file.filename=offset.dat
4 | offset.flush.interval.ms=5000
5 | name=postgres-dbz-connector
6 | database.hostname=localhost
7 | database.port=5432
8 | database.user=postgres
9 | database.password=postgres
10 | database.dbname=postgres
11 | schema.include.list=inventory
12 | plugin.name=pgoutput
13 | topic.prefix=test
14 |
15 | pulsar.topic=persistent://public/default/{0}
16 | pulsar.broker.address=pulsar://localhost:6650
17 |
--------------------------------------------------------------------------------
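
This file mixes embedded Debezium connector settings with two Pulsar-specific keys. A rough sketch, under the assumption that an embedded Debezium engine forwards each change event to a Pulsar topic derived from the {0} placeholder (the module's actual PulsarProducer may be organized differently):

    import java.io.InputStream;
    import java.text.MessageFormat;
    import java.util.Properties;
    import java.util.concurrent.Executors;

    import org.apache.pulsar.client.api.Producer;
    import org.apache.pulsar.client.api.PulsarClient;
    import org.apache.pulsar.client.api.Schema;

    import io.debezium.engine.ChangeEvent;
    import io.debezium.engine.DebeziumEngine;
    import io.debezium.engine.format.Json;

    public class PulsarForwarderSketch {

        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            try (InputStream in = PulsarForwarderSketch.class.getResourceAsStream("/config.properties")) {
                props.load(in);
            }

            PulsarClient client = PulsarClient.builder()
                    .serviceUrl(props.getProperty("pulsar.broker.address"))
                    .build();

            DebeziumEngine<ChangeEvent<String, String>> engine = DebeziumEngine.create(Json.class)
                    .using(props)
                    .notifying(record -> {
                        if (record.value() == null) {
                            return; // skip tombstone events in this sketch
                        }
                        // pulsar.topic carries a {0} placeholder that is filled with the
                        // per-table topic name Debezium assigns to the event.
                        String topic = MessageFormat.format(props.getProperty("pulsar.topic"), record.destination());
                        // A real implementation would cache one producer per topic.
                        try (Producer<String> producer = client.newProducer(Schema.STRING).topic(topic).create()) {
                            producer.send(record.value());
                        }
                        catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    })
                    .build();

            Executors.newSingleThreadExecutor().execute(engine);
        }
    }
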
/apache-pulsar/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, stdout
3 |
4 | # Redirect log messages to console
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=[%d{yyyy-MM-dd}T%d{HH:mm:ss.SSS}Z] %p %t %c{1}:%L %m%n
9 | log4j.logger.io.debezium.examples.apache.pulsar=DEBUG
10 |
--------------------------------------------------------------------------------
/auditlog/admin-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
--------------------------------------------------------------------------------
/auditlog/admin-service/src/main/docker/Dockerfile.jvm:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.jvm -t quarkus/admin-service-jvm .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/admin-service-jvm
15 | #
16 | ###
17 | # FROM fabric8/java-alpine-openjdk8-jre
18 | FROM fabric8/java-centos-openjdk8-jdk
19 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
20 | ENV AB_ENABLED=jmx_exporter
21 | COPY target/lib/* /deployments/lib/
22 | COPY target/*-runner.jar /deployments/app.jar
23 | ENTRYPOINT [ "/deployments/run-java.sh" ]
--------------------------------------------------------------------------------
/auditlog/admin-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package -Pnative -Dnative-image.docker-build=true
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/admin-service .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/admin-service
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal
18 | WORKDIR /work/
19 | COPY target/*-runner /work/application
20 | RUN chmod 775 /work
21 | EXPOSE 8080
22 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
--------------------------------------------------------------------------------
/auditlog/admin-service/src/main/resources/io/debezium/demos/auditing/admin/matching-rules.drl:
--------------------------------------------------------------------------------
1 | package io.debezium.demos.auditing.admin
2 |
3 | import io.debezium.demos.auditing.admin.TransactionEvent;
4 | import io.debezium.demos.auditing.admin.VegetableEvent;
5 |
6 |
7 | rule "Match events"
8 |
9 | when
10 | $tx : TransactionEvent()
11 | $veg : VegetableEvent(source.transactionId == $tx.source.transactionId)
12 | then
13 | System.out.println($veg + " is matched with transaction data");
14 | $veg.setMatched(true);
15 |
16 | delete($tx);
17 | delete($veg);
18 | end
19 |
--------------------------------------------------------------------------------
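
The rule correlates a TransactionEvent and a VegetableEvent that carry the same transaction id. A generic Drools sketch of exercising it against a plain KieSession, taking the two facts as parameters (the admin-service itself feeds these facts through its Kogito/BPMN processes rather than like this):

    import org.kie.api.KieServices;
    import org.kie.api.runtime.KieSession;

    import io.debezium.demos.auditing.admin.TransactionEvent;
    import io.debezium.demos.auditing.admin.VegetableEvent;

    public class MatchingRulesSketch {

        // Correlate one vegetable change event with its transaction metadata event.
        public static void correlate(TransactionEvent tx, VegetableEvent veg) {
            KieSession session = KieServices.Factory.get()
                    .getKieClasspathContainer()
                    .newKieSession();
            try {
                // If both facts share the same source.transactionId, the "Match events"
                // rule fires, flags the vegetable event as matched, and retracts both.
                session.insert(tx);
                session.insert(veg);
                session.fireAllRules();
            }
            finally {
                session.dispose();
            }
        }
    }
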
/auditlog/log-enricher/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
--------------------------------------------------------------------------------
/auditlog/log-enricher/src/main/docker/Dockerfile.jvm:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.jvm -t quarkus/auditing-log-enricher-jvm .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/auditing-log-enricher-jvm
15 | #
16 | ###
17 | FROM fabric8/java-centos-openjdk8-jdk
18 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
19 | ENV AB_ENABLED=jmx_exporter
20 | COPY target/lib/* /deployments/lib/
21 | COPY target/*-runner.jar /deployments/app.jar
22 | ENTRYPOINT [ "/deployments/run-java.sh" ]
--------------------------------------------------------------------------------
/auditlog/log-enricher/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package -Pnative -Dnative-image.docker-build=true
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/auditing-log-enricher .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/auditing-log-enricher
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal
18 | WORKDIR /work/
19 | COPY target/*-runner /work/application
20 | RUN chmod 775 /work
21 | EXPOSE 8080
22 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
--------------------------------------------------------------------------------
/auditlog/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0">
4 |     <modelVersion>4.0.0</modelVersion>
5 |     <groupId>io.debezium.demos.auditing</groupId>
6 |     <artifactId>auditing-parent</artifactId>
7 |     <version>1.0-SNAPSHOT</version>
8 |     <packaging>pom</packaging>
9 |     <modules>
10 |         <module>vegetables-service</module>
11 |         <module>log-enricher</module>
12 |         <module>admin-service</module>
13 |     </modules>
14 | </project>
15 |
--------------------------------------------------------------------------------
/auditlog/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "vegetables-db",
5 | "database.port": "5432",
6 | "database.user": "postgresuser",
7 | "database.password": "postgrespw",
8 | "database.dbname" : "vegetablesdb",
9 | "database.server.name": "dbserver1",
10 | "table.include.list": "inventory.vegetable,inventory.transaction_context_data"
11 | }
12 |
--------------------------------------------------------------------------------
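
Connector configurations like this one are typically sent to the Kafka Connect REST API (the example READMEs use curl for this). An equivalent, hypothetical Java sketch, assuming Connect listens on localhost:8083 and the flat configuration is wrapped in the usual name/config envelope:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class RegisterConnectorSketch {

        public static void main(String[] args) throws Exception {
            String config = Files.readString(Path.of("register-postgres.json"));
            // "vegetables-connector" is an assumed name; POST /connectors expects
            // a payload of the form { "name": ..., "config": { ... } }.
            String payload = "{ \"name\": \"vegetables-connector\", \"config\": " + config + " }";

            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8083/connectors"))
                    .header("Content-Type", "application/json")
                    .POST(HttpRequest.BodyPublishers.ofString(payload))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode() + " " + response.body());
        }
    }
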
/auditlog/vegetables-service/jwt/jwt-token.json:
--------------------------------------------------------------------------------
1 | {
2 | "iss" : "farmshop",
3 | "jti" : "42",
4 | "sub" : "farmerbob",
5 | "upn" : "farmerbob",
6 | "exp" : 4102444799,
7 | "groups":[
8 | "farmers",
9 | "customers"
10 | ]
11 | }
12 |
--------------------------------------------------------------------------------
/auditlog/vegetables-service/jwt/jwtenizr-0.0.4-SNAPSHOT.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/auditlog/vegetables-service/jwt/jwtenizr-0.0.4-SNAPSHOT.jar
--------------------------------------------------------------------------------
/auditlog/vegetables-service/jwt/microprofile-config.properties:
--------------------------------------------------------------------------------
1 | #generated by jwtenizr
2 | #Thu Aug 22 09:15:13 CEST 2019
3 | mp.jwt.verify.issuer=farmshop
4 | mp.jwt.verify.publickey=MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAjYWJ6Zt9Jo9dxVuMglo0rYN4vBV0T7AP+qD/aI7tTrus6ZMvTi/+JKlNpEAS0b6yasYjxuKmh3eYT0PbGmGERr07VDsVcV/iezl9pj+fceY4lebrExS36yGQJs6BUXYF4P8ynmvnKC40AuyxKFgb3T08h1jxoBsBKlPfAT620ZP1vwgGwZB7iAfzdNYtt3z2NtkyPMaD1mHU6rxewjVN9XVSSSPKO8nFPTYsm1i4ePohgWr9bxwFHkXzyk7DnpUBMZzlVUUXVPuEpkVCqnWZTslMw/pgsyXPw1pmV76rVwhI0Ay4XohPW2QvDoPKHhuiQtcNrfL++iEFG8A9hh1K3QIDAQAB
5 |
--------------------------------------------------------------------------------
/auditlog/vegetables-service/jwt/token.jwt:
--------------------------------------------------------------------------------
1 | eyJraWQiOiJqd3Qua2V5IiwidHlwIjoiSldUIiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJmYXJtZXJib2IiLCJ1cG4iOiJmYXJtZXJib2IiLCJhdXRoX3RpbWUiOjE1NjY0NTgxMTMsImlzcyI6ImZhcm1zaG9wIiwiZ3JvdXBzIjpbImZhcm1lcnMiLCJjdXN0b21lcnMiXSwiZXhwIjo0MTAyNDQ0Nzk5LCJpYXQiOjE1NjY0NTgxMTMsImp0aSI6IjQyIn0.CscbJN8amqKryYvnVO1184J8F67HN2iTEjVN2VOPodcnoeOd7_iQVKUjC3h-ye5apkJjvAsQKrjzlrGCHRfl-n6jC9F7IkOtjoWnJ4wQ9BBo1SAtPw_Czt1I_Ujm-Kb1p5-BWACCBCVVFgYZTWP_laz5JZS7dIvs6VqoNnw7A4VpA6iPfTVfYlNY3u86-k1FvEg_hW-N9Y9RuihMsPuTdpHK5xdjCrJiD0VJ7-0eRQ8RXpycHuHN4xfmV8MqXBYjYSYDOhbnYbdQVbf0YJoFFqfb75my5olN-97ITsi2MS62W_y-RNT0qZrbytqINA3fF3VQsSY6VcaqRAeygrKm_Q
--------------------------------------------------------------------------------
/auditlog/vegetables-service/src/main/docker/Dockerfile.jvm:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.jvm -t quarkus/debezium-auditing-demo-jvm .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/debezium-auditing-demo-jvm
15 | #
16 | ###
17 | # FROM fabric8/java-alpine-openjdk8-jre
18 | FROM fabric8/java-centos-openjdk8-jdk
19 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
20 | ENV AB_ENABLED=jmx_exporter
21 | COPY target/lib/* /deployments/lib/
22 | COPY target/*-runner.jar /deployments/app.jar
23 | ENTRYPOINT [ "/deployments/run-java.sh" ]
--------------------------------------------------------------------------------
/auditlog/vegetables-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package -Pnative -Dnative-image.docker-build=true
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/debezium-auditing-demo .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/debezium-auditing-demo
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal
18 | WORKDIR /work/
19 | COPY target/*-runner /work/application
20 | RUN chmod 775 /work
21 | EXPOSE 8080
22 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
--------------------------------------------------------------------------------
/auditlog/vegetables-service/src/main/java/io/debezium/demos/auditing/vegetables/rest/util/ErrorMapper.java:
--------------------------------------------------------------------------------
1 | package io.debezium.demos.auditing.vegetables.rest.util;
2 |
3 | import javax.json.Json;
4 | import javax.persistence.EntityNotFoundException;
5 | import javax.ws.rs.core.Response;
6 | import javax.ws.rs.core.Response.Status;
7 | import javax.ws.rs.ext.ExceptionMapper;
8 | import javax.ws.rs.ext.Provider;
9 |
10 | @Provider
11 | public class ErrorMapper implements ExceptionMapper<EntityNotFoundException> {
12 |
13 | @Override
14 | public Response toResponse(EntityNotFoundException exception) {
15 | return Response.status(Status.NOT_FOUND)
16 | .entity(Json.createObjectBuilder()
17 | .add("error", exception.getMessage())
18 | .build())
19 | .build();
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/auditlog/vegetables-service/src/main/java/io/debezium/demos/auditing/vegetables/transactioncontext/Audited.java:
--------------------------------------------------------------------------------
1 | package io.debezium.demos.auditing.vegetables.transactioncontext;
2 |
3 | import static java.lang.annotation.ElementType.METHOD;
4 | import static java.lang.annotation.ElementType.TYPE;
5 | import static java.lang.annotation.RetentionPolicy.RUNTIME;
6 |
7 | import java.lang.annotation.Retention;
8 | import java.lang.annotation.Target;
9 |
10 | import javax.enterprise.util.Nonbinding;
11 | import javax.interceptor.InterceptorBinding;
12 |
13 | @InterceptorBinding
14 | @Target({METHOD, TYPE})
15 | @Retention(RUNTIME)
16 | public @interface Audited {
17 | @Nonbinding String useCase();
18 | }
19 |
--------------------------------------------------------------------------------
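
A simplified, hypothetical sketch of how a business method could carry this binding so that the TransactionInterceptor records the use case in transaction_context_data within the same database transaction (the module's actual VegetableService may differ; the use-case string is assumed):

    import javax.enterprise.context.ApplicationScoped;
    import javax.inject.Inject;
    import javax.persistence.EntityManager;
    import javax.transaction.Transactional;

    import io.debezium.demos.auditing.vegetables.model.Vegetable;
    import io.debezium.demos.auditing.vegetables.transactioncontext.Audited;

    @ApplicationScoped
    public class AuditedVegetableServiceSketch {

        @Inject
        EntityManager entityManager;

        @Transactional
        @Audited(useCase = "CREATE VEGETABLE")
        public Vegetable createVegetable(Vegetable vegetable) {
            // The interceptor bound to @Audited runs inside this transaction and stores
            // user name, client timestamp, and use case, so the change events captured
            // from vegetable and transaction_context_data can later be joined on the
            // transaction id by the log enricher.
            entityManager.persist(vegetable);
            return vegetable;
        }
    }
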
/auditlog/vegetables-service/src/main/java/io/debezium/demos/auditing/vegetables/transactioncontext/TransactionContextData.java:
--------------------------------------------------------------------------------
1 | package io.debezium.demos.auditing.vegetables.transactioncontext;
2 |
3 | import java.time.ZonedDateTime;
4 |
5 | import javax.persistence.Column;
6 | import javax.persistence.Entity;
7 | import javax.persistence.Id;
8 | import javax.persistence.Table;
9 |
10 | @Entity
11 | @Table(name="transaction_context_data")
12 | public class TransactionContextData {
13 |
14 | @Id
15 | @Column(name="transaction_id")
16 | public long transactionId;
17 |
18 | @Column(name="user_name")
19 | public String userName;
20 |
21 | @Column(name="client_date")
22 | public ZonedDateTime clientDate;
23 |
24 | @Column(name="useCase")
25 | public String useCase;
26 | }
27 |
--------------------------------------------------------------------------------
/auditlog/vegetables-service/src/main/resources/import.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE vegetable REPLICA IDENTITY FULL;
2 | INSERT INTO vegetable (id, name, description) VALUES (1, 'Potato', 'Spicy');
3 |
--------------------------------------------------------------------------------
/cache-invalidation/resources/data/create-order-request.json:
--------------------------------------------------------------------------------
1 | {
2 | "customer" : "Billy-Bob",
3 | "itemId" : 10003,
4 | "quantity" : 2
5 | }
6 |
--------------------------------------------------------------------------------
/cache-invalidation/resources/data/update-item-request.json:
--------------------------------------------------------------------------------
1 | {
2 | "description" : "North by Northwest - Director's Cut",
3 | "price" : 17.99
4 | }
5 |
--------------------------------------------------------------------------------
/cache-invalidation/src/main/java/io/debezium/examples/cacheinvalidation/rest/RestApplication.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.cacheinvalidation.rest;
7 |
8 | import java.util.HashSet;
9 | import java.util.Set;
10 |
11 | import jakarta.ws.rs.ApplicationPath;
12 | import jakarta.ws.rs.core.Application;
13 |
14 | @ApplicationPath("/rest")
15 | public class RestApplication extends Application {
16 |
17 | @Override
18 | public Set<Class<?>> getClasses() {
19 | Set<Class<?>> resourceClasses = new HashSet<>();
20 |
21 | resourceClasses.add(CacheResource.class);
22 | resourceClasses.add(OrderResource.class);
23 | resourceClasses.add(ItemResource.class);
24 |
25 | return resourceClasses;
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/cache-invalidation/src/main/java/io/debezium/examples/cacheinvalidation/rest/UpdateItemRequest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.cacheinvalidation.rest;
7 |
8 | import java.math.BigDecimal;
9 |
10 | public class UpdateItemRequest {
11 |
12 | private String description;
13 | private BigDecimal price;
14 |
15 | public String getDescription() {
16 | return description;
17 | }
18 | public void setDescription(String description) {
19 | this.description = description;
20 | }
21 | public BigDecimal getPrice() {
22 | return price;
23 | }
24 | public void setPrice(BigDecimal price) {
25 | this.price = price;
26 | }
27 | }
28 |
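A minimal sketch of an update endpoint consuming `UpdateItemRequest`; the example's actual `ItemResource` and the accessors of the `Item` entity are assumptions here, not copied code:

```java
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.transaction.Transactional;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.PUT;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.core.MediaType;

@ApplicationScoped
@Path("/items")
public class ItemResourceSketch {

    @PersistenceContext
    EntityManager entityManager;

    @PUT
    @Path("/{id}")
    @Consumes(MediaType.APPLICATION_JSON)
    @Transactional
    public void updateItem(@PathParam("id") long id, UpdateItemRequest request) {
        // loading the managed entity and mutating it produces the UPDATE event
        // that Debezium later turns into a cache invalidation on other nodes
        Item item = entityManager.find(Item.class, id);
        item.setDescription(request.getDescription());
        item.setPrice(request.getPrice());
    }
}
```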
--------------------------------------------------------------------------------
/cache-invalidation/src/main/resources/META-INF/data.sql:
--------------------------------------------------------------------------------
1 | INSERT INTO Item (id,description,price) VALUES (10001, 'The Birds', 9.99);
2 | INSERT INTO Item (id,description,price) VALUES (10002, 'To Catch A Thief', 12.99);
3 | INSERT INTO Item (id,description,price) VALUES (10003, 'North By Northwest', 14.99);
4 |
--------------------------------------------------------------------------------
/cache-invalidation/src/main/resources/META-INF/services/org.hibernate.integrator.spi.Integrator:
--------------------------------------------------------------------------------
1 | io.debezium.examples.cacheinvalidation.persistence.TransactionRegistrationIntegrator
2 |
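Hibernate discovers the class named in this service file via `ServiceLoader` at bootstrap. A sketch of what such an `Integrator` can do, written against the long-standing three-argument SPI (newer Hibernate versions also offer a different overload); the listener body is illustrative only and the actual `TransactionRegistrationIntegrator` in this example does more:

```java
package io.debezium.examples.cacheinvalidation.persistence;

import org.hibernate.boot.Metadata;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType;
import org.hibernate.event.spi.FlushEventListener;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;

public class TransactionRegistrationIntegrator implements Integrator {

    @Override
    public void integrate(Metadata metadata, SessionFactoryImplementor sessionFactory,
            SessionFactoryServiceRegistry serviceRegistry) {
        EventListenerRegistry registry = serviceRegistry.getService(EventListenerRegistry.class);

        // Track flushes so the change-event handler can tell the application's own
        // transactions apart from external ones and skip invalidating the cache for them.
        FlushEventListener listener = event -> { /* remember the current transaction */ };
        registry.appendListeners(EventType.FLUSH, listener);
    }

    @Override
    public void disintegrate(SessionFactoryImplementor sessionFactory,
            SessionFactoryServiceRegistry serviceRegistry) {
        // nothing to clean up in this sketch
    }
}
```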
--------------------------------------------------------------------------------
/camel-component/docker-compose.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/camel-component/docker-compose.png
--------------------------------------------------------------------------------
/camel-component/qa-app/src/main/java/io/debezium/examples/camel/qaapp/Vote.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.camel.qaapp;
7 |
8 | public class Vote {
9 |
10 | public static enum VoteType {
11 | UP,
12 | DOWN
13 | }
14 |
15 | private final VoteType voteType;
16 |
17 | public Vote(VoteType voteType) {
18 | this.voteType = voteType;
19 | }
20 |
21 | public int voteValue() {
22 | return voteType == VoteType.UP ? 1 : -1;
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/camel-component/qa-app/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | quarkus.datasource.db-kind=postgresql
2 | quarkus.datasource.jdbc.url=jdbc:postgresql://qa-db:5432/postgres?currentSchema=inventory
3 | quarkus.datasource.username=postgres
4 | quarkus.datasource.password=postgres
5 | quarkus.hibernate-orm.database.generation=drop-and-create
6 | quarkus.hibernate-orm.log.sql=true
7 |
--------------------------------------------------------------------------------
/camel-component/qa-app/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=INFO, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 |
--------------------------------------------------------------------------------
/camel-component/qa-camel/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM registry.access.redhat.com/ubi8/openjdk-17-runtime
2 |
3 | COPY target/dependency/* /deployments/
4 | COPY target/*.jar /deployments/app.jar
5 | COPY target/docker-extra/run-java/run-java.sh /deployments
6 |
7 | ENTRYPOINT [ "/deployments/run-java.sh" ]
8 |
--------------------------------------------------------------------------------
/camel-component/qa-camel/src/main/java/io/debezium/examples/camel/pipeline/Converters.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.camel.pipeline;
7 |
8 | import org.apache.camel.Converter;
9 | import org.apache.kafka.connect.data.Struct;
10 |
11 | @Converter
12 | public class Converters {
13 |
14 | @Converter
15 | public static Question questionFromStruct(Struct struct) {
16 | return new Question(struct.getInt64("id"), struct.getString("text"), struct.getString("email"));
17 | }
18 |
19 | @Converter
20 | public static Answer answerFromStruct(Struct struct) {
21 | return new Answer(struct.getInt64("id"), struct.getString("text"), struct.getString("email"), struct.getInt64("question_id"));
22 | }
23 | }
24 |
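These converters are looked up through Camel's type-converter registry whenever a route converts a `Struct` body. A tiny illustrative route, using a `direct:` endpoint as a stand-in for the `debezium-postgres` endpoint that the real `QaDatabaseUserNotifier` consumes:

```java
package io.debezium.examples.camel.pipeline;

import org.apache.camel.builder.RouteBuilder;

public class ConverterUsageSketch extends RouteBuilder {

    @Override
    public void configure() {
        from("direct:questionChangeEvents")
            // the body arrives as a Kafka Connect Struct; convertBodyTo() dispatches
            // to Converters.questionFromStruct(Struct) registered above
            .convertBodyTo(Question.class)
            .log("New question: ${body}");
    }
}
```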
--------------------------------------------------------------------------------
/camel-component/qa-camel/src/main/java/io/debezium/examples/camel/pipeline/Runner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.camel.pipeline;
7 |
8 | import org.apache.camel.main.Main;
9 |
10 | public class Runner {
11 | private static final Main MAIN = new Main();
12 |
13 | public static void main(String[] args) throws Exception {
14 | MAIN.configure().addRoutesBuilder(QaDatabaseUserNotifier.class);
15 | MAIN.run();
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/camel-component/qa-camel/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=INFO, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 | log4j.logger.io.debezium.examples.camel=DEBUG
13 |
--------------------------------------------------------------------------------
/camel-component/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=INFO, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 |
--------------------------------------------------------------------------------
/camel-component/src/test/resources/messages/create-answer1.json:
--------------------------------------------------------------------------------
1 | {
2 | "email": "james.doe@example.com",
3 | "text": "I'd say roughly four"
4 | }
5 |
--------------------------------------------------------------------------------
/camel-component/src/test/resources/messages/create-answer2.json:
--------------------------------------------------------------------------------
1 | {
2 | "email": "jane.doe@example.com",
3 | "text": "Exactly four"
4 | }
5 |
--------------------------------------------------------------------------------
/camel-component/src/test/resources/messages/create-answer3.json:
--------------------------------------------------------------------------------
1 | {
2 | "email": "george.doe@example.com",
3 | "text": "The same amount as cat has."
4 | }
5 |
--------------------------------------------------------------------------------
/camel-component/src/test/resources/messages/create-question.json:
--------------------------------------------------------------------------------
1 | {
2 | "email": "john.doe@example.com",
3 | "text": "How many legs does a dog have?"
4 | }
--------------------------------------------------------------------------------
/camel-kafka-connect/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/connect:2.1
2 |
3 | ARG CAMEL_KAFKA_CONNECT_VERSION=0.1.0
4 |
5 | RUN cd /tmp &&\
6 | curl -sO https://repo1.maven.org/maven2/org/apache/camel/kafkaconnector/camel-coap-kafka-connector/$CAMEL_KAFKA_CONNECT_VERSION/camel-coap-kafka-connector-$CAMEL_KAFKA_CONNECT_VERSION-package.tar.gz &&\
7 | tar -zxvf ./camel-coap-kafka-connector-$CAMEL_KAFKA_CONNECT_VERSION-package.tar.gz &&\
8 | mv ./camel-coap-kafka-connector $KAFKA_CONNECT_PLUGINS_DIR/camel-kafka-connector
9 |
--------------------------------------------------------------------------------
/camel-kafka-connect/coap-server/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fabric8/java-centos-openjdk11-jdk
2 |
3 | COPY target/dependency/* /deployments/
4 | COPY target/*.jar /deployments/app.jar
5 |
6 | ENTRYPOINT [ "/deployments/run-java.sh" ]
7 |
--------------------------------------------------------------------------------
/camel-kafka-connect/coap-server/src/main/java/io/debezium/examples/camel/coap/CoapServer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.camel.coap;
7 |
8 | import org.apache.camel.LoggingLevel;
9 | import org.apache.camel.builder.RouteBuilder;
10 |
11 | public class CoapServer extends RouteBuilder {
12 |
13 | @Override
14 | public void configure() throws Exception {
15 |
16 | from("coap:0.0.0.0:{{coap.port}}/data")
17 | .log(LoggingLevel.INFO, "CoAP server has received message '${body}' with headers ${headers}");
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/camel-kafka-connect/coap-server/src/main/java/io/debezium/examples/camel/coap/Runner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.camel.coap;
7 |
8 | import org.apache.camel.main.Main;
9 |
10 | public class Runner {
11 | private static final Main MAIN = new Main();
12 |
13 | public static void main(String[] args) throws Exception {
14 | MAIN.configure().addRoutesBuilder(CoapServer.class);
15 | MAIN.run();
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/camel-kafka-connect/coap-server/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=INFO, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 | log4j.logger.io.debezium.examples.camel=DEBUG
13 |
--------------------------------------------------------------------------------
/camel-kafka-connect/docker-compose.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/camel-kafka-connect/docker-compose.png
--------------------------------------------------------------------------------
/camel-kafka-connect/sink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "camel-sink",
3 | "config": {
4 | "connector.class": "org.apache.camel.kafkaconnector.coap.CamelCoapSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "customers",
7 | "camel.sink.path.uri": "coap-server:5684/data",
8 | "transforms": "unwrap",
9 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
10 | "transforms.unwrap.drop.tombstones": "true",
11 | "value.converter": "org.apache.kafka.connect.storage.StringConverter"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/camel-kafka-connect/source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "postgres",
7 | "database.port": "5432",
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.dbname" : "postgres",
11 | "topic.prefix": "dbserver1",
12 | "schema.include.list": "inventory",
13 | "transforms": "route",
14 | "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter",
15 | "transforms.route.regex": "([^.]+)\\.([^.]+)\\.([^.]+)",
16 | "transforms.route.replacement": "$3"
17 | }
18 | }
19 |
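Both JSON documents above are meant to be POSTed to the Kafka Connect REST API. A small sketch doing so from Java, assuming Connect is reachable on localhost:8083 as in the usual docker-compose setup:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Files;
import java.nio.file.Path;

public class RegisterConnector {

    public static void main(String[] args) throws Exception {
        // read the connector registration request from the file shown above
        String config = Files.readString(Path.of("source.json"));

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8083/connectors"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(config))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```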
--------------------------------------------------------------------------------
/cloudevents/avro-data-extractor/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fabric8/java-jboss-openjdk8-jdk
2 | ENV JAVA_OPTIONS='-Dquarkus.http.host=0.0.0.0 -Dquarkus.http.port=${PORT}'
3 | COPY target/lib/* /deployments/lib/
4 | COPY target/*-runner.jar /deployments/app.jar
5 | ENTRYPOINT [ "/deployments/run-java.sh" ]
6 |
--------------------------------------------------------------------------------
/cloudevents/avro-data-extractor/src/main/java/io/debezium/examples/cloudevents/dataextractor/model/CloudEvent.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.cloudevents.dataextractor.model;
2 |
3 | public class CloudEvent {
4 |
5 | public String id;
6 | public String source;
7 | public String specversion;
8 | public String type;
9 | public String time;
10 | public String datacontenttype;
11 | public String dataschema;
12 | public String iodebeziumop;
13 | public String iodebeziumversion;
14 | public String iodebeziumconnector;
15 | public String iodebeziumname;
16 | public String iodebeziumtsms;
17 | public boolean iodebeziumsnapshot;
18 | public String iodebeziumdb;
19 | public String iodebeziumschema;
20 | public String iodebeziumtable;
21 | public String iodebeziumtxId;
22 | public String iodebeziumlsn;
23 | public String iodebeziumxmin;
24 | public byte[] data;
25 | }
26 |
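This class is a plain binding of the CloudEvents envelope attributes emitted by Debezium's `CloudEventsConverter`. A hedged sketch of reading such an envelope with Jackson; the handling of the payload is simplified:

```java
import java.io.IOException;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CloudEventReader {

    private static final ObjectMapper MAPPER = new ObjectMapper()
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    public static CloudEvent read(byte[] json) throws IOException {
        // with the json/avro combination from this example, "data" carries the
        // Avro-serialized change event and is bound to the byte[] field above
        return MAPPER.readValue(json, CloudEvent.class);
    }
}
```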
--------------------------------------------------------------------------------
/cloudevents/avro-data-extractor/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | json.avro.customers.topic=dbserver2.inventory.customers
2 | json.avro.extracted.topic=customers2
3 |
4 | avro.avro.customers.topic=dbserver3.inventory.customers
5 | avro.avro.extracted.topic=customers3
6 |
7 | schema.registry.url=http://schema-registry:8081
8 |
9 | quarkus.kafka-streams.bootstrap-servers=localhost:9092
10 | quarkus.kafka-streams.application-id=cloudevents-data-extractor
11 | quarkus.kafka-streams.topics=${json.avro.customers.topic},${avro.avro.customers.topic}
12 |
13 | # streams options
14 | kafka-streams.cache.max.bytes.buffering=10240
15 | kafka-streams.commit.interval.ms=1000
16 | kafka-streams.metadata.max.age.ms=500
17 | kafka-streams.auto.offset.reset=earliest
18 | kafka-streams.processing.guarantee=exactly_once
19 |
20 | quarkus.log.console.enable=true
21 | quarkus.log.console.level=INFO
22 |
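These properties drive a Quarkus Kafka Streams application that copies the CloudEvents `data` payload to the extracted topics. A minimal topology sketch wired to the json/avro pair of properties; `extractData()` is a hypothetical placeholder for the real envelope parsing:

```java
package io.debezium.examples.cloudevents.dataextractor;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;
import org.eclipse.microprofile.config.inject.ConfigProperty;

@ApplicationScoped
public class ExtractorTopologySketch {

    @ConfigProperty(name = "json.avro.customers.topic")
    String sourceTopic;

    @ConfigProperty(name = "json.avro.extracted.topic")
    String targetTopic;

    @Produces
    public Topology buildTopology() {
        StreamsBuilder builder = new StreamsBuilder();

        // read the CloudEvents envelopes, keep only the change-event payload,
        // and forward it to the extracted topic
        builder.stream(sourceTopic, Consumed.with(Serdes.String(), Serdes.ByteArray()))
               .mapValues(ExtractorTopologySketch::extractData)
               .to(targetTopic, Produced.with(Serdes.String(), Serdes.ByteArray()));

        return builder.build();
    }

    private static byte[] extractData(byte[] cloudEvent) {
        // hypothetical: parse the CloudEvents JSON envelope and return its "data" bytes
        return cloudEvent;
    }
}
```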
--------------------------------------------------------------------------------
/cloudevents/avro-data-extractor/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, stdout
3 |
4 | # Direct log messages to stdout
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
9 |
10 | log4j.logger.io.debezium.examples=DEBUG
11 |
--------------------------------------------------------------------------------
/cloudevents/register-postgres-avro-avro.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "postgres",
5 | "database.port": "5432",
6 | "database.user": "postgres",
7 | "database.password": "postgres",
8 | "database.dbname" : "postgres",
9 | "database.server.name": "dbserver3",
10 | "slot.name":"dbserver3",
11 | "schema.include.list": "inventory",
12 | "key.converter": "org.apache.kafka.connect.json.JsonConverter",
13 | "key.converter.schemas.enable": "false",
14 | "value.converter": "io.debezium.converters.CloudEventsConverter",
15 | "value.converter.serializer.type" : "avro",
16 | "value.converter.data.serializer.type" : "avro",
17 | "value.converter.avro.schema.registry.url": "http://schema-registry:8081"
18 | }
19 |
--------------------------------------------------------------------------------
/cloudevents/register-postgres-json-avro.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "postgres",
5 | "database.port": "5432",
6 | "database.user": "postgres",
7 | "database.password": "postgres",
8 | "database.dbname" : "postgres",
9 | "database.server.name": "dbserver2",
10 | "slot.name":"dbserver2",
11 | "schema.include.list": "inventory",
12 | "key.converter": "org.apache.kafka.connect.json.JsonConverter",
13 | "key.converter.schemas.enable": "false",
14 | "value.converter": "io.debezium.converters.CloudEventsConverter",
15 | "value.converter.data.serializer.type" : "avro",
16 | "value.converter.avro.schema.registry.url": "http://schema-registry:8081"
17 | }
18 |
--------------------------------------------------------------------------------
/cloudevents/register-postgres-json-json.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "postgres",
5 | "database.port": "5432",
6 | "database.user": "postgres",
7 | "database.password": "postgres",
8 | "database.dbname" : "postgres",
9 | "database.server.name": "dbserver1",
10 | "slot.name":"dbserver1",
11 | "schema.include.list": "inventory",
12 | "key.converter": "org.apache.kafka.connect.json.JsonConverter",
13 | "key.converter.schemas.enable": "false",
14 | "value.converter": "io.debezium.converters.CloudEventsConverter",
15 | "value.converter.data.serializer.type" : "json",
16 | "value.converter.json.schemas.enable" : "false"
17 | }
18 |
--------------------------------------------------------------------------------
/db-activity-monitoring/activity-monitoring-dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/db-activity-monitoring/activity-monitoring-dashboard.png
--------------------------------------------------------------------------------
/db-activity-monitoring/debezium-grafana/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG GRAFANA_VERSION
2 | FROM grafana/grafana:${GRAFANA_VERSION}
3 |
4 | COPY dashboard.yml /etc/grafana/provisioning/dashboards
5 | COPY datasource.yml /etc/grafana/provisioning/datasources
6 | COPY debezium-dashboard.json /var/lib/grafana/dashboards/debezium-dashboard.json
7 | COPY activity-monitoring-dashboard.json /var/lib/grafana/dashboards/activity-monitoring-dashboard.json
8 | COPY alerting.yaml /etc/grafana/provisioning/alerting/alerting.yaml
9 | COPY grafana.ini /etc/grafana/grafana.ini
10 |
--------------------------------------------------------------------------------
/db-activity-monitoring/debezium-grafana/dashboard.yml:
--------------------------------------------------------------------------------
1 | apiVersion: 1
2 | providers:
3 | - name: 'default'
4 | orgId: 1
5 | folder: ''
6 | type: file
7 | disableDeletion: false
8 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards
9 | options:
10 | path: /var/lib/grafana/dashboards
11 |
--------------------------------------------------------------------------------
/db-activity-monitoring/debezium-grafana/datasource.yml:
--------------------------------------------------------------------------------
1 | apiVersion: 1
2 |
3 | datasources:
4 | - name: prometheus
5 | type: prometheus
6 | url: http://prometheus:9090
7 | access: proxy
8 | version: 1
9 |
--------------------------------------------------------------------------------
/db-activity-monitoring/debezium-jmx-exporter/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:${DEBEZIUM_VERSION}
3 |
4 | ARG JMX_AGENT_VERSION
5 | RUN mkdir /kafka/etc && cd /kafka/etc &&\
6 | curl -so jmx_prometheus_javaagent.jar \
7 | https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/$JMX_AGENT_VERSION/jmx_prometheus_javaagent-$JMX_AGENT_VERSION.jar
8 |
9 | COPY config.yml /kafka/etc/config.yml
10 |
--------------------------------------------------------------------------------
/db-activity-monitoring/debezium-prometheus/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PROMETHEUS_VERSION
2 | FROM prom/prometheus:${PROMETHEUS_VERSION}
3 |
4 | RUN sed -i -e "s/\"localhost:9090\"/\"localhost:9090\",\"connect:8080\",\"order-service:8080\"/" /etc/prometheus/prometheus.yml
5 |
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/.gitignore:
--------------------------------------------------------------------------------
1 | #Maven
2 | target/
3 | pom.xml.tag
4 | pom.xml.releaseBackup
5 | pom.xml.versionsBackup
6 | release.properties
7 | .flattened-pom.xml
8 |
9 | # Eclipse
10 | .project
11 | .classpath
12 | .settings/
13 | bin/
14 |
15 | # IntelliJ
16 | .idea
17 | *.ipr
18 | *.iml
19 | *.iws
20 |
21 | # NetBeans
22 | nb-configuration.xml
23 |
24 | # Visual Studio Code
25 | .vscode
26 | .factorypath
27 |
28 | # OSX
29 | .DS_Store
30 |
31 | # Vim
32 | *.swp
33 | *.swo
34 |
35 | # patch
36 | *.orig
37 | *.rej
38 |
39 | # Local environment
40 | .env
41 |
42 | # Plugin directory
43 | /.quarkus/cli/plugins/
44 |
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/.mvn/wrapper/.gitignore:
--------------------------------------------------------------------------------
1 | maven-wrapper.jar
2 |
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode.
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Dnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/order-service .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/order-service
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.9
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | ENTRYPOINT ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/src/main/java/io/debezium/examples/Endpoint.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples;
2 |
3 | import jakarta.ws.rs.GET;
4 | import jakarta.ws.rs.Path;
5 | import jakarta.ws.rs.core.MediaType;
6 | import org.jboss.resteasy.reactive.RestResponse;
7 |
8 | import java.nio.file.Paths;
9 |
10 | @Path("")
11 | public class Endpoint {
12 |
13 | @GET
14 | @Path("hello")
15 | public RestResponse<java.nio.file.Path> hello() {
16 | // HTTP OK status with text/plain content type
17 |
18 | java.nio.file.Path file = Paths.get("inventory.sql");
19 | return RestResponse.ResponseBuilder.ok(file, MediaType.APPLICATION_OCTET_STREAM_TYPE)
20 | // set a response header
21 | .header("Content-Disposition", "attachment; filename="
22 | + file.getFileName())
23 | // end of builder API
24 | .build();
25 | }
26 | }
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/src/main/java/io/debezium/examples/OrderRepository.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples;
7 |
8 | import io.quarkus.hibernate.orm.panache.PanacheRepository;
9 | import jakarta.enterprise.context.ApplicationScoped;
10 |
11 | @ApplicationScoped
12 | public class OrderRepository implements PanacheRepository<Order> {
13 | }
14 |
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | # configure your datasource
2 | quarkus.datasource.db-kind = postgresql
3 | quarkus.datasource.username = postgres
4 | quarkus.datasource.password = postgres
5 | quarkus.datasource.jdbc.url = jdbc:postgresql://postgres:5432/postgres
6 |
7 | quarkus.hibernate-orm.database.generation=none
8 |
9 | quarkus.http.non-application-root-path=/
10 | quarkus.micrometer.export.prometheus.enabled=true
11 | quarkus.micrometer.export.prometheus.path=metrics
12 | quarkus.micrometer.binder.jvm=false
13 | quarkus.micrometer.binder.netty.enabled=false
14 | quarkus.micrometer.binder.http-server.enabled=false
15 | quarkus.micrometer.binder.system=false
16 | quarkus.micrometer.binder.vertx.enabled=false
17 |
--------------------------------------------------------------------------------
/db-activity-monitoring/order-service/src/main/resources/import.sql:
--------------------------------------------------------------------------------
1 | -- This file allows you to write SQL commands that will be emitted in test and dev.
2 | -- The commands are commented out as their support depends on the database
3 | -- insert into myentity (id, field) values(1, 'field-1');
4 | -- insert into myentity (id, field) values(2, 'field-2');
5 | -- insert into myentity (id, field) values(3, 'field-3');
6 | -- alter sequence myentity_seq restart with 4;
--------------------------------------------------------------------------------
/db-activity-monitoring/postgres-activity-monitoring.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "postgres",
7 | "database.port": "5432",
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.server.id": "184054",
11 | "database.dbname": "postgres",
12 | "topic.prefix": "monitoring",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory",
15 | "internal.advanced.metrics.enable": "true"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/debezium-platform/postgresql-kafka-example/clean-up.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 |
3 | source env.sh
4 |
5 | minikube delete -p $CLUSTER
--------------------------------------------------------------------------------
/debezium-platform/postgresql-kafka-example/env.sh:
--------------------------------------------------------------------------------
1 | CLUSTER=debezium
2 | NAMESPACE=debezium-platform
3 | DEBEZIUM_PLATFORM_DOMAIN=platform.debezium.io
4 | TIMEOUT=300s
--------------------------------------------------------------------------------
/debezium-platform/postgresql-kafka-example/resources/destination.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/debezium-platform/postgresql-kafka-example/resources/destination.png
--------------------------------------------------------------------------------
/debezium-platform/postgresql-kafka-example/resources/pipeline_configuration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/debezium-platform/postgresql-kafka-example/resources/pipeline_configuration.png
--------------------------------------------------------------------------------
/debezium-platform/postgresql-kafka-example/resources/pipeline_designer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/debezium-platform/postgresql-kafka-example/resources/pipeline_designer.png
--------------------------------------------------------------------------------
/debezium-platform/postgresql-kafka-example/resources/source.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/debezium-platform/postgresql-kafka-example/resources/source.png
--------------------------------------------------------------------------------
/debezium-platform/postgresql-kafka-example/resources/transform.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/debezium-platform/postgresql-kafka-example/resources/transform.png
--------------------------------------------------------------------------------
/debezium-server-name-mapper/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '2.1'
2 | services:
3 | db:
4 | image: quay.io/debezium/example-postgres:${DEBEZIUM_VERSION}
5 | ports:
6 | - 5432:5432
7 | environment:
8 | - POSTGRES_USER=postgres
9 | - POSTGRES_PASSWORD=postgres
10 | pulsar:
11 | image: apachepulsar/pulsar:2.5.2
12 | command: "bin/pulsar standalone"
13 | ports:
14 | - 6650:6650
15 |
--------------------------------------------------------------------------------
/debezium-server-name-mapper/src/main/java/io/debezium/example/server/mapper/PrefixingNameMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.example.server.mapper;
7 |
8 | import jakarta.enterprise.context.Dependent;
9 |
10 | import org.eclipse.microprofile.config.inject.ConfigProperty;
11 |
12 | import io.debezium.server.StreamNameMapper;
13 |
14 | @Dependent
15 | public class PrefixingNameMapper implements StreamNameMapper {
16 |
17 | @ConfigProperty(name = "mapper.prefix")
18 | String prefix;
19 |
20 | @Override
21 | public String map(String topic) {
22 | return prefix + "." + topic;
23 | }
24 |
25 | }
26 |
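A tiny illustration of the mapping applied by Debezium Server; normally the prefix is injected from `mapper.prefix` (see the application.properties below), here it is set directly for demonstration:

```java
package io.debezium.example.server.mapper;

public class PrefixingNameMapperExample {

    public static void main(String[] args) {
        PrefixingNameMapper mapper = new PrefixingNameMapper();
        mapper.prefix = "superprefix"; // value taken from mapper.prefix below

        // prints "superprefix.tutorial.inventory.customers"
        System.out.println(mapper.map("tutorial.inventory.customers"));
    }
}
```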
--------------------------------------------------------------------------------
/debezium-server-name-mapper/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | debezium.sink.type=pulsar
2 | debezium.sink.pulsar.client.serviceUrl=pulsar://localhost:6650
3 | debezium.source.connector.class=io.debezium.connector.postgresql.PostgresConnector
4 | debezium.source.offset.storage.file.filename=target/offsets.dat
5 | debezium.source.offset.flush.interval.ms=0
6 | debezium.source.database.hostname=localhost
7 | debezium.source.database.port=5432
8 | debezium.source.database.user=postgres
9 | debezium.source.database.password=postgres
10 | debezium.source.database.dbname=postgres
11 | debezium.source.topic.prefix=tutorial
12 | debezium.source.schema.include.list=inventory
13 | mapper.prefix=superprefix
14 |
--------------------------------------------------------------------------------
/debezium-server/debezium-server-mysql-redis-pubsub/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 |
3 | RUN apt update && apt install -y git
4 | RUN git clone https://github.com/googleapis/python-pubsub.git
5 |
6 | WORKDIR python-pubsub/samples/snippets
7 | RUN pip install -r requirements.txt
8 |
--------------------------------------------------------------------------------
/debezium-server/debezium-server-sink-pubsub/config-mongodb/application.properties:
--------------------------------------------------------------------------------
1 | debezium.sink.type=pubsub
2 | debezium.sink.pubsub.project.id=debezium-example
3 | debezium.sink.pubsub.ordering.enabled=false
4 | debezium.format.value=json
5 | debezium.format.value.schemas.enable=false
6 | debezium.source.connector.class=io.debezium.connector.mongodb.MongoDbConnector
7 | debezium.source.database.history=io.debezium.relational.history.FileDatabaseHistory
8 | debezium.source.tasks.max=1
9 | debezium.source.topic.prefix=tutorial
10 | debezium.source.mongodb.connection.string=mongodb://mongodb:27017/?replicaSet=rs0
11 | debezium.source.mongodb.user=debezium
12 | debezium.source.mongodb.password=dbz
13 | debezium.source.database.include.list=inventory
14 | debezium.source.collection.include.list=inventory.customers
15 | debezium.source.offset.storage.file.filename=/tmp/offsets.dat
16 | debezium.source.offset.flush.interval.ms=0
17 |
--------------------------------------------------------------------------------
/debezium-server/debezium-server-sink-pubsub/config-mysql/application.properties:
--------------------------------------------------------------------------------
1 | debezium.sink.type=pubsub
2 | debezium.sink.pubsub.project.id=project-id
3 | debezium.source.connector.class=io.debezium.connector.mysql.MySqlConnector
4 | debezium.source.database.hostname=mysql
5 | debezium.source.database.server.id=223344
6 | debezium.source.database.port=3306
7 | debezium.source.database.user=debezium
8 | debezium.source.database.password=dbz
9 | debezium.source.schema.history.internal=io.debezium.storage.file.history.FileSchemaHistory
10 | debezium.source.schema.history.internal.file.filename=data/schema.dat
11 | debezium.source.offset.storage.file.filename=data/offsets.dat
12 | debezium.source.offset.flush.interval.ms=0
13 | debezium.source.topic.prefix=tutorial
14 | debezium.source.database.include.list=inventory
15 | debezium.source.table.include.list=inventory.customers
16 |
--------------------------------------------------------------------------------
/debezium-server/debezium-server-sink-pubsub/config-postgres/application.properties:
--------------------------------------------------------------------------------
1 | debezium.sink.type=pubsub
2 | debezium.sink.pubsub.project.id=project-id
3 | debezium.source.connector.class=io.debezium.connector.postgresql.PostgresConnector
4 | debezium.source.offset.storage.file.filename=data/offsets.dat
5 | debezium.source.offset.flush.interval.ms=0
6 | debezium.source.database.hostname=postgres
7 | debezium.source.database.port=5432
8 | debezium.source.database.user=postgres
9 | debezium.source.database.password=postgres
10 | debezium.source.database.dbname=postgres
11 | debezium.source.topic.prefix=tutorial
12 | debezium.source.table.include.list=inventory.customers
13 | debezium.source.plugin.name=pgoutput
14 |
--------------------------------------------------------------------------------
/distributed-caching/architecture-overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/distributed-caching/architecture-overview.png
--------------------------------------------------------------------------------
/distributed-caching/cache-update-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
--------------------------------------------------------------------------------
/distributed-caching/cache-update-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the cache-update-service in native (non-JVM) mode
3 | ###
4 | FROM registry.fedoraproject.org/fedora-minimal
5 | WORKDIR /work/
6 | COPY target/*-runner /work/application
7 | RUN chmod 755 /work
8 | CMD [ "./application", "-Dquarkus.http.host=0.0.0.0" ]
9 |
--------------------------------------------------------------------------------
/distributed-caching/cache-update-service/src/main/java/io/debezium/examples/caching/cacheupdater/streams/model/OrderLine.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.caching.cacheupdater.streams.model;
7 |
8 | import java.math.BigDecimal;
9 |
10 | import com.fasterxml.jackson.annotation.JsonProperty;
11 |
12 | /**
13 | * An entity mapping that represents a line item on a {@link PurchaseOrder} entity.
14 | */
15 | public class OrderLine {
16 |
17 | public Long id;
18 | public String item;
19 | public int quantity;
20 |
21 | @JsonProperty("total_price")
22 | public BigDecimal totalPrice;
23 | public OrderLineStatus status;
24 | public Long order_id;
25 | }
26 |
--------------------------------------------------------------------------------
/distributed-caching/cache-update-service/src/main/java/io/debezium/examples/caching/cacheupdater/streams/model/OrderLineStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.caching.cacheupdater.streams.model;
7 |
8 | /**
9 | * Various statuses in which a {@link OrderLine} may be within.
10 | */
11 | public enum OrderLineStatus {
12 | ENTERED,
13 | CANCELLED,
14 | SHIPPED
15 | }
16 |
--------------------------------------------------------------------------------
/distributed-caching/cache-update-service/src/main/java/io/debezium/examples/caching/cacheupdater/streams/model/PurchaseOrder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.caching.cacheupdater.streams.model;
7 |
8 | import com.fasterxml.jackson.annotation.JsonProperty;
9 |
10 | public class PurchaseOrder {
11 |
12 | public Long id;
13 |
14 | @JsonProperty("customer_id")
15 | public long customerId;
16 |
17 | /**
18 | * Coming in as *micros* since epoch.
19 | */
20 | @JsonProperty("order_date")
21 | public long orderDate;
22 |
23 | public int version;
24 | }
25 |
--------------------------------------------------------------------------------
/distributed-caching/demo.md:
--------------------------------------------------------------------------------
1 | # Demo Commands
2 |
3 | * Change replica identity to full:
4 |
5 | ```
6 | docker run --tty --rm -i \
7 | --network distributed-caching-network \
8 | quay.io/debezium/tooling:1.2 \
9 | bash -c 'pgcli postgresql://postgresuser:postgrespw@order-db:5432/orderdb'
10 |
11 | ALTER TABLE inventory.purchaseorder REPLICA IDENTITY FULL;
12 | ALTER TABLE inventory.orderline REPLICA IDENTITY FULL;
13 | ```
14 |
15 | * Select orders with cancelled items from the cache:
16 |
17 | ```
18 | from caching.PurchaseOrder po where po.lineItems.status="CANCELLED"
19 | ```
20 |
--------------------------------------------------------------------------------
/distributed-caching/order-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
--------------------------------------------------------------------------------
/distributed-caching/order-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs order-service-quarkus in native (non-JVM) mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package -Pnative -Dnative-image.docker-build=true
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/order-service-quarkus .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/order-service-quarkus
15 | #
16 | ###
17 | FROM registry.fedoraproject.org/fedora-minimal
18 | WORKDIR /work/
19 | COPY target/*-runner /work/application
20 | RUN chmod 755 /work
21 | EXPOSE 8080
22 | CMD [ "./application", "-Dquarkus.http.host=0.0.0.0" ]
23 |
--------------------------------------------------------------------------------
/distributed-caching/order-service/src/main/java/io/debezium/examples/caching/commons/EntityNotFoundException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.caching.commons;
7 |
8 | /**
9 | * An exception that indicates an entity could not be found.
10 | */
11 | public class EntityNotFoundException extends RuntimeException {
12 |
13 | private static final long serialVersionUID = -1L;
14 |
15 | public EntityNotFoundException(String message) {
16 | super(message);
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/distributed-caching/order-service/src/main/java/io/debezium/examples/caching/model/OrderLineStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.caching.model;
7 |
8 | import org.infinispan.protostream.annotations.ProtoEnumValue;
9 |
10 | /**
11 | * Various statuses in which a {@link OrderLine} may be within.
12 | */
13 | public enum OrderLineStatus {
14 | @ProtoEnumValue(number = 1)
15 | ENTERED,
16 | @ProtoEnumValue(number = 2)
17 | CANCELLED,
18 | @ProtoEnumValue(number = 3)
19 | SHIPPED
20 | }
21 |
--------------------------------------------------------------------------------
/distributed-caching/order-service/src/main/java/io/debezium/examples/caching/order/rest/RestApplication.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.caching.order.rest;
7 |
8 | import javax.ws.rs.ApplicationPath;
9 | import javax.ws.rs.core.Application;
10 |
11 | /**
12 | * Defines the application path for the Order Service rest application.
13 | */
14 | @ApplicationPath("/")
15 | public class RestApplication extends Application {
16 | }
17 |
--------------------------------------------------------------------------------
/distributed-caching/order-service/src/main/resources/META-INF/resources/index.html:
--------------------------------------------------------------------------------
1 |
2 | Welcome to the Order Service based on Quarkus
3 |
--------------------------------------------------------------------------------
/distributed-caching/order-service/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | # Quarkus configuration file
2 | # key = value
3 | quarkus.datasource.db-kind=postgresql
4 | quarkus.datasource.jdbc.url=jdbc:postgresql://localhost:5432/orderdb?currentSchema=inventory
5 | quarkus.datasource.username=postgresuser
6 | quarkus.datasource.password=postgrespw
7 |
8 | quarkus.hibernate-orm.database.generation=drop-and-create
9 | # quarkus.hibernate-orm.database.generation=update
10 | quarkus.hibernate-orm.log.sql=true
11 | quarkus.hibernate-orm.database.default-schema=inventory
12 |
13 | quarkus.log.level=INFO
14 | quarkus.log.min-level=INFO
15 | quarkus.log.console.enable=true
16 | quarkus.log.console.format=%d{HH:mm:ss} %-5p [%c] %s%e%n
17 |
18 | quarkus.infinispan-client.server-list=localhost:31222
19 | quarkus.infinispan-client.client-intelligence=BASIC
20 | quarkus.infinispan-client.use-auth=false
21 |
22 | quarkus.package.type=legacy-jar
23 |
--------------------------------------------------------------------------------
/distributed-caching/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "name" : "order-connector",
3 | "config" : {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "order-db",
7 | "database.port": "5432",
8 | "database.user": "postgresuser",
9 | "database.password": "postgrespw",
10 | "database.dbname" : "orderdb",
11 | "topic.prefix": "dbserver1",
12 | "decimal.handling.mode" : "string",
13 | "schema.include.list": "inventory",
14 | "table.include.list": ".*purchaseorder,.*orderline",
15 | "poll.interval.ms": "100"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/distributed-caching/resources/data/cancel-order-line-request.json:
--------------------------------------------------------------------------------
1 | {
2 | "newStatus" : "CANCELLED",
3 | "version" : 0
4 | }
5 |
--------------------------------------------------------------------------------
/distributed-caching/resources/data/create-order-request.json:
--------------------------------------------------------------------------------
1 | {
2 | "customerId" : "123",
3 | "orderDate" : "2019-01-31T12:13:01",
4 | "lineItems" : [
5 | {
6 | "item" : "Debezium in Action",
7 | "quantity" : 2,
8 | "totalPrice" : 39.98
9 | },
10 | {
11 | "item" : "Debezium for Dummies",
12 | "quantity" : 1,
13 | "totalPrice" : 29.99
14 | }
15 | ]
16 | }
17 |
--------------------------------------------------------------------------------
/distributed-caching/servers.json:
--------------------------------------------------------------------------------
1 | {
2 | "Servers": {
3 | "1": {
4 | "Name": "orderdb",
5 | "Group": "Servers",
6 | "Host": "order-db",
7 | "Port": 5432,
8 | "MaintenanceDB": "orderdb",
9 | "Username": "postgresuser",
10 | "SSLMode": "prefer",
11 | "SSLCert": "/.postgresql/postgresql.crt",
12 | "SSLKey": "/.postgresql/postgresql.key",
13 | "SSLCompression": 0,
14 | "Timeout": 10,
15 | "UseSSHTunnel": 0,
16 | "TunnelPort": "22",
17 | "TunnelAuthentication": 0
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM jboss/wildfly:12.0.0.Final
2 |
3 | ADD resources/wildfly/customization /opt/jboss/wildfly/customization/
4 |
5 | # Based on:
6 | # https://goldmann.pl/blog/2014/07/23/customizing-the-configuration-of-the-wildfly-docker-image/
7 | # https://tomylab.wordpress.com/2016/07/24/how-to-add-a-datasource-to-wildfly/
8 | RUN /opt/jboss/wildfly/customization/execute.sh
9 |
10 | ADD target/hikr-1.0-SNAPSHOT.war /opt/jboss/wildfly/standalone/deployments/
11 |
12 | # Fix for Error: Could not rename /opt/jboss/wildfly/standalone/configuration/standalone_xml_history/current
13 | RUN rm -rf /opt/jboss/wildfly/standalone/configuration/standalone_xml_history
14 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/README.md:
--------------------------------------------------------------------------------
1 | # Simple CRUD Demo
2 |
3 | A simple CRUD application using Java EE (JPA, JAX-RS, CDI, etc.), running on WildFly with MySQL.
4 | Used as an example for streaming changes out of a database using Debezium.
5 |
6 | To run the app, follow these steps:
7 |
8 | mvn clean package
9 | docker build --no-cache -t debezium-examples/hike-manager:latest -f Dockerfile .
10 |
11 | docker run -it --rm --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=debezium -e MYSQL_USER=mysqluser -e MYSQL_PASSWORD=mysqlpw quay.io/debezium/example-mysql:${DEBEZIUM_VERSION}
12 |
13 | docker run -it --rm -p 8080:8080 --link mysql debezium-examples/hike-manager:latest
14 |
15 | Then visit the application in a browser at http://localhost:8080/hibernate-ogm-hiking-demo-1.0-SNAPSHOT/hikes.html.
16 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/resources/wildfly/customization/commands.cli:
--------------------------------------------------------------------------------
1 | # Mark the commands below to be run as a batch
2 | batch
3 |
4 | # Add module
5 | module add --name=com.mysql --resources=/opt/jboss/wildfly/customization/mysql-connector-java-5.1.46.jar --dependencies=javax.api,javax.transaction.api
6 |
7 | # Add MySQL driver
8 | /subsystem=datasources/jdbc-driver=mysql:add(driver-name=mysql,driver-module-name=com.mysql,driver-xa-datasource-class-name=com.mysql.jdbc.jdbc2.optional.MysqlXADataSource)
9 |
10 | # Add the datasource
11 | data-source add --name=hikingDS --driver-name=mysql --jndi-name=java:jboss/datasources/HikingDS --connection-url=jdbc:mysql://mysql:3306/inventory?useUnicode=true&characterEncoding=UTF-8&useSSL=false --user-name=mysqluser --password=mysqlpw --use-ccm=false --max-pool-size=25 --blocking-timeout-wait-millis=5000 --enabled=true
12 |
13 | # Execute the batch
14 | run-batch
15 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/resources/wildfly/customization/mysql-connector-java-5.1.46.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/end-to-end-demo/debezium-hiking-demo/resources/wildfly/customization/mysql-connector-java-5.1.46.jar
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/java/io/debezium/examples/hikr/model/Person.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.hikr.model;
2 |
3 | import java.util.HashSet;
4 | import java.util.Set;
5 |
6 | import javax.persistence.Embeddable;
7 | import javax.persistence.Entity;
8 | import javax.persistence.GeneratedValue;
9 | import javax.persistence.GenerationType;
10 | import javax.persistence.Id;
11 | import javax.persistence.OneToMany;
12 | import javax.validation.constraints.Size;
13 |
14 | @Embeddable
15 | public class Person {
16 |
17 | @Size(min=3)
18 | public String name;
19 |
20 | Person() {
21 | }
22 |
23 | public Person(String name) {
24 | this.name = name;
25 | }
26 |
27 | public String getName() {
28 | return name;
29 | }
30 |
31 | public void setName(String name) {
32 | this.name = name;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/java/io/debezium/examples/hikr/model/Section.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.hikr.model;
2 |
3 | import javax.persistence.Column;
4 | import javax.persistence.Embeddable;
5 |
6 | @Embeddable
7 | public class Section {
8 |
9 | @Column(name="start")
10 | public String from;
11 |
12 | @Column(name="destination")
13 | public String to;
14 |
15 | Section() {
16 | }
17 |
18 | public Section(String from, String to) {
19 | this.from = from;
20 | this.to = to;
21 | }
22 |
23 | }
24 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/java/io/debezium/examples/hikr/rest/HikingApplication.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.hikr.rest;
2 |
3 | import javax.ws.rs.ApplicationPath;
4 | import javax.ws.rs.core.Application;
5 |
6 | @ApplicationPath("/hiking-manager")
7 | public class HikingApplication extends Application {
8 | }
9 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/resources/META-INF/persistence.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
8 | java:jboss/datasources/HikingDS
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/resources/valdr-bean-validation.json:
--------------------------------------------------------------------------------
1 | {
2 | "modelPackages": ["io.debezium.examples.hikr.rest.model"],
3 | "outputFullTypeName": false
4 | }
5 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/webapp/WEB-INF/beans.xml:
--------------------------------------------------------------------------------
  1 | <?xml version="1.0" encoding="UTF-8"?>
  2 | <beans xmlns="http://xmlns.jcp.org/xml/ns/javaee"
  3 |        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  4 |        xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee
  5 |                            http://xmlns.jcp.org/xml/ns/javaee/beans_1_1.xsd">
  6 | </beans>
  7 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/webapp/WEB-INF/web.xml:
--------------------------------------------------------------------------------
  1 | <?xml version="1.0" encoding="UTF-8"?>
  2 | <web-app xmlns="http://xmlns.jcp.org/xml/ns/javaee"
  3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  4 |          xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee
  5 |                              http://xmlns.jcp.org/xml/ns/javaee/web-app_3_1.xsd"
  6 |          version="3.1">
  7 |
  8 |     <servlet>
  9 |         <servlet-name>valdr</servlet-name>
 10 |         <servlet-class>com.github.valdr.ValidationRulesServlet</servlet-class>
 11 |     </servlet>
 12 |
 13 |     <servlet-mapping>
 14 |         <servlet-name>valdr</servlet-name>
 15 |         <url-pattern>/webresources/validation</url-pattern>
 16 |     </servlet-mapping>
 17 |
 18 | </web-app>
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/main/webapp/images/jbosscorp_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/end-to-end-demo/debezium-hiking-demo/src/main/webapp/images/jbosscorp_logo.png
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/script/mongo.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | source docker-service.sh
4 | ARGS="-p 27017:27017"
5 |
6 | docker_service $1 mongo "$ARGS" ""
7 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-hiking-demo/src/script/orderCreation.json:
--------------------------------------------------------------------------------
1 | {
2 | "tripId": 2,
3 | "customer": {
4 | "name": "Emmanuel",
5 | "email": "notanemailviajson"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-jdbc/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION=1.9
2 |
3 | FROM quay.io/debezium/connect:$DEBEZIUM_VERSION
4 |
5 | ARG POSTGRES_VERSION=42.2.8
6 | ARG KAFKA_JDBC_VERSION=5.3.1
7 |
8 | # Fetch and deploy PostgreSQL JDBC driver
9 | RUN cd /kafka/libs && \
10 | curl -sO https://repo1.maven.org/maven2/org/postgresql/postgresql/$POSTGRES_VERSION/postgresql-$POSTGRES_VERSION.jar
11 |
12 | # Fetch and deploy Kafka Connect JDBC
13 | ENV KAFKA_CONNECT_JDBC_DIR=$KAFKA_CONNECT_PLUGINS_DIR/kafka-connect-jdbc
14 | RUN mkdir $KAFKA_CONNECT_JDBC_DIR
15 |
16 | RUN cd $KAFKA_CONNECT_JDBC_DIR && \
17 | curl -sO https://packages.confluent.io/maven/io/confluent/kafka-connect-jdbc/$KAFKA_JDBC_VERSION/kafka-connect-jdbc-$KAFKA_JDBC_VERSION.jar
18 |
19 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-thorntail-demo/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fabric8/java-jboss-openjdk8-jdk
2 |
3 | COPY target/demo-thorntail.jar /deployments
4 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-vertx-demo/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fabric8/java-jboss-openjdk8-jdk
2 |
3 | COPY target/debezium-vertx-demo-fat.jar /deployments
4 |
--------------------------------------------------------------------------------
/end-to-end-demo/debezium-vertx-demo/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=WARN, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 |
--------------------------------------------------------------------------------
/end-to-end-demo/register-hiking-connector-json.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "hiking-connector-json",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184055",
11 | "database.server.name": "dbserver1",
12 | "database.include.list": "inventory",
13 | "database.history.kafka.bootstrap.servers": "kafka:9092",
14 | "database.history.kafka.topic": "schema-changes.inventory",
15 | "transforms": "route",
16 | "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter",
17 | "transforms.route.regex": "([^.]+)\\.([^.]+)\\.([^.]+)",
18 | "transforms.route.replacement": "$1_$2_$3_json"
19 | }
20 | }
21 |
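As with the other connector configs in this repository, this file is meant to be POSTed to the Kafka Connect REST API. A minimal sketch, assuming Connect is reachable on localhost:8083 and the command is run from the end-to-end-demo directory:

```shell
# Register the hiking connector (JSON routing variant) with Kafka Connect
curl -i -X POST -H "Accept:application/json" -H "Content-Type:application/json" \
  http://localhost:8083/connectors/ -d @register-hiking-connector-json.json
```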
--------------------------------------------------------------------------------
/engine-wasm/build-wasm.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
5 |
6 | docker run --rm \
7 | -v ${SCRIPT_DIR}/src/main/resources:/src \
8 | -w /src tinygo/tinygo:0.34.0 bash \
9 | -c "tinygo build -ldflags='-extldflags --import-memory' --no-debug -target=wasm-unknown -o /tmp/cdc.wasm go/cdc.go && cat /tmp/cdc.wasm" > \
10 | ${SCRIPT_DIR}/src/main/resources/compiled/cdc.wasm
11 |
--------------------------------------------------------------------------------
/engine-wasm/src/main/resources/compiled/cdc.wasm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/engine-wasm/src/main/resources/compiled/cdc.wasm
--------------------------------------------------------------------------------
/engine-wasm/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
  1 | <configuration>
  2 |
  3 |     <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
  4 |         <encoder>
  5 |             <pattern>%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n</pattern>
  6 |         </encoder>
  7 |     </appender>
  8 |
  9 |     <!-- levels below assume the defaults used by the log4j configs elsewhere in these examples -->
 10 |     <logger name="io.debezium" level="INFO"/>
 11 |
 12 |     <root level="WARN">
 13 |         <appender-ref ref="CONSOLE"/>
 14 |     </root>
 15 |
 16 | </configuration>
--------------------------------------------------------------------------------
/failover/haproxy/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM haproxy:1
2 | COPY haproxy.cfg /usr/local/etc/haproxy/haproxy.cfg
3 |
--------------------------------------------------------------------------------
/failover/haproxy/haproxy.cfg:
--------------------------------------------------------------------------------
1 | listen haproxy
2 | bind *:3306
3 | mode tcp
4 | timeout connect 10s
5 | timeout client 10s
6 | timeout server 10s
7 | balance leastconn
8 | option tcp-check
9 | default-server port 3306 inter 2s downinter 5s rise 3 fall 2
10 | server primary mysql1:3306 check
11 | server backup mysql2:3306 backup check
12 |
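To check which backend HAProxy is currently routing to, one can connect through the proxied port and ask MySQL for its identity. A sketch, assuming the proxy is published on localhost:3306 and the example mysqluser/mysqlpw account exists on both servers:

```shell
# Which MySQL server is answering behind HAProxy right now?
mysql -h 127.0.0.1 -P 3306 -u mysqluser -pmysqlpw -e "SELECT @@hostname, @@server_id;"
```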
--------------------------------------------------------------------------------
/failover/mysql1/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mysql:5.7
2 |
3 | MAINTAINER Debezium Community
4 |
5 | COPY mysql.cnf /etc/mysql/conf.d/
6 | COPY inventory.sql /docker-entrypoint-initdb.d/
--------------------------------------------------------------------------------
/failover/mysql2/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mysql:5.7
2 |
3 | MAINTAINER Debezium Community
4 |
5 | COPY mysql.cnf /etc/mysql/conf.d/
6 | COPY inventory.sql /docker-entrypoint-initdb.d/
--------------------------------------------------------------------------------
/failover/register-mysql.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "database.include.list": "inventory",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/graphql/aggregator/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fabric8/java-jboss-openjdk8-jdk
2 |
3 | COPY target/demo-thorntail.jar /deployments
4 |
--------------------------------------------------------------------------------
/graphql/aggregator/src/main/resources/graphql/schema.graphqls:
--------------------------------------------------------------------------------
1 | # Represents an Order in our shop
2 | type Order {
3 | # Unique identifier (immutable) of this Order
4 | id: String!
5 |
6 | # Time when this order has been processed
7 | timestamp: String!
8 |
9 | # ID of the customer that has placed this order
10 | customerId: ID!
11 |
 12 | # ID of the product that has been purchased with this order
13 | productId: ID!
14 |
15 | # Number of products bought
16 | quantity: Int!
17 | }
18 |
19 | type Query {
20 |
21 | # Say Hello to our API (health check)
22 | hello: String!
23 |
24 | # Return the latest order
25 | latestOrder: Order
26 | }
27 |
28 | type Subscription {
29 |
 30 | # Gets invoked for each newly placed order
31 | onNewOrder(withMinQuantity: Int, withProductId: Int): Order!
32 | }
33 |
--------------------------------------------------------------------------------
/graphql/aggregator/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, stdout
3 |
4 | # Direct log messages to stdout
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
9 |
 10 | log4j.logger.io.debezium.examples=DEBUG
11 |
--------------------------------------------------------------------------------
/graphql/event-source/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fabric8/java-jboss-openjdk8-jdk
2 |
3 | COPY target/graphql-event-source-0.1-SNAPSHOT.jar /deployments
4 | COPY target/dependencies/* /deployments/
5 |
--------------------------------------------------------------------------------
/graphql/event-source/src/main/java/io/debezium/examples/graphql/eventsource/Main.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.graphql.eventsource;
7 |
8 | public class Main {
9 |
10 | private void run() {
11 | EventSource source = new EventSource();
12 |
13 | Runtime.getRuntime().addShutdownHook(new Thread(() -> {
14 | System.out.println("Stopping...");
15 | source.stop();
16 | }));
17 |
18 | source.run();
19 | }
20 |
21 | public static void main(String[] args) {
22 | new Main().run();
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/graphql/event-source/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, stdout
3 |
4 | # Direct log messages to stdout
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
9 |
 10 | log4j.logger.io.debezium.examples=DEBUG
11 |
--------------------------------------------------------------------------------
/graphql/example-db/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/example-mysql:2.1
2 |
3 | COPY schema-update.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/graphql/example-db/schema-update.sql:
--------------------------------------------------------------------------------
1 | # Switch to this database
2 | USE inventory;
3 |
4 | DROP TABLE IF EXISTS orders;
5 |
6 | CREATE TABLE orders (
7 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
8 | ts TIMESTAMP NOT NULL,
9 | customer_id INTEGER NOT NULL,
10 | product_id INTEGER NOT NULL,
11 | quantity INTEGER NOT NULL
12 | ) AUTO_INCREMENT = 100001;
13 |
14 | ALTER TABLE orders ADD CONSTRAINT fk_customer_id FOREIGN KEY (customer_id) REFERENCES inventory.customers(id);
15 | ALTER TABLE orders ADD CONSTRAINT fk_orders_product_id FOREIGN KEY (product_id) REFERENCES inventory.products(id);
--------------------------------------------------------------------------------
/graphql/graphiql-screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/graphql/graphiql-screenshot.png
--------------------------------------------------------------------------------
/graphql/mysql-source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mysql-source",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "decimal.handling.mode": "string",
13 | "table.include.list": "inventory.orders,inventory.customers",
14 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
15 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
16 | }
17 | }
--------------------------------------------------------------------------------
/graphql/ws-client/src/main/java/io/debezium/examples/graphql/wsclient/GraphQLQueryRequest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.graphql.wsclient;
7 |
8 | /**
9 | * Represents a GraphQL HTTP Request payload
10 | *
11 | * @see https://graphql.org/learn/serving-over-http/#post-request
12 | */
13 | public class GraphQLQueryRequest {
14 |
15 | private final String query;
16 |
17 | public GraphQLQueryRequest(String query) {
18 | this.query = query;
19 | }
20 |
21 | public String getQuery() {
22 | return query;
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/graphql/ws-client/src/main/java/io/debezium/examples/graphql/wsclient/GraphQLResponseReceiver.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.graphql.wsclient;
7 |
8 | @FunctionalInterface
9 | public interface GraphQLResponseReceiver {
10 | void messageReceived(GraphQLQueryResponse s);
11 | }
12 |
--------------------------------------------------------------------------------
/graphql/ws-client/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, stdout
3 |
4 | # Direct log messages to stdout
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %t %c{1}:%L - %m%n
9 |
 10 | log4j.logger.io.debezium.examples=DEBUG
 11 | log4j.logger.io.debezium.examples.graphql.wsclient.SimpleWebSocketClient=INFO
--------------------------------------------------------------------------------
/http-signaling-notification/initializerJson.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "httpRequest" : {
4 | "method" : "GET",
5 | "path" : "/api/signal",
6 | "queryStringParameters" : {
7 | "code" : ["10969"]
8 | }
9 | },
10 | "httpResponse" : {
11 | "body": "{\"id\":\"924e3ff8-2245-43ca-ba77-2af9af02fa07\",\"type\":\"log\",\"data\":{\"message\": \"Signal message received from http endpoint.\"}}",
12 | "statusCode": 200
13 | }
14 | }
15 | ]
16 |
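This file seeds a MockServer expectation for the HTTP signal endpoint that the connector polls. A quick way to exercise it, assuming MockServer is published on its default port 1080:

```shell
# Should return the log-type signal payload defined in the expectation above
curl -i "http://localhost:1080/api/signal?code=10969"
```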
--------------------------------------------------------------------------------
/http-signaling-notification/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": 1,
6 | "database.hostname": "postgres",
7 | "database.port": 5432,
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.dbname" : "postgres",
11 | "topic.prefix": "dbserver1",
12 | "schema.include.list": "inventory",
13 | "signal.enabled.channels": "http",
14 | "notification.enabled.channels": "http"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/http-signaling-notification/src/main/resources/META-INF/services/io.debezium.pipeline.notification.channels.NotificationChannel:
--------------------------------------------------------------------------------
1 | io.debezium.examples.notification.HttpNotificationChannel
2 |
--------------------------------------------------------------------------------
/http-signaling-notification/src/main/resources/META-INF/services/io.debezium.pipeline.signal.channels.SignalChannelReader:
--------------------------------------------------------------------------------
1 | io.debezium.examples.signal.HttpSignalChannel
2 |
--------------------------------------------------------------------------------
/infinispan-standalone/debezium-with-oracle-jdbc/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:$DEBEZIUM_VERSION
3 | ENV KAFKA_CONNECT_JDBC_DIR=$KAFKA_CONNECT_PLUGINS_DIR/kafka-connect-jdbc
4 | ENV INSTANT_CLIENT_DIR=/instant_client/
5 |
6 | USER root
7 | RUN microdnf install -y libaio && microdnf clean all
8 |
9 | USER kafka
10 |
11 | COPY oracle_instantclient/* $INSTANT_CLIENT_DIR
12 | COPY oracle_instantclient/xstreams.jar /kafka/libs
13 | COPY oracle_instantclient/ojdbc8.jar /kafka/libs
14 |
--------------------------------------------------------------------------------
/infinispan-standalone/debezium-with-oracle-jdbc/init/inventory.sql:
--------------------------------------------------------------------------------
1 | -- Create some customers
2 | CREATE TABLE customers (
3 | id NUMBER(4) NOT NULL PRIMARY KEY,
4 | first_name VARCHAR2(255) NOT NULL,
5 | last_name VARCHAR2(255) NOT NULL,
6 | email VARCHAR2(255) NOT NULL UNIQUE
7 | );
8 |
9 | GRANT SELECT ON customers to c##dbzuser;
10 | ALTER TABLE customers ADD SUPPLEMENTAL LOG DATA (ALL) COLUMNS;
11 |
12 | INSERT INTO customers VALUES (1001, 'Sally', 'Thomas', 'sally.thomas@acme.com');
13 | INSERT INTO customers VALUES (1002, 'George', 'Bailey', 'gbailey@foobar.com');
14 | INSERT INTO customers VALUES (1003, 'Edward', 'Walker', 'ed@walker.com');
15 | INSERT INTO customers VALUES (1004, 'Anne', 'Kretchmar', 'annek@noanswer.org');
16 |
17 |
--------------------------------------------------------------------------------
/jpa-aggregations/es-sink-aggregates.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "es-customers",
3 | "config": {
4 | "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "customers-complete",
7 | "connection.url": "http://elastic:9200",
8 | "key.ignore": "false",
9 | "schema.ignore" : "false",
10 | "behavior.on.null.values" : "delete",
11 | "type.name": "customer-with-addresses",
12 | "transforms" : "key",
13 | "transforms.key.type": "org.apache.kafka.connect.transforms.ExtractField$Key",
14 | "transforms.key.field": "id"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/jpa-aggregations/example-db/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/example-mysql:${DEBEZIUM_VERSION}
3 |
4 | COPY schema-update.sql /docker-entrypoint-initdb.d/
5 |
--------------------------------------------------------------------------------
/jpa-aggregations/jpa-test/src/main/java/io/debezium/aggregation/hibernate/MaterializeAggregate.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.aggregation.hibernate;
7 |
8 | import java.lang.annotation.ElementType;
9 | import java.lang.annotation.Retention;
10 | import java.lang.annotation.RetentionPolicy;
11 | import java.lang.annotation.Target;
12 |
13 | @Retention(RetentionPolicy.RUNTIME)
14 | @Target(ElementType.TYPE)
15 | public @interface MaterializeAggregate {
16 | String aggregateName();
17 | }
18 |
--------------------------------------------------------------------------------
/jpa-aggregations/jpa-test/src/main/resources/META-INF/services/org.hibernate.integrator.spi.Integrator:
--------------------------------------------------------------------------------
1 | io.debezium.aggregation.hibernate.AggregationBuilderIntegrator
--------------------------------------------------------------------------------
/jpa-aggregations/jpa-test/src/test/java/com/example/domain/AddressType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package com.example.domain;
7 |
8 | public enum AddressType {
9 | SHIPPING,
10 | BILLING,
11 | LIVING;
12 | }
--------------------------------------------------------------------------------
/jpa-aggregations/jpa-test/src/test/java/com/example/domain/Category.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package com.example.domain;
7 |
8 | import javax.persistence.Entity;
9 | import javax.persistence.Id;
10 | import javax.persistence.Table;
11 |
12 | @Entity
13 | @Table(name = "categories")
14 | public class Category {
15 |
16 | @Id
17 | public long id;
18 |
19 | public String name;
20 | }
21 |
--------------------------------------------------------------------------------
/jpa-aggregations/jpa-test/src/test/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=DEBUG, stdout
3 |
4 | # Direct log messages to stdout
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
9 |
--------------------------------------------------------------------------------
/jpa-aggregations/jpa-test/transaction.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/jpa-aggregations/jpa-test/transaction.log
--------------------------------------------------------------------------------
/jpa-aggregations/source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "database.include.list": "inventory",
13 | "table.include.list": ".*aggregates",
14 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
15 | "schema.history.internal.kafka.topic": "schema-changes.inventory",
16 | "transforms" : "expandjson",
17 | "transforms.expandjson.type": "io.debezium.aggregation.smt.ExpandJsonSmt"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/json-logging/debezium-log4j-json/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/connect:2.1
2 |
3 | RUN curl https://repo1.maven.org/maven2/net/logstash/log4j/jsonevent-layout/1.7/jsonevent-layout-1.7.jar > /kafka/libs/jsonevent-layout.jar &&\
4 | curl https://repo1.maven.org/maven2/commons-lang/commons-lang/2.6/commons-lang-2.6.jar > /kafka/libs/commons-lang.jar &&\
5 | curl https://repo1.maven.org/maven2/net/minidev/json-smart/2.3/json-smart-2.3.jar > /kafka/libs/json-smart.jar &&\
6 | curl https://repo1.maven.org/maven2/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar > /kafka/libs/accessors-smart.jar &&\
7 | curl https://repo1.maven.org/maven2/org/ow2/asm/asm/8.0.1/asm-8.0.1.jar > /kafka/libs/asm.jar
8 |
9 | COPY log4j.properties /kafka/config/log4j.properties
10 |
--------------------------------------------------------------------------------
/json-logging/debezium-log4j-json/log4j.properties:
--------------------------------------------------------------------------------
1 | kafka.logs.dir=logs
2 |
3 | log4j.rootLogger=INFO, stdout, appender
4 |
5 | # Disable excessive reflection warnings - KAFKA-5229
6 | log4j.logger.org.reflections=ERROR
7 |
8 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
9 | log4j.appender.stdout.threshold=INFO
10 | log4j.appender.stdout.layout=net.logstash.log4j.JSONEventLayoutV1
11 |
12 |
13 | log4j.appender.appender=org.apache.log4j.DailyRollingFileAppender
14 | log4j.appender.appender.DatePattern='.'yyyy-MM-dd-HH
15 | log4j.appender.appender.File=${kafka.logs.dir}/connect-service.log
16 | log4j.appender.appender.layout=org.apache.log4j.PatternLayout
17 | log4j.appender.appender.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
18 |
--------------------------------------------------------------------------------
/json-logging/register-mysql.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "database.include.list": "inventory",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/kafka-ssl/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "localhost",
7 | "database.port": "5432",
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.dbname" : "postgres",
11 | "topic.prefix": "dbserver1",
12 | "schema.include.list": "inventory"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/kafka-ssl/resources/kafka-ssl-keystore.p12:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/kafka-ssl/resources/kafka-ssl-keystore.p12
--------------------------------------------------------------------------------
/kafka-ssl/resources/kafka-ssl-truststore.p12:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/kafka-ssl/resources/kafka-ssl-truststore.p12
--------------------------------------------------------------------------------
/kinesis/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=WARN, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 |
--------------------------------------------------------------------------------
/ksql/README.md:
--------------------------------------------------------------------------------
1 | # Debezium KSQL Demo
2 |
3 | This demo accompanies the blog post [Querying Debezium Change Data Events With KSQL](https://debezium.io/blog/2018/05/24/querying-debezium-change-data-eEvents-with-ksql/).
4 |
5 | ```shell
  6 | # Start Kafka, Kafka Connect, the KSQL server and CLI, etc.
7 | export DEBEZIUM_VERSION=2.1
8 | docker-compose up
9 |
10 | # Start Debezium MySQL connector
11 | curl -i -X POST -H "Accept:application/json" -H "Content-Type:application/json" http://localhost:8083/connectors/ -d @register-mysql.json
12 |
13 | # Launch KSQL CLI
14 | docker-compose exec ksql-cli ksql http://ksql-server:8088
15 |
16 | # Run KSQL commands as described in the blog post...
17 |
18 | # Shut down the cluster
19 | docker-compose down
20 | ```
21 |
--------------------------------------------------------------------------------
/kstreams-fk-join/aggregator/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package -Pnative -Dquarkus.native.container-build=true
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/kafka-streams-quickstart-aggregator .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/kafka-streams-quickstart-aggregator
15 | #
16 | ###
17 | FROM registry.fedoraproject.org/fedora-minimal
18 | RUN microdnf -y install libxcrypt-compat && microdnf clean all
19 | WORKDIR /work/
20 | COPY target/*-runner /work/application
21 | RUN chmod 775 /work
22 | EXPOSE 8080
23 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0", "-Xmx32m"]
24 |
--------------------------------------------------------------------------------
/kstreams-fk-join/aggregator/src/main/java/io/debezium/examples/kstreams/fkjoin/model/Address.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.kstreams.fkjoin.model;
2 |
3 | public class Address {
4 |
5 | public long id;
6 | public int customer_id;
7 | public String street;
8 | public String city;
9 | public String zipcode;
10 | public String country;
11 |
12 | @Override
13 | public String toString() {
14 | return "Address [id=" + id + ", customer_id=" + customer_id + ", street=" + street + ", city=" + city
15 | + ", zipcode=" + zipcode + ", country=" + country + "]";
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/kstreams-fk-join/aggregator/src/main/java/io/debezium/examples/kstreams/fkjoin/model/AddressAndCustomer.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.kstreams.fkjoin.model;
2 |
3 | public class AddressAndCustomer {
4 |
5 | public Address address;
6 | public Customer customer;
7 |
8 | public AddressAndCustomer() {
9 | }
10 |
11 | public AddressAndCustomer(Address address, Customer customer) {
12 | this.address = address;
13 | this.customer = customer;
14 | }
15 |
16 | public static AddressAndCustomer create(Address address, Customer customer) {
17 | return new AddressAndCustomer(address, customer);
18 | }
19 |
20 | public Address address() {
21 | return address;
22 | }
23 |
24 | @Override
25 | public String toString() {
26 | return "AddressAndCustomer [address=" + address + ", customer=" + customer + "]";
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/kstreams-fk-join/aggregator/src/main/java/io/debezium/examples/kstreams/fkjoin/model/Customer.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.kstreams.fkjoin.model;
2 |
3 | public class Customer {
4 |
5 | public int id;
6 | public String first_name;
7 | public String last_name;
8 | public String email;
9 | @Override
10 | public String toString() {
11 | return "Customer [id=" + id + ", first_name=" + first_name + ", last_name=" + last_name + ", email=" + email
12 | + "]";
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/kstreams-fk-join/aggregator/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | customers.topic=dbserver1.inventory.customers
2 | addresses.topic=dbserver1.inventory.addresses
3 | customers.with.addresses.topic=customers-with-addresses
4 |
5 | quarkus.kafka-streams.bootstrap-servers=localhost:9092
6 | quarkus.kafka-streams.application-id=kstreams-fkjoin-aggregator
7 | quarkus.kafka-streams.topics=${customers.topic},${addresses.topic}
8 |
9 | # streams options
10 | kafka-streams.cache.max.bytes.buffering=10240
11 | kafka-streams.commit.interval.ms=1000
12 | kafka-streams.metadata.max.age.ms=500
13 | kafka-streams.auto.offset.reset=earliest
14 | kafka-streams.metrics.recording.level=DEBUG
15 | kafka-streams.consumer.session.timeout.ms=150
16 | kafka-streams.consumer.heartbeat.interval.ms=100
17 |
18 | quarkus.log.console.enable=true
19 | quarkus.log.console.level=INFO
20 |
--------------------------------------------------------------------------------
/kstreams-fk-join/inventory-addresses.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE inventory.addresses (
2 | id SERIAL NOT NULL PRIMARY KEY,
3 | customer_id INTEGER NOT NULL,
4 | street VARCHAR(255) NOT NULL,
5 | city VARCHAR(255) NOT NULL,
6 | zipcode VARCHAR(255) NOT NULL,
7 | country VARCHAR(255) NOT NULL,
8 | FOREIGN KEY (customer_id) REFERENCES inventory.customers(id)
9 | );
10 | ALTER SEQUENCE inventory.addresses_id_seq RESTART WITH 100001;
11 | ALTER TABLE inventory.addresses REPLICA IDENTITY FULL;
12 |
13 | INSERT INTO inventory.addresses
14 | VALUES (default, 1001, '42 Main Street', 'Hamburg', '90210', 'Canada'),
15 | (default, 1001, '11 Post Dr.', 'Berlin', '90211', 'Canada'),
16 | (default, 1002, '12 Rodeo Dr.', 'Los Angeles', '90212', 'US'),
17 | (default, 1002, '1 Debezium Plaza', 'Monterey', '90213', 'US'),
18 | (default, 1002, '2 Debezium Plaza', 'Monterey', '90213', 'US');
19 |
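This script is applied to the running example database before registering the connector. A sketch, assuming the compose service is named postgres and uses the default postgres/postgres credentials:

```shell
# Create and populate the addresses table inside the running Postgres container
cat inventory-addresses.sql | docker-compose exec -T postgres psql -U postgres postgres
```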
--------------------------------------------------------------------------------
/kstreams-fk-join/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "postgres",
5 | "database.port": "5432",
6 | "database.user": "postgres",
7 | "database.password": "postgres",
8 | "database.dbname" : "postgres",
9 | "topic.prefix": "dbserver1",
10 | "schema.include.list": "inventory",
11 | "decimal.handling.mode" : "string",
12 | "poll.interval.ms": "100",
13 | "key.converter": "org.apache.kafka.connect.json.JsonConverter",
14 | "key.converter.schemas.enable": "false",
15 | "value.converter": "org.apache.kafka.connect.json.JsonConverter",
16 | "value.converter.schemas.enable": "false"
17 | }
18 |
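Unlike most register-*.json files in this repository, this one is a bare config object without the name/config wrapper, which fits Connect's PUT-style registration (create or update a named connector). A sketch, with the connector name chosen here purely for illustration:

```shell
# Create (or update) the connector from the flat config file
curl -i -X PUT -H "Accept:application/json" -H "Content-Type:application/json" \
  http://localhost:8083/connectors/inventory-connector/config -d @register-postgres.json
```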
--------------------------------------------------------------------------------
/kstreams-live-update/aggregator/src/main/java/io/debezium/examples/kstreams/liveupdate/aggregator/model/Category.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.kstreams.liveupdate.aggregator.model;
7 |
8 | import com.fasterxml.jackson.annotation.JsonProperty;
9 |
10 | public class Category {
11 |
12 | public long id;
13 |
14 | public String name;
15 |
16 | @JsonProperty("average_price")
17 | public long averagePrice;
18 |
19 | public Category() {
20 | }
21 |
22 | public Category(long id, String name) {
23 | this.id = id;
24 | this.name = name;
25 | }
26 |
27 | @Override
28 | public String toString() {
29 | return "Category [id=" + id + ", name=" + name + "]";
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/kstreams-live-update/aggregator/src/main/java/io/debezium/examples/kstreams/liveupdate/aggregator/model/ValueAggregator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.kstreams.liveupdate.aggregator.model;
7 |
8 | public class ValueAggregator {
9 |
10 | public long count;
11 | public long sum;
12 |
13 | public ValueAggregator() {
14 | }
15 |
16 | public ValueAggregator increment(long value) {
17 | sum += value;
18 | count++;
19 | return this;
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/kstreams-live-update/aggregator/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | orders.topic=dbserver1.inventory.orders
2 | categories.topic=dbserver1.inventory.categories
3 |
4 | quarkus.kafka-streams.bootstrap-servers=localhost:9092
5 | quarkus.kafka-streams.application-id=order-aggregator-ws
6 | quarkus.kafka-streams.topics=${orders.topic},${categories.topic}
7 |
8 | # streams options
9 | kafka-streams.cache.max.bytes.buffering=10240
10 | kafka-streams.commit.interval.ms=1000
11 | kafka-streams.metadata.max.age.ms=500
12 | kafka-streams.auto.offset.reset=earliest
13 | kafka-streams.processing.guarantee=exactly_once
14 |
15 | quarkus.log.console.enable=true
16 | quarkus.log.console.level=INFO
17 |
--------------------------------------------------------------------------------
/kstreams-live-update/aggregator/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, stdout
3 |
4 | # Direct log messages to stdout
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
9 |
 10 | log4j.logger.io.debezium.examples=DEBUG
11 |
--------------------------------------------------------------------------------
/kstreams-live-update/docker-compose.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/kstreams-live-update/docker-compose.png
--------------------------------------------------------------------------------
/kstreams-live-update/es-sink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "elastic-sink",
3 | "config": {
4 | "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "dbserver1.inventory.orders",
7 | "connection.url": "http://elastic:9200",
8 | "transforms": "unwrap,key",
9 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
10 | "transforms.key.type": "org.apache.kafka.connect.transforms.ExtractField$Key",
11 | "transforms.key.field": "id",
12 | "key.ignore": "false",
13 | "type.name": "order",
14 | "behavior.on.null.values" : "delete",
15 | "topic.index.map" : "dbserver1.inventory.orders:orders",
16 | "schema.ignore" : "true"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kstreams-live-update/event-source/src/main/java/io/debezium/examples/kstreams/liveupdate/eventsource/Customer.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.kstreams.liveupdate.eventsource;
2 |
3 | import javax.persistence.Entity;
4 | import javax.persistence.Id;
5 | import javax.persistence.Table;
6 |
7 | @Entity
8 | @Table(name="customers")
9 | public class Customer {
10 | @Id
11 | public long id;
12 | public String firstName;
13 | public String lastName;
14 | public String email;
15 |
16 | public Customer() {
17 |
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/kstreams-live-update/event-source/src/main/java/io/debezium/examples/kstreams/liveupdate/eventsource/Product.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.kstreams.liveupdate.eventsource;
2 |
3 | import javax.persistence.Entity;
4 | import javax.persistence.Id;
5 | import javax.persistence.Table;
6 |
7 | @Entity
8 | @Table(name="products")
9 | public class Product {
10 | @Id
11 | public long id;
12 |
13 | public String name;
14 | public String description;
15 | public Float weight;
16 |
17 | public Product() {
18 |
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/kstreams-live-update/event-source/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, stdout
3 |
4 | # Direct log messages to stdout
5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
6 | log4j.appender.stdout.Target=System.out
7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
9 |
 10 | log4j.logger.io.debezium.examples=DEBUG
11 |
--------------------------------------------------------------------------------
/kstreams-live-update/mysql-db/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/example-mysql:2.1
2 |
3 | COPY schema-update.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/kstreams-live-update/mysql-source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mysql-source",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "decimal.handling.mode" : "string",
13 | "table.include.list": "inventory.orders,inventory.categories",
14 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
15 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/kstreams-live-update/pgsql-source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "name": "inventory-connector",
5 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
6 | "tasks.max": "1",
7 | "database.hostname": "postgres",
8 | "database.port": "5432",
9 | "database.dbname": "postgres",
10 | "database.user": "postgres",
11 | "database.password": "debezium",
12 | "topic.prefix": "dbserver1",
13 | "plugin.name": "pgoutput",
14 | "slot.name": "debezium",
15 | "publication.name": "dbz_publication",
16 | "table.include.list": "inventory.orders,inventory.categories"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kstreams-live-update/postgres-db/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/example-postgres:2.1
2 |
3 | COPY schema-update.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/kstreams/debezium-mongodb/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/connect:2.0
2 | ENV KAFKA_CONNECT_MONGODB_DIR=$KAFKA_CONNECT_PLUGINS_DIR/kafka-connect-mongodb
3 |
4 | USER root
5 | RUN microdnf -y install git maven java-11-openjdk-devel && microdnf clean all
6 |
7 | USER kafka
8 |
9 | # Deploy MongoDB Sink Connector
10 | RUN mkdir -p $KAFKA_CONNECT_MONGODB_DIR && cd $KAFKA_CONNECT_MONGODB_DIR && \
11 | git clone https://github.com/hpgrahsl/kafka-connect-mongodb.git && \
12 | cd kafka-connect-mongodb && \
13 | git fetch --tags && \
14 | git checkout tags/v1.2.0 && \
15 | sed -i 's/http:\/\/packages.confluent.io\/maven\//https:\/\/packages.confluent.io\/maven\//g' pom.xml && \
16 | mvn clean package -DskipTests=true -DskipITs=true && \
17 | mv target/kafka-connect-mongodb/kafka-connect-mongodb-1.2.0-jar-with-dependencies.jar $KAFKA_CONNECT_MONGODB_DIR && \
18 | cd .. && rm -rf $KAFKA_CONNECT_MONGODB_DIR/kafka-connect-mongodb
19 |
--------------------------------------------------------------------------------
/kstreams/mongodb-sink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mongodb-sink",
3 | "config": {
4 | "connector.class": "at.grahsl.kafka.connect.mongodb.MongoDbSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "final_ddd_aggregates",
7 | "mongodb.connection.uri": "mongodb://mongodb:27017/inventory?w=1&journal=true",
8 | "mongodb.collection": "customers_with_addresses",
9 | "mongodb.document.id.strategy": "at.grahsl.kafka.connect.mongodb.processor.id.strategy.FullKeyStrategy",
10 | "mongodb.delete.on.null.values": "true"
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/kstreams/mysql-source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mysql-source",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "table.include.list": "inventory.customers,inventory.addresses",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory",
15 | "transforms": "unwrap",
16 | "transforms.unwrap.type":"io.debezium.transforms.ExtractNewRecordState",
17 | "transforms.unwrap.drop.tombstones":"false"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/kstreams/poc-ddd-aggregates/run-aggregator.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export PATH="/opt/poc-ddd-aggregates/jdk/bin:${PATH}"
4 | export JAVA_APP_DIR=/opt/poc-ddd-aggregates/lib
5 | export JAVA_MAIN_CLASS=io.debezium.examples.aggregation.StreamingAggregatesDDD
6 |
7 | exec /opt/poc-ddd-aggregates/run-java.sh "$PARENT_TOPIC" "$CHILDREN_TOPIC" "$BOOTSTRAP_SERVERS"
8 |
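The script reads the parent/child topic names and the bootstrap servers from environment variables and hands them to run-java.sh as positional arguments. A usage sketch with illustrative values (matching the topics produced by the mysql-source connector above, but adjust as needed):

```shell
# Values are illustrative; point them at the topics created by the source connector
PARENT_TOPIC=dbserver1.inventory.customers \
CHILDREN_TOPIC=dbserver1.inventory.addresses \
BOOTSTRAP_SERVERS=kafka:9092 \
./run-aggregator.sh
```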
--------------------------------------------------------------------------------
/kstreams/poc-ddd-aggregates/src/main/java/io/debezium/examples/aggregation/model/EventType.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.aggregation.model;
2 |
3 | public enum EventType { UPSERT, DELETE }
4 |
--------------------------------------------------------------------------------
/machine-learning/flink-spark-iris/iris-flink/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM flink:1.17-scala_2.12-java11
2 | RUN curl -sfSL https://dlcdn.apache.org/flink/flink-ml-2.3.0/apache-flink-ml-2.3.0.tar.gz -o apache-flink-ml-2.3.0.tar.gz && \
3 | tar -zvxf apache-flink-ml-2.3.0.tar.gz && \
4 | cp apache-flink-ml-2.3.0/deps/lib/* /opt/flink/lib && \
5 | rm -rf apache-flink-ml-2.3.0 && \
6 | rm -f apache-flink-ml-2.3.0.tar.gz
7 | COPY target/debezium-flink-0.1.jar /opt/flink
8 |
--------------------------------------------------------------------------------
/machine-learning/flink-spark-iris/iris-spark/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM apache/spark:3.3.3
2 | COPY target/debezium-spark-1.0-SNAPSHOT-jar-with-dependencies.jar /opt/spark/work-dir
3 | CMD ["/opt/spark/bin/spark-submit", "debezium-spark-1.0-SNAPSHOT-jar-with-dependencies.jar"]
--------------------------------------------------------------------------------
/machine-learning/flink-spark-iris/postgres/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/postgres:15
2 | COPY iris_train1.sql /docker-entrypoint-initdb.d/
--------------------------------------------------------------------------------
/machine-learning/flink-spark-iris/register-postgres-flink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "iris-connector-flink",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "postgres",
7 | "database.port": "5432",
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.dbname" : "postgres",
11 | "topic.prefix": "flink",
12 | "table.include.list": "public.iris_.*",
13 | "key.converter": "org.apache.kafka.connect.json.JsonConverter",
14 | "value.converter": "org.apache.kafka.connect.json.JsonConverter",
15 | "key.converter.schemas.enable": "true",
16 | "value.converter.schemas.enable": "true",
17 | "transforms": "unwrap",
18 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/machine-learning/flink-spark-iris/register-postgres-spark.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "iris-connector-spark",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "postgres",
7 | "database.port": "5432",
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.dbname" : "postgres",
11 | "topic.prefix": "spark",
12 | "table.include.list": "public.iris_.*",
13 | "key.converter": "org.apache.kafka.connect.json.JsonConverter",
14 | "value.converter": "org.apache.kafka.connect.json.JsonConverter",
15 | "key.converter.schemas.enable": "false",
16 | "value.converter.schemas.enable": "false",
17 | "transforms": "unwrap",
18 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/machine-learning/tensorflow-mnist/connect/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:${DEBEZIUM_VERSION}
3 | ENV KAFKA_CONNECT_MNIST_DIR=$KAFKA_CONNECT_PLUGINS_DIR/kafka-connect-mnist
4 |
5 | RUN mkdir $KAFKA_CONNECT_MNIST_DIR
6 | COPY mnist-smt/target/mnist-to-csv-1.0-SNAPSHOT.jar $KAFKA_CONNECT_MNIST_DIR
--------------------------------------------------------------------------------
/machine-learning/tensorflow-mnist/postgres/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/postgres:15
2 |
3 | COPY mnist_train.sql /docker-entrypoint-initdb.d/
--------------------------------------------------------------------------------
/machine-learning/tensorflow-mnist/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mnist-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "postgres",
7 | "database.port": "5432",
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.dbname" : "postgres",
11 | "topic.prefix": "tf",
12 | "table.include.list": "public.mnist_.*",
13 | "key.converter": "org.apache.kafka.connect.storage.StringConverter",
14 | "value.converter": "org.apache.kafka.connect.storage.StringConverter",
15 | "transforms": "unwrap, mnist",
16 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
17 | "transforms.mnist.type": "io.debezium.transforms.MnistToCsv"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/machine-learning/tensorflow-mnist/tensorflow/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG TENSORFLOW_VERSION
2 | FROM tensorflow/tensorflow:${TENSORFLOW_VERSION}-jupyter
3 |
4 | COPY requirements.txt /tf
5 | RUN python3 -m pip install --upgrade pip && python3 -m pip install -r requirements.txt
--------------------------------------------------------------------------------
/machine-learning/tensorflow-mnist/tensorflow/requirements.txt:
--------------------------------------------------------------------------------
1 | tensorflow-io
2 |
3 |
--------------------------------------------------------------------------------
/mongodb-outbox/debezium-strimzi/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG STRIMZI_VERSION=latest-kafka-3.4.0
2 | FROM quay.io/strimzi/kafka:${STRIMZI_VERSION}
3 |
4 | ARG DEBEZIUM_CONNECTOR_VERSION=2.1.3.Final
5 | ENV KAFKA_CONNECT_PLUGIN_PATH=/tmp/connect-plugins
6 |
7 | RUN mkdir $KAFKA_CONNECT_PLUGIN_PATH &&\
8 | cd $KAFKA_CONNECT_PLUGIN_PATH &&\
9 | curl -sfSL https://repo1.maven.org/maven2/io/debezium/debezium-connector-mongodb/${DEBEZIUM_CONNECTOR_VERSION}/debezium-connector-mongodb-${DEBEZIUM_CONNECTOR_VERSION}-plugin.tar.gz | tar xz
10 |
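The Strimzi base image and the Debezium connector version are controlled by build args. A build sketch, with the image tag chosen only for illustration:

```shell
# Build a Strimzi Kafka image that bundles the Debezium MongoDB connector plugin
docker build --build-arg DEBEZIUM_CONNECTOR_VERSION=2.1.3.Final \
  -t debezium-examples/mongodb-outbox-connect:latest .
```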
--------------------------------------------------------------------------------
/monitoring/dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/monitoring/dashboard.png
--------------------------------------------------------------------------------
/monitoring/debezium-grafana/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG GRAFANA_VERSION
2 | FROM grafana/grafana:${GRAFANA_VERSION}
3 |
4 | COPY dashboard.yml /etc/grafana/provisioning/dashboards
5 | COPY datasource.yml /etc/grafana/provisioning/datasources
6 | COPY debezium-dashboard.json /var/lib/grafana/dashboards/debezium-dashboard.json
7 |
--------------------------------------------------------------------------------
/monitoring/debezium-grafana/dashboard.yml:
--------------------------------------------------------------------------------
1 | apiVersion: 1
2 | providers:
3 | - name: 'default'
4 | orgId: 1
5 | folder: ''
6 | type: file
7 | disableDeletion: false
8 | updateIntervalSeconds: 10 # how often Grafana will scan for changed dashboards

9 | options:
10 | path: /var/lib/grafana/dashboards
11 |
--------------------------------------------------------------------------------
/monitoring/debezium-grafana/datasource.yml:
--------------------------------------------------------------------------------
1 | apiVersion: 1
2 |
3 | datasources:
4 | - name: prometheus
5 | type: prometheus
6 | url: http://prometheus:9090
7 | access: proxy
8 | version: 1
9 |
--------------------------------------------------------------------------------
/monitoring/debezium-jmx-exporter/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:${DEBEZIUM_VERSION}
3 |
4 | ARG JMX_AGENT_VERSION
5 | RUN mkdir /kafka/etc && cd /kafka/etc &&\
6 | curl -so jmx_prometheus_javaagent.jar \
7 | https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/$JMX_AGENT_VERSION/jmx_prometheus_javaagent-$JMX_AGENT_VERSION.jar
8 |
9 | COPY config.yml /kafka/etc/config.yml
10 |
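Note: a sketch of how the downloaded agent is typically attached to the Connect JVM; the actual option and port are defined in this example's Compose file, so treat these values as assumptions:

    export KAFKA_OPTS="-javaagent:/kafka/etc/jmx_prometheus_javaagent.jar=8080:/kafka/etc/config.yml"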
--------------------------------------------------------------------------------
/monitoring/debezium-prometheus/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PROMETHEUS_VERSION
2 | FROM prom/prometheus:${PROMETHEUS_VERSION}
3 |
4 | RUN sed -i -e "s/\"localhost:9090\"/\"localhost:9090\",\"connect:8080\"/" /etc/prometheus/prometheus.yml
5 |
--------------------------------------------------------------------------------
/monitoring/register-sqlserver.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.sqlserver.SqlServerConnector",
5 | "tasks.max" : "1",
6 | "topic.prefix" : "server1",
7 | "database.hostname" : "sqlserver",
8 | "database.port" : "1433",
9 | "database.user" : "sa",
10 | "database.password" : "Password!",
11 | "database.names" : "testDB",
12 | "schema.history.internal.kafka.bootstrap.servers" : "kafka:9092",
13 | "schema.history.internal.kafka.topic": "schema-changes.inventory",
14 | "database.encrypt": "false"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/mysql-replication/connector-config-master.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "mysql-master",
5 | "database.port": "3306",
6 | "database.user": "debezium",
7 | "database.password": "dbz",
8 | "database.server.id": "184054",
9 | "topic.prefix": "dbserver1",
10 | "database.include.list": "inventory",
11 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
12 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
13 | }
14 |
--------------------------------------------------------------------------------
/mysql-replication/connector-config-replica.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "mysql-replica",
5 | "database.port": "3306",
6 | "database.user": "debezium",
7 | "database.password": "dbz",
8 | "database.server.id": "184054",
9 | "topic.prefix": "dbserver1",
10 | "database.include.list": "inventory",
11 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
12 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
13 | }
14 |
--------------------------------------------------------------------------------
/mysql-replication/register-mysql.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql-replica",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "database.include.list": "inventory",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/offset-editor/src/main/java/io/debezium/examples/offset/editor/Main.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.offset.editor;
2 |
3 | public class Main {
4 | public static void main(String[] args) {
5 | if (args.length > 0) {
6 | CommandLineInterface cli = new CommandLineInterface(args);
7 | cli.run();
8 | } else {
9 | OffsetEditorApp.main(args);
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/openshift/kafka-connector.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: kafka.strimzi.io/v1beta2
2 | kind: KafkaConnector
3 | metadata:
4 | name: debezium-connector-mysql
5 | labels:
6 | strimzi.io/cluster: debezium-connect-cluster
7 | spec:
8 | class: io.debezium.connector.mysql.MySqlConnector
9 | tasksMax: 1
10 | config:
11 | tasks.max: 1
12 | database.hostname: mysql
13 | database.port: 3306
14 | database.user: ${secrets:debezium-example/debezium-secret:username}
15 | database.password: ${secrets:debezium-example/debezium-secret:password}
16 | database.server.id: 184054
17 | database.server.name: mysql
18 | database.include.list: inventory
19 | database.history.kafka.bootstrap.servers: debezium-cluster-kafka-bootstrap:9092
20 | database.history.kafka.topic: schema-changes.inventory
21 |
--------------------------------------------------------------------------------
/openshift/mysql.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: mysql
5 | spec:
6 | ports:
7 | - port: 3306
8 | selector:
9 | app: mysql
10 | clusterIP: None
11 | ---
12 | apiVersion: apps/v1
13 | kind: Deployment
14 | metadata:
15 | name: mysql
16 | spec:
17 | selector:
18 | matchLabels:
19 | app: mysql
20 | strategy:
21 | type: Recreate
22 | template:
23 | metadata:
24 | labels:
25 | app: mysql
26 | spec:
27 | containers:
28 | - image: quay.io/debezium/example-mysql:1.9
29 | name: mysql
30 | env:
31 | # Use secret in real usage
32 | - name: MYSQL_ROOT_PASSWORD
33 | value: debezium
34 | - name: MYSQL_USER
35 | value: mysqluser
36 | - name: MYSQL_PASSWORD
37 | value: mysqlpw
38 | ports:
39 | - containerPort: 3306
40 | name: mysql
41 |
--------------------------------------------------------------------------------
/openshift/role-binding.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: rbac.authorization.k8s.io/v1
2 | kind: RoleBinding
3 | metadata:
4 | name: connector-configuration-role-binding
5 | namespace: debezium-example
6 | subjects:
7 | - kind: ServiceAccount
8 | name: debezium-connect-cluster-connect
9 | namespace: debezium-example
10 | roleRef:
11 | kind: Role
12 | name: connector-configuration-role
13 | apiGroup: rbac.authorization.k8s.io
14 |
--------------------------------------------------------------------------------
/openshift/role.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: rbac.authorization.k8s.io/v1
2 | kind: Role
3 | metadata:
4 | name: connector-configuration-role
5 | namespace: debezium-example
6 | rules:
7 | - apiGroups: [""]
8 | resources: ["secrets"]
9 | resourceNames: ["debezium-secret"]
10 | verbs: ["get"]
11 |
--------------------------------------------------------------------------------
/openshift/secret.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Secret
3 | metadata:
4 | name: debezium-secret
5 | namespace: debezium-example
6 | type: Opaque
7 | data:
8 | username: ZGViZXppdW0=
9 | password: ZGJ6
10 |
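Note: the data values are base64-encoded; they decode to the tutorial credentials and can be regenerated with, for example:

    echo -n debezium | base64   # ZGViZXppdW0=
    echo -n dbz | base64        # ZGJ6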
--------------------------------------------------------------------------------
/openshift/strimzi-kafka-operator.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: operators.coreos.com/v1alpha1
2 | kind: Subscription
3 | metadata:
4 | name: my-strimzi-kafka-operator
5 | namespace: openshift-operators
6 | spec:
7 | channel: stable
8 | name: strimzi-kafka-operator
9 | source: operatorhubio-catalog
10 | sourceNamespace: olm
11 |
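Note: a minimal sketch of installing the subscription (and the other manifests in this directory), assuming an OpenShift cluster with OLM available:

    oc apply -f strimzi-kafka-operator.yaml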
--------------------------------------------------------------------------------
/operator/tutorial-postgresql-kafka/.gitignore:
--------------------------------------------------------------------------------
1 | install.sh
--------------------------------------------------------------------------------
/operator/tutorial-postgresql-kafka/destroy-environment.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 |
3 | source env.sh
4 |
5 | kind delete cluster --name $CLUSTER
--------------------------------------------------------------------------------
/operator/tutorial-postgresql-kafka/env.sh:
--------------------------------------------------------------------------------
1 | CLUSTER=debezium
2 | NAMESPACE=debezium
3 | TIMEOUT=300s
4 |
--------------------------------------------------------------------------------
/operator/tutorial-postgresql-kafka/infra/002_kafka-ephemeral.yml:
--------------------------------------------------------------------------------
1 | apiVersion: kafka.strimzi.io/v1beta2
2 | kind: Kafka
3 | metadata:
4 | name: dbz-kafka
5 | spec:
6 | kafka:
7 | version: 3.5.0
8 | replicas: 1
9 | listeners:
10 | - name: plain
11 | port: 9092
12 | type: internal
13 | tls: false
14 | - name: tls
15 | port: 9093
16 | type: internal
17 | tls: true
18 | config:
19 | offsets.topic.replication.factor: 1
20 | transaction.state.log.replication.factor: 1
21 | transaction.state.log.min.isr: 1
22 | default.replication.factor: 1
23 | min.insync.replicas: 1
24 | inter.broker.protocol.version: "3.5"
25 | storage:
26 | type: ephemeral
27 | zookeeper:
28 | replicas: 1
29 | storage:
30 | type: ephemeral
31 | entityOperator:
32 | topicOperator: {}
33 | userOperator: {}
--------------------------------------------------------------------------------
/operator/tutorial-postgresql-kafka/infra/010_debezium-subscription.yml:
--------------------------------------------------------------------------------
1 | apiVersion: operators.coreos.com/v1alpha1
2 | kind: Subscription
3 | metadata:
4 | name: debezium-operator-subscription
5 | namespace: operators
6 | spec:
7 | installPlanApproval: Automatic
8 | name: debezium-operator
9 | source: operatorhubio-catalog
10 | sourceNamespace: olm
11 | startingCSV: debezium-operator.v2.4.0
12 |
--------------------------------------------------------------------------------
/operator/tutorial-pubsub/.gitignore:
--------------------------------------------------------------------------------
1 | service-account.json
2 | k8s/debezium/001_pubsub.yml
3 |
--------------------------------------------------------------------------------
/operator/tutorial-pubsub/destroy-environment.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 |
3 | source env.sh
4 |
5 | kind delete cluster --name $CLUSTER
6 |
--------------------------------------------------------------------------------
/operator/tutorial-pubsub/env.sh:
--------------------------------------------------------------------------------
1 | CLUSTER=debezium-demo
2 | NAMESPACE=demo
3 | TIMEOUT=300s
4 | IMAGE_TAG=2.5.0.Final
5 | PUBSUB_PROJECT_ID=change_me
6 | PUBSUB_CREDENTIALS_FILE=service-account.json
7 | PUBSUB_TOPIC=demo.inventory.customers
8 | PUBSUB_SUBSCRIPTION=demo
9 |
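Note: PUBSUB_PROJECT_ID and the credentials file must be adjusted before running; a minimal sketch of creating the topic and subscription named above, assuming they do not exist yet in the project:

    gcloud pubsub topics create demo.inventory.customers
    gcloud pubsub subscriptions create demo --topic demo.inventory.customers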
--------------------------------------------------------------------------------
/operator/tutorial-pubsub/k8s/operator/001_subscription.yml:
--------------------------------------------------------------------------------
1 | apiVersion: operators.coreos.com/v1alpha1
2 | kind: Subscription
3 | metadata:
4 | name: debezium-operator-subscription
5 | namespace: operators
6 | spec:
7 | installPlanApproval: Automatic
8 | name: debezium-operator
9 | source: operatorhubio-catalog
10 | sourceNamespace: olm
11 | channel: debezium-2.5.x
12 | startingCSV: debezium-operator.v2.5.3-final
13 |
--------------------------------------------------------------------------------
/operator/tutorial-pubsub/pull.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 |
3 | source env.sh
4 |
5 | gcloud pubsub subscriptions pull $PUBSUB_SUBSCRIPTION --auto-ack --format=json --limit=$1 \
6 | | jq -r '.[].message.data | @base64d' \
7 | | jq .payload
8 |
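Usage example: pull and print up to ten change events from the subscription defined in env.sh:

    ./pull.sh 10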
--------------------------------------------------------------------------------
/outbox/debezium-strimzi/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG STRIMZI_VERSION=latest-kafka-3.5.1
2 | FROM quay.io/strimzi/kafka:${STRIMZI_VERSION}
3 |
4 | ARG DEBEZIUM_CONNECTOR_VERSION=2.4.0.Alpha2
5 | ENV KAFKA_CONNECT_PLUGIN_PATH=/tmp/connect-plugins/
6 | ENV KAFKA_CONNECT_LIBS=/opt/kafka/libs
7 |
8 | RUN mkdir $KAFKA_CONNECT_PLUGIN_PATH &&\
9 | cd $KAFKA_CONNECT_PLUGIN_PATH &&\
10 | curl -sfSL https://repo1.maven.org/maven2/io/debezium/debezium-connector-postgres/${DEBEZIUM_CONNECTOR_VERSION}/debezium-connector-postgres-${DEBEZIUM_CONNECTOR_VERSION}-plugin.tar.gz | tar xz &&\
11 | cd debezium-connector-postgres &&\
12 | curl -sfSL https://repo1.maven.org/maven2/io/debezium/debezium-interceptor/${DEBEZIUM_CONNECTOR_VERSION}/debezium-interceptor-${DEBEZIUM_CONNECTOR_VERSION}.jar -o debezium-interceptor-${DEBEZIUM_CONNECTOR_VERSION}.jar
13 |
--------------------------------------------------------------------------------
/outbox/jaeger.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/outbox/jaeger.png
--------------------------------------------------------------------------------
/outbox/order-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/outbox/order-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs order-service-quarkus in native (non-JVM) mode
3 | #
4 | # Before building the docker image run:
5 | #
6 | # mvn package -Pnative -Dnative-image.docker-build=true
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/order-service-quarkus .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/order-service-quarkus
15 | #
16 | ###
17 | FROM registry.fedoraproject.org/fedora-minimal
18 | WORKDIR /work/
19 | COPY target/*-runner /work/application
20 | RUN chmod 755 /work
21 | EXPOSE 8080
22 | CMD [ "./application", "-Dquarkus.http.host=0.0.0.0" ]
23 |
--------------------------------------------------------------------------------
/outbox/order-service/src/main/java/io/debezium/examples/outbox/order/model/EntityNotFoundException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.outbox.order.model;
7 |
8 | /**
9 | * An exception that indicates an entity could not be found.
10 | */
11 | public class EntityNotFoundException extends RuntimeException {
12 |
13 | private static final long serialVersionUID = -1L;
14 |
15 | public EntityNotFoundException(String message) {
16 | super(message);
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/outbox/order-service/src/main/java/io/debezium/examples/outbox/order/model/OrderLineStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.outbox.order.model;
7 |
8 | /**
9 | * Various statuses in which an {@link OrderLine} may be.
10 | */
11 | public enum OrderLineStatus {
12 | ENTERED,
13 | CANCELLED,
14 | SHIPPED
15 | }
16 |
--------------------------------------------------------------------------------
/outbox/order-service/src/main/java/io/debezium/examples/outbox/order/rest/RestApplication.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.outbox.order.rest;
7 |
8 | import jakarta.ws.rs.ApplicationPath;
9 | import jakarta.ws.rs.core.Application;
10 |
11 | /**
12 | * Defines the application path for the Order Service rest application.
13 | */
14 | @ApplicationPath("/")
15 | public class RestApplication extends Application {
16 | }
17 |
--------------------------------------------------------------------------------
/outbox/order-service/src/main/java/io/debezium/examples/outbox/order/rest/UpdateOrderLineRequest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.outbox.order.rest;
7 |
8 | import io.debezium.examples.outbox.order.model.OrderLine;
9 | import io.debezium.examples.outbox.order.model.OrderLineStatus;
10 |
11 | /**
12 | * A value object that represents updating an {@link OrderLine} status.
13 | */
14 | public class UpdateOrderLineRequest {
15 |
16 | private OrderLineStatus newStatus;
17 |
18 | public OrderLineStatus getNewStatus() {
19 | return newStatus;
20 | }
21 |
22 | public void setNewStatus(OrderLineStatus newStatus) {
23 | this.newStatus = newStatus;
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/outbox/order-service/src/main/resources/META-INF/resources/index.html:
--------------------------------------------------------------------------------
1 |
2 | Welcome to the Debezium Outbox Service based on Quarkus
3 |
--------------------------------------------------------------------------------
/outbox/outbox-overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/outbox/outbox-overview.png
--------------------------------------------------------------------------------
/outbox/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "order-db",
5 | "database.port": "5432",
6 | "database.user": "postgresuser",
7 | "database.password": "postgrespw",
8 | "database.dbname" : "orderdb",
9 | "topic.prefix": "dbserver1",
10 | "schema.include.list": "inventory",
11 | "table.include.list" : "inventory.outboxevent",
12 | "tombstones.on.delete" : "false",
13 | "transforms" : "outbox",
14 | "transforms.outbox.type" : "io.debezium.transforms.outbox.EventRouter",
15 | "transforms.outbox.route.topic.replacement" : "${routedByValue}.events",
16 | "transforms.outbox.table.fields.additional.placement" : "type:header:eventType"
17 | }
18 |
--------------------------------------------------------------------------------
/outbox/resources/data/cancel-order-line-request.json:
--------------------------------------------------------------------------------
1 | {
2 | "newStatus" : "CANCELLED"
3 | }
4 |
--------------------------------------------------------------------------------
/outbox/resources/data/create-order-request.json:
--------------------------------------------------------------------------------
1 | {
2 | "customerId" : "123",
3 | "orderDate" : "2019-01-31T12:13:01",
4 | "lineItems" : [
5 | {
6 | "item" : "Debezium in Action",
7 | "quantity" : 2,
8 | "totalPrice" : 39.98
9 | },
10 | {
11 | "item" : "Debezium for Dummies",
12 | "quantity" : 1,
13 | "totalPrice" : 29.99
14 | }
15 | ]
16 | }
17 |
--------------------------------------------------------------------------------
/outbox/service-overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/outbox/service-overview.png
--------------------------------------------------------------------------------
/outbox/shipment-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/outbox/shipment-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs shipment-service-quarkus in native (non-JVM) mode
3 | ###
4 | FROM registry.fedoraproject.org/fedora-minimal
5 | WORKDIR /work/
6 | COPY target/*-runner /work/application
7 | RUN chmod 755 /work
8 | CMD [ "./application", "-Dquarkus.http.host=0.0.0.0" ]
9 |
--------------------------------------------------------------------------------
/postgres-failover-slots/inventory-source.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class" : "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max" : "1",
4 | "database.hostname" : "pgbouncer",
5 | "database.port" : "5432",
6 | "database.user" : "user",
7 | "database.password" : "top-secret",
8 | "database.dbname" : "inventorydb",
9 | "topic.prefix" : "dbserver1",
10 | "schema.include.list" : "inventory",
11 | "plugin.name" : "pgoutput",
12 | "slot.failover" : "true"
13 | }
14 |
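Note: slot.failover relies on PostgreSQL failover slots; a sketch of checking the slot on the primary, assuming PostgreSQL 17+ (where pg_replication_slots has a failover column) and a port mapping to localhost:

    psql -h localhost -U user inventorydb -c "SELECT slot_name, failover FROM pg_replication_slots;"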
--------------------------------------------------------------------------------
/postgres-kafka-signal/connector.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "debezium-postgres",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "postgres",
7 | "database.port": "5432",
8 | "database.user": "myuser",
9 | "database.password": "mypassword",
10 | "database.dbname" : "postgres",
11 |
12 | "topic.prefix": "test",
13 |
14 | "table.include.list" : "public.characters",
15 | "signal.data.collection": "public.debezium_signal",
16 | "signal.enabled.channels": "source,kafka",
17 | "signal.kafka.topic": "signal-topic",
18 | "signal.kafka.bootstrap.servers": "broker:29092"
19 | }
20 | }
21 |
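Note: a minimal sketch of sending an incremental-snapshot signal through the Kafka channel configured above, using the console producer from a Kafka distribution; the record key must equal the topic.prefix ("test"), and the broker address below is the in-network one from the config, so adjust it when producing from the host:

    echo 'test|{"type":"execute-snapshot","data":{"data-collections":["public.characters"],"type":"incremental"}}' | \
      kafka-console-producer.sh --bootstrap-server broker:29092 --topic signal-topic \
        --property parse.key=true --property key.separator='|'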
--------------------------------------------------------------------------------
/postgres-toast/debezium-jdbc/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/connect:2.1
2 | ENV KAFKA_CONNECT_JDBC_DIR=$KAFKA_CONNECT_PLUGINS_DIR/kafka-connect-jdbc
3 |
4 | ARG POSTGRES_VERSION=42.5.1
5 | ARG KAFKA_JDBC_VERSION=5.3.1
6 |
7 | # Deploy PostgreSQL JDBC Driver
8 | RUN cd /kafka/libs && curl -sO https://jdbc.postgresql.org/download/postgresql-$POSTGRES_VERSION.jar
9 |
10 | # Deploy Kafka Connect JDBC
11 | RUN mkdir $KAFKA_CONNECT_JDBC_DIR && cd $KAFKA_CONNECT_JDBC_DIR &&\
12 | curl -sO https://packages.confluent.io/maven/io/confluent/kafka-connect-jdbc/$KAFKA_JDBC_VERSION/kafka-connect-jdbc-$KAFKA_JDBC_VERSION.jar
13 |
--------------------------------------------------------------------------------
/postgres-toast/debezium-source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
4 | "tasks.max": "1",
5 | "database.hostname": "source-db",
6 | "database.port": "5432",
7 | "database.user": "postgresusersource",
8 | "database.password": "postgrespw",
9 | "database.dbname" : "sourcedb",
10 | "topic.prefix": "dbserver1"
11 | }
12 |
--------------------------------------------------------------------------------
/postgres-toast/jdbc-sink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "sink-connector",
3 | "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
4 | "tasks.max": "1",
5 | "topics": "dbserver1.inventory.customers",
6 | "table.name.format": "customers",
7 | "connection.url": "jdbc:postgresql://sink-db:5432/sinkdb?currentSchema=inventorysink&user=postgresusersink&password=postgrespw",
8 | "transforms": "unwrap",
9 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
10 | "transforms.unwrap.drop.tombstones": "false",
11 | "auto.create": "false",
12 | "insert.mode": "upsert",
13 | "delete.enabled": "true",
14 | "pk.fields": "id",
15 | "pk.mode": "record_key"
16 | }
17 |
--------------------------------------------------------------------------------
/postgres-toast/sink-db/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/postgres:11
2 |
3 | COPY schema-update.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/postgres-toast/sink-db/schema-update.sql:
--------------------------------------------------------------------------------
1 | CREATE SCHEMA inventorysink;
2 | SET SCHEMA 'inventorysink';
3 |
4 | CREATE TABLE customers (
5 | id SERIAL NOT NULL PRIMARY KEY,
6 | first_name VARCHAR(255) NOT NULL,
7 | last_name VARCHAR(255) NOT NULL,
8 | email VARCHAR(255) NOT NULL UNIQUE,
9 | biography TEXT
10 | );
11 |
12 | CREATE OR REPLACE FUNCTION ignore_unchanged_biography()
13 | RETURNS TRIGGER AS
14 | $BODY$
15 | BEGIN
16 | IF NEW."biography" = '__debezium_unavailable_value'
17 | THEN
18 | NEW."biography" = OLD."biography";
19 | END IF;
20 |
21 | RETURN NEW;
22 | END;
23 | $BODY$ LANGUAGE PLPGSQL;
24 |
25 | CREATE TRIGGER customer_biography_trigger
26 | BEFORE UPDATE OF "biography"
27 | ON customers
28 | FOR EACH ROW
29 | EXECUTE PROCEDURE ignore_unchanged_biography();
30 |
--------------------------------------------------------------------------------
/postgres-toast/source-db/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/debezium/example-postgres:2.1
2 |
3 | COPY schema-update.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/postgres-toast/toast-value-store/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/postgres-toast/toast-value-store/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode.
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/code-with-quarkus .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.5
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/quarkus-native/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | postgres:
3 | image: quay.io/debezium/example-postgres:3.0
4 | ports:
5 | - 5432:5432
6 | environment:
7 | - POSTGRES_USER=postgresuser
8 | - POSTGRES_PASSWORD=postgrespw
9 |
--------------------------------------------------------------------------------
/quarkus-native/src/main/java/io/debezium/example/quarkus/DebeziumConfiguration.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.example.quarkus;
7 |
8 | import io.quarkus.runtime.annotations.StaticInitSafe;
9 | import io.smallrye.config.ConfigMapping;
10 |
11 | import java.util.Map;
12 |
13 | @StaticInitSafe
14 | @ConfigMapping(prefix = "debezium")
15 | public interface DebeziumConfiguration {
16 | Map<String, String> configuration();
17 | }
18 |
--------------------------------------------------------------------------------
/saga/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
3 |
--------------------------------------------------------------------------------
/saga/customer-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/saga/customer-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/code-with-quarkus .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/saga/customer-service/src/main/java/io/debezium/examples/saga/customer/facade/CreditDeserializer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.customer.facade;
7 |
8 | import io.debezium.examples.saga.customer.model.CreditLimitEvent;
9 | import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer;
10 |
11 | public class CreditDeserializer extends ObjectMapperDeserializer<CreditLimitEvent> {
12 |
13 | public CreditDeserializer() {
14 | super(CreditLimitEvent.class);
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/saga/customer-service/src/main/java/io/debezium/examples/saga/customer/model/CreditRequestType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.customer.model;
7 |
8 | public enum CreditRequestType {
9 | REQUEST, CANCEL;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/customer-service/src/main/java/io/debezium/examples/saga/customer/model/CreditStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.customer.model;
7 |
8 | public enum CreditStatus {
9 | APPROVED, REJECTED, CANCELLED;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/debezium-strimzi/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM quay.io/strimzi/kafka:latest-kafka-3.4.0
2 | ENV KAFKA_CONNECT_PLUGIN_PATH=/tmp/connect-plugins
3 |
4 | RUN mkdir $KAFKA_CONNECT_PLUGIN_PATH &&\
5 | cd $KAFKA_CONNECT_PLUGIN_PATH &&\
6 | curl -sfSL https://repo1.maven.org/maven2/io/debezium/debezium-connector-postgres/2.1.3.Final/debezium-connector-postgres-2.1.3.Final-plugin.tar.gz | tar xz
7 |
--------------------------------------------------------------------------------
/saga/delete-connectors.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | http DELETE http://localhost:8083/connectors/order-outbox-connector
4 | http DELETE http://localhost:8083/connectors/payment-outbox-connector
5 | http DELETE http://localhost:8083/connectors/credit-outbox-connector
6 | http DELETE http://localhost:8083/connectors/order-sagastate-connector
7 |
--------------------------------------------------------------------------------
/saga/init-customer.sql:
--------------------------------------------------------------------------------
1 | create schema customer;
2 |
3 |
--------------------------------------------------------------------------------
/saga/init-order.sql:
--------------------------------------------------------------------------------
1 | create schema purchaseorder;
2 |
3 |
--------------------------------------------------------------------------------
/saga/init-payment.sql:
--------------------------------------------------------------------------------
1 | create schema payment;
2 |
3 |
--------------------------------------------------------------------------------
/saga/order-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/saga/order-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/code-with-quarkus .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/framework/Saga.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.framework;
7 |
8 | import java.lang.annotation.ElementType;
9 | import java.lang.annotation.Retention;
10 | import java.lang.annotation.RetentionPolicy;
11 | import java.lang.annotation.Target;
12 |
13 | @Retention(RetentionPolicy.RUNTIME)
14 | @Target(ElementType.TYPE)
15 | public @interface Saga {
16 |
17 | String type();
18 | String[] stepIds();
19 | }
20 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/framework/SagaStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.framework;
7 |
8 | public enum SagaStatus {
9 | STARTED, ABORTING, ABORTED, COMPLETED;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/framework/SagaStepMessage.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.framework;
7 |
8 | import com.fasterxml.jackson.databind.JsonNode;
9 |
10 | /**
11 | * A message representing one logical step of execution in a saga.
12 | *
13 | * @author Gunnar Morling
14 | */
15 | public class SagaStepMessage {
16 |
17 | public String type;
18 | public String eventType;
19 | public JsonNode payload;
20 |
21 | public SagaStepMessage(String type, String eventType, JsonNode payload) {
22 | this.type = type;
23 | this.eventType = eventType;
24 | this.payload = payload;
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/framework/internal/SagaStepState.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.framework.internal;
7 |
8 | public class SagaStepState {
9 |
10 | public String type;
11 | public SagaStepStatus status;
12 |
13 | public SagaStepState(String type, SagaStepStatus status) {
14 | this.type = type;
15 | this.status = status;
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/framework/internal/SagaStepStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.framework.internal;
7 |
8 | public enum SagaStepStatus {
9 | STARTED, FAILED, SUCCEEDED, COMPENSATING, COMPENSATED;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/event/CreditApprovalEventPayload.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.event;
7 |
8 | public class CreditApprovalEventPayload {
9 |
10 | public CreditApprovalStatus status;
11 | }
12 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/event/CreditApprovalStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.event;
7 |
8 | import io.debezium.examples.saga.framework.internal.SagaStepStatus;
9 |
10 | public enum CreditApprovalStatus {
11 | APPROVED, REJECTED, CANCELLED;
12 |
13 | public SagaStepStatus toStepStatus() {
14 | return this == CANCELLED ? SagaStepStatus.COMPENSATED : this == REJECTED ? SagaStepStatus.FAILED : SagaStepStatus.SUCCEEDED;
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/event/PaymentEventPayload.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.event;
7 |
8 | public class PaymentEventPayload {
9 | public PaymentStatus status;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/event/PaymentStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.event;
7 |
8 | import io.debezium.examples.saga.framework.internal.SagaStepStatus;
9 |
10 | public enum PaymentStatus {
11 | REQUESTED, CANCELLED, FAILED, COMPLETED;
12 |
13 | public SagaStepStatus toStepStatus() {
14 | switch(this) {
15 | case CANCELLED:
16 | return SagaStepStatus.COMPENSATED;
17 | case COMPLETED:
18 | case REQUESTED:
19 | return SagaStepStatus.SUCCEEDED;
20 | case FAILED:
21 | return SagaStepStatus.FAILED;
22 | default:
23 | throw new IllegalArgumentException("Unexpected state: " + this);
24 | }
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/facade/serdes/CreditDeserializer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.facade.serdes;
7 |
8 | import io.debezium.examples.saga.order.event.CreditApprovalEventPayload;
9 | import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer;
10 |
11 | public class CreditDeserializer extends ObjectMapperDeserializer<CreditApprovalEventPayload> {
12 |
13 | public CreditDeserializer() {
14 | super(CreditApprovalEventPayload.class);
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/facade/serdes/PaymentDeserializer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.facade.serdes;
7 |
8 | import io.debezium.examples.saga.order.event.PaymentEventPayload;
9 | import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer;
10 |
11 | public class PaymentDeserializer extends ObjectMapperDeserializer<PaymentEventPayload> {
12 |
13 | public PaymentDeserializer() {
14 | super(PaymentEventPayload.class);
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/model/PurchaseOrder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.model;
7 |
8 | import javax.persistence.Entity;
9 | import javax.persistence.EnumType;
10 | import javax.persistence.Enumerated;
11 |
12 | import io.quarkus.hibernate.orm.panache.PanacheEntity;
13 |
14 | @Entity
15 | public class PurchaseOrder extends PanacheEntity {
16 |
17 | public long itemId;
18 | public int quantity;
19 | public long customerId;
20 | public long paymentDue;
21 | public String creditCardNo;
22 |
23 | @Enumerated(EnumType.STRING)
24 | public PurchaseOrderStatus status;
25 | }
26 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/model/PurchaseOrderStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.model;
7 |
8 | public enum PurchaseOrderStatus {
9 | CREATED, PROCESSING, CANCELLED, COMPLETED;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/rest/MyObjectMapperCustomizer.java:
--------------------------------------------------------------------------------
1 | package io.debezium.examples.saga.order.rest;
2 |
3 | import com.fasterxml.jackson.annotation.JsonInclude;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import io.quarkus.jackson.ObjectMapperCustomizer;
6 |
7 | import javax.inject.Singleton;
8 |
9 | @Singleton
10 | public class MyObjectMapperCustomizer implements ObjectMapperCustomizer {
11 |
12 | @Override
13 | public void customize(ObjectMapper objectMapper) {
14 | // To suppress serializing properties with null values
15 | objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/saga/order-service/src/main/java/io/debezium/examples/saga/order/rest/PlaceOrderResponse.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.order.rest;
7 |
8 | import io.debezium.examples.saga.order.model.PurchaseOrder;
9 | import io.debezium.examples.saga.order.model.PurchaseOrderStatus;
10 |
11 | public class PlaceOrderResponse {
12 |
13 | public long orderId;
14 | public PurchaseOrderStatus status;
15 |
16 | public static PlaceOrderResponse fromPurchaseOrder(PurchaseOrder order) {
17 | PlaceOrderResponse response = new PlaceOrderResponse();
18 | response.orderId = order.id;
19 | response.status = order.status;
20 |
21 | return response;
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/saga/payment-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/saga/payment-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/code-with-quarkus .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/saga/payment-service/src/main/java/io/debezium/examples/saga/payment/facade/PaymentDeserializer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.payment.facade;
7 |
8 | import io.debezium.examples.saga.payment.model.Payment;
9 | import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer;
10 |
11 | public class PaymentDeserializer extends ObjectMapperDeserializer<Payment> {
12 |
13 | public PaymentDeserializer() {
14 | super(Payment.class);
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/saga/payment-service/src/main/java/io/debezium/examples/saga/payment/model/PaymentRequestType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.payment.model;
7 |
8 | public enum PaymentRequestType {
9 | REQUEST, CANCEL;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/payment-service/src/main/java/io/debezium/examples/saga/payment/model/PaymentStatus.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright Debezium Authors.
3 | *
4 | * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
5 | */
6 | package io.debezium.examples.saga.payment.model;
7 |
8 | public enum PaymentStatus {
9 | REQUESTED, CANCELLED, FAILED, COMPLETED;
10 | }
11 |
--------------------------------------------------------------------------------
/saga/register-connectors.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | http PUT http://localhost:8083/connectors/order-outbox-connector/config < register-order-connector.json
4 | http PUT http://localhost:8083/connectors/payment-outbox-connector/config < register-payment-connector.json
5 | http PUT http://localhost:8083/connectors/credit-outbox-connector/config < register-credit-connector.json
6 | http PUT http://localhost:8083/connectors/order-sagastate-connector/config < register-sagastate-connector.json
7 |
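Note: the script uses httpie; a curl equivalent for one of the connectors, assuming the same Connect endpoint:

    curl -i -X PUT -H "Content-Type: application/json" \
      http://localhost:8083/connectors/order-outbox-connector/config -d @register-order-connector.json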
--------------------------------------------------------------------------------
/saga/register-credit-connector.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "customer-db",
5 | "database.port": "5432",
6 | "database.user": "customeruser",
7 | "database.password": "customerpw",
8 | "database.dbname" : "customerdb",
9 | "topic.prefix": "dbserver3",
10 | "schema.include.list": "customer",
11 | "table.include.list" : "customer.outboxevent",
12 | "tombstones.on.delete" : "false",
13 | "key.converter": "org.apache.kafka.connect.storage.StringConverter",
14 | "value.converter": "org.apache.kafka.connect.storage.StringConverter",
15 | "transforms" : "outbox",
16 | "transforms.outbox.type" : "io.debezium.transforms.outbox.EventRouter",
17 | "transforms.outbox.route.topic.replacement" : "${routedByValue}.response",
18 | "poll.interval.ms": "100"
19 | }
20 |
--------------------------------------------------------------------------------
/saga/register-order-connector.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "order-db",
5 | "database.port": "5432",
6 | "database.user": "orderuser",
7 | "database.password": "orderpw",
8 | "database.dbname" : "orderdb",
9 | "topic.prefix": "dbserver1",
10 | "schema.include.list": "purchaseorder",
11 | "table.include.list" : "purchaseorder.outboxevent",
12 | "tombstones.on.delete" : "false",
13 | "key.converter": "org.apache.kafka.connect.storage.StringConverter",
14 | "value.converter": "org.apache.kafka.connect.storage.StringConverter",
15 | "transforms" : "saga",
16 | "transforms.saga.type" : "io.debezium.transforms.outbox.EventRouter",
17 | "transforms.saga.route.topic.replacement" : "${routedByValue}.request",
18 | "poll.interval.ms": "100"
19 | }
20 |
--------------------------------------------------------------------------------
/saga/register-payment-connector.json:
--------------------------------------------------------------------------------
1 | {
2 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
3 | "tasks.max": "1",
4 | "database.hostname": "payment-db",
5 | "database.port": "5432",
6 | "database.user": "paymentuser",
7 | "database.password": "paymentpw",
8 | "database.dbname" : "paymentdb",
9 | "topic.prefix": "dbserver2",
10 | "schema.include.list": "payment",
11 | "table.include.list" : "payment.outboxevent",
12 | "tombstones.on.delete" : "false",
13 | "key.converter": "org.apache.kafka.connect.storage.StringConverter",
14 | "value.converter": "org.apache.kafka.connect.storage.StringConverter",
15 | "transforms" : "outbox",
16 | "transforms.outbox.type" : "io.debezium.transforms.outbox.EventRouter",
17 | "transforms.outbox.route.topic.replacement" : "${routedByValue}.response",
18 | "poll.interval.ms": "100"
19 | }
20 |
--------------------------------------------------------------------------------
/saga/requests/place-invalid-order1.json:
--------------------------------------------------------------------------------
1 | {
2 | "itemId" : 123,
3 | "quantity" : 2,
4 | "customerId" : 456,
5 | "paymentDue" : 4999,
6 | "creditCardNo" : "xxxx-yyyy-dddd-9999"
7 | }
8 |
--------------------------------------------------------------------------------
/saga/requests/place-invalid-order2.json:
--------------------------------------------------------------------------------
1 | {
2 | "itemId" : 123,
3 | "quantity" : 2,
4 | "customerId" : 456,
5 | "paymentDue" : 59999,
6 | "creditCardNo" : "xxxx-yyyy-dddd-aaa"
7 | }
8 |
--------------------------------------------------------------------------------
/saga/requests/place-order.json:
--------------------------------------------------------------------------------
1 | {
2 | "itemId" : 123,
3 | "quantity" : 2,
4 | "customerId" : 456,
5 | "paymentDue" : 4999,
6 | "creditCardNo" : "xxxx-yyyy-dddd-aaaa"
7 | }
8 |
--------------------------------------------------------------------------------
/saga/saga.json:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/saga/saga.json
--------------------------------------------------------------------------------
/saga/solution-overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/saga/solution-overview.png
--------------------------------------------------------------------------------
/sql-server-read-replica/debezium-sqlserver-init/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mcr.microsoft.com/mssql/server:2019-latest
2 |
3 | COPY ./debezium.* /etc/sqlserver/
4 |
--------------------------------------------------------------------------------
/sql-server-read-replica/debezium-sqlserver-init/debezium.cer:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/sql-server-read-replica/debezium-sqlserver-init/debezium.cer
--------------------------------------------------------------------------------
/sql-server-read-replica/debezium-sqlserver-init/debezium.key:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/sql-server-read-replica/debezium-sqlserver-init/debezium.key
--------------------------------------------------------------------------------
/sql-server-read-replica/debezium-sqlserver-init/setup-secondary.sql:
--------------------------------------------------------------------------------
1 | CREATE MASTER KEY ENCRYPTION BY PASSWORD = 'Password!';
2 | CREATE LOGIN mirror WITH PASSWORD = 'Password!';
3 | CREATE USER mirror FOR LOGIN mirror;
4 | CREATE CERTIFICATE mirror
5 | AUTHORIZATION mirror
6 | FROM FILE = '/etc/sqlserver/debezium.cer'
7 | WITH PRIVATE KEY (
8 | FILE = '/etc/sqlserver/debezium.key',
9 | DECRYPTION BY PASSWORD = 'Password!'
10 | );
11 | CREATE ENDPOINT mirror
12 | STATE = STARTED
13 | AS TCP (LISTENER_IP = (0.0.0.0), LISTENER_PORT = 5022)
14 | FOR DATA_MIRRORING (
15 | AUTHENTICATION = CERTIFICATE mirror,
16 | ROLE = ALL,
17 | ENCRYPTION = REQUIRED ALGORITHM AES
18 | );
19 | GRANT CONNECT ON ENDPOINT::mirror TO mirror;
20 | ALTER AVAILABILITY GROUP dbz JOIN WITH (CLUSTER_TYPE = NONE);
21 | ALTER AVAILABILITY GROUP dbz GRANT CREATE ANY DATABASE;
22 | GO
23 |
--------------------------------------------------------------------------------
/sql-server-read-replica/register-sqlserver.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.sqlserver.SqlServerConnector",
5 | "tasks.max" : "1",
6 | "topic.prefix" : "server1",
7 | "database.hostname" : "secondary",
8 | "database.port" : "1433",
9 | "database.user" : "sa",
10 | "database.password" : "Password!",
11 | "database.names" : "testDB",
12 | "database.applicationIntent": "ReadOnly",
13 | "snapshot.isolation.mode": "snapshot",
14 | "schema.history.internal.kafka.bootstrap.servers" : "kafka:9092",
15 | "schema.history.internal.kafka.topic": "schema-changes.inventory",
16 | "database.encrypt": "false"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/testcontainers/README.md:
--------------------------------------------------------------------------------
1 | # Testcontainers
2 |
3 | This example shows how to implement an integration test for your CDC set-up using [Testcontainers](https://www.testcontainers.org/).
4 | It spins up Postgres, Apache Kafka, and Kafka Connect (including the Debezium connectors),
5 | deploys an instance of the Debezium Postgres connector, and runs assertions against the expected change events on the corresponding Kafka topic (see the test sketch below).
6 |
7 | Please refer to the [Debezium documentation](https://debezium.io/documentation/reference/2.0/integrations/testcontainers.html) to learn more about its Testcontainers support.
8 |
9 | ## Prerequisites
10 |
11 | * Java 11 development environment
12 | * Local [Docker](https://www.docker.com/) installation
13 |
14 | ## Running the Example
15 |
16 | ```
17 | mvn clean package
18 | ```
19 |
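20 | ## Example Test
21 |
22 | A minimal sketch of such a test, loosely following the Debezium Testcontainers documentation (the image tags, the class name, and the `topic.prefix` value below are illustrative assumptions, not taken from this example's sources):
23 |
24 | ```java
25 | import java.util.stream.Stream;
26 |
27 | import org.junit.BeforeClass;
28 | import org.junit.Test;
29 | import org.testcontainers.containers.KafkaContainer;
30 | import org.testcontainers.containers.Network;
31 | import org.testcontainers.containers.PostgreSQLContainer;
32 | import org.testcontainers.lifecycle.Startables;
33 | import org.testcontainers.utility.DockerImageName;
34 |
35 | import io.debezium.testing.testcontainers.ConnectorConfiguration;
36 | import io.debezium.testing.testcontainers.DebeziumContainer;
37 |
38 | public class CdcIntegrationTest {
39 |
40 |     private static final Network network = Network.newNetwork();
41 |
42 |     // Kafka, Postgres and Kafka Connect (with the Debezium connectors) share one Docker network
43 |     private static final KafkaContainer kafka = new KafkaContainer().withNetwork(network);
44 |
45 |     private static final PostgreSQLContainer<?> postgres =
46 |             new PostgreSQLContainer<>(DockerImageName.parse("quay.io/debezium/example-postgres:2.0")
47 |                     .asCompatibleSubstituteFor("postgres"))
48 |                     .withNetwork(network)
49 |                     .withNetworkAliases("postgres");
50 |
51 |     private static final DebeziumContainer connect =
52 |             new DebeziumContainer("quay.io/debezium/connect:2.0")
53 |                     .withNetwork(network)
54 |                     .withKafka(kafka)
55 |                     .dependsOn(kafka);
56 |
57 |     @BeforeClass
58 |     public static void startContainers() {
59 |         // Start all containers in parallel and wait until they are ready
60 |         Startables.deepStart(Stream.of(kafka, postgres, connect)).join();
61 |     }
62 |
63 |     @Test
64 |     public void registersPostgresConnector() {
65 |         // Derive the connector configuration from the running Postgres container
66 |         ConnectorConfiguration config = ConnectorConfiguration.forJdbcContainer(postgres)
67 |                 .with("topic.prefix", "dbserver1");
68 |
69 |         connect.registerConnector("inventory-connector", config);
70 |
71 |         // ...consume from the corresponding Kafka topic and assert on the expected change events
72 |     }
73 | }
74 | ```
75 |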
--------------------------------------------------------------------------------
/testcontainers/src/test/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Direct log messages to stdout
2 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
3 | log4j.appender.stdout.Target=System.out
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %X{dbz.connectorType}|%X{dbz.connectorName}|%X{dbz.connectorContext} %m [%c]%n
6 |
7 | # Root logger option
8 | log4j.rootLogger=INFO, stdout
9 |
10 | # Set up the default logging to be INFO level, then override specific units
11 | log4j.logger.io.debezium=INFO
12 | log4j.logger.io.debezium.examples=DEBUG
13 | log4j.logger.org.apache.kafka.connect.json.JsonConverterConfig=WARN
14 |
--------------------------------------------------------------------------------
/tutorial/db2data/.donotdelete:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/tutorial/db2data/.donotdelete
--------------------------------------------------------------------------------
/tutorial/debezium-cassandra-init/config.properties:
--------------------------------------------------------------------------------
1 | connector.name=test_connector
2 | commit.log.relocation.dir=/debezium/relocation/
3 | http.port=8000
4 |
5 | cassandra.config=/opt/cassandra/conf/cassandra.yaml
6 | cassandra.hosts=127.0.0.1
7 | cassandra.port=9042
8 |
9 | kafka.producer.bootstrap.servers=kafka:9092
10 | kafka.producer.retries=3
11 | kafka.producer.retry.backoff.ms=1000
12 | topic.prefix=test_prefix
13 |
14 | key.converter=org.apache.kafka.connect.json.JsonConverter
15 | value.converter=org.apache.kafka.connect.json.JsonConverter
16 |
17 | offset.backing.store.dir=/debezium/offsets
18 |
19 | snapshot.consistency=ONE
20 | snapshot.mode=ALWAYS
21 |
--------------------------------------------------------------------------------
/tutorial/debezium-cassandra-init/log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=INFO, A1
2 |
3 | log4j.appender.A1=org.apache.log4j.ConsoleAppender
4 |
5 | log4j.appender.A1.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
7 |
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2connect/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:$DEBEZIUM_VERSION
3 |
4 | USER root
5 | RUN microdnf -y install libaio curl && microdnf clean all
6 |
7 | USER kafka
8 |
9 | # Deploy db2 client and drivers
10 | RUN curl https://repo1.maven.org/maven2/com/ibm/db2/jcc/11.5.0.0/jcc-11.5.0.0.jar --output /kafka/connect/debezium-connector-db2/jcc-11.5.0.0.jar
11 |
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2server/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ibmcom/db2:11.5.4.0
2 |
3 | LABEL maintainer="Debezium Community"
4 |
5 | RUN mkdir -p /asncdctools/src
6 |
7 | ADD asncdc_UDF.sql /asncdctools/src
8 | ADD asncdcaddremove.sql /asncdctools/src
9 | ADD asncdctables.sql /asncdctools/src
10 | ADD dbsetup.sh /asncdctools/src
11 | ADD startup-agent.sql /asncdctools/src
12 | ADD startup-cdc-demo.sql /asncdctools/src
13 | ADD inventory.sql /asncdctools/src
14 | ADD asncdc.c /asncdctools/src
15 |
16 | RUN mkdir /var/custom && \
17 | chmod -R 777 /asncdctools && \
18 | chmod -R 777 /var/custom
19 |
20 | ADD cdcsetup.sh /var/custom
21 | ADD custom-init /var/custom-init
22 |
23 | RUN chmod -R 777 /var/custom-init
24 |
25 | ADD openshift_entrypoint.sh /var/db2_setup/lib
26 |
27 | RUN chmod 777 /var/custom/cdcsetup.sh && \
28 | chmod 777 /var/db2_setup/lib/openshift_entrypoint.sh
29 |
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2server/asncdc_UDF.sql:
--------------------------------------------------------------------------------
1 | DROP SPECIFIC FUNCTION ASNCDC.asncdcservice;
2 |
3 | CREATE FUNCTION ASNCDC.ASNCDCSERVICES(command VARCHAR(6), service VARCHAR(8))
4 | RETURNS CLOB(100K)
5 | SPECIFIC asncdcservice
6 | EXTERNAL NAME 'asncdc!asncdcservice'
7 | LANGUAGE C
8 | PARAMETER STYLE SQL
9 | DBINFO
10 | DETERMINISTIC
11 | NOT FENCED
12 | RETURNS NULL ON NULL INPUT
13 | NO SQL
14 | NO EXTERNAL ACTION
15 | NO SCRATCHPAD
16 | ALLOW PARALLEL
17 | NO FINAL CALL;
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2server/cdcsetup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ ! -f /asncdctools/src/asncdc.nlk ]; then
4 | rc=1
5 | echo "waiting for db2inst1 exists ."
6 | while [ "$rc" -ne 0 ]
7 | do
8 | sleep 5
9 | id db2inst1
10 | rc=$?
11 | echo '.'
12 | done
13 |
14 | su -c "/asncdctools/src/dbsetup.sh $DBNAME" - db2inst1
15 | fi
16 | touch /asncdctools/src/asncdc.nlk
17 |
18 | echo "CDC setup completed."
19 |
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2server/custom-init/cleanup_storage.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ################################################################################
4 | # Wipes the storage directory.
5 | # Note that this essentially makes the database non-persistent when the pod is deleted
6 | ################################################################################
7 |
8 | echo "Inspecting database directory"
9 | ls $STORAGE_DIR
10 |
11 | echo "Wiping database directory"
12 | rm -rf $STORAGE_DIR/*
13 |
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2server/openshift_entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ################################################################################
3 | # Runs custom init scripts followed by the original entry point
4 | ###############################################################################
5 |
6 | source ${SETUPDIR?}/include/db2_constants
7 | source ${SETUPDIR?}/include/db2_common_functions
8 |
9 | if [[ -d /var/custom-init ]]; then
10 | echo "(*) Running user-provided init scripts ... "
11 | chmod -R 777 /var/custom-init
12 | for script in `ls /var/custom-init`; do
13 | echo "(*) Running $script ..."
14 | /var/custom-init/$script
15 | done
16 | fi
17 |
18 | echo "Running original entry point"
19 | /var/db2_setup/lib/setup_db2_instance.sh
20 |
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2server/startup-agent.sql:
--------------------------------------------------------------------------------
1 | VALUES ASNCDC.ASNCDCSERVICES('start','asncdc');
--------------------------------------------------------------------------------
/tutorial/debezium-db2-init/db2server/startup-cdc-demo.sql:
--------------------------------------------------------------------------------
1 |
2 | VALUES ASNCDC.ASNCDCSERVICES('status','asncdc');
3 |
4 | CALL ASNCDC.ADDTABLE('DB2INST1', 'PRODUCTS' );
5 | CALL ASNCDC.ADDTABLE('DB2INST1', 'PRODUCTS_ON_HAND' );
6 | CALL ASNCDC.ADDTABLE('DB2INST1', 'CUSTOMERS' );
7 | CALL ASNCDC.ADDTABLE('DB2INST1', 'ORDERS' );
8 |
9 | VALUES ASNCDC.ASNCDCSERVICES('reinit','asncdc');
--------------------------------------------------------------------------------
/tutorial/debezium-ifx-init/ifxconnect/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:$DEBEZIUM_VERSION
3 |
4 | ADD --chown=kafka:kafka --chmod=775 https://repo1.maven.org/maven2/com/ibm/informix/jdbc/4.50.10/jdbc-4.50.10.jar /kafka/connect/debezium-connector-informix/
5 | ADD --chown=kafka:kafka --chmod=775 https://repo1.maven.org/maven2/com/ibm/informix/ifx-changestream-client/1.1.3/ifx-changestream-client-1.1.3.jar /kafka/connect/debezium-connector-informix/
6 |
7 |
--------------------------------------------------------------------------------
/tutorial/debezium-ifx-init/ifxserver/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM icr.io/informix/informix-developer-database:14.10.FC9W1DE
2 |
3 | LABEL maintainer="Debezium Community"
4 |
5 | ADD --chown=informix:informix --chmod=775 informix_post_init.sh /opt/ibm/config
6 | ADD --chown=informix:informix --chmod=775 inventory.sql /opt/ibm/informix/etc
7 |
8 | ENV DBDATE Y4MD-
9 | ENV TYPE oltp
10 | ENV SIZE medium
11 | ENV LICENSE accept
12 | ENV RUN_FILE_POST_INIT informix_post_init.sh
13 |
--------------------------------------------------------------------------------
/tutorial/debezium-ifx-init/ifxserver/informix_post_init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | dbaccess < $INFORMIXDIR/etc/syscdcv1.sql
4 |
5 | dbaccess < $INFORMIXDIR/etc/inventory.sql
6 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/Dockerfile:
--------------------------------------------------------------------------------
1 | # Use a temporary layer for the build stage.
2 | FROM vitess/base:v19.0.4 AS base
3 |
4 | FROM vitess/lite:v19.0.4
5 |
6 | USER root
7 |
8 | RUN apt-get update
9 | RUN apt-get install -y sudo curl vim jq
10 |
11 | # Install etcd
12 | COPY install_local_dependencies.sh /vt/dist/install_local_dependencies.sh
13 | RUN /vt/dist/install_local_dependencies.sh
14 |
15 | # Copy binaries used by vitess components start-up scripts
16 | COPY --from=base /vt/bin/vtctl /vt/bin/
17 | COPY --from=base /vt/bin/mysqlctl /vt/bin/
18 |
19 | # Copy vitess components start-up scripts
20 | COPY local /vt/local
21 |
22 | USER vitess
23 | ENV PATH /vt/bin:$PATH
24 | ENV PATH /var/opt/etcd:$PATH
25 | CMD cd /vt/local && ./initial_cluster.sh && tail -f /dev/null
26 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/install_local_dependencies.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -euo pipefail
4 |
5 | # Install etcd
6 | ETCD_VER=v3.4.9
7 | DOWNLOAD_URL=https://storage.googleapis.com/etcd
8 |
9 | curl -k -L ${DOWNLOAD_URL}/${ETCD_VER}/etcd-${ETCD_VER}-linux-amd64.tar.gz -o /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz
10 | mkdir -p /var/opt/etcd
11 | sudo tar xzvf /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz -C /var/opt/etcd --strip-components=1
12 | rm -f /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz
13 |
14 | mkdir -p /var/run/etcd && chown -R vitess:vitess /var/run/etcd
15 |
16 | # Clean up files we won't need in the final image.
17 | rm -rf /var/lib/apt/lists/*
18 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/create_tables_sharded_inventory.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE products
2 | (
3 | id INT(11) NOT NULL,
4 | name VARCHAR(255) NOT NULL,
5 | description VARCHAR(512) DEFAULT NULL,
6 | weight FLOAT DEFAULT NULL,
7 | PRIMARY KEY (id)
8 | ) ENGINE = InnoDB;
9 |
10 | CREATE TABLE products_on_hand
11 | (
12 | product_id INT(11) NOT NULL,
13 | quantity INT(11) NOT NULL,
14 | PRIMARY KEY (product_id)
15 | ) ENGINE = InnoDB;
16 |
17 | CREATE TABLE orders
18 | (
19 | order_number INT(11) NOT NULL,
20 | order_date DATE NOT NULL,
21 | purchaser INT(11) NOT NULL,
22 | quantity INT(11) NOT NULL,
23 | product_id INT(11) NOT NULL,
24 | PRIMARY KEY (order_number)
25 | ) ENGINE = InnoDB;
26 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/grpc_static_auth.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "Username": "vitess",
4 | "Password": "vitess_password"
5 | }
6 | ]
7 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/grpc_static_client_auth.json:
--------------------------------------------------------------------------------
1 | {
2 | "Username": "vitess",
3 | "Password": "vitess_password"
4 | }
5 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/insert_customer_data.sql:
--------------------------------------------------------------------------------
1 | INSERT INTO customers(first_name, last_name, email)
2 | VALUES ('Sally', 'Thomas', 'sally.thomas@acme.com');
3 |
4 | INSERT INTO customers(first_name, last_name, email)
5 | VALUES ('George', 'Bailey', 'gbailey@foobar.com');
6 |
7 | INSERT INTO customers(first_name, last_name, email)
8 | VALUES ('Edward', 'Walker', 'ed@walker.com');
9 |
10 | INSERT INTO customers(first_name, last_name, email)
11 | VALUES ('Anne', 'Kretchmar', 'annek@noanswer.org');
12 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/scripts/mysqlctl-up.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | source ./env.sh
4 |
5 | cell=${CELL:-'test'}
6 | uid=$TABLET_UID
7 | mysql_port=$((17000 + $uid))
8 | printf -v alias '%s-%010d' $cell $uid
9 | printf -v tablet_dir 'vt_%010d' $uid
10 |
11 | mkdir -p $VTDATAROOT/backups
12 |
13 | echo "Starting MySQL for tablet $alias..."
14 | action="init"
15 |
16 | if [ -d $VTDATAROOT/$tablet_dir ]; then
17 | echo "Resuming from existing vttablet dir:"
18 | echo " $VTDATAROOT/$tablet_dir"
19 | action='start'
20 | fi
21 |
22 | mysqlctl \
23 | --log_dir $VTDATAROOT/tmp \
24 | --tablet_uid $uid \
25 | --mysql_port $mysql_port \
26 | $action
27 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/scripts/vtctld-up.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | source ./env.sh
4 |
5 | cell=${CELL:-'test'}
6 | grpc_port=15999
7 |
8 | echo "Starting vtctld..."
9 | # shellcheck disable=SC2086
10 | vtctld \
11 | $TOPOLOGY_FLAGS \
12 | --cell $cell \
13 | --service_map 'grpc-vtctl,grpc-vtctld' \
14 | --backup_storage_implementation file \
15 | --file_backup_storage_root $VTDATAROOT/backups \
16 | --log_dir $VTDATAROOT/tmp \
17 | --port $vtctld_web_port \
18 | --grpc_port $grpc_port \
19 | --pid_file $VTDATAROOT/tmp/vtctld.pid \
20 |     --grpc_auth_mode static \
21 |     --grpc_auth_static_password_file grpc_static_auth.json \
22 | > $VTDATAROOT/tmp/vtctld.out 2>&1 &
23 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/select_customer0_data.sql:
--------------------------------------------------------------------------------
1 | \! echo 'Using customer/0'
2 | use customer/0;
3 | \! echo 'customers'
4 | select * from customers;
5 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/select_inventory-80_data.sql:
--------------------------------------------------------------------------------
1 | \! echo 'Using inventory/-80'
2 | use inventory/-80;
3 | \! echo 'products'
4 | select * from products;
5 | \! echo 'products_on_hand'
6 | select * from products_on_hand;
7 | \! echo 'orders'
8 | select * from orders;
9 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/select_inventory80-_data.sql:
--------------------------------------------------------------------------------
1 | \! echo 'Using inventory/80-'
2 | use inventory/80-;
3 | \! echo 'products'
4 | select * from products;
5 | \! echo 'products_on_hand'
6 | select * from products_on_hand;
7 | \! echo 'orders'
8 | select * from orders;
9 |
--------------------------------------------------------------------------------
/tutorial/debezium-vitess-init/local/vschema_tables_unsharded_customer.json:
--------------------------------------------------------------------------------
1 | {
2 | "tables": {
3 | "my_product_seq": {
4 | "type": "sequence"
5 | },
6 | "my_order_seq": {
7 | "type": "sequence"
8 | },
9 | "my_customer_seq": {
10 | "type": "sequence"
11 | },
12 | "customers": {
13 | "auto_increment": {
14 | "column": "id",
15 | "sequence": "my_customer_seq"
16 | }
17 | }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/tutorial/debezium-with-oracle-jdbc/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:$DEBEZIUM_VERSION
3 | ENV KAFKA_CONNECT_JDBC_DIR=$KAFKA_CONNECT_PLUGINS_DIR/kafka-connect-jdbc
4 |
5 | # These should point to the driver version to be used
6 | ENV MAVEN_DEP_DESTINATION=$KAFKA_HOME/libs \
7 | ORACLE_JDBC_REPO="com/oracle/database/jdbc" \
8 | ORACLE_JDBC_GROUP="ojdbc8" \
9 | ORACLE_JDBC_VERSION="21.6.0.0" \
10 | ORACLE_JDBC_MD5=312e6f4ec9932bbf74a4461669970c4b
11 |
12 | RUN docker-maven-download central "$ORACLE_JDBC_REPO" "$ORACLE_JDBC_GROUP" "$ORACLE_JDBC_VERSION" "$ORACLE_JDBC_MD5"
13 |
14 | USER kafka
15 |
--------------------------------------------------------------------------------
/tutorial/docker-compose-cassandra.yaml:
--------------------------------------------------------------------------------
1 | version: '2'
2 | services:
3 | zookeeper:
4 | image: quay.io/debezium/zookeeper:${DEBEZIUM_VERSION}
5 | ports:
6 | - 2181:2181
7 | - 2888:2888
8 | - 3888:3888
9 | kafka:
10 | image: quay.io/debezium/kafka:${DEBEZIUM_VERSION}
11 | ports:
12 | - 9092:9092
13 | links:
14 | - zookeeper
15 | environment:
16 | - ZOOKEEPER_CONNECT=zookeeper:2181
17 | cassandra:
18 | image: debezium/example-cassandra:${DEBEZIUM_VERSION}
19 | build:
20 | context: ./debezium-cassandra-init
21 | ports:
22 | - 9042:9042
23 | links:
24 | - zookeeper
25 | - kafka
26 | volumes:
27 | - /var/lib/cassandra
28 |
--------------------------------------------------------------------------------
/tutorial/docker-compose-mysql-apicurio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/tutorial/docker-compose-mysql-apicurio.png
--------------------------------------------------------------------------------
/tutorial/register-db2.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.db2.Db2Connector",
5 | "tasks.max" : "1",
6 | "topic.prefix" : "db2server",
7 | "database.hostname" : "db2server",
8 | "database.port" : "50000",
9 | "database.user" : "db2inst1",
10 | "database.password" : "=Password!",
11 | "database.dbname" : "TESTDB",
12 | "database.cdcschema": "ASNCDC",
13 | "schema.history.internal.kafka.bootstrap.servers" : "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/tutorial/register-ifx.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.informix.InformixConnector",
5 | "tasks.max" : "1",
6 | "topic.prefix" : "ifxserver",
7 | "database.hostname" : "ifxserver",
8 | "database.port" : "9088",
9 | "database.user" : "informix",
10 | "database.password" : "in4mix",
11 | "database.dbname" : "sysuser",
12 | "schema.history.internal.kafka.bootstrap.servers" : "kafka:9092",
13 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/tutorial/register-mariadb.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mariadb.MariaDbConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mariadb",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "database.include.list": "inventory",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/tutorial/register-mongodb.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.mongodb.MongoDbConnector",
5 | "tasks.max" : "1",
6 | "mongodb.connection.string" : "mongodb://mongodb:27017/?replicaSet=rs0",
7 | "topic.prefix" : "dbserver1",
8 | "mongodb.user" : "debezium",
9 | "mongodb.password" : "dbz",
10 | "database.include.list" : "inventory"
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/tutorial/register-mysql-ext-secrets.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "${file:/secrets/mysql.properties:user}",
9 | "database.password": "${file:/secrets/mysql.properties:password}",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "database.include.list": "inventory",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
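Note: the `${file:...}` placeholders in this connector config are resolved by Kafka Connect's config-provider mechanism, not by Debezium itself. A minimal sketch of the worker-side settings that enable this (standard Kafka `FileConfigProvider` properties; the `/secrets/mysql.properties` path is assumed to be the file shown under `/tutorial/secrets/mysql.properties`, mounted into the Connect container):

```properties
# Enable a config provider named "file" on the Kafka Connect worker
config.providers=file
config.providers.file.class=org.apache.kafka.common.config.provider.FileConfigProvider
```

With this in place, `${file:/secrets/mysql.properties:user}` is replaced at runtime with the `user` value from that properties file, so credentials never appear in the connector registration request.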
--------------------------------------------------------------------------------
/tutorial/register-mysql.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "mysql",
7 | "database.port": "3306",
8 | "database.user": "debezium",
9 | "database.password": "dbz",
10 | "database.server.id": "184054",
11 | "topic.prefix": "dbserver1",
12 | "database.include.list": "inventory",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/tutorial/register-oracle-logminer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.oracle.OracleConnector",
5 | "tasks.max" : "1",
6 | "topic.prefix" : "server1",
7 | "database.hostname" : "",
8 | "database.port" : "1521",
9 | "database.user" : "c##dbzuser",
10 | "database.password" : "dbz",
11 | "database.dbname" : "ORCLCDB",
12 | "database.pdb.name" : "ORCLPDB1",
13 | "schema.history.internal.kafka.bootstrap.servers" : "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/tutorial/register-postgres.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "postgres",
7 | "database.port": "5432",
8 | "database.user": "postgres",
9 | "database.password": "postgres",
10 | "database.dbname" : "postgres",
11 | "topic.prefix": "dbserver1",
12 | "schema.include.list": "inventory"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/tutorial/register-sqlserver.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.sqlserver.SqlServerConnector",
5 | "tasks.max" : "1",
6 | "topic.prefix" : "server1",
7 | "database.hostname" : "sqlserver",
8 | "database.port" : "1433",
9 | "database.user" : "sa",
10 | "database.password" : "Password!",
11 | "database.names" : "testDB",
12 | "schema.history.internal.kafka.bootstrap.servers" : "kafka:9092",
13 | "schema.history.internal.kafka.topic": "schema-changes.inventory",
14 | "database.encrypt": "false"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/tutorial/register-vitess.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.vitess.VitessConnector",
5 | "tasks.max": "1",
6 | "database.hostname": "vitess",
7 | "database.port": "15991",
8 | "database.user": "vitess",
9 | "database.password": "vitess_password",
10 | "topic.prefix": "dbserver1",
11 | "vitess.keyspace": "inventory",
12 | "vitess.vtctld.host": "vitess",
13 | "vitess.vtctld.port": "15999",
14 | "vitess.tablet.type": "MASTER"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/tutorial/secrets/mysql.properties:
--------------------------------------------------------------------------------
1 | user=debezium
2 | password=dbz
3 |
--------------------------------------------------------------------------------
/tutorial/timescaledb/002_enable_replication.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "wal_level=logical" >> ${POSTGRESQL_CONF_DIR}/postgresql.conf
4 |
5 | psql -U "${POSTGRES_USER}" "${POSTGRES_DB}" -c "CREATE TABLE conditions (time TIMESTAMPTZ NOT NULL, location TEXT NOT NULL, temperature DOUBLE PRECISION NULL, humidity DOUBLE PRECISION NULL); SELECT create_hypertable('conditions', 'time'); INSERT INTO conditions VALUES(NOW(), 'Prague', 22.8, 53.3); CREATE PUBLICATION dbz_publication FOR ALL TABLES WITH (publish = 'insert, update')"
6 |
--------------------------------------------------------------------------------
/tutorial/timescaledb/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM timescale/timescaledb:latest-pg14
2 |
3 | COPY 002_enable_replication.sh docker-entrypoint-initdb.d
4 |
--------------------------------------------------------------------------------
/tutorial/vitess-sharding-setup.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/debezium/debezium-examples/9f5befd87e4a0b991e4d4eb6adb8ea45bc3e5796/tutorial/vitess-sharding-setup.png
--------------------------------------------------------------------------------
/unwrap-mongodb-smt/debezium-jdbc/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DEBEZIUM_VERSION
2 | FROM quay.io/debezium/connect:${DEBEZIUM_VERSION}
3 | ENV KAFKA_CONNECT_JDBC_DIR=$KAFKA_CONNECT_PLUGINS_DIR/kafka-connect-jdbc
4 |
5 | ARG POSTGRES_VERSION=42.5.1
6 | ARG KAFKA_JDBC_VERSION=5.3.1
7 |
8 | # Deploy PostgreSQL JDBC Driver
9 | RUN cd /kafka/libs && curl -sO https://jdbc.postgresql.org/download/postgresql-$POSTGRES_VERSION.jar
10 |
11 | # Deploy Kafka Connect JDBC
12 | RUN mkdir $KAFKA_CONNECT_JDBC_DIR && cd $KAFKA_CONNECT_JDBC_DIR &&\
13 | curl -sO https://packages.confluent.io/maven/io/confluent/kafka-connect-jdbc/$KAFKA_JDBC_VERSION/kafka-connect-jdbc-$KAFKA_JDBC_VERSION.jar
14 |
--------------------------------------------------------------------------------
/unwrap-mongodb-smt/jdbc-sink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name" : "jdbc-sink",
3 | "config" : {
4 | "connector.class" : "io.confluent.connect.jdbc.JdbcSinkConnector",
5 | "tasks.max" : "1",
6 | "topics" : "customers",
7 | "connection.url" : "jdbc:postgresql://postgres:5432/inventorydb?user=postgresuser&password=postgrespw",
8 | "auto.create" : "true",
9 | "auto.evolve" : "true",
10 | "insert.mode" : "upsert",
11 | "delete.enabled": "true",
12 | "pk.fields" : "id",
13 | "pk.mode": "record_key",
14 | "transforms": "mongoflatten",
15 | "transforms.mongoflatten.type" : "io.debezium.connector.mongodb.transforms.ExtractNewDocumentState",
16 | "transforms.mongoflatten.drop.tombstones": "false"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/unwrap-mongodb-smt/mongodb-source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class" : "io.debezium.connector.mongodb.MongoDbConnector",
5 | "tasks.max" : "1",
6 | "topic.prefix" : "dbserver1",
7 | "mongodb.connection.string" : "mongodb://mongodb:27017/?replicaSet=rs0",
8 | "mongodb.user" : "debezium",
9 | "mongodb.password" : "dbz",
10 | "database.include.list" : "inventory",
11 | "schema.history.internal.kafka.bootstrap.servers" : "kafka:9092",
12 | "transforms": "route",
13 | "transforms.route.type" : "org.apache.kafka.connect.transforms.RegexRouter",
14 | "transforms.route.regex" : "([^.]+)\\.([^.]+)\\.([^.]+)",
15 | "transforms.route.replacement" : "$3"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/unwrap-smt/es-sink-aggregates.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "elastic-sink-aggregates",
3 | "config": {
4 | "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "aggregates",
7 | "connection.url": "http://elastic:9200",
8 | "transforms": "unwrap,key",
9 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
10 | "transforms.unwrap.drop.tombstones": "false",
11 | "transforms.key.type": "org.apache.kafka.connect.transforms.ExtractField$Key",
12 | "transforms.key.field": "rootId",
13 | "key.ignore": "false",
14 | "type.name": "customer-with-addresses",
15 | "behavior.on.null.values": "delete"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/unwrap-smt/es-sink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "elastic-sink",
3 | "config": {
4 | "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "customers",
7 | "connection.url": "http://elastic:9200",
8 | "transforms": "unwrap,key",
9 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
10 | "transforms.unwrap.drop.tombstones": "false",
11 | "transforms.key.type": "org.apache.kafka.connect.transforms.ExtractField$Key",
12 | "transforms.key.field": "id",
13 | "key.ignore": "false",
14 | "type.name": "customer",
15 | "behavior.on.null.values": "delete"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/unwrap-smt/jdbc-sink.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "jdbc-sink",
3 | "config": {
4 | "connector.class": "io.debezium.connector.jdbc.JdbcSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "customers",
7 | "connection.url": "jdbc:postgresql://postgres:5432/inventory",
8 | "connection.username": "postgresuser",
9 | "connection.password": "postgrespw",
10 | "insert.mode": "upsert",
11 | "delete.enabled": "true",
12 | "primary.key.mode": "record_key",
13 | "primary.key.fields": "id",
14 | "schema.evolution": "basic"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/unwrap-smt/source.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inventory-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
5 | "tasks.max": "1",
6 | "topic.prefix": "dbserver1",
7 | "database.hostname": "mysql",
8 | "database.port": "3306",
9 | "database.user": "debezium",
10 | "database.password": "dbz",
11 | "database.server.id": "184054",
12 | "database.include.list": "inventory",
13 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
14 | "schema.history.internal.kafka.topic": "schema-changes.inventory",
15 | "transforms": "route",
16 | "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter",
17 | "transforms.route.regex": "([^.]+)\\.([^.]+)\\.([^.]+)",
18 | "transforms.route.replacement": "$3"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------