├── LICENSE
├── README.md
├── airbyte-pinot
│   ├── config
│   │   ├── orders_schema.json
│   │   └── orders_table.json
│   ├── docker-compose.yml
│   ├── mysql
│   │   ├── .DS_Store
│   │   └── ecommerce-schema.sql
│   └── solution.png
├── aws-eventbridge-custom-events
│   ├── email-service
│   │   ├── .idea
│   │   │   ├── .gitignore
│   │   │   ├── .name
│   │   │   ├── aws.xml
│   │   │   ├── compiler.xml
│   │   │   ├── email-service.iml
│   │   │   ├── jarRepositories.xml
│   │   │   ├── libraries
│   │   │   │   ├── Maven__com_amazonaws_aws_lambda_java_core_1_2_1.xml
│   │   │   │   ├── Maven__com_fasterxml_jackson_core_jackson_annotations_2_11_3.xml
│   │   │   │   ├── Maven__com_fasterxml_jackson_core_jackson_core_2_11_3.xml
│   │   │   │   ├── Maven__com_fasterxml_jackson_core_jackson_databind_2_11_3.xml
│   │   │   │   ├── Maven__com_fasterxml_jackson_datatype_jackson_datatype_jsr310_2_11_3.xml
│   │   │   │   ├── Maven__junit_junit_4_13_1.xml
│   │   │   │   └── Maven__org_hamcrest_hamcrest_core_1_3.xml
│   │   │   ├── misc.xml
│   │   │   ├── modules.xml
│   │   │   └── uiDesigner.xml
│   │   ├── EmailSenderFunction
│   │   │   ├── README.md
│   │   │   ├── email-service.iml
│   │   │   ├── pom.xml
│   │   │   └── src
│   │   │       └── main
│   │   │           ├── java
│   │   │           │   ├── com
│   │   │           │   │   └── edu
│   │   │           │   │       └── eb
│   │   │           │   │           └── EmailSender.java
│   │   │           │   └── schema
│   │   │           │       └── edu_orders
│   │   │           │           └── orderconfirmed
│   │   │           │               ├── AWSEvent.java
│   │   │           │               ├── Customer.java
│   │   │           │               ├── Item.java
│   │   │           │               ├── OrderConfirmed.java
│   │   │           │               └── marshaller
│   │   │           │                   └── Marshaller.java
│   │   │           └── resources
│   │   │               └── log4j2.xml
│   │   ├── README.md
│   │   ├── events
│   │   │   └── event.json
│   │   └── template.yaml
│   └── order-service
│       ├── .idea
│       │   ├── .gitignore
│       │   ├── aws.xml
│       │   ├── compiler.xml
│       │   ├── encodings.xml
│       │   ├── jarRepositories.xml
│       │   └── misc.xml
│       ├── pom.xml
│       └── src
│           └── main
│               └── java
│                   ├── com
│                   │   └── edu
│                   │       └── eb
│                   │           └── EventProducer.java
│                   └── schema
│                       └── edu_orders
│                           └── orderconfirmed
│                               ├── AWSEvent.java
│                               ├── Customer.java
│                               ├── Item.java
│                               ├── OrderConfirmed.java
│                               └── marshaller
│                                   └── Marshaller.java
├── cqrs-views
│   ├── README.md
│   ├── config
│   │   ├── orders_schema.json
│   │   └── orders_table.json
│   ├── data
│   │   └── updates.txt
│   ├── docker-compose.yml
│   └── mysql
│       ├── mysql.cnf
│       └── mysql_bootstrap.sql
├── fitness-leaderboard-apache-pinot
│   ├── commands.txt
│   ├── data
│   │   ├── .DS_Store
│   │   └── samples
│   │       ├── fitness_events_24h.json
│   │       └── fitness_events_all_time.json
│   ├── queries.txt
│   └── schemas
│       ├── steps-schema.json
│       └── steps-table.json
├── idempotent-consumer
│   ├── README.md
│   ├── mvnw
│   ├── mvnw.cmd
│   ├── orders.txt
│   ├── pom.xml
│   └── src
│       ├── main
│       │   ├── docker
│       │   │   ├── Dockerfile.jvm
│       │   │   ├── Dockerfile.legacy-jar
│       │   │   ├── Dockerfile.native
│       │   │   └── Dockerfile.native-distroless
│       │   ├── java
│       │   │   └── com
│       │   │       └── edu
│       │   │           └── samples
│       │   │               ├── KafkaEventConsumer.java
│       │   │               ├── OrderEventHandler.java
│       │   │               ├── messagelog
│       │   │               │   ├── ConsumedMessage.java
│       │   │               │   └── MessageLog.java
│       │   │               └── serde
│       │   │                   ├── Order.java
│       │   │                   ├── OrderEvent.java
│       │   │                   └── OrderEventDeserializer.java
│       │   └── resources
│       │       ├── META-INF
│       │       │   └── resources
│       │       │       └── index.html
│       │       └── application.properties
│       └── test
│           └── java
│               └── com
│                   └── edu
│                       └── samples
│                           └── MessageLogTest.java
├── mv_with_materialize
│   ├── README.md
│   ├── debezium
│   │   └── commands.txt
│   ├── docker-compose.yml
│   ├── materialize
│   │   └── script.sql
│   ├── payments-feeder
│   │   ├── Dockerfile
│   │   ├── docker-entrypoint.sh
│   │   └── minified-payments.txt
│   └── scripts
│       ├── mysql
│       │   └── orders.sql
│       └── postgres
│           └── shipments.sql
├── mz-funnel-chart-plotly
│   ├── dash
│   │   └── app.py
│   ├── data
│   │   └── events.csv
│   └── script.sql
├── mz_alerts
│   ├── .DS_Store
│   ├── README.md
│   ├── docker-compose.yml
│   ├── materialize.sql
│   ├── pagerduty-client
│   │   ├── Dockerfile
│   │   ├── docker-entrypoint.sh
│   │   ├── pd_client.py
│   │   └── sample-incident.json
│   └── pagerduty_console.png
├── pinot-upserts
│   ├── README.md
│   ├── debezium
│   │   ├── output.json
│   │   └── register_mysql.sh
│   ├── docker-compose.yml
│   ├── images
│   │   ├── architecture.png
│   │   └── final-result.png
│   ├── mysql
│   │   ├── mysql.cnf
│   │   └── mysql_bootstrap.sql
│   ├── pinot
│   │   └── config
│   │       ├── orders_schema.json
│   │       └── orders_table.json
│   └── simulator
│       ├── Dockerfile
│       ├── requirements.txt
│       └── seeder.py
├── postgres-docker
│   ├── docker-compose.yml
│   └── scripts
│       └── shipments-db.sql
├── quarkus-websockets-dashboard
│   ├── README.md
│   ├── mvnw
│   ├── mvnw.cmd
│   ├── pom.xml
│   └── src
│       └── main
│           ├── docker
│           │   ├── Dockerfile.fast-jar
│           │   ├── Dockerfile.jvm
│           │   └── Dockerfile.native
│           ├── java
│           │   └── com
│           │       └── edu
│           │           └── retail
│           │               └── ws
│           │                   └── DashboardWebSocket.java
│           └── resources
│               ├── META-INF
│               │   └── resources
│               │       ├── index.html
│               │       └── js
│               │           └── dashboard.js
│               └── application.properties
├── redpanda-quarkus-microserives
│   └── risk-service
│       ├── .dockerignore
│       ├── .gitignore
│       ├── .mvn
│       │   └── wrapper
│       │       ├── MavenWrapperDownloader.java
│       │       ├── maven-wrapper.jar
│       │       └── maven-wrapper.properties
│       ├── mvnw
│       ├── mvnw.cmd
│       ├── pom.xml
│       └── src
│           └── main
│               ├── docker
│               │   ├── Dockerfile.jvm
│               │   ├── Dockerfile.legacy-jar
│               │   ├── Dockerfile.native
│               │   └── Dockerfile.native-distroless
│               ├── java
│               │   └── redpanda
│               │       └── samples
│               │           └── edm
│               │               ├── RedpandaEventConsumer.java
│               │               └── serde
│               │                   ├── PaymentEventDeserializer.java
│               │                   ├── PaymentReceivedEvent.java
│               │                   └── PaymentValidatedEvent.java
│               └── resources
│                   └── application.properties
├── spacex-launch-analysis
│   ├── pinot
│   │   ├── .DS_Store
│   │   ├── batch-job-spec.yaml
│   │   ├── launches-schema.json
│   │   ├── launches-table.json
│   │   └── queries.sql
│   ├── rawdata
│   │   ├── .DS_Store
│   │   └── spacex.csv
│   └── streamlit
│       ├── .DS_Store
│       ├── Pipfile
│       └── app.py
└── websockets-testing-with-firecamp
    ├── README.md
    ├── package.json
    ├── server-json.js
    └── server.js
/README.md:
--------------------------------------------------------------------------------
1 | # Code Samples for Articles Featured at eventdrivenutopia.com
2 | This repository contains the source code for the samples featured in articles on eventdrivenutopia.com.
3 |
--------------------------------------------------------------------------------
/airbyte-pinot/config/orders_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "schemaName": "orders",
3 | "primaryKeyColumns": [
4 | "id"
5 | ],
6 | "dimensionFieldSpecs": [
7 | {
8 | "name": "id",
9 | "dataType": "INT"
10 | },
11 | {
12 | "name": "store_id",
13 | "dataType": "INT"
14 | },
15 | {
16 | "name": "channel",
17 | "dataType": "STRING"
18 | },
19 | {
20 | "name": "country",
21 | "dataType": "STRING"
22 | },
23 | {
24 | "name": "status",
25 | "dataType": "STRING"
26 | }
27 | ],
28 | "metricFieldSpecs": [
29 | {
30 | "name": "total",
31 | "dataType": "FLOAT"
32 | }
33 | ],
34 | "dateTimeFieldSpecs": [{
35 | "name": "order_date",
36 | "dataType": "STRING",
37 | "format" : "1:DAYS:SIMPLE_DATE_FORMAT:yyyy-MM-dd",
38 | "granularity": "1:DAYS"
39 | }]
40 | }
--------------------------------------------------------------------------------
/airbyte-pinot/config/orders_table.json:
--------------------------------------------------------------------------------
1 | {
2 | "tableName": "orders",
3 | "tableType": "REALTIME",
4 | "segmentsConfig": {
5 | "timeColumnName": "order_date",
6 | "schemaName": "orders",
7 | "replication": "1",
8 | "replicasPerPartition": "1"
9 | },
10 | "ingestionConfig": {
11 | "batchIngestionConfig": {
12 | "segmentIngestionType": "APPEND",
13 | "segmentIngestionFrequency": "DAILY"
14 | },
15 | "transformConfigs": [
16 | {"columnName": "id", "transformFunction": "JSONPATHLONG(_airbyte_data, '$.id')" },
17 | {"columnName": "store_id", "transformFunction": "JSONPATHLONG(_airbyte_data, '$.store_id')" },
18 | {"columnName": "channel", "transformFunction": "JSONPATHSTRING(_airbyte_data, '$.channel')" },
19 | {"columnName": "country", "transformFunction": "JSONPATHSTRING(_airbyte_data, '$.country')" },
20 | {"columnName": "total", "transformFunction": "JSONPATHDOUBLE(_airbyte_data, '$.total')" },
21 | {"columnName": "status", "transformFunction": "JSONPATHSTRING(_airbyte_data, '$.status')" },
22 | {"columnName": "order_date", "transformFunction": "JSONPATHSTRING(_airbyte_data, '$.order_date')" }
23 | ]
24 | },
25 | "tableIndexConfig": {
26 | "loadMode": "MMAP",
27 | "streamConfigs": {
28 | "streamType": "kafka",
29 | "stream.kafka.topic.name": "orders",
30 | "stream.kafka.broker.list": "kafka:9093",
31 | "stream.kafka.consumer.type": "lowlevel",
32 | "stream.kafka.consumer.prop.auto.offset.reset": "smallest",
33 | "stream.kafka.consumer.factory.class.name": "org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory",
34 | "stream.kafka.decoder.class.name": "org.apache.pinot.plugin.stream.kafka.KafkaJSONMessageDecoder"
35 | }
36 | },
37 | "tenants": {},
38 | "metadata": {},
39 | "routing": {
40 | "instanceSelectorType": "strictReplicaGroup"
41 | },
42 | "upsertConfig": {
43 | "mode": "FULL"
44 | }
45 | }
--------------------------------------------------------------------------------
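The two JSON files above are the Pinot schema and the REALTIME table definition that must be registered with the controller before Pinot starts consuming the `orders` topic. As a minimal, hypothetical sketch (the class name and file locations are assumptions, not part of this repo), they can be POSTed to the controller's REST API (`/schemas`, then `/tables`) with Java 11's built-in HTTP client, assuming the controller from the docker-compose.yml below is reachable at `localhost:9000`:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class RegisterOrdersTable {

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // The schema must exist before the table config that references it
        post(client, "http://localhost:9000/schemas", Path.of("config/orders_schema.json"));
        post(client, "http://localhost:9000/tables", Path.of("config/orders_table.json"));
    }

    private static void post(HttpClient client, String url, Path jsonFile) throws Exception {
        HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofFile(jsonFile))
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // Print the controller's verdict for each upload
        System.out.println(url + " -> " + response.statusCode() + " " + response.body());
    }
}
```

The same registration can also be done with the `pinot-admin.sh AddTable` command shipped inside the Pinot image; the REST sketch above is just the scriptable equivalent.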
/airbyte-pinot/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 | services:
3 | zookeeper:
4 | image: zookeeper:3.5.6
5 | hostname: zookeeper
6 | container_name: zookeeper
7 | ports:
8 | - "2181:2181"
9 | environment:
10 | ZOOKEEPER_CLIENT_PORT: 2181
11 | ZOOKEEPER_TICK_TIME: 2000
12 | pinot-controller:
13 | image: apachepinot/pinot:0.9.3
14 | command: "StartController -zkAddress zookeeper:2181 -dataDir /data"
15 | container_name: "pinot-controller"
16 | volumes:
17 | - ./config:/config
18 | restart: unless-stopped
19 | ports:
20 | - "9000:9000"
21 | depends_on:
22 | - zookeeper
23 | pinot-broker:
24 | image: apachepinot/pinot:0.9.3
25 | command: "StartBroker -zkAddress zookeeper:2181"
26 | restart: unless-stopped
27 | container_name: "pinot-broker"
28 | ports:
29 | - "8099:8099"
30 | depends_on:
31 | - pinot-controller
32 | pinot-server:
33 | image: apachepinot/pinot:0.9.3
34 | command: "StartServer -zkAddress zookeeper:2181"
35 | restart: unless-stopped
36 | container_name: "pinot-server"
37 | depends_on:
38 | - pinot-broker
39 | kafka:
40 | image: wurstmeister/kafka:latest
41 | restart: unless-stopped
42 | container_name: "kafka"
43 | ports:
44 | - "9092:9092"
45 | expose:
46 | - "9093"
47 | depends_on:
48 | - zookeeper
49 | environment:
50 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181/kafka
51 | KAFKA_BROKER_ID: 0
52 | KAFKA_ADVERTISED_HOST_NAME: kafka
53 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9093,OUTSIDE://localhost:9092 # containers (e.g. Pinot) reach the broker at kafka:9093
54 | KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9093,OUTSIDE://0.0.0.0:9092 # clients on the host connect via localhost:9092
55 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
--------------------------------------------------------------------------------
/airbyte-pinot/mysql/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/airbyte-pinot/mysql/.DS_Store
--------------------------------------------------------------------------------
/airbyte-pinot/mysql/ecommerce-schema.sql:
--------------------------------------------------------------------------------
1 | create database ecommerce;
2 | use ecommerce;
3 | create table orders (
4 | id int auto_increment primary key,
5 | store_id int not null,
6 | order_date varchar(255) not null,
7 | channel varchar(25),
8 | country varchar(25),
9 | total float not null,
10 | status varchar(25)
11 | );
12 |
13 | insert into orders (id, store_id, order_date, channel, country, total, status) values (1, 100, '2021-08-15', 'STORE', 'Hungary', 173.04, 'ACTIVE');
14 | insert into orders (id, store_id, order_date, channel, country, total, status) values (2, 100, '2021-04-08', 'WEB', 'Palestinian Territory', 103.01, 'ACTIVE');
15 | insert into orders (id, store_id, order_date, channel, country, total, status) values (3, 100, '2021-10-31', 'MOBILE', 'China', 94.22, 'ACTIVE');
16 | insert into orders (id, store_id, order_date, channel, country, total, status) values (4, 101, '2022-01-23', 'WEB', 'Indonesia', 148.92, 'ACTIVE');
17 | insert into orders (id, store_id, order_date, channel, country, total, status) values (5, 100, '2021-05-10', 'MOBILE', 'Armenia', 314.16, 'ACTIVE');
18 | insert into orders (id, store_id, order_date, channel, country, total, status) values (6, 102, '2021-07-07', 'WEB', 'Czech Republic', 113.96, 'ACTIVE');
19 | insert into orders (id, store_id, order_date, channel, country, total, status) values (7, 101, '2021-11-14', 'WEB', 'China', 233.15, 'ACTIVE');
20 | insert into orders (id, store_id, order_date, channel, country, total, status) values (8, 100, '2021-11-25', 'STORE', 'Philippines', 138.3, 'ACTIVE');
21 | insert into orders (id, store_id, order_date, channel, country, total, status) values (9, 101, '2022-02-25', 'WEB', 'Philippines', 272.48, 'ACTIVE');
22 | insert into orders (id, store_id, order_date, channel, country, total, status) values (10, 101, '2021-07-12', 'STORE', 'China', 939.98, 'ACTIVE');
23 |
--------------------------------------------------------------------------------
/airbyte-pinot/solution.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/airbyte-pinot/solution.png
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/.name:
--------------------------------------------------------------------------------
1 | EventBridgeStarterApp
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/aws.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/compiler.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/email-service.iml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA module file]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/jarRepositories.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/libraries/Maven__com_amazonaws_aws_lambda_java_core_1_2_1.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA library definition]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_annotations_2_11_3.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA library definition]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_core_2_11_3.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA library definition]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_databind_2_11_3.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA library definition]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/libraries/Maven__com_fasterxml_jackson_datatype_jackson_datatype_jsr310_2_11_3.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA library definition]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/libraries/Maven__junit_junit_4_13_1.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA library definition]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/libraries/Maven__org_hamcrest_hamcrest_core_1_3.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA library definition]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/misc.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/.idea/modules.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/README.md:
--------------------------------------------------------------------------------
 1 | # OrderConfirmed
 2 |
 3 | *Automatically generated by the [Amazon EventBridge Schema Registry](https://aws.amazon.com/)*
 4 |
 5 | ## Requirements
 6 |
 7 | Build dependencies:
 8 | 1. Java 1.8+
 9 | 2. jackson-core 2.10
10 | 3. jackson-databind 2.10
11 | 4. jackson-annotations 2.10
12 |
13 | ## Add Dependencies
14 |
15 | ### Maven users
16 |
17 | Add the following to your project's pom.xml:
18 |
19 | ```xml
20 | <dependency>
21 |     <groupId>com.fasterxml.jackson.core</groupId>
22 |     <artifactId>jackson-databind</artifactId>
23 |     <version>2.10.0</version>
24 | </dependency>
25 | <dependency>
26 |     <groupId>com.fasterxml.jackson.core</groupId>
27 |     <artifactId>jackson-core</artifactId>
28 |     <version>2.10.0</version>
29 | </dependency>
30 | <dependency>
31 |     <groupId>com.fasterxml.jackson.core</groupId>
32 |     <artifactId>jackson-annotations</artifactId>
33 |     <version>2.10.0</version>
34 | </dependency>
35 | ```
36 |
37 | ### Gradle users
38 |
39 | Add the following to your project's build file:
40 |
41 | ```groovy
42 | // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind
43 | implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.0'
44 | // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core
45 | implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.10.0'
46 | // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations
47 | implementation group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: '2.10.0'
48 | ```
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/email-service.iml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA module file]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/pom.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 4 |     <modelVersion>4.0.0</modelVersion>
 5 |     <groupId>com.edu.eb</groupId>
 6 |     <artifactId>email-service</artifactId>
 7 |     <version>1.0</version>
 8 |     <packaging>jar</packaging>
 9 |     <description>This application subscribes to OrderConfirmed events coming from EventBridge to send an email.</description>
10 |     <properties>
11 |         <maven.compiler.source>1.8</maven.compiler.source>
12 |         <maven.compiler.target>1.8</maven.compiler.target>
13 |     </properties>
14 |
15 |     <dependencies>
16 |         <dependency>
17 |             <groupId>com.amazonaws</groupId>
18 |             <artifactId>aws-lambda-java-core</artifactId>
19 |             <version>1.2.1</version>
20 |         </dependency>
21 |         <dependency>
22 |             <groupId>junit</groupId>
23 |             <artifactId>junit</artifactId>
24 |             <version>4.13.1</version>
25 |             <scope>test</scope>
26 |         </dependency>
27 |         <dependency>
28 |             <groupId>com.fasterxml.jackson.core</groupId>
29 |             <artifactId>jackson-annotations</artifactId>
30 |             <version>2.11.3</version>
31 |         </dependency>
32 |         <dependency>
33 |             <groupId>com.fasterxml.jackson.core</groupId>
34 |             <artifactId>jackson-databind</artifactId>
35 |             <version>2.11.3</version>
36 |         </dependency>
37 |         <dependency>
38 |             <groupId>com.fasterxml.jackson.datatype</groupId>
39 |             <artifactId>jackson-datatype-jsr310</artifactId>
40 |             <version>2.11.3</version>
41 |         </dependency>
42 |
43 |         <dependency>
44 |             <groupId>com.amazonaws</groupId>
45 |             <artifactId>aws-lambda-java-log4j2</artifactId>
46 |             <version>1.2.0</version>
47 |         </dependency>
48 |         <dependency>
49 |             <groupId>org.apache.logging.log4j</groupId>
50 |             <artifactId>log4j-api</artifactId>
51 |             <version>2.13.0</version>
52 |         </dependency>
53 |         <dependency>
54 |             <groupId>org.apache.logging.log4j</groupId>
55 |             <artifactId>log4j-core</artifactId>
56 |             <version>2.13.2</version>
57 |         </dependency>
58 |         <dependency>
59 |             <groupId>org.apache.logging.log4j</groupId>
60 |             <artifactId>log4j-slf4j18-impl</artifactId>
61 |             <version>2.13.0</version>
62 |         </dependency>
63 |     </dependencies>
64 |
65 |     <build>
66 |         <plugins>
67 |             <plugin>
68 |                 <groupId>org.apache.maven.plugins</groupId>
69 |                 <artifactId>maven-shade-plugin</artifactId>
70 |                 <version>3.2.4</version>
71 |                 <executions>
72 |                     <execution>
73 |                         <phase>package</phase>
74 |                         <goals>
75 |                             <goal>shade</goal>
76 |                         </goals>
77 |                     </execution>
78 |                 </executions>
79 |             </plugin>
80 |         </plugins>
81 |     </build>
82 | </project>
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/src/main/java/com/edu/eb/EmailSender.java:
--------------------------------------------------------------------------------
1 | package com.edu.eb;
2 |
3 | import java.io.IOException;
4 | import java.io.InputStream;
5 | import java.io.OutputStream;
6 |
7 | import com.amazonaws.services.lambda.runtime.Context;
8 | import com.amazonaws.services.lambda.runtime.RequestStreamHandler;
9 | import schema.edu_orders.orderconfirmed.AWSEvent;
10 | import schema.edu_orders.orderconfirmed.OrderConfirmed;
11 | import schema.edu_orders.orderconfirmed.marshaller.Marshaller;
12 | import org.slf4j.Logger;
13 | import org.slf4j.LoggerFactory;
14 |
15 | /**
16 | * Handler for EventBridge invocations of a Lambda function target
17 | */
18 | public class EmailSender implements RequestStreamHandler {
19 | private static final Logger logger = LoggerFactory.getLogger(EmailSender.class);
20 | static final String NEW_DETAIL_TYPE = "EmailSenderFunction updated event of %s";
21 |
22 | private Object handleEvent(final AWSEvent<OrderConfirmed> inputEvent, final Context context) {
23 | if (inputEvent != null) {
24 | OrderConfirmed confirmation = inputEvent.getDetail();
25 |
26 | logger.info(String.format("Order ID: %s, Customer: %s No.of Items: %d",
27 | confirmation.getId(),
28 | confirmation.getCustomer().toString(),
29 | confirmation.getItems().size()));
30 |
31 | // Write your event-driven business logic here; for example:
32 | inputEvent.setDetailType(String.format(NEW_DETAIL_TYPE, inputEvent.getDetailType()));
33 |
34 | return inputEvent;
35 | }
36 |
37 | throw new IllegalArgumentException("Unable to deserialize lambda input event to AWSEvent. Check that you have the right schema and event source.");
38 | }
39 |
40 | /**
41 | * Handles a Lambda Function request
42 | * @param input The Lambda Function input stream
43 | * @param output The Lambda function output stream
44 | * @param context The Lambda execution environment context object.
45 | * @throws IOException
46 | */
47 | public void handleRequest(InputStream input, OutputStream output, Context context) throws IOException {
48 | AWSEvent<OrderConfirmed> event = Marshaller.unmarshalEvent(input, OrderConfirmed.class);
49 |
50 | Object response = handleEvent(event, context);
51 |
52 | Marshaller.marshal(output, response);
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
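For a quick local smoke test of the handler above, an EventBridge envelope can be fed straight into `handleRequest`; the `Marshaller` binds the generic `detail` field to `OrderConfirmed`. The sketch below is illustrative only (the class is not part of this repo): the payload values are borrowed from `EventProducer.populateDummyEvent()`, and `null` is passed for the `Context` since this handler never dereferences it.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import com.edu.eb.EmailSender;

public class EmailSenderSmokeTest {

    public static void main(String[] args) throws Exception {
        // An EventBridge envelope whose "detail" matches the OrderConfirmed schema;
        // values are borrowed from EventProducer.populateDummyEvent()
        String envelope = "{"
                + "\"detail-type\":\"Order Confirmed\","
                + "\"source\":\"edu.svc.orders\","
                + "\"detail\":{"
                +   "\"id\":123456789,\"status\":\"CONFIRMED\",\"currency\":\"USD\","
                +   "\"customer\":{\"firstName\":\"John\",\"lastName\":\"Doe\",\"email\":\"aa@bb.cc\"},"
                +   "\"items\":[{\"sku\":1,\"name\":\"Foo\",\"price\":12.99,\"quantity\":1}]"
                + "}}";

        ByteArrayOutputStream output = new ByteArrayOutputStream();
        // Context is never dereferenced by this handler, so null is acceptable locally
        new EmailSender().handleRequest(
                new ByteArrayInputStream(envelope.getBytes("UTF-8")), output, null);

        // The handler echoes the envelope back with a rewritten detail-type
        System.out.println(output.toString("UTF-8"));
    }
}
```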
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/src/main/java/schema/edu_orders/orderconfirmed/AWSEvent.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.util.ArrayList;
8 | import java.util.Date;
9 | import java.util.List;
10 | import schema.edu_orders.orderconfirmed.OrderConfirmed;
11 | import java.io.Serializable;
12 |
13 | public class AWSEvent<T> {
14 |
15 | @JsonProperty("detail")
16 | private T detail = null;
17 |
18 | @JsonProperty("detail-type")
19 | private String detailType = null;
20 |
21 | @JsonProperty("resources")
22 | private List<String> resources = null;
23 |
24 | @JsonProperty("id")
25 | private String id = null;
26 |
27 | @JsonProperty("source")
28 | private String source = null;
29 |
30 | @JsonProperty("time")
31 | private Date time = null;
32 |
33 | @JsonProperty("region")
34 | private String region = null;
35 |
36 | @JsonProperty("version")
37 | private String version = null;
38 |
39 | @JsonProperty("account")
40 | private String account = null;
41 |
42 | public AWSEvent<T> detail(T detail) {
43 | this.detail = detail;
44 | return this;
45 | }
46 |
47 | public T getDetail() {
48 | return detail;
49 | }
50 |
51 | public void setDetail(T detail) {
52 | this.detail = detail;
53 | }
54 |
55 | public AWSEvent<T> detailType(String detailType) {
56 | this.detailType = detailType;
57 | return this;
58 | }
59 |
60 | public String getDetailType() {
61 | return detailType;
62 | }
63 |
64 | public void setDetailType(String detailType) {
65 | this.detailType = detailType;
66 | }
67 |
68 | public AWSEvent<T> resources(List<String> resources) {
69 | this.resources = resources;
70 | return this;
71 | }
72 |
73 | public List<String> getResources() {
74 | return resources;
75 | }
76 |
77 | public void setResources(List<String> resources) {
78 | this.resources = resources;
79 | }
80 |
81 | public AWSEvent<T> id(String id) {
82 | this.id = id;
83 | return this;
84 | }
85 |
86 | public String getId() {
87 | return id;
88 | }
89 |
90 | public void setId(String id) {
91 | this.id = id;
92 | }
93 |
94 | public AWSEvent<T> source(String source) {
95 | this.source = source;
96 | return this;
97 | }
98 |
99 | public String getSource() {
100 | return source;
101 | }
102 |
103 | public void setSource(String source) {
104 | this.source = source;
105 | }
106 |
107 | public AWSEvent<T> time(Date time) {
108 | this.time = time;
109 | return this;
110 | }
111 |
112 | public Date getTime() {
113 | return time;
114 | }
115 |
116 | public void setTime(Date time) {
117 | this.time = time;
118 | }
119 |
120 | public AWSEvent<T> region(String region) {
121 | this.region = region;
122 | return this;
123 | }
124 |
125 | public String getRegion() {
126 | return region;
127 | }
128 |
129 | public void setRegion(String region) {
130 | this.region = region;
131 | }
132 |
133 | public AWSEvent<T> version(String version) {
134 | this.version = version;
135 | return this;
136 | }
137 |
138 | public String getVersion() {
139 | return version;
140 | }
141 |
142 | public void setVersion(String version) {
143 | this.version = version;
144 | }
145 |
146 | public AWSEvent<T> account(String account) {
147 | this.account = account;
148 | return this;
149 | }
150 |
151 | public String getAccount() {
152 | return account;
153 | }
154 |
155 | public void setAccount(String account) {
156 | this.account = account;
157 | }
158 |
159 | @Override
160 | public boolean equals(java.lang.Object o) {
161 | if (this == o) {
162 | return true;
163 | }
164 | if (o == null || getClass() != o.getClass()) {
165 | return false;
166 | }
167 | AWSEvent awSEvent = (AWSEvent) o;
168 | return Objects.equals(this.detail, awSEvent.detail) &&
169 | Objects.equals(this.detailType, awSEvent.detailType) &&
170 | Objects.equals(this.resources, awSEvent.resources) &&
171 | Objects.equals(this.id, awSEvent.id) &&
172 | Objects.equals(this.source, awSEvent.source) &&
173 | Objects.equals(this.time, awSEvent.time) &&
174 | Objects.equals(this.region, awSEvent.region) &&
175 | Objects.equals(this.version, awSEvent.version) &&
176 | Objects.equals(this.account, awSEvent.account);
177 | }
178 |
179 | @Override
180 | public int hashCode() {
181 | return java.util.Objects.hash(detail, detailType, resources, id, source, time, region, version, account);
182 | }
183 |
184 |
185 | @Override
186 | public String toString() {
187 | StringBuilder sb = new StringBuilder();
188 | sb.append("class AWSEvent {\n");
189 |
190 | sb.append(" detail: ").append(toIndentedString(detail)).append("\n");
191 | sb.append(" detailType: ").append(toIndentedString(detailType)).append("\n");
192 | sb.append(" resources: ").append(toIndentedString(resources)).append("\n");
193 | sb.append(" id: ").append(toIndentedString(id)).append("\n");
194 | sb.append(" source: ").append(toIndentedString(source)).append("\n");
195 | sb.append(" time: ").append(toIndentedString(time)).append("\n");
196 | sb.append(" region: ").append(toIndentedString(region)).append("\n");
197 | sb.append(" version: ").append(toIndentedString(version)).append("\n");
198 | sb.append(" account: ").append(toIndentedString(account)).append("\n");
199 | sb.append("}");
200 | return sb.toString();
201 | }
202 |
203 | private String toIndentedString(java.lang.Object o) {
204 | if (o == null) {
205 | return "null";
206 | }
207 | return o.toString().replace("\n", "\n ");
208 | }
209 | }
210 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/src/main/java/schema/edu_orders/orderconfirmed/Customer.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.io.Serializable;
8 |
9 | public class Customer implements Serializable {
10 | private static final long serialVersionUID = 1L;
11 |
12 | @JsonProperty("firstName")
13 | private String firstName = null;
14 |
15 | @JsonProperty("lastName")
16 | private String lastName = null;
17 |
18 | @JsonProperty("email")
19 | private String email = null;
20 |
21 | public Customer firstName(String firstName) {
22 | this.firstName = firstName;
23 | return this;
24 | }
25 |
26 |
27 | public String getFirstName() {
28 | return firstName;
29 | }
30 |
31 | public void setFirstName(String firstName) {
32 | this.firstName = firstName;
33 | }
34 |
35 | public Customer lastName(String lastName) {
36 | this.lastName = lastName;
37 | return this;
38 | }
39 |
40 |
41 | public String getLastName() {
42 | return lastName;
43 | }
44 |
45 | public void setLastName(String lastName) {
46 | this.lastName = lastName;
47 | }
48 |
49 | public Customer email(String email) {
50 | this.email = email;
51 | return this;
52 | }
53 |
54 |
55 | public String getEmail() {
56 | return email;
57 | }
58 |
59 | public void setEmail(String email) {
60 | this.email = email;
61 | }
62 |
63 | @Override
64 | public boolean equals(java.lang.Object o) {
65 | if (this == o) {
66 | return true;
67 | }
68 | if (o == null || getClass() != o.getClass()) {
69 | return false;
70 | }
71 | Customer customer = (Customer) o;
72 | return Objects.equals(this.firstName, customer.firstName) &&
73 | Objects.equals(this.lastName, customer.lastName) &&
74 | Objects.equals(this.email, customer.email);
75 | }
76 |
77 | @Override
78 | public int hashCode() {
79 | return java.util.Objects.hash(firstName, lastName, email);
80 | }
81 |
82 |
83 | @Override
84 | public String toString() {
85 | StringBuilder sb = new StringBuilder();
86 | sb.append("class Customer {\n");
87 |
88 | sb.append(" firstName: ").append(toIndentedString(firstName)).append("\n");
89 | sb.append(" lastName: ").append(toIndentedString(lastName)).append("\n");
90 | sb.append(" email: ").append(toIndentedString(email)).append("\n");
91 | sb.append("}");
92 | return sb.toString();
93 | }
94 |
95 | private String toIndentedString(java.lang.Object o) {
96 | if (o == null) {
97 | return "null";
98 | }
99 | return o.toString().replace("\n", "\n ");
100 | }
101 |
102 | }
103 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/src/main/java/schema/edu_orders/orderconfirmed/Item.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.math.BigDecimal;
8 | import java.io.Serializable;
9 |
10 | public class Item implements Serializable {
11 | private static final long serialVersionUID = 1L;
12 |
13 | @JsonProperty("sku")
14 | private BigDecimal sku = null;
15 |
16 | @JsonProperty("name")
17 | private String name = null;
18 |
19 | @JsonProperty("price")
20 | private Double price = null;
21 |
22 | @JsonProperty("quantity")
23 | private BigDecimal quantity = null;
24 |
25 | public Item sku(BigDecimal sku) {
26 | this.sku = sku;
27 | return this;
28 | }
29 |
30 |
31 | public BigDecimal getSku() {
32 | return sku;
33 | }
34 |
35 | public void setSku(BigDecimal sku) {
36 | this.sku = sku;
37 | }
38 |
39 | public Item name(String name) {
40 | this.name = name;
41 | return this;
42 | }
43 |
44 |
45 | public String getName() {
46 | return name;
47 | }
48 |
49 | public void setName(String name) {
50 | this.name = name;
51 | }
52 |
53 | public Item price(Double price) {
54 | this.price = price;
55 | return this;
56 | }
57 |
58 |
59 | public Double getPrice() {
60 | return price;
61 | }
62 |
63 | public void setPrice(Double price) {
64 | this.price = price;
65 | }
66 |
67 | public Item quantity(BigDecimal quantity) {
68 | this.quantity = quantity;
69 | return this;
70 | }
71 |
72 |
73 | public BigDecimal getQuantity() {
74 | return quantity;
75 | }
76 |
77 | public void setQuantity(BigDecimal quantity) {
78 | this.quantity = quantity;
79 | }
80 |
81 | @Override
82 | public boolean equals(java.lang.Object o) {
83 | if (this == o) {
84 | return true;
85 | }
86 | if (o == null || getClass() != o.getClass()) {
87 | return false;
88 | }
89 | Item item = (Item) o;
90 | return Objects.equals(this.sku, item.sku) &&
91 | Objects.equals(this.name, item.name) &&
92 | Objects.equals(this.price, item.price) &&
93 | Objects.equals(this.quantity, item.quantity);
94 | }
95 |
96 | @Override
97 | public int hashCode() {
98 | return java.util.Objects.hash(sku, name, price, quantity);
99 | }
100 |
101 |
102 | @Override
103 | public String toString() {
104 | StringBuilder sb = new StringBuilder();
105 | sb.append("class Item {\n");
106 |
107 | sb.append(" sku: ").append(toIndentedString(sku)).append("\n");
108 | sb.append(" name: ").append(toIndentedString(name)).append("\n");
109 | sb.append(" price: ").append(toIndentedString(price)).append("\n");
110 | sb.append(" quantity: ").append(toIndentedString(quantity)).append("\n");
111 | sb.append("}");
112 | return sb.toString();
113 | }
114 |
115 | private String toIndentedString(java.lang.Object o) {
116 | if (o == null) {
117 | return "null";
118 | }
119 | return o.toString().replace("\n", "\n ");
120 | }
121 |
122 | }
123 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/src/main/java/schema/edu_orders/orderconfirmed/OrderConfirmed.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.math.BigDecimal;
8 | import java.util.ArrayList;
9 | import java.util.List;
10 | import schema.edu_orders.orderconfirmed.Customer;
11 | import schema.edu_orders.orderconfirmed.Item;
12 | import java.io.Serializable;
13 |
14 | public class OrderConfirmed implements Serializable {
15 | private static final long serialVersionUID = 1L;
16 |
17 | @JsonProperty("id")
18 | private BigDecimal id = null;
19 |
20 | @JsonProperty("status")
21 | private String status = null;
22 |
23 | @JsonProperty("currency")
24 | private String currency = null;
25 |
26 | @JsonProperty("customer")
27 | private Customer customer = null;
28 |
29 | @JsonProperty("items")
30 | private List- items = null;
31 |
32 | public OrderConfirmed id(BigDecimal id) {
33 | this.id = id;
34 | return this;
35 | }
36 |
37 |
38 | public BigDecimal getId() {
39 | return id;
40 | }
41 |
42 | public void setId(BigDecimal id) {
43 | this.id = id;
44 | }
45 |
46 | public OrderConfirmed status(String status) {
47 | this.status = status;
48 | return this;
49 | }
50 |
51 |
52 | public String getStatus() {
53 | return status;
54 | }
55 |
56 | public void setStatus(String status) {
57 | this.status = status;
58 | }
59 |
60 | public OrderConfirmed currency(String currency) {
61 | this.currency = currency;
62 | return this;
63 | }
64 |
65 |
66 | public String getCurrency() {
67 | return currency;
68 | }
69 |
70 | public void setCurrency(String currency) {
71 | this.currency = currency;
72 | }
73 |
74 | public OrderConfirmed customer(Customer customer) {
75 | this.customer = customer;
76 | return this;
77 | }
78 |
79 |
80 | public Customer getCustomer() {
81 | return customer;
82 | }
83 |
84 | public void setCustomer(Customer customer) {
85 | this.customer = customer;
86 | }
87 |
88 | public OrderConfirmed items(List<Item> items) {
89 | this.items = items;
90 | return this;
91 | }
92 | public OrderConfirmed addItemsItem(Item itemsItem) {
93 | if (this.items == null) {
94 | this.items = new ArrayList<Item>();
95 | }
96 | this.items.add(itemsItem);
97 | return this;
98 | }
99 |
100 | public List<Item> getItems() {
101 | return items;
102 | }
103 |
104 | public void setItems(List<Item> items) {
105 | this.items = items;
106 | }
107 |
108 | @Override
109 | public boolean equals(java.lang.Object o) {
110 | if (this == o) {
111 | return true;
112 | }
113 | if (o == null || getClass() != o.getClass()) {
114 | return false;
115 | }
116 | OrderConfirmed orderConfirmed = (OrderConfirmed) o;
117 | return Objects.equals(this.id, orderConfirmed.id) &&
118 | Objects.equals(this.status, orderConfirmed.status) &&
119 | Objects.equals(this.currency, orderConfirmed.currency) &&
120 | Objects.equals(this.customer, orderConfirmed.customer) &&
121 | Objects.equals(this.items, orderConfirmed.items);
122 | }
123 |
124 | @Override
125 | public int hashCode() {
126 | return java.util.Objects.hash(id, status, currency, customer, items);
127 | }
128 |
129 |
130 | @Override
131 | public String toString() {
132 | StringBuilder sb = new StringBuilder();
133 | sb.append("class OrderConfirmed {\n");
134 |
135 | sb.append(" id: ").append(toIndentedString(id)).append("\n");
136 | sb.append(" status: ").append(toIndentedString(status)).append("\n");
137 | sb.append(" currency: ").append(toIndentedString(currency)).append("\n");
138 | sb.append(" customer: ").append(toIndentedString(customer)).append("\n");
139 | sb.append(" items: ").append(toIndentedString(items)).append("\n");
140 | sb.append("}");
141 | return sb.toString();
142 | }
143 |
144 | private String toIndentedString(java.lang.Object o) {
145 | if (o == null) {
146 | return "null";
147 | }
148 | return o.toString().replace("\n", "\n ");
149 | }
150 |
151 | }
152 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/src/main/java/schema/edu_orders/orderconfirmed/marshaller/Marshaller.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed.marshaller;
2 |
3 | import com.fasterxml.jackson.databind.DeserializationFeature;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import com.fasterxml.jackson.databind.type.TypeFactory;
6 | import schema.edu_orders.orderconfirmed.AWSEvent;
7 |
8 | import java.io.IOException;
9 | import java.io.InputStream;
10 | import java.io.OutputStream;
11 |
12 | public class Marshaller {
13 |
14 | private static final ObjectMapper MAPPER = createObjectMapper();
15 |
16 | public static <T> void marshal(OutputStream output, T value) throws IOException {
17 | MAPPER.writeValue(output, value);
18 | }
19 |
20 | public static <T> T unmarshal(InputStream input, Class<T> type) throws IOException {
21 | return MAPPER.readValue(input, type);
22 | }
23 |
24 | public static <T> AWSEvent<T> unmarshalEvent(InputStream input, Class<T> type) throws IOException {
25 | final TypeFactory typeFactory = MAPPER.getTypeFactory();
26 | return MAPPER.readValue(input, typeFactory.constructParametricType(AWSEvent.class, type));
27 | }
28 |
29 | private static ObjectMapper createObjectMapper() {
30 | return new ObjectMapper()
31 | .findAndRegisterModules()
32 | .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/EmailSenderFunction/src/main/resources/log4j2.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <Configuration packages="com.amazonaws.services.lambda.runtime.log4j2">
 3 |   <Appenders>
 4 |     <Lambda name="Lambda">
 5 |       <PatternLayout>
 6 |         <pattern>%d{yyyy-MM-dd HH:mm:ss} %X{AWSRequestId} %-5p %c{1} - %m%n</pattern>
 7 |       </PatternLayout>
 8 |     </Lambda>
 9 |   </Appenders>
10 |   <Loggers>
11 |     <Root level="info">
12 |       <AppenderRef ref="Lambda"/>
13 |     </Root>
14 |   </Loggers>
15 | </Configuration>
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/events/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "id":"7bf73129-1428-4cd3-a780-95db273d1602",
3 | "detail-type":"Order Confirmed",
4 | "source":"edu.svc.orders",
5 | "account":"123456789012",
6 | "time":"2015-11-11T21:29:54Z",
7 | "region":"us-east-1",
8 | "resources":[
9 | "arn:aws:ec2:us-east-1:123456789012:instance/i-abcd1111"
10 | ],
11 | "detail":{
12 | "ADD-YOUR-FIELDS-HERE":""
13 | }
14 | }
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/email-service/template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Transform: AWS::Serverless-2016-10-31
3 | Description: >
4 | email-service
5 |
6 | Sample SAM Template for email-service
7 |
8 | # More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
9 | Globals:
10 | Function:
11 | Timeout: 20
12 |
13 | Resources:
14 | EmailSenderFunction:
15 | Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction
16 | Properties:
17 | CodeUri: EmailSenderFunction
18 | Handler: com.edu.eb.EmailSender::handleRequest
19 | Runtime: java8
20 | Environment: # More info about Env Vars: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#environment-object
21 | Variables:
22 | PARAM1: VALUE
23 | Events:
24 | OrderConfirmed:
25 | Type: CloudWatchEvent # More info about CloudWatchEvent Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#cloudwatchevent
26 | Properties:
27 | EventBusName: custom-event-bus # Set this if your events are not on the 'default' event bus
28 | Pattern:
29 | source:
30 | - edu.svc.orders
31 | detail-type:
32 | - Order Confirmed
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/.idea/aws.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/.idea/compiler.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/.idea/encodings.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/.idea/jarRepositories.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/.idea/misc.xml:
--------------------------------------------------------------------------------
[XML content lost in extraction: IntelliJ IDEA project configuration]
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/pom.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 4 |     <modelVersion>4.0.0</modelVersion>
 5 |
 6 |     <properties>
 7 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 8 |     </properties>
 9 |
10 |     <groupId>com.edu</groupId>
11 |     <artifactId>order-service</artifactId>
12 |     <packaging>jar</packaging>
13 |     <version>1.0-SNAPSHOT</version>
14 |     <name>order-service</name>
15 |
16 |     <dependencyManagement>
17 |         <dependencies>
18 |             <dependency>
19 |                 <groupId>software.amazon.awssdk</groupId>
20 |                 <artifactId>bom</artifactId>
21 |                 <version>2.15.14</version>
22 |                 <type>pom</type>
23 |                 <scope>import</scope>
24 |             </dependency>
25 |         </dependencies>
26 |     </dependencyManagement>
27 |
28 |     <dependencies>
29 |         <dependency>
30 |             <groupId>org.junit.jupiter</groupId>
31 |             <artifactId>junit-jupiter-api</artifactId>
32 |             <version>5.4.2</version>
33 |             <scope>test</scope>
34 |         </dependency>
35 |         <dependency>
36 |             <groupId>org.junit.jupiter</groupId>
37 |             <artifactId>junit-jupiter-engine</artifactId>
38 |             <version>5.4.2</version>
39 |             <scope>test</scope>
40 |         </dependency>
41 |         <dependency>
42 |             <groupId>org.junit.platform</groupId>
43 |             <artifactId>junit-platform-commons</artifactId>
44 |             <version>1.4.0</version>
45 |         </dependency>
46 |         <dependency>
47 |             <groupId>org.junit.platform</groupId>
48 |             <artifactId>junit-platform-launcher</artifactId>
49 |             <version>1.4.0</version>
50 |             <scope>test</scope>
51 |         </dependency>
52 |         <dependency>
53 |             <groupId>org.slf4j</groupId>
54 |             <artifactId>slf4j-log4j12</artifactId>
55 |             <version>1.7.25</version>
56 |         </dependency>
57 |         <dependency>
58 |             <groupId>software.amazon.awssdk</groupId>
59 |             <artifactId>eventbridge</artifactId>
60 |         </dependency>
61 |         <dependency>
62 |             <groupId>com.fasterxml.jackson.core</groupId>
63 |             <artifactId>jackson-databind</artifactId>
64 |             <version>2.10.0</version>
65 |         </dependency>
66 |         <dependency>
67 |             <groupId>com.fasterxml.jackson.core</groupId>
68 |             <artifactId>jackson-core</artifactId>
69 |             <version>2.10.0</version>
70 |         </dependency>
71 |         <dependency>
72 |             <groupId>com.fasterxml.jackson.core</groupId>
73 |             <artifactId>jackson-annotations</artifactId>
74 |             <version>2.10.0</version>
75 |         </dependency>
76 |     </dependencies>
77 |
78 |     <build>
79 |         <plugins>
80 |             <plugin>
81 |                 <groupId>org.apache.maven.plugins</groupId>
82 |                 <artifactId>maven-compiler-plugin</artifactId>
83 |                 <version>3.8.1</version>
84 |                 <configuration>
85 |                     <source>1.8</source>
86 |                     <target>1.8</target>
87 |                 </configuration>
88 |             </plugin>
89 |             <plugin>
90 |                 <groupId>org.apache.maven.plugins</groupId>
91 |                 <artifactId>maven-surefire-plugin</artifactId>
92 |                 <version>2.22.1</version>
93 |             </plugin>
94 |         </plugins>
95 |     </build>
96 | </project>
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/src/main/java/com/edu/eb/EventProducer.java:
--------------------------------------------------------------------------------
1 | package com.edu.eb;
2 |
3 | import schema.edu_orders.orderconfirmed.AWSEvent;
4 | import schema.edu_orders.orderconfirmed.Customer;
5 | import schema.edu_orders.orderconfirmed.Item;
6 | import schema.edu_orders.orderconfirmed.OrderConfirmed;
7 | import schema.edu_orders.orderconfirmed.marshaller.Marshaller;
8 | import software.amazon.awssdk.regions.Region;
9 | import software.amazon.awssdk.services.eventbridge.EventBridgeClient;
10 | import software.amazon.awssdk.services.eventbridge.model.*;
11 |
12 | import java.io.*;
13 | import java.math.BigDecimal;
14 | import java.util.ArrayList;
15 | import java.util.List;
16 |
17 | public class EventProducer {
18 |
19 | public static void main(String[] args) {
20 | Region region = Region.US_EAST_1;
21 | EventBridgeClient eventBrClient = EventBridgeClient.builder()
22 | .region(region)
23 | .build();
24 |
25 | try {
26 | String detailString = null;
27 | OrderConfirmed event = populateDummyEvent();
28 |
29 | try {
30 | detailString = Marshaller.marshal(event);
31 | } catch (IOException e) {
32 | //Failed to serialise the event as a JSON formatted string. Let's quit.
33 | e.printStackTrace();
34 | System.exit(1);
35 | }
36 |
37 | PutEventsRequestEntry reqEntry = PutEventsRequestEntry.builder()
38 | .source("edu.svc.orders")
39 | .detailType("Order Confirmed")
40 | .detail(detailString)
41 | .eventBusName("custom-event-bus")
42 | .build();
43 |
44 | // Collect the entries into a list; a single PutEvents request can carry several events
45 | List<PutEventsRequestEntry> list = new ArrayList<>();
46 | list.add(reqEntry);
47 |
48 | PutEventsRequest eventsRequest = PutEventsRequest.builder()
49 | .entries(list)
50 | .build();
51 |
52 | PutEventsResponse result = eventBrClient.putEvents(eventsRequest);
53 |
54 | for (PutEventsResultEntry resultEntry : result.entries()) {
55 | if (resultEntry.eventId() != null) {
56 | System.out.println("Event Id: " + resultEntry.eventId());
57 | } else {
58 | System.out.println("Injection failed with Error Code: " + resultEntry.errorCode());
59 | }
60 | }
61 |
62 | } catch (EventBridgeException e) {
63 | System.err.println(e.awsErrorDetails().errorMessage());
64 | System.exit(1);
65 | }
66 | eventBrClient.close();
67 | }
68 |
69 | /**
70 | * Populates a dummy event of type {@link OrderConfirmed}.
71 | *
72 | * @return an OrderConfirmed event filled with sample data
73 | */
74 | private static OrderConfirmed populateDummyEvent() {
75 | OrderConfirmed confirmation = new OrderConfirmed();
76 | confirmation.setId(BigDecimal.valueOf(123456789));
77 | confirmation.setStatus("CONFIRMED");
78 | confirmation.setCurrency("USD");
79 |
80 | Customer customer = new Customer();
81 | customer.setFirstName("John");
82 | customer.setLastName("Doe");
83 | customer.setEmail("aa@bb.cc");
84 | confirmation.setCustomer(customer);
85 |
86 | Item item = new Item();
87 | item.setSku(BigDecimal.valueOf(1));
88 | item.setName("Foo");
89 | item.setPrice(12.99);
90 | item.setQuantity(BigDecimal.valueOf(1));
91 |
92 | confirmation.addItemsItem(item);
93 |
94 | return confirmation;
95 | }
96 | }
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/src/main/java/schema/edu_orders/orderconfirmed/AWSEvent.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.util.ArrayList;
8 | import java.util.Date;
9 | import java.util.List;
10 | import schema.edu_orders.orderconfirmed.OrderConfirmed;
11 | import java.io.Serializable;
12 |
13 | public class AWSEvent<T> {
14 |
15 | @JsonProperty("detail")
16 | private T detail = null;
17 |
18 | @JsonProperty("detail-type")
19 | private String detailType = null;
20 |
21 | @JsonProperty("resources")
22 | private List<String> resources = null;
23 |
24 | @JsonProperty("id")
25 | private String id = null;
26 |
27 | @JsonProperty("source")
28 | private String source = null;
29 |
30 | @JsonProperty("time")
31 | private Date time = null;
32 |
33 | @JsonProperty("region")
34 | private String region = null;
35 |
36 | @JsonProperty("version")
37 | private String version = null;
38 |
39 | @JsonProperty("account")
40 | private String account = null;
41 |
42 | public AWSEvent<T> detail(T detail) {
43 | this.detail = detail;
44 | return this;
45 | }
46 |
47 | public T getDetail() {
48 | return detail;
49 | }
50 |
51 | public void setDetail(T detail) {
52 | this.detail = detail;
53 | }
54 |
55 | public AWSEvent<T> detailType(String detailType) {
56 | this.detailType = detailType;
57 | return this;
58 | }
59 |
60 | public String getDetailType() {
61 | return detailType;
62 | }
63 |
64 | public void setDetailType(String detailType) {
65 | this.detailType = detailType;
66 | }
67 |
68 | public AWSEvent<T> resources(List<String> resources) {
69 | this.resources = resources;
70 | return this;
71 | }
72 |
73 | public List<String> getResources() {
74 | return resources;
75 | }
76 |
77 | public void setResources(List<String> resources) {
78 | this.resources = resources;
79 | }
80 |
81 | public AWSEvent<T> id(String id) {
82 | this.id = id;
83 | return this;
84 | }
85 |
86 | public String getId() {
87 | return id;
88 | }
89 |
90 | public void setId(String id) {
91 | this.id = id;
92 | }
93 |
94 | public AWSEvent<T> source(String source) {
95 | this.source = source;
96 | return this;
97 | }
98 |
99 | public String getSource() {
100 | return source;
101 | }
102 |
103 | public void setSource(String source) {
104 | this.source = source;
105 | }
106 |
107 | public AWSEvent<T> time(Date time) {
108 | this.time = time;
109 | return this;
110 | }
111 |
112 | public Date getTime() {
113 | return time;
114 | }
115 |
116 | public void setTime(Date time) {
117 | this.time = time;
118 | }
119 |
120 | public AWSEvent<T> region(String region) {
121 | this.region = region;
122 | return this;
123 | }
124 |
125 | public String getRegion() {
126 | return region;
127 | }
128 |
129 | public void setRegion(String region) {
130 | this.region = region;
131 | }
132 |
133 | public AWSEvent<T> version(String version) {
134 | this.version = version;
135 | return this;
136 | }
137 |
138 | public String getVersion() {
139 | return version;
140 | }
141 |
142 | public void setVersion(String version) {
143 | this.version = version;
144 | }
145 |
146 | public AWSEvent<T> account(String account) {
147 | this.account = account;
148 | return this;
149 | }
150 |
151 | public String getAccount() {
152 | return account;
153 | }
154 |
155 | public void setAccount(String account) {
156 | this.account = account;
157 | }
158 |
159 | @Override
160 | public boolean equals(Object o) {
161 | if (this == o) {
162 | return true;
163 | }
164 | if (o == null || getClass() != o.getClass()) {
165 | return false;
166 | }
167 | AWSEvent awSEvent = (AWSEvent) o;
168 | return Objects.equals(this.detail, awSEvent.detail) &&
169 | Objects.equals(this.detailType, awSEvent.detailType) &&
170 | Objects.equals(this.resources, awSEvent.resources) &&
171 | Objects.equals(this.id, awSEvent.id) &&
172 | Objects.equals(this.source, awSEvent.source) &&
173 | Objects.equals(this.time, awSEvent.time) &&
174 | Objects.equals(this.region, awSEvent.region) &&
175 | Objects.equals(this.version, awSEvent.version) &&
176 | Objects.equals(this.account, awSEvent.account);
177 | }
178 |
179 | @Override
180 | public int hashCode() {
181 | return Objects.hash(detail, detailType, resources, id, source, time, region, version, account);
182 | }
183 |
184 |
185 | @Override
186 | public String toString() {
187 | StringBuilder sb = new StringBuilder();
188 | sb.append("class AWSEvent {\n");
189 |
190 | sb.append(" detail: ").append(toIndentedString(detail)).append("\n");
191 | sb.append(" detailType: ").append(toIndentedString(detailType)).append("\n");
192 | sb.append(" resources: ").append(toIndentedString(resources)).append("\n");
193 | sb.append(" id: ").append(toIndentedString(id)).append("\n");
194 | sb.append(" source: ").append(toIndentedString(source)).append("\n");
195 | sb.append(" time: ").append(toIndentedString(time)).append("\n");
196 | sb.append(" region: ").append(toIndentedString(region)).append("\n");
197 | sb.append(" version: ").append(toIndentedString(version)).append("\n");
198 | sb.append(" account: ").append(toIndentedString(account)).append("\n");
199 | sb.append("}");
200 | return sb.toString();
201 | }
202 |
203 | private String toIndentedString(Object o) {
204 | if (o == null) {
205 | return "null";
206 | }
207 | return o.toString().replace("\n", "\n ");
208 | }
209 | }
210 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/src/main/java/schema/edu_orders/orderconfirmed/Customer.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.io.Serializable;
8 |
9 | public class Customer implements Serializable {
10 | private static final long serialVersionUID = 1L;
11 |
12 | @JsonProperty("firstName")
13 | private String firstName = null;
14 |
15 | @JsonProperty("lastName")
16 | private String lastName = null;
17 |
18 | @JsonProperty("email")
19 | private String email = null;
20 |
21 | public Customer firstName(String firstName) {
22 | this.firstName = firstName;
23 | return this;
24 | }
25 |
26 |
27 | public String getFirstName() {
28 | return firstName;
29 | }
30 |
31 | public void setFirstName(String firstName) {
32 | this.firstName = firstName;
33 | }
34 |
35 | public Customer lastName(String lastName) {
36 | this.lastName = lastName;
37 | return this;
38 | }
39 |
40 |
41 | public String getLastName() {
42 | return lastName;
43 | }
44 |
45 | public void setLastName(String lastName) {
46 | this.lastName = lastName;
47 | }
48 |
49 | public Customer email(String email) {
50 | this.email = email;
51 | return this;
52 | }
53 |
54 |
55 | public String getEmail() {
56 | return email;
57 | }
58 |
59 | public void setEmail(String email) {
60 | this.email = email;
61 | }
62 |
63 | @Override
64 | public boolean equals(Object o) {
65 | if (this == o) {
66 | return true;
67 | }
68 | if (o == null || getClass() != o.getClass()) {
69 | return false;
70 | }
71 | Customer customer = (Customer) o;
72 | return Objects.equals(this.firstName, customer.firstName) &&
73 | Objects.equals(this.lastName, customer.lastName) &&
74 | Objects.equals(this.email, customer.email);
75 | }
76 |
77 | @Override
78 | public int hashCode() {
79 | return Objects.hash(firstName, lastName, email);
80 | }
81 |
82 |
83 | @Override
84 | public String toString() {
85 | StringBuilder sb = new StringBuilder();
86 | sb.append("class Customer {\n");
87 |
88 | sb.append(" firstName: ").append(toIndentedString(firstName)).append("\n");
89 | sb.append(" lastName: ").append(toIndentedString(lastName)).append("\n");
90 | sb.append(" email: ").append(toIndentedString(email)).append("\n");
91 | sb.append("}");
92 | return sb.toString();
93 | }
94 |
95 | private String toIndentedString(Object o) {
96 | if (o == null) {
97 | return "null";
98 | }
99 | return o.toString().replace("\n", "\n ");
100 | }
101 |
102 | }
103 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/src/main/java/schema/edu_orders/orderconfirmed/Item.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.math.BigDecimal;
8 | import java.io.Serializable;
9 |
10 | public class Item implements Serializable {
11 | private static final long serialVersionUID = 1L;
12 |
13 | @JsonProperty("sku")
14 | private BigDecimal sku = null;
15 |
16 | @JsonProperty("name")
17 | private String name = null;
18 |
19 | @JsonProperty("price")
20 | private Double price = null;
21 |
22 | @JsonProperty("quantity")
23 | private BigDecimal quantity = null;
24 |
25 | public Item sku(BigDecimal sku) {
26 | this.sku = sku;
27 | return this;
28 | }
29 |
30 |
31 | public BigDecimal getSku() {
32 | return sku;
33 | }
34 |
35 | public void setSku(BigDecimal sku) {
36 | this.sku = sku;
37 | }
38 |
39 | public Item name(String name) {
40 | this.name = name;
41 | return this;
42 | }
43 |
44 |
45 | public String getName() {
46 | return name;
47 | }
48 |
49 | public void setName(String name) {
50 | this.name = name;
51 | }
52 |
53 | public Item price(Double price) {
54 | this.price = price;
55 | return this;
56 | }
57 |
58 |
59 | public Double getPrice() {
60 | return price;
61 | }
62 |
63 | public void setPrice(Double price) {
64 | this.price = price;
65 | }
66 |
67 | public Item quantity(BigDecimal quantity) {
68 | this.quantity = quantity;
69 | return this;
70 | }
71 |
72 |
73 | public BigDecimal getQuantity() {
74 | return quantity;
75 | }
76 |
77 | public void setQuantity(BigDecimal quantity) {
78 | this.quantity = quantity;
79 | }
80 |
81 | @Override
82 | public boolean equals(Object o) {
83 | if (this == o) {
84 | return true;
85 | }
86 | if (o == null || getClass() != o.getClass()) {
87 | return false;
88 | }
89 | Item item = (Item) o;
90 | return Objects.equals(this.sku, item.sku) &&
91 | Objects.equals(this.name, item.name) &&
92 | Objects.equals(this.price, item.price) &&
93 | Objects.equals(this.quantity, item.quantity);
94 | }
95 |
96 | @Override
97 | public int hashCode() {
98 | return Objects.hash(sku, name, price, quantity);
99 | }
100 |
101 |
102 | @Override
103 | public String toString() {
104 | StringBuilder sb = new StringBuilder();
105 | sb.append("class Item {\n");
106 |
107 | sb.append(" sku: ").append(toIndentedString(sku)).append("\n");
108 | sb.append(" name: ").append(toIndentedString(name)).append("\n");
109 | sb.append(" price: ").append(toIndentedString(price)).append("\n");
110 | sb.append(" quantity: ").append(toIndentedString(quantity)).append("\n");
111 | sb.append("}");
112 | return sb.toString();
113 | }
114 |
115 | private String toIndentedString(Object o) {
116 | if (o == null) {
117 | return "null";
118 | }
119 | return o.toString().replace("\n", "\n ");
120 | }
121 |
122 | }
123 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/src/main/java/schema/edu_orders/orderconfirmed/OrderConfirmed.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed;
2 |
3 | import java.util.Objects;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import com.fasterxml.jackson.annotation.JsonCreator;
6 | import com.fasterxml.jackson.annotation.JsonValue;
7 | import java.math.BigDecimal;
8 | import java.util.ArrayList;
9 | import java.util.List;
10 | import schema.edu_orders.orderconfirmed.Customer;
11 | import schema.edu_orders.orderconfirmed.Item;
12 | import java.io.Serializable;
13 |
14 | public class OrderConfirmed implements Serializable {
15 | private static final long serialVersionUID = 1L;
16 |
17 | @JsonProperty("id")
18 | private BigDecimal id = null;
19 |
20 | @JsonProperty("status")
21 | private String status = null;
22 |
23 | @JsonProperty("currency")
24 | private String currency = null;
25 |
26 | @JsonProperty("customer")
27 | private Customer customer = null;
28 |
29 | @JsonProperty("items")
30 | private List<Item> items = null;
31 |
32 | public OrderConfirmed id(BigDecimal id) {
33 | this.id = id;
34 | return this;
35 | }
36 |
37 |
38 | public BigDecimal getId() {
39 | return id;
40 | }
41 |
42 | public void setId(BigDecimal id) {
43 | this.id = id;
44 | }
45 |
46 | public OrderConfirmed status(String status) {
47 | this.status = status;
48 | return this;
49 | }
50 |
51 |
52 | public String getStatus() {
53 | return status;
54 | }
55 |
56 | public void setStatus(String status) {
57 | this.status = status;
58 | }
59 |
60 | public OrderConfirmed currency(String currency) {
61 | this.currency = currency;
62 | return this;
63 | }
64 |
65 |
66 | public String getCurrency() {
67 | return currency;
68 | }
69 |
70 | public void setCurrency(String currency) {
71 | this.currency = currency;
72 | }
73 |
74 | public OrderConfirmed customer(Customer customer) {
75 | this.customer = customer;
76 | return this;
77 | }
78 |
79 |
80 | public Customer getCustomer() {
81 | return customer;
82 | }
83 |
84 | public void setCustomer(Customer customer) {
85 | this.customer = customer;
86 | }
87 |
88 | public OrderConfirmed items(List<Item> items) {
89 | this.items = items;
90 | return this;
91 | }
92 | public OrderConfirmed addItemsItem(Item itemsItem) {
93 | if (this.items == null) {
94 | this.items = new ArrayList<Item>();
95 | }
96 | this.items.add(itemsItem);
97 | return this;
98 | }
99 |
100 | public List<Item> getItems() {
101 | return items;
102 | }
103 |
104 | public void setItems(List<Item> items) {
105 | this.items = items;
106 | }
107 |
108 | @Override
109 | public boolean equals(Object o) {
110 | if (this == o) {
111 | return true;
112 | }
113 | if (o == null || getClass() != o.getClass()) {
114 | return false;
115 | }
116 | OrderConfirmed orderConfirmed = (OrderConfirmed) o;
117 | return Objects.equals(this.id, orderConfirmed.id) &&
118 | Objects.equals(this.status, orderConfirmed.status) &&
119 | Objects.equals(this.currency, orderConfirmed.currency) &&
120 | Objects.equals(this.customer, orderConfirmed.customer) &&
121 | Objects.equals(this.items, orderConfirmed.items);
122 | }
123 |
124 | @Override
125 | public int hashCode() {
126 | return Objects.hash(id, status, currency, customer, items);
127 | }
128 |
129 |
130 | @Override
131 | public String toString() {
132 | StringBuilder sb = new StringBuilder();
133 | sb.append("class OrderConfirmed {\n");
134 |
135 | sb.append(" id: ").append(toIndentedString(id)).append("\n");
136 | sb.append(" status: ").append(toIndentedString(status)).append("\n");
137 | sb.append(" currency: ").append(toIndentedString(currency)).append("\n");
138 | sb.append(" customer: ").append(toIndentedString(customer)).append("\n");
139 | sb.append(" items: ").append(toIndentedString(items)).append("\n");
140 | sb.append("}");
141 | return sb.toString();
142 | }
143 |
144 | private String toIndentedString(Object o) {
145 | if (o == null) {
146 | return "null";
147 | }
148 | return o.toString().replace("\n", "\n ");
149 | }
150 |
151 | }
152 |
--------------------------------------------------------------------------------
/aws-eventbridge-custom-events/order-service/src/main/java/schema/edu_orders/orderconfirmed/marshaller/Marshaller.java:
--------------------------------------------------------------------------------
1 | package schema.edu_orders.orderconfirmed.marshaller;
2 |
3 | import com.fasterxml.jackson.databind.DeserializationFeature;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import com.fasterxml.jackson.databind.type.TypeFactory;
6 | import schema.edu_orders.orderconfirmed.AWSEvent;
7 |
8 | import java.io.IOException;
9 | import java.io.InputStream;
10 | import java.io.OutputStream;
11 |
12 | public class Marshaller {
13 |
14 | private static final ObjectMapper MAPPER = createObjectMapper();
15 |
16 | public static <T> void marshal(OutputStream output, T value) throws IOException {
17 | MAPPER.writeValue(output, value);
18 | }
19 |
20 | public static <T> String marshal(T value) throws IOException {
21 | return MAPPER.writeValueAsString(value);
22 | }
23 |
24 | public static <T> T unmarshal(InputStream input, Class<T> type) throws IOException {
25 | return MAPPER.readValue(input, type);
26 | }
27 |
28 | public static <T> AWSEvent<T> unmarshalEvent(InputStream input, Class<T> type) throws IOException {
29 | final TypeFactory typeFactory = MAPPER.getTypeFactory();
30 | return MAPPER.readValue(input, typeFactory.constructParametricType(AWSEvent.class, type));
31 | }
32 |
33 | private static ObjectMapper createObjectMapper() {
34 | return new ObjectMapper()
35 | .findAndRegisterModules()
36 | .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
37 | }
38 | }
39 |
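
The binding classes above are plain Jackson POJOs, so publishing boils down to building an `OrderConfirmed` detail, wrapping it in an `AWSEvent`, and letting `Marshaller` produce the JSON. A minimal round-trip sketch, assuming the generated `AWSEvent` exposes the same fluent setters (`detail`, `detailType`, `source`) and getters as the other classes in this package:

```java
import java.io.ByteArrayInputStream;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import schema.edu_orders.orderconfirmed.AWSEvent;
import schema.edu_orders.orderconfirmed.Customer;
import schema.edu_orders.orderconfirmed.Item;
import schema.edu_orders.orderconfirmed.OrderConfirmed;
import schema.edu_orders.orderconfirmed.marshaller.Marshaller;

public class MarshallerRoundTrip {
    public static void main(String[] args) throws Exception {
        OrderConfirmed detail = new OrderConfirmed()
                .id(BigDecimal.valueOf(12345))
                .status("CONFIRMED")
                .currency("USD")
                .customer(new Customer()
                        .firstName("Jane").lastName("Doe").email("jane@example.com"))
                .addItemsItem(new Item()
                        .sku(BigDecimal.valueOf(42)).name("Pizza")
                        .price(9.99).quantity(BigDecimal.ONE));

        AWSEvent<OrderConfirmed> event = new AWSEvent<OrderConfirmed>()
                .detail(detail)
                .detailType("OrderConfirmed")
                .source("edu.orders");

        // Serialize to a JSON string...
        String json = Marshaller.marshal(event);

        // ...and parse it back with the typed detail payload intact.
        AWSEvent<OrderConfirmed> parsed = Marshaller.unmarshalEvent(
                new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)),
                OrderConfirmed.class);
        System.out.println(parsed.getDetail().getStatus()); // CONFIRMED
    }
}
```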
--------------------------------------------------------------------------------
/cqrs-views/README.md:
--------------------------------------------------------------------------------
1 | ## Start the Docker stack
2 |
3 | ```
4 | docker compose up -d
5 | ```
6 |
7 | ## Create the Debezium connector for MySQL
8 |
9 | ```
10 | curl -H 'Content-Type: application/json' localhost:8083/connectors --data '
11 | {
12 | "name": "orders-connector",
13 | "config": {
14 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
15 | "tasks.max": "1",
16 | "database.hostname": "mysql",
17 | "database.port": "3306",
18 | "database.user": "debezium",
19 | "database.password": "dbz",
20 | "database.server.id": "184054",
21 | "database.server.name": "mysql",
22 | "database.include.list": "pizzashop",
23 | "database.history.kafka.bootstrap.servers": "kafka:9092",
24 | "database.history.kafka.topic": "mysql-history"
25 | }
26 | }'
27 | ```
28 |
29 | ## Publish sample order update events
30 |
31 | ```
32 | kcat -b localhost:29092 -t order_updates -T -P -l data/updates.txt
33 | ```
34 |
35 | ## Connect to Materialize running in Docker Compose
36 |
37 | ```
38 | psql -U materialize -h localhost -p 6875 materialize
39 | ```
40 |
41 | ### Define sources and views
42 | 
43 | ```sql
44 | CREATE SOURCE orders
45 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'mysql.pizzashop.orders'
46 | FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081' ENVELOPE DEBEZIUM;
47 | 
48 | CREATE SOURCE items
49 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'mysql.pizzashop.order_items'
50 | FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081' ENVELOPE DEBEZIUM;
51 | 
52 | CREATE SOURCE updates_source
53 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'order_updates'
54 | FORMAT BYTES;
55 | 
56 | CREATE MATERIALIZED VIEW updates AS
57 | SELECT
58 | (data->>'id')::int AS id,
59 | (data->>'order_id')::int AS order_id,
60 | data->>'status' AS status,
61 | data->>'updated_at' AS updated_at
62 | FROM (SELECT CONVERT_FROM(data, 'utf8')::jsonb AS data FROM updates_source);
63 | 
64 | CREATE MATERIALIZED VIEW order_summary AS
65 | SELECT
66 | orders.order_id AS order_id,
67 | orders.total AS total,
68 | orders.created_at AS created_at,
69 | array_agg(DISTINCT concat(items.name, '|', items.quantity)) AS items,
70 | array_agg(DISTINCT concat(updates.status, '|', updates.updated_at)) AS status
71 | FROM orders
72 | JOIN items ON orders.order_id = items.order_id
73 | JOIN updates ON orders.order_id = updates.order_id
74 | GROUP BY orders.order_id, orders.created_at, orders.total;
75 | 
76 | CREATE SINK results
77 | FROM order_summary
78 | INTO KAFKA BROKER 'kafka:9092' TOPIC 'orders_enriched'
79 | CONSISTENCY TOPIC 'orders_enriched-consistency'
80 | CONSISTENCY FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081'
81 | WITH (reuse_topic=true)
82 | FORMAT JSON;
83 | ```
84 | ## Create the Pinot schema and table for orders
85 |
86 | ```
87 | docker exec -it pinot-controller /opt/pinot/bin/pinot-admin.sh AddTable \
88 | -tableConfigFile /config/orders_table.json \
89 | -schemaFile /config/orders_schema.json -exec
90 | ```
91 |
92 | ## Produce the following event to test a new order status update
93 |
94 | ```json
95 | {"id":"4","order_id":1,"status":"READY","updated_at":1453535345}
96 | ```
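
To produce that event from Java instead of `kcat`, a minimal sketch (assumes `org.apache.kafka:kafka-clients` on the classpath; `localhost:29092` is the host listener exposed in `docker-compose.yml`):

```java
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class OrderUpdateProducer {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:29092"); // host listener from docker-compose.yml
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());

        String event = "{\"id\":\"4\",\"order_id\":1,\"status\":\"READY\",\"updated_at\":1453535345}";
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // The key is optional here; the Materialize source only reads the JSON value.
            producer.send(new ProducerRecord<>("order_updates", "4", event));
        }
    }
}
```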
--------------------------------------------------------------------------------
/cqrs-views/config/orders_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "schemaName": "orders",
3 | "primaryKeyColumns": [
4 | "order_id"
5 | ],
6 | "dimensionFieldSpecs": [
7 | {
8 | "name": "order_id",
9 | "dataType": "LONG"
10 | },
11 | {
12 | "name": "items",
13 | "dataType": "STRING",
14 | "singleValueField": false
15 | },
16 | {
17 | "name": "status",
18 | "dataType": "STRING",
19 | "singleValueField": false
20 | }
21 | ],
22 | "metricFieldSpecs": [
23 | {
24 | "name": "total",
25 | "dataType": "DOUBLE"
26 | }
27 | ],
28 | "dateTimeFieldSpecs": [
29 | {
30 | "name": "created_at",
31 | "dataType": "LONG",
32 | "format": "1:MILLISECONDS:EPOCH",
33 | "granularity": "1:MILLISECONDS"
34 | }
35 | ]
36 | }
--------------------------------------------------------------------------------
/cqrs-views/config/orders_table.json:
--------------------------------------------------------------------------------
1 | {
2 | "tableName": "orders",
3 | "tableType": "REALTIME",
4 | "segmentsConfig": {
5 | "timeColumnName": "created_at",
6 | "timeType": "MILLISECONDS",
7 | "retentionTimeUnit": "DAYS",
8 | "retentionTimeValue": "1",
9 | "segmentPushType": "APPEND",
10 | "segmentAssignmentStrategy": "BalanceNumSegmentAssignmentStrategy",
11 | "schemaName": "orders",
12 | "replicasPerPartition": "1"
13 | },
14 | "tenants": {},
15 | "tableIndexConfig": {
16 | "loadMode": "MMAP",
17 | "streamConfigs": {
18 | "streamType": "kafka",
19 | "stream.kafka.consumer.type": "lowLevel",
20 | "stream.kafka.topic.name": "orders_enriched-u8-1660198862-1311409326155391595",
21 | "stream.kafka.decoder.class.name": "org.apache.pinot.plugin.stream.kafka.KafkaJSONMessageDecoder",
22 | "stream.kafka.hlc.zk.connect.string": "zookeeper:2181/kafka",
23 | "stream.kafka.consumer.factory.class.name": "org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory",
24 | "stream.kafka.consumer.prop.auto.offset.reset": "smallest",
25 | "stream.kafka.zk.broker.url": "zookeeper:2181/kafka",
26 | "stream.kafka.broker.list": "kafka:9092",
27 | "realtime.segment.flush.threshold.size": 30,
28 | "realtime.segment.flush.threshold.rows": 30
29 | },
30 | "nullHandlingEnabled": true
31 | },
32 | "ingestionConfig" : {
33 | "transformConfigs":[
34 | {
35 | "columnName":"items",
36 | "transformFunction":"jsonPathArray(after, '$.row.items')"
37 | },
38 | {
39 | "columnName":"status",
40 | "transformFunction":"jsonPathArray(after, '$.row.status')"
41 | },
42 | {
43 | "columnName":"order_id",
44 | "transformFunction":"JSONPATHLONG(after, '$.row.order_id')"
45 | },
46 | {
47 | "columnName":"total",
48 | "transformFunction":"JSONPATHDOUBLE(after, '$.row.total')"
49 | },
50 | {
51 | "columnName":"created_at",
52 | "transformFunction":"JSONPATHLONG(after, '$.row.created_at')"
53 | }
54 | ]
55 | },
56 | "metadata": {
57 | "customConfigs": {}
58 | },
59 | "upsertConfig": {
60 | "mode": "FULL"
61 | },
62 | "routing": {
63 | "instanceSelectorType": "strictReplicaGroup"
64 | }
65 | }
--------------------------------------------------------------------------------
/cqrs-views/data/updates.txt:
--------------------------------------------------------------------------------
1 | {"id":"1","order_id":1,"status":"CREATED","updated_at":1453535342}
2 | {"id":"2","order_id":1,"status":"PROCESSING","updated_at":1453535345}
3 | {"id":"4","order_id":2,"status":"CREATED","updated_at":1453535358}
--------------------------------------------------------------------------------
/cqrs-views/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 | services:
3 | mysql:
4 | image: mysql/mysql-server:8.0.27
5 | hostname: mysql
6 | container_name: mysql
7 | ports:
8 | - 3306:3306
9 | environment:
10 | - MYSQL_ROOT_PASSWORD=debezium
11 | - MYSQL_USER=mysqluser
12 | - MYSQL_PASSWORD=mysqlpw
13 | volumes:
14 | - ${PWD}/mysql/mysql.cnf:/etc/mysql/conf.d/mysql.cnf
15 | - ${PWD}/mysql/mysql_bootstrap.sql:/docker-entrypoint-initdb.d/mysql_bootstrap.sql
16 | healthcheck: {test: mysql -pdebezium -e 'select 1', interval: 1s, start_period: 60s}
17 | zookeeper:
18 | image: confluentinc/cp-zookeeper:7.1.0
19 | hostname: zookeeper
20 | container_name: zookeeper
21 | ports:
22 | - "2181:2181"
23 | environment:
24 | ZOOKEEPER_CLIENT_PORT: 2181
25 | ZOOKEEPER_TICK_TIME: 2000
26 | kafka:
27 | image: confluentinc/cp-kafka:7.1.0
28 | hostname: kafka
29 | container_name: kafka
30 | ports:
31 | - "29092:29092"
32 | environment:
33 | KAFKA_BROKER_ID: 1
34 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
35 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
36 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092
37 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
38 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
39 | KAFKA_TOOLS_LOG4J_LOGLEVEL: ERROR
40 | depends_on:
41 | [zookeeper]
42 | healthcheck: {test: nc -z localhost 9092, interval: 1s, start_period: 120s}
43 | schema-registry:
44 | image: confluentinc/cp-schema-registry:5.5.3
45 | environment:
46 | - SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL=zookeeper:2181
47 | - SCHEMA_REGISTRY_HOST_NAME=schema-registry
48 | - SCHEMA_REGISTRY_LISTENERS=http://schema-registry:8081,http://localhost:8081
49 | depends_on: [zookeeper, kafka]
50 | debezium:
51 | image: debezium/connect:1.8
52 | hostname: debezium
53 | container_name: debezium
54 | environment:
55 | BOOTSTRAP_SERVERS: kafka:9092
56 | GROUP_ID: 1
57 | CONFIG_STORAGE_TOPIC: connect_configs
58 | OFFSET_STORAGE_TOPIC: connect_offsets
59 | KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
60 | VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
61 | CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
62 | CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
63 | healthcheck: {test: curl -f localhost:8083, interval: 1s, start_period: 120s}
64 | depends_on:
65 | kafka: {condition: service_healthy}
66 | mysql: {condition: service_healthy}
67 | ports:
68 | - 8083:8083
69 | materialized:
70 | image: materialize/materialized:v0.26.4
71 | ports:
72 | - 6875:6875
73 | healthcheck: {test: curl -f localhost:6875, interval: 1s, start_period: 30s}
74 | pinot-controller:
75 | image: apachepinot/pinot:0.10.0
76 | command: "StartController -zkAddress zookeeper:2181"
77 | container_name: "pinot-controller"
78 | volumes:
79 | - ./config:/config
80 | restart: unless-stopped
81 | ports:
82 | - "9000:9000"
83 | depends_on:
84 | - zookeeper
85 | pinot-broker:
86 | image: apachepinot/pinot:0.10.0
87 | command: "StartBroker -zkAddress zookeeper:2181"
88 | restart: unless-stopped
89 | container_name: "pinot-broker"
90 | ports:
91 | - "8099:8099"
92 | depends_on:
93 | - pinot-controller
94 | pinot-server:
95 | image: apachepinot/pinot:0.10.0
96 | container_name: "pinot-server"
97 | command: "StartServer -zkAddress zookeeper:2181"
98 | restart: unless-stopped
99 | depends_on:
100 | - pinot-broker
--------------------------------------------------------------------------------
/cqrs-views/mysql/mysql.cnf:
--------------------------------------------------------------------------------
1 | [mysqld]
2 | server-id = 223344
3 | log_bin = mysql-bin
4 | expire_logs_days = 1
5 | binlog_format = row
--------------------------------------------------------------------------------
/cqrs-views/mysql/mysql_bootstrap.sql:
--------------------------------------------------------------------------------
1 | CREATE DATABASE IF NOT EXISTS pizzashop;
2 | USE pizzashop;
3 |
4 | GRANT ALL PRIVILEGES ON pizzashop.* TO 'mysqluser';
5 |
6 | CREATE USER 'debezium' IDENTIFIED WITH mysql_native_password BY 'dbz';
7 |
8 | GRANT SELECT, RELOAD, SHOW DATABASES, REPLICATION SLAVE, REPLICATION CLIENT ON *.* TO 'debezium';
9 |
10 | FLUSH PRIVILEGES;
11 |
12 | CREATE TABLE IF NOT EXISTS pizzashop.order_items
13 | (
14 | id SERIAL PRIMARY KEY,
15 | order_id BIGINT UNSIGNED REFERENCES orders(order_id),
16 | name VARCHAR(255),
17 | quantity INT DEFAULT 1
18 | );
19 |
20 | CREATE TABLE IF NOT EXISTS pizzashop.orders
21 | (
22 | order_id SERIAL PRIMARY KEY,
23 | user_id BIGINT UNSIGNED,
24 | total FLOAT,
25 | created_at DATETIME DEFAULT NOW()
26 | );
27 |
28 | INSERT INTO pizzashop.orders (user_id, total) VALUES (100, 50.00);
29 | INSERT INTO pizzashop.orders (user_id, total) VALUES (101, 149.95);
30 |
31 | INSERT INTO pizzashop.order_items (order_id, name, quantity) VALUES (1, 'Sri Lankan Spicy Chicken Pizza', 1);
32 | INSERT INTO pizzashop.order_items (order_id, name, quantity) VALUES (1, 'Chicken BBQ', 1);
33 | INSERT INTO pizzashop.order_items (order_id, name, quantity) VALUES (2, 'Macaroni & Cheese', 1);
34 | INSERT INTO pizzashop.order_items (order_id, name, quantity) VALUES (2, 'Cheesy Garlic Bread Supreme', 1);
--------------------------------------------------------------------------------
/fitness-leaderboard-apache-pinot/commands.txt:
--------------------------------------------------------------------------------
1 |
2 | bin/pinot-admin.sh StartKafka -zkAddress=localhost:2123/kafka -port 9876
3 |
4 | kafka-topics --bootstrap-server localhost:9876 --list
5 |
6 | kafka-topics --create \
7 | --bootstrap-server localhost:9876 \
8 | --replication-factor 1 \
9 | --partitions 1 \
10 | --topic steps
11 |
12 | bin/pinot-admin.sh AddTable \
13 | -schemaFile schemas/steps-schema.json \
14 | -tableConfigFile schemas/steps-table.json \
15 | -exec
16 |
17 | kafka-console-producer \
18 | --broker-list localhost:9876 \
19 | --topic steps < fitness_events_24h.json
20 |
21 | curl -H "Content-Type: application/json" -X POST \
22 | -d '{"sql":"select userName, country, sum(steps) as total from steps group by userName,country order by total desc limit 10"}' \
23 | http://localhost:8000/query/sql
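
The same leaderboard query can be issued from application code; a sketch using Java 11's built-in HttpClient against the broker REST endpoint from the curl command above:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class LeaderboardQuery {
    public static void main(String[] args) throws Exception {
        // Same SQL as the curl example: top-10 users by total steps.
        String body = "{\"sql\":\"select userName, country, sum(steps) as total "
                + "from steps group by userName, country order by total desc limit 10\"}";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8000/query/sql"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // JSON result table from the Pinot broker
    }
}
```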
--------------------------------------------------------------------------------
/fitness-leaderboard-apache-pinot/data/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/fitness-leaderboard-apache-pinot/data/.DS_Store
--------------------------------------------------------------------------------
/fitness-leaderboard-apache-pinot/data/samples/fitness_events_24h.json:
--------------------------------------------------------------------------------
1 | {"userId":9567,"userName":"anotti0","country":"Finland","gender":"Female","steps":6682,"loggedAt":"1627618483"}
2 | {"userId":5306,"userName":"roneile1","country":"Croatia","gender":"Male","steps":28102,"loggedAt":"1627677665"}
3 | {"userId":5502,"userName":"anaden2","country":"France","gender":"Female","steps":14430,"loggedAt":"1627631121"}
4 | {"userId":9454,"userName":"hwoodsford3","country":"France","gender":"Male","steps":28034,"loggedAt":"1627638216"}
5 | {"userId":5327,"userName":"mkinsella4","country":"Dominican Republic","gender":"Male","steps":52176,"loggedAt":"1627655052"}
6 | {"userId":8974,"userName":"aearly5","country":"China","gender":"Female","steps":7116,"loggedAt":"1627658000"}
7 | {"userId":6810,"userName":"akealy6","country":"Bolivia","gender":"Male","steps":71110,"loggedAt":"1627658348"}
8 | {"userId":6103,"userName":"eraggett7","country":"Indonesia","gender":"Male","steps":14622,"loggedAt":"1627609940"}
9 | {"userId":9820,"userName":"nwinslade8","country":"Canada","gender":"Female","steps":37649,"loggedAt":"1627686959"}
10 | {"userId":6339,"userName":"bbarkus9","country":"Indonesia","gender":"Male","steps":7016,"loggedAt":"1627649383"}
11 | {"userId":4741,"userName":"llettingtona","country":"Thailand","gender":"Female","steps":6032,"loggedAt":"1627610624"}
12 | {"userId":4217,"userName":"jpaneb","country":"China","gender":"Female","steps":64485,"loggedAt":"1627637394"}
13 | {"userId":413,"userName":"jnestorc","country":"Portugal","gender":"Male","steps":34390,"loggedAt":"1627643311"}
14 | {"userId":3571,"userName":"ehurlestoned","country":"Armenia","gender":"Female","steps":83903,"loggedAt":"1627609016"}
15 | {"userId":2897,"userName":"nhourigane","country":"Azerbaijan","gender":"Male","steps":57972,"loggedAt":"1627644923"}
--------------------------------------------------------------------------------
/fitness-leaderboard-apache-pinot/data/samples/fitness_events_all_time.json:
--------------------------------------------------------------------------------
1 | {"userId":7727,"userName":"pmatasov0","country":"Philippines","gender":"Female","steps":13492,"loggedAt":"1626235471"}
2 | {"userId":200,"userName":"abarbery1","country":"China","gender":"Male","steps":47266,"loggedAt":"1626569591"}
3 | {"userId":9688,"userName":"zblazek2","country":"China","gender":"Female","steps":51111,"loggedAt":"1626236503"}
4 | {"userId":5379,"userName":"stoolin3","country":"Indonesia","gender":"Male","steps":95956,"loggedAt":"1627294482"}
5 | {"userId":1678,"userName":"grouff4","country":"Philippines","gender":"Male","steps":93853,"loggedAt":"1625750303"}
6 | {"userId":3190,"userName":"wpheasey5","country":"Ukraine","gender":"Female","steps":16565,"loggedAt":"1625928058"}
7 | {"userId":3356,"userName":"tkineton6","country":"Mexico","gender":"Female","steps":44431,"loggedAt":"1626521292"}
8 | {"userId":4400,"userName":"sstaynes7","country":"China","gender":"Male","steps":30585,"loggedAt":"1627322203"}
9 | {"userId":2146,"userName":"tgregorin8","country":"Portugal","gender":"Female","steps":9906,"loggedAt":"1625196704"}
10 | {"userId":6021,"userName":"tboyles9","country":"Georgia","gender":"Male","steps":74941,"loggedAt":"1626064183"}
11 | {"userId":3430,"userName":"hgartera","country":"Yemen","gender":"Male","steps":37284,"loggedAt":"1626090306"}
12 | {"userId":6031,"userName":"gsharkeyb","country":"Finland","gender":"Female","steps":88929,"loggedAt":"1627317367"}
13 | {"userId":9511,"userName":"pfaircliffec","country":"China","gender":"Female","steps":61830,"loggedAt":"1627268410"}
14 | {"userId":5974,"userName":"ksaiensd","country":"United States","gender":"Male","steps":91745,"loggedAt":"1626066131"}
15 | {"userId":3526,"userName":"mpipparde","country":"Portugal","gender":"Female","steps":44174,"loggedAt":"1626808974"}
--------------------------------------------------------------------------------
/fitness-leaderboard-apache-pinot/queries.txt:
--------------------------------------------------------------------------------
1 | select userName, country, sum(steps) as total
2 | from steps
3 | group by userName,country
4 | order by total desc
5 | limit 10
6 |
7 | select userName, country, sum(steps) as total
8 | from steps
9 | where loggedAt > ToEpochSeconds(now()- 86400000)
10 | group by userName,country
11 | order by total desc
12 |
13 |
--------------------------------------------------------------------------------
/fitness-leaderboard-apache-pinot/schemas/steps-schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "schemaName": "steps",
3 | "dimensionFieldSpecs": [
4 | {
5 | "name": "userId",
6 | "dataType": "INT"
7 | },
8 | {
9 | "name": "userName",
10 | "dataType": "STRING"
11 | },
12 | {
13 | "name": "country",
14 | "dataType": "STRING"
15 | },
16 | {
17 | "name": "gender",
18 | "dataType": "STRING"
19 | }
20 | ],
21 | "metricFieldSpecs": [
22 | {
23 | "name": "steps",
24 | "dataType": "INT"
25 | }
26 | ],
27 | "dateTimeFieldSpecs": [{
28 | "name": "loggedAt",
29 | "dataType": "LONG",
30 | "format" : "1:MILLISECONDS:EPOCH",
31 | "granularity": "1:MILLISECONDS"
32 | }]
33 | }
34 |
--------------------------------------------------------------------------------
/fitness-leaderboard-apache-pinot/schemas/steps-table.json:
--------------------------------------------------------------------------------
1 | {
2 | "tableName": "steps",
3 | "tableType": "REALTIME",
4 | "segmentsConfig": {
5 | "timeColumnName": "loggedAt",
6 | "timeType": "MILLISECONDS",
7 | "schemaName": "steps",
8 | "replicasPerPartition": "1"
9 | },
10 | "tenants": {},
11 | "tableIndexConfig": {
12 | "loadMode": "MMAP",
13 | "streamConfigs": {
14 | "streamType": "kafka",
15 | "stream.kafka.consumer.type": "lowlevel",
16 | "stream.kafka.topic.name": "steps",
17 | "stream.kafka.decoder.class.name": "org.apache.pinot.plugin.stream.kafka.KafkaJSONMessageDecoder",
18 | "stream.kafka.consumer.factory.class.name": "org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory",
19 | "stream.kafka.broker.list": "localhost:9876",
20 | "realtime.segment.flush.threshold.time": "3600000",
21 | "realtime.segment.flush.threshold.size": "50000",
22 | "stream.kafka.consumer.prop.auto.offset.reset": "smallest"
23 | }
24 | },
25 | "metadata": {
26 | "customConfigs": {}
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/idempotent-consumer/README.md:
--------------------------------------------------------------------------------
1 | # Idempotent Event Consumer
2 |
3 | This sample uses Quarkus, Hibernate, MySQL, and Apache Kafka to implement a Kafka event consumer that detects and discards duplicate events coming from a Kafka topic.
4 |
5 | Underneath, it maintains a MySQL table called `consumed_messages` to record successfully processed messages. When a new message arrives, the consumer checks this table for the message ID: if the ID is already present, the message is ignored; otherwise, it is processed and its ID is inserted into the `consumed_messages` table.
6 |
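A condensed sketch of that check-then-record pattern (the class name here is illustrative; the full version lives in `OrderEventHandler` and `MessageLog` under `src/main/java`). The key point is that the duplicate check, the business logic, and the message-log insert all share one database transaction:

```java
package com.edu.samples;

import com.edu.samples.messagelog.MessageLog;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.transaction.Transactional;

@ApplicationScoped
public class DeduplicatingHandler {

    @Inject
    MessageLog log;

    // If processing fails before the commit, the consumed_messages row is
    // rolled back too, so a redelivered copy of the event is handled cleanly.
    @Transactional
    public void onEvent(String eventId) {
        if (log.alreadyProcessed(eventId)) {
            return; // duplicate delivery: skip
        }
        // ... business logic goes here ...
        log.processed(eventId); // records the ID in consumed_messages
    }
}
```
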
7 | ## Running the application in dev mode
8 |
9 | You can run the application in dev mode, which enables live coding, using:
10 | ```shell script
11 | ./mvnw compile quarkus:dev
12 | ```
13 |
14 | > **_NOTE:_** Quarkus now ships with a Dev UI, which is available in dev mode only at http://localhost:8080/q/dev/.
15 |
16 | ## Packaging and running the application
17 |
18 | The application can be packaged using:
19 | ```shell script
20 | ./mvnw package
21 | ```
22 | It produces the `quarkus-run.jar` file in the `target/quarkus-app/` directory.
23 | Be aware that it’s not an _über-jar_ as the dependencies are copied into the `target/quarkus-app/lib/` directory.
24 |
25 | If you want to build an _über-jar_, execute the following command:
26 | ```shell script
27 | ./mvnw package -Dquarkus.package.type=uber-jar
28 | ```
29 |
30 | The application is now runnable using `java -jar target/quarkus-app/quarkus-run.jar`.
31 |
32 | ## Creating a native executable
33 |
34 | You can create a native executable using:
35 | ```shell script
36 | ./mvnw package -Pnative
37 | ```
38 |
39 | Or, if you don't have GraalVM installed, you can run the native executable build in a container using:
40 | ```shell script
41 | ./mvnw package -Pnative -Dquarkus.native.container-build=true
42 | ```
43 |
44 | You can then execute your native executable with: `./target/idempotent-consumer-1.0.0-SNAPSHOT-runner`
45 |
46 | If you want to learn more about building native executables, please consult https://quarkus.io/guides/maven-tooling.html.
47 |
48 | ## Related Guides
49 |
50 | - RESTEasy JAX-RS ([guide](https://quarkus.io/guides/rest-json)): REST endpoint framework implementing JAX-RS and more
51 |
52 | ## Provided Code
53 |
54 | ### RESTEasy JAX-RS
55 |
56 | Easily start your RESTful Web Services
57 |
58 | [Related guide section...](https://quarkus.io/guides/getting-started#the-jax-rs-resources)
59 |
--------------------------------------------------------------------------------
/idempotent-consumer/orders.txt:
--------------------------------------------------------------------------------
1 | {"eventId":"foo","eventType":"ORDER_CREATED","payload":{"id":12345,"customerId":"12df-dfdf-223","status":"CREATED"}}
--------------------------------------------------------------------------------
/idempotent-consumer/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"
3 |          xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
4 |   <modelVersion>4.0.0</modelVersion>
5 |   <groupId>com.edu.samples</groupId>
6 |   <artifactId>idempotent-consumer</artifactId>
7 |   <name>Idempotent Event Consumer</name>
8 |   <version>1.0.0-SNAPSHOT</version>
9 |   <properties>
10 |     <compiler-plugin.version>3.8.1</compiler-plugin.version>
11 |     <maven.compiler.parameters>true</maven.compiler.parameters>
12 |     <maven.compiler.source>11</maven.compiler.source>
13 |     <maven.compiler.target>11</maven.compiler.target>
14 |     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
15 |     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
16 |     <quarkus.platform.artifact-id>quarkus-universe-bom</quarkus.platform.artifact-id>
17 |     <quarkus.platform.group-id>io.quarkus</quarkus.platform.group-id>
18 |     <quarkus.platform.version>2.0.2.Final</quarkus.platform.version>
19 |     <surefire-plugin.version>3.0.0-M5</surefire-plugin.version>
20 |   </properties>
21 |   <dependencyManagement>
22 |     <dependencies>
23 |       <dependency>
24 |         <groupId>${quarkus.platform.group-id}</groupId>
25 |         <artifactId>${quarkus.platform.artifact-id}</artifactId>
26 |         <version>${quarkus.platform.version}</version>
27 |         <type>pom</type>
28 |         <scope>import</scope>
29 |       </dependency>
30 |     </dependencies>
31 |   </dependencyManagement>
32 |   <dependencies>
33 |     <dependency>
34 |       <groupId>io.quarkus</groupId>
35 |       <artifactId>quarkus-hibernate-orm</artifactId>
36 |     </dependency>
37 |     <dependency>
38 |       <groupId>io.quarkus</groupId>
39 |       <artifactId>quarkus-resteasy</artifactId>
40 |     </dependency>
41 |     <dependency>
42 |       <groupId>io.quarkus</groupId>
43 |       <artifactId>quarkus-resteasy-jackson</artifactId>
44 |     </dependency>
45 |     <dependency>
46 |       <groupId>io.quarkus</groupId>
47 |       <artifactId>quarkus-jdbc-mysql</artifactId>
48 |     </dependency>
49 |     <dependency>
50 |       <groupId>io.quarkus</groupId>
51 |       <artifactId>quarkus-smallrye-reactive-messaging-kafka</artifactId>
52 |     </dependency>
53 |     <dependency>
54 |       <groupId>io.quarkus</groupId>
55 |       <artifactId>quarkus-arc</artifactId>
56 |     </dependency>
57 |     <dependency>
58 |       <groupId>io.quarkus</groupId>
59 |       <artifactId>quarkus-junit5</artifactId>
60 |       <scope>test</scope>
61 |     </dependency>
62 |     <dependency>
63 |       <groupId>io.rest-assured</groupId>
64 |       <artifactId>rest-assured</artifactId>
65 |       <scope>test</scope>
66 |     </dependency>
67 |   </dependencies>
68 |   <build>
69 |     <plugins>
70 |       <plugin>
71 |         <groupId>${quarkus.platform.group-id}</groupId>
72 |         <artifactId>quarkus-maven-plugin</artifactId>
73 |         <version>${quarkus.platform.version}</version>
74 |         <extensions>true</extensions>
75 |         <executions>
76 |           <execution>
77 |             <goals>
78 |               <goal>build</goal>
79 |               <goal>generate-code</goal>
80 |               <goal>generate-code-tests</goal>
81 |             </goals>
82 |           </execution>
83 |         </executions>
84 |       </plugin>
85 |       <plugin>
86 |         <artifactId>maven-compiler-plugin</artifactId>
87 |         <version>${compiler-plugin.version}</version>
88 |         <configuration>
89 |           <parameters>${maven.compiler.parameters}</parameters>
90 |         </configuration>
91 |       </plugin>
92 |       <plugin>
93 |         <artifactId>maven-surefire-plugin</artifactId>
94 |         <version>${surefire-plugin.version}</version>
95 |         <configuration>
96 |           <systemPropertyVariables>
97 |             <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
98 |             <maven.home>${maven.home}</maven.home>
99 |           </systemPropertyVariables>
100 |         </configuration>
101 |       </plugin>
102 |     </plugins>
103 |   </build>
104 |   <profiles>
105 |     <profile>
106 |       <id>native</id>
107 |       <activation>
108 |         <property>
109 |           <name>native</name>
110 |         </property>
111 |       </activation>
112 |       <build>
113 |         <plugins>
114 |           <plugin>
115 |             <artifactId>maven-failsafe-plugin</artifactId>
116 |             <version>${surefire-plugin.version}</version>
117 |             <executions>
118 |               <execution>
119 |                 <goals>
120 |                   <goal>integration-test</goal>
121 |                   <goal>verify</goal>
122 |                 </goals>
123 |                 <configuration>
124 |                   <systemPropertyVariables>
125 |                     <native.image.path>${project.build.directory}/${project.build.finalName}-runner</native.image.path>
126 |                     <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
127 |                     <maven.home>${maven.home}</maven.home>
128 |                   </systemPropertyVariables>
129 |                 </configuration>
130 |               </execution>
131 |             </executions>
132 |           </plugin>
133 |         </plugins>
134 |       </build>
135 |       <properties>
136 |         <quarkus.package.type>native</quarkus.package.type>
137 |       </properties>
138 |     </profile>
139 |   </profiles>
140 | </project>
141 | 
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/docker/Dockerfile.jvm:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.jvm -t quarkus/hibernate-orm-mysql-jvm .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/hibernate-orm-mysql-jvm
15 | #
16 | # If you want to include the debug port in your docker image
17 | # you will have to expose the debug port (default 5005) like this: EXPOSE 8080 5005
18 | #
19 | # Then run the container using:
20 | #
21 | # docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/hibernate-orm-mysql-jvm
22 | #
23 | ###
24 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
25 |
26 | ARG JAVA_PACKAGE=java-11-openjdk-headless
27 | ARG RUN_JAVA_VERSION=1.3.8
28 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
29 | # Install java and the run-java script
30 | # Also set up permissions for user `1001`
31 | RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
32 | && microdnf update \
33 | && microdnf clean all \
34 | && mkdir /deployments \
35 | && chown 1001 /deployments \
36 | && chmod "g+rwX" /deployments \
37 | && chown 1001:root /deployments \
38 | && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
39 | && chown 1001 /deployments/run-java.sh \
40 | && chmod 540 /deployments/run-java.sh \
41 | && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security
42 |
43 | # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
44 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
45 | # We make four distinct layers so if there are application changes the library layers can be re-used
46 | COPY --chown=1001 target/quarkus-app/lib/ /deployments/lib/
47 | COPY --chown=1001 target/quarkus-app/*.jar /deployments/
48 | COPY --chown=1001 target/quarkus-app/app/ /deployments/app/
49 | COPY --chown=1001 target/quarkus-app/quarkus/ /deployments/quarkus/
50 |
51 | EXPOSE 8080
52 | USER 1001
53 |
54 | ENTRYPOINT [ "/deployments/run-java.sh" ]
55 |
56 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/docker/Dockerfile.legacy-jar:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Dquarkus.package.type=legacy-jar
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.legacy-jar -t quarkus/hibernate-orm-mysql-legacy-jar .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/hibernate-orm-mysql-legacy-jar
15 | #
16 | # If you want to include the debug port in your docker image
17 | # you will have to expose the debug port (default 5005) like this: EXPOSE 8080 5005
18 | #
19 | # Then run the container using:
20 | #
21 | # docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/hibernate-orm-mysql-legacy-jar
22 | #
23 | ###
24 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
25 |
26 | ARG JAVA_PACKAGE=java-11-openjdk-headless
27 | ARG RUN_JAVA_VERSION=1.3.8
28 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
29 | # Install java and the run-java script
30 | # Also set up permissions for user `1001`
31 | RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
32 | && microdnf update \
33 | && microdnf clean all \
34 | && mkdir /deployments \
35 | && chown 1001 /deployments \
36 | && chmod "g+rwX" /deployments \
37 | && chown 1001:root /deployments \
38 | && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
39 | && chown 1001 /deployments/run-java.sh \
40 | && chmod 540 /deployments/run-java.sh \
41 | && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security
42 |
43 | # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
44 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
45 | COPY target/lib/* /deployments/lib/
46 | COPY target/*-runner.jar /deployments/app.jar
47 |
48 | EXPOSE 8080
49 | USER 1001
50 |
51 | ENTRYPOINT [ "/deployments/run-java.sh" ]
52 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/hibernate-orm-mysql .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/hibernate-orm-mysql
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/docker/Dockerfile.native-distroless:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a distroless container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native-distroless -t quarkus/hibernate-orm-mysql .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/hibernate-orm-mysql
15 | #
16 | ###
17 | FROM quay.io/quarkus/quarkus-distroless-image:1.0
18 | COPY target/*-runner /application
19 |
20 | EXPOSE 8080
21 | USER nonroot
22 |
23 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
24 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/java/com/edu/samples/KafkaEventConsumer.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples;
2 |
3 | import com.edu.samples.serde.OrderEvent;
4 | import io.smallrye.reactive.messaging.kafka.KafkaRecord;
5 | import org.eclipse.microprofile.reactive.messaging.Acknowledgment;
6 | import org.eclipse.microprofile.reactive.messaging.Incoming;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 |
10 | import javax.inject.Inject;
11 | import java.util.concurrent.CompletableFuture;
12 | import java.util.concurrent.CompletionStage;
13 |
14 | public class KafkaEventConsumer {
15 |
16 | private static final Logger LOG = LoggerFactory.getLogger(KafkaEventConsumer.class);
17 |
18 | @Inject
19 | OrderEventHandler orderEventHandler;
20 |
21 | @Incoming("orders")
22 | @Acknowledgment(Acknowledgment.Strategy.MANUAL)
23 | public CompletionStage<Void> onOrderEvent(KafkaRecord<String, OrderEvent> message) {
24 | return CompletableFuture.runAsync(() -> {
25 | try {
26 | LOG.info("Event with ID {} received", message.getPayload().getEventId());
27 |
28 | orderEventHandler.onOrderEvent(message.getPayload());
29 | message.ack();
30 | } catch (Exception e) {
31 | LOG.error("Error while preparing shipment", e);
32 | throw e;
33 | }
34 | });
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/java/com/edu/samples/OrderEventHandler.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples;
2 |
3 | import com.edu.samples.messagelog.MessageLog;
4 | import com.edu.samples.serde.OrderEvent;
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 |
8 | import javax.enterprise.context.ApplicationScoped;
9 | import javax.inject.Inject;
10 | import javax.transaction.Transactional;
11 |
12 | @ApplicationScoped
13 | public class OrderEventHandler {
14 |
15 | private static final Logger LOGGER = LoggerFactory.getLogger(OrderEventHandler.class);
16 |
17 | @Inject
18 | MessageLog log;
19 |
20 | @Transactional
21 | public void onOrderEvent(OrderEvent event) {
22 | String eventId = event.getEventId();
23 |
24 | if (log.alreadyProcessed(eventId)) {
25 | LOGGER.info("Event with eventID {} was already retrieved, ignoring it", eventId);
26 | return;
27 | }
28 |
29 | LOGGER.info("Received 'Order' event -- orderId: {}, customerId: '{}', status: '{}'",
30 | event.getPayload().getId(),
31 | event.getPayload().getCustomerId(),
32 | event.getPayload().getStatus()
33 | );
34 |
35 | log.processed(eventId);
36 | }
37 |
38 | }
39 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/java/com/edu/samples/messagelog/ConsumedMessage.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples.messagelog;
2 |
3 | import javax.persistence.Entity;
4 | import javax.persistence.Id;
5 | import javax.persistence.Table;
6 | import java.time.Instant;
7 |
8 | @Entity
9 | @Table(name = "consumed_messages")
10 | public class ConsumedMessage {
11 |
12 | @Id
13 | private String eventId;
14 |
15 | private Instant timeOfReceiving;
16 |
17 | ConsumedMessage() {
18 | }
19 |
20 | public ConsumedMessage(String eventId, Instant timeOfReceiving) {
21 | this.eventId = eventId;
22 | this.timeOfReceiving = timeOfReceiving;
23 | }
24 |
25 | public String getEventId() {
26 | return eventId;
27 | }
28 |
29 | public void setEventId(String eventId) {
30 | this.eventId = eventId;
31 | }
32 |
33 | public Instant getTimeOfReceiving() {
34 | return timeOfReceiving;
35 | }
36 |
37 | public void setTimeOfReceiving(Instant timeOfReceiving) {
38 | this.timeOfReceiving = timeOfReceiving;
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/java/com/edu/samples/messagelog/MessageLog.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples.messagelog;
2 |
3 | import org.slf4j.Logger;
4 | import org.slf4j.LoggerFactory;
5 |
6 | import javax.enterprise.context.ApplicationScoped;
7 | import javax.persistence.EntityManager;
8 | import javax.persistence.PersistenceContext;
9 | import javax.transaction.Transactional;
10 | import java.time.Instant;
11 | import java.util.UUID;
12 |
13 | @ApplicationScoped
14 | public class MessageLog {
15 |
16 | private static final Logger LOG = LoggerFactory.getLogger(MessageLog.class);
17 |
18 | @PersistenceContext
19 | EntityManager entityManager;
20 |
21 | @Transactional(value= Transactional.TxType.MANDATORY)
22 | public void processed(String eventId) {
23 | entityManager.persist(new ConsumedMessage(eventId, Instant.now()));
24 | }
25 |
26 | @Transactional(value= Transactional.TxType.MANDATORY)
27 | public boolean alreadyProcessed(String eventId) {
28 | LOG.debug("Looking for event with id {} in message log", eventId);
29 | return entityManager.find(ConsumedMessage.class, eventId) != null;
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/java/com/edu/samples/serde/Order.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples.serde;
2 |
3 | public class Order {
4 |
5 | private long id;
6 | private String customerId;
7 | private String status;
8 |
9 | public Order() {
10 | }
11 |
12 | public long getId() {
13 | return id;
14 | }
15 |
16 | public void setId(long id) {
17 | this.id = id;
18 | }
19 |
20 | public String getCustomerId() {
21 | return customerId;
22 | }
23 |
24 | public void setCustomerId(String customerId) {
25 | this.customerId = customerId;
26 | }
27 |
28 | public String getStatus() {
29 | return status;
30 | }
31 |
32 | public void setStatus(String status) {
33 | this.status = status;
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/java/com/edu/samples/serde/OrderEvent.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples.serde;
2 |
3 | import com.edu.samples.serde.Order;
4 |
5 | public class OrderEvent {
6 |
7 | private String eventId;
8 | private String eventType;
9 | private Order payload;
10 |
11 | public OrderEvent() {
12 | }
13 |
14 | public OrderEvent(String eventId, String eventType, Order payload) {
15 | this.eventId = eventId;
16 | this.eventType = eventType;
17 | this.payload = payload;
18 | }
19 |
20 | public String getEventId() {
21 | return eventId;
22 | }
23 |
24 | public void setEventId(String eventId) {
25 | this.eventId = eventId;
26 | }
27 |
28 | public String getEventType() {
29 | return eventType;
30 | }
31 |
32 | public void setEventType(String eventType) {
33 | this.eventType = eventType;
34 | }
35 |
36 | public Order getPayload() {
37 | return payload;
38 | }
39 |
40 | public void setPayload(Order payload) {
41 | this.payload = payload;
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/java/com/edu/samples/serde/OrderEventDeserializer.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples.serde;
2 |
3 | import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer;
4 |
5 | public class OrderEventDeserializer extends ObjectMapperDeserializer<OrderEvent> {
6 |
7 | public OrderEventDeserializer() {
8 | super(OrderEvent.class);
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/idempotent-consumer/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | # datasource configuration
2 | quarkus.datasource.db-kind=mysql
3 | quarkus.datasource.jdbc.url=jdbc:mysql://localhost:3306/shipmentdb
4 | quarkus.datasource.username=root
5 | quarkus.datasource.password=root
6 | #quarkus.hibernate-orm.database.generation=drop-and-create
7 | quarkus.hibernate-orm.database.generation=update
8 | quarkus.hibernate-orm.dialect=org.hibernate.dialect.MySQL8Dialect
9 | quarkus.hibernate-orm.log.sql=true
10 |
11 | mp.messaging.incoming.orders.connector=smallrye-kafka
12 | mp.messaging.incoming.orders.topic=orders
13 | mp.messaging.incoming.orders.bootstrap.servers=localhost:9092
14 | mp.messaging.incoming.orders.group.id=shipment-service
15 | mp.messaging.incoming.orders.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
16 | mp.messaging.incoming.orders.value.deserializer=com.edu.samples.serde.OrderEventDeserializer
17 | mp.messaging.incoming.orders.auto.offset.reset=earliest
--------------------------------------------------------------------------------
/idempotent-consumer/src/test/java/com/edu/samples/MessageLogTest.java:
--------------------------------------------------------------------------------
1 | package com.edu.samples;
2 |
3 | import com.edu.samples.messagelog.MessageLog;
4 | import io.quarkus.test.junit.QuarkusTest;
5 | import org.junit.jupiter.api.Assertions;
6 | import org.junit.jupiter.api.Test;
7 | import org.slf4j.Logger;
8 | import org.slf4j.LoggerFactory;
9 |
10 | import javax.inject.Inject;
11 | import javax.transaction.Transactional;
12 | import java.util.UUID;
13 |
14 | @QuarkusTest
15 | public class MessageLogTest {
16 |
17 | @Inject
18 | MessageLog messageLog;
19 |
20 | @Test
21 | @Transactional
22 | void testMessageUniqueness() {
23 | UUID uuid = UUID.randomUUID();
24 |
25 | messageLog.processed(uuid.toString());
26 | Assertions.assertTrue(messageLog.alreadyProcessed(uuid.toString()));
27 | }
28 |
29 | @Test
30 | void testEventDeserialization() {
31 | // Deserializes the sample event from orders.txt; fully-qualified names keep the imports untouched.
32 | String json = "{\"eventId\":\"foo\",\"eventType\":\"ORDER_CREATED\",\"payload\":{\"id\":12345,\"customerId\":\"12df-dfdf-223\",\"status\":\"CREATED\"}}";
33 | com.edu.samples.serde.OrderEvent event = new com.edu.samples.serde.OrderEventDeserializer().deserialize("orders", json.getBytes());
34 | Assertions.assertEquals("foo", event.getEventId());
35 | Assertions.assertEquals(12345L, event.getPayload().getId());
36 | }
37 | }
38 | 
--------------------------------------------------------------------------------
/mv_with_materialize/README.md:
--------------------------------------------------------------------------------
1 | # Materialized Views for Microservices with Materialize, Debezium, and Apache Kafka
2 |
3 | ## Step 1: Set up the infrastructure services
4 |
5 | Clone the repo and spin up the containers with Docker Compose.
6 |
7 | ```console
8 | git clone https://github.com/dunithd/edu-samples.git
9 | cd edu-samples/mv_with_materialize
10 | docker-compose up -d kafka zookeeper schema-registry mysql postgres payments-feeder
11 | ```
12 |
13 | ## Step 2: Start up Debezium and create the MySQL and Postgres connectors
14 |
15 | ```console
16 | docker-compose up -d debezium
17 | ```
18 |
19 | Then create the MySQL connector that streams change events from the order_db.
20 |
21 | ```console
22 | docker-compose exec debezium curl -H 'Content-Type: application/json' debezium:8083/connectors --data '
23 | {
24 | "name": "orders-connector",
25 | "config": {
26 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
27 | "tasks.max": "1",
28 | "database.hostname": "mysql",
29 | "database.port": "3306",
30 | "database.user": "root",
31 | "database.password": "debezium",
32 | "database.server.id": "184054",
33 | "database.server.name": "mysql",
34 | "database.include.list": "order_db",
35 | "database.history.kafka.bootstrap.servers": "kafka:9092",
36 | "database.history.kafka.topic": "mysql-history"
37 | }
38 | }'
39 | ```
40 |
41 | Do the same to create the PostgreSQL connector that streams change events from the shipment_db.
42 |
43 | ```console
44 | docker-compose exec debezium curl -H 'Content-Type: application/json' debezium:8083/connectors --data '
45 | {
46 | "name": "shipments-connector",
47 | "config": {
48 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
49 | "plugin.name": "pgoutput",
50 | "database.hostname": "postgres",
51 | "database.port": "5432",
52 | "database.user": "postgresuser",
53 | "database.password": "postgrespw",
54 | "database.dbname" : "shipment_db",
55 | "database.server.name": "postgres",
56 | "table.include.list": "public.shipments"
57 |
58 | }
59 | }'
60 | ```
61 |
62 | Verify that the connectors have been deployed properly:
63 |
64 | ```console
65 | docker-compose exec debezium curl debezium:8083/connectors
66 | ```
67 |
68 | ## Step 3: Start Materialize and define sources and materialized views
69 |
70 | ```console
71 | docker-compose up -d materialized
72 | ```
73 |
74 | Bring up `mzcli`, which you can use as a CLI for Materialize.
75 |
76 | ```console
77 | docker run -it --network mv_with_materialize_default materialize/mzcli mzcli -h materialized -U materialize -p 6875 -d materialize
78 | ```
79 |
80 | Copy and paste the contents of the /materialize/script.sql file into the `mzcli` console.
81 |
82 | ```sql
83 | CREATE SOURCE orders
84 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'mysql.order_db.orders'
85 | FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081' ENVELOPE DEBEZIUM;
86 |
87 | CREATE SOURCE shipments
88 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'postgres.public.shipments'
89 | FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081' ENVELOPE DEBEZIUM;
90 |
91 | CREATE SOURCE payments
92 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'payments'
93 | FORMAT BYTES;
94 |
95 | CREATE VIEW payments_view AS
96 | SELECT
97 | (payment_data ->'payment_id')::INT as payment_id,
98 | (payment_data ->'order_id')::INT as order_id,
99 | (payment_data ->'payment_date')::STRING as payment_date,
100 | (payment_data ->'payment_method')::STRING as payment_method,
101 | (payment_data ->'amount')::FLOAT as amount,
102 | (payment_data ->'status')::STRING as status
103 | FROM (
104 | SELECT convert_from(data, 'utf8')::jsonb AS payment_data
105 | FROM payments
106 | );
107 |
108 | CREATE MATERIALIZED VIEW order_history AS
109 | SELECT
110 | orders.order_id as order_id,
111 | orders.customer_id as customer_id,
112 | orders.order_date as order_date,
113 | orders.total as total,
114 | payments_view.status as payment_status,
115 | shipments.status as shipment_status
116 | FROM
117 | orders,
118 | payments_view,
119 | shipments
120 | WHERE
121 | orders.order_id = payments_view.order_id
122 | AND payments_view.order_id = shipments.order_id;
123 | ```
124 |
125 | ## Step 4: Verify the `order_history` materialized view
126 |
127 | The following will return the order history for the customer with ID 100.
128 |
129 | ```sql
130 | SELECT * FROM order_history WHERE customer_id = '100';
131 | ```
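
Materialize speaks the PostgreSQL wire protocol, so the same check can also be run from application code. A minimal sketch, assuming the PostgreSQL JDBC driver (`org.postgresql:postgresql`) is on the classpath and port 6875 is exposed as in `docker-compose.yml`:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class OrderHistoryCheck {
    public static void main(String[] args) throws Exception {
        // Materialize's pgwire endpoint; user "materialize", no password.
        String url = "jdbc:postgresql://localhost:6875/materialize";
        try (Connection conn = DriverManager.getConnection(url, "materialize", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(
                     "SELECT * FROM order_history WHERE customer_id = '100'")) {
            while (rs.next()) {
                System.out.printf("order %s -> payment: %s, shipment: %s%n",
                        rs.getString("order_id"),
                        rs.getString("payment_status"),
                        rs.getString("shipment_status"));
            }
        }
    }
}
```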
132 |
133 | Hope you enjoyed the sample.
134 |
--------------------------------------------------------------------------------
/mv_with_materialize/debezium/commands.txt:
--------------------------------------------------------------------------------
1 |
2 | curl -H 'Content-Type: application/json' debezium:8083/connectors --data '
3 | {
4 | "name": "orders-connector",
5 | "config": {
6 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
7 | "tasks.max": "1",
8 | "database.hostname": "mysql",
9 | "database.port": "3306",
10 | "database.user": "root",
11 | "database.password": "debezium",
12 | "database.server.id": "184054",
13 | "database.server.name": "mysql",
14 | "database.include.list": "order_db",
15 | "database.history.kafka.bootstrap.servers": "kafka:9092",
16 | "database.history.kafka.topic": "mysql-history"
17 | }
18 | }'
19 |
20 | curl -H 'Content-Type: application/json' debezium:8083/connectors --data '
21 | {
22 | "name": "shipments-connector",
23 | "config": {
24 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
25 | "plugin.name": "pgoutput",
26 | "database.hostname": "postgres",
27 | "database.port": "5432",
28 | "database.user": "postgresuser",
29 | "database.password": "postgrespw",
30 | "database.dbname" : "shipment_db",
31 | "database.server.name": "postgres",
32 | "table.include.list": "public.shipments"
33 |
34 | }
35 | }'
--------------------------------------------------------------------------------
/mv_with_materialize/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 | services:
3 | mysql:
4 | image: debezium/example-mysql:1.4
5 | ports:
6 | - 3306:3306
7 | environment:
8 | - MYSQL_ROOT_PASSWORD=debezium
9 | - MYSQL_USER=mysqluser
10 | - MYSQL_PASSWORD=mysqlpw
11 | volumes:
12 | - ./scripts/mysql:/docker-entrypoint-initdb.d
13 | postgres:
14 | image: debezium/postgres:13
15 | ports:
16 | - 5432:5432
17 | healthcheck:
18 | test: "pg_isready -U postgresuser -d shipment_db"
19 | interval: 2s
20 | timeout: 20s
21 | retries: 10
22 | environment:
23 | - POSTGRES_USER=postgresuser
24 | - POSTGRES_PASSWORD=postgrespw
25 | - POSTGRES_DB=shipment_db
26 | - PGPASSWORD=postgrespw
27 | volumes:
28 | - ./scripts/postgres:/docker-entrypoint-initdb.d
29 | zookeeper:
30 | image: confluentinc/cp-zookeeper:5.5.3
31 | environment:
32 | ZOOKEEPER_CLIENT_PORT: 2181
33 | kafka:
34 | image: confluentinc/cp-enterprise-kafka:5.5.3
35 | depends_on: [zookeeper]
36 | environment:
37 | KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
38 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
39 | KAFKA_BROKER_ID: 1
40 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
41 | KAFKA_JMX_PORT: 9991
42 | ports:
43 | - 9092:9092
44 | debezium:
45 | image: debezium/connect:1.4
46 | environment:
47 | BOOTSTRAP_SERVERS: kafka:9092
48 | GROUP_ID: 1
49 | CONFIG_STORAGE_TOPIC: connect_configs
50 | OFFSET_STORAGE_TOPIC: connect_offsets
51 | KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
52 | VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
53 | CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
54 | CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
55 | depends_on: [kafka]
56 | ports:
57 | - 8083:8083
58 | schema-registry:
59 | image: confluentinc/cp-schema-registry:5.5.3
60 | environment:
61 | - SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL=zookeeper:2181
62 | - SCHEMA_REGISTRY_HOST_NAME=schema-registry
63 | - SCHEMA_REGISTRY_LISTENERS=http://schema-registry:8081,http://localhost:8081
64 | depends_on: [zookeeper, kafka]
65 | payments-feeder:
66 | build: payments-feeder
67 | materialized:
68 | image: materialize/materialized
69 | init: true
70 | command: -w2 --disable-telemetry
71 | ports:
72 | - 6875:6875
--------------------------------------------------------------------------------
/mv_with_materialize/materialize/script.sql:
--------------------------------------------------------------------------------
1 | CREATE SOURCE orders
2 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'mysql.order_db.orders'
3 | FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081' ENVELOPE DEBEZIUM;
4 |
5 | CREATE SOURCE shipments
6 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'postgres.public.shipments'
7 | FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081' ENVELOPE DEBEZIUM;
8 |
9 | CREATE SOURCE payments
10 | FROM KAFKA BROKER 'kafka:9092' TOPIC 'payments'
11 | FORMAT BYTES;
12 |
13 | CREATE VIEW payments_view AS
14 | SELECT
15 | (payment_data ->'payment_id')::INT as payment_id,
16 | (payment_data ->'order_id')::INT as order_id,
17 | (payment_data ->'payment_date')::STRING as payment_date,
18 | (payment_data ->'payment_method')::STRING as payment_method,
19 | (payment_data ->'amount')::FLOAT as amount,
20 | (payment_data ->'status')::STRING as status
21 | FROM (
22 | SELECT convert_from(data, 'utf8')::jsonb AS payment_data
23 | FROM payments
24 | );
25 |
26 | CREATE MATERIALIZED VIEW order_history AS
27 | SELECT
28 | orders.order_id as order_id,
29 | orders.customer_id as customer_id,
30 | orders.order_date as order_date,
31 | orders.total as total,
32 | payments_view.status as payment_status,
33 | shipments.status as shipment_status
34 | FROM
35 | orders,
36 | payments_view,
37 | shipments
38 | WHERE
39 | orders.order_id = payments_view.order_id
40 | AND payments_view.order_id = shipments.order_id;
--------------------------------------------------------------------------------
/mv_with_materialize/payments-feeder/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright Materialize, Inc. All rights reserved.
2 | #
3 | # Use of this software is governed by the Business Source License
4 | # included in the LICENSE file at the root of this repository.
5 | #
6 | # As of the Change Date specified in that file, in accordance with
7 | # the Business Source License, use of this software will be governed
8 | # by the Apache License, Version 2.0.
9 |
10 | FROM confluentinc/cp-enterprise-kafka:5.3.0
11 |
12 | # https://github.com/confluentinc/cp-docker-images/issues/764
13 | RUN sed -i s,https://s3-us-west-2.amazonaws.com/staging-confluent-packages-5.3.0/deb/5.3,https://packages.confluent.io/deb/5.3, /etc/apt/sources.list
14 |
15 | RUN apt-get update && apt-get -qy install curl
16 |
17 | RUN curl -fsSL https://raw.githubusercontent.com/vishnubob/wait-for-it/master/wait-for-it.sh > /usr/local/bin/wait-for-it \
18 | && chmod +x /usr/local/bin/wait-for-it
19 |
20 | COPY docker-entrypoint.sh /usr/local/bin
21 |
22 | COPY minified-payments.txt /tmp
23 |
24 | ENTRYPOINT ["docker-entrypoint.sh"]
25 |
--------------------------------------------------------------------------------
/mv_with_materialize/payments-feeder/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Copyright Materialize, Inc. All rights reserved.
4 | #
5 | # Use of this software is governed by the Business Source License
6 | # included in the LICENSE file at the root of this repository.
7 | #
8 | # As of the Change Date specified in that file, in accordance with
9 | # the Business Source License, use of this software will be governed
10 | # by the Apache License, Version 2.0.
11 |
12 | set -euo pipefail
13 |
14 | wait-for-it --timeout=60 zookeeper:2181
15 | wait-for-it --timeout=60 kafka:9092
16 |
17 | topics=(
18 | payments
19 | )
20 |
21 | echo "${topics[@]}" | xargs -n1 -P8 kafka-topics --zookeeper zookeeper:2181 --create --if-not-exists --partitions 1 --replication-factor 1 --topic
22 |
23 | echo "Starting payments event publishing..."
24 |
25 | cat /tmp/minified-payments.txt | kafka-console-producer --broker-list kafka:9092 --topic payments
26 |
27 | echo "Payments event publishing completed"
--------------------------------------------------------------------------------
/mv_with_materialize/payments-feeder/minified-payments.txt:
--------------------------------------------------------------------------------
1 | {"payment_id":20500,"order_id":10500,"payment_date":"2021-01-21","payment_method":"CARD","amount":49.99,"status":"SUCCESSFUL"}
2 | {"payment_id":21500,"order_id":11500,"payment_date":"2021-04-01","payment_method":"CARD","amount":49.99,"status":"SUCCESSFUL"}
3 | {"payment_id":22500,"order_id":12500,"payment_date":"2021-05-31","payment_method":"CARD","amount":280.00,"status":"SUCCESSFUL"}
--------------------------------------------------------------------------------
/mv_with_materialize/scripts/mysql/orders.sql:
--------------------------------------------------------------------------------
1 | create database order_db;
2 |
3 | use order_db;
4 |
5 | create table orders (
6 | order_id int primary key,
7 | customer_id bigint,
8 | order_date date,
9 | total float
10 | );
11 |
12 | insert into orders (order_id,customer_id,order_date,total) values(10500,100,'2021-01-21',49.99);
13 | insert into orders (order_id,customer_id,order_date,total) values(11500,100,'2021-04-01',49.99);
14 | insert into orders (order_id,customer_id,order_date,total) values(12500,100,'2021-05-31',280.00);
--------------------------------------------------------------------------------
/mv_with_materialize/scripts/postgres/shipments.sql:
--------------------------------------------------------------------------------
1 |
2 | CREATE TABLE shipments
3 | (
4 | shipment_id bigint NOT NULL,
5 | order_id bigint NOT NULL,
6 | date_created character varying(255) COLLATE pg_catalog."default",
7 | status character varying(25) COLLATE pg_catalog."default",
8 | CONSTRAINT shipments_pkey PRIMARY KEY (shipment_id)
9 | );
10 |
11 | INSERT INTO shipments values (30500,10500,'2021-01-21','DELIVERED');
12 | INSERT INTO shipments values (31500,11500,'2021-04-21','DELIVERED');
13 | INSERT INTO shipments values (32500,12500,'2021-05-31','PROCESSING');
--------------------------------------------------------------------------------
/mz-funnel-chart-plotly/dash/app.py:
--------------------------------------------------------------------------------
1 | import psycopg2
2 | import pandas.io.sql as sqlio
3 | import pandas as pd
4 | import dash
5 | from dash import dcc
6 | from dash import html
7 | import plotly.express as px
8 |
9 | app = dash.Dash(__name__)
10 |
11 | # Connect to Materialize as a regular database
12 | conn = psycopg2.connect("dbname=materialize user=materialize port=6875 host=localhost")
13 |
14 | # Read the materialized view with Pandas
15 | sql = "select * from consolidated_funnel order by cnt desc;"
16 | df = pd.read_sql_query(sql, conn)
17 |
18 | # Plot a funnel chart
19 | fig = px.funnel(df, x="step", y="cnt")
20 |
21 | # Main UI scaffolding for the dashboard
22 | app.layout = html.Div(children=[
23 | html.H1(children='Conversion Funnel'),
24 |
25 | html.Div(children='''
26 | Dash: A web application framework for your data.
27 | '''),
28 |
29 | dcc.Graph(
30 | id='funnel-chart',
31 | figure=fig
32 | )
33 | ])
34 |
35 | if __name__ == '__main__':
36 | app.run_server(debug=True)
37 |
38 | conn.close()
39 |
40 |
--------------------------------------------------------------------------------
/mz-funnel-chart-plotly/script.sql:
--------------------------------------------------------------------------------
1 | CREATE SOURCE events (id,user_id,event_type,ts)
2 | FROM FILE '/path/to/events.csv'
3 | FORMAT CSV WITH 4 COLUMNS;
4 |
5 | CREATE MATERIALIZED VIEW consolidated_funnel AS
6 | WITH view_product as (
7 | select distinct user_id
8 | from events
9 | where event_type='View Product'
10 | AND ts::timestamp between '2021-09-01'::timestamp and '2021-09-30'::timestamp
11 | ),
12 |
13 | add_to_cart as (
14 | select distinct e.user_id
15 | from view_product v
16 | inner join events e on v.user_id = e.user_id
17 | where e.event_type = 'Add to Cart'
18 | ),
19 |
20 | checkout as (
21 | select distinct e.user_id
22 | from add_to_cart ac
23 | inner join events e on ac.user_id = e.user_id
24 | where e.event_type= 'Checkout'
25 | )
26 |
27 | select 'View Product' as step, count(*) as cnt from view_product
28 | union
29 | select 'Add to Cart' as step, count(*) as cnt from add_to_cart
30 | union
31 | select 'Checkout' as step, count(*) as cnt from checkout
32 | order by cnt desc;
--------------------------------------------------------------------------------
/mz_alerts/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/mz_alerts/.DS_Store
--------------------------------------------------------------------------------
/mz_alerts/README.md:
--------------------------------------------------------------------------------
1 | # Infrastructure Monitoring and Alerting with Materialize
2 |
3 | ## Step 1: Set up the infrastructure services
4 |
5 | Clone the repo and spin up the containers with Docker Compose.
6 |
7 | ```console
8 | git clone https://github.com/dunithd/edu-samples.git
9 | cd edu-samples/mz_alerts
10 | docker-compose up -d kafka zookeeper schema-registry materialized
11 | ```
12 |
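Optionally, confirm that all four containers are up and running before continuing:

```console
docker-compose ps
```
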
13 | ## Step 2: Create a source, views, and a sink in Materialize
14 |
15 | Bring up `mzcli`, which you can use as a command-line client for Materialize.
16 |
17 | ```console
18 | docker run -it --network mz_alerts_default materialize/mzcli mzcli -h materialized -U materialize -p 6875 -d materialize
19 | ```
20 |
21 | Copy and paste the contents of the `materialize.sql` file into the `mzcli` console.
22 |
23 | ```sql
24 | CREATE SOURCE sensor_data_raw
25 | FROM PUBNUB
26 | SUBSCRIBE KEY 'sub-c-5f1b7c8e-fbee-11e3-aa40-02ee2ddab7fe'
27 | CHANNEL 'pubnub-sensor-network';
28 |
29 | CREATE VIEW sensor_data AS
30 | SELECT
31 | ((text::jsonb)->>'sensor_uuid') AS sensor_uuid,
32 | ((text::jsonb)->>'ambient_temperature')::float AS ambient_temperature,
33 | ((text::jsonb)->>'radiation_level')::int AS radiation_level,
34 | ((text::jsonb)->>'humidity')::float AS humidity,
35 | ((text::jsonb)->>'photosensor')::float AS photosensor,
36 | to_timestamp(((text::jsonb)->'timestamp')::bigint) AS ts
37 | FROM sensor_data_raw;
38 |
39 | CREATE MATERIALIZED VIEW anomalies AS
40 | SELECT sensor_uuid,
41 | AVG(ambient_temperature) AS avg
42 | FROM sensor_data
43 | WHERE EXTRACT(EPOCH FROM (ts + INTERVAL '5 seconds'))::bigint * 1000 > mz_logical_timestamp()
44 | GROUP BY sensor_uuid
45 | HAVING AVG(ambient_temperature) > 40;
46 |
47 | CREATE SINK alerts
48 | FROM anomalies
49 | INTO KAFKA BROKER 'kafka:9092' TOPIC 'high-temp-alerts'
50 | CONSISTENCY TOPIC 'high-temp-alerts-consistency'
51 | CONSISTENCY FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081'
52 | WITH (reuse_topic=true)
53 | FORMAT JSON;
54 | ```
55 |
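Before wiring up PagerDuty, you can verify that the anomaly detection works by querying the materialized view directly from `mzcli` (results will vary, since the PubNub feed is live):

```sql
SELECT * FROM anomalies;
```
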
56 | ## Step 3: Configure PagerDuty
57 |
58 | Configure PagerDuty as explained in the article.
59 |
60 | ## Step 4: Configure and run the Python consumer
61 |
62 | Run the following in the Materialize CLI.
63 |
64 | ```sql
65 | SELECT sink_id, name, topic
66 | FROM mz_sinks
67 | JOIN mz_kafka_sinks ON mz_sinks.id = mz_kafka_sinks.sink_id;
68 | ```
69 |
70 | Find out the Kafka topic name from the `topic` field in the result.
71 |
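The result will look something like this; the sink ID and the topic suffix will differ on your machine:

```console
 sink_id | name   | topic
---------+--------+-----------------------------------------------------
 u5      | alerts | high-temp-alerts-u5-1631586083-13100517045407420899
```
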
72 | Copy that value into the `alerts_topic` variable inside `pagerduty-client/pd_client.py`:
73 |
74 | ```python
75 | alerts_topic = "high-temp-alerts-u5-XXXXXX"
76 | ```
77 |
78 | Replace all PagerDuty-related placeholders in the same script, as shown below.
79 |
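Concretely, these are the values to update near the top of the script, shown here with hypothetical placeholders:

```python
# PagerDuty configurations (placeholders, use your own values)
api_token = "<your-pagerduty-api-token>"
service_id = "<your-pagerduty-service-id>"
from_email = "<your-pagerduty-account-email>"
```
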
80 | Finally, build and run the Python consumer container:
81 |
82 | ```console
83 | docker-compose up -d pagerduty-client
84 | ```
85 |
86 | ## Step 5: Log in to the PagerDuty console to see the triggered incidents
87 |
88 | If everything goes well, you should see a screen like this:
89 |
90 | 
91 |
92 | ### Troubleshooting
93 |
94 | If you don't see any incidents in PagerDuty, the fault could be anywhere between Materialize and the Python consumer.
95 |
96 | To check whether the issue lies with the Python consumer, consume the alerts topic directly from Kafka:
97 |
98 | ```console
99 | docker-compose exec kafka kafka-console-consumer --bootstrap-server kafka:9092 --topic high-temp-alerts-u5-1631586083-13100517045407420899
100 | ```
101 |
102 | If you see output like this, Materialize is working as expected and writing alerts into the Kafka topic.
103 |
104 | ```console
105 | {"after":{"row":{"avg":41.799999952316284,"sensor_uuid":"probe-becc8842"}},"before":null,"transaction":{"id":"1631586809000"}}
106 | {"after":null,"before":{"row":{"avg":41.799999952316284,"sensor_uuid":"probe-becc8842"}},"transaction":{"id":"1631586813000"}}
107 | ```
108 |
109 | That confirms the fault is with the Python consumer. Check the PagerDuty credentials and network connectivity, and tail the consumer logs as shown below.
110 |
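A quick way to see what the consumer is doing is to follow its container logs:

```console
docker-compose logs -f pagerduty-client
```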
--------------------------------------------------------------------------------
/mz_alerts/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 | services:
3 | zookeeper:
4 | image: confluentinc/cp-zookeeper:5.5.3
5 | environment:
6 | ZOOKEEPER_CLIENT_PORT: 2181
7 | kafka:
8 | image: confluentinc/cp-enterprise-kafka:5.5.3
9 | depends_on: [zookeeper]
10 | environment:
11 | KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
12 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
13 | KAFKA_BROKER_ID: 1
14 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
15 | KAFKA_JMX_PORT: 9991
16 | ports:
17 | - 9092:9092
18 | schema-registry:
19 | image: confluentinc/cp-schema-registry:5.5.3
20 | environment:
21 | - SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL=zookeeper:2181
22 | - SCHEMA_REGISTRY_HOST_NAME=schema-registry
23 | - SCHEMA_REGISTRY_LISTENERS=http://schema-registry:8081,http://localhost:8081
24 | depends_on: [zookeeper, kafka]
25 | materialized:
26 | image: materialize/materialized:v0.9.1
27 | init: true
28 | command: -w2 --disable-telemetry
29 | ports:
30 | - 6875:6875
31 | pagerduty-client:
32 | build: pagerduty-client
--------------------------------------------------------------------------------
/mz_alerts/materialize.sql:
--------------------------------------------------------------------------------
1 | CREATE SOURCE sensor_data_raw
2 | FROM PUBNUB
3 | SUBSCRIBE KEY 'sub-c-5f1b7c8e-fbee-11e3-aa40-02ee2ddab7fe'
4 | CHANNEL 'pubnub-sensor-network';
5 |
6 | CREATE VIEW sensor_data AS
7 | SELECT
8 | ((text::jsonb)->>'sensor_uuid') AS sensor_uuid,
9 | ((text::jsonb)->>'ambient_temperature')::float AS ambient_temperature,
10 | ((text::jsonb)->>'radiation_level')::int AS radiation_level,
11 | ((text::jsonb)->>'humidity')::float AS humidity,
12 | ((text::jsonb)->>'photosensor')::float AS photosensor,
13 | to_timestamp(((text::jsonb)->'timestamp')::bigint) AS ts
14 | FROM sensor_data_raw;
15 |
16 | CREATE MATERIALIZED VIEW anomalies AS
17 | SELECT sensor_uuid,
18 | AVG(ambient_temperature) AS avg
19 | FROM sensor_data
20 | WHERE EXTRACT(EPOCH FROM (ts + INTERVAL '5 seconds'))::bigint * 1000 > mz_logical_timestamp()
21 | GROUP BY sensor_uuid
22 | HAVING AVG(ambient_temperature) > 40;
23 |
24 | CREATE SINK alerts
25 | FROM anomalies
26 | INTO KAFKA BROKER 'kafka:9092' TOPIC 'high-temp-alerts'
27 | CONSISTENCY TOPIC 'high-temp-alerts-consistency'
28 | CONSISTENCY FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY 'http://schema-registry:8081'
29 | WITH (reuse_topic=true)
30 | FORMAT JSON;
31 |
--------------------------------------------------------------------------------
/mz_alerts/pagerduty-client/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3
2 |
3 | RUN apt-get update && apt-get -qy install curl
4 |
5 | RUN pip install kafka-python pdpyras
6 |
7 | COPY . /pagerduty_client
8 |
9 | COPY docker-entrypoint.sh /usr/local/bin
10 |
11 | ENTRYPOINT ["docker-entrypoint.sh"]
12 |
--------------------------------------------------------------------------------
/mz_alerts/pagerduty-client/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -euo pipefail
4 |
5 | cd /pagerduty_client
6 |
7 | python pd_client.py
--------------------------------------------------------------------------------
/mz_alerts/pagerduty-client/pd_client.py:
--------------------------------------------------------------------------------
1 | from pdpyras import APISession
2 | from requests.sessions import session
3 | from kafka import KafkaConsumer
4 | import json
5 | import sys
6 |
7 | print("Starting the alerts listener")
8 |
9 | # Kafka configurations
10 | kafka_broker = 'kafka:9092'
11 | alerts_topic = "high-temp-alerts-u5-1631586083-13100517045407420899"
12 |
13 | # PagerDuty configurations
14 | api_token = 'u+CtEy_6s91Pp93RM7sQ'
15 | service_id = "PU35FY7"
16 | from_email = "dunithd@gmail.com"
17 |
18 | # Initialize the PagerDuty session
19 | # session = APISession(api_token)
20 | # session.default_from = "dunithd@gmail.com"
21 |
22 | consumer = KafkaConsumer(alerts_topic,
23 | bootstrap_servers=[kafka_broker],
24 | auto_offset_reset='earliest',
25 | enable_auto_commit=True,
26 | group_id='my-group',
27 | value_deserializer=lambda x: json.loads(x.decode('utf-8')))
28 |
29 | # consumer.subscribe(pattern=alerts_topic_pattern)
30 |
31 | for message in consumer:
32 | try:
33 | event = message.value
34 | if event["after"] is None:
35 | continue
36 | else:
37 | row = event["after"]["row"]
38 |
39 | # print("sensor-uuid: %s average: %s"%(row["sensor_uuid"],str(row["avg"])))
40 | sensor_uuid = row["sensor_uuid"]
41 | avg_temperature = row["avg"]
42 |
43 | # Trigger a PD incident
44 | incident_title = "High temperature observed in the data center"
45 | incident_description = "The temperature sensor %s observed an average temperature of %s during the past minute." % (sensor_uuid,str(avg_temperature))
46 |
47 | payload = {
48 | "type": "incident",
49 | "service": {"id": "", "type": "service_reference"},
50 | "body": {"type": "incident_body", "details": ""}
51 | }
52 |
53 | # Manipulate the payload
54 | payload["title"] = incident_title
55 | payload["service"]["id"] = service_id
56 | payload["body"]["details"] = incident_description
57 |
58 | # pd_incident = session.rpost("incidents", json=payload)
59 | # print(pd_incident)
60 | print("Incident triggered")
61 | print(json.dumps(payload))
62 |     except Exception:
63 |         print("An error occurred while triggering the incident.", sys.exc_info()[0])
64 |
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/mz_alerts/pagerduty-client/sample-incident.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "type": "incident",
4 | "title": "Disk usage at 85%",
5 | "service": {
6 | "id": "PU35FY7",
7 | "type": "service_reference"
8 | },
9 | "body": {
10 | "type": "incident_body",
11 | "details": "A disk is getting full on this machine. You should investigate what is causing the disk to fill, and ensure that there is an automated process in place for ensuring data is rotated (eg. logs should have logrotate around them). If data is expected to stay on this disk forever, you should start planning to scale up to a larger disk."
12 | }
13 | }
14 | }
--------------------------------------------------------------------------------
/mz_alerts/pagerduty_console.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/mz_alerts/pagerduty_console.png
--------------------------------------------------------------------------------
/pinot-upserts/README.md:
--------------------------------------------------------------------------------
1 | # CDC-based Upserts with Debezium, Apache Kafka, and Apache Pinot #
2 |
3 | This is the code referenced in the article [CDC-based Upserts with Debezium, Apache Kafka, and Apache Pinot](https://medium.com/event-driven-utopia/cdc-based-upserts-with-debezium-apache-kafka-and-apache-pinot-427cced24eb1)
4 |
5 | The project mimics an online e-commerce store with MySQL as the transactional database. E-commerce orders are captured in the orders MySQL table.
6 |
7 | When an order goes through its lifecycle stages, the status field should transition from OPEN to PROCESSING to SHIPPED.
8 |
9 | The goal is to capture the changes made to the orders table in MySQL and ship them into Apache Pinot so that we can run real-time analytics on orders.
10 |
11 | We will use Debezium and Apache Kafka to build this real-time data pipeline. Debezium captures the changes made to the orders table and streams them into Kafka, allowing Pinot to ingest them in real-time.
12 |
13 | 
14 |
15 | ## 1. Start the stack
16 |
17 | In a terminal, type the following to start the entire Docker Compose stack.
18 |
19 | ```bash
20 | docker compose up -d
21 | ```
22 |
23 | Wait a minute or so for the entire stack to come up.
24 |
25 | ## 2. Create the orders schema and table in Pinot
26 |
27 | Execute the following to create the `orders` schema and table inside Apache Pinot.
28 |
29 | ```bash
30 | docker exec -it pinot-controller /opt/pinot/bin/pinot-admin.sh AddTable \
31 | -tableConfigFile /config/orders_table.json \
32 | -schemaFile /config/orders_schema.json -exec
33 | ```
34 |
35 | Log in to the Pinot query console at localhost:9000 and verify that the `orders` table is getting populated with incoming orders; a quick query like the one below confirms ingestion.
36 |
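For example, a simple count should keep growing as the simulator inserts new orders:

```sql
SELECT COUNT(*) FROM orders
```
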
37 | ## 3. Manually change the order status in MySQL
38 |
39 | Execute the following command in a terminal to log into the MySQL container.
40 |
41 | ```bash
42 | docker exec -it mysql /bin/bash
43 | mysql -u mysqluser -p
44 | mysqlpw
45 | use fakeshop;
46 | ```
47 |
48 | Let's update the first 100 orders to have the PROCESSING status. Note that order IDs increase sequentially from 1.
49 |
50 | ```sql
51 | UPDATE orders
52 | SET status = 'PROCESSING'
53 | WHERE id <= 100;
54 | ```
55 |
56 | Run the following query to ensure that we can see orders with two statuses.
57 |
58 | ```sql
59 | SELECT status AS status,
60 | Count(status) AS orders
61 | FROM orders
62 | GROUP BY status;
63 | ```
64 |
65 | ## 4. Verify the status change in Apache Pinot
66 |
67 | After a few seconds, run the following query in the Pinot query console. You should see the 100 orders with the PROCESSING status.
68 |
69 | ```sql
70 | SELECT payload.status AS status,
71 | Count(payload.status) AS orders
72 | FROM orders
73 | GROUP BY payload.status
74 | ```
75 | That will result in the following:
76 |
77 | 
78 |
79 | ## 5. Tear down the stack
80 |
81 | Run the following command to tear down the stack.
82 |
83 | ```bash
84 | docker compose down
85 | ```
86 |
--------------------------------------------------------------------------------
/pinot-upserts/debezium/output.json:
--------------------------------------------------------------------------------
1 | {
2 | "schema":{
3 | "type":"struct",
4 | "fields":[
5 | {
6 | "type":"int64",
7 | "optional":false,
8 | "field":"id"
9 | },
10 | {
11 | "type":"int64",
12 | "optional":true,
13 | "field":"user_id"
14 | },
15 | {
16 | "type":"int64",
17 | "optional":true,
18 | "field":"product_id"
19 | },
20 | {
21 | "type":"string",
22 | "optional":true,
23 | "default":"OPEN",
24 | "field":"status"
25 | },
26 | {
27 | "type":"int64",
28 | "optional":true,
29 | "default":1,
30 | "field":"quantity"
31 | },
32 | {
33 | "type":"double",
34 | "optional":true,
35 | "field":"total"
36 | },
37 | {
38 | "type":"string",
39 | "optional":true,
40 | "name":"io.debezium.time.ZonedTimestamp",
41 | "version":1,
42 | "field":"created_at"
43 | },
44 | {
45 | "type":"int64",
46 | "optional":true,
47 | "name":"org.apache.kafka.connect.data.Timestamp",
48 | "version":1,
49 | "default":0,
50 | "field":"updated_at"
51 | },
52 | {
53 | "type":"string",
54 | "optional":true,
55 | "field":"__deleted"
56 | }
57 | ],
58 | "optional":false,
59 | "name":"mysql.fakeshop.orders.Value"
60 | },
61 | "payload":{
62 | "id":617,
63 | "user_id":38,
64 | "product_id":52,
65 | "status":"OPEN",
66 | "quantity":5,
67 | "total":460.3500061035156,
68 | "created_at":"2022-07-15T13:15:44Z",
69 | "updated_at":1657890944000,
70 | "__deleted":"false"
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/pinot-upserts/debezium/register_mysql.sh:
--------------------------------------------------------------------------------
1 | curl -s -X PUT -H "Content-Type:application/json" http://debezium:8083/connectors/mysql/config \
2 | -d '{
3 | "connector.class": "io.debezium.connector.mysql.MySqlConnector",
4 | "database.hostname": "mysql",
5 | "database.port": 3306,
6 | "database.user": "debezium",
7 | "database.password": "dbz",
8 | "database.server.name": "mysql",
9 | "database.server.id": "223344",
10 | "database.allowPublicKeyRetrieval": true,
11 | "database.history.kafka.bootstrap.servers": "kafka:9092",
12 | "database.history.kafka.topic": "mysql-history",
13 | "database.include.list": "fakeshop",
14 | "time.precision.mode": "connect",
15 | "include.schema.changes": false,
16 | "transforms":"unwrap",
17 | "transforms.unwrap.type":"io.debezium.transforms.ExtractNewRecordState",
18 | "transforms.unwrap.drop.tombstones":false,
19 | "transforms.unwrap.delete.handling.mode":"rewrite"
20 | }'
--------------------------------------------------------------------------------
/pinot-upserts/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | mysql:
3 | image: mysql/mysql-server:8.0.27
4 | hostname: mysql
5 | container_name: mysql
6 | ports:
7 | - 3306:3306
8 | environment:
9 | - MYSQL_ROOT_PASSWORD=debezium
10 | - MYSQL_USER=mysqluser
11 | - MYSQL_PASSWORD=mysqlpw
12 | volumes:
13 | - ${PWD}/mysql/mysql.cnf:/etc/mysql/conf.d
14 | - ${PWD}/mysql/mysql_bootstrap.sql:/docker-entrypoint-initdb.d/mysql_bootstrap.sql
15 | healthcheck: {test: mysql -pdebezium -e 'select 1', interval: 1s, start_period: 60s}
16 | zookeeper:
17 | image: confluentinc/cp-zookeeper:7.1.0
18 | hostname: zookeeper
19 | container_name: zookeeper
20 | ports:
21 | - "2181:2181"
22 | environment:
23 | ZOOKEEPER_CLIENT_PORT: 2181
24 | ZOOKEEPER_TICK_TIME: 2000
25 | kafka:
26 | image: confluentinc/cp-kafka:7.1.0
27 | hostname: kafka
28 | container_name: kafka
29 | ports:
30 | - "29092:29092"
31 | environment:
32 | KAFKA_BROKER_ID: 1
33 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
34 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
35 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092
36 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
37 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
38 | KAFKA_TOOLS_LOG4J_LOGLEVEL: ERROR
39 | depends_on:
40 | [zookeeper]
41 | healthcheck: {test: nc -z localhost 9092, interval: 1s, start_period: 120s}
42 | debezium:
43 | image: debezium/connect:1.8
44 | hostname: debezium
45 | container_name: debezium
46 | environment:
47 | BOOTSTRAP_SERVERS: kafka:9092
48 | GROUP_ID: 1
49 | CONFIG_STORAGE_TOPIC: connect_configs
50 | OFFSET_STORAGE_TOPIC: connect_offsets
51 | ports:
52 | - 8083:8083
53 | healthcheck: {test: curl -f localhost:8083, interval: 1s, start_period: 120s}
54 | depends_on:
55 | kafka: {condition: service_healthy}
56 | mysql: {condition: service_healthy}
57 | debezium_deploy:
58 | image: debezium/connect:1.8
59 | depends_on:
60 | debezium: {condition: service_healthy}
61 | environment:
62 | KAFKA_ADDR: kafka:9092
63 | volumes:
64 | - ${PWD}/debezium/register_mysql.sh:/register_mysql.sh
65 | entrypoint: [bash, -c, /register_mysql.sh]
66 | pinot-controller:
67 | image: apachepinot/pinot:0.10.0
68 | command: "StartController -zkAddress zookeeper:2181"
69 | container_name: "pinot-controller"
70 | volumes:
71 | - ./pinot/config:/config
72 | restart: unless-stopped
73 | ports:
74 | - "9000:9000"
75 | depends_on:
76 | - zookeeper
77 | pinot-broker:
78 | image: apachepinot/pinot:0.10.0
79 | command: "StartBroker -zkAddress zookeeper:2181"
80 | restart: unless-stopped
81 | container_name: "pinot-broker"
82 | ports:
83 | - "8099:8099"
84 | depends_on:
85 | - pinot-controller
86 | pinot-server:
87 | image: apachepinot/pinot:0.10.0
88 | container_name: "pinot-server"
89 | command: "StartServer -zkAddress zookeeper:2181"
90 | restart: unless-stopped
91 | depends_on:
92 | - pinot-broker
93 | simulator:
94 | build: simulator
95 | container_name: simulator
96 | init: true
97 | depends_on:
98 | mysql: {condition: service_healthy}
99 | debezium: {condition: service_healthy}
100 | kafka: {condition: service_healthy}
--------------------------------------------------------------------------------
/pinot-upserts/images/architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/pinot-upserts/images/architecture.png
--------------------------------------------------------------------------------
/pinot-upserts/images/final-result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/pinot-upserts/images/final-result.png
--------------------------------------------------------------------------------
/pinot-upserts/mysql/mysql.cnf:
--------------------------------------------------------------------------------
1 | [mysqld]
2 | server-id = 223344
3 | log_bin = mysql-bin
4 | expire_logs_days = 1
5 | binlog_format = row
--------------------------------------------------------------------------------
/pinot-upserts/mysql/mysql_bootstrap.sql:
--------------------------------------------------------------------------------
1 | CREATE DATABASE IF NOT EXISTS fakeshop;
2 | USE fakeshop;
3 |
4 | GRANT ALL PRIVILEGES ON fakeshop.* TO 'mysqluser';
5 |
6 | CREATE USER 'debezium' IDENTIFIED WITH mysql_native_password BY 'dbz';
7 |
8 | GRANT SELECT, RELOAD, SHOW DATABASES, REPLICATION SLAVE, REPLICATION CLIENT ON *.* TO 'debezium';
9 |
10 | FLUSH PRIVILEGES;
11 |
12 | CREATE TABLE IF NOT EXISTS fakeshop.users
13 | (
14 | id SERIAL PRIMARY KEY,
15 | first_name VARCHAR(255),
16 | last_name VARCHAR(255),
17 | email VARCHAR(255),
18 | country VARCHAR(255),
19 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
20 | updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
21 | );
22 |
23 | CREATE TABLE IF NOT EXISTS fakeshop.products
24 | (
25 | id SERIAL PRIMARY KEY,
26 | name VARCHAR(100),
27 | category VARCHAR(100),
28 | price FLOAT,
29 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
30 | updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
31 | );
32 |
33 | CREATE TABLE IF NOT EXISTS fakeshop.orders
34 | (
35 | id SERIAL PRIMARY KEY,
36 | user_id BIGINT UNSIGNED REFERENCES users(id),
37 | product_id BIGINT UNSIGNED REFERENCES products(id),
38 | status VARCHAR(50) DEFAULT 'OPEN',
39 | quantity INT UNSIGNED DEFAULT 1,
40 | total FLOAT,
41 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
42 | updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
43 | );
--------------------------------------------------------------------------------
/pinot-upserts/pinot/config/orders_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "schemaName": "orders",
3 | "primaryKeyColumns": [
4 | "payload.id"
5 | ],
6 | "dimensionFieldSpecs": [
7 | {
8 | "name": "payload.id",
9 | "dataType": "INT"
10 | },
11 | {
12 | "name": "payload.user_id",
13 | "dataType": "INT"
14 | },
15 | {
16 | "name": "payload.product_id",
17 | "dataType": "INT"
18 | },
19 | {
20 | "name": "payload.status",
21 | "dataType": "STRING"
22 | }
23 | ],
24 | "metricFieldSpecs": [
25 | {
26 | "name": "payload.quantity",
27 | "dataType": "INT"
28 | },
29 | {
30 | "name": "payload.total",
31 | "dataType": "FLOAT"
32 | }
33 | ],
34 | "dateTimeFieldSpecs": [
35 | {
36 | "name": "payload.updated_at",
37 | "dataType": "LONG",
38 | "format": "1:MILLISECONDS:EPOCH",
39 | "granularity": "1:MILLISECONDS"
40 | }
41 | ]
42 | }
--------------------------------------------------------------------------------
/pinot-upserts/pinot/config/orders_table.json:
--------------------------------------------------------------------------------
1 | {
2 | "tableName": "orders",
3 | "tableType": "REALTIME",
4 | "segmentsConfig": {
5 | "timeColumnName": "payload.updated_at",
6 | "timeType": "MILLISECONDS",
7 | "retentionTimeUnit": "DAYS",
8 | "retentionTimeValue": "1",
9 | "segmentPushType": "APPEND",
10 | "segmentAssignmentStrategy": "BalanceNumSegmentAssignmentStrategy",
11 | "schemaName": "orders",
12 | "replicasPerPartition": "1"
13 | },
14 | "tenants": {},
15 | "tableIndexConfig": {
16 | "loadMode": "MMAP",
17 | "streamConfigs": {
18 | "streamType": "kafka",
19 | "stream.kafka.consumer.type": "lowLevel",
20 | "stream.kafka.topic.name": "mysql.fakeshop.orders",
21 | "stream.kafka.decoder.class.name": "org.apache.pinot.plugin.stream.kafka.KafkaJSONMessageDecoder",
22 | "stream.kafka.hlc.zk.connect.string": "zookeeper:2181/kafka",
23 | "stream.kafka.consumer.factory.class.name": "org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory",
24 | "stream.kafka.zk.broker.url": "zookeeper:2181/kafka",
25 | "stream.kafka.broker.list": "kafka:9092",
26 | "realtime.segment.flush.threshold.size": 30,
27 | "realtime.segment.flush.threshold.rows": 30
28 | }
29 | },
30 | "ingestionConfig" : {
31 | "complexTypeConfig": {
32 | "delimeter": "."
33 | }
34 | },
35 | "metadata": {
36 | "customConfigs": {}
37 | },
38 | "upsertConfig": {
39 | "mode": "FULL"
40 | },
41 | "routing": {
42 | "instanceSelectorType": "strictReplicaGroup"
43 | }
44 | }
--------------------------------------------------------------------------------
/pinot-upserts/simulator/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9.9-bullseye
2 |
3 | WORKDIR /workdir
4 |
5 | COPY requirements.txt .
6 | RUN pip install --no-cache-dir -r requirements.txt
7 |
8 | COPY . .
9 |
10 | ENTRYPOINT ["python", "seeder.py"]
--------------------------------------------------------------------------------
/pinot-upserts/simulator/requirements.txt:
--------------------------------------------------------------------------------
1 | Faker==13.15.0
2 | kafka-python==2.0.2
3 | mysql-connector-python==8.0.27
4 | requests==2.28.1
--------------------------------------------------------------------------------
/pinot-upserts/simulator/seeder.py:
--------------------------------------------------------------------------------
1 | import random, time
2 | from mysql.connector import connect, Error
3 | from faker import Faker
4 | from faker.providers import company
5 |
6 | # CONFIG
7 | usersLimit = 1000
8 | productsLimit = 100
9 | ordersLimit = 100000
10 | orderInterval = 100
11 | itemPriceMin = 5
12 | itemPriceMax = 500
13 | categories = ['electronics', 'groceries', 'health', 'household', 'automotive']
14 |
15 | mysqlHost = 'mysql'
16 | mysqlPort = '3306'
17 | mysqlUser = 'mysqluser'
18 | mysqlPass = 'mysqlpw'
19 | debeziumHostPort = 'debezium:8083'
20 | kafkaHostPort = 'kafka:9092'
21 |
22 | # INSERT TEMPLATES
23 | insert_user_tpl = "INSERT INTO fakeshop.users (first_name, last_name, email, country) VALUES ( %s, %s, %s, %s )"
24 | insert_product_tpl = "INSERT INTO fakeshop.products (name, category, price) VALUES ( %s, %s, %s )"
25 | insert_order_tpl = "INSERT INTO fakeshop.orders (user_id, product_id, quantity, total) VALUES ( %s, %s, %s, %s )"
26 |
27 | fake = Faker()
28 | fake.add_provider(company)
29 |
30 | try:
31 | with connect(
32 | host=mysqlHost,
33 | user=mysqlUser,
34 | password=mysqlPass,
35 | ) as connection:
36 | with connection.cursor() as cursor:
37 |             print("Seeding fakeshop database...")
38 | cursor.executemany(
39 | insert_user_tpl,
40 | [
41 | (
42 | fake.first_name(),
43 | fake.last_name(),
44 | fake.email(),
45 | fake.country()
46 | ) for i in range(usersLimit)
47 | ]
48 | )
49 | cursor.executemany(
50 | insert_product_tpl,
51 | [
52 | (
53 | fake.company(),
54 | random.choice(categories),
55 | random.randint(itemPriceMin*100,itemPriceMax*100)/100
56 | ) for i in range(productsLimit)
57 | ]
58 | )
59 | connection.commit()
60 |
61 | print("Getting product ID and PRICE as tuples...")
62 | cursor.execute("SELECT id, price FROM fakeshop.products")
63 | product_prices = [(row[0], row[1]) for row in cursor]
64 |
65 | print("Preparing to loop + seed orders")
66 | for i in range(ordersLimit):
67 |                 # Get a random user and a product to order
68 | product = random.choice(product_prices)
69 |                 user = random.randint(1, usersLimit)
70 | purchase_quantity = random.randint(1,5)
71 |
72 | cursor.execute(
73 | insert_order_tpl,
74 | (
75 | user,
76 | product[0],
77 | purchase_quantity,
78 | product[1] * purchase_quantity
79 | )
80 | )
81 | connection.commit()
82 |
83 | #Pause
84 | time.sleep(orderInterval/1000)
85 |
86 | connection.close()
87 |
88 | except Error as e:
89 | print(e)
--------------------------------------------------------------------------------
/postgres-docker/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 | services:
3 | postgres:
4 | image: debezium/postgres:13
5 | ports:
6 | - 5432:5432
7 | healthcheck:
8 | test: "pg_isready -U postgresuser -d shipment_db"
9 | interval: 2s
10 | timeout: 20s
11 | retries: 10
12 | environment:
13 | - POSTGRES_USER=postgresuser
14 | - POSTGRES_PASSWORD=postgrespw
15 | - POSTGRES_DB=shipment_db
16 | - PGPASSWORD=postgrespw
17 | volumes:
18 | - ./scripts:/docker-entrypoint-initdb.d
19 | zookeeper:
20 | image: confluentinc/cp-zookeeper:5.5.3
21 | environment:
22 | ZOOKEEPER_CLIENT_PORT: 2181
23 | kafka:
24 | image: confluentinc/cp-enterprise-kafka:5.5.3
25 | depends_on: [zookeeper]
26 | environment:
27 | KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
28 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
29 | KAFKA_BROKER_ID: 1
30 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
31 | KAFKA_JMX_PORT: 9991
32 | ports:
33 | - 9092:9092
34 | debezium:
35 | image: debezium/connect:1.4
36 | environment:
37 | BOOTSTRAP_SERVERS: kafka:9092
38 | GROUP_ID: 1
39 | CONFIG_STORAGE_TOPIC: connect_configs
40 | OFFSET_STORAGE_TOPIC: connect_offsets
41 | KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
42 | VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
43 | CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
44 | CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
45 | depends_on: [kafka]
46 | ports:
47 | - 8083:8083
48 | schema-registry:
49 | image: confluentinc/cp-schema-registry:5.5.3
50 | environment:
51 | - SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL=zookeeper:2181
52 | - SCHEMA_REGISTRY_HOST_NAME=schema-registry
53 | - SCHEMA_REGISTRY_LISTENERS=http://schema-registry:8081,http://localhost:8081
54 | depends_on: [zookeeper, kafka]
--------------------------------------------------------------------------------
/postgres-docker/scripts/shipments-db.sql:
--------------------------------------------------------------------------------
1 |
2 | CREATE TABLE IF NOT EXISTS shipments
3 | (
4 | shipment_id bigint NOT NULL,
5 | order_id bigint NOT NULL,
6 | date_created character varying(255) COLLATE pg_catalog."default",
7 | status character varying(25) COLLATE pg_catalog."default",
8 | CONSTRAINT shipments_pkey PRIMARY KEY (shipment_id)
9 | );
10 |
11 |
12 | INSERT INTO shipments values (30500,10500,'2021-01-21','COMPLETED');
13 | INSERT INTO shipments values (31500,11500,'2021-04-21','COMPLETED');
14 | INSERT INTO shipments values (32500,12500,'2021-05-31','PROCESSING');
--------------------------------------------------------------------------------
/quarkus-websockets-dashboard/README.md:
--------------------------------------------------------------------------------
1 | # websockets-quickstart project
2 |
3 | This project uses Quarkus, the Supersonic Subatomic Java Framework.
4 |
5 | If you want to learn more about Quarkus, please visit its website: https://quarkus.io/ .
6 |
7 | ## Running the application in dev mode
8 |
9 | You can run your application in dev mode that enables live coding using:
10 | ```shell script
11 | ./mvnw compile quarkus:dev
12 | ```
13 |
14 | ## Packaging and running the application
15 |
16 | The application can be packaged using:
17 | ```shell script
18 | ./mvnw package
19 | ```
20 | It produces the `websockets-quickstart-1.0.0-SNAPSHOT-runner.jar` file in the `/target` directory.
21 | Be aware that it’s not an _über-jar_ as the dependencies are copied into the `target/lib` directory.
22 |
23 | If you want to build an _über-jar_, execute the following command:
24 | ```shell script
25 | ./mvnw package -Dquarkus.package.type=uber-jar
26 | ```
27 |
28 | The application is now runnable using `java -jar target/websockets-quickstart-1.0.0-SNAPSHOT-runner.jar`.
29 |
30 | ## Creating a native executable
31 |
32 | You can create a native executable using:
33 | ```shell script
34 | ./mvnw package -Pnative
35 | ```
36 |
37 | Or, if you don't have GraalVM installed, you can run the native executable build in a container using:
38 | ```shell script
39 | ./mvnw package -Pnative -Dquarkus.native.container-build=true
40 | ```
41 |
42 | You can then execute your native executable with: `./target/websockets-quickstart-1.0.0-SNAPSHOT-runner`
43 |
44 | If you want to learn more about building native executables, please consult https://quarkus.io/guides/maven-tooling.html.
45 |
--------------------------------------------------------------------------------
/quarkus-websockets-dashboard/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
3 |   <modelVersion>4.0.0</modelVersion>
4 |   <groupId>com.edu.samples</groupId>
5 |   <artifactId>quarkus-websockets-dashboard</artifactId>
6 |   <version>1.0.0-SNAPSHOT</version>
7 |   <properties>
8 |     <compiler-plugin.version>3.8.1</compiler-plugin.version>
9 |     <maven.compiler.parameters>true</maven.compiler.parameters>
10 |     <maven.compiler.source>11</maven.compiler.source>
11 |     <maven.compiler.target>11</maven.compiler.target>
12 |     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
13 |     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
14 |     <quarkus-plugin.version>1.11.3.Final</quarkus-plugin.version>
15 |     <quarkus.platform.artifact-id>quarkus-universe-bom</quarkus.platform.artifact-id>
16 |     <quarkus.platform.group-id>io.quarkus</quarkus.platform.group-id>
17 |     <quarkus.platform.version>1.11.3.Final</quarkus.platform.version>
18 |     <surefire-plugin.version>3.0.0-M5</surefire-plugin.version>
19 |   </properties>
20 |   <dependencyManagement>
21 |     <dependencies>
22 |       <dependency>
23 |         <groupId>${quarkus.platform.group-id}</groupId>
24 |         <artifactId>${quarkus.platform.artifact-id}</artifactId>
25 |         <version>${quarkus.platform.version}</version>
26 |         <type>pom</type>
27 |         <scope>import</scope>
28 |       </dependency>
29 |     </dependencies>
30 |   </dependencyManagement>
31 |   <dependencies>
32 |     <dependency>
33 |       <groupId>io.quarkus</groupId>
34 |       <artifactId>quarkus-undertow-websockets</artifactId>
35 |     </dependency>
36 |     <dependency>
37 |       <groupId>io.quarkus</groupId>
38 |       <artifactId>quarkus-arc</artifactId>
39 |     </dependency>
40 |     <dependency>
41 |       <groupId>io.quarkus</groupId>
42 |       <artifactId>quarkus-junit5</artifactId>
43 |       <scope>test</scope>
44 |     </dependency>
45 |     <dependency>
46 |       <groupId>io.quarkus</groupId>
47 |       <artifactId>quarkus-scheduler</artifactId>
48 |     </dependency>
49 |   </dependencies>
50 |   <build>
51 |     <plugins>
52 |       <plugin>
53 |         <groupId>io.quarkus</groupId>
54 |         <artifactId>quarkus-maven-plugin</artifactId>
55 |         <version>${quarkus-plugin.version}</version>
56 |         <extensions>true</extensions>
57 |         <executions>
58 |           <execution>
59 |             <goals>
60 |               <goal>build</goal>
61 |               <goal>generate-code</goal>
62 |               <goal>generate-code-tests</goal>
63 |             </goals>
64 |           </execution>
65 |         </executions>
66 |       </plugin>
67 |       <plugin>
68 |         <artifactId>maven-compiler-plugin</artifactId>
69 |         <version>${compiler-plugin.version}</version>
70 |       </plugin>
71 |       <plugin>
72 |         <artifactId>maven-surefire-plugin</artifactId>
73 |         <version>${surefire-plugin.version}</version>
74 |         <configuration>
75 |           <systemPropertyVariables>
76 |             <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
77 |             <maven.home>${maven.home}</maven.home>
78 |           </systemPropertyVariables>
79 |         </configuration>
80 |       </plugin>
81 |     </plugins>
82 |   </build>
83 |   <profiles>
84 |     <profile>
85 |       <id>native</id>
86 |       <activation>
87 |         <property>
88 |           <name>native</name>
89 |         </property>
90 |       </activation>
91 |       <build>
92 |         <plugins>
93 |           <plugin>
94 |             <artifactId>maven-failsafe-plugin</artifactId>
95 |             <version>${surefire-plugin.version}</version>
96 |             <executions>
97 |               <execution>
98 |                 <goals>
99 |                   <goal>integration-test</goal>
100 |                   <goal>verify</goal>
101 |                 </goals>
102 |                 <configuration>
103 |                   <systemPropertyVariables>
104 |                     <native.image.path>${project.build.directory}/${project.build.finalName}-runner</native.image.path>
105 |                     <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
106 |                     <maven.home>${maven.home}</maven.home>
107 |                   </systemPropertyVariables>
108 |                 </configuration>
109 |               </execution>
110 |             </executions>
111 |           </plugin>
112 |         </plugins>
113 |       </build>
114 |       <properties>
115 |         <quarkus.package.type>native</quarkus.package.type>
116 |       </properties>
117 |     </profile>
118 |   </profiles>
119 | </project>
/quarkus-websockets-dashboard/src/main/docker/Dockerfile.fast-jar:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Dquarkus.package.type=fast-jar
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.fast-jar -t quarkus/websockets-quickstart-fast-jar .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/websockets-quickstart-fast-jar
15 | #
16 | # If you want to include the debug port into your docker image
17 | # you will have to expose the debug port (default 5005) like this: EXPOSE 8080 5005
18 | #
19 | # Then run the container using :
20 | #
21 | # docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/websockets-quickstart-fast-jar
22 | #
23 | ###
24 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3
25 |
26 | ARG JAVA_PACKAGE=java-11-openjdk-headless
27 | ARG RUN_JAVA_VERSION=1.3.8
28 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
29 | # Install java and the run-java script
30 | # Also set up permissions for user `1001`
31 | RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
32 | && microdnf update \
33 | && microdnf clean all \
34 | && mkdir /deployments \
35 | && chown 1001 /deployments \
36 | && chmod "g+rwX" /deployments \
37 | && chown 1001:root /deployments \
38 | && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
39 | && chown 1001 /deployments/run-java.sh \
40 | && chmod 540 /deployments/run-java.sh \
41 | && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/lib/security/java.security
42 |
43 | # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
44 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
45 | # We make four distinct layers so if there are application changes the library layers can be re-used
46 | COPY --chown=1001 target/quarkus-app/lib/ /deployments/lib/
47 | COPY --chown=1001 target/quarkus-app/*.jar /deployments/
48 | COPY --chown=1001 target/quarkus-app/app/ /deployments/app/
49 | COPY --chown=1001 target/quarkus-app/quarkus/ /deployments/quarkus/
50 |
51 | EXPOSE 8080
52 | USER 1001
53 |
54 | ENTRYPOINT [ "/deployments/run-java.sh" ]
55 |
--------------------------------------------------------------------------------
/quarkus-websockets-dashboard/src/main/docker/Dockerfile.jvm:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.jvm -t quarkus/websockets-quickstart-jvm .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/websockets-quickstart-jvm
15 | #
16 | # If you want to include the debug port into your docker image
17 | # you will have to expose the debug port (default 5005) like this: EXPOSE 8080 5005
18 | #
19 | # Then run the container using :
20 | #
21 | # docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/websockets-quickstart-jvm
22 | #
23 | ###
24 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3
25 |
26 | ARG JAVA_PACKAGE=java-11-openjdk-headless
27 | ARG RUN_JAVA_VERSION=1.3.8
28 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
29 | # Install java and the run-java script
30 | # Also set up permissions for user `1001`
31 | RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
32 | && microdnf update \
33 | && microdnf clean all \
34 | && mkdir /deployments \
35 | && chown 1001 /deployments \
36 | && chmod "g+rwX" /deployments \
37 | && chown 1001:root /deployments \
38 | && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
39 | && chown 1001 /deployments/run-java.sh \
40 | && chmod 540 /deployments/run-java.sh \
41 | && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/lib/security/java.security
42 |
43 | # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
44 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
45 | COPY target/lib/* /deployments/lib/
46 | COPY target/*-runner.jar /deployments/app.jar
47 |
48 | EXPOSE 8080
49 | USER 1001
50 |
51 | ENTRYPOINT [ "/deployments/run-java.sh" ]
52 |
--------------------------------------------------------------------------------
/quarkus-websockets-dashboard/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/websockets-quickstart .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/websockets-quickstart
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/quarkus-websockets-dashboard/src/main/java/com/edu/retail/ws/DashboardWebSocket.java:
--------------------------------------------------------------------------------
1 | package com.edu.retail.ws;
2 |
3 | import io.quarkus.scheduler.Scheduled;
4 |
5 | import javax.enterprise.context.ApplicationScoped;
6 | import javax.websocket.*;
7 | import javax.websocket.server.PathParam;
8 | import javax.websocket.server.ServerEndpoint;
9 | import java.util.Map;
10 | import java.util.concurrent.ConcurrentHashMap;
11 | import java.util.concurrent.atomic.AtomicInteger;
12 |
13 | @ServerEndpoint("/dashboard/{clientId}")
14 | @ApplicationScoped
15 | public class DashboardWebSocket {
16 |
17 |     private Map<String, Session> sessions = new ConcurrentHashMap<>();
18 | private AtomicInteger totalOrders = new AtomicInteger();
19 |
20 | @OnOpen
21 | public void onOpen(Session session, @PathParam("clientId") String clientId) {
22 | sessions.put(clientId, session);
23 | }
24 |
25 | @OnClose
26 | public void onClose(Session session, @PathParam("clientId") String clientId) {
27 | sessions.remove(clientId);
28 | }
29 |
30 | @OnError
31 | public void onError(Session session, @PathParam("clientId") String clientId, Throwable throwable) {
32 | sessions.remove(clientId);
33 | }
34 |
35 | @Scheduled(every="5s")
36 | void increment() {
37 | if (sessions != null) {
38 | totalOrders.incrementAndGet();
39 | broadcast(String.valueOf(totalOrders));
40 | }
41 | }
42 |
43 | private void broadcast(String message) {
44 | sessions.values().forEach(s -> {
45 | s.getAsyncRemote().sendObject(message, result -> {
46 | if (result.getException() != null) {
47 | System.out.println("Unable to send message: " + result.getException());
48 | }
49 | });
50 | });
51 | }
52 |
53 | }
54 |
--------------------------------------------------------------------------------
/quarkus-websockets-dashboard/src/main/resources/META-INF/resources/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 | <head>
4 |     <meta charset="UTF-8">
5 |     <title>Sales Analytics Dashboard</title>
6 | </head>
7 | <body>
8 |     <h1>Sales Analytics Dashboard</h1>
9 |     <!-- The connect button and the totalOrders span are wired up by js/dashboard.js -->
10 |     <button id="connect" type="button" onclick="connect();">Connect</button>
11 |     <div>
12 |         <span>Total orders:</span>
13 |         <span id="totalOrders">N/A</span>
14 |     </div>
15 |     <script src="https://code.jquery.com/jquery-3.5.1.min.js"></script>
16 |     <script src="js/dashboard.js" type="text/javascript"></script>
17 | </body>
18 | </html>
/quarkus-websockets-dashboard/src/main/resources/META-INF/resources/js/dashboard.js:
--------------------------------------------------------------------------------
1 | "use strict"
2 | var connected = false;
3 | var socket;
4 |
5 | function connect() {
6 | if (! connected) {
7 | var clientId = generateClientId(6);
8 | socket = new WebSocket("ws://" + location.host + "/dashboard/" + clientId);
9 |
10 | socket.onopen = function() {
11 | connected = true;
12 | console.log("Connected to the web socket with clientId [" + clientId + "]");
13 | $("#connect").attr("disabled", true);
14 | $("#connect").text("Connected");
15 | };
16 |         socket.onmessage = function(m) {
17 | console.log("Got message: " + m.data);
18 | $("#totalOrders").text(m.data);
19 | };
20 | }
21 | }
22 |
23 | function generateClientId(length) {
24 | var result = '';
25 | var characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
26 | var charactersLength = characters.length;
27 | for ( var i = 0; i < length; i++ ) {
28 | result += characters.charAt(Math.floor(Math.random() * charactersLength));
29 | }
30 | return result;
31 | }
32 |
33 |
34 |
--------------------------------------------------------------------------------
/quarkus-websockets-dashboard/src/main/resources/application.properties:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/quarkus-websockets-dashboard/src/main/resources/application.properties
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !target/*-runner
3 | !target/*-runner.jar
4 | !target/lib/*
5 | !target/quarkus-app/*
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/.gitignore:
--------------------------------------------------------------------------------
1 | #Maven
2 | target/
3 | pom.xml.tag
4 | pom.xml.releaseBackup
5 | pom.xml.versionsBackup
6 | release.properties
7 |
8 | # Eclipse
9 | .project
10 | .classpath
11 | .settings/
12 | bin/
13 |
14 | # IntelliJ
15 | .idea
16 | *.ipr
17 | *.iml
18 | *.iws
19 |
20 | # NetBeans
21 | nb-configuration.xml
22 |
23 | # Visual Studio Code
24 | .vscode
25 | .factorypath
26 |
27 | # OSX
28 | .DS_Store
29 |
30 | # Vim
31 | *.swp
32 | *.swo
33 |
34 | # patch
35 | *.orig
36 | *.rej
37 |
38 | # Local environment
39 | .env
40 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/.mvn/wrapper/MavenWrapperDownloader.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2007-present the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | import java.net.*;
17 | import java.io.*;
18 | import java.nio.channels.*;
19 | import java.util.Properties;
20 |
21 | public class MavenWrapperDownloader {
22 |
23 | private static final String WRAPPER_VERSION = "0.5.6";
24 | /**
25 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
26 | */
27 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
28 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
29 |
30 | /**
31 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
32 | * use instead of the default one.
33 | */
34 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
35 | ".mvn/wrapper/maven-wrapper.properties";
36 |
37 | /**
38 | * Path where the maven-wrapper.jar will be saved to.
39 | */
40 | private static final String MAVEN_WRAPPER_JAR_PATH =
41 | ".mvn/wrapper/maven-wrapper.jar";
42 |
43 | /**
44 | * Name of the property which should be used to override the default download url for the wrapper.
45 | */
46 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
47 |
48 | public static void main(String args[]) {
49 | System.out.println("- Downloader started");
50 | File baseDirectory = new File(args[0]);
51 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
52 |
53 | // If the maven-wrapper.properties exists, read it and check if it contains a custom
54 | // wrapperUrl parameter.
55 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
56 | String url = DEFAULT_DOWNLOAD_URL;
57 | if(mavenWrapperPropertyFile.exists()) {
58 | FileInputStream mavenWrapperPropertyFileInputStream = null;
59 | try {
60 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
61 | Properties mavenWrapperProperties = new Properties();
62 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
63 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
64 | } catch (IOException e) {
65 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
66 | } finally {
67 | try {
68 | if(mavenWrapperPropertyFileInputStream != null) {
69 | mavenWrapperPropertyFileInputStream.close();
70 | }
71 | } catch (IOException e) {
72 | // Ignore ...
73 | }
74 | }
75 | }
76 | System.out.println("- Downloading from: " + url);
77 |
78 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
79 | if(!outputFile.getParentFile().exists()) {
80 | if(!outputFile.getParentFile().mkdirs()) {
81 | System.out.println(
82 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
83 | }
84 | }
85 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
86 | try {
87 | downloadFileFromURL(url, outputFile);
88 | System.out.println("Done");
89 | System.exit(0);
90 | } catch (Throwable e) {
91 | System.out.println("- Error downloading");
92 | e.printStackTrace();
93 | System.exit(1);
94 | }
95 | }
96 |
97 | private static void downloadFileFromURL(String urlString, File destination) throws Exception {
98 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
99 | String username = System.getenv("MVNW_USERNAME");
100 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
101 | Authenticator.setDefault(new Authenticator() {
102 | @Override
103 | protected PasswordAuthentication getPasswordAuthentication() {
104 | return new PasswordAuthentication(username, password);
105 | }
106 | });
107 | }
108 | URL website = new URL(urlString);
109 | ReadableByteChannel rbc;
110 | rbc = Channels.newChannel(website.openStream());
111 | FileOutputStream fos = new FileOutputStream(destination);
112 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
113 | fos.close();
114 | rbc.close();
115 | }
116 |
117 | }
118 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/.mvn/wrapper/maven-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/redpanda-quarkus-microserives/risk-service/.mvn/wrapper/maven-wrapper.jar
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
3 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"
3 |          xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
4 |   <modelVersion>4.0.0</modelVersion>
5 |   <groupId>org.acme</groupId>
6 |   <artifactId>redpanda-risk-service</artifactId>
7 |   <version>1.0.0-SNAPSHOT</version>
8 |   <properties>
9 |     <compiler-plugin.version>3.8.1</compiler-plugin.version>
10 |     <maven.compiler.parameters>true</maven.compiler.parameters>
11 |     <maven.compiler.source>11</maven.compiler.source>
12 |     <maven.compiler.target>11</maven.compiler.target>
13 |     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
14 |     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
15 |     <quarkus.platform.artifact-id>quarkus-bom</quarkus.platform.artifact-id>
16 |     <quarkus.platform.group-id>io.quarkus.platform</quarkus.platform.group-id>
17 |     <quarkus.platform.version>2.1.0.Final</quarkus.platform.version>
18 |     <surefire-plugin.version>3.0.0-M5</surefire-plugin.version>
19 |   </properties>
20 |   <dependencyManagement>
21 |     <dependencies>
22 |       <dependency>
23 |         <groupId>${quarkus.platform.group-id}</groupId>
24 |         <artifactId>${quarkus.platform.artifact-id}</artifactId>
25 |         <version>${quarkus.platform.version}</version>
26 |         <type>pom</type>
27 |         <scope>import</scope>
28 |       </dependency>
29 |     </dependencies>
30 |   </dependencyManagement>
31 |   <dependencies>
32 |     <dependency>
33 |       <groupId>io.quarkus</groupId>
34 |       <artifactId>quarkus-smallrye-reactive-messaging-kafka</artifactId>
35 |     </dependency>
36 |     <dependency>
37 |       <groupId>io.quarkus</groupId>
38 |       <artifactId>quarkus-arc</artifactId>
39 |     </dependency>
40 |     <dependency>
41 |       <groupId>io.quarkus</groupId>
42 |       <artifactId>quarkus-junit5</artifactId>
43 |       <scope>test</scope>
44 |     </dependency>
45 |   </dependencies>
46 |   <build>
47 |     <plugins>
48 |       <plugin>
49 |         <groupId>${quarkus.platform.group-id}</groupId>
50 |         <artifactId>quarkus-maven-plugin</artifactId>
51 |         <version>${quarkus.platform.version}</version>
52 |         <extensions>true</extensions>
53 |         <executions>
54 |           <execution>
55 |             <goals>
56 |               <goal>build</goal>
57 |               <goal>generate-code</goal>
58 |               <goal>generate-code-tests</goal>
59 |             </goals>
60 |           </execution>
61 |         </executions>
62 |       </plugin>
63 |       <plugin>
64 |         <artifactId>maven-compiler-plugin</artifactId>
65 |         <version>${compiler-plugin.version}</version>
66 |         <configuration>
67 |           <parameters>${maven.compiler.parameters}</parameters>
68 |         </configuration>
69 |       </plugin>
70 |       <plugin>
71 |         <artifactId>maven-surefire-plugin</artifactId>
72 |         <version>${surefire-plugin.version}</version>
73 |         <configuration>
74 |           <systemPropertyVariables>
75 |             <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
76 |             <maven.home>${maven.home}</maven.home>
77 |           </systemPropertyVariables>
78 |         </configuration>
79 |       </plugin>
80 |     </plugins>
81 |   </build>
82 |   <profiles>
83 |     <profile>
84 |       <id>native</id>
85 |       <activation>
86 |         <property>
87 |           <name>native</name>
88 |         </property>
89 |       </activation>
90 |       <build>
91 |         <plugins>
92 |           <plugin>
93 |             <artifactId>maven-failsafe-plugin</artifactId>
94 |             <version>${surefire-plugin.version}</version>
95 |             <executions>
96 |               <execution>
97 |                 <goals>
98 |                   <goal>integration-test</goal>
99 |                   <goal>verify</goal>
100 |                 </goals>
101 |                 <configuration>
102 |                   <systemPropertyVariables>
103 |                     <native.image.path>${project.build.directory}/${project.build.finalName}-runner</native.image.path>
104 |                     <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
105 |                     <maven.home>${maven.home}</maven.home>
106 |                   </systemPropertyVariables>
107 |                 </configuration>
108 |               </execution>
109 |             </executions>
110 |           </plugin>
111 |         </plugins>
112 |       </build>
113 |       <properties>
114 |         <quarkus.package.type>native</quarkus.package.type>
115 |       </properties>
116 |     </profile>
117 |   </profiles>
118 | </project>
119 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/docker/Dockerfile.jvm:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.jvm -t quarkus/kafka-quickstart-processor-jvm .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/kafka-quickstart-processor-jvm
15 | #
16 | # If you want to include the debug port into your docker image
17 | # you will have to expose the debug port (default 5005) like this : EXPOSE 8080 5005
18 | #
19 | # Then run the container using :
20 | #
21 | # docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/kafka-quickstart-processor-jvm
22 | #
23 | ###
24 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
25 |
26 | ARG JAVA_PACKAGE=java-11-openjdk-headless
27 | ARG RUN_JAVA_VERSION=1.3.8
28 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
29 | # Install java and the run-java script
30 | # Also set up permissions for user `1001`
31 | RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
32 | && microdnf update \
33 | && microdnf clean all \
34 | && mkdir /deployments \
35 | && chown 1001 /deployments \
36 | && chmod "g+rwX" /deployments \
37 | && chown 1001:root /deployments \
38 | && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
39 | && chown 1001 /deployments/run-java.sh \
40 | && chmod 540 /deployments/run-java.sh \
41 | && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security
42 |
43 | # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
44 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
45 | # We make four distinct layers so if there are application changes the library layers can be re-used
46 | COPY --chown=1001 target/quarkus-app/lib/ /deployments/lib/
47 | COPY --chown=1001 target/quarkus-app/*.jar /deployments/
48 | COPY --chown=1001 target/quarkus-app/app/ /deployments/app/
49 | COPY --chown=1001 target/quarkus-app/quarkus/ /deployments/quarkus/
50 |
51 | EXPOSE 8080
52 | USER 1001
53 |
54 | ENTRYPOINT [ "/deployments/run-java.sh" ]
55 |
56 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/docker/Dockerfile.legacy-jar:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Dquarkus.package.type=legacy-jar
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.legacy-jar -t quarkus/kafka-quickstart-processor-legacy-jar .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/kafka-quickstart-processor-legacy-jar
15 | #
16 | # If you want to include the debug port into your docker image
17 | # you will have to expose the debug port (default 5005) like this : EXPOSE 8080 5005
18 | #
19 | # Then run the container using :
20 | #
21 | # docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/kafka-quickstart-processor-legacy-jar
22 | #
23 | ###
24 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
25 |
26 | ARG JAVA_PACKAGE=java-11-openjdk-headless
27 | ARG RUN_JAVA_VERSION=1.3.8
28 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
29 | # Install java and the run-java script
30 | # Also set up permissions for user `1001`
31 | RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
32 | && microdnf update \
33 | && microdnf clean all \
34 | && mkdir /deployments \
35 | && chown 1001 /deployments \
36 | && chmod "g+rwX" /deployments \
37 | && chown 1001:root /deployments \
38 | && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
39 | && chown 1001 /deployments/run-java.sh \
40 | && chmod 540 /deployments/run-java.sh \
41 | && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security
42 |
43 | # Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
44 | ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
45 | COPY target/lib/* /deployments/lib/
46 | COPY target/*-runner.jar /deployments/app.jar
47 |
48 | EXPOSE 8080
49 | USER 1001
50 |
51 | ENTRYPOINT [ "/deployments/run-java.sh" ]
52 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/docker/Dockerfile.native:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native -t quarkus/kafka-quickstart-processor .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/kafka-quickstart-processor
15 | #
16 | ###
17 | FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
18 | WORKDIR /work/
19 | RUN chown 1001 /work \
20 | && chmod "g+rwX" /work \
21 | && chown 1001:root /work
22 | COPY --chown=1001:root target/*-runner /work/application
23 |
24 | EXPOSE 8080
25 | USER 1001
26 |
27 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
28 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/docker/Dockerfile.native-distroless:
--------------------------------------------------------------------------------
1 | ####
2 | # This Dockerfile is used in order to build a distroless container that runs the Quarkus application in native (no JVM) mode
3 | #
4 | # Before building the container image run:
5 | #
6 | # ./mvnw package -Pnative
7 | #
8 | # Then, build the image with:
9 | #
10 | # docker build -f src/main/docker/Dockerfile.native-distroless -t quarkus/kafka-quickstart-processor .
11 | #
12 | # Then run the container using:
13 | #
14 | # docker run -i --rm -p 8080:8080 quarkus/kafka-quickstart-processor
15 | #
16 | ###
17 | FROM quay.io/quarkus/quarkus-distroless-image:1.0
18 | COPY target/*-runner /application
19 |
20 | EXPOSE 8080
21 | USER nonroot
22 |
23 | CMD ["./application", "-Dquarkus.http.host=0.0.0.0"]
24 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/java/redpanda/samples/edm/RedpandaEventConsumer.java:
--------------------------------------------------------------------------------
1 | package redpanda.samples.edm;
2 |
3 | import io.smallrye.reactive.messaging.annotations.Blocking;
4 | import java.util.ArrayList;
5 | import java.util.List;
6 | import javax.enterprise.context.ApplicationScoped;
7 | import org.eclipse.microprofile.reactive.messaging.Incoming;
8 | import org.eclipse.microprofile.reactive.messaging.Outgoing;
9 | import org.slf4j.Logger;
10 | import org.slf4j.LoggerFactory;
11 | import redpanda.samples.edm.serde.PaymentReceivedEvent;
12 | import redpanda.samples.edm.serde.PaymentValidatedEvent;
13 |
14 |
15 | @ApplicationScoped
16 | public class RedpandaEventConsumer {
17 |
18 | private static final Logger LOG = LoggerFactory.getLogger(RedpandaEventConsumer.class);
19 |
20 | private List<String> flaggedCardNumbers = new ArrayList<>();
21 |
22 | @Incoming("flagged-txn")
23 | @Blocking
24 | public void onFlaggedTransaction(String cardNumber)
25 | throws InterruptedException {
26 | LOG.info(String.format("A flagged transaction received with card number %s", cardNumber));
27 | this.flaggedCardNumbers.add(cardNumber);
28 | }
29 |
30 | @Incoming("payments-in")
31 | @Outgoing("payments-out")
32 | @Blocking
33 | public PaymentValidatedEvent onPayment(PaymentReceivedEvent payment)
34 | throws InterruptedException {
35 | try {
36 | LOG.info(String.format("Payment received. Tx ID: %d, Card Number: %s, Amount: %d",
37 | payment.getTransactionId(),
38 | payment.getCardNumber(),
39 | payment.getAmount()
40 | ));
41 |
42 | String cardNumber = payment.getCardNumber();
43 | PaymentValidatedEvent validatedEvent = new PaymentValidatedEvent();
44 | validatedEvent.setTransactionID(payment.getTransactionId());
45 |
46 | if (flaggedCardNumbers.contains(cardNumber)) {
47 | LOG.info(String.format("!!!! PAYMENT DECLINED FOR CARD NUMBER %s", cardNumber));
48 | validatedEvent.setPaymentStatus("DECLINED");
49 | } else {
50 | LOG.info("Payment validated.");
51 | validatedEvent.setPaymentStatus("VALID");
52 | }
53 | return validatedEvent;
54 | } catch (Exception e) {
55 | LOG.error("Error while validating payment.");
56 | throw e;
57 | }
58 | }
59 |
60 | }
61 |
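Because both the incoming and outgoing payment channels use the Quarkus `ObjectMapper` serde, the events travel as plain JSON whose field names come from the POJO getters. A small standalone sketch of the wire format, serializing with Jackson directly (already on the classpath via the serde classes); the transaction ID, card number, and amount are made-up test values:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import redpanda.samples.edm.serde.PaymentReceivedEvent;

public class WireFormatExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        PaymentReceivedEvent payment = new PaymentReceivedEvent();
        payment.setTransactionId(1001);
        payment.setCardNumber("4242-4242-4242-4242");
        payment.setAmount(250);
        // Prints something like:
        // {"transactionId":1001,"cardNumber":"4242-4242-4242-4242","amount":250}
        System.out.println(mapper.writeValueAsString(payment));
    }
}
```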
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/java/redpanda/samples/edm/serde/PaymentEventDeserializer.java:
--------------------------------------------------------------------------------
1 | package redpanda.samples.edm.serde;
2 |
3 | import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer;
4 |
5 | public class PaymentEventDeserializer extends ObjectMapperDeserializer<PaymentReceivedEvent> {
6 |
7 | public PaymentEventDeserializer() {
8 | super(PaymentReceivedEvent.class);
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/java/redpanda/samples/edm/serde/PaymentReceivedEvent.java:
--------------------------------------------------------------------------------
1 | package redpanda.samples.edm.serde;
2 |
3 | public class PaymentReceivedEvent {
4 | private int transactionId;
5 | private String cardNumber;
6 | private int amount;
7 |
8 | public PaymentReceivedEvent() {
9 | }
10 |
11 | public int getTransactionId() {
12 | return transactionId;
13 | }
14 |
15 | public void setTransactionId(int transactionId) {
16 | this.transactionId = transactionId;
17 | }
18 |
19 | public String getCardNumber() {
20 | return cardNumber;
21 | }
22 |
23 | public void setCardNumber(String cardNumber) {
24 | this.cardNumber = cardNumber;
25 | }
26 |
27 | public int getAmount() {
28 | return amount;
29 | }
30 |
31 | public void setAmount(int amount) {
32 | this.amount = amount;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/java/redpanda/samples/edm/serde/PaymentValidatedEvent.java:
--------------------------------------------------------------------------------
1 | package redpanda.samples.edm.serde;
2 |
3 | public class PaymentValidatedEvent {
4 | private int transactionID;
5 | private String paymentStatus;
6 |
7 | public PaymentValidatedEvent(int transactionID, String paymentStatus) {
8 | this.transactionID = transactionID; this.paymentStatus = paymentStatus; }
9 |
10 | public PaymentValidatedEvent() {
11 | }
12 |
13 | public int getTransactionID() {
14 | return transactionID;
15 | }
16 |
17 | public void setTransactionID(int transactionID) {
18 | this.transactionID = transactionID;
19 | }
20 |
21 | public String getPaymentStatus() {
22 | return paymentStatus;
23 | }
24 |
25 | public void setPaymentStatus(String paymentStatus) {
26 | this.paymentStatus = paymentStatus;
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/redpanda-quarkus-microserives/risk-service/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | kafka.bootstrap.servers=localhost:9092
2 |
3 | mp.messaging.incoming.payments-in.connector=smallrye-kafka
4 | mp.messaging.incoming.payments-in.topic=payments-in
5 | mp.messaging.incoming.payments-in.auto.offset.reset=earliest
6 | mp.messaging.incoming.payments-in.group.id=rs-oin
7 | mp.messaging.incoming.payments-in.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
8 | mp.messaging.incoming.payments-in.value.deserializer=redpanda.samples.edm.serde.PaymentEventDeserializer
9 |
10 | mp.messaging.incoming.flagged-txn.connector=smallrye-kafka
11 | mp.messaging.incoming.flagged-txn.topic=flagged-txn
12 | mp.messaging.incoming.flagged-txn.auto.offset.reset=earliest
13 | mp.messaging.incoming.flagged-txn.group.id=rs-txn
14 | mp.messaging.incoming.flagged-txn.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
15 | mp.messaging.incoming.flagged-txn.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer
16 |
17 | mp.messaging.outgoing.payments-out.connector=smallrye-kafka
18 | mp.messaging.outgoing.payments-out.topic=payments-out
19 | mp.messaging.outgoing.payments-out.value.serializer=io.quarkus.kafka.client.serialization.ObjectMapperSerializer
20 |
21 |
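To exercise the service, something has to publish to the `flagged-txn` topic configured above. A minimal producer sketch with the standard `kafka-clients` API, assuming a broker (Kafka or Redpanda) reachable at `localhost:9092` and using a made-up card number:

```java
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class FlagCard {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // The risk service remembers this card number and declines
            // any payment event that uses it afterwards.
            producer.send(new ProducerRecord<>("flagged-txn", "4242-4242-4242-4242"));
        }
    }
}
```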
--------------------------------------------------------------------------------
/spacex-launch-analysis/pinot/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/spacex-launch-analysis/pinot/.DS_Store
--------------------------------------------------------------------------------
/spacex-launch-analysis/pinot/batch-job-spec.yaml:
--------------------------------------------------------------------------------
1 | executionFrameworkSpec:
2 | name: 'standalone'
3 | segmentGenerationJobRunnerClassName: 'org.apache.pinot.plugin.ingestion.batch.standalone.SegmentGenerationJobRunner'
4 | segmentTarPushJobRunnerClassName: 'org.apache.pinot.plugin.ingestion.batch.standalone.SegmentTarPushJobRunner'
5 | segmentUriPushJobRunnerClassName: 'org.apache.pinot.plugin.ingestion.batch.standalone.SegmentUriPushJobRunner'
6 | jobType: SegmentCreationAndTarPush
7 | inputDirURI: '/path/to/rawdata/'
8 | includeFileNamePattern: 'glob:**/*.csv'
9 | outputDirURI: '/path/to/segments/'
10 | overwriteOutput: true
11 | pinotFSSpecs:
12 | - scheme: file
13 | className: org.apache.pinot.spi.filesystem.LocalPinotFS
14 | recordReaderSpec:
15 | dataFormat: 'csv'
16 | className: 'org.apache.pinot.plugin.inputformat.csv.CSVRecordReader'
17 | configClassName: 'org.apache.pinot.plugin.inputformat.csv.CSVRecordReaderConfig'
18 | tableSpec:
19 | tableName: 'launches'
20 | schemaURI: 'http://localhost:9000/tables/launches/schema'
21 | tableConfigURI: 'http://localhost:9000/tables/launches'
22 | pinotClusterSpecs:
23 | - controllerURI: 'http://localhost:9000'
24 |
--------------------------------------------------------------------------------
/spacex-launch-analysis/pinot/launches-schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "schemaName": "launches",
3 | "dimensionFieldSpecs": [
4 | {
5 | "name": "booster_version",
6 | "dataType": "STRING"
7 | },
8 | {
9 | "name": "launch_site",
10 | "dataType": "STRING"
11 | },
12 | {
13 | "name": "payload",
14 | "dataType": "STRING"
15 | },
16 | {
17 | "name": "payload_mass_kg",
18 | "dataType": "FLOAT"
19 | },
20 | {
21 | "name": "orbit",
22 | "dataType": "STRING"
23 | },
24 | {
25 | "name": "customer",
26 | "dataType": "STRING"
27 | },
28 | {
29 | "name": "mission_outcome",
30 | "dataType": "STRING"
31 | },
32 | {
33 | "name": "landing_outcome",
34 | "dataType": "STRING"
35 | }
36 | ],
37 | "metricFieldSpecs": [
38 | ],
39 | "dateTimeFieldSpecs": [
40 | {
41 | "name": "launch_date",
42 | "dataType": "STRING",
43 | "format" : "1:DAYS:SIMPLE_DATE_FORMAT:dd-MM-yyyy",
44 | "granularity": "1:DAYS"
45 | },
46 | {
47 | "name": "launch_time",
48 | "dataType": "STRING",
49 | "format" : "1:MINUTES:SIMPLE_DATE_FORMAT:HH:mm:ss",
50 | "granularity": "1:MINUTES"
51 | }
52 | ]
53 | }
--------------------------------------------------------------------------------
/spacex-launch-analysis/pinot/launches-table.json:
--------------------------------------------------------------------------------
1 | {
2 | "tableName": "launches",
3 | "segmentsConfig" : {
4 | "timeColumnName": "launch_date",
5 | "timeType": "DAYS",
6 | "replication" : "1",
7 | "schemaName" : "launches"
8 | },
9 | "tableIndexConfig" : {
10 | "invertedIndexColumns" : [],
11 | "loadMode" : "MMAP"
12 | },
13 | "tenants" : {
14 | "broker":"DefaultTenant",
15 | "server":"DefaultTenant"
16 | },
17 | "tableType":"OFFLINE",
18 | "metadata": {}
19 | }
--------------------------------------------------------------------------------
/spacex-launch-analysis/pinot/queries.sql:
--------------------------------------------------------------------------------
1 | -- launch_outcomes
2 | SELECT
3 | landing_outcome,
4 | count(landing_outcome) as lo_count
5 | FROM launches
6 | GROUP BY landing_outcome
7 |
8 | -- launches by launch site
9 | select
10 | launch_site,
11 | landing_outcome,
12 | count(launch_site) as frequency
13 | from launches
14 | group by launch_site,landing_outcome
15 |
16 | -- launches by customer
17 | select
18 | customer, count(customer) as total_launches
19 | from launches
20 | group by customer
21 | order by total_launches desc
22 |
23 | -- payload mass by year
24 | select
25 | year(FromDateTime(launch_date,'dd-MM-yyyy')) as launch_year,
26 | payload_mass_kg
27 | from launches
28 |
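The same queries can also be issued from Java. A sketch using the `pinot-java-client` library (an assumed dependency, `org.apache.pinot:pinot-java-client`), pointing at the broker on `localhost:8000` as the Streamlit app does:

```java
import org.apache.pinot.client.Connection;
import org.apache.pinot.client.ConnectionFactory;
import org.apache.pinot.client.ResultSet;
import org.apache.pinot.client.ResultSetGroup;

public class LandingOutcomes {
    public static void main(String[] args) {
        Connection connection = ConnectionFactory.fromHostList("localhost:8000");
        ResultSetGroup results = connection.execute(
            "SELECT landing_outcome, count(landing_outcome) AS lo_count "
          + "FROM launches GROUP BY landing_outcome");
        // The first result set holds the aggregation rows.
        ResultSet rs = results.getResultSet(0);
        for (int row = 0; row < rs.getRowCount(); row++) {
            System.out.println(rs.getString(row, 0) + " -> " + rs.getLong(row, 1));
        }
    }
}
```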
--------------------------------------------------------------------------------
/spacex-launch-analysis/rawdata/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/spacex-launch-analysis/rawdata/.DS_Store
--------------------------------------------------------------------------------
/spacex-launch-analysis/streamlit/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dunithd/edu-samples/d937f12c81254a859867e125f11af22ed5943c2b/spacex-launch-analysis/streamlit/.DS_Store
--------------------------------------------------------------------------------
/spacex-launch-analysis/streamlit/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 |
8 | [dev-packages]
9 |
10 | [requires]
11 | python_version = "3.8"
12 |
--------------------------------------------------------------------------------
/spacex-launch-analysis/streamlit/app.py:
--------------------------------------------------------------------------------
1 | from pinotdb import connect
2 | import pandas as pd
3 | import streamlit as st
4 | import numpy as np
5 | import altair as alt
6 | import plotly.express as px
7 |
8 | st.title('SpaceX Launch Statistics')
9 | st.markdown("Perform exploratory data analysis on SpaceX launch data set with Apache Pinot")
10 |
11 | conn = connect(host='localhost', port=8000, path='/query/sql', scheme='http')
12 | curs = conn.cursor()
13 |
14 | # Breakdown of the landing outcome
15 | st.subheader('Breakdown of landing outcome')
16 | st.markdown("What are the different landing outcomes and their frequencies? What percentage of landings were successful?")
17 | curs.execute("""
18 | SELECT
19 | landing_outcome,count(landing_outcome) as frequency
20 | FROM launches
21 | GROUP BY landing_outcome
22 | LIMIT 200
23 | """)
24 | df = pd.DataFrame(curs, columns=[item[0] for item in curs.description])
25 | fig = px.pie(df, values='frequency', names='landing_outcome')
26 | st.plotly_chart(fig, use_container_width=True)
27 |
28 | # Launches by customer
29 | st.subheader('Launches by customer')
30 | st.markdown("Which customer has spent the most money on SpaceX? Apparently, it is NASA.")
31 | curs.execute("""
32 | SELECT
33 | customer,
34 | count(customer) as total_launches
35 | FROM launches
36 | GROUP BY customer
37 | ORDER BY total_launches DESC
38 | """)
39 | df = pd.DataFrame(curs, columns=[item[0] for item in curs.description])
40 | chart = alt.Chart(df).mark_bar().encode(
41 | x='customer:N',
42 | y='total_launches:Q'
43 | )
44 | st.altair_chart(chart,use_container_width=True)
45 |
46 | # Calculate launches per launch site
47 | st.subheader('Launches per launch site')
48 | st.markdown("What were the launch sites used for the missions? How did each site contribute to the total? Which site contributed the most successful landings?")
49 | curs.execute("""
50 | SELECT
51 | launch_site,
52 | landing_outcome,
53 | count(launch_site) as frequency
54 | FROM launches
55 | GROUP BY launch_site,landing_outcome
56 | LIMIT 200
57 | """)
58 | df = pd.DataFrame(curs, columns=[item[0] for item in curs.description])
59 | chart = alt.Chart(df).mark_bar().encode(
60 | x='launch_site:N',
61 | y='frequency:Q',
62 | color='landing_outcome:N'
63 | )
64 | st.altair_chart(chart,use_container_width=True)
65 |
66 | # Payload mass variation by year
67 | st.subheader('Payload mass variation over the years')
68 | st.markdown("How did the payload mass of each mission vary over the past ten years?")
69 | curs.execute("""
70 | SELECT
71 | year(FromDateTime(launch_date,'dd-MM-yyyy')) as launch_year,
72 | payload_mass_kg
73 | FROM launches
74 | """)
75 | df = pd.DataFrame(curs, columns=[item[0] for item in curs.description])
76 | chart = alt.Chart(df).mark_line().encode(
77 | x='launch_year:N',
78 | y='sum(payload_mass_kg):Q'
79 | )
80 | st.altair_chart(chart,use_container_width=True)
81 |
82 | # Close the connection and release its resources
83 | conn.close()
--------------------------------------------------------------------------------
/websockets-testing-with-firecamp/README.md:
--------------------------------------------------------------------------------
1 | # README #
2 |
3 | ## To run the plain text WebSocket server ##
4 |
5 | ```bash
6 | npm install
7 | node server.js
8 | ```
9 | ## To run the JSON WebSocket server ##
10 |
11 | ```bash
12 | npm install
13 | node server-json.js
14 | ```
15 |
16 | The WebSocket server listens on port 8080.
17 |
18 |
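## Testing from code ##

The servers can also be exercised without Firecamp. A minimal client sketch in plain Java (JDK 11+), targeting the plain text server:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.WebSocket;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.CountDownLatch;

public class UpperClient {
    public static void main(String[] args) throws Exception {
        // Expect two messages: the welcome greeting and the upper-cased echo.
        CountDownLatch done = new CountDownLatch(2);
        WebSocket.Listener listener = new WebSocket.Listener() {
            @Override
            public CompletionStage<?> onText(WebSocket ws, CharSequence data, boolean last) {
                System.out.println("server said: " + data);
                done.countDown();
                return WebSocket.Listener.super.onText(ws, data, last);
            }
        };
        WebSocket ws = HttpClient.newHttpClient()
                .newWebSocketBuilder()
                .buildAsync(URI.create("ws://localhost:8080"), listener)
                .join();
        ws.sendText("hello firecamp", true);
        done.await();
        ws.sendClose(WebSocket.NORMAL_CLOSURE, "bye").join();
    }
}
```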
--------------------------------------------------------------------------------
/websockets-testing-with-firecamp/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "websocket": "^1.0.34",
4 | "ws": "^8.3.0"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/websockets-testing-with-firecamp/server-json.js:
--------------------------------------------------------------------------------
1 | const WebSocket = require('ws');
2 |
3 | const PORT = 8080;
4 | const wss = new WebSocket.Server({ port: PORT });
5 |
6 | wss.on('connection', (ws) => {
7 | const clientId = uuidv4();
8 | console.log("Connection accepted from client: " + clientId)
9 | ws.send('Welcome to the toUpper() service!');
10 |
11 | ws.on('message', function message(data) {
12 | const originalMessage = JSON.parse(data);
13 | console.log('received: %s', originalMessage.message);
14 |
15 | const ucMessage = originalMessage.message.toUpperCase();
16 | ws.send(JSON.stringify({"transformed" : ucMessage}));
17 | });
18 |
19 | ws.on("close", () => {
20 | console.log("Websocket client " + clientId + " has been disconnected.")
21 | });
22 | });
23 |
24 | //A function to generate a unique client ID
25 | function uuidv4() {
26 | return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
27 | var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
28 | return v.toString(16);
29 | });
30 | }
31 |
32 | console.log("Websocket server has been started and listening on port " + PORT);
--------------------------------------------------------------------------------
/websockets-testing-with-firecamp/server.js:
--------------------------------------------------------------------------------
1 | const WebSocket = require('ws');
2 |
3 | const PORT = 8080;
4 | const wss = new WebSocket.Server({ port: PORT });
5 |
6 | wss.on('connection', (ws) => {
7 | const clientId = uuidv4();
8 | console.log("Connection accepted from client: " + clientId)
9 | ws.send('Welcome to the toUpper() service!');
10 |
11 | ws.on('message', function message(data) {
12 | const message = data.toString();
13 | console.log('received: %s', message);
14 | ws.send(message.toUpperCase());
15 | });
16 |
17 | ws.on("close", () => {
18 | console.log("Websocket client " + clientId + " has been disconnected.")
19 | });
20 | });
21 |
22 | //A function to generate a unique client ID
23 | function uuidv4() {
24 | return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
25 | var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
26 | return v.toString(16);
27 | });
28 | }
29 |
30 | console.log("Websocket server has been started and listening on port " + PORT);
--------------------------------------------------------------------------------