├── .gitignore
├── Blog - dynamic SQL demo
├── Blog draft.md
├── Dynamic SQL diagram-concept.drawio.png
├── Dynamic SQL diagram-current.drawio.png
├── Dynamic SQL diagram-ideal.drawio.png
├── Rule match_recognize longplay.gif
├── Rule match_recognize longplay.mp4
├── Rule sum,count,group by longplay.gif
├── Rule sum,count,group by longplay.mp4
├── Test recording.gif
├── add rule.gif
├── clear event.gif
├── delete rule.gif
└── start and stop.gif
├── README.md
├── docker-compose-local-job.yaml
├── flink-job
├── .dockerignore
├── .gitignore
├── Dockerfile
├── README.md
├── build.gradle
├── docker-entrypoint.sh
├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── settings.gradle
└── src
│ ├── main
│ ├── java
│ │ └── com
│ │ │ └── ververica
│ │ │ └── field
│ │ │ ├── config
│ │ │ ├── Config.java
│ │ │ ├── Param.java
│ │ │ └── Parameters.java
│ │ │ ├── dynamicrules
│ │ │ ├── Alert.java
│ │ │ ├── JsonMapper.java
│ │ │ ├── KafkaUtils.java
│ │ │ ├── Keyed.java
│ │ │ ├── Main.java
│ │ │ ├── SQLRunner.java
│ │ │ ├── SqlEventParser.java
│ │ │ ├── TimestampAssignable.java
│ │ │ ├── TransactionEvent.java
│ │ │ ├── assigners
│ │ │ │ └── PeriodicTimestampAssigner.java
│ │ │ ├── converters
│ │ │ │ ├── StringConverter.java
│ │ │ │ └── TransactionStringConverter.java
│ │ │ ├── executor
│ │ │ │ └── CustomLocalExecutor.java
│ │ │ ├── functions
│ │ │ │ ├── BroadcastEmbeddedFlinkCluster.java
│ │ │ │ ├── BroadcastEmbeddedFlinkFunction.java
│ │ │ │ ├── JsonDeserializer.java
│ │ │ │ ├── JsonGeneratorWrapper.java
│ │ │ │ ├── JsonSerializer.java
│ │ │ │ ├── SqlDeserializer.java
│ │ │ │ ├── SqlEvent.java
│ │ │ │ ├── TimeStamper.java
│ │ │ │ └── TransactionEventsGenerator.java
│ │ │ ├── logger
│ │ │ │ └── CustomTimeLogger.java
│ │ │ ├── sinks
│ │ │ │ └── AlertsSink.java
│ │ │ ├── sources
│ │ │ │ ├── CustomSocketTextStreamFunction.java
│ │ │ │ ├── SqlsSource.java
│ │ │ │ ├── TimeBasedEvent.java
│ │ │ │ └── TransactionEventsSource.java
│ │ │ └── util
│ │ │ │ ├── SchemaHelper.java
│ │ │ │ └── TimestampHelpers.java
│ │ │ └── sources
│ │ │ ├── BaseGenerator.java
│ │ │ └── Throttler.java
│ └── resources
│ │ ├── log4j2.properties
│ │ ├── logback.xml
│ │ └── test-sql-client-defaults.yaml
│ └── test
│ ├── java
│ └── com
│ │ └── ververica
│ │ └── field
│ │ ├── config
│ │ └── ConfigTest.java
│ │ └── function
│ │ ├── BroadcastEmbeddedFlinkFunctionTest.java
│ │ ├── BroadcastEmbeddedFlinkFunctionTestBase.java
│ │ └── data
│ │ ├── BalanceChangeEvent.java
│ │ ├── CoreServiceUsageBillingEvent.java
│ │ ├── CoreServiceUsageEvent.java
│ │ ├── ShortBillingEvent.java
│ │ ├── SubscriberTerminationBillingEvent.java
│ │ └── SubscriberTerminationEvent.java
│ ├── resources
│ ├── example-input-data
│ │ ├── balance_change_example_data.csv
│ │ ├── balance_change_example_data_1.csv
│ │ ├── balance_change_example_data_2.csv
│ │ ├── balance_change_example_data_3.csv
│ │ ├── broadcast_example_data.csv
│ │ ├── broadcast_example_data_empty.csv
│ │ ├── broadcast_match_recognize_data.csv
│ │ ├── core_service_usage_example_data.csv
│ │ └── subscriber_termination_example_data.csv
│ ├── expected-output-data
│ │ ├── balance_change_alerts.csv
│ │ ├── balance_change_alerts_1.csv
│ │ ├── balance_change_alerts_2.csv
│ │ ├── balance_change_alerts_3.csv
│ │ ├── balance_change_alerts_broadcast.csv
│ │ ├── core_service_usage_alerts.csv
│ │ └── subscriber_termination_alerts.csv
│ └── log4j2-test.properties
│ └── scala
│ └── com
│ └── ververica
│ └── field
│ └── function
│ ├── TestHelpers.scala
│ └── sources
│ ├── TestSource.scala
│ └── TestSourceConfig.scala
└── webapp
├── .dockerignore
├── .eslintrc
├── .gitignore
├── .mvn
└── wrapper
│ ├── MavenWrapperDownloader.java
│ └── maven-wrapper.properties
├── .prettierrc
├── Dockerfile
├── README.md
├── cloud.Dockerfile
├── cloudbuild.yaml
├── mvn
├── MavenWrapperDownloader.java
└── maven-wrapper.properties
├── mvnw
├── mvnw.cmd
├── package-lock.json
├── package.json
├── pom.xml
├── public
├── favicon.ico
└── index.html
├── scripts
└── fix-leader-line.sh
├── src
├── app
│ ├── assets
│ │ ├── app.scss
│ │ ├── flink_squirrel_200_color.png
│ │ └── theme.scss
│ ├── components
│ │ ├── AddRuleModal.tsx
│ │ ├── Alerts.tsx
│ │ ├── App.tsx
│ │ ├── CenteredContainer.tsx
│ │ ├── FieldGroup.tsx
│ │ ├── Header.tsx
│ │ ├── Rules.tsx
│ │ ├── Transactions.tsx
│ │ └── index.ts
│ ├── interfaces
│ │ ├── Alert.ts
│ │ ├── Rule.ts
│ │ ├── Transaction.ts
│ │ └── index.ts
│ └── utils
│ │ ├── index.ts
│ │ └── useLines.ts
├── index.tsx
├── main
│ ├── java
│ │ └── com
│ │ │ └── ververica
│ │ │ └── demo
│ │ │ └── backend
│ │ │ ├── Main.java
│ │ │ ├── configurations
│ │ │ ├── KafkaConsumerConfig.java
│ │ │ ├── KafkaProducerConfig.java
│ │ │ ├── PropertyLogger.java
│ │ │ ├── SwaggerConfig.java
│ │ │ └── WebSocketConfig.java
│ │ │ ├── controllers
│ │ │ ├── DataGenerationController.java
│ │ │ └── SqlsController.java
│ │ │ ├── datasource
│ │ │ ├── DemoTransactionsGenerator.java
│ │ │ ├── RulesBootstrapper.java
│ │ │ ├── Throttler.java
│ │ │ ├── Transaction.java
│ │ │ └── TransactionsGenerator.java
│ │ │ ├── entities
│ │ │ └── Rule.java
│ │ │ ├── exceptions
│ │ │ └── RuleNotFoundException.java
│ │ │ ├── model
│ │ │ └── Alert.java
│ │ │ ├── repositories
│ │ │ ├── SqlRepository.java
│ │ │ └── SqlRepositoryEvent.java
│ │ │ └── services
│ │ │ ├── FlinkSqlService.java
│ │ │ ├── KafkaAlertsPusher.java
│ │ │ ├── KafkaConsumerService.java
│ │ │ ├── KafkaTransactionsConsumerService.java
│ │ │ └── KafkaTransactionsPusher.java
│ └── resources
│ │ └── application.yaml
├── react-app-env.d.ts
├── setupProxy.js
└── test
│ └── java
│ └── com
│ └── ververica
│ └── demo
│ └── backend
│ └── MainTest.java
├── tsconfig.json
├── tslint.json
└── webapp.Dockerfile
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Dynamic SQL diagram-concept.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Dynamic SQL diagram-concept.drawio.png
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Dynamic SQL diagram-current.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Dynamic SQL diagram-current.drawio.png
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Dynamic SQL diagram-ideal.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Dynamic SQL diagram-ideal.drawio.png
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Rule match_recognize longplay.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Rule match_recognize longplay.gif
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Rule match_recognize longplay.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Rule match_recognize longplay.mp4
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Rule sum,count,group by longplay.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Rule sum,count,group by longplay.gif
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Rule sum,count,group by longplay.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Rule sum,count,group by longplay.mp4
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/Test recording.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/Test recording.gif
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/add rule.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/add rule.gif
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/clear event.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/clear event.gif
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/delete rule.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/delete rule.gif
--------------------------------------------------------------------------------
/Blog - dynamic SQL demo/start and stop.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/Blog - dynamic SQL demo/start and stop.gif
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ### Fraud Detection Demo with Apache Flink
2 |
3 | #### Requirements:
4 | Demo is bundled in a self-contained package. In order to build it from sources you will need:
5 |
6 | - git
7 | - docker
8 | - docker-compose
9 |
10 | Recommended resources allocated to Docker:
11 |
12 | - 4 CPUs
13 | - 8GB RAM
14 |
15 | You can check out the repository and run the demo locally.
16 |
17 | #### How to run:
18 |
19 | In order to run the demo locally, execute the following commands which build the project from sources and start all required services, including the Apache Flink and Apache Kafka clusters.
20 |
21 | ```bash
22 | git clone git@gitlab.com:getindata/streamsense/flink-dynamic-cep-demo.git
23 | docker build -t demo-webapp:latest -f webapp/webapp.Dockerfile webapp/
24 | docker build -t demo-flink-job:latest -f flink-job/Dockerfile flink-job/
25 | docker-compose -f docker-compose-local-job.yaml up
26 | ```
27 |
28 | __Note__: Dependencies are stored in a cached Docker layer. If you later only modify the source code, not the dependencies, you can expect significantly shorter packaging times for the subsequent builds.
29 |
30 | When all components are up and running, go to `localhost:5656` in your browser.
31 |
32 | __Note__: you might need to change exposed ports in _docker-compose-local-job.yaml_ in case of collisions.
33 |
34 |
--------------------------------------------------------------------------------
/docker-compose-local-job.yaml:
--------------------------------------------------------------------------------
1 | version: "2.2"
2 | services:
3 |
4 | ### Zookeeper ###
5 |
6 | zoo1:
7 | image: zookeeper:3.4.9
8 | restart: unless-stopped
9 | hostname: zoo1
10 | ports:
11 | - "2181:2181"
12 | environment:
13 | ZOO_MY_ID: 1
14 | ZOO_PORT: 2181
15 | ZOO_SERVERS: server.1=zoo1:2888:3888
16 |
17 | ### Kafka ###
18 |
19 | kafka-cp-kafka-headless:
20 | image: confluentinc/cp-kafka:5.0.0
21 | hostname: kafka-cp-kafka-headless
22 | ports:
23 | - "9092:9092"
24 | environment:
25 | KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka-cp-kafka-headless:9092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:19092
26 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
27 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
28 | KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
29 | KAFKA_BROKER_ID: 1
30 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
31 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
32 | depends_on:
33 | - zoo1
34 |
35 | ### Flink Job Cluster ###
36 |
37 | flink-master:
38 | image: ${FLINK_DOCKER_IMAGE_NAME:-demo-flink-job:latest}
39 | ports:
40 | - "8081:8081"
41 | command: job-cluster --job-classname com.ververica.field.dynamicrules.Main -Djobmanager.rpc.address=flink-master -Dparallelism.default=1 --data-source kafka --rules-source kafka --sqls-source kafka --alerts-sink kafka --rules-export-sink kafka --latency-sink kafka --kafka-host kafka-cp-kafka-headless --kafka-port 9092
42 | depends_on:
43 | - kafka-cp-kafka-headless
44 |
45 | flink-task-manager:
46 | image: ${FLINK_DOCKER_IMAGE_NAME:-demo-flink-job:latest}
47 | command: task-manager -DXmx6g -DXms6g -Djobmanager.rpc.address=flink-master -Dtaskmanager.memory.network.max=128m -Dtaskmanager.memory.framework.off-heap.size=512m -Dtaskmanager.memory.process.size=3g
48 | scale: ${DEFAULT_PARALLELISM:-1}
49 | depends_on:
50 | - kafka-cp-kafka-headless
51 |
52 | ### Spring Boot Demo ###
53 |
54 | demo:
55 | image: demo-webapp:latest
56 | hostname: demo
57 | ports:
58 | - "5656:5656"
59 | depends_on:
60 | - zoo1
61 | - kafka-cp-kafka-headless
62 |
--------------------------------------------------------------------------------
/flink-job/.dockerignore:
--------------------------------------------------------------------------------
1 | build
2 | !build/libs/dynamic-fraud-detection-demo.jar
3 |
--------------------------------------------------------------------------------
/flink-job/.gitignore:
--------------------------------------------------------------------------------
1 | */build
2 | */.gradle
3 | .gradle
4 | build
5 | */out
6 |
7 | # IntelliJ
8 | .idea
9 | *.iml
10 |
11 | .DS_Store
12 | .AppleDouble
13 | .LSOverride
14 |
15 | # Icon must end with two \r
16 | Icon
17 |
18 | # Thumbnails
19 | ._*
20 |
21 | # Files that might appear in the root of a volume
22 | .DocumentRevisions-V100
23 | .fseventsd
24 | .Spotlight-V100
25 | .TemporaryItems
26 | .Trashes
27 | .VolumeIcon.icns
28 | .com.apple.timemachine.donotpresent
29 |
30 | # Directories potentially created on remote AFP share
31 | .AppleDB
32 | .AppleDesktop
33 | Network Trash Folder
34 | Temporary Items
35 | .apdisk
36 |
37 | ### Maven ###
38 | target/
39 | pom.xml.tag
40 | pom.xml.releaseBackup
41 | pom.xml.versionsBackup
42 | pom.xml.next
43 | release.properties
44 | dependency-reduced-pom.xml
45 | buildNumber.properties
46 | .mvn/timing.properties
47 | .mvn/wrapper/maven-wrapper.jar
48 |
49 | ### Scala ###
50 |
51 | ### Windows ###
52 | # Windows thumbnail cache files
53 | Thumbs.db
54 | ehthumbs.db
55 | ehthumbs_vista.db
56 |
57 | # Dump file
58 | *.stackdump
59 |
60 | # Folder config file
61 | [Dd]esktop.ini
62 |
63 | # Recycle Bin used on file shares
64 | $RECYCLE.BIN/
65 |
66 | # Windows Installer files
67 | *.cab
68 | *.msi
69 | *.msix
70 | *.msm
71 | *.msp
72 |
73 | # Windows shortcuts
74 | *.lnk
75 |
76 |
77 | tmp
78 |
--------------------------------------------------------------------------------
/flink-job/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM openjdk:8 AS job-build
2 | WORKDIR /home/gradle/app/
3 |
4 | COPY build.gradle settings.gradle ./
5 | COPY gradlew ./
6 | COPY gradle gradle
7 | # To cause download and cache of verifyGoogleJavaFormat dependency
8 | RUN echo "class Dummy {}" > Dummy.java
9 | # download dependencies
10 | RUN ./gradlew build
11 | COPY . .
12 | RUN rm Dummy.java
13 | RUN ./gradlew build
14 | RUN mv /home/gradle/app/build/libs/dynamic-fraud-detection-demo*-deploy.jar /home/gradle/app/build/libs/dynamic-fraud-detection-demo-deploy.jar
15 |
16 | # ---
17 |
18 | FROM flink:1.11.1
19 |
20 | COPY --from=job-build /home/gradle/app/build/libs/dynamic-fraud-detection-demo-deploy.jar lib/job.jar
21 | COPY docker-entrypoint.sh /
22 |
23 | USER flink
24 | EXPOSE 8081 6123
25 | ENTRYPOINT ["/docker-entrypoint.sh"]
26 |
--------------------------------------------------------------------------------
/flink-job/README.md:
--------------------------------------------------------------------------------
1 | # Dynamic Fraud Detection Demo with Apache Flink
2 |
3 | ## Introduction
4 |
5 |
6 | ### Instructions (local execution with netcat):
7 |
8 | 1. Start `netcat`:
9 | ```
10 | nc -lk 9999
11 | ```
12 | 2. Run main method of `com.ververica.field.dynamicrules.Main`
13 | 3. Submit to netcat in the correct format:
14 | timestamp,SQL
15 |
16 | ##### Examples:
17 |
18 | ```
19 | 2021-06-25 10:38:30,SELECT payeeId FROM source_table WHERE paymentAmount > 10
20 | 2021-06-25 10:39:30,SELECT beneficiaryId FROM source_table WHERE paymentAmount > 10
21 | 2021-06-25 10:40:30,SELECT beneficiaryId FROM source_table WHERE paymentType = 'CSH'
22 | 2021-06-25 10:41:30,SELECT SUM(paymentAmount) FROM source_table WHERE paymentAmount < 10
23 | 2021-06-25 10:42:30,SELECT paymentType, MAX(paymentAmount) FROM source_table GROUP BY paymentType
24 | 2021-06-25 10:43:30,SELECT paymentType, MIN(paymentAmount) FROM source_table GROUP BY paymentType
25 | 2021-06-25 10:44:30,SELECT t.payeeId, t.first_payment, t.second_payment FROM source_table MATCH_RECOGNIZE ( PARTITION BY payeeId ORDER BY user_action_time MEASURES FIRST(paymentAmount) AS first_payment, LAST(paymentAmount) AS second_payment ONE ROW PER MATCH AFTER MATCH SKIP PAST LAST ROW PATTERN (A B) DEFINE A AS paymentAmount < 100, B AS paymentAmount > 100 ) AS t
26 | 2021-06-25 10:45:30,SELECT window_start, window_end, SUM(paymentAmount) FROM TUMBLE(TABLE source_table, DESCRIPTOR(eventTime), INTERVAL '10' SECONDS) WHERE paymentAmount > 10
27 | 2021-06-25 10:45:30,SELECT window_start, window_end, SUM(paymentAmount) FROM TABLE(TUMBLE(TABLE source_table, DESCRIPTOR(user_action_time), INTERVAL '10' SECONDS)) GROUP BY window_start, window_end
28 | ```
29 |
30 | ##### Examples of CLI params:
31 | --data-source kafka --rules-source kafka --alerts-sink kafka --rules-export-sink kafka
32 |
33 |
--------------------------------------------------------------------------------
/flink-job/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ################################################################################
4 | # Licensed to the Apache Software Foundation (ASF) under one
5 | # or more contributor license agreements. See the NOTICE file
6 | # distributed with this work for additional information
7 | # regarding copyright ownership. The ASF licenses this file
8 | # to you under the Apache License, Version 2.0 (the
9 | # "License"); you may not use this file except in compliance
10 | # with the License. You may obtain a copy of the License at
11 | #
12 | # http://www.apache.org/licenses/LICENSE-2.0
13 | #
14 | # Unless required by applicable law or agreed to in writing, software
15 | # distributed under the License is distributed on an "AS IS" BASIS,
16 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | # See the License for the specific language governing permissions and
18 | # limitations under the License.
19 | ################################################################################
20 |
21 | # Ported from Apache Flink project
22 |
23 | ### If unspecified, the hostname of the container is taken as the JobManager address
24 | FLINK_HOME=${FLINK_HOME:-"/opt/flink"}
25 |
26 | JOB_CLUSTER="job-cluster"
27 | TASK_MANAGER="task-manager"
28 |
29 | CMD="$1"
30 | shift;
31 |
32 | if [ "${CMD}" == "--help" -o "${CMD}" == "-h" ]; then
33 | echo "Usage: $(basename $0) (${JOB_CLUSTER}|${TASK_MANAGER})"
34 | exit 0
35 | elif [ "${CMD}" == "${JOB_CLUSTER}" -o "${CMD}" == "${TASK_MANAGER}" ]; then
36 | echo "Starting the ${CMD}"
37 |
38 | if [ "${CMD}" == "${TASK_MANAGER}" ]; then
39 | exec $FLINK_HOME/bin/taskmanager.sh start-foreground "$@"
40 | else
41 | exec $FLINK_HOME/bin/standalone-job.sh start-foreground "$@"
42 | fi
43 | fi
44 |
45 | exec "$@"
46 |
--------------------------------------------------------------------------------
/flink-job/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/flink-job/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/flink-job/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Oct 28 16:10:44 CET 2019
2 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
3 | distributionBase=GRADLE_USER_HOME
4 | distributionPath=wrapper/dists
5 | zipStorePath=wrapper/dists
6 | zipStoreBase=GRADLE_USER_HOME
7 |
--------------------------------------------------------------------------------
/flink-job/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS="-Xmx64m"
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/flink-job/settings.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | rootProject.name = "dynamic-fraud-detection-demo"
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/config/Config.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.config;
20 |
21 | import java.util.HashMap;
22 | import java.util.List;
23 | import java.util.Map;
24 |
25 | public class Config {
26 |
27 | private final Map<Param<?>, Object> values = new HashMap<>();
28 |
29 | public Config(
30 | Parameters inputParams,
31 | List<Param<String>> stringParams,
32 | List<Param<Integer>> intParams,
33 | List<Param<Boolean>> boolParams) {
34 | overrideDefaults(inputParams, stringParams);
35 | overrideDefaults(inputParams, intParams);
36 | overrideDefaults(inputParams, boolParams);
37 | }
38 |
39 | public static Config fromParameters(Parameters parameters) {
40 | return new Config(
41 | parameters, Parameters.STRING_PARAMS, Parameters.INT_PARAMS, Parameters.BOOL_PARAMS);
42 | }
43 |
44 | public <T> void put(Param<T> key, T value) {
45 | values.put(key, value);
46 | }
47 |
48 | public <T> T get(Param<T> key) {
49 | return key.getType().cast(values.get(key));
50 | }
51 |
52 | private <T> void overrideDefaults(Parameters inputParams, List<Param<T>> params) {
53 | for (Param<T> param : params) {
54 | put(param, inputParams.getOrDefault(param));
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/config/Param.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.config;
20 |
21 | import lombok.Getter;
22 |
23 | @Getter
24 | public class Param<T> {
25 |
26 | private String name;
27 | private Class<T> type;
28 | private T defaultValue;
29 |
30 | Param(String name, T defaultValue, Class<T> type) {
31 | this.name = name;
32 | this.type = type;
33 | this.defaultValue = defaultValue;
34 | }
35 |
36 | public static Param<String> string(String name, String defaultValue) {
37 | return new Param<>(name, defaultValue, String.class);
38 | }
39 |
40 | public static Param<Integer> integer(String name, Integer defaultValue) {
41 | return new Param<>(name, defaultValue, Integer.class);
42 | }
43 |
44 | public static Param<Boolean> bool(String name, Boolean defaultValue) {
45 | return new Param<>(name, defaultValue, Boolean.class);
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/Alert.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
21 | import lombok.AllArgsConstructor;
22 | import lombok.Data;
23 | import lombok.NoArgsConstructor;
24 | import org.apache.flink.api.java.tuple.Tuple4;
25 | import org.apache.flink.types.Row;
26 |
27 | @Data
28 | @NoArgsConstructor
29 | @AllArgsConstructor
30 | public class Alert {
31 | private String sql;
32 | private Boolean isAdded;
33 | private Object[] response;
34 | private Long timestamp;
35 | public static Alert fromTuple(Tuple4<String, Boolean, Row, Long> el) {
36 | Object[] resp = new Object[el.f2.getArity()];
37 | for (int i = 0; i < resp.length; i++) resp[i] = el.f2.getField(i);
38 | return new Alert(el.f0, el.f1, resp, el.f3);
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/JsonMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
21 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
22 |
23 | import java.io.IOException;
24 |
25 | public class JsonMapper {
26 |
27 | private final Class targetClass;
28 | private final ObjectMapper objectMapper;
29 |
30 | public JsonMapper(Class targetClass) {
31 | this.targetClass = targetClass;
32 | objectMapper = new ObjectMapper();
33 | }
34 |
35 | public T fromString(String line) throws IOException {
36 | return objectMapper.readValue(line, targetClass);
37 | }
38 |
39 | public String toString(T line) throws IOException {
40 | return objectMapper.writeValueAsString(line);
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/KafkaUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
21 | import com.ververica.field.config.Config;
22 |
23 | import java.util.Properties;
24 |
25 | import static com.ververica.field.config.Parameters.*;
26 |
27 | public class KafkaUtils {
28 |
29 | public static Properties initConsumerProperties(Config config) {
30 | Properties kafkaProps = initProperties(config);
31 | String offset = config.get(OFFSET);
32 | kafkaProps.setProperty("auto.offset.reset", offset);
33 | return kafkaProps;
34 | }
35 |
36 | public static Properties initProducerProperties(Config params) {
37 | return initProperties(params);
38 | }
39 |
40 | private static Properties initProperties(Config config) {
41 | Properties kafkaProps = new Properties();
42 | String kafkaHost = config.get(KAFKA_HOST);
43 | int kafkaPort = config.get(KAFKA_PORT);
44 | String servers = String.format("%s:%s", kafkaHost, kafkaPort);
45 | kafkaProps.setProperty("bootstrap.servers", servers);
46 | return kafkaProps;
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/Keyed.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
21 | import lombok.AllArgsConstructor;
22 | import lombok.Data;
23 | import lombok.NoArgsConstructor;
24 |
25 | @Data
26 | @NoArgsConstructor
27 | @AllArgsConstructor
28 | public class Keyed {
29 | private IN wrapped;
30 | private KEY key;
31 | private ID id;
32 | }
33 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/Main.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
21 | import com.ververica.field.config.Config;
22 | import com.ververica.field.config.Parameters;
23 | import org.apache.flink.api.java.utils.ParameterTool;
24 |
25 | import static com.ververica.field.config.Parameters.*;
26 |
27 | public class Main {
28 |
29 | public static void main(String[] args) throws Exception {
30 | ParameterTool tool = ParameterTool.fromArgs(args);
31 | Parameters inputParams = new Parameters(tool);
32 | Config config = new Config(inputParams, STRING_PARAMS, INT_PARAMS, BOOL_PARAMS);
33 | SQLRunner sqlRunner = new SQLRunner(config);
34 | sqlRunner.run();
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/SqlEventParser.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
21 | import com.ververica.field.dynamicrules.functions.SqlEvent;
22 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
23 |
24 | import java.io.IOException;
25 |
26 | public class SqlEventParser {
27 |
28 | private final ObjectMapper objectMapper = new ObjectMapper();
29 |
30 | private static SqlEvent parsePlain(String sqlString) throws InstantiationException, IllegalAccessException {
31 | return SqlEvent.createFromCsv(sqlString, SqlEvent.class);
32 | }
33 |
34 | public SqlEvent fromString(String line) throws IOException, IllegalAccessException, InstantiationException {
35 | if (line.length() > 0 && '{' == line.charAt(0)) {
36 | return parseJson(line);
37 | } else {
38 | return parsePlain(line);
39 | }
40 | }
41 |
42 | private SqlEvent parseJson(String sqlString) throws IOException {
43 | return objectMapper.readValue(sqlString, SqlEvent.class);
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/TimestampAssignable.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
/**
 * Implemented by events that can receive an ingestion timestamp after creation.
 *
 * @param <T> timestamp type (e.g. {@code Long} epoch milliseconds)
 */
public interface TimestampAssignable<T> {
  /** Stores the moment the event entered the pipeline. */
  void assignIngestionTimestamp(T timestamp);
}
24 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/TransactionEvent.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules;
20 |
21 | import lombok.AllArgsConstructor;
22 | import lombok.Builder;
23 | import lombok.Data;
24 | import lombok.NoArgsConstructor;
25 |
26 | import java.math.BigDecimal;
27 | import java.time.ZoneOffset;
28 | import java.time.format.DateTimeFormatter;
29 | import java.util.Arrays;
30 | import java.util.Iterator;
31 | import java.util.List;
32 | import java.util.Locale;
33 |
34 | @Data
35 | @Builder
36 | @NoArgsConstructor
37 | @AllArgsConstructor
38 | public class TransactionEvent implements TimestampAssignable {
39 | private static transient DateTimeFormatter timeFormatter =
40 | DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
41 | .withLocale(Locale.US)
42 | .withZone(ZoneOffset.UTC);
43 | public long transactionId;
44 | public long eventTime;
45 | public long payeeId;
46 | public long beneficiaryId;
47 | public BigDecimal paymentAmount;
48 | public String paymentType;
49 | public Long ingestionTimestamp;
50 |
51 | public static TransactionEvent fromString(String line) {
52 | List tokens = Arrays.asList(line.split(","));
53 | int numArgs = 7;
54 | if (tokens.size() != numArgs) {
55 | throw new RuntimeException(
56 | "Invalid transaction: "
57 | + line
58 | + ". Required number of arguments: "
59 | + numArgs
60 | + " found "
61 | + tokens.size());
62 | }
63 |
64 | TransactionEvent transaction = new TransactionEvent();
65 |
66 | try {
67 | Iterator iter = tokens.iterator();
68 | transaction.transactionId = Long.parseLong(iter.next());
69 | transaction.eventTime = Long.parseLong(iter.next());
70 | transaction.payeeId = Long.parseLong(iter.next());
71 | transaction.beneficiaryId = Long.parseLong(iter.next());
72 | transaction.paymentAmount = new BigDecimal(iter.next());
73 | transaction.paymentType = iter.next();
74 | transaction.ingestionTimestamp = Long.parseLong(iter.next());
75 | } catch (NumberFormatException nfe) {
76 | throw new RuntimeException("Invalid record: " + line, nfe);
77 | }
78 |
79 | return transaction;
80 | }
81 |
82 | @Override
83 | public void assignIngestionTimestamp(Long timestamp) {
84 | this.ingestionTimestamp = timestamp;
85 | }
86 |
87 | }
88 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/assigners/PeriodicTimestampAssigner.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.assigners;
2 |
3 | import com.ververica.field.dynamicrules.sources.TimeBasedEvent;
4 | import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
5 | import org.apache.flink.streaming.api.watermark.Watermark;
6 |
7 | import javax.annotation.Nullable;
8 | import java.io.Serializable;
9 |
10 | public class PeriodicTimestampAssigner implements AssignerWithPeriodicWatermarks, Serializable{
11 | public Long timeSpeedMultiplier;
12 | public Long timePaddingMs;
13 |
14 | public PeriodicTimestampAssigner(Long timeSpeedMultiplier, Long timePaddingMs) {
15 | this.timeSpeedMultiplier = timeSpeedMultiplier;
16 | this.timePaddingMs = timePaddingMs;
17 | }
18 |
19 |
20 | Long firstWatermarkTimeMs = 0L;
21 |
22 | @Nullable
23 | @Override
24 | public Watermark getCurrentWatermark() {
25 | Long currentTimeMs = System.currentTimeMillis();
26 | if (firstWatermarkTimeMs == 0L) firstWatermarkTimeMs = currentTimeMs;
27 | Long deltaMs = currentTimeMs - firstWatermarkTimeMs;
28 | Long watermarkVal = deltaMs * timeSpeedMultiplier - timePaddingMs;
29 | return new Watermark(watermarkVal);
30 | }
31 |
32 | @Override
33 | public long extractTimestamp(T element, long recordTimestamp) {
34 | return element.getTimestamp() * 1000;
35 | }
36 |
37 |
38 | }
39 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/converters/StringConverter.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.converters;
2 |
3 | import java.io.Serializable;
4 |
/**
 * Bidirectional converter between a value type and its string representation.
 *
 * @param <T> the value type being converted
 */
public abstract class StringConverter<T> implements Serializable {
  /** Parses a value from its string form. */
  public abstract T toValue(String input);

  /** Renders a value to its string form. */
  public abstract String toString(T input);
}
10 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/converters/TransactionStringConverter.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.converters;
2 |
3 | import com.ververica.field.dynamicrules.TransactionEvent;
4 |
5 | public class TransactionStringConverter extends StringConverter {
6 | @Override
7 | public TransactionEvent toValue(String input) {
8 | return TransactionEvent.fromString(input);
9 | }
10 |
11 | @Override
12 | public String toString(TransactionEvent input) {
13 | return String.join(
14 | ",",
15 | new String[] {
16 | String.valueOf(input.transactionId),
17 | String.valueOf(input.eventTime),
18 | String.valueOf(input.payeeId),
19 | String.valueOf(input.beneficiaryId),
20 | String.valueOf(input.paymentAmount),
21 | String.valueOf(input.paymentType),
22 | String.valueOf(input.ingestionTimestamp),
23 | });
24 | }
25 | };
26 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/executor/CustomLocalExecutor.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.executor;
2 |
3 | import org.apache.flink.client.cli.CustomCommandLine;
4 | import org.apache.flink.client.deployment.ClusterClientServiceLoader;
5 | import org.apache.flink.configuration.Configuration;
6 | import org.apache.flink.table.client.config.Environment;
7 | import org.apache.flink.table.client.gateway.local.ExecutionContext;
8 | import org.apache.flink.table.client.gateway.local.LocalExecutor;
9 |
10 | import java.net.URL;
11 | import java.util.List;
12 |
13 | public class CustomLocalExecutor extends LocalExecutor {
14 | public CustomLocalExecutor(
15 | Environment defaultEnvironment,
16 | List dependencies,
17 | Configuration flinkConfig,
18 | CustomCommandLine commandLine,
19 | ClusterClientServiceLoader clusterClientServiceLoader) {
20 | super(defaultEnvironment, dependencies, flinkConfig, commandLine, clusterClientServiceLoader);
21 | }
22 |
23 | public ExecutionContext> getExecutionContext(String sessionId) {
24 | return super.getExecutionContext(sessionId);
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/BroadcastEmbeddedFlinkFunction.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.functions;
2 |
3 | import com.ververica.field.dynamicrules.converters.StringConverter;
4 | import com.ververica.field.dynamicrules.logger.CustomTimeLogger;
5 | import lombok.extern.slf4j.Slf4j;
6 | import org.apache.flink.api.common.typeinfo.TypeInformation;
7 | import org.apache.flink.api.java.tuple.Tuple4;
8 | import org.apache.flink.configuration.Configuration;
9 | import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
10 | import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
11 | import org.apache.flink.types.Row;
12 | import org.apache.flink.util.Collector;
13 |
14 | import java.util.HashMap;
15 | import java.util.List;
16 | import java.util.Map;
17 | import java.util.concurrent.atomic.AtomicInteger;
// We want to build an operator based on KeyedBroadcastProcessFunction that
// receives events on one input and SQL statements on the other. For each key we
// keep a local embedded environment in state, containing a datastream that is fed
// successive events from processElement, while the SQL statements become views
// over that datastream.
24 |
25 | /**
26 | * Function, that accepts patterns and routing instructions and executes them on NFA.
27 | *
28 | * @param
29 | * @param
30 | */
31 | @Slf4j
32 | public class BroadcastEmbeddedFlinkFunction
33 | extends KeyedBroadcastProcessFunction> {
34 | private static final AtomicInteger counter = new AtomicInteger(0);
35 | private static final AtomicInteger portCounter = new AtomicInteger(0);
36 | private StringConverter converterIn;
37 | private Map> clusters = new HashMap<>();
38 | private TypeInformation inTypeInfo;
39 | private List expressions;
40 | private AssignerWithPeriodicWatermarks assigner;
41 |
42 | private int subtaskIndex;
43 |
44 | private CustomTimeLogger customLogger;
45 | private long startTime;
46 |
47 | public BroadcastEmbeddedFlinkFunction(
48 | TypeInformation inTypeInfo,
49 | List expressions,
50 | Class converterIn,
51 | AssignerWithPeriodicWatermarks assigner)
52 | throws IllegalAccessException, InstantiationException {
53 | this.startTime = System.currentTimeMillis();
54 | this.customLogger = new CustomTimeLogger(startTime);
55 | this.inTypeInfo = inTypeInfo;
56 | this.expressions = expressions;
57 | this.converterIn = (StringConverter) converterIn.newInstance();
58 | this.assigner = assigner;
59 | }
60 |
61 | @Override
62 | public void open(Configuration parameters) throws Exception {
63 | super.open(parameters);
64 | subtaskIndex = getRuntimeContext().getIndexOfThisSubtask();
65 | }
66 |
67 | @Override
68 | public void close() throws Exception {
69 | for (BroadcastEmbeddedFlinkCluster cluster : clusters.values()) cluster.close();
70 | super.close();
71 | }
72 |
73 | @Override
74 | public void processElement(
75 | IN value, ReadOnlyContext ctx, Collector> out)
76 | throws Exception {
77 | try {
78 | int valueNumber = counter.getAndIncrement();
79 |
80 | customLogger.log(
81 | "Processing value number "
82 | + valueNumber
83 | + " : ("
84 | + value.toString()
85 | + ") //// Subtask index: "
86 | + subtaskIndex);
87 |
88 | customLogger.log("Converter in: " + converterIn);
89 | String strValue = converterIn.toString(value);
90 |
91 | for (BroadcastEmbeddedFlinkCluster cluster : clusters.values()) {
92 | cluster.write(strValue);
93 | }
94 | for (BroadcastEmbeddedFlinkCluster cluster : clusters.values()) {
95 | List> output = cluster.retrieveResults();
96 | for (Tuple4 line : output) {
97 | out.collect(line);
98 | }
99 | }
100 | } catch (Exception e) {
101 | customLogger.log("processElement exception: " + e.toString());
102 | throw e;
103 | }
104 | }
105 |
106 | @Override
107 | public void processBroadcastElement(
108 | SqlEvent value, Context ctx, Collector> out)
109 | throws Exception {
110 |
111 | if (value.eventDate.equals("REMOVE")) {
112 | log.info("Closing cluster for SQL " + value.sqlQuery);
113 | BroadcastEmbeddedFlinkCluster closedCluster = clusters.remove(value.sqlQuery);
114 | closedCluster.close();
115 | } else {
116 | log.info("Adding cluster for SQL " + value.sqlQuery);
117 | BroadcastEmbeddedFlinkCluster cluster =
118 | new BroadcastEmbeddedFlinkCluster(
119 | value.sqlQuery, inTypeInfo, expressions, converterIn.getClass(), assigner, startTime);
120 |
121 | cluster.open(generateSourcePort());
122 | clusters.put(value.sqlQuery, cluster);
123 | }
124 | }
125 |
126 | private int generateSourcePort() {
127 | int valueNumber = portCounter.getAndIncrement();
128 |
129 | return 34100 + valueNumber;
130 | }
131 | }
132 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/JsonDeserializer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.functions;
20 |
21 | import com.ververica.field.dynamicrules.JsonMapper;
22 | import lombok.extern.slf4j.Slf4j;
23 | import org.apache.flink.api.common.functions.RichFlatMapFunction;
24 | import org.apache.flink.configuration.Configuration;
25 | import org.apache.flink.util.Collector;
26 |
27 | @Slf4j
28 | public class JsonDeserializer extends RichFlatMapFunction {
29 |
30 | private final Class targetClass;
31 | private JsonMapper parser;
32 |
33 | public JsonDeserializer(Class targetClass) {
34 | this.targetClass = targetClass;
35 | }
36 |
37 | @Override
38 | public void open(Configuration parameters) throws Exception {
39 | super.open(parameters);
40 | parser = new JsonMapper<>(targetClass);
41 | }
42 |
43 | @Override
44 | public void flatMap(String value, Collector out) throws Exception {
45 | log.info("{}", value);
46 | try {
47 | T parsed = parser.fromString(value);
48 | out.collect(parsed);
49 | } catch (Exception e) {
50 | log.warn("Failed parsing rule, dropping it:", e);
51 | }
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/JsonGeneratorWrapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.functions;
20 |
21 | import com.ververica.field.sources.BaseGenerator;
22 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException;
23 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
24 |
25 | import java.util.SplittableRandom;
26 |
27 | public class JsonGeneratorWrapper extends BaseGenerator {
28 |
29 | private static final ObjectMapper objectMapper = new ObjectMapper();
30 | private BaseGenerator wrappedGenerator;
31 |
32 | public JsonGeneratorWrapper(BaseGenerator wrappedGenerator) {
33 | this.wrappedGenerator = wrappedGenerator;
34 | this.maxRecordsPerSecond = wrappedGenerator.getMaxRecordsPerSecond();
35 | }
36 |
37 | @Override
38 | public String randomEvent(SplittableRandom rnd, long id) {
39 | T transaction = wrappedGenerator.randomEvent(rnd, id);
40 | String json;
41 | try {
42 | json = objectMapper.writeValueAsString(transaction);
43 | } catch (JsonProcessingException e) {
44 | throw new RuntimeException(e);
45 | }
46 | return json;
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/JsonSerializer.java:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | /*
5 | * Licensed to the Apache Software Foundation (ASF) under one
6 | * or more contributor license agreements. See the NOTICE file
7 | * distributed with this work for additional information
8 | * regarding copyright ownership. The ASF licenses this file
9 | * to you under the Apache License, Version 2.0 (the
10 | * "License"); you may not use this file except in compliance
11 | * with the License. You may obtain a copy of the License at
12 | *
13 | * http://www.apache.org/licenses/LICENSE-2.0
14 | *
15 | * Unless required by applicable law or agreed to in writing, software
16 | * distributed under the License is distributed on an "AS IS" BASIS,
17 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 | * See the License for the specific language governing permissions and
19 | * limitations under the License.
20 | */
21 |
22 | package com.ververica.field.dynamicrules.functions;
23 |
24 | import com.ververica.field.dynamicrules.JsonMapper;
25 | import lombok.extern.slf4j.Slf4j;
26 | import org.apache.flink.api.common.functions.RichFlatMapFunction;
27 | import org.apache.flink.configuration.Configuration;
28 | import org.apache.flink.util.Collector;
29 |
30 | @Slf4j
31 | public class JsonSerializer extends RichFlatMapFunction {
32 |
33 | private JsonMapper parser;
34 | private final Class targetClass;
35 |
36 | public JsonSerializer(Class sourceClass) {
37 | this.targetClass = sourceClass;
38 | }
39 |
40 | @Override
41 | public void open(Configuration parameters) throws Exception {
42 | super.open(parameters);
43 | parser = new JsonMapper<>(targetClass);
44 | }
45 |
46 | @Override
47 | public void flatMap(T value, Collector out) throws Exception {
48 | System.out.println(value);
49 | try {
50 | String serialized = parser.toString(value);
51 | out.collect(serialized);
52 | } catch (Exception e) {
53 | log.warn("Failed serializing to JSON dropping it:", e);
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/SqlDeserializer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.functions;
20 |
21 | import com.ververica.field.dynamicrules.SqlEventParser;
22 | import com.ververica.field.dynamicrules.functions.SqlEvent;
23 | import lombok.extern.slf4j.Slf4j;
24 | import org.apache.flink.api.common.functions.RichFlatMapFunction;
25 | import org.apache.flink.configuration.Configuration;
26 | import org.apache.flink.util.Collector;
27 |
28 | @Slf4j
29 | public class SqlDeserializer extends RichFlatMapFunction {
30 |
31 | private SqlEventParser sqlParser;
32 |
33 | @Override
34 | public void open(Configuration parameters) throws Exception {
35 | super.open(parameters);
36 | sqlParser = new SqlEventParser();
37 | }
38 |
39 | @Override
40 | public void flatMap(String value, Collector out) throws Exception {
41 | System.out.println(value);
42 | try {
43 | SqlEvent rule = sqlParser.fromString(value);
44 | out.collect(rule);
45 | } catch (Exception e) {
46 | System.out.println("Failed parsing rule, dropping it:" + value);
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/SqlEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.functions;
2 |
3 | import com.ververica.field.dynamicrules.sources.TimeBasedEvent;
4 | import com.ververica.field.dynamicrules.util.TimestampHelpers;
5 |
6 | public class SqlEvent extends TimeBasedEvent {
7 | public String eventDate;
8 | public String sqlQuery;
9 |
10 | public SqlEvent() {
11 | this("1970-01-01 00:00:00", "");
12 | }
13 |
14 | public SqlEvent(String eventDate, String sqlQuery) {
15 | this.eventDate = eventDate;
16 | this.sqlQuery = sqlQuery;
17 | }
18 |
19 | @Override
20 | public Long getTimestamp() {
21 | return TimestampHelpers.toUnixtime(eventDate);
22 | }
23 |
24 | @Override
25 | public TimeBasedEvent apply(String inputLine, String delimiter) {
26 | String[] splitInput = inputLine.split(delimiter, 2);
27 | return new SqlEvent(splitInput[0], splitInput[1]);
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/TimeStamper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.functions;
20 |
21 | import com.ververica.field.dynamicrules.TimestampAssignable;
22 | import org.apache.flink.api.common.functions.RichFlatMapFunction;
23 | import org.apache.flink.util.Collector;
24 |
25 | public class TimeStamper> extends RichFlatMapFunction {
26 |
27 | @Override
28 | public void flatMap(T value, Collector out) throws Exception {
29 | value.assignIngestionTimestamp(System.currentTimeMillis());
30 | out.collect(value);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/functions/TransactionEventsGenerator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.functions;
20 |
21 | import com.ververica.field.dynamicrules.TransactionEvent;
22 | import com.ververica.field.sources.BaseGenerator;
23 |
24 | import java.math.BigDecimal;
25 | import java.util.SplittableRandom;
26 | import java.util.concurrent.ThreadLocalRandom;
27 |
28 | public class TransactionEventsGenerator extends BaseGenerator {
29 |
30 | private static long MAX_PAYEE_ID = 10;
31 | private static long MAX_BENEFICIARY_ID = 10;
32 |
33 | private static double MIN_PAYMENT_AMOUNT = 5d;
34 | private static double MAX_PAYMENT_AMOUNT = 20d;
35 |
36 | public TransactionEventsGenerator(int maxRecordsPerSecond) {
37 | super(maxRecordsPerSecond);
38 | }
39 |
40 | @Override
41 | public TransactionEvent randomEvent(SplittableRandom rnd, long id) {
42 | long transactionId = rnd.nextLong(Long.MAX_VALUE);
43 | long payeeId = rnd.nextLong(MAX_PAYEE_ID);
44 | long beneficiaryId = rnd.nextLong(MAX_BENEFICIARY_ID);
45 | double paymentAmountDouble =
46 | ThreadLocalRandom.current().nextDouble(MIN_PAYMENT_AMOUNT, MAX_PAYMENT_AMOUNT);
47 | paymentAmountDouble = Math.floor(paymentAmountDouble * 100) / 100;
48 | BigDecimal paymentAmount = BigDecimal.valueOf(paymentAmountDouble);
49 |
50 | TransactionEvent transaction =
51 | TransactionEvent.builder()
52 | .transactionId(transactionId)
53 | .payeeId(payeeId)
54 | .beneficiaryId(beneficiaryId)
55 | .paymentAmount(paymentAmount)
56 | .paymentType(paymentType(transactionId))
57 | .eventTime(System.currentTimeMillis())
58 | .ingestionTimestamp(System.currentTimeMillis())
59 | .build();
60 |
61 | return transaction;
62 | }
63 |
64 | private String paymentType(long id) {
65 | int name = (int) (id % 2);
66 | switch (name) {
67 | case 0:
68 | return "CRD";
69 | case 1:
70 | return "CSH";
71 | default:
72 | throw new IllegalStateException("");
73 | }
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/logger/CustomTimeLogger.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.logger;
2 |
3 | import java.io.Serializable;
4 |
/**
 * Minimal stdout logger that prefixes every message with the number of
 * milliseconds elapsed since the logger's reference creation time.
 */
public class CustomTimeLogger implements Serializable {

  // Reference point (epoch millis) that elapsed time is measured from.
  private long creationTime;

  public CustomTimeLogger(long creationTime) {
    this.creationTime = creationTime;
  }

  /** Prints {@code message} prefixed with the elapsed milliseconds, e.g. {@code [123] - msg}. */
  public void log(String message) {
    final long elapsedMillis = System.currentTimeMillis() - creationTime;
    final String line = "[" + elapsedMillis + "] - " + message;
    System.out.println(line);
  }
}
18 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/sinks/AlertsSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.sinks;
20 |
21 | import static com.ververica.field.config.Parameters.ALERTS_SINK;
22 | import static com.ververica.field.config.Parameters.ALERTS_TOPIC;
23 | import static com.ververica.field.config.Parameters.GCP_PROJECT_NAME;
24 | import static com.ververica.field.config.Parameters.GCP_PUBSUB_ALERTS_SUBSCRIPTION;
25 |
26 | import com.ververica.field.config.Config;
27 | import com.ververica.field.dynamicrules.Alert;
28 | import com.ververica.field.dynamicrules.KafkaUtils;
29 | import com.ververica.field.dynamicrules.functions.JsonSerializer;
30 | import java.io.IOException;
31 | import java.util.Properties;
32 | import org.apache.flink.api.common.serialization.SimpleStringSchema;
33 | import org.apache.flink.streaming.api.datastream.DataStream;
34 | import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;
35 | import org.apache.flink.streaming.api.functions.sink.SinkFunction;
36 | import org.apache.flink.streaming.connectors.gcp.pubsub.PubSubSink;
37 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
38 |
39 | public class AlertsSink {
40 |
41 | public static SinkFunction createAlertsSink(Config config) throws IOException {
42 |
43 | String sinkType = config.get(ALERTS_SINK);
44 | AlertsSink.Type alertsSinkType = AlertsSink.Type.valueOf(sinkType.toUpperCase());
45 |
46 | switch (alertsSinkType) {
47 | case KAFKA:
48 | Properties kafkaProps = KafkaUtils.initProducerProperties(config);
49 | String alertsTopic = config.get(ALERTS_TOPIC);
50 | return new FlinkKafkaProducer011<>(alertsTopic, new SimpleStringSchema(), kafkaProps);
51 | case PUBSUB:
52 | return PubSubSink.newBuilder()
53 | .withSerializationSchema(new SimpleStringSchema())
54 | .withProjectName(config.get(GCP_PROJECT_NAME))
55 | .withTopicName(config.get(GCP_PUBSUB_ALERTS_SUBSCRIPTION))
56 | .build();
57 | case STDOUT:
58 | return new PrintSinkFunction<>(true);
59 | default:
60 | throw new IllegalArgumentException(
61 | "Source \"" + alertsSinkType + "\" unknown. Known values are:" + Type.values());
62 | }
63 | }
64 |
65 | public static DataStream alertsStreamToJson(DataStream alerts) {
66 | return alerts.flatMap(new JsonSerializer<>(Alert.class)).name("Alerts Deserialization");
67 | }
68 |
69 | public enum Type {
70 | KAFKA("Alerts Sink (Kafka)"),
71 | PUBSUB("Alerts Sink (Pub/Sub)"),
72 | STDOUT("Alerts Sink (Std. Out)");
73 |
74 | private String name;
75 |
76 | Type(String name) {
77 | this.name = name;
78 | }
79 |
80 | public String getName() {
81 | return name;
82 | }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/sources/SqlsSource.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.sources;
20 |
21 | import com.ververica.field.config.Config;
22 | import com.ververica.field.dynamicrules.KafkaUtils;
23 | import com.ververica.field.dynamicrules.functions.SqlDeserializer;
24 | import com.ververica.field.dynamicrules.functions.SqlEvent;
25 | import org.apache.flink.api.common.serialization.SimpleStringSchema;
26 | import org.apache.flink.streaming.api.datastream.DataStream;
27 | import org.apache.flink.streaming.api.functions.source.SocketTextStreamFunction;
28 | import org.apache.flink.streaming.api.functions.source.SourceFunction;
29 | import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
30 | import org.apache.flink.streaming.api.windowing.time.Time;
31 | import org.apache.flink.streaming.connectors.gcp.pubsub.PubSubSource;
32 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
33 |
34 | import java.io.IOException;
35 | import java.util.Properties;
36 | import java.util.concurrent.TimeUnit;
37 |
38 | import static com.ververica.field.config.Parameters.*;
39 |
40 | public class SqlsSource {
41 |
42 | private static final int SQLS_STREAM_PARALLELISM = 1;
43 |
44 | public static SourceFunction createSqlsSource(Config config) throws IOException {
45 |
46 | String sourceType = config.get(SQLS_SOURCE);
47 | SqlsSource.Type rulesSourceType = SqlsSource.Type.valueOf(sourceType.toUpperCase());
48 |
49 | switch (rulesSourceType) {
50 | case KAFKA:
51 | Properties kafkaProps = KafkaUtils.initConsumerProperties(config);
52 | String rulesTopic = config.get(SQLS_TOPIC);
53 | FlinkKafkaConsumer011 kafkaConsumer =
54 | new FlinkKafkaConsumer011<>(rulesTopic, new SimpleStringSchema(), kafkaProps);
55 | kafkaConsumer.setStartFromLatest();
56 | return kafkaConsumer;
57 | case PUBSUB:
58 | return PubSubSource.newBuilder()
59 | .withDeserializationSchema(new SimpleStringSchema())
60 | .withProjectName(config.get(GCP_PROJECT_NAME))
61 | .withSubscriptionName(config.get(GCP_PUBSUB_RULES_SUBSCRIPTION))
62 | .build();
63 | case SOCKET:
64 | return new SocketTextStreamFunction("localhost", config.get(SOCKET_PORT), "\n", -1);
65 | default:
66 | throw new IllegalArgumentException(
67 | "Source \"" + rulesSourceType + "\" unknown. Known values are:" + Type.values());
68 | }
69 | }
70 |
71 | public static DataStream stringsStreamToSqls(DataStream sqlStrings) {
72 | return sqlStrings
73 | .flatMap(new SqlDeserializer())
74 | .name("SQL Deserialization")
75 | .setParallelism(SQLS_STREAM_PARALLELISM)
76 | .assignTimestampsAndWatermarks(
77 | new BoundedOutOfOrdernessTimestampExtractor(
78 | Time.of(0, TimeUnit.MILLISECONDS)) {
79 | @Override
80 | public long extractTimestamp(SqlEvent element) {
81 | // Prevents connected data+update stream watermark stalling.
82 | return Long.MAX_VALUE;
83 | }
84 | });
85 | }
86 |
87 | public enum Type {
88 | KAFKA("Rules Source (Kafka)"),
89 | PUBSUB("Rules Source (Pub/Sub)"),
90 | SOCKET("Rules Source (Socket)");
91 |
92 | private String name;
93 |
94 | Type(String name) {
95 | this.name = name;
96 | }
97 |
98 | public String getName() {
99 | return name;
100 | }
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/sources/TimeBasedEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.sources;
2 |
3 | import org.apache.flink.cep.EventComparator;
4 |
5 | import java.io.Serializable;
6 |
7 | public abstract class TimeBasedEvent implements Comparable, EventComparator, Serializable {
8 | public abstract Long getTimestamp();
9 |
10 | public abstract TimeBasedEvent apply(String inputLine, String delimiter); // = ","
11 |
12 | @Override
13 | public int compareTo(TimeBasedEvent other) {
14 | if(other == null) return 1;
15 | return getTimestamp().compareTo(other.getTimestamp());
16 | }
17 | @Override
18 | public int compare(TimeBasedEvent o1, TimeBasedEvent o2){
19 | return o1.compareTo(o2);
20 | }
21 |
22 | public static T createFromCsv(String inputLine, Class type) throws IllegalAccessException, InstantiationException {
23 | return (T) type.newInstance().apply(inputLine, ",");
24 | }
25 |
26 | }
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/sources/TransactionEventsSource.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.dynamicrules.sources;
20 |
21 | import com.ververica.field.config.Config;
22 | import com.ververica.field.dynamicrules.KafkaUtils;
23 | import com.ververica.field.dynamicrules.TransactionEvent;
24 | import com.ververica.field.dynamicrules.functions.JsonDeserializer;
25 | import com.ververica.field.dynamicrules.functions.JsonGeneratorWrapper;
26 | import com.ververica.field.dynamicrules.functions.TimeStamper;
27 | import com.ververica.field.dynamicrules.functions.TransactionEventsGenerator;
28 | import org.apache.flink.api.common.serialization.SimpleStringSchema;
29 | import org.apache.flink.streaming.api.datastream.DataStream;
30 | import org.apache.flink.streaming.api.functions.source.SourceFunction;
31 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
32 |
33 | import java.util.Properties;
34 |
35 | import static com.ververica.field.config.Parameters.*;
36 |
37 | public class TransactionEventsSource {
38 |
39 | public static SourceFunction createTransactionEventsSource(Config config) {
40 |
41 | String sourceType = config.get(TRANSACTIONS_SOURCE);
42 | TransactionEventsSource.Type transactionsSourceType =
43 | TransactionEventsSource.Type.valueOf(sourceType.toUpperCase());
44 |
45 | int transactionsPerSecond = config.get(RECORDS_PER_SECOND);
46 |
47 | switch (transactionsSourceType) {
48 | case KAFKA:
49 | Properties kafkaProps = KafkaUtils.initConsumerProperties(config);
50 | String transactionsTopic = config.get(DATA_TOPIC);
51 | FlinkKafkaConsumer011 kafkaConsumer =
52 | new FlinkKafkaConsumer011<>(transactionsTopic, new SimpleStringSchema(), kafkaProps);
53 | kafkaConsumer.setStartFromLatest();
54 | return kafkaConsumer;
55 | default:
56 | return new JsonGeneratorWrapper<>(new TransactionEventsGenerator(transactionsPerSecond));
57 | }
58 | }
59 |
60 | public static DataStream stringsStreamToTransactionEvents(
61 | DataStream transactionStrings) {
62 | return transactionStrings
63 | .flatMap(new JsonDeserializer(TransactionEvent.class))
64 | .returns(TransactionEvent.class)
65 | .flatMap(new TimeStamper())
66 | .returns(TransactionEvent.class)
67 | .name("TransactionEvents Deserialization");
68 | }
69 |
70 | public enum Type {
71 | GENERATOR("Transactions Source (generated locally)"),
72 | KAFKA("Transactions Source (Kafka)");
73 |
74 | private String name;
75 |
76 | Type(String name) {
77 | this.name = name;
78 | }
79 |
80 | public String getName() {
81 | return name;
82 | }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/util/SchemaHelper.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.util;
2 |
public class SchemaHelper {
  /**
   * Returns the embedded SQL Client environment file (a copy of
   * test-sql-client-defaults.yaml) as a single string.
   *
   * <p>Placeholders of the form {@code $VAR_XXX} (update mode, source path,
   * planner, execution type, result mode, restart strategy, max rows) are
   * meant to be substituted by the caller before the YAML is parsed.
   */
  static public String getSchemaContents(){
    return "################################################################################\n" +
        "# Licensed to the Apache Software Foundation (ASF) under one\n" +
        "# or more contributor license agreements. See the NOTICE file\n" +
        "# distributed with this work for additional information\n" +
        "# regarding copyright ownership. The ASF licenses this file\n" +
        "# to you under the Apache License, Version 2.0 (the\n" +
        "# \"License\"); you may not use this file except in compliance\n" +
        "# with the License. You may obtain a copy of the License at\n" +
        "#\n" +
        "# http://www.apache.org/licenses/LICENSE-2.0\n" +
        "#\n" +
        "# Unless required by applicable law or agreed to in writing, software\n" +
        "# distributed under the License is distributed on an \"AS IS\" BASIS,\n" +
        "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" +
        "# See the License for the specific language governing permissions and\n" +
        "# limitations under the License.\n" +
        "################################################################################\n" +
        "\n" +
        "#==============================================================================\n" +
        "# TEST ENVIRONMENT FILE\n" +
        "# General purpose default environment file.\n" +
        "#==============================================================================\n" +
        "\n" +
        "# this file has variables that can be filled with content by replacing $VAR_XXX\n" +
        "\n" +
        "tables:\n" +
        " - name: TableNumber1\n" +
        " type: source-table\n" +
        " $VAR_UPDATE_MODE\n" +
        " schema:\n" +
        " - name: IntegerField1\n" +
        " type: INT\n" +
        " - name: StringField1\n" +
        " type: VARCHAR\n" +
        " - name: TimestampField1\n" +
        " type: TIMESTAMP\n" +
        " connector:\n" +
        " type: filesystem\n" +
        " path: \"$VAR_SOURCE_PATH1\"\n" +
        " format:\n" +
        " type: csv\n" +
        " fields:\n" +
        " - name: IntegerField1\n" +
        " type: INT\n" +
        " - name: StringField1\n" +
        " type: VARCHAR\n" +
        " - name: TimestampField1\n" +
        " type: TIMESTAMP\n" +
        " line-delimiter: \"\\n\"\n" +
        " comment-prefix: \"#\"\n" +
        "\n" +
        "execution:\n" +
        " planner: \"$VAR_PLANNER\"\n" +
        " type: \"$VAR_EXECUTION_TYPE\"\n" +
        " time-characteristic: event-time\n" +
        " periodic-watermarks-interval: 99\n" +
        " parallelism: 1\n" +
        " max-parallelism: 16\n" +
        " min-idle-state-retention: 1000\n" +
        " max-idle-state-retention: 600000\n" +
        " result-mode: \"$VAR_RESULT_MODE\"\n" +
        " max-table-result-rows: \"$VAR_MAX_ROWS\"\n" +
        " restart-strategy:\n" +
        " type: \"$VAR_RESTART_STRATEGY_TYPE\"\n" +
        " max-failures-per-interval: 10\n" +
        " failure-rate-interval: 99000\n" +
        " delay: 1000\n" +
        "\n" +
        "configuration:\n" +
        " table.optimizer.join-reorder-enabled: false\n" +
        "\n" +
        "deployment:\n" +
        " response-timeout: 5000\n";
  }
}
80 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/dynamicrules/util/TimestampHelpers.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.dynamicrules.util;
2 |
3 | import java.time.LocalDateTime;
4 | import java.time.ZoneOffset;
5 | import java.time.format.DateTimeFormatter;
6 |
/** Helpers for converting datetime strings to Unix timestamps. */
public class TimestampHelpers {

  // Pattern shared by all rule/event timestamps in this job.
  static final String defaultDatetimeFormat = "yyyy-MM-dd HH:mm:ss";

  // DateTimeFormatter is immutable and thread-safe, so build it once
  // instead of re-parsing the pattern on every call.
  private static final DateTimeFormatter FORMATTER =
      DateTimeFormatter.ofPattern(defaultDatetimeFormat);

  /**
   * Parses {@code datetime} (pattern {@code "yyyy-MM-dd HH:mm:ss"}) and
   * returns the corresponding Unix time in seconds, interpreting the value
   * as UTC.
   *
   * @throws java.time.format.DateTimeParseException if the input does not match the pattern
   */
  static public Long toUnixtime(String datetime) {
    LocalDateTime localDateTime = LocalDateTime.parse(datetime, FORMATTER);
    return localDateTime.toEpochSecond(ZoneOffset.UTC);
  }
}
18 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/sources/BaseGenerator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.sources;
20 |
21 | import org.apache.flink.api.common.state.ListState;
22 | import org.apache.flink.api.common.state.ListStateDescriptor;
23 | import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
24 | import org.apache.flink.configuration.Configuration;
25 | import org.apache.flink.runtime.state.FunctionInitializationContext;
26 | import org.apache.flink.runtime.state.FunctionSnapshotContext;
27 | import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
28 | import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
29 |
30 | import java.util.SplittableRandom;
31 |
32 | import static org.apache.flink.util.Preconditions.checkArgument;
33 |
34 | /** A simple random data generator with data rate throttling logic. */
35 | public abstract class BaseGenerator extends RichParallelSourceFunction
36 | implements CheckpointedFunction {
37 |
38 | private static final long serialVersionUID = 1L;
39 |
40 | protected int maxRecordsPerSecond;
41 |
42 | private volatile boolean running = true;
43 |
44 | private long id = -1;
45 |
46 | private transient ListState idState;
47 |
48 | protected BaseGenerator() {
49 | this.maxRecordsPerSecond = -1;
50 | }
51 |
52 | protected BaseGenerator(int maxRecordsPerSecond) {
53 | checkArgument(
54 | maxRecordsPerSecond == -1 || maxRecordsPerSecond > 0,
55 | "maxRecordsPerSecond must be positive or -1 (infinite)");
56 | this.maxRecordsPerSecond = maxRecordsPerSecond;
57 | }
58 |
59 | @Override
60 | public void open(Configuration parameters) throws Exception {
61 | if (id == -1) {
62 | id = getRuntimeContext().getIndexOfThisSubtask();
63 | }
64 | }
65 |
66 | @Override
67 | public final void run(SourceContext ctx) throws Exception {
68 | final int numberOfParallelSubtasks = getRuntimeContext().getNumberOfParallelSubtasks();
69 | final Throttler throttler = new Throttler(maxRecordsPerSecond, numberOfParallelSubtasks);
70 | final SplittableRandom rnd = new SplittableRandom();
71 |
72 | final Object lock = ctx.getCheckpointLock();
73 |
74 | while (running) {
75 | T event = randomEvent(rnd, id);
76 |
77 | synchronized (lock) {
78 | ctx.collect(event);
79 | id += numberOfParallelSubtasks;
80 | }
81 |
82 | throttler.throttle();
83 | }
84 | }
85 |
86 | @Override
87 | public final void cancel() {
88 | running = false;
89 | }
90 |
91 | @Override
92 | public final void snapshotState(FunctionSnapshotContext context) throws Exception {
93 | idState.clear();
94 | idState.add(id);
95 | }
96 |
97 | @Override
98 | public void initializeState(FunctionInitializationContext context) throws Exception {
99 | idState =
100 | context
101 | .getOperatorStateStore()
102 | .getUnionListState(new ListStateDescriptor<>("ids", BasicTypeInfo.LONG_TYPE_INFO));
103 |
104 | if (context.isRestored()) {
105 | long max = Long.MIN_VALUE;
106 | for (Long value : idState.get()) {
107 | max = Math.max(max, value);
108 | }
109 |
110 | id = max + getRuntimeContext().getIndexOfThisSubtask();
111 | }
112 | }
113 |
114 | public abstract T randomEvent(SplittableRandom rnd, long id);
115 |
116 | public int getMaxRecordsPerSecond() {
117 | return maxRecordsPerSecond;
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/flink-job/src/main/java/com/ververica/field/sources/Throttler.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.sources;
20 |
21 | import org.apache.flink.util.Preconditions;
22 |
/** Utility to throttle a thread to a given number of executions (records) per second. */
final class Throttler {

  // Number of records to emit before checking the clock; -1 disables throttling.
  private final long throttleBatchSize;
  // Intended wall-clock duration of one full batch, in nanoseconds.
  private final long nanosPerBatch;

  // Deadline (System.nanoTime) by which the current batch should complete.
  private long endOfNextBatchNanos;
  // Records emitted so far within the current batch.
  private int currentBatch;

  Throttler(long maxRecordsPerSecond, int numberOfParallelSubtasks) {
    Preconditions.checkArgument(
        maxRecordsPerSecond == -1 || maxRecordsPerSecond > 0,
        "maxRecordsPerSecond must be positive or -1 (infinite)");
    Preconditions.checkArgument(
        numberOfParallelSubtasks > 0, "numberOfParallelSubtasks must be greater than 0");

    if (maxRecordsPerSecond == -1) {
      // unlimited speed
      throttleBatchSize = -1;
      nanosPerBatch = 0;
      endOfNextBatchNanos = System.nanoTime() + nanosPerBatch;
      currentBatch = 0;
      return;
    }
    // Each parallel subtask gets an equal share of the global rate.
    final float ratePerSubtask = (float) maxRecordsPerSecond / numberOfParallelSubtasks;

    if (ratePerSubtask >= 10000) {
      // high rates: all throttling in intervals of 2ms
      throttleBatchSize = (int) ratePerSubtask / 500;
      nanosPerBatch = 2_000_000L;
    } else {
      // Lower rates: size batches to roughly 1/20 of a second's records,
      // so sleeps happen about every 50ms.
      throttleBatchSize = ((int) (ratePerSubtask / 20)) + 1;
      nanosPerBatch = ((int) (1_000_000_000L / ratePerSubtask)) * throttleBatchSize;
    }
    this.endOfNextBatchNanos = System.nanoTime() + nanosPerBatch;
    this.currentBatch = 0;
  }

  /**
   * Called once per record. Returns immediately mid-batch; at a batch
   * boundary, sleeps off any remaining time budget for the batch.
   */
  void throttle() throws InterruptedException {
    if (throttleBatchSize == -1) {
      return;
    }
    if (++currentBatch != throttleBatchSize) {
      return;
    }
    currentBatch = 0;

    final long now = System.nanoTime();
    final int millisRemaining = (int) ((endOfNextBatchNanos - now) / 1_000_000);

    if (millisRemaining > 0) {
      // Ahead of schedule: advance the deadline and sleep off the remainder.
      endOfNextBatchNanos += nanosPerBatch;
      Thread.sleep(millisRemaining);
    } else {
      // Behind schedule: restart timing from now rather than trying to catch up.
      endOfNextBatchNanos = now + nanosPerBatch;
    }
  }
}
81 |
--------------------------------------------------------------------------------
/flink-job/src/main/resources/log4j2.properties:
--------------------------------------------------------------------------------
1 | appender.console.type=Console
2 | appender.console.name=STDOUT
3 | appender.console.layout.type=PatternLayout
4 | appender.console.layout.pattern=%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n
5 |
6 | logger.console.name=org.apache.flink.fs.s3base.shaded.com.amazonaws.latency
7 | logger.console.level=WARN
8 |
9 | rootLogger.level=INFO
10 | rootLogger.appenderRef.console.ref=STDOUT
--------------------------------------------------------------------------------
/flink-job/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{60} %X{sourceThread} - %msg%n</pattern>
        </encoder>
    </appender>

    <root level="INFO">
        <appender-ref ref="STDOUT"/>
    </root>
</configuration>
--------------------------------------------------------------------------------
/flink-job/src/main/resources/test-sql-client-defaults.yaml:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # Licensed to the Apache Software Foundation (ASF) under one
3 | # or more contributor license agreements. See the NOTICE file
4 | # distributed with this work for additional information
5 | # regarding copyright ownership. The ASF licenses this file
6 | # to you under the Apache License, Version 2.0 (the
7 | # "License"); you may not use this file except in compliance
8 | # with the License. You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | ################################################################################
18 |
19 | #==============================================================================
20 | # TEST ENVIRONMENT FILE
21 | # General purpose default environment file.
22 | #==============================================================================
23 |
24 | # this file has variables that can be filled with content by replacing $VAR_XXX
25 |
26 | tables:
27 | - name: TableNumber1
28 | type: source-table
29 | $VAR_UPDATE_MODE
30 | schema:
31 | - name: IntegerField1
32 | type: INT
33 | - name: StringField1
34 | type: VARCHAR
35 | - name: TimestampField1
36 | type: TIMESTAMP
37 | connector:
38 | type: filesystem
39 | path: "$VAR_SOURCE_PATH1"
40 | format:
41 | type: csv
42 | fields:
43 | - name: IntegerField1
44 | type: INT
45 | - name: StringField1
46 | type: VARCHAR
47 | - name: TimestampField1
48 | type: TIMESTAMP
49 | line-delimiter: "\n"
50 | comment-prefix: "#"
51 |
52 | execution:
53 | planner: "$VAR_PLANNER"
54 | type: "$VAR_EXECUTION_TYPE"
55 | time-characteristic: event-time
56 | periodic-watermarks-interval: 99
57 | parallelism: 1
58 | max-parallelism: 16
59 | min-idle-state-retention: 1000
60 | max-idle-state-retention: 600000
61 | result-mode: "$VAR_RESULT_MODE"
62 | max-table-result-rows: "$VAR_MAX_ROWS"
63 | restart-strategy:
64 | type: "$VAR_RESTART_STRATEGY_TYPE"
65 | max-failures-per-interval: 10
66 | failure-rate-interval: 99000
67 | delay: 1000
68 |
69 | configuration:
70 | table.optimizer.join-reorder-enabled: false
71 |
72 | deployment:
73 | response-timeout: 5000
74 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/config/ConfigTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.ververica.field.config;
20 |
21 | import org.junit.Test;
22 |
23 | import static com.ververica.field.config.Parameters.KAFKA_HOST;
24 | import static com.ververica.field.config.Parameters.KAFKA_PORT;
25 | import static org.junit.Assert.assertEquals;
26 |
27 | public class ConfigTest {
28 |
29 | @Test
30 | public void testParameters() {
31 | String[] args = new String[] {"--kafka-host", "host-from-args"};
32 | Parameters parameters = Parameters.fromArgs(args);
33 | Config config = Config.fromParameters(parameters);
34 |
35 | final String kafkaHost = config.get(KAFKA_HOST);
36 | assertEquals("Wrong config parameter retrived", "host-from-args", kafkaHost);
37 | }
38 |
39 | @Test
40 | public void testParameterWithDefaults() {
41 | String[] args = new String[] {};
42 | Parameters parameters = Parameters.fromArgs(args);
43 | Config config = Config.fromParameters(parameters);
44 |
45 | final Integer kafkaPort = config.get(KAFKA_PORT);
46 | assertEquals("Wrong config parameter retrived", new Integer(9092), kafkaPort);
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/function/BroadcastEmbeddedFlinkFunctionTestBase.java:
--------------------------------------------------------------------------------
1 | //package com.ververica.field.function;
2 | //
3 | //import com.ververica.field.dynamicrules.converters.StringConverter;
4 | //import com.ververica.field.dynamicrules.functions.BroadcastEmbeddedFlinkFunction;
5 | //import com.ververica.field.function.sources.TestSource;
6 | //import com.ververica.field.function.sources.TestSourceConfig;
7 | //import com.ververica.field.dynamicrules.functions.SqlEvent;
8 | //import com.ververica.field.dynamicrules.sources.TimeBasedEvent;
9 | //import org.apache.flink.api.common.state.MapStateDescriptor;
10 | //import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
11 | //import org.apache.flink.api.common.typeinfo.TypeHint;
12 | //import org.apache.flink.api.common.typeinfo.TypeInformation;
13 | //import org.apache.flink.api.java.tuple.Tuple3;
14 | //import org.apache.flink.streaming.api.datastream.BroadcastStream;
15 | //import org.apache.flink.streaming.api.datastream.DataStream;
16 | //import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
17 | //import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
18 | //import org.apache.flink.types.Row;
19 | //import scala.reflect.ClassTag;
20 | //
21 | //import java.io.File;
22 | //import java.util.List;
23 | //
24 | //public class BroadcastEmbeddedFlinkFunctionTestBase {
25 | // String rootTestDir = System.getProperty("user.dir") + "/target/test-classes";
26 | //
27 | // String testInputDataPath;
28 | // String testBroadcastDataPath;
29 | // String defaultSql;
30 | // String keyByField;
31 | //
32 | // int timeSpeedMultiplier;
33 | // int processingTimeDelaySeconds;
34 | //
35 | // StringConverter inConverter;
36 | // List expressions;
37 | // String expectedOutputDataPath;
38 | // AssignerWithPeriodicWatermarks assigner;
39 | //
40 | // TypeInformation inTypeInfo;
41 | //
42 | // StreamExecutionEnvironment env;
43 | // ClassTag tag;
44 | //
45 | // public BroadcastEmbeddedFlinkFunctionTestBase(
46 | // String testInputDataPath,
47 | // String testBroadcastDataPath,
48 | // int timeSpeedMultiplier,
49 | // int processingTimeDelaySeconds,
50 | // String defaultSql,
51 | // StringConverter inConverter,
52 | // List expressions,
53 | // String expectedOutputDataPath,
54 | // AssignerWithPeriodicWatermarks assigner,
55 | // String keyByField,
56 | // TypeInformation inTypeInfo,
57 | // ClassTag tag) {
58 | // this.testInputDataPath = new File(testInputDataPath).getAbsolutePath();
59 | // this.testBroadcastDataPath = new File(testBroadcastDataPath).getAbsolutePath();
60 | // this.timeSpeedMultiplier = timeSpeedMultiplier;
61 | // this.processingTimeDelaySeconds = processingTimeDelaySeconds;
62 | // this.defaultSql = defaultSql;
63 | // this.inConverter = inConverter;
64 | // this.expressions = expressions;
65 | // this.expectedOutputDataPath = new File(expectedOutputDataPath).getAbsolutePath();
66 | // this.assigner = assigner;
67 | // this.keyByField = keyByField;
68 | // this.inTypeInfo = inTypeInfo;
69 | // this.tag = tag;
70 | // }
71 | //
72 | // public void run() throws Exception {
73 | // String actualOutputDataPath = rootTestDir + "/actual-output-data";
74 | //
75 | // try {
76 | // TestHelpers.deleteDir(actualOutputDataPath);
77 | // } catch (Exception e) {
78 | //
79 | // }
80 | //
81 | // StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
82 | //
83 | // TestSourceConfig sourceConfig =
84 | // new TestSourceConfig(
85 | // testInputDataPath, processingTimeDelaySeconds, timeSpeedMultiplier, ",");
86 | //
87 | // BroadcastEmbeddedFlinkFunction embeddedFlinkFunction =
88 | // new BroadcastEmbeddedFlinkFunction<>(
89 | // defaultSql, inTypeInfo, expressions, inConverter, assigner);
90 | //
91 | // DataStream input =
92 | // env.addSource(new TestSource<>(sourceConfig, tag))
93 | // .returns(inTypeInfo)
94 | // .assignTimestampsAndWatermarks(assigner)
95 | // .name("input")
96 | // .setParallelism(1)
97 | // .keyBy(keyByField);
98 | //
99 | // DataStream sqls =
100 | // env.readTextFile(testBroadcastDataPath)
101 | // .map(line -> (SqlEvent) new SqlEvent().apply(line, "\\|"))
102 | // .name("sqls")
103 | // .setParallelism(1);
104 | //
105 | // MapStateDescriptor ruleStateDescriptor =
106 | // new MapStateDescriptor<>(
107 | // "RulesBroadcastState",
108 | // BasicTypeInfo.STRING_TYPE_INFO,
109 | // TypeInformation.of(new TypeHint() {}));
110 | //
111 | // BroadcastStream sqlBroadcastStream = sqls.broadcast(ruleStateDescriptor);
112 | //
113 | // DataStream> output =
114 | // input.connect(sqlBroadcastStream).process(embeddedFlinkFunction).setParallelism(1);
115 | //
116 | // output.print();
117 | //
118 | // output.writeAsText(actualOutputDataPath);
119 | // env.execute();
120 | //
121 | // TestHelpers.assertExpectedEqualsActual(expectedOutputDataPath, actualOutputDataPath, true);
122 | // }
123 | //}
124 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/function/data/BalanceChangeEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.data;
2 |
3 | import lombok.Data;
4 | import lombok.NoArgsConstructor;
5 |
// Plain data holder for balance-change test events.
// Lombok @Data generates getters/setters/equals/hashCode/toString;
// @NoArgsConstructor supplies the empty constructor.
@NoArgsConstructor
@Data
public class BalanceChangeEvent {

  // MSISDN: the subscriber's phone-number identifier.
  String msisdn;
  // Event time as text; test fixtures use "yyyy-MM-dd HH:mm:ss" — TODO confirm format.
  String eventDate;
  // Event-time attribute as text (appears with fractional seconds in expected
  // test output, e.g. "1970-01-01 00:00:20.0") — TODO confirm exact format.
  String userActionTime;
}
14 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/function/data/CoreServiceUsageBillingEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.data;
2 |
3 | import com.ververica.field.dynamicrules.util.TimestampHelpers;
4 | import com.ververica.field.dynamicrules.sources.TimeBasedEvent;
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
8 | import java.io.Serializable;
9 |
10 | @NoArgsConstructor
11 | @Data
12 | public class CoreServiceUsageBillingEvent extends TimeBasedEvent implements Serializable {
13 |
14 | public String msisdn;
15 | public String eventDate;
16 | public String sourceEventType;
17 | public String sourceStatus;
18 | public String servedZone;
19 | public String otherZone;
20 | public Long consumptionAmount;
21 |
22 | public CoreServiceUsageBillingEvent(String inputLine, String delimiter) {
23 | String[] splitInput = inputLine.split(delimiter, 7);
24 |
25 | msisdn = splitInput[0];
26 | eventDate = splitInput[1];
27 | sourceEventType = splitInput[2];
28 | sourceStatus = splitInput[3];
29 | servedZone = splitInput[4];
30 | otherZone = splitInput[5];
31 | consumptionAmount = Long.parseLong(splitInput[6]);
32 | }
33 |
34 | @Override
35 | public Long getTimestamp() {
36 | return TimestampHelpers.toUnixtime(eventDate);
37 | }
38 |
39 | @Override
40 | public TimeBasedEvent apply(String inputLine, String delimiter) {
41 | return new CoreServiceUsageBillingEvent(inputLine, delimiter);
42 | }
43 |
44 | @Override
45 | public int compare(TimeBasedEvent o1, TimeBasedEvent o2) {
46 | return o1.compareTo(o2);
47 | }
48 |
49 | @Override
50 | public int compareTo(TimeBasedEvent other) {
51 | if (other == null) return 1;
52 | else if (getTimestamp() > other.getTimestamp()) return 1;
53 | else if (getTimestamp() == other.getTimestamp()) return 0;
54 | else return -1;
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/function/data/CoreServiceUsageEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.data;
2 |
3 | import lombok.Data;
4 | import lombok.NoArgsConstructor;
5 |
// Aggregated core-service usage record used in tests.
// Lombok @Data generates getters/setters/equals/hashCode/toString;
// @NoArgsConstructor supplies the empty constructor.
@NoArgsConstructor
@Data
public class CoreServiceUsageEvent {

  // MSISDN: the subscriber's phone-number identifier.
  String msisdn;
  // Event time as epoch value — TODO confirm unit (ms vs s) against producer.
  Long eventDate;
  // First/last zone values observed over the aggregation window —
  // presumably from CoreServiceUsageBillingEvent.otherZone/servedZone; verify against caller.
  String firstOtherZone;
  String lastOtherZone;
  String firstServedZone;
  String lastServedZone;
  // Source event type label (test data uses values like "OTHER").
  String eventType;
}
18 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/function/data/ShortBillingEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.data;
2 |
3 | import com.ververica.field.dynamicrules.util.TimestampHelpers;
4 | import com.ververica.field.dynamicrules.sources.TimeBasedEvent;
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
8 | import java.io.Serializable;
9 |
10 | @NoArgsConstructor
11 | @Data
12 | public class ShortBillingEvent extends TimeBasedEvent implements Serializable {
13 |
14 | public String msisdn;
15 | public String eventDate;
16 | public Long balanceBefore;
17 | public Long balanceAfter;
18 |
19 | public ShortBillingEvent(String inputLine, String delimiter) {
20 | String[] splitInput = inputLine.split(delimiter, 4);
21 |
22 | msisdn = splitInput[0];
23 | eventDate = splitInput[1];
24 | balanceBefore = Long.parseLong(splitInput[2]);
25 | balanceAfter = Long.parseLong(splitInput[3]);
26 | }
27 |
28 | @Override
29 | public Long getTimestamp() {
30 | return TimestampHelpers.toUnixtime(eventDate);
31 | }
32 |
33 | @Override
34 | public TimeBasedEvent apply(String inputLine, String delimiter) {
35 | return new ShortBillingEvent(inputLine, delimiter);
36 | }
37 |
38 | @Override
39 | public int compare(TimeBasedEvent o1, TimeBasedEvent o2) {
40 | return o1.compareTo(o2);
41 | }
42 |
43 | @Override
44 | public int compareTo(TimeBasedEvent other) {
45 | if (other == null) return 1;
46 | else if (getTimestamp() > other.getTimestamp()) return 1;
47 | else if (getTimestamp() == other.getTimestamp()) return 0;
48 | else return -1;
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/function/data/SubscriberTerminationBillingEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.data;
2 |
3 | import com.ververica.field.dynamicrules.util.TimestampHelpers;
4 | import com.ververica.field.dynamicrules.sources.TimeBasedEvent;
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
8 | import java.io.Serializable;
9 |
10 | @NoArgsConstructor
11 | @Data
12 | public class SubscriberTerminationBillingEvent extends TimeBasedEvent implements Serializable {
13 |
14 | public String msisdn;
15 | public String eventDate;
16 | public String sourceEventType;
17 | public String sourceStatus;
18 |
19 | public SubscriberTerminationBillingEvent(String inputLine, String delimiter) {
20 | String[] splitInput = inputLine.split(delimiter, 4);
21 |
22 | msisdn = splitInput[0];
23 | eventDate = splitInput[1];
24 | sourceEventType = splitInput[2];
25 | sourceStatus = splitInput[3];
26 | }
27 |
28 | @Override
29 | public Long getTimestamp() {
30 | return TimestampHelpers.toUnixtime(eventDate);
31 | }
32 |
33 | @Override
34 | public TimeBasedEvent apply(String inputLine, String delimiter) {
35 | return new SubscriberTerminationBillingEvent(inputLine, delimiter);
36 | }
37 |
38 | @Override
39 | public int compare(TimeBasedEvent o1, TimeBasedEvent o2) {
40 | return o1.compareTo(o2);
41 | }
42 |
43 | @Override
44 | public int compareTo(TimeBasedEvent other) {
45 | if (other == null) return 1;
46 | else if (getTimestamp() > other.getTimestamp()) return 1;
47 | else if (getTimestamp() == other.getTimestamp()) return 0;
48 | else return -1;
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/flink-job/src/test/java/com/ververica/field/function/data/SubscriberTerminationEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.data;
2 |
3 | import lombok.Data;
4 | import lombok.NoArgsConstructor;
5 |
// Plain data holder for subscriber-termination test events.
// Lombok @Data generates getters/setters/equals/hashCode/toString;
// @NoArgsConstructor supplies the empty constructor.
@NoArgsConstructor
@Data
public class SubscriberTerminationEvent {

  // MSISDN: the subscriber's phone-number identifier.
  String msisdn;
  // Event time as text; test fixtures use "yyyy-MM-dd HH:mm:ss" — TODO confirm format.
  String eventDate;
  // Kind of termination — value semantics not visible here; verify against producer.
  String terminationType;
}
14 |
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/balance_change_example_data.csv:
--------------------------------------------------------------------------------
1 | 1,1970-01-01 00:00:00,0,5
2 | 2,1970-01-01 00:00:00,0,6
3 | 3,1970-01-01 00:00:00,0,6
4 | 2,1970-01-01 00:00:10,6,9
5 | 3,1970-01-01 00:00:10,6,12
6 | 2,1970-01-01 00:00:20,9,12
7 | 3,1970-01-01 00:00:20,12,13
8 | 1,1970-01-01 00:00:30,5,10
9 | 2,1970-01-01 00:00:30,12,15
10 | 3,1970-01-01 00:00:30,13,9
11 | 1,1970-01-01 00:00:40,10,10
12 | 3,1970-01-01 00:00:40,9,12
13 | 1,1970-01-01 00:00:50,10,12
14 | 2,1970-01-01 00:00:50,15,18
15 | 1,1970-01-01 00:01:00,12,9
16 | 3,1970-01-01 00:01:00,12,8
17 | 3,1970-01-01 00:01:10,8,11
18 | 1,1970-01-01 00:01:40,9,8
19 | 3,1970-01-01 00:01:50,11,0
20 | 0,1970-01-01 00:01:50,0,0
21 |
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/balance_change_example_data_1.csv:
--------------------------------------------------------------------------------
1 | 1,1970-01-01 00:00:00,0,5
2 | 1,1970-01-01 00:00:30,5,10
3 | 1,1970-01-01 00:00:40,10,10
4 | 1,1970-01-01 00:00:50,10,12
5 | 1,1970-01-01 00:01:00,12,9
6 | 1,1970-01-01 00:01:40,9,8
7 | 0,1970-01-01 00:01:50,0,0
8 |
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/balance_change_example_data_2.csv:
--------------------------------------------------------------------------------
1 | 2,1970-01-01 00:00:00,0,6
2 | 2,1970-01-01 00:00:10,6,9
3 | 2,1970-01-01 00:00:20,9,12
4 | 2,1970-01-01 00:00:30,12,15
5 | 2,1970-01-01 00:00:50,15,18
6 |
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/balance_change_example_data_3.csv:
--------------------------------------------------------------------------------
1 | 3,1970-01-01 00:00:00,0,6
2 | 3,1970-01-01 00:00:10,6,12
3 | 3,1970-01-01 00:00:20,12,13
4 | 3,1970-01-01 00:00:30,13,9
5 | 3,1970-01-01 00:00:40,9,12
6 | 3,1970-01-01 00:01:00,12,8
7 | 3,1970-01-01 00:01:10,8,11
8 | 3,1970-01-01 00:01:50,11,0
9 |
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/broadcast_example_data.csv:
--------------------------------------------------------------------------------
1 | 1970-01-01 00:00:05|SELECT msisdn, eventDate FROM source_table WHERE eventDate = '1970-01-01 00:00:30'
2 | 1970-01-01 00:00:15|SELECT msisdn, eventDate FROM source_table WHERE 1=1
3 | 1970-01-01 00:00:35|SELECT msisdn, balanceBefore, balanceAfter FROM source_table WHERE 1=1
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/broadcast_example_data_empty.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/flink-job/src/test/resources/example-input-data/broadcast_example_data_empty.csv
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/broadcast_match_recognize_data.csv:
--------------------------------------------------------------------------------
1 | 1970-01-01 00:00:15|SELECT t.msisdn, t.eventDate FROM ( SELECT msisdn, eventDate, CASE WHEN balanceBefore <= 10 AND balanceAfter > 10 THEN 'A' WHEN balanceBefore > 10 AND balanceAfter <= 10 THEN 'C' ELSE 'B' END AS patternVal FROM source_table ) t LEFT JOIN ( SELECT x.msisdn, x.`timestamp` , x.begin_date FROM source_table MATCH_RECOGNIZE ( PARTITION BY msisdn ORDER BY user_action_time MEASURES LAST(user_action_time) AS `timestamp`, A.eventDate AS begin_date ONE ROW PER MATCH AFTER MATCH SKIP PAST LAST ROW PATTERN (A B*? C) WITHIN INTERVAL '25' SECOND DEFINE A AS A.balanceBefore <= 10 AND A.balanceAfter > 10, C AS C.balanceBefore > 10 AND C.balanceAfter <= 10 ) AS x ) p ON t.msisdn = p.msisdn AND t.eventDate = p.begin_date WHERE t.patternVal = 'A' AND p.msisdn is null
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/core_service_usage_example_data.csv:
--------------------------------------------------------------------------------
1 | 1,1970-01-01 00:00:00,OTHER,OTHER,,,0
2 | 1,1970-01-01 00:00:05,OTHER,OTHER,served1,other1,1
3 | 1,1970-01-01 00:00:10,OTHER,UPDATE,served2,other2,2
4 | 1,1970-01-01 00:00:20,OTHER,DELETE,served3,other3,3
5 | 1,1970-01-01 00:00:30,OTHER,OTHER,served4,other4,2
6 | 1,1970-01-01 00:00:40,OTHER,OTHER,served5,other5,1
7 | 1,1970-01-01 00:00:50,OTHER,OTHER,served6,other6,1
8 | 1,1970-01-01 00:01:00,OTHER,OTHER,served7,other7,2
9 | 1,1970-01-01 00:01:10,OTHER,UPDATE,served8,other8,7
10 | 1,1970-01-01 00:01:40,OTHER,DELETE,served9,other9,4
11 | 1,1970-01-01 00:01:50,OTHER,UPDATE,served10,other10,4
12 | 1,1970-01-01 00:02:00,OTHER,OTHER,served11,other11,4
--------------------------------------------------------------------------------
/flink-job/src/test/resources/example-input-data/subscriber_termination_example_data.csv:
--------------------------------------------------------------------------------
1 | 1,1970-01-01 00:00:00,OTHER,OTHER
2 | 1,1970-01-01 00:00:05,OTHER,OTHER
3 | 1,1970-01-01 00:00:10,OTHER,UPDATE
4 | 1,1970-01-01 00:00:20,OTHER,DELETE
5 | 1,1970-01-01 00:00:30,OTHER,OTHER
6 | 1,1970-01-01 00:00:40,OTHER,OTHER
7 | 1,1970-01-01 00:00:50,OTHER,OTHER
8 | 1,1970-01-01 00:01:00,OTHER,OTHER
9 | 1,1970-01-01 00:01:10,OTHER,UPDATE
10 | 1,1970-01-01 00:01:40,OTHER,DELETE
11 | 1,1970-01-01 00:01:50,OTHER,UPDATE
12 | 1,1970-01-01 00:02:00,OTHER,OTHER
13 | 1,1970-01-01 00:02:10,OTHER,OTHER
14 | 1,1970-01-01 00:03:10,OTHER,OTHER
--------------------------------------------------------------------------------
/flink-job/src/test/resources/expected-output-data/balance_change_alerts.csv:
--------------------------------------------------------------------------------
1 | 2,45000
2 | 3,95000
--------------------------------------------------------------------------------
/flink-job/src/test/resources/expected-output-data/balance_change_alerts_1.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/flink-job/src/test/resources/expected-output-data/balance_change_alerts_1.csv
--------------------------------------------------------------------------------
/flink-job/src/test/resources/expected-output-data/balance_change_alerts_2.csv:
--------------------------------------------------------------------------------
1 | true,BalanceChangeEvent(msisdn=2, eventDate=1970-01-01 00:00:20, userActionTime=1970-01-01 00:00:20.0)
--------------------------------------------------------------------------------
/flink-job/src/test/resources/expected-output-data/balance_change_alerts_3.csv:
--------------------------------------------------------------------------------
1 | true,BalanceChangeEvent(msisdn=3, eventDate=1970-01-01 00:01:10, userActionTime=1970-01-01 00:01:10.0)
--------------------------------------------------------------------------------
/flink-job/src/test/resources/expected-output-data/balance_change_alerts_broadcast.csv:
--------------------------------------------------------------------------------
1 | SELECT msisdn, eventDate FROM source_table WHERE 1=1,true,3,1970-01-01 00:00:20
2 | SELECT msisdn, eventDate FROM source_table WHERE 1=1,true,3,1970-01-01 00:01:10
3 | SELECT msisdn FROM source_table WHERE 1=1,true,3
4 | SELECT msisdn FROM source_table WHERE 1=1,true,3
5 | SELECT msisdn, eventDate FROM source_table WHERE 1=1,true,3,1970-01-01 00:00:10
6 | SELECT msisdn, balanceBefore, balanceAfter FROM source_table WHERE 1=1,true,3,12,8
7 | SELECT msisdn FROM source_table WHERE 1=1,true,3
8 | SELECT msisdn FROM source_table WHERE 1=1,true,3
9 | SELECT msisdn, eventDate FROM source_table WHERE 1=1,true,3,1970-01-01 00:00:40
10 | SELECT msisdn, balanceBefore, balanceAfter FROM source_table WHERE 1=1,true,3,12,13
11 | SELECT msisdn, balanceBefore, balanceAfter FROM source_table WHERE 1=1,true,3,8,11
12 | SELECT msisdn FROM source_table WHERE 1=1,true,3
13 | SELECT msisdn, eventDate FROM source_table WHERE 1=1,true,3,1970-01-01 00:01:00
14 | SELECT msisdn, balanceBefore, balanceAfter FROM source_table WHERE 1=1,true,3,9,12
15 | SELECT msisdn FROM source_table WHERE 1=1,true,3
16 | SELECT msisdn, balanceBefore, balanceAfter FROM source_table WHERE 1=1,true,3,13,9
17 | SELECT msisdn, balanceBefore, balanceAfter FROM source_table WHERE 1=1,true,3,11,0
18 | SELECT msisdn, eventDate FROM source_table WHERE 1=1,true,3,1970-01-01 00:00:30
19 | SELECT msisdn, eventDate FROM source_table WHERE 1=1,true,3,1970-01-01 00:01:50
20 | SELECT msisdn FROM source_table WHERE 1=1,true,3
21 | SELECT msisdn FROM source_table WHERE 1=1,true,3
22 |
--------------------------------------------------------------------------------
/flink-job/src/test/resources/expected-output-data/core_service_usage_alerts.csv:
--------------------------------------------------------------------------------
1 | 1,70000,0,other4:other5:other6:other7:other8,served4:served5:served6:served7:served8,OTHER
2 | 1,120000,0,other9:other10:other11,served9:served10:served11,OTHER
--------------------------------------------------------------------------------
/flink-job/src/test/resources/expected-output-data/subscriber_termination_alerts.csv:
--------------------------------------------------------------------------------
1 | 1,20000,OTHER
2 | 1,120000,OTHER
--------------------------------------------------------------------------------
/flink-job/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | # Set root logger level to OFF to not flood build logs
2 | # set manually to INFO for debugging purposes
3 | rootLogger.level = OFF
4 | rootLogger.appenderRef.test.ref = TestLogger
5 |
6 | appender.testlogger.name = TestLogger
7 | appender.testlogger.type = CONSOLE
8 | appender.testlogger.target = SYSTEM_ERR
9 | appender.testlogger.layout.type = PatternLayout
10 | appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n
11 |
# Resource leak detector only works with logging enabled at error level
13 | logger.netty.name = org.apache.flink.shaded.netty4.io.netty.util.ResourceLeakDetector
14 | logger.netty.level = ERROR
--------------------------------------------------------------------------------
/flink-job/src/test/scala/com/ververica/field/function/TestHelpers.scala:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function
2 |
3 | import java.io.File
4 |
5 | import scala.reflect.io.Directory
6 |
object TestHelpers {

  /** Reads a whole text file into a single string, always closing the source. */
  def loadWholeText(path: String): String = {
    val src = scala.io.Source.fromFile(path)
    try src.mkString
    finally src.close()
  }

  /** Returns the contents of every non-empty regular file directly under `dir`. */
  def loadAndConcatFilesInDir(dir: String): Array[String] = {
    val contents = new Directory(new File(dir)).files
      .map(file => loadWholeText(file.path))
      .toArray
    contents.filter(_.nonEmpty)
  }

  /** Recursively removes the directory holding a previous run's output. */
  def deleteDir(actualOutputData: String) =
    new Directory(new File(actualOutputData)).deleteRecursively()

  /**
   * Asserts that the sorted lines of the expected-output file match the sorted
   * lines gathered from the actual-output directory. When `wrappedActual` is
   * true, each actual line is first unwrapped by stripping everything up to and
   * including the first '(' and the trailing ')'.
   */
  def assertExpectedEqualsActual(expectedOutputData: String, actualOutputDataDir: String, wrappedActual: Boolean = true): Unit = {
    val expected: Array[String] = loadWholeText(expectedOutputData).split("\\n").sorted
    val actualLines: Array[String] = loadAndConcatFilesInDir(actualOutputDataDir)
      .flatMap(_.split("\\n"))
      .sorted
    val normalized: Array[String] =
      if (!wrappedActual) actualLines
      else actualLines.map(line => line.trim.dropWhile(_ != '(').drop(1).dropRight(1))

    assert(expected.length == normalized.length,
      f"Expected count: ${expected.length} not equal to actual count: ${normalized.length}")
    expected.zip(normalized).foreach { case (expectedLine, actualLine) =>
      assert(expectedLine.trim == actualLine.trim,
        f"""Expected "${expectedLine.trim}" not equal to actual "${actualLine.trim}"""")
    }
  }
}
42 |
--------------------------------------------------------------------------------
/flink-job/src/test/scala/com/ververica/field/function/sources/TestSource.scala:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.sources
2 |
3 | import com.ververica.field.dynamicrules.sources.TimeBasedEvent
4 | import org.apache.flink.streaming.api.functions.source.SourceFunction
5 |
6 | import scala.collection.mutable
7 | import scala.io.Source
8 | import scala.reflect.{ClassTag, _}
9 | import scala.util.{Random, Try}
10 |
/**
 * Flink source that replays a CSV file of time-based events, pacing emissions
 * so the stream runs `config.timeSpeedMultiplier` times faster than event time.
 */
case class TestSource[T <: TimeBasedEvent : ClassTag](config: TestSourceConfig)
  extends SourceFunction[T] {

  private var source: Source = _

  /** Reads the whole input file up front, then replays it as a timed stream. */
  override def run(ctx: SourceFunction.SourceContext[T]): Unit = {
    source = Source.fromFile(config.inputPath)
    val rawInputLines: Array[String] = source.getLines().toArray
    source.close()
    generateStream(ctx, rawInputLines)
  }

  /**
   * Emits events in processing-time order, sleeping between consecutive events.
   * Each event's processing time is its own timestamp plus a deterministic
   * pseudo-random delay of up to `config.processingTimeDelaySeconds` seconds.
   */
  def generateStream(ctx: SourceFunction.SourceContext[T], rawInputLines: Array[String]): Unit = {
    // Robustness fix: the original dequeued unconditionally and threw
    // NoSuchElementException when the input file was empty.
    if (rawInputLines.isEmpty) return

    val rng = new Random(1337) // fixed seed keeps replays reproducible
    // Ordering by negated processing time turns the max-heap into a min-heap.
    val eventQueue: mutable.PriorityQueue[(Long, T)] = mutable.PriorityQueue[(Long, T)]()(Ordering.by(-_._1))
    val eventTuples = rawInputLines.map(rawInput => {
      val event: T = TimeBasedEvent.createFromCsv[T](rawInput, classTag[T].runtimeClass.asInstanceOf[Class[T]])
      val processingTime = event.getTimestamp + rng.nextInt(config.processingTimeDelaySeconds + 1)
      (processingTime, event)
    })
    eventQueue.enqueue(eventTuples: _*)

    val firstEvent = eventQueue.dequeue()
    var currentEvent = firstEvent
    var nextEvent = Try(eventQueue.dequeue()).toOption
    while (nextEvent.isDefined) {
      ctx.collect(currentEvent._2)
      // Sleep for the scaled gap between this event and the next one.
      val currentDeltaMs = ((currentEvent._1 - firstEvent._1) * 1000) / config.timeSpeedMultiplier
      val nextDeltaMs = ((nextEvent.get._1 - firstEvent._1) * 1000) / config.timeSpeedMultiplier
      val sleepTimeMs = nextDeltaMs - currentDeltaMs
      Thread.sleep(sleepTimeMs)
      currentEvent = nextEvent.get
      nextEvent = Try(eventQueue.dequeue()).toOption
    }
    // Emit the final (or only) event.
    ctx.collect(currentEvent._2)
  }

  override def cancel(): Unit = {
    // Robustness fix: cancel() may be invoked before run() has assigned
    // `source`; the original would throw a NullPointerException here.
    if (source != null) source.close()
  }

}
54 |
55 |
56 |
--------------------------------------------------------------------------------
/flink-job/src/test/scala/com/ververica/field/function/sources/TestSourceConfig.scala:
--------------------------------------------------------------------------------
1 | package com.ververica.field.function.sources
2 |
/**
 * Configuration for the replaying test sources.
 *
 * @param inputPath path to the CSV file containing the test events
 * @param processingTimeDelaySeconds inclusive upper bound on the simulated
 *                                   per-event processing delay, in seconds
 *                                   (TestSource adds rng.nextInt(n + 1))
 * @param timeSpeedMultiplier replay speed-up factor relative to event time
 * @param testInputDelimiter field delimiter of the input records
 */
case class TestSourceConfig(inputPath: String,
                            processingTimeDelaySeconds: Int,
                            timeSpeedMultiplier: Int,
                            testInputDelimiter: String = ",")
7 |
--------------------------------------------------------------------------------
/webapp/.dockerignore:
--------------------------------------------------------------------------------
1 | .git
2 | node_modules
3 | build
4 | .idea
5 | #target
--------------------------------------------------------------------------------
/webapp/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["react-app"]
3 | }
4 |
--------------------------------------------------------------------------------
/webapp/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled class file
2 | *.class
3 |
4 | # Log file
5 | *.log
6 |
7 | # BlueJ files
8 | *.ctxt
9 |
10 | # Mobile Tools for Java (J2ME)
11 | .mtj.tmp/
12 |
13 | # Package Files #
14 | *.jar
15 | *.war
16 | *.nar
17 | *.ear
18 | *.zip
19 | *.tar.gz
20 | *.rar
21 |
22 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
23 | hs_err_pid*
24 |
25 | .idea
26 | target
27 | *.iml
28 |
29 |
30 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
31 |
32 | # dependencies
33 | /node_modules
34 | /.pnp
35 | .pnp.js
36 |
37 | # testing
38 | /coverage
39 |
40 | # production
41 | /build
42 |
43 | # misc
44 | .DS_Store
45 | .env.local
46 | .env.development.local
47 | .env.test.local
48 | .env.production.local
49 |
50 | npm-debug.log*
51 | yarn-debug.log*
52 | yarn-error.log*
53 | node_modules/
54 |
55 | .vscode
56 | .project
57 | /bin
58 | .gradle
59 |
60 |
61 | # Temporary
62 | .npmrc
--------------------------------------------------------------------------------
/webapp/.mvn/wrapper/MavenWrapperDownloader.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2007-present the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | import java.net.*;
17 | import java.io.*;
18 | import java.nio.channels.*;
19 | import java.util.Properties;
20 |
public class MavenWrapperDownloader {

    /** Version of the maven-wrapper jar fetched by this downloader. */
    private static final String WRAPPER_VERSION = "0.5.5";

    /**
     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
     */
    private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
        + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";

    /**
     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
     * use instead of the default one.
     */
    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
        ".mvn/wrapper/maven-wrapper.properties";

    /**
     * Path where the maven-wrapper.jar will be saved to.
     */
    private static final String MAVEN_WRAPPER_JAR_PATH =
        ".mvn/wrapper/maven-wrapper.jar";

    /**
     * Name of the property which should be used to override the default download url for the wrapper.
     */
    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";

    /**
     * Entry point: resolves the download URL (default, or overridden via the
     * wrapperUrl property in maven-wrapper.properties) and downloads
     * maven-wrapper.jar under the base directory given as args[0].
     * Exits with status 0 on success, 1 on failure.
     */
    public static void main(String args[]) {
        System.out.println("- Downloader started");
        File baseDirectory = new File(args[0]);
        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());

        // If the maven-wrapper.properties exists, read it and check if it contains a custom
        // wrapperUrl parameter.
        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
        String url = DEFAULT_DOWNLOAD_URL;
        if (mavenWrapperPropertyFile.exists()) {
            // try-with-resources closes the stream on every path (replaces the
            // original hand-rolled finally block, which was equivalent but noisier).
            try (FileInputStream mavenWrapperPropertyFileInputStream =
                     new FileInputStream(mavenWrapperPropertyFile)) {
                Properties mavenWrapperProperties = new Properties();
                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
            } catch (IOException e) {
                // Best-effort: fall back to the default URL on read failure.
                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
            }
        }
        System.out.println("- Downloading from: " + url);

        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
        if (!outputFile.getParentFile().exists()) {
            if (!outputFile.getParentFile().mkdirs()) {
                System.out.println(
                    "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
            }
        }
        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
        try {
            downloadFileFromURL(url, outputFile);
            System.out.println("Done");
            System.exit(0);
        } catch (Throwable e) {
            System.out.println("- Error downloading");
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Downloads urlString into destination. Honours the MVNW_USERNAME and
     * MVNW_PASSWORD environment variables for authentication.
     *
     * Fix: the original closed the channel and FileOutputStream only on the
     * success path, leaking both if the transfer threw; try-with-resources
     * closes them on all paths.
     */
    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
            String username = System.getenv("MVNW_USERNAME");
            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
            Authenticator.setDefault(new Authenticator() {
                @Override
                protected PasswordAuthentication getPasswordAuthentication() {
                    return new PasswordAuthentication(username, password);
                }
            });
        }
        URL website = new URL(urlString);
        try (ReadableByteChannel rbc = Channels.newChannel(website.openStream());
             FileOutputStream fos = new FileOutputStream(destination)) {
            fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
        }
    }

}
118 |
--------------------------------------------------------------------------------
/webapp/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.1/apache-maven-3.6.1-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.5/maven-wrapper-0.5.5.jar
3 |
--------------------------------------------------------------------------------
/webapp/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "printWidth": 120,
3 | "trailingComma": "es5"
4 | }
5 |
--------------------------------------------------------------------------------
/webapp/Dockerfile:
--------------------------------------------------------------------------------
# Start with a base image containing Java runtime
FROM openjdk:8-jdk-alpine

# Add a volume pointing to /tmp
VOLUME /tmp

# Make port 5656 available to the world outside this container
EXPOSE 5656

# COPY is preferred over ADD for plain local files (ADD's extra tar-extraction
# and remote-URL semantics are not needed here) — Dockerfile best practice.
COPY target/demo-backend-*.jar demo-backend.jar

# Run the jar file
ENTRYPOINT ["java","-Djava.security.egd=file:/dev/./urandom","-Dspring.profiles.active=dev","-jar","/demo-backend.jar"]
--------------------------------------------------------------------------------
/webapp/README.md:
--------------------------------------------------------------------------------
1 | To start Kafka:
2 |
3 | ```
4 | cd demo-backend
5 | docker-compose --log-level CRITICAL up
6 | ```
7 |
8 | To start backend/host frontend:
9 |
10 | ```
11 | mvn clean install spring-boot:run
12 | ```
13 |
14 | React App:
15 | The react app is now set to start both the java app and the react dev server, simply run
16 |
17 | ```
18 | npm install (to get all the new dependencies)
19 | npm start
20 | ```
21 |
22 | Two URLs:
23 |
24 | ```
25 | http://localhost:5656/transactions
26 | http://localhost:5656/temperature
27 | ```
28 |
29 | Swagger is available under:
30 |
31 | ```
32 | http://localhost:5656/swagger-ui.html
33 | ```
34 |
35 | Example SQL:
36 |
37 | ```
38 | SELECT payeeId FROM source_table WHERE paymentAmount > 10
39 | ```
40 |
41 |
42 | H2 Console:
43 |
44 | ```
45 | URL: http://localhost:5656/h2-console/
46 | ```
47 |
48 | | Setting | Value |
49 | | ------------ | ------------------ |
50 | | Driver Class | org.h2.Driver |
51 | | JDBC URL | jdbc:h2:mem:testdb |
52 | | User Name | sa |
53 | | Password | |
54 |
--------------------------------------------------------------------------------
/webapp/cloud.Dockerfile:
--------------------------------------------------------------------------------
# Multi-stage build: (1) build the React UI with node, (2) package it into the
# Spring Boot jar with Maven, (3) run the jar on a slim JRE-only image.

# --- UI Build
FROM node:10 as ui-build
WORKDIR /home/node/app

# Copy manifests and install first so the npm layer is cached independently
# of source changes.
COPY package.json package-lock.json tsconfig.json ./
COPY scripts scripts
RUN npm ci --unsafe-perm
COPY public public
COPY src/app src/app
COPY src/index.tsx src
COPY src/react-app-env.d.ts src
RUN npm run build

# --- Maven Build
FROM maven:3.6.2-jdk-8-openj9 as maven-build
WORKDIR /home/maven/work

# Pre-fetch dependencies from pom.xml alone so the verify step below can run
# offline (-o) and the dependency layer is cached.
COPY pom.xml .
RUN mvn -B -e -C -T 1C org.apache.maven.plugins:maven-dependency-plugin:3.1.1:go-offline
COPY . .
# Ship the built UI as static resources inside the jar.
COPY --from=ui-build /home/node/app/build /home/maven/work/target/classes/static/
RUN mvn -B -e -o -T 1C verify
# Strip the version suffix for a stable jar name in the final stage.
RUN mv target/demo-fraud-webapp*.jar target/demo-fraud-webapp.jar

# --- Main container
FROM openjdk:8-jdk-alpine as main

COPY --from=maven-build /home/maven/work/target/demo-fraud-webapp.jar .
EXPOSE 5656

ENTRYPOINT ["java","-Djava.security.egd=file:/dev/./urandom","-Dspring.profiles.active=cloud","-jar","demo-fraud-webapp.jar"]
32 |
--------------------------------------------------------------------------------
/webapp/cloudbuild.yaml:
--------------------------------------------------------------------------------
steps:
  - name: 'gcr.io/cloud-builders/npm'
    entrypoint: npm
    args: ['install']
  - name: 'gcr.io/cloud-builders/npm'
    entrypoint: npm
    args: ['run', 'postinstall']
  - name: 'gcr.io/cloud-builders/npm'
    entrypoint: npm
    args: ['run', 'build']
  # Debug step: list the workspace. With `bash -c`, the command must be a
  # single string — in the original, the trailing '-la' list item was bound
  # to $0 and silently ignored, so `ls` ran without -la.
  - name: 'ubuntu'
    args: ['bash', '-c', 'ls -la']
  - name: maven:3.6.1-jdk-8-slim
    entrypoint: 'mvn'
    args: ['--no-transfer-progress', '--batch-mode', 'package']
  - name: 'ubuntu'
    args: ['bash', '-c', 'ls -la']
  - name: 'gcr.io/cloud-builders/docker'
    args: ['build', '-t', 'gcr.io/$PROJECT_ID/demo-backend', '.']


images:
  - 'gcr.io/$PROJECT_ID/demo-backend'
--------------------------------------------------------------------------------
/webapp/mvn/MavenWrapperDownloader.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2007-present the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | import java.net.*;
17 | import java.io.*;
18 | import java.nio.channels.*;
19 | import java.util.Properties;
20 |
21 | public class MavenWrapperDownloader {
22 |
23 | private static final String WRAPPER_VERSION = "0.5.5";
24 | /**
25 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
26 | */
27 | private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
28 | + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
29 |
30 | /**
31 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
32 | * use instead of the default one.
33 | */
34 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
35 | ".mvn/wrapper/maven-wrapper.properties";
36 |
37 | /**
38 | * Path where the maven-wrapper.jar will be saved to.
39 | */
40 | private static final String MAVEN_WRAPPER_JAR_PATH =
41 | ".mvn/wrapper/maven-wrapper.jar";
42 |
43 | /**
44 | * Name of the property which should be used to override the default download url for the wrapper.
45 | */
46 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
47 |
48 | public static void main(String args[]) {
49 | System.out.println("- Downloader started");
50 | File baseDirectory = new File(args[0]);
51 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
52 |
53 | // If the maven-wrapper.properties exists, read it and check if it contains a custom
54 | // wrapperUrl parameter.
55 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
56 | String url = DEFAULT_DOWNLOAD_URL;
57 | if(mavenWrapperPropertyFile.exists()) {
58 | FileInputStream mavenWrapperPropertyFileInputStream = null;
59 | try {
60 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
61 | Properties mavenWrapperProperties = new Properties();
62 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
63 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
64 | } catch (IOException e) {
65 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
66 | } finally {
67 | try {
68 | if(mavenWrapperPropertyFileInputStream != null) {
69 | mavenWrapperPropertyFileInputStream.close();
70 | }
71 | } catch (IOException e) {
72 | // Ignore ...
73 | }
74 | }
75 | }
76 | System.out.println("- Downloading from: " + url);
77 |
78 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
79 | if(!outputFile.getParentFile().exists()) {
80 | if(!outputFile.getParentFile().mkdirs()) {
81 | System.out.println(
82 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
83 | }
84 | }
85 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
86 | try {
87 | downloadFileFromURL(url, outputFile);
88 | System.out.println("Done");
89 | System.exit(0);
90 | } catch (Throwable e) {
91 | System.out.println("- Error downloading");
92 | e.printStackTrace();
93 | System.exit(1);
94 | }
95 | }
96 |
97 | private static void downloadFileFromURL(String urlString, File destination) throws Exception {
98 | if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
99 | String username = System.getenv("MVNW_USERNAME");
100 | char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
101 | Authenticator.setDefault(new Authenticator() {
102 | @Override
103 | protected PasswordAuthentication getPasswordAuthentication() {
104 | return new PasswordAuthentication(username, password);
105 | }
106 | });
107 | }
108 | URL website = new URL(urlString);
109 | ReadableByteChannel rbc;
110 | rbc = Channels.newChannel(website.openStream());
111 | FileOutputStream fos = new FileOutputStream(destination);
112 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
113 | fos.close();
114 | rbc.close();
115 | }
116 |
117 | }
118 |
--------------------------------------------------------------------------------
/webapp/mvn/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.1/apache-maven-3.6.1-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.5/maven-wrapper-0.5.5.jar
3 |
--------------------------------------------------------------------------------
/webapp/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "front-end",
3 | "version": "0.1.0",
4 | "author": "Daryl Roberts ",
5 | "private": true,
6 | "dependencies": {
7 | "@fortawesome/fontawesome-svg-core": "^1.2.25",
8 | "@fortawesome/free-solid-svg-icons": "^5.11.2",
9 | "@fortawesome/react-fontawesome": "^0.1.7",
10 | "@types/uuid": "^3.4.5",
11 | "axios": "^0.21.1",
12 | "bootstrap": "^4.3.1",
13 | "bootswatch": "^4.3.1",
14 | "get-form-data": "^2.0.0",
15 | "leader-line": "^1.0.5",
16 | "lodash": "^4.17.15",
17 | "react": "^16.8.6",
18 | "react-dom": "^16.8.6",
19 | "react-rangeslider": "^2.2.0",
20 | "react-select": "^3.0.4",
21 | "react-stomp": "^4.1.1",
22 | "react-use": "^10.6.2",
23 | "react-virtualized": "^9.21.1",
24 | "reactstrap": "^8.0.1",
25 | "styled-components": "^4.3.2",
26 | "uuid": "^3.3.3"
27 | },
28 | "devDependencies": {
29 | "@types/classnames": "^2.2.9",
30 | "@types/lodash": "^4.14.137",
31 | "@types/react": "^16.9.2",
32 | "@types/react-dom": "^16.8.5",
33 | "@types/react-select": "^3.0.2",
34 | "@types/react-rangeslider": "^2.2.1",
35 | "@types/react-virtualized": "^9.21.4",
36 | "@types/reactstrap": "^8.0.1",
37 | "@types/styled-components": "^4.1.18",
38 | "concurrently": "^4.1.2",
39 | "http-proxy-middleware": "^0.19.1",
40 | "node-sass": "^4.12.0",
41 | "react-scripts": "3.0.1",
42 | "tslint": "^5.19.0",
43 | "tslint-config-prettier": "^1.18.0",
44 | "tslint-react": "^4.0.0",
45 | "typescript": "^3.5.3"
46 | },
47 | "eslintConfig": {
48 | "extends": "react-app"
49 | },
50 | "browserslist": {
51 | "production": [
52 | ">0.2%",
53 | "not dead",
54 | "not op_mini all"
55 | ],
56 | "development": [
57 | "last 1 chrome version",
58 | "last 1 firefox version",
59 | "last 1 safari version"
60 | ]
61 | },
62 | "scripts": {
63 | "build": "react-scripts build",
64 | "client": "react-scripts start",
65 | "postinstall": "./scripts/fix-leader-line.sh",
66 | "server": "mvn clean install spring-boot:run",
67 | "start-kafka": "docker-compose --log-level CRITICAL up -d",
68 | "start": "concurrently --names 'client,server' 'npm run client' 'npm run server'",
69 | "stop-kafka": "docker-compose down -v",
70 | "test": "react-scripts test"
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/webapp/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/webapp/public/favicon.ico
--------------------------------------------------------------------------------
/webapp/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
17 | React App
18 |
19 |
20 |
21 |
22 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/webapp/scripts/fix-leader-line.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Patch leader-line (which only sets a browser global) so it can be required
# as a CommonJS module. Idempotent: the grep guard ensures the export line is
# appended at most once, so re-running `npm install`/postinstall is safe.
if grep -q 'module.exports = LeaderLine' ./node_modules/leader-line/leader-line.min.js; then
  echo 'Leader Line already patched'
else
  # The backslash-newlines inside the double quotes are line continuations,
  # so the appended text ends up on a single line in the minified file.
  echo "\
if (module && module.exports) { module.exports = LeaderLine }\
" >> ./node_modules/leader-line/leader-line.min.js
  echo "Fixed LeaderLine!"
fi;
11 |
12 |
13 |
--------------------------------------------------------------------------------
/webapp/src/app/assets/app.scss:
--------------------------------------------------------------------------------
// App-wide styles: base layout plus cosmetic overrides for the react-select
// and react-virtualized third-party components.
@import "./theme";

body {
  // width: 1920px;
  // height: 1080px;
  position: relative;
  overflow: hidden;
}

// The repeated `& &__...` pattern compiles to e.g.
// `.react-select .react-select__control`, doubling specificity so these
// rules win over react-select's own injected styles.
.react-select {
  // Make the control look like a Bootstrap small form control.
  & &__control {
    @extend .form-control-sm;
    min-height: 0;
    height: auto;
    border-color: $gray-400;

    &--is-focused {
      border-color: $input-focus-border-color;
      box-shadow: 0 0 0 0.2rem rgba($primary, 0.25);
    }

    &:hover {
      border-color: $input-focus-border-color;
    }
  }

  & &__value-container {
    align-items: center;
    padding: 0;
    line-height: 1;

    // react-select's inner input wrapper has a generated class containing
    // "Input"; strip its spacing to keep the control compact.
    & [class*="Input"] {
      margin-top: 0;
      margin-bottom: 0;
      padding: 0;
    }
  }

  & &__indicator {
    padding: 0.5px;

    &-separator {
      margin-left: 6px;
      margin-top: 0;
      margin-bottom: 0;
    }
  }

  & &__clear-indicator {
    &:hover {
      color: $primary;
    }
  }

  & &__dropdown-indicator {
    color: $gray-700;
    margin-right: -1px;
    margin-left: 6px;

    &:hover {
      color: $primary;
    }
  }

  & &__option {
    padding-top: 1px;
    padding-bottom: 1px;
  }
}

// Hide focus outlines and scrollbars on virtualized lists.
.ReactVirtualized__List {
  &:focus {
    outline: 0;
  }

  &::-webkit-scrollbar {
    display: none;
  }
}
80 |
--------------------------------------------------------------------------------
/webapp/src/app/assets/flink_squirrel_200_color.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getindata/flink-dynamic-cep-demo/e8c06f2e7014258ee514653833ec942b3e3f4d2e/webapp/src/app/assets/flink_squirrel_200_color.png
--------------------------------------------------------------------------------
/webapp/src/app/assets/theme.scss:
--------------------------------------------------------------------------------
// Bootstrap theme overrides. Variables must be set BEFORE the bootstrap
// import below so they take effect (Bootstrap uses `!default` assignments).
$enable-rounded: false;

// Flink Colors
// $warning: #eea43c;
// $primary: #b479eb;

@import "~bootstrap/scss/bootstrap";
8 |
--------------------------------------------------------------------------------
/webapp/src/app/components/Alerts.tsx:
--------------------------------------------------------------------------------
1 | import React, { FC } from "react";
2 | import { Button, CardBody, CardHeader, Table, CardFooter, Badge } from "reactstrap";
3 | import styled from "styled-components/macro";
4 | import { faArrowRight } from "@fortawesome/free-solid-svg-icons";
5 | import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
6 |
7 | import { Alert, Rule } from "../interfaces";
8 | import { CenteredContainer } from "./CenteredContainer";
9 | import { ScrollingCol} from "./App";
10 | import { Payment, Payee, Details, Beneficiary, paymentTypeMap } from "./Transactions";
11 | import { Line } from "app/utils/useLines";
12 |
13 | const AlertTable = styled(Table)`
14 | && {
15 | width: calc(100% + 1px);
16 | border: 0;
17 | margin: 0;
18 |
19 | td {
20 | vertical-align: middle !important;
21 |
22 | &:first-child {
23 | border-left: 0;
24 | }
25 |
26 | &:last-child {
27 | border-right: 0;
28 | }
29 | }
30 |
31 | tr:first-child {
32 | td {
33 | border-top: 0;
34 | }
35 | }
36 | }
37 | `;
38 |
39 | export const Alerts: FC = props => {
40 |
41 | const tooManyAlerts = props.alerts.length > 40;
42 |
43 | const handleScroll = () => {
44 | props.lines.forEach(line => line.line.position());
45 | };
46 |
47 | return (
48 |
49 | {props.alerts.map((alert, idx) => {
50 | console.log(alert)
51 | return (
52 |
59 |
60 |
63 | Event at {new Date(alert.timestamp).toLocaleString()}
64 |
65 |
66 |
67 |
68 |
69 | Response |
70 |
71 | {alert.response.map(text =>{
72 | return | |
{"col" + alert.response.indexOf(text).toString()}
{text}
;
73 | })}
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 | );
82 | })}
83 |
84 | );
85 | };
86 |
87 | interface Props {
88 | alerts: Alert[];
89 | clearAlert: any;
90 | lines: Line[];
91 | // handleScroll: () => void;
92 | }
93 |
--------------------------------------------------------------------------------
/webapp/src/app/components/CenteredContainer.tsx:
--------------------------------------------------------------------------------
1 | import React, { forwardRef, ReactNode, CSSProperties } from "react";
2 | import { Card } from "reactstrap";
3 | import cx from "classnames";
4 |
5 | export const CenteredContainer = forwardRef((props: Props, ref) => {
6 | return (
7 |
12 | {props.children}
13 |
14 | );
15 | });
16 |
17 | interface Props {
18 | tooManyItems: boolean;
19 | children: ReactNode;
20 | className?: string;
21 | style?: CSSProperties;
22 | }
23 |
--------------------------------------------------------------------------------
/webapp/src/app/components/FieldGroup.tsx:
--------------------------------------------------------------------------------
1 | import { IconDefinition } from "@fortawesome/free-solid-svg-icons";
2 | import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
3 | import React, { FC } from "react";
4 | import { Col, FormGroup, Label } from "reactstrap";
5 | import styled from "styled-components";
6 |
7 | const LabelColumn = styled(Label)`
8 | text-align: right;
9 | white-space: nowrap;
10 | overflow: hidden;
11 | text-overflow: ellipsis;
12 | flex-basis: 33%;
13 | flex: 1 1 auto;
14 | `;
15 |
16 | const InputColumn = styled(Col)`
17 | flex-basis: 67%;
18 | flex: 1 1 auto;
19 | `;
20 |
21 | export const FieldGroup: FC = props => (
22 |
23 |
24 |
25 | {props.label}
26 |
27 | {props.children}
28 |
29 | );
30 |
31 | interface Props {
32 | label: string;
33 | icon: IconDefinition;
34 | }
35 |
--------------------------------------------------------------------------------
/webapp/src/app/components/Header.tsx:
--------------------------------------------------------------------------------
1 | import logoImage from "app/assets/flink_squirrel_200_color.png";
2 | import React, { FC, useState, Dispatch, SetStateAction } from "react";
3 | import { Button, ButtonGroup, Col, Navbar, NavbarBrand } from "reactstrap";
4 | import styled from "styled-components/macro";
5 | import { AddRuleModal } from "./AddRuleModal";
6 | import { Rule } from "app/interfaces";
7 |
8 | const AppNavbar = styled(Navbar)`
9 | && {
10 | z-index: 1;
11 | justify-content: flex-start;
12 | padding: 0;
13 | }
14 | `;
15 |
16 | const Logo = styled.img`
17 | max-height: 40px;
18 | `;
19 |
20 | const TransactionsCol = styled(Col)`
21 | border-right: 1px solid rgba(255, 255, 255, 0.125);
22 | display: flex;
23 | align-items: center;
24 | justify-content: space-between;
25 | padding: 0.5em 15px;
26 | `;
27 |
28 | export const Header: FC = props => {
29 | const [modalOpen, setModalOpen] = useState(false);
30 | const openRuleModal = () => setModalOpen(true);
31 | const closeRuleModal = () => setModalOpen(false);
32 | const toggleRuleModal = () => setModalOpen(state => !state);
33 |
34 | const startTransactions = () => fetch("/api/startTransactionsGeneration").then();
35 | const stopTransactions = () => fetch("/api/stopTransactionsGeneration").then();
36 |
37 | return (
38 | <>
39 |
40 |
41 | Live Transactions
42 |
43 |
46 |
49 |
50 |
51 |
52 |
53 |
56 |
57 |
58 |
59 | Flink Dynamic CEP Demo
60 |
61 |
62 |
63 |
64 | >
65 | );
66 | };
67 |
68 | interface Props {
69 | setRules: Dispatch>;
70 | }
71 |
--------------------------------------------------------------------------------
/webapp/src/app/components/Rules.tsx:
--------------------------------------------------------------------------------
1 | import { library } from "@fortawesome/fontawesome-svg-core";
2 | import {
3 | faArrowUp,
4 | faCalculator,
5 | faClock,
6 | faFont,
7 | faInfoCircle,
8 | faLaptopCode,
9 | faLayerGroup,
10 | IconDefinition,
11 | } from "@fortawesome/free-solid-svg-icons";
12 | import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
13 | import Axios from "axios";
14 | import { isArray } from "lodash/fp";
15 | import React, { FC } from "react";
16 |
17 | import { Badge, Button, CardBody, CardFooter, CardHeader, Table } from "reactstrap";
18 | import styled from "styled-components/macro";
19 | import { Alert, Rule } from "../interfaces";
20 | import { CenteredContainer } from "./CenteredContainer";
21 | import { ScrollingCol } from "./App";
22 | import { Line } from "app/utils/useLines";
23 |
24 | library.add(faInfoCircle);
25 |
26 | const badgeColorMap: {
27 | [s: string]: string;
28 | } = {
29 | ACTIVE: "success",
30 | DELETE: "danger",
31 | PAUSE: "warning",
32 | };
33 |
34 | const iconMap: {
35 | [s: string]: IconDefinition;
36 | } = {
37 | aggregateFieldName: faFont,
38 | aggregatorFunctionType: faCalculator,
39 | groupingKeyNames: faLayerGroup,
40 | limit: faArrowUp,
41 | limitOperatorType: faLaptopCode,
42 | windowMinutes: faClock,
43 | };
44 |
45 | const seperator: {
46 | [s: string]: string;
47 | } = {
48 | EQUAL: "to",
49 | GREATER: "than",
50 | GREATER_EQUAL: "than",
51 | LESS: "than",
52 | LESS_EQUAL: "than",
53 | NOT_EQUAL: "to",
54 | };
55 |
56 | const RuleTitle = styled.div`
57 | display: flex;
58 | align-items: center;
59 | `;
60 |
61 | const RuleTable = styled(Table)`
62 | && {
63 | width: calc(100% + 1px);
64 | border: 0;
65 | margin: 0;
66 |
67 | td {
68 | vertical-align: middle !important;
69 |
70 | &:first-child {
71 | border-left: 0;
72 | }
73 |
74 | &:last-child {
75 | border-right: 0;
76 | }
77 | }
78 |
79 | tr:first-child {
80 | td {
81 | border-top: 0;
82 | }
83 | }
84 | }
85 | `;
86 |
87 |
88 | // const omitFields = omit(["ruleId", "ruleState", "unique"]);
89 |
90 | const hasAlert = (alerts: Alert[], rule: Rule) => alerts.some(alert => alert.sql === rule.content);
91 |
92 | export const Rules: FC = props => {
93 | const handleDelete = (id: number) => () => {
94 | Axios.delete(`/api/sqls/${id}`).then(props.clearRule(id));
95 | };
96 |
97 | const handleScroll = () => {
98 | props.ruleLines.forEach(line => line.line.position());
99 | props.alertLines.forEach(line => line.line.position());
100 | };
101 |
102 | const tooManyRules = props.rules.length > 3;
103 |
104 | return (
105 |
106 | {props.rules.map(rule => {
107 | if (!rule.content) {
108 | return null;
109 | }
110 |
111 | return (
112 |
121 |
122 |
123 |
124 | Rule #{rule.id}{" "}
125 |
126 |
129 |
130 |
131 | {rule.content}
132 |
133 |
134 | );
135 | })}
136 |
137 | );
138 | };
139 |
140 | interface Props {
141 | alerts: Alert[];
142 | rules: Rule[];
143 | clearRule: (id: number) => () => void;
144 | ruleLines: Line[];
145 | alertLines: Line[];
146 | }
147 |
--------------------------------------------------------------------------------
/webapp/src/app/components/index.ts:
--------------------------------------------------------------------------------
1 | export { AddRuleModal } from "./AddRuleModal";
2 | export { App } from "./App";
3 | export { Header } from "./Header";
4 | export { Alerts } from "./Alerts";
5 | export { Rules } from "./Rules";
6 | export { Transactions } from "./Transactions";
7 |
--------------------------------------------------------------------------------
/webapp/src/app/interfaces/Alert.ts:
--------------------------------------------------------------------------------
1 | import { Transaction } from "./Transaction";
2 | import { RefObject } from "react";
3 |
// A fired SQL-rule result pushed to the UI.
export interface Alert {
  alertId: string; // unique id (used for dismissal / React keys)
  isAdded: boolean; // NOTE(review): appears to flag a freshly-received alert — confirm against the producer
  timestamp: number; // epoch millis; rendered via `new Date(alert.timestamp)` in Alerts.tsx
  response: string[]; // row of result values from the rule's SQL, rendered one cell per entry
  sql: string; // the originating rule's SQL text; used to match alerts back to rules
  ref: RefObject; // NOTE(review): generic parameter (likely RefObject<HTMLDivElement>) appears lost in this copy — restore it
  timeout: number
}
13 |
--------------------------------------------------------------------------------
/webapp/src/app/interfaces/Rule.ts:
--------------------------------------------------------------------------------
1 | import { RefObject } from "react";
2 |
// A deployed SQL rule as rendered in the Rules column.
export interface Rule {
  id: number; // server-assigned id; used in DELETE /api/sqls/{id}
  content: string; // the rule's SQL statement text
  ref: RefObject; // NOTE(review): generic parameter (likely RefObject<HTMLDivElement>) appears lost in this copy — restore it
}

// Request payload for creating a new rule.
export interface RulePayload {
  sql: string; // the SQL statement to deploy
}
--------------------------------------------------------------------------------
/webapp/src/app/interfaces/Transaction.ts:
--------------------------------------------------------------------------------
1 | // import { RefObject } from "react";
2 |
3 | // MSG
4 | // beneficiaryId: 42694
5 | // eventTime: 1565965071385
6 | // payeeId: 20908
7 | // paymentAmount: 13.54
8 | // paymentType: "CRD"
9 | // transactionId: 5954524216210268000
10 |
// Shape of a single payment event as received by the UI; field names match
// the sample JSON message documented above.
export interface Transaction {
  beneficiaryId: number; // receiving party id
  eventTime: number; // epoch millis
  payeeId: number; // paying party id
  paymentAmount: number;
  paymentType: string; // e.g. "CRD" per the sample message above
  transactionId: number;
}
19 |
--------------------------------------------------------------------------------
/webapp/src/app/interfaces/index.ts:
--------------------------------------------------------------------------------
// Barrel file: re-exports every interface module.
export * from "./Rule";
export * from "./Transaction";
export * from "./Alert";
4 |
--------------------------------------------------------------------------------
/webapp/src/app/utils/index.ts:
--------------------------------------------------------------------------------
1 | export { useLines } from "./useLines";
2 |
--------------------------------------------------------------------------------
/webapp/src/app/utils/useLines.ts:
--------------------------------------------------------------------------------
1 | import { Alert, Rule } from "app/interfaces";
2 | import LeaderLine from "leader-line";
3 | import { flattenDeep } from "lodash/fp";
4 | import { RefObject, useCallback, useEffect, useState } from "react";
5 |
// Draws LeaderLine connectors from the transactions panel to each rule card, and
// from each rule card to every alert whose SQL matches that rule. All lines are
// torn down and rebuilt whenever rules or alerts change.
export const useLines: UseLines = (transactionsRef, rules, alerts) => {
  const [lines, setLines] = useState([]);

  // Re-anchors every line to its elements' current positions (used on scroll).
  // Errors are swallowed because a line's element may already be unmounted.
  const updateLines = useCallback(() => {
    lines.forEach(line => {
      try {
        line.line.position();
      } catch {
        // nothing
      }
    });
  }, [lines]);

  useEffect(() => {
    const newLines = flattenDeep(
      rules.map(rule => {
        // A rule with at least one matching alert gets a red input line.
        const hasAlert = alerts.some(alert => alert.sql === rule.content);

        // Animated dashed line: transactions panel -> this rule card.
        const inputLine = new LeaderLine(transactionsRef.current, rule.ref.current, {
          color: hasAlert ? "#dc3545" : undefined,
          dash: { animation: true },
          endSocket: "left",
          startSocket: "right",
        }) as Line;

        // One white, red-outlined line per matching alert: rule card -> alert card.
        const outputLines = alerts.reduce((acc, alert) => {
          if (alert.sql === rule.content) {
            return [
              ...acc,
              new LeaderLine(rule.ref.current, alert.ref.current, {
                color: "#fff",
                endPlugOutline: true,
                endSocket: "left",
                outline: true,
                outlineColor: "#dc3545",
                startSocket: "right",
              }) as Line,
            ];
          }
          return acc;
        }, []);

        return [inputLine, ...outputLines];
      })
    );

    setLines(newLines);

    // Cleanup: remove rendered SVG lines before the next rebuild / on unmount.
    // NOTE(review): values are LeaderLine instances cast to `Line`, yet accessed
    // here and in updateLines via `.line.*`; this file does not show that a
    // LeaderLine instance has a `.line` property — confirm the cast is correct.
    return () => {
      newLines.forEach(line => line.line.remove());
    };
  }, [transactionsRef, rules, alerts]);

  return { lines, handleScroll: updateLines };
};
61 |
62 | type UseLines = (
63 | transactionsRef: RefObject,
64 | rules: Rule[],
65 | alerts: Alert[]
66 | ) => {
67 | lines: Line[];
68 | handleScroll: () => void;
69 | };
70 |
71 | export interface Line {
72 | line: {
73 | color: string;
74 | position: () => void;
75 | remove: () => void;
76 | };
77 | ruleId: number;
78 | }
79 |
--------------------------------------------------------------------------------
/webapp/src/index.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import ReactDOM from "react-dom";
3 | import { App } from "./app/components";
4 | import "react-rangeslider/umd/rangeslider.min.css";
5 |
6 | ReactDOM.render(, document.getElementById("root"));
7 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/Main.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend;
19 |
20 | import org.springframework.boot.SpringApplication;
21 | import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
22 | import org.springframework.boot.autoconfigure.SpringBootApplication;
23 | import org.springframework.context.annotation.ComponentScan;
24 |
25 | @SpringBootApplication
26 | @EnableAutoConfiguration
27 | @ComponentScan("com.ververica")
28 | public class Main {
29 | public static void main(String[] args) {
30 | SpringApplication.run(Main.class, args);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/configurations/KafkaConsumerConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.configurations;
19 |
20 | import java.util.HashMap;
21 | import java.util.Map;
22 | import org.apache.kafka.clients.consumer.ConsumerConfig;
23 | import org.apache.kafka.common.serialization.StringDeserializer;
24 | import org.springframework.beans.factory.annotation.Autowired;
25 | import org.springframework.beans.factory.annotation.Value;
26 | import org.springframework.context.annotation.Bean;
27 | import org.springframework.context.annotation.Configuration;
28 | import org.springframework.kafka.annotation.EnableKafka;
29 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
30 | import org.springframework.kafka.config.KafkaListenerContainerFactory;
31 | import org.springframework.kafka.core.ConsumerFactory;
32 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
33 | import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
34 |
35 | @EnableKafka
36 | @Configuration
37 | public class KafkaConsumerConfig {
38 |
39 | @Autowired private PropertyLogger propertyLogger;
40 |
41 | @Value("${kafka.bootstrap-servers}")
42 | public String bootstrapServer;
43 |
44 | @Bean
45 | public Map consumerConfigs() {
46 | Map props = new HashMap<>();
47 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
48 | props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
49 | props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
50 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "temp-groupid.group");
51 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
52 |
53 | return props;
54 | }
55 |
56 | @Bean
57 | public ConsumerFactory consumerFactory() {
58 | return new DefaultKafkaConsumerFactory<>(consumerConfigs());
59 | }
60 |
61 | @Bean
62 | public KafkaListenerContainerFactory>
63 | kafkaListenerContainerFactory() {
64 | ConcurrentKafkaListenerContainerFactory factory =
65 | new ConcurrentKafkaListenerContainerFactory();
66 | factory.setConsumerFactory(consumerFactory());
67 | return factory;
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/configurations/KafkaProducerConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.configurations;
19 |
20 | import java.util.HashMap;
21 | import java.util.Map;
22 | import org.apache.kafka.clients.producer.ProducerConfig;
23 | import org.apache.kafka.common.serialization.StringSerializer;
24 | import org.springframework.beans.factory.annotation.Value;
25 | import org.springframework.context.annotation.Bean;
26 | import org.springframework.context.annotation.Configuration;
27 | import org.springframework.kafka.core.DefaultKafkaProducerFactory;
28 | import org.springframework.kafka.core.KafkaTemplate;
29 | import org.springframework.kafka.core.ProducerFactory;
30 | import org.springframework.kafka.support.serializer.JsonSerializer;
31 |
32 | @Configuration
33 | public class KafkaProducerConfig {
34 |
35 | @Value("${kafka.bootstrap-servers}")
36 | private String bootstrapServers;
37 |
38 | @Bean
39 | public Map producerConfigsJson() {
40 | Map props = new HashMap<>();
41 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
42 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
43 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
44 | return props;
45 | }
46 |
47 | @Bean
48 | public Map producerConfigsString() {
49 | Map props = new HashMap<>();
50 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
51 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
52 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
53 | return props;
54 | }
55 |
56 | // Transactions
57 | @Bean
58 | public ProducerFactory producerFactoryForJson() {
59 | return new DefaultKafkaProducerFactory<>(producerConfigsJson());
60 | }
61 |
62 | @Bean
63 | public KafkaTemplate kafkaTemplateForJson() {
64 | return new KafkaTemplate<>(producerFactoryForJson());
65 | }
66 |
67 | // Strings
68 | @Bean
69 | public ProducerFactory producerFactoryForString() {
70 | return new DefaultKafkaProducerFactory<>(producerConfigsString());
71 | }
72 |
73 | @Bean
74 | public KafkaTemplate kafkaTemplateForString() {
75 | return new KafkaTemplate<>(producerFactoryForString());
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/configurations/PropertyLogger.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.configurations;
19 |
20 | import java.util.Arrays;
21 | import java.util.stream.StreamSupport;
22 | import org.slf4j.Logger;
23 | import org.slf4j.LoggerFactory;
24 | import org.springframework.beans.factory.annotation.Autowired;
25 | import org.springframework.context.ApplicationContext;
26 | import org.springframework.context.event.ContextRefreshedEvent;
27 | import org.springframework.context.event.EventListener;
28 | import org.springframework.core.env.AbstractEnvironment;
29 | import org.springframework.core.env.EnumerablePropertySource;
30 | import org.springframework.core.env.Environment;
31 | import org.springframework.core.env.MutablePropertySources;
32 | import org.springframework.stereotype.Component;
33 |
/**
 * Dumps the active Spring profiles and all enumerable configuration properties to
 * the log, skipping secrets and class-path entries.
 *
 * <p>NOTE(review): properties are logged both from the constructor and again from the
 * {@link ContextRefreshedEvent} listener, so startup produces (at least) two dumps —
 * confirm this duplication is intended.
 */
@Component
public class PropertyLogger {

  @Autowired
  public PropertyLogger(ApplicationContext context) {
    // Eager dump as soon as this bean is constructed.
    logProperties(context);
  }

  private static final Logger LOGGER = LoggerFactory.getLogger(PropertyLogger.class);

  /** Re-dumps properties on every context refresh. */
  @EventListener
  public void handleContextRefresh(ContextRefreshedEvent event) {
    logProperties(event.getApplicationContext());
  }

  /**
   * Walks every {@code EnumerablePropertySource} in the environment and logs each
   * distinct property name with its resolved value.
   */
  public void logProperties(ApplicationContext context) {
    final Environment env = context.getEnvironment();
    LOGGER.info("====== Environment and configuration ======");
    LOGGER.info("Active profiles: {}", Arrays.toString(env.getActiveProfiles()));
    final MutablePropertySources sources = ((AbstractEnvironment) env).getPropertySources();
    StreamSupport.stream(sources.spliterator(), false)
        .filter(ps -> ps instanceof EnumerablePropertySource)
        .map(ps -> ((EnumerablePropertySource) ps).getPropertyNames())
        .flatMap(Arrays::stream)
        .distinct()
        // Keep credentials/passwords and huge class-path values out of the log.
        .filter(
            prop ->
                !(prop.contains("credentials")
                    || prop.contains("password")
                    || prop.contains("java.class.path")
                    || prop.contains("sun.boot.class.path")))
        .forEach(prop -> LOGGER.info("{}: {}", prop, env.getProperty(prop)));
    LOGGER.info("===========================================");
  }
}
69 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/configurations/SwaggerConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.configurations;
19 |
20 | import org.springframework.context.annotation.Bean;
21 | import org.springframework.context.annotation.Configuration;
22 | import springfox.documentation.builders.ApiInfoBuilder;
23 | import springfox.documentation.builders.PathSelectors;
24 | import springfox.documentation.builders.RequestHandlerSelectors;
25 | import springfox.documentation.service.ApiInfo;
26 | import springfox.documentation.spi.DocumentationType;
27 | import springfox.documentation.spring.web.plugins.Docket;
28 | import springfox.documentation.swagger2.annotations.EnableSwagger2;
29 |
30 | @Configuration
31 | @EnableSwagger2
32 | public class SwaggerConfig {
33 | @Bean
34 | public Docket api() {
35 | return new Docket(DocumentationType.SWAGGER_2)
36 | .select()
37 | .apis(RequestHandlerSelectors.basePackage("com.ververica"))
38 | .paths(PathSelectors.regex("/.*"))
39 | .build()
40 | .apiInfo(apiEndPointsInfo());
41 | }
42 |
43 | private ApiInfo apiEndPointsInfo() {
44 | return new ApiInfoBuilder()
45 | .title("Ververica Demo REST API")
46 | .description("Ververica Demo Management REST API")
47 | .license("Apache 2.0")
48 | .licenseUrl("http://www.apache.org/licenses/LICENSE-2.0.html")
49 | .version("1.0.0")
50 | .build();
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/configurations/WebSocketConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.configurations;
19 |
20 | import org.springframework.context.annotation.Configuration;
21 | import org.springframework.messaging.simp.config.MessageBrokerRegistry;
22 | import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker;
23 | import org.springframework.web.socket.config.annotation.StompEndpointRegistry;
24 | import org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer;
25 |
/**
 * WebSocket/STOMP wiring: a SockJS-backed endpoint for clients and an in-memory
 * simple broker for outgoing messages.
 */
@Configuration
@EnableWebSocketMessageBroker
public class WebSocketConfig implements WebSocketMessageBrokerConfigurer {

  /** Clients connect at {@code /ws/backend}, with SockJS fallback transports enabled. */
  @Override
  public void registerStompEndpoints(StompEndpointRegistry registry) {
    registry.addEndpoint("/ws/backend").withSockJS();
  }

  /** Messages are broadcast on destinations prefixed with {@code /topic}. */
  @Override
  public void configureMessageBroker(MessageBrokerRegistry registry) {
    registry.enableSimpleBroker("/topic");
  }
}
40 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/controllers/DataGenerationController.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.controllers;
19 |
20 | import com.ververica.demo.backend.datasource.DemoTransactionsGenerator;
21 | import com.ververica.demo.backend.datasource.TransactionsGenerator;
22 | import com.ververica.demo.backend.services.KafkaTransactionsPusher;
23 | import java.util.concurrent.ExecutorService;
24 | import java.util.concurrent.Executors;
25 | import lombok.extern.slf4j.Slf4j;
26 | import org.springframework.beans.factory.annotation.Autowired;
27 | import org.springframework.beans.factory.annotation.Value;
28 | import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
29 | import org.springframework.kafka.listener.MessageListenerContainer;
30 | import org.springframework.web.bind.annotation.GetMapping;
31 | import org.springframework.web.bind.annotation.PathVariable;
32 | import org.springframework.web.bind.annotation.RestController;
33 |
34 | @RestController
35 | @Slf4j
36 | public class DataGenerationController {
37 |
38 | private TransactionsGenerator transactionsGenerator;
39 | private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;
40 |
41 | private ExecutorService executor = Executors.newSingleThreadExecutor();
42 | private boolean generatingTransactions = false;
43 | private boolean listenerContainerRunning = true;
44 |
45 | @Value("${kafka.listeners.transactions.id}")
46 | private String transactionListenerId;
47 |
48 | @Value("${transactionsRateDisplayLimit}")
49 | private int transactionsRateDisplayLimit;
50 |
51 | @Autowired
52 | public DataGenerationController(
53 | KafkaTransactionsPusher transactionsPusher,
54 | KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry) {
55 | transactionsGenerator = new DemoTransactionsGenerator(transactionsPusher, 1);
56 | this.kafkaListenerEndpointRegistry = kafkaListenerEndpointRegistry;
57 | }
58 |
59 | @GetMapping("/api/startTransactionsGeneration")
60 | public void startTransactionsGeneration() throws Exception {
61 | log.info("{}", "startTransactionsGeneration called");
62 | generateTransactions();
63 | }
64 |
65 | private void generateTransactions() {
66 | if (!generatingTransactions) {
67 | executor.submit(transactionsGenerator);
68 | generatingTransactions = true;
69 | }
70 | }
71 |
72 | @GetMapping("/api/stopTransactionsGeneration")
73 | public void stopTransactionsGeneration() {
74 | transactionsGenerator.cancel();
75 | generatingTransactions = false;
76 | log.info("{}", "stopTransactionsGeneration called");
77 | }
78 |
79 | @GetMapping("/api/generatorSpeed/{speed}")
80 | public void setGeneratorSpeed(@PathVariable double speed) {
81 | log.info("Generator speed change request: " + speed);
82 | if (speed <= 0) {
83 | transactionsGenerator.cancel();
84 | generatingTransactions = false;
85 | return;
86 | } else {
87 | generateTransactions();
88 | }
89 |
90 | MessageListenerContainer listenerContainer =
91 | kafkaListenerEndpointRegistry.getListenerContainer(transactionListenerId);
92 | if (speed > transactionsRateDisplayLimit) {
93 | listenerContainer.stop();
94 | listenerContainerRunning = false;
95 | } else if (!listenerContainerRunning) {
96 | listenerContainer.start();
97 | }
98 |
99 | if (transactionsGenerator != null) {
100 | transactionsGenerator.adjustMaxRecordsPerSecond(speed);
101 | }
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/controllers/SqlsController.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.controllers;
19 |
20 | import com.fasterxml.jackson.core.JsonProcessingException;
21 | import com.fasterxml.jackson.databind.ObjectMapper;
22 | import com.ververica.demo.backend.configurations.PropertyLogger;
23 | import com.ververica.demo.backend.repositories.SqlRepository;
24 | import com.ververica.demo.backend.repositories.SqlRepositoryEvent;
25 | import com.ververica.demo.backend.services.FlinkSqlService;
26 | import org.slf4j.Logger;
27 | import org.slf4j.LoggerFactory;
28 | import org.springframework.web.bind.annotation.*;
29 |
30 | import java.io.IOException;
31 | import java.util.List;
32 | import java.util.Optional;
33 |
34 | @RestController
35 | @RequestMapping("/api")
36 | class SqlsController {
37 | private static final Logger LOGGER = LoggerFactory.getLogger(PropertyLogger.class);
38 |
39 | private final SqlRepository repository;
40 | private final FlinkSqlService flinkSqlService;
41 |
42 | SqlsController(SqlRepository repository, FlinkSqlService flinkSqlService) {
43 | this.repository = repository;
44 | this.flinkSqlService = flinkSqlService;
45 | }
46 |
47 | private final ObjectMapper mapper = new ObjectMapper();
48 |
49 | @GetMapping("/sqls")
50 | List all() {
51 | return repository.findAll();
52 | }
53 |
54 | @PostMapping("/sqls")
55 | SqlRepositoryEvent newSql(@RequestBody SqlRepositoryEvent newSql) throws IOException {
56 | LOGGER.info("New SQL: " + newSql.content);
57 | SqlRepositoryEvent savedSql = repository.save(newSql);
58 |
59 | flinkSqlService.addSql(savedSql);
60 | return savedSql;
61 | }
62 |
63 | @GetMapping("/sqls/pushToFlink")
64 | void pushToFlink() {
65 | List sqls = repository.findAll();
66 | for (SqlRepositoryEvent sql : sqls) {
67 | flinkSqlService.addSql(sql);
68 | }
69 | }
70 |
71 | @DeleteMapping("/sqls/{id}")
72 | void deleteSql(@PathVariable Integer id) {
73 | Optional maybeSql = repository.findById(id);
74 |
75 | if (maybeSql.isPresent()) {
76 | SqlRepositoryEvent sqlToRemove = maybeSql.get();
77 | repository.deleteById(id);
78 | flinkSqlService.deleteSql(sqlToRemove);
79 | }
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/datasource/DemoTransactionsGenerator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.datasource;
19 |
20 | import java.math.BigDecimal;
21 | import java.util.SplittableRandom;
22 | import java.util.function.Consumer;
23 | import lombok.extern.slf4j.Slf4j;
24 |
25 | @Slf4j
26 | public class DemoTransactionsGenerator extends TransactionsGenerator {
27 |
28 | private long lastPayeeIdBeneficiaryIdTriggered = System.currentTimeMillis();
29 | private long lastBeneficiaryIdTriggered = System.currentTimeMillis();
30 | private BigDecimal beneficiaryLimit = new BigDecimal(10000000);
31 | private BigDecimal payeeBeneficiaryLimit = new BigDecimal(20000000);
32 |
33 | public DemoTransactionsGenerator(Consumer consumer, double maxRecordsPerSecond) {
34 | super(consumer, maxRecordsPerSecond);
35 | }
36 |
37 | protected Transaction randomEvent(SplittableRandom rnd) {
38 | Transaction transaction = super.randomEvent(rnd);
39 | long now = System.currentTimeMillis();
40 | if (now - lastBeneficiaryIdTriggered > 8000 + rnd.nextInt(5000)) {
41 | transaction.setPaymentAmount(beneficiaryLimit.add(new BigDecimal(rnd.nextInt(1000000))));
42 | this.lastBeneficiaryIdTriggered = System.currentTimeMillis();
43 | }
44 | if (now - lastPayeeIdBeneficiaryIdTriggered > 12000 + rnd.nextInt(10000)) {
45 | transaction.setPaymentAmount(payeeBeneficiaryLimit.add(new BigDecimal(rnd.nextInt(1000000))));
46 | this.lastPayeeIdBeneficiaryIdTriggered = System.currentTimeMillis();
47 | }
48 | return transaction;
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/datasource/RulesBootstrapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.datasource;
19 |
20 | import com.ververica.demo.backend.repositories.SqlRepository;
21 | import com.ververica.demo.backend.repositories.SqlRepositoryEvent;
22 | import com.ververica.demo.backend.services.FlinkSqlService;
23 | import java.util.List;
24 | import org.springframework.beans.factory.annotation.Autowired;
25 | import org.springframework.boot.ApplicationArguments;
26 | import org.springframework.boot.ApplicationRunner;
27 | import org.springframework.stereotype.Component;
28 |
29 | @Component
30 | public class RulesBootstrapper implements ApplicationRunner {
31 |
32 | private SqlRepository sqlRepository;
33 | private FlinkSqlService flinkSqlService;
34 |
35 | @Autowired
36 | public RulesBootstrapper(SqlRepository userRepository, FlinkSqlService flinkSqlService) {
37 | this.sqlRepository = userRepository;
38 | this.flinkSqlService = flinkSqlService;
39 | }
40 |
41 | public void run(ApplicationArguments args) {
42 | String payload1 = "SELECT SUM(paymentAmount)\nFROM source_table\nWHERE paymentAmount <= 20";
43 |
44 | SqlRepositoryEvent sql1 = new SqlRepositoryEvent(payload1, 1);
45 |
46 | sqlRepository.save(sql1);
47 |
48 | List sqls = sqlRepository.findAll();
49 | sqls.forEach(sql -> flinkSqlService.addSql(sql));
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/datasource/Throttler.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.datasource;
19 |
20 | import com.google.common.base.Preconditions;
21 | import lombok.extern.slf4j.Slf4j;
22 |
23 | /** Utility to throttle a thread to a given number of executions (records) per second. */
24 | @Slf4j
25 | final class Throttler {
26 |
27 | private long throttleBatchSize;
28 | private long nanosPerBatch;
29 |
30 | private long endOfNextBatchNanos;
31 | private long currentBatch;
32 |
33 | Throttler(double maxRecordsPerSecond) {
34 | setup(maxRecordsPerSecond);
35 | }
36 |
37 | public void adjustMaxRecordsPerSecond(double maxRecordsPerSecond) {
38 | setup(maxRecordsPerSecond);
39 | }
40 |
41 | private synchronized void setup(double maxRecordsPerSecond) {
42 | Preconditions.checkArgument(
43 | maxRecordsPerSecond == -1 || maxRecordsPerSecond > 0,
44 | "maxRecordsPerSecond must be positive or -1 (infinite)");
45 |
46 | if (maxRecordsPerSecond == -1) {
47 | // unlimited speed
48 | throttleBatchSize = -1;
49 | nanosPerBatch = 0;
50 | endOfNextBatchNanos = System.nanoTime() + nanosPerBatch;
51 | currentBatch = 0;
52 | return;
53 | }
54 |
55 | if (maxRecordsPerSecond >= 10000) {
56 | // high rates: all throttling in intervals of 2ms
57 | throttleBatchSize = (long) maxRecordsPerSecond / 500;
58 | nanosPerBatch = 2_000_000L;
59 | } else {
60 | throttleBatchSize = ((long) (maxRecordsPerSecond / 20)) + 1;
61 | nanosPerBatch = ((long) (1_000_000_000L / maxRecordsPerSecond)) * throttleBatchSize;
62 | }
63 | this.endOfNextBatchNanos = System.nanoTime() + nanosPerBatch;
64 | this.currentBatch = 0;
65 | log.info("throttleBatchSize: " + throttleBatchSize);
66 | log.info("nanosPerBatch: " + nanosPerBatch);
67 | log.info("endOfNextBatchNanos: " + endOfNextBatchNanos);
68 | log.info("currentBatch: " + currentBatch);
69 | }
70 |
71 | synchronized void throttle() throws InterruptedException {
72 | if (throttleBatchSize == -1) {
73 | return;
74 | }
75 | if (++currentBatch != throttleBatchSize) {
76 | return;
77 | }
78 | currentBatch = 0;
79 |
80 | final long now = System.nanoTime();
81 | final long millisRemaining = ((endOfNextBatchNanos - now) / 1_000_000);
82 |
83 | if (millisRemaining > 0) {
84 | endOfNextBatchNanos += nanosPerBatch;
85 | Thread.sleep(millisRemaining);
86 | } else {
87 | endOfNextBatchNanos = now + nanosPerBatch;
88 | }
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/datasource/Transaction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.datasource;
19 |
20 | import java.math.BigDecimal;
21 | import java.time.ZoneOffset;
22 | import java.time.ZonedDateTime;
23 | import java.time.format.DateTimeFormatter;
24 | import java.util.Arrays;
25 | import java.util.Iterator;
26 | import java.util.List;
27 | import java.util.Locale;
28 | import lombok.AllArgsConstructor;
29 | import lombok.Builder;
30 | import lombok.Data;
31 | import lombok.NoArgsConstructor;
32 |
33 | @Data
34 | @Builder
35 | @NoArgsConstructor
36 | @AllArgsConstructor
37 | public class Transaction {
38 | public long transactionId;
39 | public long eventTime;
40 | public long payeeId;
41 | public long beneficiaryId;
42 | public BigDecimal paymentAmount;
43 | public String paymentType;
44 |
45 | private static transient DateTimeFormatter timeFormatter =
46 | DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
47 | .withLocale(Locale.US)
48 | .withZone(ZoneOffset.UTC);
49 |
50 | public static Transaction fromString(String line) {
51 | List tokens = Arrays.asList(line.split(","));
52 | int numArgs = 6;
53 | if (tokens.size() != numArgs) {
54 | throw new RuntimeException(
55 | "Invalid transaction: "
56 | + line
57 | + ". Required number of arguments: "
58 | + numArgs
59 | + " found "
60 | + tokens.size());
61 | }
62 |
63 | Transaction transaction = new Transaction();
64 |
65 | try {
66 | Iterator iter = tokens.iterator();
67 | transaction.transactionId = Long.parseLong(iter.next());
68 | transaction.eventTime =
69 | ZonedDateTime.parse(iter.next(), timeFormatter).toInstant().toEpochMilli();
70 | transaction.payeeId = Long.parseLong(iter.next());
71 | transaction.beneficiaryId = Long.parseLong(iter.next());
72 | transaction.paymentType = iter.next();
73 | transaction.paymentAmount = new BigDecimal(iter.next());
74 | } catch (NumberFormatException nfe) {
75 | throw new RuntimeException("Invalid record: " + line, nfe);
76 | }
77 |
78 | return transaction;
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/datasource/TransactionsGenerator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.datasource;
19 |
20 | import java.math.BigDecimal;
21 | import java.util.SplittableRandom;
22 | import java.util.concurrent.ThreadLocalRandom;
23 | import java.util.function.Consumer;
24 | import lombok.extern.slf4j.Slf4j;
25 |
26 | @Slf4j
27 | public class TransactionsGenerator implements Runnable {
28 |
29 | private static long MAX_PAYEE_ID = 10;
30 | private static long MAX_BENEFICIARY_ID = 10;
31 |
32 | private static double MIN_PAYMENT_AMOUNT = 5d;
33 | private static double MAX_PAYMENT_AMOUNT = 20d;
34 | private final Throttler throttler;
35 |
36 | private volatile boolean running = true;
37 |
38 | private Consumer consumer;
39 |
40 | public TransactionsGenerator(Consumer consumer, double maxRecordsPerSecond) {
41 | this.consumer = consumer;
42 | this.throttler = new Throttler(maxRecordsPerSecond);
43 | }
44 |
45 | public void adjustMaxRecordsPerSecond(double maxRecordsPerSecond) {
46 | throttler.adjustMaxRecordsPerSecond(maxRecordsPerSecond);
47 | }
48 |
49 | protected Transaction randomEvent(SplittableRandom rnd) {
50 | long transactionId = rnd.nextLong(Long.MAX_VALUE);
51 | long payeeId = rnd.nextLong(MAX_PAYEE_ID);
52 | long beneficiaryId = rnd.nextLong(MAX_BENEFICIARY_ID);
53 | double paymentAmountDouble =
54 | ThreadLocalRandom.current().nextDouble(MIN_PAYMENT_AMOUNT, MAX_PAYMENT_AMOUNT);
55 | paymentAmountDouble = Math.floor(paymentAmountDouble * 100) / 100;
56 | BigDecimal paymentAmount = BigDecimal.valueOf(paymentAmountDouble);
57 |
58 | return Transaction.builder()
59 | .transactionId(transactionId)
60 | .payeeId(payeeId)
61 | .beneficiaryId(beneficiaryId)
62 | .paymentAmount(paymentAmount)
63 | .paymentType(paymentType(transactionId))
64 | .eventTime(System.currentTimeMillis())
65 | .build();
66 | }
67 |
68 | private static String paymentType(long id) {
69 | int name = (int) (id % 2);
70 | switch (name) {
71 | case 0:
72 | return "CRD";
73 | case 1:
74 | return "CSH";
75 | default:
76 | throw new IllegalStateException("");
77 | }
78 | }
79 |
80 | @Override
81 | public final void run() {
82 | running = true;
83 |
84 | final SplittableRandom rnd = new SplittableRandom();
85 |
86 | while (running) {
87 | Transaction event = randomEvent(rnd);
88 | log.debug("{}", event);
89 | consumer.accept(event);
90 | try {
91 | throttler.throttle();
92 | } catch (InterruptedException e) {
93 | throw new RuntimeException(e);
94 | }
95 | }
96 | log.info("Finished run()");
97 | }
98 |
99 | public final void cancel() {
100 | running = false;
101 | log.info("Cancelled");
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/entities/Rule.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.entities;
19 |
20 | import javax.persistence.Entity;
21 | import javax.persistence.GeneratedValue;
22 | import javax.persistence.GenerationType;
23 | import javax.persistence.Id;
24 | import lombok.AllArgsConstructor;
25 | import lombok.Data;
26 | import lombok.NoArgsConstructor;
27 |
/** JPA entity holding one fraud-detection rule definition. */
@Entity
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Rule {

  /** Convenience constructor for a not-yet-persisted rule; the id is assigned by the database. */
  public Rule(String rulePayload) {
    this.rulePayload = rulePayload;
  }

  // Auto-incremented primary key.
  @Id
  @GeneratedValue(strategy = GenerationType.IDENTITY)
  private Integer id;

  // Serialized rule definition; format is defined by whatever consumes this entity.
  private String rulePayload;
}
44 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/exceptions/RuleNotFoundException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.exceptions;
19 |
/** Thrown when a rule with the requested id does not exist in the repository. */
public class RuleNotFoundException extends RuntimeException {

  /**
   * @param id the id of the rule that could not be found
   */
  public RuleNotFoundException(Integer id) {
    // Message previously said "employee" — a copy-paste leftover from a tutorial;
    // this exception is about rules.
    super("Could not find rule " + id);
  }
}
26 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/model/Alert.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.model;
19 |
20 | import com.ververica.demo.backend.datasource.Transaction;
21 | import java.math.BigDecimal;
22 | import lombok.AllArgsConstructor;
23 | import lombok.Data;
24 |
/** Alert emitted when a rule fires on a transaction. */
@Data
@AllArgsConstructor
public class Alert {
  // Id and raw payload of the rule that triggered this alert.
  private Integer ruleId;
  private String rulePayload;

  // The transaction that fired the rule, and the value that triggered it.
  Transaction triggeringEvent;
  BigDecimal triggeringValue;
}
34 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/repositories/SqlRepository.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.repositories;
19 |
20 | import java.util.List;
21 | import org.springframework.data.repository.CrudRepository;
22 |
23 | public interface SqlRepository extends CrudRepository {
24 |
25 | @Override
26 | List findAll();
27 | }
28 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/repositories/SqlRepositoryEvent.java:
--------------------------------------------------------------------------------
1 | package com.ververica.demo.backend.repositories;
2 |
3 | import javax.persistence.*;
4 |
5 | import lombok.AllArgsConstructor;
6 | import lombok.Data;
7 | import lombok.NoArgsConstructor;
8 |
/** JPA entity holding one SQL statement to be shipped to the Flink job. */
@Entity
@Data
@AllArgsConstructor
@NoArgsConstructor
public class SqlRepositoryEvent {
  // The SQL text itself; column sized generously since statements can be long.
  @Column(length=10000)
  public String content;

  // Auto-incremented primary key.
  @Id
  @GeneratedValue(strategy = GenerationType.IDENTITY)
  private Integer id;
}
21 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/services/FlinkSqlService.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.services;
19 |
20 | import com.fasterxml.jackson.core.JsonProcessingException;
21 | import com.fasterxml.jackson.databind.ObjectMapper;
22 | import com.ververica.demo.backend.configurations.PropertyLogger;
23 | import com.ververica.demo.backend.repositories.SqlRepositoryEvent;
24 | import org.slf4j.Logger;
25 | import org.slf4j.LoggerFactory;
26 | import org.springframework.beans.factory.annotation.Autowired;
27 | import org.springframework.beans.factory.annotation.Value;
28 | import org.springframework.kafka.core.KafkaTemplate;
29 | import org.springframework.stereotype.Service;
30 |
31 | @Service
32 | public class FlinkSqlService {
33 |
34 | private KafkaTemplate kafkaTemplate;
35 | private static final Logger LOGGER = LoggerFactory.getLogger(PropertyLogger.class);
36 |
37 | @Value("${kafka.topic.sqls}")
38 | private String topic;
39 |
40 | private final ObjectMapper mapper = new ObjectMapper();
41 |
42 | @Autowired
43 | public FlinkSqlService(KafkaTemplate kafkaTemplate) {
44 | this.kafkaTemplate = kafkaTemplate;
45 | }
46 |
47 | public void addSql(SqlRepositoryEvent sql) {
48 | String toSend = "1970-01-01 00:01:01," + sql.content;
49 | LOGGER.info("To send: " + toSend);
50 | kafkaTemplate.send(topic, toSend);
51 | }
52 |
53 | public void deleteSql(SqlRepositoryEvent sql) {
54 | String toSend = "REMOVE," + sql.content;
55 | LOGGER.info("To send: " + toSend);
56 | kafkaTemplate.send(topic, toSend);
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/services/KafkaAlertsPusher.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.services;
19 |
20 | import com.ververica.demo.backend.model.Alert;
21 | import java.util.function.Consumer;
22 | import lombok.extern.slf4j.Slf4j;
23 | import org.springframework.beans.factory.annotation.Autowired;
24 | import org.springframework.beans.factory.annotation.Value;
25 | import org.springframework.kafka.core.KafkaTemplate;
26 | import org.springframework.stereotype.Service;
27 |
28 | @Service
29 | @Slf4j
30 | public class KafkaAlertsPusher implements Consumer {
31 |
32 | private KafkaTemplate kafkaTemplate;
33 |
34 | @Value("${kafka.topic.alerts}")
35 | private String topic;
36 |
37 | @Autowired
38 | public KafkaAlertsPusher(KafkaTemplate kafkaTemplateForJson) {
39 | this.kafkaTemplate = kafkaTemplateForJson;
40 | }
41 |
42 | @Override
43 | public void accept(Alert alert) {
44 | log.info("{}", alert);
45 | kafkaTemplate.send(topic, alert);
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/services/KafkaConsumerService.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.services;
19 |
20 | import com.fasterxml.jackson.databind.ObjectMapper;
21 | import com.ververica.demo.backend.repositories.SqlRepository;
22 | import lombok.extern.slf4j.Slf4j;
23 | import org.springframework.beans.factory.annotation.Autowired;
24 | import org.springframework.beans.factory.annotation.Value;
25 | import org.springframework.kafka.annotation.KafkaListener;
26 | import org.springframework.messaging.handler.annotation.Payload;
27 | import org.springframework.messaging.simp.SimpMessagingTemplate;
28 | import org.springframework.stereotype.Service;
29 |
/** Relays alert and latency messages from Kafka topics to the UI web-socket topics. */
@Service
@Slf4j
public class KafkaConsumerService {

  private final SimpMessagingTemplate simpTemplate;
  // NOTE(review): sqlRepository and mapper are only referenced by the commented-out
  // current-rules listener below; kept for when that listener is re-enabled.
  private final SqlRepository sqlRepository;
  private final ObjectMapper mapper = new ObjectMapper();

  @Value("${web-socket.topic.alerts}")
  private String alertsWebSocketTopic;

  @Value("${web-socket.topic.latency}")
  private String latencyWebSocketTopic;

  @Autowired
  public KafkaConsumerService(SimpMessagingTemplate simpTemplate, SqlRepository sqlRepository) {
    this.simpTemplate = simpTemplate;
    this.sqlRepository = sqlRepository;
  }

  /** Forwards each alert message, unchanged, to the alerts web-socket topic. */
  @KafkaListener(topics = "${kafka.topic.alerts}", groupId = "alerts")
  public void templateAlerts(@Payload String message) {
    log.debug("{}", message);
    simpTemplate.convertAndSend(alertsWebSocketTopic, message);
  }

  /** Forwards each latency measurement, unchanged, to the latency web-socket topic. */
  @KafkaListener(topics = "${kafka.topic.latency}", groupId = "latency")
  public void templateLatency(@Payload String message) {
    log.debug("{}", message);
    simpTemplate.convertAndSend(latencyWebSocketTopic, message);
  }

  //  @KafkaListener(topics = "${kafka.topic.current-rules}", groupId = "current-rules")
  //  public void templateCurrentFlinkRules(@Payload String message) throws IOException {
  //    log.info("{}", message);
  //    RulePayload payload = mapper.readValue(message, RulePayload.class);
  //    Integer payloadId = payload.getRuleId();
  //    Optional existingSql = sqlRepository.findById(payloadId);
  //    if (!existingSql.isPresent()) {
  //      sqlRepository.save(new SqlRepositoryEvent(payloadId, mapper.writeValueAsString(payload)));
  //    }
  //  }
}
73 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/services/KafkaTransactionsConsumerService.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.services;
19 |
20 | import com.fasterxml.jackson.databind.ObjectMapper;
21 | import java.util.Map;
22 | import lombok.extern.slf4j.Slf4j;
23 | import org.apache.kafka.common.TopicPartition;
24 | import org.springframework.beans.factory.annotation.Autowired;
25 | import org.springframework.beans.factory.annotation.Value;
26 | import org.springframework.kafka.annotation.KafkaListener;
27 | import org.springframework.kafka.listener.ConsumerSeekAware;
28 | import org.springframework.messaging.handler.annotation.Payload;
29 | import org.springframework.messaging.simp.SimpMessagingTemplate;
30 | import org.springframework.stereotype.Service;
31 |
32 | @Service
33 | @Slf4j
34 | public class KafkaTransactionsConsumerService implements ConsumerSeekAware {
35 |
36 | private final SimpMessagingTemplate simpTemplate;
37 | private final ObjectMapper mapper = new ObjectMapper();
38 |
39 | @Value("${web-socket.topic.transactions}")
40 | private String transactionsWebSocketTopic;
41 |
42 | @Autowired
43 | public KafkaTransactionsConsumerService(SimpMessagingTemplate simpTemplate) {
44 | this.simpTemplate = simpTemplate;
45 | }
46 |
47 | @KafkaListener(
48 | id = "${kafka.listeners.transactions.id}",
49 | topics = "${kafka.topic.transactions}",
50 | groupId = "transactions")
51 | public void consumeTransactions(@Payload String message) {
52 | log.debug("{}", message);
53 | simpTemplate.convertAndSend(transactionsWebSocketTopic, message);
54 | }
55 |
56 | @Override
57 | public void registerSeekCallback(ConsumerSeekCallback callback) {}
58 |
59 | @Override
60 | public void onPartitionsAssigned(
61 | Map assignments, ConsumerSeekCallback callback) {
62 | assignments.forEach((t, o) -> callback.seekToEnd(t.topic(), t.partition()));
63 | }
64 |
65 | @Override
66 | public void onIdleContainer(
67 | Map assignments, ConsumerSeekCallback callback) {}
68 | }
69 |
--------------------------------------------------------------------------------
/webapp/src/main/java/com/ververica/demo/backend/services/KafkaTransactionsPusher.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package com.ververica.demo.backend.services;
19 |
20 | import com.ververica.demo.backend.datasource.Transaction;
21 | import java.util.function.Consumer;
22 | import lombok.extern.slf4j.Slf4j;
23 | import org.springframework.beans.factory.annotation.Autowired;
24 | import org.springframework.beans.factory.annotation.Value;
25 | import org.springframework.kafka.core.KafkaTemplate;
26 | import org.springframework.stereotype.Service;
27 |
28 | @Service
29 | @Slf4j
30 | public class KafkaTransactionsPusher implements Consumer {
31 |
32 | private KafkaTemplate kafkaTemplate;
33 | private Transaction lastTransaction;
34 |
35 | @Value("${kafka.topic.transactions}")
36 | private String topic;
37 |
38 | @Autowired
39 | public KafkaTransactionsPusher(KafkaTemplate kafkaTemplateForJson) {
40 | this.kafkaTemplate = kafkaTemplateForJson;
41 | }
42 |
43 | @Override
44 | public void accept(Transaction transaction) {
45 | lastTransaction = transaction;
46 | log.debug("{}", transaction);
47 | kafkaTemplate.send(topic, transaction);
48 | }
49 |
50 | public Transaction getLastTransaction() {
51 | return lastTransaction;
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/webapp/src/main/resources/application.yaml:
--------------------------------------------------------------------------------
1 | server:
2 | port: 5656
3 |
4 | security.auth.enabled: false
5 |
6 | transactionsRateDisplayLimit: 50
7 |
8 | kafka:
9 | topic:
10 | transactions: livetransactions
11 | sqls: sqls
12 | alerts: alerts
13 | latency: latency
14 | current-rules: current-rules
15 | listeners:
16 | transactions.id: transactions-listener
17 |
18 | bootstrap-servers: localhost:9092
19 |
20 | web-socket:
21 | topic:
22 | transactions: /topic/transactions
23 | alerts: /topic/alerts
24 | latency: /topic/latency
25 |
26 | management.endpoints.web.exposure.include: mappings, loggers
27 | spring:
28 | h2.console.enabled: true
29 | thymeleaf.cache: false
30 | autoconfigure.exclude: org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration
31 | kafka:
32 | producer:
33 | bootstrap-servers: localhost:9092
34 | key-serializer: org.apache.kafka.common.serialization.StringSerializer
35 | value-serializer: org.apache.kafka.common.serialization.StringSerializer
--------------------------------------------------------------------------------
/webapp/src/react-app-env.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="react-scripts" />
2 |
3 | declare module "leader-line";
4 | declare module "react-stomp";
5 | declare module "get-form-data";
6 |
--------------------------------------------------------------------------------
/webapp/src/setupProxy.js:
--------------------------------------------------------------------------------
// Dev-server proxy configuration: forwards REST calls (/api) and the web socket
// (/ws) to the backend on port 5656, so the frontend can be served separately.
const proxy = require("http-proxy-middleware");

module.exports = function(app) {
  app.use(proxy("/api", { target: "http://localhost:5656" }));
  // ws: true upgrades the connection so web-socket frames are proxied too.
  app.use(proxy("/ws", { target: "ws://localhost:5656", ws: true }));
};
7 |
--------------------------------------------------------------------------------
/webapp/src/test/java/com/ververica/demo/backend/MainTest.java:
--------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one or more
3 |  * contributor license agreements. See the NOTICE file distributed with
4 |  * this work for additional information regarding copyright ownership.
5 |  * The ASF licenses this file to You under the Apache License, Version 2.0
6 |  * (the "License"); you may not use this file except in compliance with
7 |  * the License. You may obtain a copy of the License at
8 |  *
9 |  * http://www.apache.org/licenses/LICENSE-2.0
10 |  *
11 |  * Unless required by applicable law or agreed to in writing, software
12 |  * distributed under the License is distributed on an "AS IS" BASIS,
13 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 |  * See the License for the specific language governing permissions and
15 |  * limitations under the License.
16 |  */
17 | 
18 | package com.ververica.demo.backend;
19 | 
20 | import org.junit.Ignore;
21 | import org.junit.Test;
22 | import org.junit.runner.RunWith;
23 | import org.springframework.boot.test.context.SpringBootTest;
24 | import org.springframework.test.context.junit4.SpringRunner;
25 | 
26 | /**
27 |  * Smoke test verifying that the Spring Boot application context starts without errors.
28 |  * The test body is intentionally empty: loading the full {@code @SpringBootTest}
29 |  * context is the assertion itself.
30 |  *
31 |  * <p>Annotated {@code @Ignore}, so it is skipped during normal builds.
32 |  */
33 | @Ignore
34 | @RunWith(SpringRunner.class)
35 | @SpringBootTest
36 | public class MainTest {
37 | 
38 |   @Test
39 |   public void contextLoads() {}
40 | }
41 | 
--------------------------------------------------------------------------------
/webapp/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "esModuleInterop": true,
8 | "allowSyntheticDefaultImports": true,
9 | "strict": true,
10 | "forceConsistentCasingInFileNames": true,
11 | "module": "esnext",
12 | "moduleResolution": "node",
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "noEmit": true,
16 | "jsx": "preserve",
17 | "baseUrl": "src"
18 | },
19 | "include": ["src"]
20 | }
21 |
--------------------------------------------------------------------------------
/webapp/tslint.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["tslint:latest", "tslint-react", "tslint-config-prettier"],
3 | "rules": {
4 | "no-submodule-imports": false,
5 | "interface-name": false,
6 | "no-implicit-dependencies": false,
7 | "ordered-imports": false
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/webapp/webapp.Dockerfile:
--------------------------------------------------------------------------------
1 | # --- UI Build
2 | # Compile the React frontend; the static bundle ends up in /home/node/app/build.
3 | FROM node:10 as ui-build
4 | WORKDIR /home/node/app
5 | 
6 | # Copy only the dependency manifests first so the `npm ci` layer is cached
7 | # until package.json/package-lock.json actually change.
8 | COPY package.json package-lock.json tsconfig.json ./
9 | COPY scripts scripts
10 | RUN npm ci --unsafe-perm
11 | # Sources are copied after the install step; editing them does not bust the npm cache layer.
12 | COPY public public
13 | COPY src/app src/app
14 | COPY src/index.tsx src
15 | COPY src/react-app-env.d.ts src
16 | RUN npm run build
17 | 
18 | # --- Maven Build
19 | # Package the Spring Boot backend, embedding the UI bundle as static resources.
20 | FROM maven:3.6.2-jdk-8-openj9 as maven-build
21 | WORKDIR /home/maven/work
22 | 
23 | # Resolve all dependencies from pom.xml up front (cache-friendly); the later
24 | # `mvn -o` build can then run fully offline.
25 | COPY pom.xml .
26 | RUN mvn -B -e -C -T 1C org.apache.maven.plugins:maven-dependency-plugin:3.1.1:go-offline
27 | COPY . .
28 | # Place the compiled UI under target/classes/static so Spring serves it from the jar.
29 | COPY --from=ui-build /home/node/app/build /home/maven/work/target/classes/static/
30 | RUN mvn -B -e -o -T 1C verify
31 | # Strip the version suffix so the final stage can reference a stable jar name.
32 | RUN mv target/demo-fraud-webapp*.jar target/demo-fraud-webapp.jar
33 | 
34 | # --- Main container
35 | # Slim runtime image that only carries the packaged webapp; listens on 5656.
36 | FROM openjdk:8-jdk-alpine as main
37 | 
38 | COPY --from=maven-build /home/maven/work/target/demo-fraud-webapp.jar .
39 | EXPOSE 5656
40 | 
41 | ENTRYPOINT ["java","-Djava.security.egd=file:/dev/./urandom","-Dspring.profiles.active=dev","-jar","demo-fraud-webapp.jar"]
42 | 
--------------------------------------------------------------------------------