├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── bin └── rules-engine-cdk.ts ├── cdk.json ├── constructs └── kinesis-data-analytics-flink-construct.ts ├── events ├── rules.json └── sensor-events.json ├── jest.config.js ├── lambda ├── alert-ingestion │ └── index.py ├── event-ingestion │ └── index.py ├── rule-ingestion │ └── index.py └── rule-ops-ingestion │ └── index.py ├── lib └── rules-engine-cdk-stack.ts ├── package-lock.json ├── package.json ├── rules-engine ├── README.md ├── dependency-reduced-pom.xml ├── pom.xml └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── aws │ │ │ └── rulesengine │ │ │ └── dynamicrules │ │ │ ├── Main.java │ │ │ ├── RulesEvaluator.java │ │ │ ├── exceptions │ │ │ ├── InsufficientDataException.java │ │ │ └── RuleNotFoundException.java │ │ │ ├── functions │ │ │ ├── DynamicAlertFunction.java │ │ │ └── DynamicKeyFunction.java │ │ │ ├── objects │ │ │ ├── Alert.java │ │ │ ├── AlertStatus.java │ │ │ ├── Equipment.java │ │ │ ├── OperationStatus.java │ │ │ ├── Rule.java │ │ │ ├── RuleOperationStatus.java │ │ │ ├── SensorEvent.java │ │ │ ├── SensorMapState.java │ │ │ ├── Status.java │ │ │ └── TimestampAssignable.java │ │ │ ├── parsers │ │ │ ├── RuleParser.java │ │ │ └── SensorEventParser.java │ │ │ └── utils │ │ │ ├── AlertUtils.java │ │ │ ├── Descriptors.java │ │ │ ├── JsonDeserializer.java │ │ │ ├── JsonMapper.java │ │ │ ├── JsonSerializer.java │ │ │ ├── Keyed.java │ │ │ ├── KinesisUtils.java │ │ │ ├── ProcessingUtils.java │ │ │ ├── RuleDeserializer.java │ │ │ └── TimeStamper.java │ └── resources │ │ ├── flink-application-properties-dev.json │ │ └── log4j2.properties │ └── test │ └── com.aws.rulesengine.dynamicrules │ └── RuleEvaluationTest.java └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | # Build, dist and temp folders 2 | build/ 3 | dist/ 4 | tmp/ 5 | temp/ 6 | */target/ 7 | 8 | # Compiled source 9 | *.com 10 | *.class 11 | *.dll 12 | *.exe 13 | *.o 14 | *.so 15 | *.py[cod] 16 | *$py.class 17 | 18 | # npm & bower 19 | bower_components 20 | node_modules 21 | npm-debug.log 22 | .npmignore 23 | 24 | # Caches # 25 | *.sass-cache 26 | 27 | # Logs and databases 28 | *.log 29 | *.sql 30 | *.sqlite 31 | 32 | # OS generated files 33 | .DS_Store 34 | Desktop.ini 35 | 36 | # Thumbnails 37 | ._* 38 | ehthumbs.db 39 | *[Tt]humbs.db 40 | 41 | # Files that might appear on external disks 42 | .Spotlight-V100 43 | .Trashes 44 | 45 | # Packages # 46 | # It's better to unpack these files and commit the raw source because 47 | # git has its own built in compression methods. 48 | *.7z 49 | *.jar 50 | *.rar 51 | *.zip 52 | *.gz 53 | *.bzip 54 | *.xz 55 | *.lzma 56 | 57 | #packing-only formats 58 | *.iso 59 | *.tar 60 | 61 | # IDEs stuff 62 | .idea 63 | 64 | #package management formats 65 | *.dmg 66 | *.xpi 67 | *.gem 68 | *.egg 69 | *.deb 70 | *.rpm 71 | 72 | # python 73 | __pycache__/ 74 | 75 | # python environments 76 | .env 77 | .venv 78 | env/ 79 | venv/ 80 | 81 | # cdk 82 | cdk.out 83 | *.csv 84 | deploy/node_modules/.bin/cdk 85 | 86 | *.lst 87 | 88 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 
51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 13 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 14 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 15 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 16 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Dynamic Rules Engine 2 | 3 | ## Project Description 4 | 5 | The Dynamic Rules Engine is a serverless application that enables real-time evaluation of rules against incoming sensor data. It leverages AWS Kinesis Data Streams for ingesting sensor data and rule definitions, Amazon Managed Service for Apache Flink for processing the data and evaluating rules, and AWS Lambda functions for handling ingestion and processing operations. 6 | 7 | The key components include: 8 | - Amazon Managed Service for Apache Flink 9 | - Amazon Kinesis Data Streams 10 | - AWS Lambda (for testing) 11 | 12 | ## Creating Rules Engine Jar 13 | 14 | Use Maven to build jar. 15 | 16 | ```bash 17 | mvn package -f rules-engine/pom.xml 18 | ``` 19 | 20 | ## Deploy CDK Stack 21 | 22 | ```bash 23 | npm install ## Install required dependencies for CDK deployment 24 | cdk deploy RulesEngineCdkStack ## Deploy stack 25 | ``` 26 | 27 | ## Example Rule 28 | ```json 29 | { 30 | "id": "cda160c0-c790-47da-bd65-4abae838af3a", // Some UUID 31 | "name": "RuleTest1", 32 | "status": "ACTIVE", // ACTIVE or INACTIVE 33 | "equipmentName": "THERMOSTAT_1", 34 | "ruleExpression": "(SENSOR_cebb1baf_2df0_4267_b489_28be562fccea.hasChanged(5))", 35 | "sensorWindowMap": { 36 | "SENSOR_cebb1baf_2df0_4267_b489_28be562fccea": 5 // Map of how long the sensor value should be persisted 37 | } 38 | } 39 | ``` 40 | 41 | ## Example Sensor Value 42 | ```json 43 | { 44 | "equipment": { 45 | "id": "THERMOSTAT_1" 46 | }, 47 | "id": "SENSOR_cebb1baf_2df0_4267_b489_28be562fccea", // UUID of sensor 48 | "measureValue": 10, 49 | "eventTimestamp": 1721666423000 50 | } 51 | ``` 52 | 53 | ## Contributing 54 | 55 | Pull requests are welcome. 
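## Sending the Example Records

The Lambda functions deployed by the stack exist for testing, and the payloads under `events/` match the shape they expect. A minimal sketch using the AWS CLI (the physical function names are generated by CDK, so the angle-bracket names below are placeholders; look the real names up first):

```bash
# List the generated function names for the stack (stack-name prefix assumed)
aws lambda list-functions \
  --query "Functions[?starts_with(FunctionName, 'RulesEngineCdkStack')].FunctionName"

# Push the example rule, then the example sensor event, through the ingestion Lambdas
aws lambda invoke \
  --function-name <rule-ingestion-function-name> \
  --cli-binary-format raw-in-base64-out \
  --payload file://events/rules.json rule-out.json

aws lambda invoke \
  --function-name <sensor-ingestion-function-name> \
  --cli-binary-format raw-in-base64-out \
  --payload file://events/sensor-events.json sensor-out.json
```

Alerts and rule operation statuses can then be observed in the CloudWatch logs of the functions created from the `AlertProcessingFn` and `RuleProcessingFn` constructs, which consume the output streams.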
56 | 57 | ## Security 58 | 59 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information. 60 | 61 | ## License 62 | This library is licensed under the MIT-0 License. See the LICENSE file. 63 | [MIT](https://choosealicense.com/licenses/mit/) 64 | 65 | 66 | -------------------------------------------------------------------------------- /bin/rules-engine-cdk.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import "source-map-support/register"; 3 | import * as cdk from "aws-cdk-lib"; 4 | import { RulesEngineCdkStack } from "../lib/rules-engine-cdk-stack"; 5 | import { AwsSolutionsChecks } from "cdk-nag"; 6 | 7 | const app = new cdk.App(); 8 | new RulesEngineCdkStack(app, "RulesEngineCdkStack", {}); 9 | 10 | cdk.Aspects.of(app).add(new AwsSolutionsChecks({ logIgnores: true })); 11 | -------------------------------------------------------------------------------- /cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts bin/rules-engine-cdk.ts", 3 | "watch": { 4 | "include": [ 5 | "**" 6 | ], 7 | "exclude": [ 8 | "README.md", 9 | "cdk*.json", 10 | "**/*.d.ts", 11 | "**/*.js", 12 | "tsconfig.json", 13 | "package*.json", 14 | "yarn.lock", 15 | "node_modules", 16 | "test" 17 | ] 18 | }, 19 | "context": { 20 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 21 | "@aws-cdk/core:checkSecretUsage": true, 22 | "@aws-cdk/core:target-partitions": [ 23 | "aws", 24 | "aws-cn" 25 | ], 26 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 27 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 28 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 29 | "@aws-cdk/aws-iam:minimizePolicies": true, 30 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 31 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 32 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 33 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 34 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 35 | "@aws-cdk/core:enablePartitionLiterals": true, 36 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, 37 | "@aws-cdk/aws-iam:standardizedServicePrincipals": true, 38 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, 39 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, 40 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, 41 | "@aws-cdk/aws-route53-patters:useCertificate": true, 42 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false, 43 | "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true, 44 | "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true, 45 | "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true, 46 | "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true, 47 | "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true, 48 | "@aws-cdk/aws-redshift:columnId": true, 49 | "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true, 50 | "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true, 51 | "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true, 52 | "@aws-cdk/aws-kms:aliasNameRef": true, 53 | "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true, 54 | "@aws-cdk/core:includePrefixInUniqueNameGeneration": true, 55 | "@aws-cdk/aws-efs:denyAnonymousAccess": true, 56 | 
"@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true, 57 | "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true, 58 | "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true, 59 | "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true, 60 | "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true, 61 | "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true, 62 | "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true, 63 | "@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true, 64 | "@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /constructs/kinesis-data-analytics-flink-construct.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | * 4 | * Licensed under the Amazon Software License (the "License"). 5 | * You may not use this file except in compliance with the License. 6 | * A copy of the License is located at 7 | * 8 | * http://aws.amazon.com/asl/ 9 | * 10 | * or in the "license" file accompanying this file. This file is distributed 11 | * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | * express or implied. See the License for the specific language governing 13 | * permissions and limitations under the License. 14 | */ 15 | 16 | import * as cdk from "aws-cdk-lib"; 17 | import { NagSuppressions } from "cdk-nag"; 18 | import { Construct } from "constructs"; 19 | 20 | export interface KinesisAnalyticsFlinkConstructProps extends cdk.StackProps { 21 | filePath: string; 22 | role?: cdk.aws_iam.Role; 23 | propertyConfig?: cdk.aws_kinesisanalyticsv2.CfnApplication.EnvironmentPropertiesProperty; 24 | } 25 | 26 | const defaultProps: Partial = {}; 27 | 28 | /** 29 | * Deploys the Kinesis Flink Jar construct 30 | */ 31 | export class KinesisAnalyticsFlinkConstruct extends Construct { 32 | application: cdk.aws_kinesisanalyticsv2.CfnApplication; 33 | bucket: cdk.aws_s3.Bucket; 34 | role: cdk.aws_iam.Role; 35 | constructor( 36 | parent: Construct, 37 | name: string, 38 | props: KinesisAnalyticsFlinkConstructProps 39 | ) { 40 | super(parent, name); 41 | 42 | props = { ...defaultProps, ...props }; 43 | 44 | // We'll use the role from props OR create one 45 | this.role = props.role || this.createKinesisDataAnalyticsRole(); 46 | 47 | // Grant Read/Write access to the role for the uploaded bucket 48 | const updatedBucket = this.createJarUploadBucket(props.filePath); 49 | this.bucket = updatedBucket.jarFileBucket; 50 | this.bucket.grantReadWrite(this.role); 51 | 52 | // We get the bucket deployment so we can select the first element of the array 53 | // and use it as the Jar key of the application 54 | const bucketDeployment = updatedBucket.bucket_deployment; 55 | 56 | // Create the app 57 | this.application = this.createApplication( 58 | name, 59 | this.bucket, 60 | cdk.Fn.select(0, bucketDeployment.objectKeys), 61 | this.role, 62 | props.propertyConfig || {} 63 | ); 64 | 65 | 66 | // CDK Nag supressions 67 | this.supressBucketDeploymentNags(); 68 | this.supressBucketNags(this.bucket); 69 | this.supressKDARoleNags(this.role); 70 | } 71 | 72 | private createKinesisDataAnalyticsRole(): cdk.aws_iam.Role { 73 | const role = new cdk.aws_iam.Role(this, `KDARole`, { 74 | assumedBy: new 
cdk.aws_iam.ServicePrincipal( 75 | "kinesisanalytics.amazonaws.com" 76 | ), 77 | managedPolicies: [ 78 | cdk.aws_iam.ManagedPolicy.fromAwsManagedPolicyName( 79 | "AmazonKinesisAnalyticsFullAccess" 80 | ), 81 | cdk.aws_iam.ManagedPolicy.fromAwsManagedPolicyName( 82 | "CloudWatchFullAccess" 83 | ), 84 | ], 85 | }); 86 | return role; 87 | } 88 | 89 | private createJarUploadBucket(jarPath: string): { 90 | jarFileBucket: cdk.aws_s3.Bucket; 91 | bucket_deployment: cdk.aws_s3_deployment.BucketDeployment; 92 | } { 93 | const jarFileBucket = new cdk.aws_s3.Bucket(this, `KDAJarFileBucket`, { 94 | removalPolicy: cdk.RemovalPolicy.DESTROY, 95 | autoDeleteObjects: true, 96 | blockPublicAccess: cdk.aws_s3.BlockPublicAccess.BLOCK_ALL, 97 | encryption: cdk.aws_s3.BucketEncryption.S3_MANAGED, 98 | enforceSSL: true, 99 | }); 100 | 101 | // default of 128MiB isn't large enough for larger deployments. More memory doesn't improve the performance. 102 | // You want just enough memory to guarantee deployment 103 | const memoryLimit = 512; 104 | const bucket_deployment = new cdk.aws_s3_deployment.BucketDeployment( 105 | this, 106 | `KDAJarDeployment`, 107 | { 108 | sources: [cdk.aws_s3_deployment.Source.asset(jarPath)], 109 | destinationBucket: jarFileBucket, 110 | extract: false, 111 | memoryLimit, 112 | } 113 | ); 114 | this.supressBucketDeploymentNags(); 115 | 116 | return { jarFileBucket, bucket_deployment }; 117 | } 118 | private createApplication( 119 | name: string, 120 | bucket: cdk.aws_s3.Bucket, 121 | jarFile: string, 122 | executionRole: cdk.aws_iam.Role, 123 | properties: cdk.aws_kinesisanalyticsv2.CfnApplication.EnvironmentPropertiesProperty, 124 | runtimeEnvironment: string = "FLINK-1_19", 125 | applicationMode: string = "STREAMING", 126 | codeContentType: string = "ZIPFILE", 127 | snapshotsEnabled: boolean = true, 128 | loggingEnabled: boolean = true 129 | ): cdk.aws_kinesisanalyticsv2.CfnApplication { 130 | const application = new cdk.aws_kinesisanalyticsv2.CfnApplication( 131 | this, 132 | `KDA`, 133 | { 134 | applicationName: name, 135 | applicationMode: applicationMode, 136 | runtimeEnvironment: runtimeEnvironment, 137 | serviceExecutionRole: executionRole.roleArn, 138 | runConfiguration: { 139 | flinkRunConfiguration: { 140 | allowNonRestoredState: false, 141 | }, 142 | applicationRestoreConfiguration: { 143 | applicationRestoreType: "RESTORE_FROM_LATEST_SNAPSHOT", 144 | }, 145 | }, 146 | applicationConfiguration: { 147 | environmentProperties: properties, 148 | applicationSnapshotConfiguration: { 149 | snapshotsEnabled: snapshotsEnabled, 150 | }, 151 | flinkApplicationConfiguration: { 152 | checkpointConfiguration: { 153 | configurationType: "CUSTOM", 154 | checkpointingEnabled: true, 155 | checkpointInterval: 60000, 156 | minPauseBetweenCheckpoints: 5000, 157 | }, 158 | monitoringConfiguration: { 159 | configurationType: "CUSTOM", 160 | logLevel: "WARN", 161 | }, 162 | }, 163 | applicationCodeConfiguration: { 164 | codeContentType: codeContentType, 165 | codeContent: { 166 | s3ContentLocation: { 167 | bucketArn: bucket.bucketArn, 168 | fileKey: jarFile, 169 | }, 170 | }, 171 | }, 172 | }, 173 | } 174 | ); 175 | if (loggingEnabled && application.applicationName) { 176 | // Create log stream 177 | const logGroup = new cdk.aws_logs.LogGroup( 178 | this, 179 | "KDAApplicationLogGroup", 180 | { 181 | logGroupName: `/aws/kinesisanalyticsv2/application/${application.applicationName}`, 182 | removalPolicy: cdk.RemovalPolicy.DESTROY, 183 | retention: cdk.aws_logs.RetentionDays.ONE_WEEK, 184 | } 185 | 
); 186 | const logStream = new cdk.aws_logs.LogStream( 187 | this, 188 | "KDAApplicationLogStream", 189 | { 190 | logGroup: logGroup, 191 | removalPolicy: cdk.RemovalPolicy.DESTROY, 192 | } 193 | ); 194 | const logging = 195 | new cdk.aws_kinesisanalyticsv2.CfnApplicationCloudWatchLoggingOption( 196 | this, 197 | "KDAApplicationCloudWatchLoggingOption", 198 | { 199 | applicationName: application.applicationName, 200 | cloudWatchLoggingOption: { 201 | logStreamArn: `arn:aws:logs:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:log-group:${logGroup.logGroupName}:log-stream:${logStream.logStreamName}`, 202 | }, 203 | } 204 | ); 205 | logging.node.addDependency(application); 206 | } 207 | 208 | return application; 209 | } 210 | 211 | private supressBucketDeploymentNags(): void { 212 | const stack = cdk.Stack.of(this); 213 | stack.node.findAll().forEach(({ node }: { node: any }) => { 214 | const re = [ 215 | new RegExp( 216 | `${stack.stackName}/Custom::CDKBucketDeployment.+/Resource`, 217 | "g" 218 | ), 219 | new RegExp( 220 | `${stack.stackName}/Custom::CDKBucketDeployment.+/ServiceRole/.+/Resource`, 221 | "g" 222 | ), 223 | ]; 224 | if (re.some((r) => r.test(node.path))) { 225 | NagSuppressions.addResourceSuppressionsByPath( 226 | stack, 227 | node.path, 228 | [ 229 | { 230 | id: "AwsSolutions-L1", 231 | reason: "Buckey Deployment Lambda uses older runtime version", 232 | }, 233 | { 234 | id: "AwsSolutions-IAM5", 235 | reason: 236 | "Bucket Deployment uses several IAM wildcards that are necessary", 237 | }, 238 | { 239 | id: "AwsSolutions-IAM4", 240 | reason: 241 | "Bucket Deployment uses several IAM wildcards that are necessary", 242 | }, 243 | ], 244 | true 245 | ); 246 | } 247 | }); 248 | } 249 | 250 | private supressKDARoleNags(role: cdk.aws_iam.Role): void { 251 | NagSuppressions.addResourceSuppressions( 252 | role, 253 | [ 254 | { 255 | id: "AwsSolutions-IAM4", 256 | reason: "Role needs full access to Cloudwatch & KDA", 257 | appliesTo: [ 258 | { 259 | regex: 260 | "/^Policy::arn::iam::aws:policy/(CloudWatchFullAccess|AmazonKinesisAnalyticsFullAccess)$/g", 261 | }, 262 | ], 263 | }, 264 | { 265 | id: "AwsSolutions-IAM5", 266 | reason: "Allow access to S3 Bucket & S3 read/write via CDK grant method", 267 | appliesTo: [ 268 | { 269 | regex: 270 | "/^Action::s3:.*$/g", 271 | }, 272 | { 273 | regex: 274 | "/^Resource::.*\.Arn>\/.*$/g", 275 | }, 276 | ] 277 | }, 278 | ], 279 | true 280 | ); 281 | } 282 | 283 | private supressBucketNags(bucket: cdk.aws_s3.Bucket): void { 284 | NagSuppressions.addResourceSuppressions( 285 | bucket, 286 | [ 287 | { 288 | id: "AwsSolutions-S1", 289 | reason: "Server access logs not necessary for demo", 290 | }, 291 | ], 292 | true 293 | ); 294 | } 295 | } 296 | -------------------------------------------------------------------------------- /events/rules.json: -------------------------------------------------------------------------------- 1 | { 2 | "Records": [ 3 | { 4 | "id": "cda160c0-c790-47da-bd65-4abae838af3a", 5 | "name": "RuleTest1", 6 | "status": "ACTIVE", 7 | "equipmentName": "THERMOSTAT_1", 8 | "ruleExpression": "(SENSOR_cebb1baf_2df0_4267_b489_28be562fccea.hasNotChanged(5))", 9 | "sensorWindowMap": { 10 | "SENSOR_cebb1baf_2df0_4267_b489_28be562fccea": 5 11 | } 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /events/sensor-events.json: -------------------------------------------------------------------------------- 1 | { 2 | "Records": [ 3 | { 4 | "equipment": { 5 | "id": "THERMOSTAT_1" 6 | }, 7 | 
"id": "SENSOR_cebb1baf_2df0_4267_b489_28be562fccea", 8 | "measureValue": 10, 9 | "eventTimestamp": 1721666423000 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | testEnvironment: 'node', 3 | roots: ['/test'], 4 | testMatch: ['**/*.test.ts'], 5 | transform: { 6 | '^.+\\.tsx?$': 'ts-jest' 7 | } 8 | }; 9 | -------------------------------------------------------------------------------- /lambda/alert-ingestion/index.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2024 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 14 | """ 15 | 16 | import base64 17 | import boto3 18 | import json 19 | import logging 20 | import os 21 | import traceback 22 | 23 | # region Logging 24 | 25 | LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO") 26 | logger = logging.getLogger() 27 | 28 | if logger.hasHandlers(): 29 | # The Lambda environment pre-configures a handler logging to stderr. If a handler is already configured, 30 | # `.basicConfig` does not execute. Thus we set the level directly. 31 | logger.setLevel(LOG_LEVEL) 32 | else: 33 | logging.basicConfig(level=LOG_LEVEL) 34 | 35 | # endregion 36 | 37 | 38 | def mask_sensitive_data(event): 39 | # remove sensitive data from request object before logging 40 | keys_to_redact = ["authorization"] 41 | result = {} 42 | for k, v in event.items(): 43 | if isinstance(v, dict): 44 | result[k] = mask_sensitive_data(v) 45 | elif k in keys_to_redact: 46 | result[k] = "" 47 | else: 48 | result[k] = v 49 | return result 50 | 51 | 52 | def build_response(http_code, body): 53 | return { 54 | "headers": { 55 | "Cache-Control": "no-cache, no-store", 56 | "Content-Type": "application/json", 57 | }, 58 | "statusCode": http_code, 59 | "body": body, 60 | } 61 | 62 | 63 | def lambda_handler(event, context): 64 | logger.info(f"Lambda got the following event:\n {event}") 65 | try: 66 | # Read the records from Kinesis 67 | records = event["Records"] 68 | logger.info(f"Processing {len(records)} records...") 69 | 70 | # Iterate through each record 71 | for record in records: 72 | # Get the payload from the record 73 | payload = base64.b64decode(record["kinesis"]["data"]).decode("utf-8") 74 | # Parse the payload as JSON 75 | payload_json = json.loads(payload) 76 | # Log payload 77 | logger.info(mask_sensitive_data(payload_json)) 78 | except Exception as ex: 79 | logger.error(traceback.format_exc()) 80 | 81 | 82 | if __name__ == "__main__": 83 | 84 | example_event = {} 85 | response = lambda_handler(example_event, {}) 86 | print(json.dumps(response)) 87 | -------------------------------------------------------------------------------- /lambda/event-ingestion/index.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2024 Amazon.com, Inc. and its affiliates. All Rights Reserved. 
3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 14 | """ 15 | 16 | import time 17 | import boto3 18 | import json 19 | import logging 20 | import os 21 | import traceback 22 | 23 | # region Logging 24 | 25 | LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO") 26 | logger = logging.getLogger() 27 | 28 | if logger.hasHandlers(): 29 | # The Lambda environment pre-configures a handler logging to stderr. If a handler is already configured, 30 | # `.basicConfig` does not execute. Thus we set the level directly. 31 | logger.setLevel(LOG_LEVEL) 32 | else: 33 | logging.basicConfig(level=LOG_LEVEL) 34 | 35 | # endregion 36 | 37 | # region Kinesis 38 | 39 | kinesis = boto3.client("kinesis") 40 | KINESIS_STREAM_ARN = os.getenv("KINESIS_STREAM_ARN", None) 41 | if not KINESIS_STREAM_ARN: 42 | raise ValueError("KINESIS_STREAM_ARN environment variable is not set") 43 | else: 44 | logger.info(f"Kinesis stream ARN: {KINESIS_STREAM_ARN}") 45 | 46 | # endregion 47 | 48 | 49 | def mask_sensitive_data(event): 50 | # remove sensitive data from request object before logging 51 | keys_to_redact = ["authorization"] 52 | result = {} 53 | for k, v in event.items(): 54 | if isinstance(v, dict): 55 | result[k] = mask_sensitive_data(v) 56 | elif k in keys_to_redact: 57 | result[k] = "" 58 | else: 59 | result[k] = v 60 | return result 61 | 62 | 63 | def build_response(http_code, body): 64 | return { 65 | "headers": { 66 | "Cache-Control": "no-cache, no-store", 67 | "Content-Type": "application/json", 68 | }, 69 | "statusCode": http_code, 70 | "body": body, 71 | } 72 | 73 | 74 | def stream_to_kinesis(batch): 75 | logger.info(f"Streaming batch of {len(batch)} records to Kinesis") 76 | 77 | for record in batch: 78 | if 'id' in record: 79 | partition_key = record['id'] 80 | else: 81 | partition_key = str(int(time.time())) 82 | response = kinesis.put_record( 83 | Data=json.dumps(record).encode('utf-8'), 84 | PartitionKey=partition_key, 85 | StreamARN=KINESIS_STREAM_ARN 86 | ) 87 | logger.info(response) 88 | if response['ResponseMetadata']['HTTPStatusCode'] != 200: 89 | raise Exception("Failed to stream record to Kinesis") 90 | 91 | 92 | def lambda_handler(event, context): 93 | logger.info(mask_sensitive_data(event)) 94 | try: 95 | stream_to_kinesis(event["Records"]) 96 | except Exception as ex: 97 | logger.error(traceback.format_exc()) 98 | 99 | 100 | if __name__ == "__main__": 101 | 102 | example_event = {} 103 | response = lambda_handler(example_event, {}) 104 | print(json.dumps(response)) 105 | -------------------------------------------------------------------------------- /lambda/rule-ingestion/index.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2024 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. 
This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 14 | """ 15 | 16 | import time 17 | import boto3 18 | import json 19 | import logging 20 | import os 21 | import traceback 22 | 23 | # region Logging 24 | 25 | LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO") 26 | logger = logging.getLogger() 27 | 28 | if logger.hasHandlers(): 29 | # The Lambda environment pre-configures a handler logging to stderr. If a handler is already configured, 30 | # `.basicConfig` does not execute. Thus we set the level directly. 31 | logger.setLevel(LOG_LEVEL) 32 | else: 33 | logging.basicConfig(level=LOG_LEVEL) 34 | 35 | # endregion 36 | 37 | # region Kinesis 38 | 39 | kinesis = boto3.client("kinesis") 40 | KINESIS_STREAM_ARN = os.getenv("KINESIS_STREAM_ARN", None) 41 | if not KINESIS_STREAM_ARN: 42 | raise ValueError("KINESIS_STREAM_ARN environment variable is not set") 43 | else: 44 | logger.info(f"Kinesis stream ARN: {KINESIS_STREAM_ARN}") 45 | 46 | # endregion 47 | 48 | 49 | def mask_sensitive_data(event): 50 | # remove sensitive data from request object before logging 51 | keys_to_redact = ["authorization"] 52 | result = {} 53 | for k, v in event.items(): 54 | if isinstance(v, dict): 55 | result[k] = mask_sensitive_data(v) 56 | elif k in keys_to_redact: 57 | result[k] = "" 58 | else: 59 | result[k] = v 60 | return result 61 | 62 | 63 | def build_response(http_code, body): 64 | return { 65 | "headers": { 66 | "Cache-Control": "no-cache, no-store", 67 | "Content-Type": "application/json", 68 | }, 69 | "statusCode": http_code, 70 | "body": body, 71 | } 72 | 73 | 74 | def stream_to_kinesis(batch): 75 | logger.info(f"Streaming batch of {len(batch)} records to Kinesis") 76 | 77 | for record in batch: 78 | if 'id' in record: 79 | partition_key = record['id'] 80 | else: 81 | partition_key = str(int(time.time())) 82 | response = kinesis.put_record( 83 | Data=json.dumps(record).encode('utf-8'), 84 | PartitionKey=partition_key, 85 | StreamARN=KINESIS_STREAM_ARN 86 | ) 87 | logger.info(response) 88 | if response['ResponseMetadata']['HTTPStatusCode'] != 200: 89 | raise Exception("Failed to stream record to Kinesis") 90 | 91 | 92 | def lambda_handler(event, context): 93 | logger.info(mask_sensitive_data(event)) 94 | try: 95 | stream_to_kinesis(event["Records"]) 96 | except Exception as ex: 97 | logger.error(traceback.format_exc()) 98 | 99 | 100 | if __name__ == "__main__": 101 | 102 | example_event = {} 103 | response = lambda_handler(example_event, {}) 104 | print(json.dumps(response)) 105 | -------------------------------------------------------------------------------- /lambda/rule-ops-ingestion/index.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2024 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
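Test consumer for the rule operations Kinesis stream: each record's base64 payload is
decoded, parsed as JSON and logged; no further processing happens in this function.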
14 | """ 15 | 16 | import base64 17 | import boto3 18 | import json 19 | import logging 20 | import os 21 | import traceback 22 | 23 | # region Logging 24 | 25 | LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO") 26 | logger = logging.getLogger() 27 | 28 | if logger.hasHandlers(): 29 | # The Lambda environment pre-configures a handler logging to stderr. If a handler is already configured, 30 | # `.basicConfig` does not execute. Thus we set the level directly. 31 | logger.setLevel(LOG_LEVEL) 32 | else: 33 | logging.basicConfig(level=LOG_LEVEL) 34 | 35 | # endregion 36 | 37 | 38 | def mask_sensitive_data(event): 39 | # remove sensitive data from request object before logging 40 | keys_to_redact = ["authorization"] 41 | result = {} 42 | for k, v in event.items(): 43 | if isinstance(v, dict): 44 | result[k] = mask_sensitive_data(v) 45 | elif k in keys_to_redact: 46 | result[k] = "" 47 | else: 48 | result[k] = v 49 | return result 50 | 51 | 52 | def build_response(http_code, body): 53 | return { 54 | "headers": { 55 | "Cache-Control": "no-cache, no-store", 56 | "Content-Type": "application/json", 57 | }, 58 | "statusCode": http_code, 59 | "body": body, 60 | } 61 | 62 | 63 | def lambda_handler(event, context): 64 | logger.info(f"Lambda got the following event:\n {event}") 65 | try: 66 | # Read the records from Kinesis 67 | records = event["Records"] 68 | logger.info(f"Processing {len(records)} records...") 69 | 70 | # Iterate through each record 71 | for record in records: 72 | # Get the payload from the record 73 | payload = base64.b64decode(record["kinesis"]["data"]).decode("utf-8") 74 | # Parse the payload as JSON 75 | payload_json = json.loads(payload) 76 | # Log payload 77 | logger.info(mask_sensitive_data(payload_json)) 78 | except Exception as ex: 79 | logger.error(traceback.format_exc()) 80 | 81 | 82 | if __name__ == "__main__": 83 | 84 | example_event = {} 85 | response = lambda_handler(example_event, {}) 86 | print(json.dumps(response)) 87 | -------------------------------------------------------------------------------- /lib/rules-engine-cdk-stack.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from "aws-cdk-lib"; 2 | import { Construct } from "constructs"; 3 | import { KinesisAnalyticsFlinkConstruct } from "../constructs/kinesis-data-analytics-flink-construct"; 4 | import { NagSuppressions } from "cdk-nag"; 5 | 6 | export class RulesEngineCdkStack extends cdk.Stack { 7 | constructor(scope: Construct, id: string, props?: cdk.StackProps) { 8 | super(scope, id, props); 9 | 10 | const rulesStream = this.createKinesisDataStream("RulesStream"); 11 | const sensorValueStream = this.createKinesisDataStream("SensorValueStream"); 12 | const alertStream = this.createKinesisDataStream("AlertStream"); 13 | const rulesOpStream = this.createKinesisDataStream("RulesOpStream"); 14 | 15 | const rulesEngine = new KinesisAnalyticsFlinkConstruct( 16 | this, 17 | "RulesEngine", 18 | { 19 | filePath: 20 | "rules-engine/target/amazon-msf-java-dynamic-rules-engine-1.0.jar", 21 | propertyConfig: { 22 | propertyGroups: [ 23 | { 24 | propertyGroupId: "kinesis", 25 | propertyMap: { 26 | dataTopicName: sensorValueStream.streamName, 27 | alertsTopicARN: alertStream.streamArn, 28 | rulesTopicName: rulesStream.streamName, 29 | ruleOperationsTopicARN: rulesOpStream.streamArn, 30 | region: this.region, 31 | streamPosition: "LATEST", 32 | }, 33 | }, 34 | ], 35 | }, 36 | } 37 | ); 38 | 39 | rulesStream.grantRead(rulesEngine.role); 40 | 
sensorValueStream.grantRead(rulesEngine.role); 41 | alertStream.grantWrite(rulesEngine.role); 42 | rulesOpStream.grantWrite(rulesEngine.role); 43 | 44 | this.createIngestionLambdaFunction( 45 | "SensorIngestFn", 46 | "event-ingestion", 47 | sensorValueStream 48 | ); 49 | this.createIngestionLambdaFunction( 50 | "RuleIngestionFn", 51 | "rule-ingestion", 52 | rulesStream 53 | ); 54 | this.createProcessingLambdaFunction( 55 | "RuleProcessingFn", 56 | "rule-ops-ingestion", 57 | rulesOpStream 58 | ); 59 | this.createProcessingLambdaFunction( 60 | "AlertProcessingFn", 61 | "alert-ingestion", 62 | alertStream 63 | ); 64 | } 65 | 66 | private createKinesisDataStream(name: string): cdk.aws_kinesis.Stream { 67 | return new cdk.aws_kinesis.Stream(this, name, { 68 | streamName: name, 69 | shardCount: 1, 70 | }); 71 | } 72 | 73 | private createIngestionLambdaFunction( 74 | name: string, 75 | functionFolder: string, 76 | stream: cdk.aws_kinesis.Stream 77 | ): cdk.aws_lambda.Function { 78 | const func = new cdk.aws_lambda.Function(this, name, { 79 | runtime: cdk.aws_lambda.Runtime.PYTHON_3_12, 80 | code: cdk.aws_lambda.Code.fromAsset(`lambda/${functionFolder}/`), 81 | handler: "index.lambda_handler", 82 | environment: { 83 | KINESIS_STREAM_ARN: stream.streamArn, 84 | }, 85 | }); 86 | stream.grantWrite(func.role!); 87 | this.supressLambdaExecutionRoleNags(func); 88 | return func; 89 | } 90 | private createProcessingLambdaFunction( 91 | name: string, 92 | functionFolder: string, 93 | stream: cdk.aws_kinesis.Stream 94 | ): cdk.aws_lambda.Function { 95 | const func = new cdk.aws_lambda.Function(this, name, { 96 | runtime: cdk.aws_lambda.Runtime.PYTHON_3_12, 97 | code: cdk.aws_lambda.Code.fromAsset(`lambda/${functionFolder}/`), 98 | handler: "index.lambda_handler", 99 | }); 100 | stream.grantRead(func.role!); 101 | // connect stream to function 102 | func.addEventSource( 103 | new cdk.aws_lambda_event_sources.KinesisEventSource(stream, { 104 | startingPosition: cdk.aws_lambda.StartingPosition.LATEST, 105 | }) 106 | ); 107 | this.supressLambdaExecutionRoleNags(func); 108 | return func; 109 | } 110 | private supressLambdaExecutionRoleNags(functionRole: cdk.aws_lambda.Function): void { 111 | NagSuppressions.addResourceSuppressions( 112 | functionRole, 113 | [ 114 | { 115 | id: "AwsSolutions-IAM4", 116 | reason: "Grantable roles added to Lambda Function role", 117 | appliesTo: [ 118 | { 119 | regex: "/^Policy::arn::iam::aws:policy\/service-role\/AWSLambdaBasicExecutionRole$/g", 120 | }, 121 | ], 122 | }, 123 | ], 124 | true 125 | ); 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rules-engine-cdk", 3 | "version": "0.1.0", 4 | "bin": { 5 | "rules-engine-cdk": "bin/rules-engine-cdk.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@types/jest": "^29.5.12", 15 | "@types/node": "20.11.19", 16 | "jest": "^29.7.0", 17 | "ts-jest": "^29.1.2", 18 | "aws-cdk": "2.150.0", 19 | "ts-node": "^10.9.2", 20 | "cdk-nag": "2.28.164", 21 | "typescript": "~5.3.3" 22 | }, 23 | "dependencies": { 24 | "aws-cdk-lib": "2.150.0", 25 | "constructs": "^10.0.0", 26 | "source-map-support": "^0.5.21" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /rules-engine/README.md: 
-------------------------------------------------------------------------------- 1 | ## Rules Engine -------------------------------------------------------------------------------- /rules-engine/dependency-reduced-pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | com.amazonaws 5 | amazon-msf-java-dynamic-rules-engine 6 | 1.0 7 | 8 | ${buildDirectory} 9 | ${jar.finalName} 10 | 11 | 12 | maven-compiler-plugin 13 | 3.8.1 14 | 15 | ${target.java.version} 16 | ${target.java.version} 17 | 18 | 19 | 20 | maven-shade-plugin 21 | 3.2.1 22 | 23 | 24 | package 25 | 26 | shade 27 | 28 | 29 | 30 | 31 | org.apache.flink:force-shading 32 | com.google.code.findbugs:jsr305 33 | org.slf4j:* 34 | log4j:* 35 | 36 | 37 | 38 | 39 | *:* 40 | 41 | META-INF/*.SF 42 | META-INF/*.DSA 43 | META-INF/*.RSA 44 | 45 | 46 | 47 | 48 | 49 | 50 | com.aws.rulesengine.dynamicrules.Main 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | org.apache.flink 62 | flink-clients 63 | 1.19.1 64 | provided 65 | 66 | 67 | flink-runtime 68 | org.apache.flink 69 | 70 | 71 | flink-optimizer 72 | org.apache.flink 73 | 74 | 75 | flink-java 76 | org.apache.flink 77 | 78 | 79 | commons-cli 80 | commons-cli 81 | 82 | 83 | 84 | 85 | org.apache.flink 86 | flink-streaming-java 87 | 1.19.1 88 | provided 89 | 90 | 91 | flink-file-sink-common 92 | org.apache.flink 93 | 94 | 95 | commons-math3 96 | org.apache.commons 97 | 98 | 99 | flink-connector-datagen 100 | org.apache.flink 101 | 102 | 103 | flink-runtime 104 | org.apache.flink 105 | 106 | 107 | flink-java 108 | org.apache.flink 109 | 110 | 111 | 112 | 113 | org.apache.flink 114 | flink-runtime-web 115 | 1.19.1 116 | provided 117 | 118 | 119 | flink-shaded-netty 120 | org.apache.flink 121 | 122 | 123 | flink-runtime 124 | org.apache.flink 125 | 126 | 127 | 128 | 129 | org.apache.flink 130 | flink-connector-base 131 | 1.19.1 132 | provided 133 | 134 | 135 | org.projectlombok 136 | lombok 137 | 1.18.34 138 | provided 139 | 140 | 141 | 142 | 143 | 144 | com.amazonaws 145 | aws-java-sdk-bom 146 | 1.12.677 147 | pom 148 | import 149 | 150 | 151 | 152 | 153 | ${target.java.version} 154 | ${project.name}-${project.version} 155 | 1.19.1 156 | 4.3.0-1.19 157 | 1.18.34 158 | ${target.java.version} 159 | UTF-8 160 | 2.23.1 161 | 3.4.0 162 | ${project.basedir}/target 163 | 11 164 | 1.2.0 165 | 166 | 167 | -------------------------------------------------------------------------------- /rules-engine/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | com.amazonaws 8 | amazon-msf-java-dynamic-rules-engine 9 | 1.0 10 | jar 11 | 12 | 13 | UTF-8 14 | ${project.basedir}/target 15 | ${project.name}-${project.version} 16 | 11 17 | ${target.java.version} 18 | ${target.java.version} 19 | 1.19.1 20 | 4.3.0-1.19 21 | 1.2.0 22 | 2.23.1 23 | 3.4.0 24 | 1.18.34 25 | 26 | 27 | 28 | 29 | 30 | com.amazonaws 31 | aws-java-sdk-bom 32 | 33 | 1.12.677 34 | pom 35 | import 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | org.apache.flink 45 | flink-clients 46 | ${flink.version} 47 | provided 48 | 49 | 50 | org.apache.flink 51 | flink-streaming-java 52 | ${flink.version} 53 | provided 54 | 55 | 56 | org.apache.flink 57 | flink-runtime-web 58 | ${flink.version} 59 | provided 60 | 61 | 62 | 63 | 64 | com.amazonaws 65 | aws-kinesisanalytics-runtime 66 | ${kda.runtime.version} 67 | 68 | 69 | 70 | 71 | org.apache.flink 72 | flink-connector-base 73 | ${flink.version} 74 | provided 75 | 76 | 77 | org.apache.flink 78 
| flink-connector-kinesis 79 | ${aws.connector.version} 80 | 81 | 82 | org.apache.flink 83 | flink-connector-aws-kinesis-streams 84 | ${aws.connector.version} 85 | 86 | 87 | 88 | 89 | org.apache.commons 90 | commons-jexl3 91 | ${jexl.version} 92 | 93 | 94 | 95 | 96 | org.projectlombok 97 | lombok 98 | ${lombok.version} 99 | provided 100 | 101 | 102 | 103 | 104 | 105 | 106 | org.apache.logging.log4j 107 | log4j-slf4j-impl 108 | ${log4j.version} 109 | compile 110 | 111 | 112 | org.apache.logging.log4j 113 | log4j-api 114 | ${log4j.version} 115 | compile 116 | 117 | 118 | org.apache.logging.log4j 119 | log4j-core 120 | ${log4j.version} 121 | compile 122 | 123 | 124 | 125 | 126 | ${buildDirectory} 127 | ${jar.finalName} 128 | 129 | 130 | 131 | org.apache.maven.plugins 132 | maven-compiler-plugin 133 | 3.8.1 134 | 135 | ${target.java.version} 136 | ${target.java.version} 137 | 138 | 139 | 140 | 141 | 142 | org.apache.maven.plugins 143 | maven-shade-plugin 144 | 3.2.1 145 | 146 | 147 | 148 | package 149 | 150 | shade 151 | 152 | 153 | 154 | 155 | org.apache.flink:force-shading 156 | com.google.code.findbugs:jsr305 157 | org.slf4j:* 158 | log4j:* 159 | 160 | 161 | 162 | 163 | 165 | *:* 166 | 167 | META-INF/*.SF 168 | META-INF/*.DSA 169 | META-INF/*.RSA 170 | 171 | 172 | 173 | 174 | 176 | 178 | com.aws.rulesengine.dynamicrules.Main 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/Main.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules; 20 | 21 | public class Main { 22 | 23 | public static void main(String[] args) throws Exception { 24 | RulesEvaluator rulesEvaluator = new RulesEvaluator(); 25 | rulesEvaluator.run(); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/RulesEvaluator.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules; 20 | 21 | import java.io.IOException; 22 | import java.util.Properties; 23 | 24 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 25 | import org.apache.flink.connector.kinesis.sink.KinesisStreamsSink; 26 | import org.apache.flink.streaming.api.datastream.BroadcastStream; 27 | import org.apache.flink.streaming.api.datastream.DataStream; 28 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 29 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 30 | import org.apache.flink.streaming.connectors.kinesis.FlinkKinesisConsumer; 31 | import org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants; 32 | 33 | import com.aws.rulesengine.dynamicrules.functions.DynamicAlertFunction; 34 | import com.aws.rulesengine.dynamicrules.functions.DynamicKeyFunction; 35 | import com.aws.rulesengine.dynamicrules.objects.Alert; 36 | import com.aws.rulesengine.dynamicrules.objects.Rule; 37 | import com.aws.rulesengine.dynamicrules.objects.RuleOperationStatus; 38 | import com.aws.rulesengine.dynamicrules.objects.SensorEvent; 39 | import com.aws.rulesengine.dynamicrules.utils.Descriptors; 40 | import com.aws.rulesengine.dynamicrules.utils.JsonDeserializer; 41 | import com.aws.rulesengine.dynamicrules.utils.JsonSerializer; 42 | import com.aws.rulesengine.dynamicrules.utils.KinesisUtils; 43 | import com.aws.rulesengine.dynamicrules.utils.RuleDeserializer; 44 | import com.aws.rulesengine.dynamicrules.utils.TimeStamper; 45 | 46 | import lombok.extern.slf4j.Slf4j; 47 | 48 | @Slf4j 49 | public class RulesEvaluator { 50 | 51 | RulesEvaluator() { 52 | } 53 | 54 | /** 55 | * The main method that sets up the Flink streaming pipeline for processing 56 | * sensor events, 57 | * evaluating rules, and generating alerts and rule operation status updates. 
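     * The topology built below connects the sensor event stream to the broadcast rule stream,
     * re-keys it with {@code DynamicKeyFunction}, evaluates rules in {@code DynamicAlertFunction},
     * and writes the resulting alerts and the rule-operation side output to their Kinesis sinks as JSON.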
58 | * 59 | * @throws Exception if an error occurs during pipeline execution 60 | */ 61 | public void run() throws Exception { 62 | // Environment setup 63 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 64 | 65 | // Source and Sink Properties setup 66 | Properties sourceProperties = createSourceProperties(); 67 | Properties sinkProperties = createSinkProperties(); 68 | 69 | // Streams setup 70 | DataStream sensorEvents = createSensorEventStream(env, sourceProperties); 71 | BroadcastStream rulesStream = createRuleStream(env, sourceProperties) 72 | .broadcast(Descriptors.rulesDescriptor); 73 | 74 | // Processing pipeline setup 75 | DataStream alerts = sensorEvents 76 | .connect(rulesStream) 77 | .process(new DynamicKeyFunction()) 78 | .uid("partition-sensor-data") 79 | .name("Partition Sensor Data by Equipment and RuleId") 80 | .keyBy((equipmentSensorHash) -> equipmentSensorHash.getKey()) 81 | .connect(rulesStream) 82 | .process(new DynamicAlertFunction()) 83 | .uid("rule-evaluator") 84 | .name("Rule Evaluator"); 85 | 86 | DataStream ruleOperationsSink = ((SingleOutputStreamOperator) alerts) 87 | .getSideOutput(Descriptors.ruleOperations); 88 | 89 | alerts.flatMap(new JsonSerializer<>(Alert.class)) 90 | .name("Alerts Deserialization").sinkTo(createAlertSink(sinkProperties)) 91 | .uid("alerts-json-sink") 92 | .name("Alerts JSON Sink"); 93 | 94 | ruleOperationsSink 95 | .flatMap(new JsonSerializer<>(RuleOperationStatus.class)) 96 | .name("Rule Operations Data Stream").sinkTo(createRuleOperationsSink(sinkProperties)) 97 | .uid("rule-operations-json-sink") 98 | .name("Rule Operations JSON Sink"); 99 | 100 | env.execute("Dynamic Rules Engine"); 101 | } 102 | 103 | /** 104 | * Creates a DataStream of SensorEvent objects by consuming sensor event data 105 | * from a Kinesis stream. 106 | * 107 | * @param env The StreamExecutionEnvironment for the Flink job 108 | * @return A DataStream of SensorEvent objects 109 | * @throws IOException if an error occurs while reading Kinesis properties 110 | */ 111 | private DataStream createSensorEventStream(StreamExecutionEnvironment env, 112 | Properties sourceProperties) throws IOException { 113 | String DATA_SOURCE = KinesisUtils.getKinesisRuntimeProperty("kinesis", "dataTopicName"); 114 | FlinkKinesisConsumer kinesisConsumer = new FlinkKinesisConsumer<>(DATA_SOURCE, 115 | new SimpleStringSchema(), 116 | sourceProperties); 117 | DataStream transactionsStringsStream = env.addSource(kinesisConsumer) 118 | .name("EventStream") 119 | .uid("sensor-events-stream"); 120 | 121 | return transactionsStringsStream.flatMap(new JsonDeserializer<>(SensorEvent.class)) 122 | .returns(SensorEvent.class) 123 | .flatMap(new TimeStamper<>()) 124 | .returns(SensorEvent.class) 125 | .name("Transactions Deserialization"); 126 | } 127 | 128 | /** 129 | * Creates a DataStream of Rule objects by consuming rule data from a Kinesis 130 | * stream. 
131 | * 132 | * @param env The StreamExecutionEnvironment for the Flink job 133 | * @return A DataStream of Rule objects 134 | * @throws IOException if an error occurs while reading Kinesis properties 135 | */ 136 | private DataStream createRuleStream(StreamExecutionEnvironment env, Properties sourceProperties) 137 | throws IOException { 138 | String RULES_SOURCE = KinesisUtils.getKinesisRuntimeProperty("kinesis", "rulesTopicName"); 139 | FlinkKinesisConsumer kinesisConsumer = new FlinkKinesisConsumer<>(RULES_SOURCE, 140 | new SimpleStringSchema(), 141 | sourceProperties); 142 | DataStream rulesStrings = env.addSource(kinesisConsumer) 143 | .name("RulesStream") 144 | .uid("rules-stream"); 145 | return rulesStrings.flatMap(new RuleDeserializer()).name("Rule Deserialization"); 146 | } 147 | 148 | /** 149 | * Creates a KinesisStreamsSink for writing alert data to a Kinesis stream. 150 | * 151 | * @return A KinesisStreamsSink for alert data 152 | * @throws IOException if an error occurs while reading Kinesis properties 153 | */ 154 | private KinesisStreamsSink createAlertSink(Properties sinkProperties) throws IOException { 155 | String ALERTS_TOPIC = KinesisUtils.getKinesisRuntimeProperty("kinesis", "alertsTopicARN"); 156 | KinesisStreamsSink kdsSink = KinesisStreamsSink.builder() 157 | .setSerializationSchema(new SimpleStringSchema()) 158 | .setPartitionKeyGenerator(element -> String.valueOf(element.hashCode())) 159 | .setKinesisClientProperties(sinkProperties) 160 | .setStreamArn(ALERTS_TOPIC) 161 | .build(); 162 | return kdsSink; 163 | } 164 | 165 | /** 166 | * Creates a KinesisStreamsSink for writing rule operation status data to a 167 | * Kinesis stream. 168 | * 169 | * @return A KinesisStreamsSink for rule operation status data 170 | * @throws IOException if an error occurs while reading Kinesis properties 171 | */ 172 | private KinesisStreamsSink createRuleOperationsSink(Properties sinkProperties) throws IOException { 173 | String RULE_OPS_TOPIC = KinesisUtils.getKinesisRuntimeProperty("kinesis", "ruleOperationsTopicARN"); 174 | KinesisStreamsSink kdsSink = KinesisStreamsSink.builder() 175 | .setSerializationSchema(new SimpleStringSchema()) 176 | .setPartitionKeyGenerator(element -> String.valueOf(element.hashCode())) 177 | .setStreamArn(RULE_OPS_TOPIC) 178 | .setKinesisClientProperties(sinkProperties) 179 | .build(); 180 | return kdsSink; 181 | } 182 | 183 | private Properties createSourceProperties() throws IOException { 184 | Properties sourceProperties = new Properties(); 185 | String region = KinesisUtils.getKinesisRuntimeProperty("kinesis", "region"); 186 | String streamPosition = KinesisUtils.getKinesisRuntimeProperty("kinesis", "streamPosition"); 187 | sourceProperties.setProperty(ConsumerConfigConstants.AWS_REGION, region); 188 | sourceProperties.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, streamPosition); 189 | return sourceProperties; 190 | } 191 | 192 | private Properties createSinkProperties() throws IOException { 193 | Properties sinkProperties = new Properties(); 194 | String region = KinesisUtils.getKinesisRuntimeProperty("kinesis", "region"); 195 | sinkProperties.setProperty(ConsumerConfigConstants.AWS_REGION, region); 196 | return sinkProperties; 197 | } 198 | 199 | } 200 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/exceptions/InsufficientDataException.java: -------------------------------------------------------------------------------- 1 | package 
com.aws.rulesengine.dynamicrules.exceptions; 2 | 3 | import com.aws.rulesengine.dynamicrules.objects.OperationStatus; 4 | import com.aws.rulesengine.dynamicrules.objects.Rule; 5 | 6 | public class InsufficientDataException extends Exception { 7 | 8 | private Rule rule; 9 | private OperationStatus status = OperationStatus.INSUFFICIENT_DATA; 10 | 11 | public InsufficientDataException(String message) { 12 | super(message); 13 | } 14 | 15 | public InsufficientDataException(Rule rule) { 16 | super(String.format("Insufficient data for rule %s", rule)); 17 | this.rule = rule; 18 | } 19 | 20 | public Rule getRule() { 21 | return rule; 22 | } 23 | 24 | public OperationStatus getStatus() { 25 | return status; 26 | } 27 | 28 | } 29 | 30 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/exceptions/RuleNotFoundException.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.exceptions; 2 | 3 | public class RuleNotFoundException extends Exception{ 4 | 5 | public RuleNotFoundException(String message) { 6 | super(message); 7 | } 8 | 9 | @Override 10 | public String toString() { 11 | return "Rule not found"; 12 | } 13 | 14 | } 15 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/functions/DynamicAlertFunction.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.functions; 20 | 21 | import java.io.IOException; 22 | import java.util.Collections; 23 | import java.util.Iterator; 24 | import java.util.List; 25 | import java.util.Map; 26 | 27 | import org.apache.commons.jexl3.JexlBuilder; 28 | import org.apache.commons.jexl3.JexlContext; 29 | import org.apache.commons.jexl3.JexlEngine; 30 | import org.apache.commons.jexl3.JexlException; 31 | import org.apache.commons.jexl3.JexlExpression; 32 | import org.apache.commons.jexl3.MapContext; 33 | import org.apache.commons.jexl3.introspection.JexlPermissions; 34 | import org.apache.flink.api.common.state.BroadcastState; 35 | import org.apache.flink.api.common.state.MapState; 36 | import org.apache.flink.api.common.state.MapStateDescriptor; 37 | import org.apache.flink.api.common.state.ValueState; 38 | import org.apache.flink.api.common.state.ValueStateDescriptor; 39 | import org.apache.flink.api.common.typeinfo.BasicTypeInfo; 40 | import org.apache.flink.api.common.typeinfo.TypeHint; 41 | import org.apache.flink.api.common.typeinfo.TypeInformation; 42 | import org.apache.flink.configuration.Configuration; 43 | import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction; 44 | import org.apache.flink.util.Collector; 45 | 46 | import com.aws.rulesengine.dynamicrules.exceptions.InsufficientDataException; 47 | import com.aws.rulesengine.dynamicrules.exceptions.RuleNotFoundException; 48 | import com.aws.rulesengine.dynamicrules.objects.Alert; 49 | import com.aws.rulesengine.dynamicrules.objects.AlertStatus; 50 | import com.aws.rulesengine.dynamicrules.objects.OperationStatus; 51 | import com.aws.rulesengine.dynamicrules.objects.Rule; 52 | import com.aws.rulesengine.dynamicrules.objects.RuleOperationStatus; 53 | import com.aws.rulesengine.dynamicrules.objects.SensorEvent; 54 | import com.aws.rulesengine.dynamicrules.objects.SensorMapState; 55 | import com.aws.rulesengine.dynamicrules.objects.Status; 56 | import com.aws.rulesengine.dynamicrules.utils.AlertUtils; 57 | import com.aws.rulesengine.dynamicrules.utils.Descriptors; 58 | import com.aws.rulesengine.dynamicrules.utils.Keyed; 59 | import com.aws.rulesengine.dynamicrules.utils.ProcessingUtils; 60 | 61 | import lombok.extern.slf4j.Slf4j; 62 | 63 | /** 64 | * Implements main rule evaluation and alerting logic. 
65 | */
66 | @Slf4j
67 | public class DynamicAlertFunction
68 | extends KeyedBroadcastProcessFunction<String, Keyed<SensorEvent, String, String>, Rule, Alert> {
69 | 
70 | private static final JexlEngine jexl = new JexlBuilder()
71 | .permissions(JexlPermissions.RESTRICTED.compose("com.aws.rulesengine.dynamicrules.*")).cache(512)
72 | .strict(true).silent(false)
73 | .create();
74 | private final MapStateDescriptor<String, SensorMapState> mapStateDescriptor = new MapStateDescriptor<>("mapState",
75 | BasicTypeInfo.STRING_TYPE_INFO, TypeInformation.of(new TypeHint<SensorMapState>() {
76 | }));
77 | private final ValueStateDescriptor<Rule> ruleStateDescriptor = new ValueStateDescriptor<>("ruleState",
78 | TypeInformation.of(new TypeHint<Rule>() {
79 | }));
80 | private final ValueStateDescriptor<Alert> alertStateDescriptor = new ValueStateDescriptor<>("alertState",
81 | TypeInformation.of(new TypeHint<Alert>() {
82 | }));
83 | 
84 | private transient ValueState<Rule> latestRuleValue;
85 | private transient MapState<String, SensorMapState> sensorWindow;
86 | private transient ValueState<Alert> lastAlertState;
87 | 
88 | @Override
89 | public void open(Configuration parameters) {
90 | // Create States to persist info
91 | sensorWindow = getRuntimeContext().getMapState(mapStateDescriptor);
92 | latestRuleValue = getRuntimeContext().getState(ruleStateDescriptor);
93 | lastAlertState = getRuntimeContext().getState(alertStateDescriptor);
94 | }
95 | 
96 | @Override
97 | public void processElement(
98 | Keyed<SensorEvent, String, String> value, ReadOnlyContext ctx, Collector<Alert> out)
99 | throws Exception {
100 | 
101 | SensorEvent sensorEvent = value.getWrapped();
102 | String sensorId = sensorEvent.getId();
103 | 
104 | log.debug("Processing event for sensorId {} with rule {}", sensorId, value.getId());
105 | 
106 | // Clean up old events
107 | long currentEventTime = value.getWrapped().getEventTimestamp();
108 | log.debug("Current event time {} vs timestamp {}", currentEventTime, ctx.timestamp());
109 | 
110 | Long currentEvalTime = System.currentTimeMillis();
111 | 
112 | ProcessingUtils.addToWindow(sensorWindow, sensorId, value.getWrapped());
113 | 
114 | Rule rule = ctx.getBroadcastState(Descriptors.rulesDescriptor).get(value.getId());
115 | 
116 | ProcessingUtils.updateRule(latestRuleValue, rule);
117 | 
118 | // Evaluate rule
119 | evaluateRule(currentEvalTime, rule, ctx, out, "ProcessElement");
120 | 
121 | // Schedule re-evaluation after the minimum sensor window time
122 | ctx.timerService().registerProcessingTimeTimer(getReevaluationTime(rule));
123 | }
124 | 
125 | private void evaluateRule(Long currentEvalTime, Rule rule, ReadOnlyContext ctx,
126 | Collector<Alert> out, String source) throws Exception {
127 | try {
128 | log.debug("Evaluating rule {} from {}", rule.getId(), source);
129 | if (isRuleValidForEvaluation(rule)) {
130 | JexlContext context = new MapContext();
131 | 
132 | // Check and make sure we have all the necessary data in the context window
133 | for (String ruleSensorId : rule.getSensorWindowMap().keySet()) {
134 | if (!sensorWindow.contains(ruleSensorId)) {
135 | // Here is where we'll send an alert that we have insufficient data
136 | log.debug("DynamicAlertFunction - SensorId " + ruleSensorId + " not found");
137 | throw new InsufficientDataException(rule);
138 | }
139 | context.set(ruleSensorId, sensorWindow.get(ruleSensorId));
140 | }
141 | // Rule Evaluation Logic
142 | JexlExpression expression = jexl.createExpression(rule.getRuleExpression());
143 | Boolean isAlertTriggered = (Boolean) expression.evaluate(context);
144 | 
145 | // Logic to handle the creation & triggering of an alert
146 | handleAlert(isAlertTriggered, currentEvalTime, rule, context, out);
147 | 
148 | // Update the status of the rule to show that it was successfully evaluated (true or
149 | // false doesn't matter for this status)
150 | outputRuleOpData(rule, OperationStatus.SUCCESS, currentEvalTime, ctx);
151 | }
152 | } catch (RuleNotFoundException e) {
153 | log.error("Error while evaluating rule", e);
154 | } catch (InsufficientDataException e) {
155 | log.debug("Insufficient data for rule evaluation");
156 | outputRuleOpData(rule, OperationStatus.INSUFFICIENT_DATA, currentEvalTime, ctx);
157 | } catch (JexlException e) {
158 | log.error("Error while evaluating JEXL expression", e);
159 | outputRuleOpData(rule, OperationStatus.FAILURE, currentEvalTime, e.getMessage(), ctx);
160 | }
161 | }
162 | 
163 | private boolean isRuleValidForEvaluation(Rule rule) {
164 | if (rule == null) {
165 | log.error("Rule does not exist");
166 | return false;
167 | }
168 | if (rule.getStatus() != Status.ACTIVE) {
169 | log.debug("Rule {} is not active, skipping evaluation", rule.getId());
170 | return false;
171 | }
172 | return true;
173 | }
174 | 
175 | // This is our stream of the rule operations status that we use to update the
176 | // rule operation dashboard
177 | // so that we know the latest state of the rule
178 | private void outputRuleOpData(Rule rule, OperationStatus status, Long currentEventTime, ReadOnlyContext ctx) {
179 | outputRuleOpData(rule, status, currentEventTime, null, ctx);
180 | }
181 | 
182 | private void outputRuleOpData(Rule rule, OperationStatus status, Long currentEventTime, String message,
183 | ReadOnlyContext ctx) {
184 | RuleOperationStatus ruleOperationStatus = new RuleOperationStatus(
185 | rule.getEquipmentName(),
186 | rule.getName(),
187 | rule.getId(),
188 | status,
189 | currentEventTime, message);
190 | log.debug("Rule Operation Update - {}", ruleOperationStatus.getStatus());
191 | ctx.output(Descriptors.ruleOperations, ruleOperationStatus);
192 | }
193 | 
194 | private void outputRuleOpData(Rule rule, OperationStatus status, Long currentEventTime, Context ctx) {
195 | RuleOperationStatus ruleOperationStatus = new RuleOperationStatus(
196 | rule.getEquipmentName(),
197 | rule.getName(),
198 | rule.getId(),
199 | status,
200 | currentEventTime, null);
201 | log.debug("Rule Operation Update - {}", ruleOperationStatus.getStatus());
202 | ctx.output(Descriptors.ruleOperations, ruleOperationStatus);
203 | }
204 | 
205 | @Override
206 | public void processBroadcastElement(Rule rule, Context ctx, Collector<Alert> out)
207 | throws Exception {
208 | BroadcastState<String, Rule> broadcastState = ctx.getBroadcastState(Descriptors.rulesDescriptor);
209 | Long currentProcessTime = System.currentTimeMillis();
210 | // If we get a new rule, we'll give it the insufficient data rule op status
211 | if (!broadcastState.contains(rule.getId())) {
212 | outputRuleOpData(rule, OperationStatus.INSUFFICIENT_DATA, currentProcessTime, ctx);
213 | }
214 | ProcessingUtils.handleRuleBroadcast(rule, broadcastState);
215 | }
216 | 
217 | @Override
218 | public void onTimer(final long timestamp,
219 | final KeyedBroadcastProcessFunction<String, Keyed<SensorEvent, String, String>, Rule, Alert>.OnTimerContext ctx,
220 | final Collector<Alert> out)
221 | throws Exception {
222 | 
223 | // Get the applicable rule
224 | Rule rule = latestRuleValue.value();
225 | 
226 | // Clean up state
227 | if (rule.getStatus() == Status.INACTIVE) {
228 | ProcessingUtils.clearMapState(sensorWindow);
229 | }
230 | // This timer will allow us to reevaluate the rule
231 | // But first we should make sure we get rid of the outdated data
232 | evictAgedElementsFromWindow(timestamp, rule);
233 | evaluateRule(timestamp, rule, ctx, out, "onTimer");
234 | 
235 | }
236 | 
237 | private void evictAgedElementsFromWindow(final long timestamp, Rule rule) {
238 | try {
239 | Iterator<Map.Entry<String, SensorMapState>> windowStateKV = sensorWindow.iterator();
240 | // Iterate over the map and remove the entries that are older than the sensor
241 | // window
242 | while (windowStateKV.hasNext()) {
243 | Map.Entry<String, SensorMapState> kv = windowStateKV.next();
244 | Integer sensorWindowMinutes = rule.getSensorWindowMap().getOrDefault(kv.getKey(), 15);
245 | long timeToEvict = timestamp - (sensorWindowMinutes * 60000);
246 | log.debug("Rule {} Time to evict {} w/ sensor window {} for sensor_id {}", rule.getName(), timeToEvict,
247 | sensorWindowMinutes, kv.getKey());
248 | kv.getValue().removeOlderThan(timeToEvict);
249 | if (kv.getValue().hasNoEvents()) {
250 | windowStateKV.remove();
251 | log.debug("SensorId {} has no events", kv.getKey());
252 | }
253 | }
254 | } catch (Exception ex) {
255 | throw new RuntimeException(ex);
256 | }
257 | }
258 | 
259 | // Returns the time at which to reevaluate the rule again (for time-based rules)
260 | private long getReevaluationTime(Rule rule) {
261 | Integer minSensorWindow = rule.getSensorWindowMap().values().stream().min(Integer::compare).orElse(15);
262 | return System.currentTimeMillis() + (minSensorWindow * 60000);
263 | }
264 | 
265 | private void handleAlert(Boolean isAlertTriggered, Long currentEvalTime, Rule rule, JexlContext context,
266 | Collector<Alert> out) throws IOException {
267 | 
268 | // Create triggering events
269 | List<SensorEvent> triggeringEvents = isAlertTriggered
270 | ? AlertUtils.alertMapToSensorEventList(context, rule.getSensorWindowMap().keySet())
271 | : Collections.emptyList();
272 | 
273 | // Create alert
274 | Alert alert = new Alert(rule.getEquipmentName(), rule.getName(), rule.getId(),
275 | isAlertTriggered ? AlertStatus.START : AlertStatus.STOP,
276 | triggeringEvents, currentEvalTime);
277 | 
278 | // Determine if an alert should be emitted
279 | boolean shouldEmitAlert = false;
280 | 
281 | if (lastAlertState.value() == null) {
282 | // If there's no previous alert state, emit the alert only if it's a START alert;
283 | // it doesn't make sense to emit a 'STOP' state first
284 | shouldEmitAlert = (alert.getStatus() == AlertStatus.START);
285 | } else {
286 | // If there's a previous alert state, emit the alert only if the status has changed,
287 | // i.e. we transition from START -> STOP or vice versa
288 | shouldEmitAlert = (lastAlertState.value().getStatus() != alert.getStatus());
289 | }
290 | if (shouldEmitAlert) {
291 | log.debug("Pushing {} alert for {}", alert.getStatus(), rule.getName());
292 | out.collect(alert);
293 | lastAlertState.update(alert);
294 | }
295 | }
296 | 
297 | }
298 | --------------------------------------------------------------------------------
/rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/functions/DynamicKeyFunction.java:
--------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package com.aws.rulesengine.dynamicrules.functions;
20 | 
21 | import java.util.Map;
22 | 
23 | import org.apache.flink.api.common.state.BroadcastState;
24 | import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
25 | import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
26 | import org.apache.flink.util.Collector;
27 | 
28 | import com.aws.rulesengine.dynamicrules.objects.Rule;
29 | import com.aws.rulesengine.dynamicrules.objects.SensorEvent;
30 | import com.aws.rulesengine.dynamicrules.utils.Descriptors;
31 | import com.aws.rulesengine.dynamicrules.utils.Keyed;
32 | import com.aws.rulesengine.dynamicrules.utils.ProcessingUtils;
33 | 
34 | import lombok.extern.slf4j.Slf4j;
35 | 
36 | @Slf4j
37 | public class DynamicKeyFunction
38 | extends BroadcastProcessFunction<SensorEvent, Rule, Keyed<SensorEvent, String, String>> {
39 | 
40 | @Override
41 | public void processElement(
42 | SensorEvent event, ReadOnlyContext ctx, Collector<Keyed<SensorEvent, String, String>> out)
43 | throws Exception {
44 | ReadOnlyBroadcastState<String, Rule> rulesState = ctx.getBroadcastState(Descriptors.rulesDescriptor);
45 | // We want to fork the event for each rule that contains the sensorId of the event
46 | forkEventForEachGroupingKey(event, rulesState, out);
47 | }
48 | 
49 | private void forkEventForEachGroupingKey(
50 | SensorEvent event,
51 | ReadOnlyBroadcastState<String, Rule> rulesState,
52 | Collector<Keyed<SensorEvent, String, String>> out)
53 | throws Exception {
54 | for (Map.Entry<String, Rule> entry : rulesState.immutableEntries()) {
55 | final Rule rule = entry.getValue();
56 | if (rule.getSensorWindowMap().containsKey(event.getId())) {
57 | String key = rule.getId() + "_" + event.getEquipment().getId();
58 | log.debug("Found sensor event {} for rule {}", event.getId(), rule.getName());
59 | out.collect(
60 | new Keyed<>(
61 | event, key, rule.getId()));
62 | }
63 | }
64 | }
65 | 
66 | @Override
67 | public void processBroadcastElement(
68 | Rule rule, Context ctx, Collector<Keyed<SensorEvent, String, String>> out) throws Exception {
69 | BroadcastState<String, Rule> broadcastState = ctx.getBroadcastState(Descriptors.rulesDescriptor);
70 | ProcessingUtils.handleRuleBroadcast(rule, broadcastState);
71 | }
72 | }
73 | --------------------------------------------------------------------------------
/rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/Alert.java:
--------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.objects; 20 | 21 | import java.util.List; 22 | 23 | import lombok.AllArgsConstructor; 24 | import lombok.Data; 25 | 26 | @Data 27 | @AllArgsConstructor 28 | public class Alert { 29 | private String equipmentName; 30 | private String ruleName; 31 | private String ruleId; 32 | private AlertStatus status; 33 | private List triggeringEvents; 34 | private Long timestamp; 35 | } 36 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/AlertStatus.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.objects; 2 | 3 | public enum AlertStatus { 4 | STOP(0), 5 | START(1); 6 | 7 | private final int value; 8 | 9 | AlertStatus(int i) { 10 | this.value = i; 11 | } 12 | }; 13 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/Equipment.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.objects; 2 | 3 | import javax.annotation.Nullable; 4 | 5 | import lombok.AllArgsConstructor; 6 | import lombok.Builder; 7 | import lombok.Data; 8 | import lombok.EqualsAndHashCode; 9 | import lombok.NoArgsConstructor; 10 | import lombok.NonNull; 11 | import lombok.ToString; 12 | 13 | @EqualsAndHashCode 14 | @ToString 15 | @Data 16 | @NoArgsConstructor(force = true) 17 | @AllArgsConstructor 18 | @Builder 19 | public class Equipment { 20 | @NonNull 21 | private String id; 22 | @Nullable 23 | private String type; 24 | @Nullable 25 | private String name; 26 | 27 | } 28 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/OperationStatus.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.objects; 2 | 3 | public enum OperationStatus { 4 | INSUFFICIENT_DATA (0), 5 | SUCCESS (1), 6 | FAILURE (2); 7 | 8 | private final int value; 9 | 10 | OperationStatus(int value) { 11 | this.value = value; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/Rule.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.objects; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import lombok.AllArgsConstructor; 7 | import lombok.Data; 8 | import lombok.EqualsAndHashCode; 9 | import lombok.NoArgsConstructor; 10 | import lombok.NonNull; 11 | import lombok.Singular; 12 | import lombok.ToString; 13 | 14 | @EqualsAndHashCode 15 | @ToString 16 | @Data 17 | @NoArgsConstructor(force = true) 18 | @AllArgsConstructor 19 | public class Rule { 20 | @NonNull 21 | private String id; 22 | @NonNull 23 | private String name; 24 | @NonNull 25 | private Status status; 26 | @NonNull 27 | private String equipmentName; 28 | @NonNull 29 | private String ruleExpression; 30 | @NonNull 31 | @Singular 32 | private Map sensorWindowMap; 33 | } -------------------------------------------------------------------------------- 
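A note on the Rule object just shown: it is deliberately thin, and the behaviour lives in ruleExpression, a JEXL expression that DynamicAlertFunction evaluates against one SensorMapState per sensor id listed in sensorWindowMap. Below is a minimal standalone sketch of that evaluation step; the sensor id, threshold and expression are hypothetical, and in the running job this happens inside Flink keyed and broadcast state rather than in a main method.

```java
import org.apache.commons.jexl3.JexlBuilder;
import org.apache.commons.jexl3.JexlContext;
import org.apache.commons.jexl3.JexlEngine;
import org.apache.commons.jexl3.JexlExpression;
import org.apache.commons.jexl3.MapContext;

import com.aws.rulesengine.dynamicrules.objects.SensorEvent;
import com.aws.rulesengine.dynamicrules.objects.SensorMapState;

public class RuleExpressionSketch {
    public static void main(String[] args) {
        // One hypothetical reading for a hypothetical sensor id.
        SensorEvent reading = new SensorEvent();
        reading.setId("SENSOR_TEMP_01");
        reading.setMeasureValue(82.5);
        reading.setEventTimestamp(System.currentTimeMillis());

        SensorMapState window = new SensorMapState();
        window.addSensorEvent(reading);

        // DynamicAlertFunction exposes each sensor's SensorMapState to JEXL under its
        // sensor id, so a ruleExpression can call the window's helper methods directly.
        JexlEngine jexl = new JexlBuilder().strict(true).silent(false).create();
        JexlExpression expression = jexl.createExpression(
                "SENSOR_TEMP_01.getValue() > 80.0 and SENSOR_TEMP_01.hasChanged(10)");
        JexlContext context = new MapContext();
        context.set("SENSOR_TEMP_01", window);

        Boolean triggered = (Boolean) expression.evaluate(context);
        System.out.println("Alert triggered: " + triggered); // true for this reading
    }
}
```

Whether a true result turns into an AlertStatus.START or STOP alert is then decided by handleAlert in DynamicAlertFunction, which only emits on a state transition.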
/rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/RuleOperationStatus.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.objects; 20 | 21 | import lombok.AllArgsConstructor; 22 | import lombok.Data; 23 | import lombok.EqualsAndHashCode; 24 | import lombok.NoArgsConstructor; 25 | import lombok.ToString; 26 | 27 | @EqualsAndHashCode 28 | @ToString 29 | @Data 30 | @NoArgsConstructor(force = true) 31 | @AllArgsConstructor 32 | public class RuleOperationStatus { 33 | private String equipmentName; 34 | private String ruleName; 35 | private String ruleId; 36 | private OperationStatus status; 37 | private Long timestamp; 38 | private String message; 39 | } 40 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/SensorEvent.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.objects; 2 | 3 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonAlias; 4 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonIgnoreProperties; 5 | 6 | import lombok.AllArgsConstructor; 7 | import lombok.Builder; 8 | import lombok.Data; 9 | import lombok.EqualsAndHashCode; 10 | import lombok.NoArgsConstructor; 11 | import lombok.NonNull; 12 | import lombok.ToString; 13 | 14 | @EqualsAndHashCode 15 | @ToString 16 | @Data 17 | @NoArgsConstructor(force = true) 18 | @AllArgsConstructor 19 | @Builder 20 | @JsonIgnoreProperties({ "asset_uuid", "measure_name", "utc_date"}) 21 | public class SensorEvent implements TimestampAssignable { 22 | @NonNull 23 | private Equipment equipment; 24 | @NonNull 25 | @JsonAlias({ "point_uuid", "pointUuid", "id" }) 26 | private String id; 27 | @NonNull 28 | @JsonAlias({ "measure_value", "measureValue" }) 29 | private Double measureValue; 30 | @NonNull 31 | @JsonAlias({ "utc_date_ms", "eventTimestamp" }) 32 | private Long eventTimestamp; 33 | private Long ingestionTimestamp; 34 | 35 | @Override 36 | public void assignIngestionTimestamp(Long timestamp) { 37 | this.ingestionTimestamp = timestamp; 38 | } 39 | } 40 | 41 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/SensorMapState.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.objects; 2 | 3 | import java.util.Set; 4 | import java.util.TreeMap; 5 | 6 | import lombok.Data; 7 | import 
lombok.EqualsAndHashCode;
8 | import lombok.NonNull;
9 | import lombok.Singular;
10 | import lombok.ToString;
11 | import lombok.extern.slf4j.Slf4j;
12 | 
13 | @EqualsAndHashCode
14 | @ToString
15 | @Data
16 | @Slf4j
17 | public class SensorMapState {
18 | @NonNull
19 | @Singular
20 | private TreeMap<Long, SensorEvent> sensorEvents;
21 | 
22 | public SensorMapState() {
23 | sensorEvents = new TreeMap<>();
24 | }
25 | 
26 | public void addSensorEvent(SensorEvent sensorEvent) {
27 | log.debug("Adding sensorEvent {} w/ time {}", sensorEvent.getId(), sensorEvent.getEventTimestamp());
28 | sensorEvents.put(sensorEvent.getEventTimestamp(), sensorEvent);
29 | }
30 | 
31 | public void removeOlderThan(Long timestamp) {
32 | // Evict every event whose key (the event timestamp) is strictly older than
33 | // the given cutoff. Collection.removeIf drives the keySet's own iterator,
34 | // so entries can be dropped safely while the window is being scanned,
35 | // without risking a ConcurrentModificationException from removing map
36 | // entries while iterating over keySet() directly.
37 | log.debug("Removing sensorEvents older than {}", timestamp);
38 | sensorEvents.keySet().removeIf(key -> key < timestamp);
39 | }
40 | 
41 | public boolean hasNoEvents() {
42 | return sensorEvents.isEmpty();
43 | }
44 | 
45 | public Double getValue(){
46 | if (hasNoEvents()) {
47 | return 0.0;
48 | }
49 | SensorEvent sensorEvent = sensorEvents.lastEntry().getValue();
50 | return sensorEvent.getMeasureValue();
51 | }
52 | 
53 | public Boolean isValueBetween(Double start, Double end){
54 | if (hasNoEvents()) {
55 | return false;
56 | }
57 | SensorEvent sensorEvent = sensorEvents.lastEntry().getValue();
58 | return sensorEvent.getMeasureValue() >= start && sensorEvent.getMeasureValue() <= end;
59 | }
60 | 
61 | public Long getMinutesSinceChange() {
62 | if (hasNoEvents()) {
63 | return 0L;
64 | }
65 | Double initialSensorValue = sensorEvents.lastEntry().getValue().getMeasureValue();
66 | Long minutesSinceValueChange = 0L;
67 | for (Long mapKey : sensorEvents.descendingKeySet()) {
68 | minutesSinceValueChange = mapKey;
69 | Double sensorValue = sensorEvents.get(minutesSinceValueChange).getMeasureValue();
70 | if (!initialSensorValue.equals(sensorValue)) {
71 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000;
72 | }
73 | }
74 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000;
75 | }
76 | 
77 | public Boolean hasChanged(Integer time) {
78 | Long minutesSinceChange = getMinutesSinceChange();
79 | log.debug("Time: " + time + " | Minutes since change: " + minutesSinceChange);
80 | return minutesSinceChange <= time;
81 | }
82 | 
83 | public Boolean hasNotChanged(Integer time) {
84 | Long minutesSinceChange = getMinutesSinceChange();
85 | log.debug("Time: " + time + " | Minutes since change: " + minutesSinceChange);
86 | return minutesSinceChange > time;
87 | }
88 | 
89 | public Long getMinutesSinceComparison(Double compareValue, String operator) {
90 | if (operator.equals(">")) {
91 | return getMinutesSinceValueGreaterThan(compareValue);
92 | }
93 | else if (operator.equals(">=")) {
94 | return getMinutesSinceValueGreaterEqualThan(compareValue);
95 | }
96 | else if (operator.equals("<")) {
97 | return getMinutesSinceValueLessThan(compareValue);
98 | }
99 | else if (operator.equals("<=")) {
100 | return getMinutesSinceValueLessEqualThan(compareValue);
101 | }
102 | else if (operator.equals("==")) {
103 | return getMinutesSinceValueEquals(compareValue);
104 | }
105 | else if (operator.equals("!=")) {
106 | return getMinutesSinceValueNotEquals(compareValue);
107 | }
108 | return 0L;
109 | }
110 | 
111 | public Long getMinutesSinceComparison(Integer compareValueInt, String operator) {
112 | Double compareValue = compareValueInt.doubleValue();
113 | return 
getMinutesSinceComparison(compareValue,operator); 114 | } 115 | 116 | public Long getMinutesSinceValueGreaterThan(Double greaterThan) { 117 | Long minutesSinceValueChange = 0L; 118 | for (Long mapKey : sensorEvents.descendingKeySet()) { 119 | minutesSinceValueChange = mapKey; 120 | Double sensorValue = sensorEvents.get(mapKey).getMeasureValue(); 121 | log.debug("MapKey: {} | SensorValue: {}", mapKey, sensorValue); 122 | // if sensorValue is NOT between start and end, then return the time since the first sensor event 123 | if (sensorValue <= greaterThan) { 124 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 125 | } 126 | } 127 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 128 | } 129 | 130 | public Long getMinutesSinceValueGreaterEqualThan(Double greaterThan) { 131 | Long minutesSinceValueChange = 0L; 132 | for (Long mapKey : sensorEvents.descendingKeySet()) { 133 | minutesSinceValueChange = mapKey; 134 | Double sensorValue = sensorEvents.get(mapKey).getMeasureValue(); 135 | log.debug("MapKey: {} | SensorValue: {}", mapKey, sensorValue); 136 | // if sensorValue is NOT between start and end, then return the time since the first sensor event 137 | if (sensorValue < greaterThan) { 138 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 139 | } 140 | } 141 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 142 | } 143 | 144 | public Long getMinutesSinceValueLessThan(Double lessThan) { 145 | Long minutesSinceValueChange = 0L; 146 | for (Long mapKey : sensorEvents.descendingKeySet()) { 147 | minutesSinceValueChange = mapKey; 148 | Double sensorValue = sensorEvents.get(mapKey).getMeasureValue(); 149 | log.debug("MapKey: {} | SensorValue: {}", mapKey, sensorValue); 150 | // if sensorValue is NOT between start and end, then return the time since the first sensor event 151 | if (sensorValue >= lessThan) { 152 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 153 | } 154 | } 155 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 156 | } 157 | 158 | public Long getMinutesSinceValueLessEqualThan(Double lessThan) { 159 | Long minutesSinceValueChange = 0L; 160 | for (Long mapKey : sensorEvents.descendingKeySet()) { 161 | minutesSinceValueChange = mapKey; 162 | Double sensorValue = sensorEvents.get(mapKey).getMeasureValue(); 163 | log.debug("MapKey: {} | SensorValue: {}", mapKey, sensorValue); 164 | // if sensorValue is NOT between start and end, then return the time since the first sensor event 165 | if (sensorValue > lessThan) { 166 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 167 | } 168 | } 169 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 170 | } 171 | 172 | public Long getMinutesSinceValueEquals(Double equals) { 173 | Long minutesSinceValueChange = 0L; 174 | for (Long mapKey : sensorEvents.descendingKeySet()) { 175 | minutesSinceValueChange = mapKey; 176 | Double sensorValue = sensorEvents.get(mapKey).getMeasureValue(); 177 | log.debug("MapKey: {} | SensorValue: {}", mapKey, sensorValue); 178 | // if sensorValue is NOT between start and end, then return the time since the first sensor event 179 | if (sensorValue != equals) { 180 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 181 | } 182 | } 183 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 184 | } 185 | 186 | public Long getMinutesSinceValueNotEquals(Double notEquals) { 187 | Long minutesSinceValueChange = 
0L; 188 | for (Long mapKey : sensorEvents.descendingKeySet()) { 189 | minutesSinceValueChange = mapKey; 190 | Double sensorValue = sensorEvents.get(mapKey).getMeasureValue(); 191 | log.debug("MapKey: {} | SensorValue: {}", mapKey, sensorValue); 192 | // if sensorValue is NOT between start and end, then return the time since the first sensor event 193 | if (sensorValue == notEquals) { 194 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 195 | } 196 | } 197 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 198 | } 199 | 200 | public Long getMinutesSinceValueBetween(Double start, Double end) { 201 | if (start == end) { 202 | return 0L; 203 | } 204 | Long minutesSinceValueChange = 0L; 205 | for (Long mapKey : sensorEvents.descendingKeySet()) { 206 | minutesSinceValueChange = mapKey; 207 | Double sensorValue = sensorEvents.get(mapKey).getMeasureValue(); 208 | log.debug("MapKey: {} | SensorValue: {}", mapKey, sensorValue); 209 | // if sensorValue is NOT between start and end, then return the time since the first sensor event 210 | if (sensorValue < start || sensorValue > end) { 211 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 212 | } 213 | } 214 | return (System.currentTimeMillis() - minutesSinceValueChange) / 60000; 215 | } 216 | } -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/Status.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.objects; 2 | 3 | public enum Status { 4 | ACTIVE, 5 | INACTIVE 6 | } 7 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/objects/TimestampAssignable.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.objects; 20 | 21 | public interface TimestampAssignable { 22 | void assignIngestionTimestamp(T timestamp); 23 | } 24 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/parsers/RuleParser.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.parsers; 20 | 21 | import java.io.IOException; 22 | 23 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; 24 | 25 | import com.aws.rulesengine.dynamicrules.objects.Rule; 26 | 27 | public class RuleParser { 28 | 29 | private final ObjectMapper objectMapper = new ObjectMapper(); 30 | 31 | public Rule fromString(String line) throws IOException { 32 | return parseJson(line); 33 | } 34 | 35 | private Rule parseJson(String ruleString) throws IOException { 36 | return objectMapper.readValue(ruleString, Rule.class); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/parsers/SensorEventParser.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.parsers; 2 | 3 | import java.io.IOException; 4 | 5 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; 6 | 7 | import com.aws.rulesengine.dynamicrules.objects.SensorEvent; 8 | 9 | public class SensorEventParser { 10 | private final ObjectMapper objectMapper = new ObjectMapper(); 11 | 12 | public SensorEvent fromString(String line) throws IOException { 13 | return parseJson(line); 14 | } 15 | 16 | private SensorEvent parseJson(String ruleString) throws IOException { 17 | return objectMapper.readValue(ruleString, SensorEvent.class); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/AlertUtils.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.utils; 2 | 3 | import java.util.ArrayList; 4 | import java.util.List; 5 | import java.util.Set; 6 | 7 | import org.apache.commons.jexl3.JexlContext; 8 | 9 | import com.aws.rulesengine.dynamicrules.objects.SensorEvent; 10 | import com.aws.rulesengine.dynamicrules.objects.SensorMapState; 11 | 12 | import lombok.extern.slf4j.Slf4j; 13 | 14 | @Slf4j 15 | public class AlertUtils { 16 | 17 | public static List alertMapToSensorEventList(JexlContext mapContext, Set sensorIds) { 18 | List sensorEvents = new ArrayList<>(); 19 | sensorIds.forEach(sensorId -> { 20 | SensorMapState sensorMapState = (SensorMapState) mapContext.get(sensorId); 21 | sensorEvents.addAll(sensorMapState.getSensorEvents().values()); 22 | }); 23 | return sensorEvents; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/Descriptors.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules.utils; 2 | 3 | import org.apache.flink.api.common.state.MapStateDescriptor; 4 
| import org.apache.flink.api.common.typeinfo.BasicTypeInfo; 5 | import org.apache.flink.api.common.typeinfo.TypeInformation; 6 | import org.apache.flink.util.OutputTag; 7 | 8 | import com.aws.rulesengine.dynamicrules.objects.Rule; 9 | import com.aws.rulesengine.dynamicrules.objects.RuleOperationStatus; 10 | 11 | public class Descriptors { 12 | public static final MapStateDescriptor rulesDescriptor = new MapStateDescriptor<>( 13 | "rules", BasicTypeInfo.STRING_TYPE_INFO, TypeInformation.of(Rule.class)); 14 | 15 | public static final OutputTag ruleOperations = new OutputTag( 16 | "rule-operations") { 17 | }; 18 | } -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/JsonDeserializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import org.apache.flink.api.common.functions.RichFlatMapFunction; 22 | import org.apache.flink.configuration.Configuration; 23 | import org.apache.flink.util.Collector; 24 | 25 | import lombok.extern.slf4j.Slf4j; 26 | 27 | @Slf4j 28 | public class JsonDeserializer extends RichFlatMapFunction { 29 | 30 | private final Class targetClass; 31 | private JsonMapper parser; 32 | 33 | public JsonDeserializer(Class targetClass) { 34 | this.targetClass = targetClass; 35 | } 36 | 37 | @Override 38 | public void open(Configuration parameters) throws Exception { 39 | super.open(parameters); 40 | parser = new JsonMapper<>(targetClass); 41 | } 42 | 43 | @Override 44 | public void flatMap(String value, Collector out) throws Exception { 45 | try { 46 | T parsed = parser.fromString(value); 47 | out.collect(parsed); 48 | } catch (Exception e) { 49 | log.warn("Failed parsing rule, dropping it:", e); 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/JsonMapper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import java.io.IOException; 22 | 23 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; 24 | 25 | public class JsonMapper { 26 | 27 | private final Class targetClass; 28 | private final ObjectMapper objectMapper; 29 | 30 | public JsonMapper(Class targetClass) { 31 | this.targetClass = targetClass; 32 | objectMapper = new ObjectMapper(); 33 | } 34 | 35 | public T fromString(String line) throws IOException { 36 | return objectMapper.readValue(line, targetClass); 37 | } 38 | 39 | public String toString(T line) throws IOException { 40 | return objectMapper.writeValueAsString(line); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/JsonSerializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import org.apache.flink.api.common.functions.RichFlatMapFunction; 22 | import org.apache.flink.configuration.Configuration; 23 | import org.apache.flink.util.Collector; 24 | 25 | import lombok.extern.slf4j.Slf4j; 26 | 27 | @Slf4j 28 | public class JsonSerializer extends RichFlatMapFunction { 29 | 30 | private final Class targetClass; 31 | private JsonMapper parser; 32 | 33 | public JsonSerializer(Class sourceClass) { 34 | this.targetClass = sourceClass; 35 | } 36 | 37 | @Override 38 | public void open(Configuration parameters) throws Exception { 39 | super.open(parameters); 40 | parser = new JsonMapper<>(targetClass); 41 | } 42 | 43 | @Override 44 | public void flatMap(T value, Collector out) throws Exception { 45 | System.out.println(value); 46 | try { 47 | String serialized = parser.toString(value); 48 | out.collect(serialized); 49 | } catch (Exception e) { 50 | log.warn("Failed serializing to JSON dropping it:", e); 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/Keyed.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import lombok.AllArgsConstructor; 22 | import lombok.Data; 23 | import lombok.NoArgsConstructor; 24 | 25 | @Data 26 | @NoArgsConstructor 27 | @AllArgsConstructor 28 | public class Keyed { 29 | private IN wrapped; 30 | private KEY key; 31 | private ID id; 32 | } 33 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/KinesisUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import java.io.IOException; 22 | 23 | import com.amazonaws.services.kinesisanalytics.runtime.KinesisAnalyticsRuntime; 24 | 25 | import lombok.extern.slf4j.Slf4j; 26 | @Slf4j 27 | 28 | public class KinesisUtils { 29 | 30 | public static String getKinesisRuntimeProperty(String groupId, String value) throws IOException { 31 | return KinesisAnalyticsRuntime.getApplicationProperties().get(groupId).getProperty(value); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/ProcessingUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import java.util.Iterator; 22 | import java.util.Map; 23 | 24 | import org.apache.flink.api.common.state.BroadcastState; 25 | import org.apache.flink.api.common.state.MapState; 26 | import org.apache.flink.api.common.state.ValueState; 27 | 28 | import com.aws.rulesengine.dynamicrules.objects.Rule; 29 | import com.aws.rulesengine.dynamicrules.objects.SensorEvent; 30 | import com.aws.rulesengine.dynamicrules.objects.SensorMapState; 31 | 32 | public class ProcessingUtils { 33 | 34 | public static void handleRuleBroadcast(Rule rule, BroadcastState broadcastState) 35 | throws Exception { 36 | switch (rule.getStatus()) { 37 | case ACTIVE: 38 | broadcastState.put(rule.getId(), rule); 39 | break; 40 | case INACTIVE: 41 | broadcastState.remove(rule.getId()); 42 | break; 43 | } 44 | } 45 | 46 | public static void addToWindow(MapState mapState, String key, SensorEvent value) 47 | throws Exception { 48 | 49 | SensorMapState sensorMapState = mapState.get(key); 50 | 51 | if (sensorMapState == null) { 52 | sensorMapState = new SensorMapState(); 53 | sensorMapState.addSensorEvent(value); 54 | mapState.put(key, sensorMapState); 55 | } else { 56 | sensorMapState.addSensorEvent(value); 57 | } 58 | } 59 | 60 | public static void clearMapState(MapState mapState) { 61 | try { 62 | Iterator> mapStateKV = mapState.iterator(); 63 | while (mapStateKV.hasNext()) { 64 | mapStateKV.next(); 65 | mapStateKV.remove(); 66 | } 67 | } catch (Exception ex) { 68 | throw new RuntimeException(ex); 69 | } 70 | } 71 | 72 | public static Rule updateRule(ValueState valueState, Rule rule) 73 | throws Exception { 74 | valueState.update(rule); 75 | return valueState.value(); 76 | } 77 | 78 | 79 | } 80 | 
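ProcessingUtils.addToWindow above appends each incoming event to a per-sensor SensorMapState held in Flink MapState, and the eviction timer in DynamicAlertFunction later trims that window. The sketch below illustrates the same window semantics on a plain SensorMapState instance; the sensor id, values and timestamps are made up, and in the running job the state object lives inside MapState rather than a local variable.

```java
import com.aws.rulesengine.dynamicrules.objects.SensorEvent;
import com.aws.rulesengine.dynamicrules.objects.SensorMapState;

public class SensorWindowSketch {

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        SensorMapState window = new SensorMapState();

        // Two hypothetical readings for the same sensor, 20 and 2 minutes old.
        window.addSensorEvent(reading(3.0, now - 20 * 60_000L));
        window.addSensorEvent(reading(9.0, now - 2 * 60_000L));

        // getValue() reflects the newest reading in the window.
        System.out.println(window.getValue());                 // 9.0
        System.out.println(window.isValueBetween(8.0, 10.0));  // true

        // Minutes since the last reading at or below 5.0 (about 20 here).
        System.out.println(window.getMinutesSinceValueGreaterThan(5.0));

        // Evict everything outside a 15-minute sensor window, as the
        // eviction timer in DynamicAlertFunction does per rule.
        window.removeOlderThan(now - 15 * 60_000L);
        System.out.println(window.hasNoEvents());               // false, one reading left
    }

    private static SensorEvent reading(Double value, Long timestamp) {
        // Hypothetical sensor id; real ids come from the point_uuid field.
        SensorEvent event = new SensorEvent();
        event.setId("SENSOR_PRESSURE_01");
        event.setMeasureValue(value);
        event.setEventTimestamp(timestamp);
        return event;
    }
}
```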
-------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/RuleDeserializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import org.apache.flink.api.common.functions.RichFlatMapFunction; 22 | import org.apache.flink.configuration.Configuration; 23 | import org.apache.flink.util.Collector; 24 | 25 | import com.aws.rulesengine.dynamicrules.objects.Rule; 26 | import com.aws.rulesengine.dynamicrules.parsers.RuleParser; 27 | 28 | import lombok.extern.slf4j.Slf4j; 29 | 30 | @Slf4j 31 | public class RuleDeserializer extends RichFlatMapFunction { 32 | 33 | private RuleParser ruleParser; 34 | 35 | @Override 36 | public void open(Configuration parameters) throws Exception { 37 | super.open(parameters); 38 | ruleParser = new RuleParser(); 39 | } 40 | 41 | @Override 42 | public void flatMap(String value, Collector out) throws Exception { 43 | try { 44 | Rule rule = ruleParser.fromString(value); 45 | out.collect(rule); 46 | } catch (Exception e) { 47 | log.warn("Failed parsing rule, dropping it:", e); 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /rules-engine/src/main/java/com/aws/rulesengine/dynamicrules/utils/TimeStamper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package com.aws.rulesengine.dynamicrules.utils; 20 | 21 | import org.apache.flink.api.common.functions.RichFlatMapFunction; 22 | import org.apache.flink.util.Collector; 23 | 24 | import com.aws.rulesengine.dynamicrules.objects.TimestampAssignable; 25 | 26 | public class TimeStamper> extends RichFlatMapFunction { 27 | 28 | @Override 29 | public void flatMap(T value, Collector out) throws Exception { 30 | value.assignIngestionTimestamp(System.currentTimeMillis()); 31 | out.collect(value); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /rules-engine/src/main/resources/flink-application-properties-dev.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "PropertyGroupId": "InputStream0", 4 | "PropertyMap": { 5 | "aws.region": "us-east-1", 6 | "stream.name": "ExampleInputStream" 7 | } 8 | }, 9 | { 10 | "PropertyGroupId": "OutputStream0", 11 | "PropertyMap": { 12 | "aws.region": "us-east-1", 13 | "stream.name": "ExampleOutputStream" 14 | } 15 | } 16 | ] 17 | -------------------------------------------------------------------------------- /rules-engine/src/main/resources/log4j2.properties: -------------------------------------------------------------------------------- 1 | rootLogger.level = INFO 2 | rootLogger.appenderRef.console.ref = ConsoleAppender 3 | 4 | appender.console.name = ConsoleAppender 5 | appender.console.type = CONSOLE 6 | appender.console.layout.type = PatternLayout 7 | appender.console.layout.pattern = %d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n 8 | -------------------------------------------------------------------------------- /rules-engine/src/test/com.aws.rulesengine.dynamicrules/RuleEvaluationTest.java: -------------------------------------------------------------------------------- 1 | package com.aws.rulesengine.dynamicrules; 2 | 3 | import com.carrier.rulesengine.dynamicrules.objects.SensorEvent; 4 | import com.carrier.rulesengine.dynamicrules.objects.SensorMapState; 5 | import org.apache.commons.jexl3.*; 6 | import org.apache.flink.api.common.state.MapState; 7 | import org.junit.Before; 8 | import org.junit.Test; 9 | 10 | import java.io.IOException; 11 | import java.util.Set; 12 | 13 | import static org.junit.Assert.assertEquals; 14 | 15 | public class RuleEvaluationTest { 16 | private static final JexlEngine jexl = new JexlBuilder() 17 | .permissions(JexlPermissions.RESTRICTED.compose("com.aws.rulesengine.dynamicrules.*")).cache(512) 18 | .strict(true).silent(false) 19 | .create(); 20 | 21 | private transient SensorMapState sensorMapState; 22 | private JsonMapper jsonMapper = new JsonMapper(SensorEvent.class); 23 | @Before 24 | public void setup() throws IOException { 25 | sensorMapState = new SensorMapState(); 26 | SensorEvent event1 = new SensorEvent(); 27 | event1.setId("SENSOR_9d5ef9bf_a7bf43368f60d1a_9d9d12c2a"); 28 | event1.setMeasureValue(7D); 29 | event1.setEventTimestamp(System.currentTimeMillis()-5000); 30 | sensorMapState.addSensorEvent(event1); 31 | SensorEvent event2 = new SensorEvent(); 32 | event2.setId("SENSOR_9d5ef9bf_a7bf43368f60d1a_9d9d12c2a"); 33 | event2.setMeasureValue(7D); 34 | event2.setEventTimestamp(System.currentTimeMillis()-4000); 35 | sensorMapState.addSensorEvent(event2); 36 | } 37 | 38 | @Test 39 | public void testRuleEvaluation() throws Exception { 40 | String SENSOR_9d5ef9bfa7bf43368f60d1a9d9d12c2a = ""; 41 | String rule = "A.getSecondsSinceValueLessThan(3.0) >= 4.0 and A.getSecondsSinceValueChange() >= 1.0 and 
A.getSecondsSinceValueBetween(1.0,4.0) >= 5.0 and A.getAverageSensorValue() >= 1.0"; 42 | JexlExpression expression = jexl.createExpression(rule); 43 | JexlContext context = new MapContext(); 44 | context.set("A", sensorMapState); 45 | boolean result = (boolean) expression.evaluate(context); 46 | assertEquals(true, result); 47 | } 48 | 49 | @Test 50 | public void testRuleEvaluationWithLongNames() throws Exception { 51 | String rule = "SENSOR_9d5ef9bf_a7bf43368f60d1a_9d9d12c2a.hasChanged(30)"; 52 | JexlExpression expression = jexl.createExpression(rule); 53 | JexlContext context = new MapContext(); 54 | context.set("SENSOR_9d5ef9bf_a7bf43368f60d1a_9d9d12c2a", sensorMapState); 55 | boolean result = (boolean) expression.evaluate(context); 56 | assertEquals(true, result); 57 | } 58 | 59 | } 60 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "module": "commonjs", 5 | "lib": [ 6 | "es2020", 7 | "dom" 8 | ], 9 | "declaration": true, 10 | "strict": true, 11 | "noImplicitAny": true, 12 | "strictNullChecks": true, 13 | "noImplicitThis": true, 14 | "alwaysStrict": true, 15 | "noUnusedLocals": false, 16 | "noUnusedParameters": false, 17 | "noImplicitReturns": true, 18 | "noFallthroughCasesInSwitch": false, 19 | "inlineSourceMap": true, 20 | "inlineSources": true, 21 | "experimentalDecorators": true, 22 | "strictPropertyInitialization": false, 23 | "typeRoots": [ 24 | "./node_modules/@types" 25 | ] 26 | }, 27 | "exclude": [ 28 | "node_modules", 29 | "cdk.out" 30 | ] 31 | } 32 | --------------------------------------------------------------------------------
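As a closing reference, RuleParser and RuleDeserializer above simply bind JSON records from the rules stream to the Rule POJO with Jackson. The repository's events/rules.json is not reproduced in this listing, so the record in the sketch below is a hypothetical example of the shape those classes expect, assuming the rules-engine classes are on the classpath; field names mirror Rule.java and the expression follows the JEXL conventions used in DynamicAlertFunction and RuleEvaluationTest.

```java
import com.aws.rulesengine.dynamicrules.objects.Rule;
import com.aws.rulesengine.dynamicrules.parsers.RuleParser;

public class RuleJsonSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical rule record; the JEXL expression references the sensor id
        // listed in sensorWindowMap, whose value is the window size in minutes.
        String json = "{"
                + "\"id\": \"rule-001\","
                + "\"name\": \"HighPressureRule\","
                + "\"status\": \"ACTIVE\","
                + "\"equipmentName\": \"Pump-7\","
                + "\"ruleExpression\": \"SENSOR_PRESSURE_01.getValue() > 80.0\","
                + "\"sensorWindowMap\": { \"SENSOR_PRESSURE_01\": 15 }"
                + "}";

        Rule rule = new RuleParser().fromString(json);
        System.out.println(rule.getName() + " -> " + rule.getRuleExpression());
    }
}
```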