├── README.MD ├── X档案 ├── base_docs │ ├── canal-log.md │ ├── drools_模板.drl.txt │ ├── event_detail_map.md │ ├── v4版本联调测试.md │ ├── 动态化改造后的联调测试.md │ └── 条件查询.md └── 设计图 │ ├── clickhouse直接摄取kafka数据示意图.png │ ├── sequence类条件查询全流程.jpg │ ├── 动态规则核心逻辑.png │ ├── 易牛Flink动态规则实时运营系统-架构图.png │ ├── 查询分界点设计方案.png │ ├── 查询缓存分界点设计(2).png │ ├── 查询缓存分界点设计(3).png │ ├── 查询路由分发模块.jpg │ ├── 缓存数据模型设计.png │ ├── 缓存有效性机制.png │ ├── 缓存查询处理核心逻辑设计.png │ ├── 行为次序最大匹配算法-改进版.png │ ├── 行为次序最大匹配算法.png │ ├── 规则动态注入全流程.jpg │ └── 跨界查询的关键逻辑设计.png ├── data_analysis ├── ReadMe.md ├── pom.xml └── src │ └── main │ └── java │ └── cn │ └── doitedu │ └── data_analysis │ └── demo │ └── Demo.java ├── dynamic_rule_engine ├── pom.xml ├── rules_drl │ ├── rule1.drl │ ├── rule1_cnt.sql │ ├── rule1_seq.sql │ ├── rule2.drl │ ├── rule2_cnt.sql │ └── rule2_seq.sql └── src │ ├── main │ ├── java │ │ └── cn │ │ │ └── doitedu │ │ │ └── dynamic_rule │ │ │ ├── benchmark │ │ │ ├── ClickHouseQueryTest.java │ │ │ └── HbaseGetTest.java │ │ │ ├── datagen │ │ │ ├── ActionLogAutoGen.java │ │ │ ├── ActionLogGenOne.java │ │ │ └── UserProfileDataGen.java │ │ │ ├── demos │ │ │ ├── Action.java │ │ │ ├── Applicant.java │ │ │ ├── CanalRecordBean.java │ │ │ ├── DroolsDemo.java │ │ │ ├── DroolsDemo2.java │ │ │ ├── FlinkDroolsCanalDemo.java │ │ │ ├── FlinkDroolsDemo.java │ │ │ ├── ReadDrlToKafka.java │ │ │ ├── ReadDrlToMySql.java │ │ │ ├── RedisDemo.java │ │ │ └── Slf4jLog4jDemo.java │ │ │ ├── engine │ │ │ ├── RuleEngineDemo.java │ │ │ ├── RuleEngineV1.java │ │ │ ├── RuleEngineV2.java │ │ │ ├── RuleEngineV3.java │ │ │ ├── RuleEngineV4.java │ │ │ └── RuleEngineV5.java │ │ │ ├── functions │ │ │ ├── DeviceKeySelector.java │ │ │ ├── Json2BeanMapFunction.java │ │ │ ├── RuleProcessFunction.java │ │ │ ├── RuleProcessFunctionV2.java │ │ │ ├── RuleProcessFunctionV3.java │ │ │ ├── RuleProcessFunctionV4.java │ │ │ ├── RuleProcessFunctionV5.java │ │ │ └── SourceFunctions.java │ │ │ ├── moduletest │ │ │ ├── ActionCountsQueryClickhouseTest.java │ │ │ ├── ActionCountsQueryTest.java │ │ │ ├── ActionSequenceQueryClickhouseTest.java │ │ │ ├── ActionSequenceQueryTest.java │ │ │ └── ProfileQueryTest.java │ │ │ ├── pojo │ │ │ ├── BufferAvailableLevel.java │ │ │ ├── BufferResult.java │ │ │ ├── DroolFact.java │ │ │ ├── LogBean.java │ │ │ ├── ResultBean.java │ │ │ ├── RuleAtomicParam.java │ │ │ ├── RuleCanalBean.java │ │ │ ├── RuleParam.java │ │ │ ├── RuleStateBean.java │ │ │ └── RuleTableRecord.java │ │ │ ├── service │ │ │ ├── BufferManager.java │ │ │ ├── QueryRouterV3.java │ │ │ ├── QueryRouterV4.java │ │ │ ├── UserActionCountQueryService.java │ │ │ ├── UserActionCountQueryServiceClickhouseImpl.java │ │ │ ├── UserActionCountQueryServiceStateImpl.java │ │ │ ├── UserActionSequenceQueryService.java │ │ │ ├── UserActionSequenceQueryServiceClickhouseImpl.java │ │ │ ├── UserActionSequenceQueryServiceStateImpl.java │ │ │ ├── UserProfileQueryService.java │ │ │ └── UserProfileQueryServiceHbaseImpl.java │ │ │ └── utils │ │ │ ├── ClickhouseCountQuerySqlUtil.java │ │ │ ├── ConnectionUtils.java │ │ │ ├── RuleCalcUtil.java │ │ │ ├── RuleOperationHandler.java │ │ │ ├── RuleSimulator.java │ │ │ └── StateDescUtil.java │ └── resources │ │ ├── META-INF │ │ └── kmodule.xml │ │ ├── bak │ │ ├── log4j.properties │ │ └── log4j2.properties │ │ ├── log4j.properties │ │ ├── log4j2.properties │ │ └── rules │ │ ├── flink.drl │ │ └── test.drl │ └── test │ └── java │ ├── ListRemoveDemo.java │ ├── RuleTest.java │ └── Test.java ├── log4jdemo ├── pom.xml └── src │ └── main │ ├── java │ └── cn │ │ └── doitedu │ │ └── log4j │ 
│ └── Demo.java │ └── resources │ └── log4j.properties ├── manageplatform ├── HELP.md ├── mvnw ├── mvnw.cmd ├── pom.xml ├── src │ ├── main │ │ ├── java │ │ │ └── cn │ │ │ │ └── doitedu │ │ │ │ └── yinew │ │ │ │ └── manageplatform │ │ │ │ ├── ManageplatformApplication.java │ │ │ │ ├── controller │ │ │ │ ├── FreeMarkerDemoController.java │ │ │ │ ├── RuleDashboardController.java │ │ │ │ └── RuleHandleController.java │ │ │ │ └── pojo │ │ │ │ ├── Animal.java │ │ │ │ ├── RuleAtomicParam.java │ │ │ │ ├── RuleDefine.java │ │ │ │ └── RuleStatus.java │ │ └── resources │ │ │ ├── application.properties │ │ │ └── templates │ │ │ ├── demo.ftl │ │ │ └── eventCountModel.ftl │ └── test │ │ └── java │ │ └── cn │ │ └── doitedu │ │ └── yinew │ │ └── manageplatform │ │ └── ManageplatformApplicationTests.java └── template │ ├── rule2.drl │ ├── rule2_cnt.sql │ └── rule2_seq.sql ├── pom.xml ├── user_profile ├── pom.xml └── src │ └── main │ └── java │ └── cn │ └── doitedu │ └── userprofile │ └── demo │ └── Demo.java └── 项目介绍.MD /README.MD: -------------------------------------------------------------------------------- 1 | # 易牛鹰眼实时智能营销推送系统 2 | 3 | ## 项目概述 4 | 项目出品人:多易教育(易学课堂) 5 | 本project包含两个模块: 6 | - 实时智能营销推送系统的后端规则引擎 7 | - 管理监控平台 8 | 9 | 规则引擎主要开发技术: 10 | - flink 11 | - kafka 12 | - redis 13 | - clickhouse 14 | - hbase 15 | - drools 16 | 17 | 管理监控平台主要开发技术: 18 | - springboot 19 | - mybatis 20 | - react-js 21 | 22 | 23 | ## 系统启动环境准备 24 | 25 | - 启动kafka 26 | - 启动redis 27 | - 启动hbase 28 | - 启动clickhouse 29 | - 启动用户行为数据模拟器 30 | - 启动canal 31 | 32 | ## 系统启动步骤 33 | - 启动manageplatform管理平台web系统 34 | - 启动dynamic_rule_engine实时flink动态规则引擎 35 | 36 | 37 | ## More 38 | 39 | 购买项目配套视频教程及文档资料等,可在“易学课堂”上方便获得 40 | https://v.51doit.cn 41 | -------------------------------------------------------------------------------- /X档案/base_docs/canal-log.md: -------------------------------------------------------------------------------- 1 | # canal监听binlog后的输出结果格式 2 | 3 | 4 | - DELETE操作 5 | ``` 6 | { 7 | "data": [ 8 | { 9 | "id": "13", 10 | "rule_name": "r1", 11 | "rule_code": "code-new", 12 | "rule_type": "0", 13 | "rule_status": "0", 14 | "eventCountQuerySqls": "sql1", 15 | "eventSeqQuerySql": "sql2", 16 | "create_time": "2021-04-05 23:33:11", 17 | "modify_time": "2021-04-05 23:33:14" 18 | } 19 | ], 20 | "database": "realtimedw", 21 | "es": 1617637022000, 22 | "id": 5, 23 | "isDdl": false, 24 | "mysqlType": { 25 | "id": "int", 26 | "rule_name": "varchar(255)", 27 | "rule_code": "varchar(4096)", 28 | "rule_type": "varchar(255)", 29 | "rule_status": "varchar(255)", 30 | "eventCountQuerySqls": "varchar(4096)", 31 | "eventSeqQuerySql": "varchar(4096)", 32 | "create_time": "datetime", 33 | "modify_time": "datetime" 34 | }, 35 | "old": null, 36 | "pkNames": [ 37 | "id" 38 | ], 39 | "sql": "", 40 | "sqlType": { 41 | "id": 4, 42 | "rule_name": 12, 43 | "rule_code": 12, 44 | "rule_type": 12, 45 | "rule_status": 12, 46 | "eventCountQuerySqls": 12, 47 | "eventSeqQuerySql": 12, 48 | "create_time": 93, 49 | "modify_time": 93 50 | }, 51 | "table": "canal_rule", 52 | "ts": 1617637022998, 53 | "type": "DELETE" 54 | } 55 | ``` 56 | 57 | 58 | - INSERT操作 59 | 60 | ``` 61 | { 62 | "data": [ 63 | { 64 | "id": "2", 65 | "rule_name": "rule2", 66 | "rule_code": "code2" 67 | } 68 | ], 69 | "database": "realtimedw", 70 | "es": 1617699081000, 71 | "id": 3, 72 | "isDdl": false, 73 | "mysqlType": { 74 | "id": "int", 75 | "rule_name": "varchar(255)", 76 | "rule_code": "varchar(255)" 77 | }, 78 | "old": null, 79 | "pkNames": [ 80 | "id" 81 | ], 82 | "sql": "", 83 | 
"sqlType": { 84 | "id": 4, 85 | "rule_name": 12, 86 | "rule_code": 12 87 | }, 88 | "table": "test_drools", 89 | "ts": 1617699082093, 90 | "type": "INSERT" 91 | } 92 | 93 | 94 | ``` -------------------------------------------------------------------------------- /X档案/base_docs/drools_模板.drl.txt: -------------------------------------------------------------------------------- 1 | package rules 2 | 3 | dialect "java" 4 | import java.util.List 5 | import java.util.ArrayList 6 | import java.util.HashMap 7 | import cn.doitedu.dynamic_rule.pojo.DroolRuleFact 8 | import cn.doitedu.dynamic_rule.pojo.RuleParam 9 | import cn.doitedu.dynamic_rule.service.QueryRouterV4 10 | import cn.doitedu.dynamic_rule.pojo.LogBean 11 | 12 | // 规则名称 13 | rule "triggerTest4" 14 | when 15 | //触发事件为 addcart 16 | $f:DroolRuleFact() 17 | $r:RuleParam(triggerParam.eventId == "E") from $f.getRuleParam() 18 | then 19 | QueryRouterV4 queryRouter = $f.getQueryRouter(); 20 | RuleParam ruleParam = $f.getRuleParam(); 21 | LogBean logBean = $f.getLogBean(); 22 | 23 | // 添加各类规则条件 24 | 25 | 26 | // 进行条件判断 27 | if( 28 | queryRouter.profileQuery(logBean, ruleParam) && 29 | queryRouter.sequenceConditionQuery(logBean, ruleParam) && 30 | queryRouter.countConditionQuery(logBean, ruleParam) 31 | ){ 32 | // 设置结果 33 | $f.setMatch(true); 34 | } 35 | end -------------------------------------------------------------------------------- /X档案/base_docs/event_detail_map.md: -------------------------------------------------------------------------------- 1 | ## 创建事件明细表 2 | ``` 3 | allow_experimental_map_type = 1; 4 | create table default.yinew_detail 5 | ( 6 | account String , 7 | appId String , 8 | appVersion String , 9 | carrier String , 10 | deviceId String , 11 | deviceType String , 12 | eventId String , 13 | ip String , 14 | latitude Float64 , 15 | longitude Float64 , 16 | netType String , 17 | osName String , 18 | osVersion String , 19 | properties Map(String,String), 20 | releaseChannel String, 21 | resolution String, 22 | sessionId String, 23 | timeStamp Int64 , 24 | INDEX u (deviceId) TYPE minmax GRANULARITY 3, 25 | INDEX t (timeStamp) TYPE minmax GRANULARITY 3 26 | ) ENGINE = MergeTree() 27 | ORDER BY (deviceId,timeStamp) 28 | ; 29 | ``` 30 | 31 | 32 | ## 创建kafka引擎表 33 | ``` 34 | set allow_experimental_map_type = 1; 35 | drop table default.yinew_detail_kafka; 36 | create table default.yinew_detail_kafka 37 | ( 38 | account String , 39 | appId String , 40 | appVersion String , 41 | carrier String , 42 | deviceId String , 43 | deviceType String , 44 | eventId String , 45 | ip String , 46 | latitude Float64 , 47 | longitude Float64 , 48 | netType String , 49 | osName String , 50 | osVersion String , 51 | properties Map(String,String), 52 | releaseChannel String, 53 | resolution String, 54 | sessionId String, 55 | timeStamp Int64 56 | ) ENGINE = Kafka('hdp01:9092,hdp02:9092,hdp03:9092','yinew_applog','group1','JSONEachRow'); 57 | ``` 58 | 59 | ## 创建物化视图 60 | ``` 61 | create MATERIALIZED VIEW yinew_view TO yinew_detail 62 | as 63 | select 64 | account , 65 | appId , 66 | appVersion , 67 | carrier , 68 | deviceId , 69 | deviceType , 70 | eventId , 71 | ip , 72 | latitude , 73 | longitude , 74 | netType , 75 | osName , 76 | osVersion , 77 | properties , 78 | releaseChannel , 79 | resolution , 80 | sessionId , 81 | timeStamp 82 | from yinew_detail_kafka 83 | ; 84 | ``` -------------------------------------------------------------------------------- /X档案/base_docs/v4版本联调测试.md: 
-------------------------------------------------------------------------------- 1 | # 规则 2 | ``` 3 | - 触发事件: E 4 | - 画像: tag5=v5 5 | - 次数类条件: 6 | - B(p1=v1):10 7 | - D(p2=v2):22 8 | - 序列条件: 9 | - A(p1=v1),C(p2=v2) 10 | ``` 11 | 12 | 13 | # 先找一些满足tag5=v5的hbase中的数据 14 | ``` 15 | scan 'yinew_profile', {LIMIT=>20,FILTER => "(QualifierFilter(=, 'binary:tag5')) AND (ValueFilter(=,'binary:v1'))"} 16 | ``` 17 | 挑选了一个用户:000034 18 | 19 | # 然后到clickhouse中去寻找上述用户中做过B/D事件的 20 | ``` 21 | select 22 | deviceId, 23 | count(1) as cnt 24 | from yinew_detail 25 | where 26 | eventId='B' and properties['p1']='v1' 27 | group by deviceId 28 | having count()<10 29 | ``` 30 | 31 | 32 | > B事件: │ 000034 │ 9 │ 33 | > D事件: │ 000034 │ 22 │ 34 | 35 | 36 | # 然后看000034这个人 A-C序列的情况 37 | ``` 38 | SELECT 39 | deviceId, 40 | sequenceMatch('.*(?1).*(?2).*')( 41 | toDateTime(`timeStamp`), 42 | eventId = 'A' and properties['p1']='v1', 43 | eventId = 'C' and properties['p2']='v2' 44 | ) as isMatch2, 45 | 46 | sequenceMatch('.*(?1).*')( 47 | toDateTime(`timeStamp`), 48 | eventId = 'A' and properties['p1']='v1', 49 | eventId = 'C' and properties['p2']='v2' 50 | ) as isMatch1 51 | 52 | from yinew_detail 53 | where 54 | deviceId = '000034' 55 | and 56 | timeStamp >= 0 57 | and 58 | timeStamp <= 5235295739479 59 | and 60 | ( 61 | (eventId='A' and properties['p1']='v1') 62 | or (eventId = 'C' and properties['p2']='v2') 63 | ) 64 | group by deviceId; 65 | 66 | ``` 67 | 68 | ``` 69 | ┌─deviceId─┬─isMatch2─┬─isMatch1─┐ 70 | │ 000034 │ 1 │ 1 │ 71 | └──────────┴──────────┴──────────┘ 72 | ``` 73 | # 总结: 74 | 000034用户,B事件差1次,D事件已满足, A-C序列已满足 75 | 76 | 77 | # 开始测试 78 | 先生成一条000034用户的E事件 79 | 观察输出日志: 80 | ``` 81 | 82 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,画像查询条件:{tag5=v1},耗时:1285,结果为:true 83 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,seq缓存查询完毕,avl状态:UN_AVL,value:null 84 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,seq跨界-近期碰运气,结果step:0,条件step:2, 85 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,seq跨界-近期碰运气失败,恢复step:0 86 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,seq跨界-clickhouse耗时:13,结果step:2,条件step:2, 87 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 用户:000034,count缓存avl:UN_AVL,key:000034-B-p1-v1,value:null,条件阈值:10,缓存start:null,缓存end:null,条件初始start:0,条件初始end:9223372036854775807,更新后start:0,更新后end:9223372036854775807,更新后条件size:2 88 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 用户:000034,count缓存avl:UN_AVL,key:000034-D-p2-v2,value:null,条件阈值:22,缓存start:null,缓存end:null,条件初始start:1617094800000,条件初始end:9223372036854775807,更新后start:1617094800000,更新后end:9223372036854775807,更新后条件size:2 89 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count条件组划分,分界点:1617678000000,近期组size:0,跨界组size:2,远期组size:0 90 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count跨界查询-state,start:1617678000000,end:9223372036854775807,结果:0,条件EID:B,条件props:{p1=v1},阈值:10 91 | 2021-04-06 12:18:50 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count跨界查询-clickhouse,耗时:8,结果插入缓存,key:000034-B-p1-v1,value:9,start:0,end:1617682728523,条件阈值:10 92 | ``` 93 | 94 | 再生成一次E事件 95 | 观察输出日志(A-C序列step应该缓存完全匹配,B事件次数缓存应该部分匹配) 96 | ``` 97 | 2021-04-06 12:19:54 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,画像查询条件:{tag5=v1},耗时:2,结果为:true 98 | 2021-04-06 12:19:54 [WARN] BufferManager: 000034-A-p1-v1-C-p2-v2 缓存完全匹配,value为: 2 99 | 2021-04-06 12:19:54 [WARN] BufferManager: count缓存部分匹配 100 | 2021-04-06 12:19:54 [WARN] QueryRouterV4: 
用户:000034,count缓存avl:PARTIAL_AVL,key:000034-B-p1-v1,value:9,条件阈值:10,缓存start:0,缓存end:1617682728523,条件初始start:0,条件初始end:9223372036854775807,更新后start:1617682728523,更新后end:9223372036854775807,更新后条件size:2 101 | 2021-04-06 12:19:54 [WARN] QueryRouterV4: 用户:000034,count缓存avl:UN_AVL,key:000034-D-p2-v2,value:null,条件阈值:22,缓存start:null,缓存end:null,条件初始start:1617094800000,条件初始end:9223372036854775807,更新后start:1617094800000,更新后end:9223372036854775807,更新后条件size:2 102 | 2021-04-06 12:19:54 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count条件组划分,分界点:1617678000000,近期组size:1,跨界组size:1,远期组size:0 103 | 2021-04-06 12:19:54 [WARN] QueryRouterV4: count近期组条件,查询结果插入缓存,key:000034-B-p1-v1,value:9,条件阈值:10,buffer_start:0,buffer_end:1617682794478 104 | 2021-04-06 12:19:54 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count近期条件组size:1,查询结果:false 105 | ``` 106 | 107 | 再生成一次B事件 108 | 观察输出日志: 应该什么日志都没有 109 | 110 | 111 | 再生成一个E事件 112 | 观察输出日志(B事件应该会达到阈值,D事件也开始查询并达到阈值,最终规则匹配结果应该输出) 113 | ``` 114 | 2021-04-06 12:19:54 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count近期条件组size:1,查询结果:false 115 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,画像查询条件:{tag5=v1},耗时:2,结果为:true 116 | 2021-04-06 12:21:26 [WARN] BufferManager: 000034-A-p1-v1-C-p2-v2 缓存完全匹配,value为: 2 117 | 2021-04-06 12:21:26 [WARN] BufferManager: count缓存部分匹配 118 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: 用户:000034,count缓存avl:PARTIAL_AVL,key:000034-B-p1-v1,value:9,条件阈值:10,缓存start:0,缓存end:1617682794478,条件初始start:0,条件初始end:9223372036854775807,更新后start:1617682794478,更新后end:9223372036854775807,更新后条件size:2 119 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: 用户:000034,count缓存avl:UN_AVL,key:000034-D-p2-v2,value:null,条件阈值:22,缓存start:null,缓存end:null,条件初始start:1617094800000,条件初始end:9223372036854775807,更新后start:1617094800000,更新后end:9223372036854775807,更新后条件size:2 120 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count条件组划分,分界点:1617678000000,近期组size:1,跨界组size:1,远期组size:0 121 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: count近期组条件,查询结果插入缓存,key:000034-B-p1-v1,value:10,条件阈值:10,buffer_start:0,buffer_end:1617682886502 122 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count近期条件组size:1,查询结果:true 123 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count跨界查询-state,start:1617678000000,end:9223372036854775807,结果:0,条件EID:D,条件props:{p2=v2},阈值:22 124 | 2021-04-06 12:21:26 [WARN] QueryRouterV4: 规则:test_rule_1,用户:000034,count跨界查询-clickhouse,耗时:10,结果插入缓存,key:000034-D-p2-v2,value:22,start:1617094800000,end:1617682886502,条件阈值:22 125 | 5> ResultBean(ruleId=test_rule_1, deviceId=000034, timeStamp=1617682886502) 126 | ``` -------------------------------------------------------------------------------- /X档案/base_docs/动态化改造后的联调测试.md: -------------------------------------------------------------------------------- 1 | # 创建kafka中的各topic 2 | 3 | - 事件日志topic: yinew_applog 4 | ``` 5 | bin/kafka-topics.sh --create --topic yinew_applog --replication-factor 1 --partitions 3 --zookeeper hdp01:2181 6 | ``` 7 | 8 | 9 | - 规则数据topic: yinew_drl_rule 10 | ``` 11 | bin/kafka-topics.sh --create --topic yinew_drl_rule --replication-factor 1 --partitions 1 --zookeeper hdp01:2181 12 | ``` 13 | 14 | 15 | # 修改canal输出目标topic为: yinew_drl_rule 16 | ``` 17 | [root@hdp01 example]# pwd 18 | /opt/apps/canal/conf/example 19 | [root@hdp01 example]# vi instance.properties 20 | 21 | # mq config 22 | canal.mq.topic=yinew_drl_rule 23 | ``` 24 | 25 | # 建hbase中的画像标签数据表 26 | hbase> create 'yinew_profile','f' 27 | 28 | - mysql中建规则表 29 | ``` 30 | CREATE 
TABLE `yinew_drl_rule` ( 31 | `id` int(11) NOT NULL AUTO_INCREMENT, 32 | `rule_name` varchar(255) DEFAULT NULL, 33 | `rule_code` varchar(4096) DEFAULT NULL, 34 | `rule_status` int(11) DEFAULT NULL, 35 | `rule_type` varchar(255) DEFAULT NULL, 36 | `rule_version` varchar(255) DEFAULT NULL, 37 | `cnt_sqls` varchar(4096) DEFAULT NULL, 38 | `seq_sqls` varchar(4096) DEFAULT NULL, 39 | `rule_creator` varchar(255) DEFAULT NULL, 40 | `rule_auditor` varchar(255) DEFAULT NULL, 41 | `create_time` datetime DEFAULT NULL, 42 | `update_time` datetime DEFAULT NULL, 43 | PRIMARY KEY (`id`) 44 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 45 | ``` 46 | 47 | # clickhouse中的表创建 48 | ## 创建事件明细表 49 | ``` 50 | allow_experimental_map_type = 1; 51 | create table default.yinew_detail 52 | ( 53 | account String , 54 | appId String , 55 | appVersion String , 56 | carrier String , 57 | deviceId String , 58 | deviceType String , 59 | eventId String , 60 | ip String , 61 | latitude Float64 , 62 | longitude Float64 , 63 | netType String , 64 | osName String , 65 | osVersion String , 66 | properties Map(String,String), 67 | releaseChannel String, 68 | resolution String, 69 | sessionId String, 70 | timeStamp Int64 , 71 | INDEX u (deviceId) TYPE minmax GRANULARITY 3, 72 | INDEX t (timeStamp) TYPE minmax GRANULARITY 3 73 | ) ENGINE = MergeTree() 74 | ORDER BY (deviceId,timeStamp) 75 | ; 76 | ``` 77 | 78 | 79 | # 创建kafka引擎表 80 | ``` 81 | set allow_experimental_map_type = 1; 82 | drop table default.yinew_detail_kafka; 83 | create table default.yinew_detail_kafka 84 | ( 85 | account String , 86 | appId String , 87 | appVersion String , 88 | carrier String , 89 | deviceId String , 90 | deviceType String , 91 | eventId String , 92 | ip String , 93 | latitude Float64 , 94 | longitude Float64 , 95 | netType String , 96 | osName String , 97 | osVersion String , 98 | properties Map(String,String), 99 | releaseChannel String, 100 | resolution String, 101 | sessionId String, 102 | timeStamp Int64 103 | ) ENGINE = Kafka('hdp01:9092,hdp02:9092,hdp03:9092','yinew_applog','group1','JSONEachRow'); 104 | ``` 105 | 106 | ## 创建物化视图 107 | ``` 108 | create MATERIALIZED VIEW yinew_view TO yinew_detail 109 | as 110 | select 111 | account , 112 | appId , 113 | appVersion , 114 | carrier , 115 | deviceId , 116 | deviceType , 117 | eventId , 118 | ip , 119 | latitude , 120 | longitude , 121 | netType , 122 | osName , 123 | osVersion , 124 | properties , 125 | releaseChannel , 126 | resolution , 127 | sessionId , 128 | timeStamp 129 | from yinew_detail_kafka 130 | ; 131 | ``` 132 | 133 | 134 | 135 | -------------------------------------------------------------------------------- /X档案/base_docs/条件查询.md: -------------------------------------------------------------------------------- 1 | ## 查询事件 2 | ``` 3 | select 4 | deviceId, 5 | count(1) as cnt 6 | from event_detail 7 | where deviceId='nJBTQejDxDmc' and eventId='adShow' and properties['adId']='14' 8 | and timeStamp between 1615900460000 and 1615900580000 9 | group by deviceId 10 | ; 11 | ``` 12 | ## 事件序列查询sql 13 | ``` 14 | /* 15 | 16 | ┌─deviceId─┬─isMatch3─┬─isMatch2─┬─isMatch1─┐ 17 | │ 000001 │ 0 │ 1 │ 1 │ 18 | └──────────┴──────────┴──────────┴──────────┘ 19 | */ 20 | ``` 21 | 22 | 23 | ``` 24 | SELECT 25 | deviceId, 26 | sequenceMatch('.*(?1).*(?2).*(?3)')( 27 | toDateTime(`timeStamp`), 28 | eventId = 'Y' and properties['p1']='v1', 29 | eventId = 'B' and properties['p6']='v4', 30 | eventId = 'O' and properties['p1']='vv' 31 | ) as isMatch3, 32 | 33 | sequenceMatch('.*(?1).*(?2).*')( 34 | 
toDateTime(`timeStamp`), 35 | eventId = 'Y' and properties['p1']='v1', 36 | eventId = 'B' and properties['p6']='v4', 37 | eventId = 'O' and properties['p1']='vv' 38 | ) as isMatch2, 39 | 40 | sequenceMatch('.*(?1).*')( 41 | toDateTime(`timeStamp`), 42 | eventId = 'Y' and properties['p1']='v1', 43 | eventId = 'B' and properties['p6']='v4', 44 | eventId = 'O' and properties['p1']='vv' 45 | ) as isMatch1 46 | 47 | from yinew_detail 48 | where 49 | deviceId = '000001' 50 | and 51 | timeStamp >= 0 52 | and 53 | timeStamp <= 5235295739479 54 | and 55 | ( 56 | (eventId='Y' and properties['p1']='v1') 57 | or (eventId = 'B' and properties['p6']='v4') 58 | or (eventId = 'O' and properties['p1']='vv') 59 | ) 60 | group by deviceId; 61 | 62 | ``` -------------------------------------------------------------------------------- /X档案/设计图/clickhouse直接摄取kafka数据示意图.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/clickhouse直接摄取kafka数据示意图.png -------------------------------------------------------------------------------- /X档案/设计图/sequence类条件查询全流程.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/sequence类条件查询全流程.jpg -------------------------------------------------------------------------------- /X档案/设计图/动态规则核心逻辑.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/动态规则核心逻辑.png -------------------------------------------------------------------------------- /X档案/设计图/易牛Flink动态规则实时运营系统-架构图.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/易牛Flink动态规则实时运营系统-架构图.png -------------------------------------------------------------------------------- /X档案/设计图/查询分界点设计方案.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/查询分界点设计方案.png -------------------------------------------------------------------------------- /X档案/设计图/查询缓存分界点设计(2).png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/查询缓存分界点设计(2).png -------------------------------------------------------------------------------- /X档案/设计图/查询缓存分界点设计(3).png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/查询缓存分界点设计(3).png -------------------------------------------------------------------------------- /X档案/设计图/查询路由分发模块.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/查询路由分发模块.jpg -------------------------------------------------------------------------------- /X档案/设计图/缓存数据模型设计.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/缓存数据模型设计.png -------------------------------------------------------------------------------- /X档案/设计图/缓存有效性机制.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/缓存有效性机制.png -------------------------------------------------------------------------------- /X档案/设计图/缓存查询处理核心逻辑设计.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/缓存查询处理核心逻辑设计.png -------------------------------------------------------------------------------- /X档案/设计图/行为次序最大匹配算法-改进版.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/行为次序最大匹配算法-改进版.png -------------------------------------------------------------------------------- /X档案/设计图/行为次序最大匹配算法.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/行为次序最大匹配算法.png -------------------------------------------------------------------------------- /X档案/设计图/规则动态注入全流程.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/规则动态注入全流程.jpg -------------------------------------------------------------------------------- /X档案/设计图/跨界查询的关键逻辑设计.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/X档案/设计图/跨界查询的关键逻辑设计.png -------------------------------------------------------------------------------- /data_analysis/ReadMe.md: -------------------------------------------------------------------------------- 1 | # 易牛数据分析系统 -------------------------------------------------------------------------------- /data_analysis/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | yinew_marketing 7 | cn.doitedu 8 | 1.0 9 | 10 | 4.0.0 11 | 12 | data_analysis 13 | 14 | 15 | -------------------------------------------------------------------------------- /data_analysis/src/main/java/cn/doitedu/data_analysis/demo/Demo.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.data_analysis.demo; 2 | 3 | public class Demo { 4 | } 5 | -------------------------------------------------------------------------------- /dynamic_rule_engine/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | yinew_marketing 7 | cn.doitedu 8 | 1.0 9 | 10 | 4.0.0 11 | 12 | dynamic_rule_engine 13 | 14 | 15 | 16 | 17 | 18 | org.apache.commons 19 | commons-lang3 20 | 3.11 21 | 22 | 23 | 24 | org.apache.kafka 25 | kafka-clients 26 | 2.0.1 27 | 28 | 29 | 30 | org.apache.hbase 31 | hbase-client 32 | 2.0.6 33 | 34 | 35 | org.slf4j 36 | slf4j-log4j12 37 | 38 | 39 | 40 | 41 | 42 | org.apache.hbase 43 | hbase-common 44 | 2.0.6 45 | 46 | 47 | log4j 48 | log4j 49 | 50 | 51 | 52 | 53 | 54 | 55 | org.apache.flink 56 | flink-streaming-java_2.11 57 | 
1.12.0 58 | 59 | 60 | 61 | org.apache.flink 62 | flink-clients_2.11 63 | 1.12.0 64 | 65 | 66 | 67 | org.apache.flink 68 | flink-connector-kafka_2.11 69 | 1.12.0 70 | 71 | 72 | 73 | org.apache.flink 74 | flink-runtime-web_2.11 75 | 1.12.0 76 | 77 | 78 | 79 | org.apache.flink 80 | flink-connector-jdbc_2.11 81 | 1.12.0 82 | 83 | 84 | 85 | mysql 86 | mysql-connector-java 87 | 8.0.16 88 | 89 | 90 | 95 | 96 | 97 | ru.yandex.clickhouse 98 | clickhouse-jdbc 99 | 0.3.0 100 | 101 | 102 | 103 | redis.clients 104 | jedis 105 | 3.3.0 106 | 107 | 108 | 109 | 110 | 111 | org.slf4j 112 | slf4j-api 113 | 1.7.25 114 | 115 | 116 | 117 | 118 | org.apache.logging.log4j 119 | log4j-slf4j-impl 120 | 2.8.2 121 | 122 | 123 | 124 | 125 | org.apache.logging.log4j 126 | log4j-api 127 | 2.8.2 128 | 129 | 130 | org.apache.logging.log4j 131 | log4j-core 132 | 2.8.2 133 | 134 | 135 | org.drools 136 | drools-compiler 137 | 7.23.0.Final 138 | 139 | 140 | 141 | 142 | 143 | -------------------------------------------------------------------------------- /dynamic_rule_engine/rules_drl/rule1.drl: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rules.rules 2 | import cn.doitedu.dynamic_rule.pojo.DroolFact 3 | import cn.doitedu.dynamic_rule.pojo.LogBean 4 | import cn.doitedu.dynamic_rule.pojo.RuleParam 5 | import cn.doitedu.dynamic_rule.service.QueryRouterV4 6 | import java.util.HashMap 7 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam 8 | import java.util.List 9 | import java.util.ArrayList 10 | 11 | rule "rule1" 12 | 13 | when 14 | $d:DroolFact() 15 | $b:LogBean(eventId == "E") from $d.logBean 16 | then 17 | RuleParam ruleParam = $d.getRuleParam(); 18 | ruleParam.setRuleName("rule1"); 19 | QueryRouterV4 queryRouter = $d.getQueryRouterV4(); 20 | 21 | // 填充本规则的画像条件 22 | HashMap profileParams = new HashMap<>(); 23 | profileParams.put("tag5","v1"); 24 | ruleParam.setUserProfileParams(profileParams); 25 | 26 | // 填充本规则的count类条件(直接从ruleparam中取出cnt条件list,里面已经拥有sql了) 27 | List countParams = ruleParam.getUserActionCountParams(); 28 | 29 | RuleAtomicParam param0 = countParams.get(0); 30 | param0.setEventId("B"); 31 | HashMap props0 = new HashMap<>(); 32 | props0.put("p1","v1"); 33 | param0.setProperties(props0); 34 | param0.setCnt(8); 35 | param0.setOriginStart(0); 36 | param0.setOriginEnd(Long.MAX_VALUE); 37 | 38 | 39 | RuleAtomicParam param1 = countParams.get(1); 40 | param1.setEventId("D"); 41 | HashMap props1 = new HashMap<>(); 42 | props1.put("p2","v2"); 43 | param1.setProperties(props1); 44 | param1.setCnt(8); 45 | param1.setOriginStart(0); 46 | param1.setOriginEnd(Long.MAX_VALUE); 47 | 48 | 49 | // 填充本规则的次序列条件 50 | ArrayList seqParams = new ArrayList<>(); 51 | 52 | RuleAtomicParam seq0 = new RuleAtomicParam(); 53 | seq0.setEventId("A"); 54 | HashMap seq0prop = new HashMap<>(); 55 | seq0prop.put("p1","v1"); 56 | seq0.setProperties(seq0prop); 57 | seq0.setOriginStart(0); 58 | seq0.setOriginEnd(Long.MAX_VALUE); 59 | 60 | 61 | RuleAtomicParam seq1 = new RuleAtomicParam(); 62 | seq1.setEventId("C"); 63 | HashMap seq1prop = new HashMap<>(); 64 | seq1prop.put("p2","v2"); 65 | seq1.setProperties(seq1prop); 66 | seq1.setOriginStart(0); 67 | seq1.setOriginEnd(Long.MAX_VALUE); 68 | 69 | seqParams.add(seq0); 70 | seqParams.add(seq1); 71 | 72 | ruleParam.setUserActionSequenceParams(seqParams); 73 | 74 | // 执行匹配查询计算 75 | if( 76 | queryRouter.profileQuery($b, ruleParam) 77 | && 78 | queryRouter.sequenceConditionQuery($b, ruleParam) 79 | && 80 | 
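// Note: all three condition groups (profile, sequence, count) must pass for the rule to match; && short-circuits, so the count query below is skipped once an earlier condition already failed.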
queryRouter.countConditionQuery($b, ruleParam) 81 | ){ 82 | // 设置结果 83 | $d.setMatch(true); 84 | } 85 | 86 | end -------------------------------------------------------------------------------- /dynamic_rule_engine/rules_drl/rule1_cnt.sql: -------------------------------------------------------------------------------- 1 | select 2 | deviceId, 3 | count(1) as cnt 4 | from yinew_detail 5 | where deviceId = ? and eventId = 'B' and properties['p1']='v1' 6 | and timeStamp between ? and ? 7 | group by deviceId 8 | ; 9 | select 10 | deviceId, 11 | count(1) as cnt 12 | from yinew_detail 13 | where deviceId = ? and eventId = 'D' and properties['p2']='v2' 14 | and timeStamp between ? and ? 15 | group by deviceId 16 | ; -------------------------------------------------------------------------------- /dynamic_rule_engine/rules_drl/rule1_seq.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | deviceId, 3 | sequenceMatch('.*(?1).*(?2).*')( 4 | toDateTime(`timeStamp`), 5 | eventId = 'A' and properties['p1']='v1', 6 | eventId = 'C' and properties['p2']='v2' 7 | ) as isMatch2, 8 | sequenceMatch('.*(?1).*')( 9 | toDateTime(`timeStamp`), 10 | eventId = 'A' and properties['p1']='v1', 11 | eventId = 'C' and properties['p2']='v2' 12 | ) as isMatch1 13 | from yinew_detail 14 | where 15 | deviceId = ? 16 | and 17 | timeStamp BETWEEN ? AND ? 18 | and 19 | ( 20 | (eventId='A' and properties['p1']='v1') 21 | or (eventId = 'C' and properties['p2']='v2') 22 | ) 23 | group by deviceId; -------------------------------------------------------------------------------- /dynamic_rule_engine/rules_drl/rule2.drl: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rules.rules 2 | import cn.doitedu.dynamic_rule.pojo.DroolFact 3 | import cn.doitedu.dynamic_rule.pojo.LogBean 4 | import cn.doitedu.dynamic_rule.pojo.RuleParam 5 | import cn.doitedu.dynamic_rule.service.QueryRouterV4 6 | import java.util.HashMap 7 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam 8 | import java.util.List 9 | import java.util.ArrayList 10 | 11 | rule "rule2" 12 | 13 | when 14 | $d:DroolFact() 15 | $b:LogBean(eventId == "F") from $d.logBean 16 | then 17 | RuleParam ruleParam = $d.getRuleParam(); 18 | //ruleParam.setRuleName("rule2"); 19 | QueryRouterV4 queryRouter = $d.getQueryRouterV4(); 20 | 21 | // 填充本规则的画像条件 22 | HashMap profileParams = new HashMap<>(); 23 | profileParams.put("tag7","v2"); 24 | profileParams.put("tag3","v1"); 25 | ruleParam.setUserProfileParams(profileParams); 26 | 27 | // 填充本规则的count类条件(直接从ruleparam中取出cnt条件list,里面已经拥有sql了) 28 | List countParams = ruleParam.getUserActionCountParams(); 29 | 30 | RuleAtomicParam param0 = countParams.get(0); 31 | param0.setEventId("H"); 32 | HashMap props0 = new HashMap<>(); 33 | props0.put("p1","v1"); 34 | props0.put("p2","v3"); 35 | param0.setProperties(props0); 36 | param0.setCnt(8); 37 | param0.setOriginStart(0); 38 | param0.setOriginEnd(Long.MAX_VALUE); 39 | 40 | 41 | // 填充本规则的次序列条件 42 | ArrayList seqParams = new ArrayList<>(); 43 | 44 | RuleAtomicParam seq0 = new RuleAtomicParam(); 45 | seq0.setEventId("A"); 46 | HashMap seq0prop = new HashMap<>(); 47 | seq0prop.put("p1","v1"); 48 | seq0.setProperties(seq0prop); 49 | seq0.setOriginStart(0); 50 | seq0.setOriginEnd(Long.MAX_VALUE); 51 | 52 | 53 | RuleAtomicParam seq1 = new RuleAtomicParam(); 54 | seq1.setEventId("C"); 55 | HashMap seq1prop = new HashMap<>(); 56 | seq1prop.put("p2","v2"); 57 | seq1.setProperties(seq1prop); 58 | 
seq1.setOriginStart(0); 59 | seq1.setOriginEnd(Long.MAX_VALUE); 60 | 61 | seqParams.add(seq0); 62 | seqParams.add(seq1); 63 | 64 | ruleParam.setUserActionSequenceParams(seqParams); 65 | 66 | // 执行匹配查询计算 67 | if( 68 | queryRouter.profileQuery($b, ruleParam) 69 | && 70 | queryRouter.sequenceConditionQuery($b, ruleParam) 71 | && 72 | queryRouter.countConditionQuery($b, ruleParam) 73 | ){ 74 | // 设置结果 75 | $d.setMatch(true); 76 | } 77 | 78 | end -------------------------------------------------------------------------------- /dynamic_rule_engine/rules_drl/rule2_cnt.sql: -------------------------------------------------------------------------------- 1 | select 2 | deviceId, 3 | count(1) as cnt 4 | from yinew_detail 5 | where deviceId = ? and eventId = 'H' and properties['p1']='v1' 6 | and timeStamp between ? and ? 7 | group by deviceId -------------------------------------------------------------------------------- /dynamic_rule_engine/rules_drl/rule2_seq.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | deviceId, 3 | sequenceMatch('.*(?1).*(?2).*')( 4 | toDateTime(`timeStamp`), 5 | eventId = 'A' and properties['p1']='v1', 6 | eventId = 'C' and properties['p2']='v2' 7 | ) as isMatch2, 8 | sequenceMatch('.*(?1).*')( 9 | toDateTime(`timeStamp`), 10 | eventId = 'A' and properties['p1']='v1', 11 | eventId = 'C' and properties['p2']='v2' 12 | ) as isMatch1 13 | from yinew_detail 14 | where 15 | deviceId = ? 16 | and 17 | timeStamp BETWEEN ? AND ? 18 | and 19 | ( 20 | (eventId='A' and properties['p1']='v1') 21 | or (eventId = 'C' and properties['p2']='v2') 22 | ) 23 | group by deviceId; -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/benchmark/ClickHouseQueryTest.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.benchmark; 2 | 3 | import cn.doitedu.dynamic_rule.utils.ConnectionUtils; 4 | import org.apache.commons.collections.CollectionUtils; 5 | import org.apache.commons.lang3.StringUtils; 6 | 7 | import java.sql.Connection; 8 | import java.sql.PreparedStatement; 9 | import java.sql.ResultSet; 10 | import java.util.ArrayList; 11 | import java.util.Collections; 12 | 13 | public class ClickHouseQueryTest { 14 | 15 | public static void main(String[] args) throws Exception { 16 | String sql = "select \n" + 17 | " deviceId,count() as cnt \n" + 18 | " from yinew_detail \n" + 19 | " where deviceId= ?\n" + 20 | " and \n" + 21 | " eventId = 'W' \n" + 22 | " and \n" + 23 | " timeStamp >= 0 and timeStamp <=9223372036854775807\n" + 24 | " and properties['p2']='v3'\n" + 25 | " group by deviceId"; 26 | 27 | ArrayList ss = new ArrayList<>(); 28 | ArrayList ee = new ArrayList<>(); 29 | 30 | for (int i = 0; i < 1; i++) { 31 | new Thread(new Runnable() { 32 | Connection conn = ConnectionUtils.getClickhouseConnection(); 33 | PreparedStatement stmt = conn.prepareStatement(sql); 34 | 35 | @Override 36 | public void run() { 37 | try { 38 | long s = System.currentTimeMillis(); 39 | ss.add(s); 40 | for (int i = 0; i < 1000; i++) { 41 | stmt.setString(1, StringUtils.leftPad(i + "", 6, "0")); 42 | ResultSet resultSet = stmt.executeQuery(); 43 | while (resultSet.next()) { 44 | long cnt = resultSet.getLong(2); 45 | } 46 | } 47 | long e = System.currentTimeMillis(); 48 | System.out.println(e-s); 49 | ee.add(e); 50 | } catch (Exception e) { 51 | } 52 | } 53 | }).start(); 54 | } 55 | 56 | Thread.sleep(8000); 57 | 58 | 
Collections.sort(ss); 59 | Collections.sort(ee); 60 | 61 | //System.out.println(ee.get(ee.size()-1) - ss.get(0)); 62 | 63 | 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/benchmark/HbaseGetTest.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.benchmark; 2 | 3 | import org.apache.commons.lang3.RandomUtils; 4 | import org.apache.commons.lang3.StringUtils; 5 | import org.apache.hadoop.conf.Configuration; 6 | import org.apache.hadoop.hbase.TableName; 7 | import org.apache.hadoop.hbase.client.*; 8 | import org.apache.hadoop.hbase.util.Bytes; 9 | 10 | import java.io.IOException; 11 | /*** 12 | * @author 涛哥 13 | * @nick_name "deep as the sea" 14 | * @contack qq:657270652 wx:doit_edu 15 | * @site www.51doit.cn 16 | * @date 2021/3/29 17 | * @desc hbase查询性能简单测试代码 18 | **/ 19 | public class HbaseGetTest { 20 | public static void main(String[] args) throws IOException { 21 | 22 | 23 | Configuration conf = new Configuration(); 24 | conf.set("hbase.zookeeper.quorum", "hdp01:2181,hdp02:2181,hdp03:2181"); 25 | 26 | Connection conn = ConnectionFactory.createConnection(conf); 27 | Table table = conn.getTable(TableName.valueOf("yinew_profile")); 28 | 29 | 30 | long s = System.currentTimeMillis(); 31 | for(int i=0;i<1000;i++){ 32 | Get get = new Get(StringUtils.leftPad(RandomUtils.nextInt(1, 900000) + "", 6, "0").getBytes()); 33 | int i1 = RandomUtils.nextInt(1, 100); 34 | int i2 = RandomUtils.nextInt(1, 100); 35 | int i3 = RandomUtils.nextInt(1, 100); 36 | get.addColumn("f".getBytes(), Bytes.toBytes("tag"+i1)); 37 | get.addColumn("f".getBytes(), Bytes.toBytes("tag"+i2)); 38 | get.addColumn("f".getBytes(), Bytes.toBytes("tag"+i3)); 39 | 40 | 41 | Result result = table.get(get); 42 | byte[] v1 = result.getValue("f".getBytes(), Bytes.toBytes("tag" + i1)); 43 | byte[] v2 = result.getValue("f".getBytes(), Bytes.toBytes("tag" + i2)); 44 | byte[] v3 = result.getValue("f".getBytes(), Bytes.toBytes("tag" + i3)); 45 | } 46 | long e = System.currentTimeMillis(); 47 | 48 | System.out.println(e-s); 49 | conn.close(); 50 | 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/datagen/ActionLogAutoGen.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.datagen; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import com.alibaba.fastjson.JSON; 5 | import org.apache.commons.lang3.RandomUtils; 6 | import org.apache.commons.lang3.StringUtils; 7 | import org.apache.commons.lang3.RandomStringUtils; 8 | import org.apache.kafka.clients.producer.KafkaProducer; 9 | import org.apache.kafka.clients.producer.ProducerRecord; 10 | 11 | import java.util.HashMap; 12 | import java.util.Properties; 13 | 14 | /** 15 | * @author 涛哥 16 | * @nick_name "deep as the sea" 17 | * @contact qq:657270652 wx:doit_edu 18 | * @site www.doitedu.cn 19 | * @date 2021-03-27 20 | * @desc 行为日志生成模拟器 21 | * 22 | * { 23 | * "account": "Vz54E9Ya", 24 | * "appId": "cn.doitedu.app1", 25 | * "appVersion": "3.4", 26 | * "carrier": "中国移动", 27 | * "deviceId": "WEISLD0235S0934OL", 28 | * "deviceType": "MI-6", 29 | * "ip": "24.93.136.175", 30 | * "latitude": 42.09287620431088, 31 | * "longitude": 79.42106825764643, 32 | * "netType": "WIFI", 33 | * "osName": "android", 34 | * "osVersion": "6.5", 35 | * 
"releaseChannel": "豌豆荚", 36 | * "resolution": "1024*768", 37 | * "sessionId": "SE18329583458", 38 | * "timeStamp": 1594534406220 39 | * "eventId": "productView", 40 | * "properties": { 41 | * "pageId": "646", 42 | * "productId": "157", 43 | * "refType": "4", 44 | * "refUrl": "805", 45 | * "title": "爱得堡 男靴中高帮马丁靴秋冬雪地靴 H1878 复古黄 40码", 46 | * "url": "https://item.jd.com/36506691363.html", 47 | * "utm_campain": "4", 48 | * "utm_loctype": "1", 49 | * "utm_source": "10" 50 | * } 51 | * 52 | * } 53 | * 54 | * 55 | * kafka中要先创建好topic 56 | * [root@hdp01 kafka_2.11-2.0.0]# bin/kafka-topics.sh --create --topic yinew_applog --partitions 2 --replication-factor 1 --zookeeper hdp01:2181,hdp02:2181,hdp03:2181 57 | * 58 | * 创建完后,检查一下是否创建成功: 59 | * [root@hdp01 kafka_2.11-2.0.0]# bin/kafka-topics.sh --list --zookeeper hdp01:2181 60 | * 61 | */ 62 | public class ActionLogAutoGen { 63 | public static void main(String[] args) throws InterruptedException { 64 | Properties props = new Properties(); 65 | props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092"); 66 | props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 67 | props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 68 | // 创建多个线程,并行执行 69 | genBatch(props); 70 | } 71 | 72 | private static void genBatch(Properties props) { 73 | for(int i=0;i<10;i++) { 74 | new Thread(new Runnable() { 75 | @Override 76 | public void run() { 77 | // 构造一个kafka生产者客户端 78 | KafkaProducer kafkaProducer = new KafkaProducer<>(props); 79 | while (true) { 80 | 81 | LogBean logBean = getLogBean(); 82 | // 将日志对象,转成JSON 83 | String log = JSON.toJSONString(logBean); 84 | // 写入kafka的topic: yinew_applog 85 | ProducerRecord record = new ProducerRecord<>("yinew_applog", log); 86 | kafkaProducer.send(record); 87 | try { 88 | Thread.sleep(RandomUtils.nextInt(100, 200)); 89 | } catch (InterruptedException e) { 90 | e.printStackTrace(); 91 | } 92 | 93 | } 94 | } 95 | }).start(); 96 | } 97 | } 98 | 99 | 100 | public static LogBean getLogBean(){ 101 | LogBean logBean = new LogBean(); 102 | // 生成的账号形如: 004078 103 | String account = StringUtils.leftPad(RandomUtils.nextInt(1, 10000) + "", 6, "0"); 104 | logBean.setAccount(account); 105 | logBean.setAppId("cn.doitedu.yinew"); 106 | logBean.setAppVersion("2.5"); 107 | logBean.setCarrier("中国移动"); 108 | // deviceid直接用account 109 | logBean.setDeviceId(account); 110 | logBean.setIp("10.102.36.88"); 111 | logBean.setLatitude(RandomUtils.nextDouble(10.0, 52.0)); 112 | logBean.setLongitude(RandomUtils.nextDouble(120.0, 160.0)); 113 | logBean.setDeviceType("mi6"); 114 | logBean.setNetType("5G"); 115 | logBean.setOsName("android"); 116 | logBean.setOsVersion("7.5"); 117 | logBean.setReleaseChannel("小米应用市场"); 118 | logBean.setResolution("2048*1024"); 119 | 120 | /** 121 | * 生成事件ID 122 | */ 123 | logBean.setEventId(RandomStringUtils.randomAlphabetic(1).toUpperCase()); 124 | 125 | HashMap properties = new HashMap(); 126 | for (int i = 0; i < RandomUtils.nextInt(1, 5); i++) { 127 | // 生成的属性形如: p1=v1, p2=v1, p3=v2,p4=v1,..... 
p10= 128 | properties.put("p" + RandomUtils.nextInt(1, 11), "v" + RandomUtils.nextInt(1, 3)); 129 | } 130 | logBean.setProperties(properties); 131 | logBean.setTimeStamp(System.currentTimeMillis()); 132 | logBean.setSessionId(RandomStringUtils.randomNumeric(10, 10)); 133 | 134 | return logBean; 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/datagen/ActionLogGenOne.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.datagen; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import com.alibaba.fastjson.JSON; 5 | import org.apache.commons.lang3.RandomStringUtils; 6 | import org.apache.commons.lang3.RandomUtils; 7 | import org.apache.commons.lang3.StringUtils; 8 | import org.apache.kafka.clients.producer.KafkaProducer; 9 | import org.apache.kafka.clients.producer.ProducerRecord; 10 | 11 | import java.util.HashMap; 12 | import java.util.Map; 13 | import java.util.Properties; 14 | 15 | /*** 16 | * @author hunter.d 17 | * @qq 657270652 18 | * @wx haitao-duan 19 | * @date 2021/4/5 20 | **/ 21 | public class ActionLogGenOne { 22 | public static void main(String[] args) { 23 | Properties props = new Properties(); 24 | props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092"); 25 | props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 26 | props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 27 | KafkaProducer kafkaProducer = new KafkaProducer<>(props); 28 | 29 | LogBean logBean = new LogBean(); 30 | logBean.setDeviceId("000053"); 31 | logBean.setEventId("E"); 32 | Map ps = new HashMap(); 33 | props.put("p1", "v1"); 34 | logBean.setProperties(ps); 35 | logBean.setTimeStamp(System.currentTimeMillis()); 36 | 37 | String log = JSON.toJSONString(logBean); 38 | ProducerRecord record = new ProducerRecord<>("yinew_applog", log); 39 | kafkaProducer.send(record); 40 | kafkaProducer.flush(); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/datagen/UserProfileDataGen.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.datagen; 2 | 3 | import org.apache.commons.lang3.RandomUtils; 4 | import org.apache.commons.lang3.StringUtils; 5 | import org.apache.hadoop.conf.Configuration; 6 | import org.apache.hadoop.hbase.TableName; 7 | import org.apache.hadoop.hbase.client.Connection; 8 | import org.apache.hadoop.hbase.client.ConnectionFactory; 9 | import org.apache.hadoop.hbase.client.Put; 10 | import org.apache.hadoop.hbase.client.Table; 11 | import org.apache.hadoop.hbase.util.Bytes; 12 | 13 | import java.io.IOException; 14 | import java.util.ArrayList; 15 | 16 | /** 17 | * @author 涛哥 18 | * @nick_name "deep as the sea" 19 | * @contact qq:657270652 wx:doit_edu 20 | * @site www.doitedu.cn 21 | * @date 2021-03-27 22 | * @desc 用户画像数据模拟器 23 | *

24 | * Profile data format: one HBase row per deviceId, with tag columns tag1 ~ tag100 whose values are randomly v1 or v2 25 | *

26 | * hbase中需要先创建好画像标签表 27 | * [root@hdp01 ~]# hbase shell 28 | * hbase> create 'yinew_profile','f' 29 | */ 30 | public class UserProfileDataGen { 31 | public static void main(String[] args) throws IOException { 32 | 33 | Configuration conf = new Configuration(); 34 | conf.set("hbase.zookeeper.quorum", "hdp01:2181,hdp02:2181,hdp03:2181"); 35 | 36 | Connection conn = ConnectionFactory.createConnection(conf); 37 | Table table = conn.getTable(TableName.valueOf("yinew_profile")); 38 | 39 | ArrayList puts = new ArrayList<>(); 40 | for (int i = 0; i < 100000; i++) { 41 | 42 | // 生成一个用户的画像标签数据 43 | String deviceId = StringUtils.leftPad(i + "", 6, "0"); 44 | Put put = new Put(Bytes.toBytes(deviceId)); 45 | for (int k = 1; k <= 100; k++) { 46 | String key = "tag" + k; 47 | String value = "v" + RandomUtils.nextInt(1, 3); 48 | put.addColumn(Bytes.toBytes("f"), Bytes.toBytes(key), Bytes.toBytes(value)); 49 | } 50 | 51 | // 将这一条画像数据,添加到list中 52 | puts.add(put); 53 | 54 | // 攒满100条一批 55 | if(puts.size()==100) { 56 | table.put(puts); 57 | puts.clear(); 58 | } 59 | 60 | } 61 | 62 | // 提交最后一批 63 | if(puts.size()>0) table.put(puts); 64 | 65 | conn.close(); 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/Action.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class Action { 7 | private String msg; 8 | public Action(String msg) { 9 | this.msg = msg; 10 | } 11 | public void doSomeThing() { 12 | System.out.println(msg); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/Applicant.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * 申请人,用作fact 7 | */ 8 | @Data 9 | public class Applicant { 10 | private String name; 11 | private int age; 12 | private boolean valid; 13 | public Applicant(String name, int age) { 14 | this.name = name; 15 | this.age = age; 16 | this.valid = true; 17 | } 18 | } -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/CanalRecordBean.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.List; 6 | 7 | @Data 8 | public class CanalRecordBean { 9 | 10 | private List data; 11 | private String type; 12 | 13 | } 14 | 15 | 16 | @Data 17 | class RuleTableRecord{ 18 | 19 | private String ruleName; 20 | private String ruleCode; 21 | 22 | 23 | } -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/DroolsDemo.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | 4 | import org.kie.api.KieServices; 5 | import org.kie.api.runtime.KieContainer; 6 | import org.kie.api.runtime.KieSession; 7 | 8 | public class DroolsDemo { 9 | 10 | public static void main(String[] args) { 11 | 12 | KieServices kieServices = KieServices.Factory.get(); 13 | //默认自动加载 META-INF/kmodule.xml 14 | KieContainer kieContainer = 
kieServices.getKieClasspathContainer(); 15 | //kmodule.xml 中定义的 ksession name 16 | KieSession kieSession = kieContainer.newKieSession("all-rules"); 17 | 18 | 19 | Applicant applicant = new Applicant("康康", 17); 20 | 21 | // 向引擎插入一个fact数据 22 | kieSession.insert(applicant); 23 | // 启动规则计算 24 | kieSession.fireAllRules(); 25 | 26 | // 销毁kie会话 27 | kieSession.dispose(); 28 | 29 | } 30 | 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/DroolsDemo2.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import org.apache.commons.io.FileUtils; 4 | import org.kie.api.io.ResourceType; 5 | import org.kie.api.runtime.KieSession; 6 | import org.kie.internal.utils.KieHelper; 7 | 8 | import java.io.File; 9 | import java.io.IOException; 10 | 11 | public class DroolsDemo2 { 12 | 13 | public static void main(String[] args) throws IOException { 14 | 15 | KieHelper kieHelper = new KieHelper(); 16 | 17 | 18 | String drlFilePath = "E:\\yinew_marketing\\dynamic_rule_engine\\src\\main\\resources\\rules\\test.drl"; 19 | String s = FileUtils.readFileToString(new File(drlFilePath), "utf-8"); 20 | 21 | kieHelper.addContent(s, ResourceType.DRL); 22 | KieSession kieSession = kieHelper.build().newKieSession(); 23 | 24 | 25 | Applicant 康康 = new Applicant("康康", 16); 26 | kieSession.insert(康康); 27 | kieSession.fireAllRules(); 28 | 29 | 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/FlinkDroolsCanalDemo.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import com.alibaba.fastjson.JSONArray; 5 | import com.alibaba.fastjson.JSONObject; 6 | import lombok.extern.slf4j.Slf4j; 7 | import org.apache.flink.api.common.functions.MapFunction; 8 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 9 | import org.apache.flink.api.common.state.BroadcastState; 10 | import org.apache.flink.api.common.state.MapStateDescriptor; 11 | import org.apache.flink.api.java.functions.KeySelector; 12 | import org.apache.flink.configuration.Configuration; 13 | import org.apache.flink.streaming.api.datastream.*; 14 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 15 | import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction; 16 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; 17 | import org.apache.flink.util.Collector; 18 | import org.kie.api.io.ResourceType; 19 | import org.kie.api.runtime.KieSession; 20 | import org.kie.internal.utils.KieHelper; 21 | 22 | import java.util.Iterator; 23 | import java.util.Map; 24 | import java.util.Properties; 25 | 26 | /** 27 | * @author 涛哥 28 | * @nick_name "deep as the sea" 29 | * @contact qq:657270652 wx:doit_edu 30 | * @site www.doitedu.cn 31 | * @date 2021-04-06 32 | * @desc 测试准备: 33 | * kafka中创建applicant topic 34 | * [root@hdp03 kafka_2.11-2.0.0]# bin/kafka-topics.sh --create --topic applicant --replication-factor 1 --partitions 1 --zookeeper hdp01:2181 35 | * [root@hdp03 kafka_2.11-2.0.0]# bin/kafka-topics.sh --create --topic test_drools --replication-factor 1 --partitions 1 --zookeeper hdp01:2181 36 | * 37 | * 38 | * 39 | */ 40 | @Slf4j 41 | public class FlinkDroolsCanalDemo { 42 
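// Overall flow: the applicant stream is keyed by name and connected with the broadcast rule stream;
// processBroadcastElement compiles the DRL text carried in each canal change record (rule_name + rule_code) into a KieSession kept in broadcast state,
// and processElement runs every incoming Applicant against each cached KieSession, emitting whether it is still valid.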
| 43 | public static void main(String[] args) throws Exception { 44 | 45 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 46 | 47 | // 读申请信息流 48 | Properties props = new Properties(); 49 | props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092"); 50 | props.setProperty("auto.offset.reset", "latest"); 51 | FlinkKafkaConsumer applicantConsumer = new FlinkKafkaConsumer<>("applicant", new SimpleStringSchema(), props); 52 | 53 | // joke,18 54 | DataStreamSource applicantStrStream = env.addSource(applicantConsumer); 55 | 56 | // 将string流转成Applicant对象流 57 | SingleOutputStreamOperator applicantStream = applicantStrStream.map(new MapFunction() { 58 | @Override 59 | public Applicant map(String value) throws Exception { 60 | String[] split = value.split(","); 61 | return new Applicant(split[0],Integer.parseInt(split[1])); 62 | } 63 | }); 64 | 65 | // 按照name来keyby 66 | KeyedStream keyedStream = applicantStream.keyBy(new KeySelector() { 67 | @Override 68 | public String getKey(Applicant value) throws Exception { 69 | return value.getName(); 70 | } 71 | }); 72 | 73 | 74 | // 读规则流,并广播 75 | FlinkKafkaConsumer ruleConsumer = new FlinkKafkaConsumer<>("test_drools", new SimpleStringSchema(), props); 76 | DataStreamSource ruleStream = env.addSource(ruleConsumer); 77 | 78 | MapStateDescriptor stateDescriptor = new MapStateDescriptor<>("ruleState", String.class, KieSession.class); 79 | BroadcastStream broadcastStream = ruleStream.broadcast(stateDescriptor); 80 | 81 | // connect两个流 82 | BroadcastConnectedStream connectedStream = keyedStream.connect(broadcastStream); 83 | 84 | 85 | // 处理 86 | SingleOutputStreamOperator result = connectedStream.process(new KeyedBroadcastProcessFunction() { 87 | 88 | BroadcastState broadcastState; 89 | 90 | /** 91 | * 处理数据流 92 | * @param applicant 93 | * @param ctx 94 | * @param out 95 | * @throws Exception 96 | */ 97 | @Override 98 | public void processElement(Applicant applicant, ReadOnlyContext ctx, Collector out) throws Exception { 99 | 100 | Iterator> rulesIterator = broadcastState.iterator(); 101 | while(rulesIterator.hasNext()){ 102 | Map.Entry entry = rulesIterator.next(); 103 | KieSession kieSession = entry.getValue(); 104 | 105 | applicant.setValid(true); 106 | kieSession.insert(applicant); 107 | kieSession.fireAllRules(); 108 | 109 | if(applicant.isValid()){ 110 | out.collect(applicant.getName()+",合法"); 111 | }else{ 112 | out.collect(applicant.getName()+",不合法"); 113 | } 114 | 115 | } 116 | } 117 | 118 | /** 119 | * 处理广播流中的数据 120 | * @param value 121 | * @param ctx 122 | * @param out 123 | * @throws Exception 124 | */ 125 | @Override 126 | public void processBroadcastElement(String value, Context ctx, Collector out) throws Exception { 127 | broadcastState = ctx.getBroadcastState(stateDescriptor); 128 | 129 | // 进来的规则信息,是canal从mysql中监听到一个json串 130 | CanalRecordBean canalRecordBean = JSON.parseObject(value, CanalRecordBean.class); 131 | 132 | // 取出规则表数据 133 | RuleTableRecord tableRec = canalRecordBean.getData().get(0); 134 | 135 | String ruleName = tableRec.getRuleName(); 136 | String ruleCode = tableRec.getRuleCode(); 137 | 138 | KieSession kieSession = new KieHelper().addContent(ruleCode, ResourceType.DRL).build().newKieSession(); 139 | 140 | broadcastState.put(ruleName,kieSession); 141 | 142 | } 143 | }); 144 | 145 | // 打印 146 | result.print(); 147 | 148 | env.execute(); 149 | 150 | 151 | } 152 | } 153 | -------------------------------------------------------------------------------- 
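For reference, publishing a new rule to the demo above only takes an INSERT into the MySQL table that canal watches. A minimal sketch, assuming the `test_drools` table layout shown in canal-log.md (database `realtimedw`, columns `rule_name` / `rule_code`) and that canal.mq.topic points at the `test_drools` topic the demo consumes — adjust both to the actual deployment:

```
-- canal captures this INSERT, forwards the change JSON to kafka,
-- and FlinkDroolsCanalDemo compiles the DRL text in rule_code into a KieSession held in broadcast state
INSERT INTO realtimedw.test_drools (rule_name, rule_code)
VALUES ('rule1', '<full DRL text, e.g. the content of rules/flink.drl>');
```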
/dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/FlinkDroolsDemo.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.flink.api.common.functions.MapFunction; 5 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 6 | import org.apache.flink.api.common.state.BroadcastState; 7 | import org.apache.flink.api.common.state.MapStateDescriptor; 8 | import org.apache.flink.api.java.functions.KeySelector; 9 | import org.apache.flink.configuration.Configuration; 10 | import org.apache.flink.streaming.api.datastream.*; 11 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 12 | import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction; 13 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; 14 | import org.apache.flink.util.Collector; 15 | import org.kie.api.io.ResourceType; 16 | import org.kie.api.runtime.KieSession; 17 | import org.kie.internal.utils.KieHelper; 18 | 19 | import java.util.Iterator; 20 | import java.util.Map; 21 | import java.util.Properties; 22 | 23 | /** 24 | * @author 涛哥 25 | * @nick_name "deep as the sea" 26 | * @contact qq:657270652 wx:doit_edu 27 | * @site www.doitedu.cn 28 | * @date 2021-04-06 29 | * @desc 测试准备: 30 | * kafka中创建applicant topic 31 | * [root@hdp03 kafka_2.11-2.0.0]# bin/kafka-topics.sh --create --topic applicant --replication-factor 1 --partitions 1 --zookeeper hdp01:2181 32 | * [root@hdp03 kafka_2.11-2.0.0]# bin/kafka-topics.sh --create --topic test_drools --replication-factor 1 --partitions 1 --zookeeper hdp01:2181 33 | * 34 | * 35 | * 36 | */ 37 | @Slf4j 38 | public class FlinkDroolsDemo { 39 | 40 | public static void main(String[] args) throws Exception { 41 | 42 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 43 | 44 | // 读申请信息流 45 | Properties props = new Properties(); 46 | props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092"); 47 | props.setProperty("auto.offset.reset", "latest"); 48 | FlinkKafkaConsumer applicantConsumer = new FlinkKafkaConsumer<>("applicant", new SimpleStringSchema(), props); 49 | 50 | // joke,18 51 | DataStreamSource applicantStrStream = env.addSource(applicantConsumer); 52 | 53 | // 将string流转成Applicant对象流 54 | SingleOutputStreamOperator applicantStream = applicantStrStream.map(new MapFunction() { 55 | @Override 56 | public Applicant map(String value) throws Exception { 57 | String[] split = value.split(","); 58 | return new Applicant(split[0],Integer.parseInt(split[1])); 59 | } 60 | }); 61 | 62 | // 按照name来keyby 63 | KeyedStream keyedStream = applicantStream.keyBy(new KeySelector() { 64 | @Override 65 | public String getKey(Applicant value) throws Exception { 66 | return value.getName(); 67 | } 68 | }); 69 | 70 | 71 | // 读规则流,并广播 72 | FlinkKafkaConsumer ruleConsumer = new FlinkKafkaConsumer<>("test_drools", new SimpleStringSchema(), props); 73 | DataStreamSource ruleStream = env.addSource(ruleConsumer); 74 | 75 | MapStateDescriptor stateDescriptor = new MapStateDescriptor<>("ruleState", String.class, KieSession.class); 76 | BroadcastStream broadcastStream = ruleStream.broadcast(stateDescriptor); 77 | 78 | // connect两个流 79 | BroadcastConnectedStream connectedStream = keyedStream.connect(broadcastStream); 80 | 81 | 82 | // 处理 83 | SingleOutputStreamOperator result = connectedStream.process(new 
KeyedBroadcastProcessFunction() { 84 | 85 | BroadcastState broadcastState; 86 | 87 | /** 88 | * 处理数据流 89 | * @param applicant 90 | * @param ctx 91 | * @param out 92 | * @throws Exception 93 | */ 94 | @Override 95 | public void processElement(Applicant applicant, ReadOnlyContext ctx, Collector out) throws Exception { 96 | 97 | Iterator> rulesIterator = broadcastState.iterator(); 98 | while(rulesIterator.hasNext()){ 99 | Map.Entry entry = rulesIterator.next(); 100 | KieSession kieSession = entry.getValue(); 101 | 102 | applicant.setValid(true); 103 | kieSession.insert(applicant); 104 | kieSession.fireAllRules(); 105 | 106 | if(applicant.isValid()){ 107 | out.collect(applicant.getName()+",合法"); 108 | }else{ 109 | out.collect(applicant.getName()+",不合法"); 110 | } 111 | 112 | } 113 | } 114 | 115 | /** 116 | * 处理广播流中的数据 117 | * @param value 118 | * @param ctx 119 | * @param out 120 | * @throws Exception 121 | */ 122 | @Override 123 | public void processBroadcastElement(String value, Context ctx, Collector out) throws Exception { 124 | broadcastState = ctx.getBroadcastState(stateDescriptor); 125 | 126 | // value: rule-name,rule-code 127 | String[] split = value.split(","); 128 | 129 | KieHelper kieHelper = new KieHelper(); 130 | KieSession kieSession = kieHelper.addContent(split[1], ResourceType.DRL).build().newKieSession(); 131 | 132 | 133 | log.warn("收到一条新的规则,并插入了state"); 134 | broadcastState.put(split[0],kieSession); 135 | 136 | } 137 | }); 138 | 139 | // 打印 140 | result.print(); 141 | 142 | env.execute(); 143 | 144 | 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/ReadDrlToKafka.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import org.apache.commons.io.FileUtils; 4 | import org.apache.kafka.clients.producer.KafkaProducer; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | 7 | import java.io.File; 8 | import java.io.IOException; 9 | import java.util.Properties; 10 | 11 | public class ReadDrlToKafka { 12 | public static void main(String[] args) throws IOException { 13 | 14 | Properties props = new Properties(); 15 | props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092"); 16 | props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 17 | props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 18 | KafkaProducer kafkaProducer = new KafkaProducer<>(props); 19 | 20 | String s = FileUtils.readFileToString(new File("dynamic_rule_engine/src/main/resources/rules/flink.drl"), "utf-8"); 21 | 22 | ProducerRecord record = new ProducerRecord<>("test_drools", "rule1,"+s); 23 | kafkaProducer.send(record); 24 | kafkaProducer.flush(); 25 | 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/ReadDrlToMySql.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import org.apache.commons.io.FileUtils; 4 | import org.apache.kafka.clients.producer.KafkaProducer; 5 | import org.apache.kafka.clients.producer.ProducerRecord; 6 | 7 | import java.io.File; 8 | import java.io.IOException; 9 | import java.sql.*; 10 | import java.util.Properties; 11 | 12 | /** 13 | * @author 涛哥 14 | * @nick_name "deep as the sea" 15 | * @contact 
qq:657270652 wx:doit_edu 16 | * @site www.doitedu.cn 17 | * @date 2021-04-07 18 | * @desc 19 | * 20 | * `id` int(11) NOT NULL AUTO_INCREMENT, 21 | * `rule_name` varchar(255) DEFAULT NULL, 22 | * `rule_code` varchar(4096) DEFAULT NULL, 23 | * `rule_status` int(11) DEFAULT NULL, 24 | * `rule_type` varchar(255) DEFAULT NULL, 25 | * `rule_version` varchar(255) DEFAULT NULL, 26 | * `cnt_sqls` varchar(4096) DEFAULT NULL, 27 | * `seq_sqls` varchar(4096) DEFAULT NULL, 28 | * `rule_creator` varchar(255) DEFAULT NULL, 29 | * `rule_auditor` varchar(255) DEFAULT NULL, 30 | * `create_time` datetime DEFAULT NULL, 31 | * `update_time` datetime DEFAULT NULL, 32 | * 33 | */ 34 | public class ReadDrlToMySql { 35 | public static void main(String[] args) throws IOException, SQLException { 36 | String ruleName = "rule2"; 37 | String ruleCode = FileUtils.readFileToString(new File("dynamic_rule_engine/rules_drl/rule2.drl"), "utf-8"); 38 | int ruleStatus = 1; 39 | String ruleType = "1"; 40 | String ruleVersion = "1"; 41 | String cntSqls = FileUtils.readFileToString(new File("dynamic_rule_engine/rules_drl/rule2_cnt.sql"), "utf-8"); 42 | String seqSqls = FileUtils.readFileToString(new File("dynamic_rule_engine/rules_drl/rule2_seq.sql"), "utf-8"); 43 | String ruleCreator = "doitedu"; 44 | String ruleAuditor = "hunter.d"; 45 | Date createTime = new Date(System.currentTimeMillis()); 46 | Date updateTime = createTime; 47 | Connection conn = DriverManager.getConnection("jdbc:mysql://hdp01:3306/realtimedw", "root", "ABC123abc.123"); 48 | PreparedStatement pst = conn.prepareStatement("insert into yinew_drl_rule (rule_name,rule_code,rule_status,rule_type,rule_version,cnt_sqls,seq_sqls,rule_creator,rule_auditor,create_time,update_time) " + 49 | "values (?,?,?,?,?,?,?,?,?,?,?)"); 50 | pst.setString(1,ruleName); 51 | pst.setString(2,ruleCode); 52 | pst.setInt(3,ruleStatus); 53 | pst.setString(4,ruleType); 54 | pst.setString(5,ruleVersion); 55 | pst.setString(6,cntSqls); 56 | pst.setString(7,seqSqls); 57 | pst.setString(8,ruleCreator); 58 | pst.setString(9,ruleAuditor); 59 | pst.setDate(10,createTime); 60 | pst.setDate(11,updateTime); 61 | 62 | boolean execute = pst.execute(); 63 | 64 | pst.close(); 65 | conn.close(); 66 | 67 | 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/RedisDemo.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import redis.clients.jedis.Jedis; 5 | 6 | public class RedisDemo { 7 | 8 | public static void main(String[] args) { 9 | 10 | Jedis jedis = new Jedis("hdp02", 6379); 11 | 12 | String res = jedis.ping(); 13 | System.out.println(res); 14 | 15 | // 插入一条扁平数据 16 | jedis.set("key01","value01"); 17 | 18 | String res2 = jedis.get("key01"); 19 | System.out.println(res2); 20 | 21 | 22 | // 插入一个json 23 | jedis.set("key02","{'a':1,'b':2,c:[4,5,6]}"); 24 | String res3 = jedis.get("key02"); 25 | System.out.println(res3); 26 | 27 | 28 | // 插入一个person对象 29 | Person p = new Person("宴梅", 30); 30 | String pjson = JSON.toJSONString(p); 31 | jedis.set("yanmei",pjson); 32 | String res4 = jedis.get("yanmei"); 33 | System.out.println(res4); 34 | 35 | } 36 | } 37 | 38 | class Person{ 39 | String name; 40 | int age; 41 | 42 | public Person(String name, int age) { 43 | this.name = name; 44 | this.age = age; 45 | } 46 | 47 | public String getName() { 48 | return name; 49 | } 50 | 51 | 
public void setName(String name) { 52 | this.name = name; 53 | } 54 | 55 | public int getAge() { 56 | return age; 57 | } 58 | 59 | public void setAge(int age) { 60 | this.age = age; 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/demos/Slf4jLog4jDemo.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.demos; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | 7 | 8 | @Slf4j 9 | public class Slf4jLog4jDemo { 10 | 11 | public static void main(String[] args) throws InterruptedException { 12 | 13 | while(true) { 14 | log.debug("哈哈哈哈哈"); 15 | log.info("哈哈哈哈哈"); 16 | log.warn("哈哈哈哈哈"); 17 | log.error("哈哈哈哈哈"); 18 | 19 | Thread.sleep(1000); 20 | } 21 | 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/engine/RuleEngineV1.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.engine; 2 | 3 | import cn.doitedu.dynamic_rule.functions.DeviceKeySelector; 4 | import cn.doitedu.dynamic_rule.functions.Json2BeanMapFunction; 5 | import cn.doitedu.dynamic_rule.functions.RuleProcessFunction; 6 | import cn.doitedu.dynamic_rule.functions.SourceFunctions; 7 | import cn.doitedu.dynamic_rule.pojo.LogBean; 8 | import cn.doitedu.dynamic_rule.pojo.ResultBean; 9 | import org.apache.avro.data.Json; 10 | import org.apache.flink.configuration.Configuration; 11 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 12 | import org.apache.flink.streaming.api.datastream.KeyedStream; 13 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 14 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 15 | 16 | /** 17 | * @author 涛哥 18 | * @nick_name "deep as the sea" 19 | * @contact qq:657270652 wx:doit_edu 20 | * @site www.doitedu.cn 21 | * @date 2021-03-28 22 | * @desc 静态规则引擎版本1主程序 23 | */ 24 | public class RuleEngineV1 { 25 | 26 | public static void main(String[] args) throws Exception { 27 | 28 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 29 | 30 | // 添加一个消费kafka中用户实时行为事件数据的source 31 | DataStreamSource logStream = env.addSource(SourceFunctions.getKafkaEventSource()); 32 | 33 | // 将json格式的数据,转成 logbean格式的数据 34 | SingleOutputStreamOperator beanStream = logStream.map(new Json2BeanMapFunction()); 35 | 36 | // 对数据按用户deviceid分key 37 | KeyedStream keyed = beanStream.keyBy(new DeviceKeySelector()); 38 | 39 | // 开始核心计算处理 40 | SingleOutputStreamOperator resultStream = keyed.process(new RuleProcessFunction()); 41 | 42 | // 打印 43 | resultStream.print(); 44 | 45 | env.execute(); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/engine/RuleEngineV2.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.engine; 2 | 3 | import cn.doitedu.dynamic_rule.functions.*; 4 | import cn.doitedu.dynamic_rule.pojo.LogBean; 5 | import cn.doitedu.dynamic_rule.pojo.ResultBean; 6 | import org.apache.flink.configuration.Configuration; 7 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 8 | import 
org.apache.flink.streaming.api.datastream.KeyedStream; 9 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 10 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 11 | 12 | /** 13 | * @author 涛哥 14 | * @nick_name "deep as the sea" 15 | * @contact qq:657270652 wx:doit_edu 16 | * @site www.doitedu.cn 17 | * @date 2021-03-28 18 | * @desc 静态规则引擎版本2主程序 19 | * 相对于v1.0来说,只有一处改变: keyed.process(这里用了 RuleProcessFunctionV2) 20 | */ 21 | public class RuleEngineV2 { 22 | 23 | public static void main(String[] args) throws Exception { 24 | 25 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 26 | 27 | // 添加一个消费kafka中用户实时行为事件数据的source 28 | DataStreamSource logStream = env.addSource(SourceFunctions.getKafkaEventSource()); 29 | 30 | // 将json格式的数据,转成 logBean格式的数据 31 | SingleOutputStreamOperator beanStream = logStream.map(new Json2BeanMapFunction()); 32 | 33 | // 对数据按用户deviceid分key 34 | KeyedStream keyed = beanStream.keyBy(new DeviceKeySelector()); 35 | 36 | // 开始核心计算处理 37 | SingleOutputStreamOperator resultStream = keyed.process(new RuleProcessFunctionV2()); 38 | 39 | // 打印 40 | resultStream.print(); 41 | 42 | env.execute(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/engine/RuleEngineV3.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.engine; 2 | 3 | import cn.doitedu.dynamic_rule.functions.*; 4 | import cn.doitedu.dynamic_rule.pojo.LogBean; 5 | import cn.doitedu.dynamic_rule.pojo.ResultBean; 6 | import org.apache.flink.configuration.Configuration; 7 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 8 | import org.apache.flink.streaming.api.datastream.KeyedStream; 9 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 10 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 11 | 12 | /** 13 | * @author 涛哥 14 | * @nick_name "deep as the sea" 15 | * @contact qq:657270652 wx:doit_edu 16 | * @site www.doitedu.cn 17 | * @date 2021-03-28 18 | * @desc 静态规则引擎版本2主程序 19 | * 相对于v1.0来说,只有一处改变: keyed.process(这里用了 RuleProcessFunctionV2) 20 | */ 21 | public class RuleEngineV3 { 22 | 23 | public static void main(String[] args) throws Exception { 24 | 25 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 26 | 27 | // 添加一个消费kafka中用户实时行为事件数据的source 28 | DataStreamSource logStream = env.addSource(SourceFunctions.getKafkaEventSource()); 29 | 30 | // 将json格式的数据,转成 logBean格式的数据 31 | SingleOutputStreamOperator beanStream = logStream.map(new Json2BeanMapFunction()); 32 | 33 | // 对数据按用户deviceid分key 34 | KeyedStream keyed = beanStream.keyBy(new DeviceKeySelector()); 35 | 36 | // 开始核心计算处理 37 | SingleOutputStreamOperator resultStream = keyed.process(new RuleProcessFunctionV3()); 38 | 39 | // 打印 40 | resultStream.print(); 41 | 42 | env.execute(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/engine/RuleEngineV4.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.engine; 2 | 3 | import cn.doitedu.dynamic_rule.functions.*; 4 | import cn.doitedu.dynamic_rule.pojo.LogBean; 5 | import 
cn.doitedu.dynamic_rule.pojo.ResultBean; 6 | import org.apache.flink.configuration.Configuration; 7 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 8 | import org.apache.flink.streaming.api.datastream.KeyedStream; 9 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 10 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 11 | 12 | /** 13 | * @author 涛哥 14 | * @nick_name "deep as the sea" 15 | * @contact qq:657270652 wx:doit_edu 16 | * @site www.doitedu.cn 17 | * @date 2021-03-28 18 | * @desc 静态规则引擎版本2主程序 19 | * 相对于v1.0来说,只有一处改变: keyed.process(这里用了 RuleProcessFunctionV2) 20 | */ 21 | public class RuleEngineV4 { 22 | 23 | public static void main(String[] args) throws Exception { 24 | 25 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 26 | 27 | // 添加一个消费kafka中用户实时行为事件数据的source 28 | DataStreamSource logStream = env.addSource(SourceFunctions.getKafkaEventSource()); 29 | 30 | // 将json格式的数据,转成 logBean格式的数据 31 | SingleOutputStreamOperator beanStream = logStream.map(new Json2BeanMapFunction()); 32 | 33 | // 对数据按用户deviceid分key 34 | // TODO 后续可以升级改造成 动态keyBy 35 | KeyedStream keyed = beanStream.keyBy(new DeviceKeySelector()); 36 | 37 | // 开始核心计算处理 38 | SingleOutputStreamOperator resultStream = keyed.process(new RuleProcessFunctionV4()); 39 | 40 | // 打印 41 | resultStream.print(); 42 | 43 | env.execute(); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/engine/RuleEngineV5.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.engine; 2 | 3 | import cn.doitedu.dynamic_rule.functions.*; 4 | import cn.doitedu.dynamic_rule.pojo.LogBean; 5 | import cn.doitedu.dynamic_rule.pojo.ResultBean; 6 | import cn.doitedu.dynamic_rule.utils.StateDescUtil; 7 | import org.apache.flink.configuration.Configuration; 8 | import org.apache.flink.streaming.api.datastream.*; 9 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 10 | 11 | /** 12 | * @author 涛哥 13 | * @nick_name "deep as the sea" 14 | * @contact qq:657270652 wx:doit_edu 15 | * @site www.doitedu.cn 16 | * @date 2021-03-28 17 | * @desc 静态规则引擎版本2主程序 18 | * 相对于v4.0来说,只有一处改变: 新增了规则流的读取 19 | */ 20 | public class RuleEngineV5 { 21 | 22 | public static void main(String[] args) throws Exception { 23 | 24 | StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration()); 25 | 26 | // 添加一个消费kafka中用户实时行为事件数据的source 27 | DataStreamSource logStream = env.addSource(SourceFunctions.getKafkaEventSource()); 28 | 29 | // 将json格式的数据,转成 logBean格式的数据 30 | SingleOutputStreamOperator beanStream = logStream.map(new Json2BeanMapFunction()); 31 | 32 | // 对数据按用户deviceid分key 33 | // TODO 后续可以升级改造成 动态keyBy 34 | KeyedStream keyed = beanStream.keyBy(new DeviceKeySelector()); 35 | 36 | // 读取规则信息流 37 | DataStreamSource ruleStream = env.addSource(SourceFunctions.getKafkaRuleSource()); 38 | // 广播 39 | BroadcastStream broadcastStream = ruleStream.broadcast(StateDescUtil.ruleKieStateDesc); 40 | 41 | // 连接 事件流 & 规则广播流 42 | BroadcastConnectedStream connected = keyed.connect(broadcastStream); 43 | 44 | // 开始核心计算处理 45 | SingleOutputStreamOperator resultStream = connected.process(new RuleProcessFunctionV5()); 46 | 47 | // 打印 48 | resultStream.print(); 49 | 50 | env.execute(); 51 | } 52 | } 53 | 
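RuleEngineV5 above broadcasts the rule stream with StateDescUtil.ruleKieStateDesc, and RuleProcessFunctionV5 later in this package reads the same descriptor back, plus StateDescUtil.eventStateDesc for the per-user event ListState. The actual utils/StateDescUtil.java is not included in this section; a sketch consistent with those two usages (the descriptor names passed to the constructors are assumptions) could look like:

package cn.doitedu.dynamic_rule.utils;

import cn.doitedu.dynamic_rule.pojo.LogBean;
import cn.doitedu.dynamic_rule.pojo.RuleStateBean;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapStateDescriptor;

// Sketch only: the real utils/StateDescUtil.java is in the repo but not shown in this section.
public class StateDescUtil {

    // descriptor for the per-user event-detail ListState used by RuleProcessFunctionV5
    public static final ListStateDescriptor<LogBean> eventStateDesc =
            new ListStateDescriptor<>("eventState", LogBean.class);

    // descriptor for the broadcast rule state: ruleName -> RuleStateBean (KieSession + count/sequence sqls)
    public static final MapStateDescriptor<String, RuleStateBean> ruleKieStateDesc =
            new MapStateDescriptor<>("ruleKieState", String.class, RuleStateBean.class);
}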
-------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/functions/DeviceKeySelector.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.functions; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import org.apache.flink.api.java.functions.KeySelector; 5 | 6 | public class DeviceKeySelector implements KeySelector { 7 | @Override 8 | public String getKey(LogBean value) throws Exception { 9 | 10 | return value.getDeviceId(); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/functions/Json2BeanMapFunction.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.functions; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import com.alibaba.fastjson.JSON; 5 | import org.apache.flink.api.common.functions.MapFunction; 6 | 7 | public class Json2BeanMapFunction implements MapFunction { 8 | @Override 9 | public LogBean map(String value) throws Exception { 10 | return JSON.parseObject(value,LogBean.class); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/functions/RuleProcessFunction.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.functions; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.ResultBean; 5 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 6 | import cn.doitedu.dynamic_rule.service.*; 7 | import cn.doitedu.dynamic_rule.utils.RuleSimulator; 8 | import org.apache.flink.api.common.state.ListState; 9 | import org.apache.flink.api.common.state.ListStateDescriptor; 10 | import org.apache.flink.configuration.Configuration; 11 | import org.apache.flink.streaming.api.functions.KeyedProcessFunction; 12 | import org.apache.flink.util.Collector; 13 | 14 | /** 15 | * @author 涛哥 16 | * @nick_name "deep as the sea" 17 | * @contact qq:657270652 wx:doit_edu 18 | * @site www.doitedu.cn 19 | * @date 2021-03-28 20 | * @desc 规则核心处理函数 21 | */ 22 | public class RuleProcessFunction extends KeyedProcessFunction { 23 | 24 | private UserProfileQueryService userProfileQueryService; 25 | private UserActionCountQueryService userActionCountQueryService; 26 | private UserActionSequenceQueryService userActionSequenceQueryService; 27 | 28 | ListState eventState; 29 | 30 | RuleParam ruleParam; 31 | 32 | @Override 33 | public void open(Configuration parameters) throws Exception { 34 | 35 | /** 36 | * 准备一个存储明细事件的state 37 | */ 38 | ListStateDescriptor desc = new ListStateDescriptor<>("eventState", LogBean.class); 39 | eventState = getRuntimeContext().getListState(desc); 40 | /** 41 | * 构造底层的核心查询服务 42 | */ 43 | userProfileQueryService = new UserProfileQueryServiceHbaseImpl(); 44 | userActionCountQueryService = new UserActionCountQueryServiceStateImpl(eventState); 45 | userActionSequenceQueryService = new UserActionSequenceQueryServiceStateImpl(eventState); 46 | 47 | /** 48 | * 获取规则参数 49 | */ 50 | ruleParam = RuleSimulator.getRuleParam(); 51 | 52 | 53 | 54 | } 55 | 56 | @Override 57 | public void processElement(LogBean logBean, Context ctx, Collector out) throws Exception { 58 | // 将收到的事件放入历史明细state存储中 59 | eventState.add(logBean); 60 | 61 | 62 | // 判断是否满足触发条件 63 | 
if (ruleParam.getTriggerParam().getEventId().equals(logBean.getEventId())) { 64 | System.out.println("规则计算被触发:" + logBean.getDeviceId() + ","+logBean.getEventId()); 65 | 66 | // 查询画像条件 67 | boolean profileMatch = userProfileQueryService.judgeProfileCondition(logBean.getDeviceId(), ruleParam); 68 | if(!profileMatch) return; 69 | 70 | // 查询行为次数条件 71 | boolean countMatch = userActionCountQueryService.queryActionCounts("", ruleParam); 72 | if(!countMatch) return; 73 | 74 | // 查询行为序列条件 75 | boolean sequenceMatch = userActionSequenceQueryService.queryActionSequence(null, ruleParam); 76 | if(!sequenceMatch) return; 77 | 78 | 79 | // 输出一个规则匹配成功的结果 80 | ResultBean resultBean = new ResultBean(); 81 | resultBean.setTimeStamp(logBean.getTimeStamp()); 82 | resultBean.setRuleId(ruleParam.getRuleName()); 83 | resultBean.setDeviceId(logBean.getDeviceId()); 84 | 85 | out.collect(resultBean); 86 | } 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/functions/RuleProcessFunctionV3.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.functions; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.ResultBean; 5 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 6 | import cn.doitedu.dynamic_rule.service.*; 7 | import cn.doitedu.dynamic_rule.utils.RuleSimulator; 8 | import org.apache.flink.api.common.state.ListState; 9 | import org.apache.flink.api.common.state.ListStateDescriptor; 10 | import org.apache.flink.api.common.state.StateTtlConfig; 11 | import org.apache.flink.api.common.time.Time; 12 | import org.apache.flink.configuration.Configuration; 13 | import org.apache.flink.streaming.api.functions.KeyedProcessFunction; 14 | import org.apache.flink.util.Collector; 15 | 16 | /** 17 | * @author 涛哥 18 | * @nick_name "deep as the sea" 19 | * @contact qq:657270652 wx:doit_edu 20 | * @site www.doitedu.cn 21 | * @date 2021-03-28 22 | * @desc 规则核心处理函数版本3.0 23 | */ 24 | public class RuleProcessFunctionV3 extends KeyedProcessFunction { 25 | 26 | QueryRouterV3 queryRouterV3; 27 | 28 | ListState eventState; 29 | 30 | RuleParam ruleParam; 31 | 32 | @Override 33 | public void open(Configuration parameters) throws Exception { 34 | 35 | 36 | // 构造一个查询路由控制器 37 | queryRouterV3 = new QueryRouterV3(); 38 | 39 | 40 | /** 41 | * 获取规则参数 42 | * TODO 规则的获取,现在是通过模拟器生成 43 | * TODO 后期需要改造成从外部获取 44 | */ 45 | ruleParam = RuleSimulator.getRuleParam(); 46 | 47 | /** 48 | * 准备一个存储明细事件的state 49 | * 控制state的ttl周期为最近2小时 50 | */ 51 | ListStateDescriptor desc = new ListStateDescriptor<>("eventState", LogBean.class); 52 | StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(2)).updateTtlOnCreateAndWrite().build(); 53 | desc.enableTimeToLive(ttlConfig); 54 | eventState = getRuntimeContext().getListState(desc); 55 | 56 | } 57 | 58 | 59 | /** 60 | * 规则计算核心方法 61 | * @param logBean 62 | * @param ctx 63 | * @param out 64 | * @throws Exception 65 | */ 66 | @Override 67 | public void processElement(LogBean logBean, Context ctx, Collector out) throws Exception { 68 | 69 | // 将收到的事件放入历史明细state存储中 70 | // 超过2小时的logBean会被自动清除(前面设置了ttl存活时长) 71 | eventState.add(logBean); 72 | 73 | 74 | 75 | /** 76 | * 主逻辑,进行规则触发和计算 77 | */ 78 | if (ruleParam.getTriggerParam().getEventId().equals(logBean.getEventId())) { 79 | System.out.println("规则计算被触发:" + logBean.getDeviceId() + ","+logBean.getEventId()); 80 | 81 | boolean b1 = 
queryRouterV3.profileQuery(logBean, ruleParam); 82 | if(!b1) return; 83 | 84 | boolean b2 = queryRouterV3.sequenceConditionQuery(logBean, ruleParam, eventState); 85 | if(!b2) return; 86 | 87 | boolean b3 = queryRouterV3.countConditionQuery(logBean, ruleParam, eventState); 88 | if(!b3) return; 89 | 90 | 91 | // 输出一个规则匹配成功的结果 92 | ResultBean resultBean = new ResultBean(); 93 | resultBean.setTimeStamp(logBean.getTimeStamp()); 94 | resultBean.setRuleId(ruleParam.getRuleName()); 95 | resultBean.setDeviceId(logBean.getDeviceId()); 96 | 97 | out.collect(resultBean); 98 | } 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/functions/RuleProcessFunctionV4.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.functions; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.ResultBean; 5 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 6 | import cn.doitedu.dynamic_rule.service.QueryRouterV4; 7 | import cn.doitedu.dynamic_rule.utils.RuleSimulator; 8 | import lombok.extern.slf4j.Slf4j; 9 | import org.apache.flink.api.common.state.ListState; 10 | import org.apache.flink.api.common.state.ListStateDescriptor; 11 | import org.apache.flink.api.common.state.StateTtlConfig; 12 | import org.apache.flink.api.common.time.Time; 13 | import org.apache.flink.configuration.Configuration; 14 | import org.apache.flink.streaming.api.functions.KeyedProcessFunction; 15 | import org.apache.flink.util.Collector; 16 | 17 | /** 18 | * @author 涛哥 19 | * @nick_name "deep as the sea" 20 | * @contact qq:657270652 wx:doit_edu 21 | * @site www.doitedu.cn 22 | * @date 2021-03-28 23 | * @desc 规则核心处理函数版本4.0 24 | */ 25 | @Slf4j 26 | public class RuleProcessFunctionV4 extends KeyedProcessFunction { 27 | 28 | QueryRouterV4 queryRouterV4; 29 | 30 | ListState eventState; 31 | 32 | // RuleParam ruleParam; 33 | 34 | @Override 35 | public void open(Configuration parameters) throws Exception { 36 | 37 | /* 38 | * 获取规则参数 39 | * TODO 规则的获取,现在是通过模拟器生成 40 | * TODO 后期需要改造成从外部获取 41 | */ 42 | // ruleParam = RuleSimulator.getRuleParam(); 43 | 44 | /* 45 | * 准备一个存储明细事件的state 46 | * 控制state的ttl周期为最近2小时 47 | */ 48 | ListStateDescriptor desc = new ListStateDescriptor<>("eventState", LogBean.class); 49 | StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(2)).updateTtlOnCreateAndWrite().build(); 50 | desc.enableTimeToLive(ttlConfig); 51 | eventState = getRuntimeContext().getListState(desc); 52 | 53 | // 构造一个查询路由控制器 54 | queryRouterV4 = new QueryRouterV4(eventState); 55 | 56 | } 57 | 58 | 59 | /** 60 | * 规则计算核心方法 61 | * @param logBean 事件bean 62 | * @param ctx 上下文 63 | * @param out 输出 64 | * @throws Exception 异常 65 | */ 66 | @Override 67 | public void processElement(LogBean logBean, Context ctx, Collector out) throws Exception { 68 | 69 | // 将收到的事件放入历史明细state存储中 70 | // 超过2小时的logBean会被自动清除(前面设置了ttl存活时长) 71 | eventState.add(logBean); 72 | 73 | // TODO 重大bug,下面的处理过程会不断修改ruleParam中条件的时间字段,如果不在这里重新获取规则,则下一次触发计算时,条件已经不是原来的条件了 74 | RuleParam ruleParam = RuleSimulator.getRuleParam(); 75 | 76 | 77 | /* 78 | * 主逻辑,进行规则触发和计算 79 | */ 80 | if (ruleParam.getTriggerParam().getEventId().equals(logBean.getEventId())) { 81 | log.debug("规则:{},用户:{},触发事件:{},触发时间:{}", ruleParam.getRuleName(),logBean.getDeviceId(),logBean.getEventId(),logBean.getTimeStamp()); 82 | 83 | boolean b1 = queryRouterV4.profileQuery(logBean, ruleParam); 84 | if(!b1) return; 
85 | 86 | boolean b2 = queryRouterV4.sequenceConditionQuery(logBean, ruleParam); 87 | if(!b2) return; 88 | 89 | boolean b3 = queryRouterV4.countConditionQuery(logBean, ruleParam); 90 | if(!b3) return; 91 | 92 | 93 | // 输出一个规则匹配成功的结果 94 | ResultBean resultBean = new ResultBean(); 95 | resultBean.setTimeStamp(logBean.getTimeStamp()); 96 | resultBean.setRuleId(ruleParam.getRuleName()); 97 | resultBean.setDeviceId(logBean.getDeviceId()); 98 | log.info("{}规则,触发人:{},计算匹配成功", ruleParam.getRuleName(),logBean.getDeviceId()); 99 | 100 | out.collect(resultBean); 101 | } 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/functions/RuleProcessFunctionV5.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.functions; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.*; 4 | import cn.doitedu.dynamic_rule.service.QueryRouterV4; 5 | import cn.doitedu.dynamic_rule.utils.RuleOperationHandler; 6 | import cn.doitedu.dynamic_rule.utils.RuleSimulator; 7 | import cn.doitedu.dynamic_rule.utils.StateDescUtil; 8 | import com.alibaba.fastjson.JSON; 9 | import lombok.extern.slf4j.Slf4j; 10 | import org.apache.flink.api.common.state.*; 11 | import org.apache.flink.api.common.time.Time; 12 | import org.apache.flink.configuration.Configuration; 13 | import org.apache.flink.streaming.api.functions.KeyedProcessFunction; 14 | import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction; 15 | import org.apache.flink.util.Collector; 16 | import org.kie.api.runtime.KieSession; 17 | 18 | import java.util.ArrayList; 19 | import java.util.Map; 20 | 21 | /** 22 | * @author 涛哥 23 | * @nick_name "deep as the sea" 24 | * @contact qq:657270652 wx:doit_edu 25 | * @site www.doitedu.cn 26 | * @date 2021-03-28 27 | * @desc 规则核心处理函数版本4.0 28 | */ 29 | @Slf4j 30 | public class RuleProcessFunctionV5 extends KeyedBroadcastProcessFunction { 31 | 32 | QueryRouterV4 queryRouterV4; 33 | 34 | ListState eventState; 35 | 36 | // RuleParam ruleParam; 37 | 38 | @Override 39 | public void open(Configuration parameters) throws Exception { 40 | 41 | /* 42 | * 准备一个存储明细事件的state 43 | * 控制state的ttl周期为最近2小时 44 | */ 45 | 46 | ListStateDescriptor eventStateDesc = StateDescUtil.eventStateDesc; 47 | StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(2)).updateTtlOnCreateAndWrite().build(); 48 | eventStateDesc.enableTimeToLive(ttlConfig); 49 | eventState = getRuntimeContext().getListState(eventStateDesc); 50 | 51 | // 构造一个查询路由控制器 52 | queryRouterV4 = new QueryRouterV4(eventState); 53 | 54 | } 55 | 56 | 57 | /** 58 | * 规则计算核心方法 59 | * @param logBean 事件bean 60 | * @param ctx 上下文 61 | * @param out 输出 62 | * @throws Exception 异常 63 | */ 64 | @Override 65 | public void processElement(LogBean logBean, ReadOnlyContext ctx, Collector out) throws Exception { 66 | 67 | 68 | ReadOnlyBroadcastState ruleState = ctx.getBroadcastState(StateDescUtil.ruleKieStateDesc); 69 | 70 | // 将收到的事件放入历史明细state存储中 71 | // 超过2小时的logBean会被自动清除(前面设置了ttl存活时长) 72 | eventState.add(logBean); 73 | 74 | Iterable> entries = ruleState.immutableEntries(); 75 | for (Map.Entry entry : entries) { 76 | 77 | String ruleName = entry.getKey(); 78 | RuleStateBean stateBean = entry.getValue(); 79 | 80 | // 从rulestate中取sql 81 | String cntSqls = stateBean.getCntSqls(); 82 | String seqSqls = stateBean.getSeqSqls(); 83 | 84 | // 从rulestate中取出kiesession 85 | KieSession kieSession = stateBean.getKieSession(); 86 | 
87 | // 构造ruleparam对象 88 | RuleParam ruleParam = new RuleParam(); 89 | ruleParam.setRuleName(ruleName); 90 | 91 | 92 | // 放入cntsql 93 | String[] cntSqlArr = cntSqls.split(";"); 94 | ArrayList countParams = new ArrayList<>(); 95 | for (String cntSql : cntSqlArr) { 96 | RuleAtomicParam ruleAtomicParam = new RuleAtomicParam(); 97 | ruleAtomicParam.setCountQuerySql(cntSql); 98 | countParams.add(ruleAtomicParam); 99 | } 100 | // 将封装好sql的count类条件,放入规则总参数ruleParam 101 | ruleParam.setUserActionCountParams(countParams); 102 | 103 | // 放入seqsql 104 | ruleParam.setActionSequenceQuerySql(seqSqls); 105 | 106 | 107 | // 构建一个queryRouter 108 | QueryRouterV4 queryRouterV4 = new QueryRouterV4(eventState); 109 | 110 | 111 | // 构造DroolFact对象 112 | // TODO 这里还可以完善: 判断规则的类型(规则组),给它注入对应的queryRouter 113 | /** 114 | * String className = stateBean.getRouterClass(); 115 | * String classJavaCode = stateBean.getClassCode() 116 | * // 即时编译api调用 117 | * Class cls = jitCompile(classJavaCode) 118 | * QueryRouter router = (QueryRouter)cls.newInstance() 119 | */ 120 | DroolFact droolFact = new DroolFact(logBean, ruleParam, queryRouterV4, false); 121 | 122 | // 将droolfact插入kiesssion 123 | kieSession.insert(droolFact); 124 | 125 | // 发射 fire 126 | kieSession.fireAllRules(); 127 | 128 | // 判断计算后的结果是否匹配 129 | if(droolFact.isMatch()) { 130 | // 如果匹配,输出一条匹配结果 131 | out.collect(new ResultBean(ruleName,logBean.getDeviceId(),logBean.getTimeStamp())); 132 | } 133 | 134 | } 135 | } 136 | 137 | 138 | /** 139 | * 处理输入的规则操作信息,是canal从mysql中监听到并写入kafka的json数据 140 | * @param canalJson 141 | * @param ctx 142 | * @param out 143 | * @throws Exception 144 | */ 145 | @Override 146 | public void processBroadcastElement(String canalJson, Context ctx, Collector out) throws Exception { 147 | 148 | BroadcastState mapState = ctx.getBroadcastState(StateDescUtil.ruleKieStateDesc); 149 | 150 | // 解析json成对象 151 | RuleCanalBean ruleCanalBean = JSON.parseObject(canalJson, RuleCanalBean.class); 152 | log.info("收到一个规则库的操作,信息为: {}",ruleCanalBean); 153 | 154 | // 分情况处理获取的规则操作信息(新增,更新,删除,停用,启用) 155 | RuleOperationHandler.handleRuleOper(ruleCanalBean,mapState); 156 | 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/functions/SourceFunctions.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.functions; 2 | 3 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 4 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; 5 | 6 | import java.util.Properties; 7 | 8 | public class SourceFunctions { 9 | 10 | public static FlinkKafkaConsumer getKafkaEventSource(){ 11 | 12 | Properties props = new Properties(); 13 | props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092"); 14 | props.setProperty("auto.offset.reset", "latest"); 15 | FlinkKafkaConsumer source = new FlinkKafkaConsumer<>("yinew_applog", new SimpleStringSchema(), props); 16 | 17 | 18 | return source; 19 | } 20 | 21 | 22 | public static FlinkKafkaConsumer getKafkaRuleSource() { 23 | 24 | 25 | Properties props = new Properties(); 26 | props.setProperty("bootstrap.servers", "hdp01:9092,hdp02:9092,hdp03:9092"); 27 | props.setProperty("auto.offset.reset", "latest"); 28 | FlinkKafkaConsumer source = new FlinkKafkaConsumer<>("yinew_drl_rule", new SimpleStringSchema(), props); 29 | 30 | 31 | return source; 32 | } 33 | } 34 | 
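RuleProcessFunctionV5 above expects the broadcast stream to carry canal-style JSON describing a change to the MySQL rule table, which it parses into pojo.RuleCanalBean (FlinkDroolsCanalDemo does the same job with demos.CanalRecordBean). No real payload appears in this section, so the snippet below only illustrates the expected shape against RuleCanalBean / RuleTableRecord; the class name CanalJsonShapeDemo and all field values are placeholders:

package cn.doitedu.dynamic_rule.demos;

import cn.doitedu.dynamic_rule.pojo.RuleCanalBean;
import com.alibaba.fastjson.JSON;

// Illustration only: rough shape of a canal-style rule-table message, parsed with the
// same pojo (RuleCanalBean / RuleTableRecord) that RuleProcessFunctionV5 uses.
// Every value below is a placeholder, not a captured payload.
public class CanalJsonShapeDemo {
    public static void main(String[] args) {
        String canalJson = "{\"type\":\"INSERT\",\"data\":[{"
                + "\"id\":1,"
                + "\"rule_name\":\"rule2\","
                + "\"rule_code\":\"...drl source...\","
                + "\"rule_status\":1,"
                + "\"rule_type\":\"1\","
                + "\"cnt_sqls\":\"...count sql...\","
                + "\"seq_sqls\":\"...sequence sql...\""
                + "}]}";

        RuleCanalBean bean = JSON.parseObject(canalJson, RuleCanalBean.class);
        // prints: INSERT -> rule2
        System.out.println(bean.getType() + " -> " + bean.getData().get(0).getRule_name());
    }
}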
-------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/moduletest/ActionCountsQueryClickhouseTest.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.moduletest; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 4 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 5 | import cn.doitedu.dynamic_rule.service.UserActionCountQueryServiceClickhouseImpl; 6 | 7 | import java.util.ArrayList; 8 | import java.util.HashMap; 9 | import java.util.List; 10 | 11 | public class ActionCountsQueryClickhouseTest { 12 | 13 | public static void main(String[] args) throws Exception { 14 | 15 | UserActionCountQueryServiceClickhouseImpl impl = new UserActionCountQueryServiceClickhouseImpl(); 16 | 17 | 18 | // 构造2个规则原子条件 19 | RuleAtomicParam param1 = new RuleAtomicParam(); 20 | param1.setEventId("B"); 21 | HashMap paramProps1 = new HashMap<>(); 22 | paramProps1.put("p1","v7"); 23 | param1.setRangeStart(0); 24 | param1.setRangeEnd(Long.MAX_VALUE); 25 | param1.setProperties(paramProps1); 26 | param1.setCnt(2); 27 | 28 | RuleAtomicParam param2 = new RuleAtomicParam(); 29 | param2.setEventId("W"); 30 | HashMap paramProps2 = new HashMap<>(); 31 | paramProps2.put("p2","v3"); 32 | param2.setProperties(paramProps2); 33 | param2.setRangeStart(0); 34 | param2.setRangeEnd(Long.MAX_VALUE); 35 | param2.setCnt(2); 36 | 37 | ArrayList ruleParams = new ArrayList<>(); 38 | ruleParams.add(param1); 39 | ruleParams.add(param2); 40 | 41 | RuleParam ruleParam = new RuleParam(); 42 | ruleParam.setUserActionCountParams(ruleParams); 43 | 44 | 45 | boolean b = impl.queryActionCounts("000001", ruleParam); 46 | List params = ruleParam.getUserActionCountParams(); 47 | for (RuleAtomicParam param : params) { 48 | System.out.println(param.getCnt() + ", " + param.getRealCnt()); 49 | } 50 | 51 | 52 | System.out.println(b); 53 | 54 | 55 | 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/moduletest/ActionCountsQueryTest.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.moduletest; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 5 | import cn.doitedu.dynamic_rule.service.UserActionCountQueryServiceStateImpl; 6 | 7 | import java.util.ArrayList; 8 | import java.util.HashMap; 9 | 10 | /** 11 | * @author 涛哥 12 | * @nick_name "deep as the sea" 13 | * @contact qq:657270652 wx:doit_edu 14 | * @site www.doitedu.cn 15 | * @date 2021-03-28 16 | * @desc 行为次数查询服务功能测试 17 | */ 18 | public class ActionCountsQueryTest { 19 | public static void main(String[] args) { 20 | 21 | 22 | UserActionCountQueryServiceStateImpl service = new UserActionCountQueryServiceStateImpl(null); 23 | 24 | // 构造几个明细事件 25 | LogBean logBean1 = new LogBean(); 26 | logBean1.setEventId("000010"); 27 | HashMap props1 = new HashMap<>(); 28 | props1.put("p1","v1"); 29 | logBean1.setProperties(props1); 30 | 31 | 32 | LogBean logBean2 = new LogBean(); 33 | logBean2.setEventId("000010"); 34 | HashMap props2 = new HashMap<>(); 35 | props2.put("p1","v2"); 36 | logBean2.setProperties(props2); 37 | 38 | 39 | LogBean logBean3 = new LogBean(); 40 | logBean3.setEventId("000020"); 41 | HashMap props3 = new HashMap<>(); 42 | props3.put("p2","v3"); 43 | logBean3.setProperties(props3); 44 | 45 | 46 | LogBean 
logBean4 = new LogBean(); 47 | logBean4.setEventId("000020"); 48 | HashMap props4 = new HashMap<>(); 49 | props4.put("p2","v3"); 50 | props4.put("p3","v4"); 51 | logBean4.setProperties(props4); 52 | 53 | 54 | ArrayList eventList = new ArrayList<>(); 55 | eventList.add(logBean1); 56 | eventList.add(logBean2); 57 | eventList.add(logBean3); 58 | eventList.add(logBean4); 59 | 60 | 61 | // 构造2个规则原子条件 62 | RuleAtomicParam param1 = new RuleAtomicParam(); 63 | param1.setEventId("000010"); 64 | HashMap paramProps1 = new HashMap<>(); 65 | paramProps1.put("p1","v1"); 66 | param1.setProperties(paramProps1); 67 | param1.setCnt(2); 68 | 69 | RuleAtomicParam param2 = new RuleAtomicParam(); 70 | param2.setEventId("000020"); 71 | HashMap paramProps2 = new HashMap<>(); 72 | paramProps2.put("p2","v3"); 73 | param2.setProperties(paramProps2); 74 | param2.setCnt(2); 75 | 76 | ArrayList ruleParams = new ArrayList<>(); 77 | ruleParams.add(param1); 78 | ruleParams.add(param2); 79 | 80 | service.queryActionCountsHelper(eventList,ruleParams); 81 | 82 | for (RuleAtomicParam ruleParam : ruleParams) { 83 | System.out.println(ruleParam.getEventId()+","+ruleParam.getCnt() + "," + ruleParam.getRealCnt()); 84 | } 85 | 86 | 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/moduletest/ActionSequenceQueryClickhouseTest.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.moduletest; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 4 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 5 | import cn.doitedu.dynamic_rule.service.UserActionSequenceQueryServiceClickhouseImpl; 6 | 7 | import java.util.ArrayList; 8 | import java.util.HashMap; 9 | 10 | /** 11 | * @author 涛哥 12 | * @nick_name "deep as the sea" 13 | * @contact qq:657270652 wx:doit_edu 14 | * @site www.doitedu.cn 15 | * @date 2021-03-30 16 | * @desc 用户行为路径类匹配查询测试 17 | */ 18 | public class ActionSequenceQueryClickhouseTest { 19 | public static void main(String[] args) throws Exception { 20 | 21 | String sql = "SELECT\n" + 22 | " deviceId,\n" + 23 | " sequenceMatch('.*(?1).*(?2).*(?3)')(\n" + 24 | " toDateTime(`timeStamp`),\n" + 25 | " eventId = 'Y' and properties['p1']='vy',\n" + 26 | " eventId = 'B' and properties['p6']='v4',\n" + 27 | " eventId = 'O' and properties['p1']='v9'\n" + 28 | " ) as isMatch3,\n" + 29 | " \n" + 30 | " sequenceMatch('.*(?1).*(?2).*')(\n" + 31 | " toDateTime(`timeStamp`),\n" + 32 | " eventId = 'Y' and properties['p1']='vy',\n" + 33 | " eventId = 'B' and properties['p6']='v4',\n" + 34 | " eventId = 'O' and properties['p1']='v9'\n" + 35 | " ) as isMatch2,\n" + 36 | " \n" + 37 | " sequenceMatch('.*(?1).*')(\n" + 38 | " toDateTime(`timeStamp`),\n" + 39 | " eventId = 'Y' and properties['p1']='vy',\n" + 40 | " eventId = 'B' and properties['p6']='v4',\n" + 41 | " eventId = 'O' and properties['p1']='v9'\n" + 42 | " ) as isMatch1\n" + 43 | "\n" + 44 | "from yinew_detail\n" + 45 | "where \n" + 46 | " deviceId = '000001' \n" + 47 | " and \n" + 48 | " timeStamp >= 0\n" + 49 | " and \n" + 50 | " timeStamp <= 5235295739479\n" + 51 | " and \n" + 52 | " (\n" + 53 | " (eventId='Y' and properties['p1']='vy')\n" + 54 | " or (eventId = 'B' and properties['p6']='v4')\n" + 55 | " or (eventId = 'O' and properties['p1']='v9')\n" + 56 | " )\n" + 57 | "group by deviceId;"; 58 | 59 | 60 | // 构造一个序列条件 61 | RuleAtomicParam param1 = new RuleAtomicParam(); 62 | param1.setEventId("Y"); 63 | 
HashMap paramProps1 = new HashMap<>(); 64 | paramProps1.put("p1","v1"); 65 | param1.setProperties(paramProps1); 66 | param1.setRangeStart(0); 67 | param1.setRangeEnd(Long.MAX_VALUE); 68 | 69 | RuleAtomicParam param2 = new RuleAtomicParam(); 70 | param2.setEventId("B"); 71 | HashMap paramProps2 = new HashMap<>(); 72 | paramProps2.put("p6","v4"); 73 | param2.setProperties(paramProps2); 74 | param2.setRangeStart(0); 75 | param2.setRangeEnd(Long.MAX_VALUE); 76 | 77 | 78 | 79 | RuleAtomicParam param3 = new RuleAtomicParam(); 80 | param3.setEventId("O"); 81 | HashMap paramProps3 = new HashMap<>(); 82 | paramProps3.put("p1","v9"); 83 | param3.setProperties(paramProps3); 84 | param3.setRangeStart(0); 85 | param3.setRangeEnd(Long.MAX_VALUE); 86 | 87 | ArrayList ruleParams = new ArrayList<>(); 88 | ruleParams.add(param1); 89 | ruleParams.add(param2); 90 | ruleParams.add(param3); 91 | 92 | 93 | RuleParam ruleParam = new RuleParam(); 94 | ruleParam.setUserActionSequenceParams(ruleParams); 95 | ruleParam.setActionSequenceQuerySql(sql); 96 | 97 | 98 | UserActionSequenceQueryServiceClickhouseImpl impl = new UserActionSequenceQueryServiceClickhouseImpl(); 99 | 100 | boolean b = impl.queryActionSequence("000001", ruleParam); 101 | System.out.println(ruleParam.getUserActionSequenceQueriedMaxStep()); 102 | System.out.println(b); 103 | 104 | } 105 | 106 | } 107 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/moduletest/ActionSequenceQueryTest.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.moduletest; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 5 | import cn.doitedu.dynamic_rule.service.UserActionSequenceQueryServiceStateImpl; 6 | 7 | import java.util.ArrayList; 8 | import java.util.HashMap; 9 | 10 | public class ActionSequenceQueryTest { 11 | public static void main(String[] args) { 12 | 13 | // 构造一些事件 14 | LogBean logBean1 = new LogBean(); 15 | logBean1.setEventId("000010"); 16 | HashMap props1 = new HashMap<>(); 17 | props1.put("p1","v1"); 18 | logBean1.setProperties(props1); 19 | 20 | LogBean logBean5 = new LogBean(); 21 | logBean5.setEventId("000020"); 22 | HashMap props5 = new HashMap<>(); 23 | props5.put("p2","v3"); 24 | logBean5.setProperties(props5); 25 | 26 | 27 | LogBean logBean2 = new LogBean(); 28 | logBean2.setEventId("000310"); 29 | HashMap props2 = new HashMap<>(); 30 | props2.put("p1","v2"); 31 | logBean2.setProperties(props2); 32 | 33 | 34 | LogBean logBean3 = new LogBean(); 35 | logBean3.setEventId("000020"); 36 | HashMap props3 = new HashMap<>(); 37 | props3.put("p2","v3"); 38 | props3.put("p4","v5"); 39 | logBean3.setProperties(props3); 40 | 41 | 42 | LogBean logBean4 = new LogBean(); 43 | logBean4.setEventId("000022"); 44 | HashMap props4 = new HashMap<>(); 45 | props4.put("p2","v3"); 46 | props4.put("p3","v4"); 47 | logBean4.setProperties(props4); 48 | 49 | 50 | ArrayList eventList = new ArrayList<>(); 51 | eventList.add(logBean1); 52 | eventList.add(logBean5); 53 | eventList.add(logBean2); 54 | eventList.add(logBean3); 55 | eventList.add(logBean4); 56 | 57 | // 构造一个序列条件 58 | RuleAtomicParam param1 = new RuleAtomicParam(); 59 | param1.setEventId("000010"); 60 | HashMap paramProps1 = new HashMap<>(); 61 | paramProps1.put("p1","v1"); 62 | param1.setProperties(paramProps1); 63 | 64 | RuleAtomicParam param2 = new RuleAtomicParam(); 65 | 
param2.setEventId("000020"); 66 | HashMap paramProps2 = new HashMap<>(); 67 | paramProps2.put("p2","v3"); 68 | param2.setProperties(paramProps2); 69 | 70 | RuleAtomicParam param3 = new RuleAtomicParam(); 71 | param3.setEventId("000020"); 72 | HashMap paramProps3 = new HashMap<>(); 73 | paramProps3.put("p4","v6"); 74 | param3.setProperties(paramProps3); 75 | 76 | ArrayList ruleParams = new ArrayList<>(); 77 | ruleParams.add(param1); 78 | ruleParams.add(param2); 79 | ruleParams.add(param3); 80 | 81 | 82 | // 调用sevice计算 83 | UserActionSequenceQueryServiceStateImpl service = new UserActionSequenceQueryServiceStateImpl(null); 84 | int maxStep = service.queryActionSequenceHelper2(eventList, ruleParams); 85 | System.out.println(maxStep); 86 | 87 | 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/moduletest/ProfileQueryTest.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.moduletest; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 4 | import cn.doitedu.dynamic_rule.service.UserProfileQueryServiceHbaseImpl; 5 | import org.junit.Test; 6 | 7 | import java.io.IOException; 8 | import java.util.HashMap; 9 | 10 | /** 11 | * @author 涛哥 12 | * @nick_name "deep as the sea" 13 | * @contact qq:657270652 wx:doit_edu 14 | * @site www.doitedu.cn 15 | * @date 2021-03-28 16 | * @desc 画像条件查询服务模块测试类 17 | */ 18 | public class ProfileQueryTest { 19 | 20 | @Test 21 | public void testQueryProfile() throws IOException { 22 | // 构造参数 23 | HashMap userProfileParams = new HashMap<>(); 24 | userProfileParams.put("tag12","v92"); 25 | userProfileParams.put("tag22","v3"); 26 | 27 | RuleParam ruleParam = new RuleParam(); 28 | ruleParam.setUserProfileParams(userProfileParams); 29 | 30 | 31 | // 构造一个查询服务 32 | UserProfileQueryServiceHbaseImpl impl = new UserProfileQueryServiceHbaseImpl(); 33 | boolean b = impl.judgeProfileCondition("000645", ruleParam); 34 | System.out.println(b); 35 | 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/BufferAvailableLevel.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | public enum BufferAvailableLevel { 4 | // 缓存结果部分有效 5 | PARTIAL_AVL, 6 | // 缓存结果完全有效 7 | WHOLE_AVL, 8 | // 缓存结果无效 9 | UN_AVL 10 | 11 | } 12 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/BufferResult.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import lombok.ToString; 7 | 8 | /** 9 | * @author 涛哥 10 | * @nick_name "deep as the sea" 11 | * @contact qq:657270652 wx:doit_edu 12 | * @site www.doitedu.cn 13 | * @date 2021-03-31 14 | * @desc 封装从缓存中查询到的结果的实体 15 | */ 16 | @Data 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @ToString 20 | public class BufferResult { 21 | 22 | // 缓存结果所对应的key 23 | private String bufferKey; 24 | 25 | // 缓存结果中的value 26 | private Integer bufferValue; 27 | 28 | // 缓存数据的时间窗口起始 29 | private Long bufferRangeStart; 30 | 31 | // 缓存数据的时间窗口结束 32 | private Long bufferRangeEnd; 33 | 34 | // 缓存结果的有效性等级 35 | private BufferAvailableLevel 
bufferAvailableLevel; 36 | 37 | 38 | // 调整后的后续查询窗口起始点 39 | private Long outSideQueryStart; 40 | 41 | 42 | } 43 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/DroolFact.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | 4 | import cn.doitedu.dynamic_rule.service.QueryRouterV4; 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | /** 10 | * @author 涛哥 11 | * @nick_name "deep as the sea" 12 | * @contact qq:657270652 wx:doit_edu 13 | * @site www.doitedu.cn 14 | * @date 2021-04-07 15 | * @desc 封装要insert到drools kiesession中数据的fact实体 16 | */ 17 | 18 | @Data 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | public class DroolFact { 22 | 23 | private LogBean logBean; 24 | 25 | private RuleParam ruleParam; 26 | 27 | private QueryRouterV4 queryRouterV4; 28 | 29 | private boolean match; 30 | 31 | 32 | } 33 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/LogBean.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import lombok.ToString; 7 | 8 | import java.util.Map; 9 | 10 | @Data 11 | @NoArgsConstructor 12 | @AllArgsConstructor 13 | @ToString 14 | public class LogBean { 15 | 16 | private String account ; 17 | private String appId ; 18 | private String appVersion ; 19 | private String carrier ; 20 | private String deviceId ; 21 | private String deviceType ; 22 | private String ip ; 23 | private double latitude ; 24 | private double longitude ; 25 | private String netType ; 26 | private String osName ; 27 | private String osVersion ; 28 | private String releaseChannel ; 29 | private String resolution ; 30 | private String sessionId ; 31 | private long timeStamp ; 32 | private String eventId ; 33 | private Map properties; 34 | 35 | } 36 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/ResultBean.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | @AllArgsConstructor 10 | public class ResultBean { 11 | 12 | private String ruleId; 13 | private String deviceId; 14 | private long timeStamp; 15 | } 16 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/RuleAtomicParam.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | import java.io.Serializable; 8 | import java.util.HashMap; 9 | 10 | /** 11 | * @author 涛哥 12 | * @nick_name "deep as the sea" 13 | * @contact qq:657270652 wx:doit_edu 14 | * @site www.doitedu.cn 15 | * @date 2021-03-28 16 | * @desc 规则参数中的原子条件封装实体 17 | */ 18 | @Data 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | public class RuleAtomicParam implements Serializable { 22 | 23 | // 
事件的类型要求 24 | private String eventId; 25 | 26 | // 事件的属性要求 27 | private HashMap properties; 28 | 29 | // 规则要求的阈值 30 | private int cnt; 31 | 32 | // 要求的事件发生时间段起始 33 | private long rangeStart; 34 | 35 | // 要求的事件发生时间段结束 36 | private long rangeEnd; 37 | 38 | // 条件对应的clickhouse查询sql 39 | private String countQuerySql; 40 | 41 | // 用于记录查询服务所返回的查询值 42 | private int realCnt; 43 | 44 | // 用于记录初始 range 45 | private long originStart; 46 | public void setOriginStart(long originStart){ 47 | this.originStart = originStart; 48 | this.rangeStart = originStart; 49 | } 50 | private long originEnd; 51 | public void setOriginEnd(long originEnd){ 52 | this.originEnd = originEnd; 53 | this.rangeEnd = originEnd; 54 | } 55 | 56 | 57 | } 58 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/RuleCanalBean.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | import java.util.List; 8 | 9 | @Data 10 | @AllArgsConstructor 11 | @NoArgsConstructor 12 | public class RuleCanalBean { 13 | 14 | private List data; 15 | private String type; 16 | 17 | } 18 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/RuleParam.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | import java.io.Serializable; 8 | import java.util.HashMap; 9 | import java.util.List; 10 | 11 | /** 12 | * @author 涛哥 13 | * @nick_name "deep as the sea" 14 | * @contact qq:657270652 wx:doit_edu 15 | * @site www.doitedu.cn 16 | * @date 2021-03-28 17 | * @desc 规则整体条件封装实体 18 | * 19 | * 需求中要实现的判断规则: 20 | * 触发条件:E事件 21 | * 画像属性条件: k3=v3 , k100=v80 , k230=v360 22 | * 行为属性条件: U(p1=v3,p2=v2) >= 3次 且 G(p6=v8,p4=v5,p1=v2)>=1 23 | * 行为次序条件: 依次做过: W(p1=v4) -> R(p2=v3) -> F 24 | * 25 | */ 26 | @Data 27 | @NoArgsConstructor 28 | @AllArgsConstructor 29 | public class RuleParam implements Serializable { 30 | private String ruleName; 31 | 32 | // 规则中的触发条件 33 | private RuleAtomicParam triggerParam; 34 | 35 | // 规则中的用户画像条件 36 | private HashMap userProfileParams; 37 | 38 | // 规则中的行为次数类条件 39 | private List userActionCountParams; 40 | 41 | // 规则中的行为序列类条件 42 | private List userActionSequenceParams; 43 | 44 | // 序列模式匹配查询sql 45 | private String actionSequenceQuerySql; 46 | 47 | // 用于记录查询服务所返回的序列中匹配的最大步骤号 48 | private int userActionSequenceQueriedMaxStep; 49 | 50 | 51 | 52 | } 53 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/RuleStateBean.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | 4 | import lombok.AllArgsConstructor; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | import org.kie.api.runtime.KieSession; 8 | 9 | /** 10 | * @author 涛哥 11 | * @nick_name "deep as the sea" 12 | * @contact qq:657270652 wx:doit_edu 13 | * @site www.doitedu.cn 14 | * @date 2021-04-07 15 | * @desc 用于封装放入state中的规则相关信息 16 | */ 17 | @Data 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | public class RuleStateBean { 21 | 22 | private String ruleName; 
23 | private KieSession kieSession; 24 | private RuleParam ruleParam; 25 | private String ruleType; 26 | // cn.doitedu.dynamic_rule.service.XqueryRouter 27 | private String routerClass; 28 | private String cntSqls; 29 | private String seqSqls; 30 | 31 | } 32 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/pojo/RuleTableRecord.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @AllArgsConstructor 9 | @NoArgsConstructor 10 | public class RuleTableRecord { 11 | 12 | private int id; 13 | private String rule_name; 14 | private String rule_code; 15 | private int rule_status; 16 | private String rule_type; 17 | private String rule_versioin; 18 | private String cnt_sqls; 19 | private String seq_sqls; 20 | 21 | } 22 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/BufferManager.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.BufferAvailableLevel; 4 | import cn.doitedu.dynamic_rule.pojo.BufferResult; 5 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 6 | import lombok.extern.slf4j.Slf4j; 7 | import redis.clients.jedis.Jedis; 8 | 9 | /** 10 | * @author 涛哥 11 | * @nick_name "deep as the sea" 12 | * @contact qq:657270652 wx:doit_edu 13 | * @site www.doitedu.cn 14 | * @date 2021-03-31 15 | * @desc 缓存管理器 16 | */ 17 | @Slf4j 18 | public class BufferManager { 19 | Jedis jedis; 20 | 21 | public BufferManager(){ 22 | jedis = new Jedis("hdp02", 6379); 23 | } 24 | 25 | 26 | /** 27 | * 获取缓存数据并返回 28 | * @param bufferKey 缓存key 29 | * @param atomicParam 规则原子条件对象 30 | * @return 缓存数据 31 | */ 32 | public BufferResult getBufferData(String bufferKey,RuleAtomicParam atomicParam){ 33 | BufferResult bufferResult = getBufferData(bufferKey, atomicParam.getRangeStart(), atomicParam.getRangeEnd(), atomicParam.getCnt()); 34 | return bufferResult; 35 | } 36 | 37 | /** 38 | * 获取缓存数据并返回 39 | * @param bufferKey 缓存key 40 | * @param paramRangeStart 缓存数据时间start 41 | * @param paramRangeEnd 缓存数据时间end 42 | * @param threshold 缓存数据对应查询条件的阈值 43 | * @return 缓存数据结果 44 | */ 45 | public BufferResult getBufferData(String bufferKey,long paramRangeStart,long paramRangeEnd,int threshold){ 46 | // 准备缓存返回结果实体对象 47 | BufferResult bufferResult = new BufferResult(); 48 | bufferResult.setBufferAvailableLevel(BufferAvailableLevel.UN_AVL); 49 | 50 | /* 51 | * 解析缓存数据 52 | * 格式: 2|t1,t8 53 | */ 54 | String data = jedis.get(bufferKey); 55 | // data可能为null,这里直接切,会产生空指针异常 56 | if(data ==null || data.split("\\|").length<2) return bufferResult; 57 | 58 | String[] split = data.split("\\|"); 59 | String[] timeRange = split[1].split(","); 60 | 61 | bufferResult.setBufferKey(bufferKey); 62 | bufferResult.setBufferValue(Integer.parseInt(split[0])); 63 | bufferResult.setBufferRangeStart(Long.parseLong(timeRange[0])); 64 | bufferResult.setBufferRangeEnd(Long.parseLong(timeRange[1])); 65 | 66 | /* 67 | * 判断缓存有效性: 完全有效 68 | */ 69 | if(paramRangeStart<= bufferResult.getBufferRangeStart() 70 | && paramRangeEnd>=bufferResult.getBufferRangeEnd() 71 | && bufferResult.getBufferValue()>=threshold){ 72 | 
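                // the cached window lies entirely inside the requested window and its count already reaches the threshold,
                // so the condition can be answered from cache alone without any external query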
bufferResult.setBufferAvailableLevel(BufferAvailableLevel.WHOLE_AVL); 73 | log.warn("{} 缓存完全匹配,value为: {}",bufferKey,bufferResult.getBufferValue()); 74 | } 75 | 76 | /* 77 | * 判断缓存有效性: 部分有效 78 | * TODO 重大bug修改,少了个else,逻辑就完全错误了 79 | */ 80 | else if(paramRangeStart == bufferResult.getBufferRangeStart() && paramRangeEnd>=bufferResult.getBufferRangeEnd()){ 81 | bufferResult.setBufferAvailableLevel(BufferAvailableLevel.PARTIAL_AVL); 82 | // 更新外部后续查询的条件窗口起始点 83 | //bufferResult.setOutSideQueryStart(bufferResult.getBufferRangeEnd()); 84 | log.warn("count缓存部分匹配"); 85 | } 86 | 87 | return bufferResult; 88 | } 89 | 90 | // 插入数据到缓存 91 | public void putBufferData(String bufferKey,int value,long bufferRangeStart,long bufferRangeEnd){ 92 | jedis.setex(bufferKey,4*60*60*1000,value+"|"+bufferRangeStart+","+bufferRangeEnd); 93 | } 94 | 95 | 96 | // 更新已存在的缓存数据 97 | 98 | 99 | // 删除已存在的缓存数据 100 | 101 | } 102 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/UserActionCountQueryService.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 5 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 6 | import org.apache.flink.api.common.state.ListState; 7 | 8 | /** 9 | * @author 涛哥 10 | * @nick_name "deep as the sea" 11 | * @contact qq:657270652 wx:doit_edu 12 | * @site www.doitedu.cn 13 | * @date 2021-03-28 14 | * @desc 用户行为次数类条件查询服务接口 15 | */ 16 | public interface UserActionCountQueryService { 17 | 18 | public boolean queryActionCounts(String deviceId, RuleParam ruleParam) throws Exception; 19 | 20 | public boolean queryActionCounts(String deviceId, RuleAtomicParam atomicParam,String ruleId) throws Exception; 21 | 22 | } 23 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/UserActionCountQueryServiceClickhouseImpl.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 4 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 5 | import cn.doitedu.dynamic_rule.utils.ConnectionUtils; 6 | import lombok.extern.slf4j.Slf4j; 7 | 8 | import java.sql.Connection; 9 | import java.sql.PreparedStatement; 10 | import java.sql.ResultSet; 11 | import java.util.List; 12 | 13 | @Slf4j 14 | public class UserActionCountQueryServiceClickhouseImpl implements UserActionCountQueryService { 15 | 16 | private static Connection conn; 17 | 18 | static { 19 | // 获取clickhouse的jdbc连接对象 20 | try { 21 | conn = ConnectionUtils.getClickhouseConnection(); 22 | } catch (Exception e) { 23 | e.printStackTrace(); 24 | } 25 | } 26 | 27 | 28 | /** 29 | * 30 | * 根据给定的deviceId,查询这个人是否满足ruleParam中的所有“次数类"规则条件 31 | * @param deviceId 要查询的用户 32 | * @param ruleParam 规则参数对象 33 | * @return 条件查询的结果是否成立 34 | * @throws Exception 35 | */ 36 | @Override 37 | public boolean queryActionCounts(String deviceId, RuleParam ruleParam) throws Exception { 38 | 39 | List userActionCountParams = ruleParam.getUserActionCountParams(); 40 | 41 | // 遍历每一个原子条件进行查询判断 42 | for (RuleAtomicParam atomicParam : userActionCountParams) { 43 | queryActionCounts(deviceId,atomicParam,ruleParam.getRuleName()); 44 | } 45 | // 如果走到这一句代码,说明上面的每一个原子条件查询后都满足规则,那么返回最终结果true 46 | return true; 47 | 
} 48 | 49 | /** 50 | * 51 | * 查询单个行为count条件是否成立 52 | * @param deviceId 设备id 53 | * @param atomicParam 原子条件 54 | * @return 是否成立 55 | * @throws Exception 异常 56 | */ 57 | @Override 58 | public boolean queryActionCounts(String deviceId, RuleAtomicParam atomicParam,String ruleId) throws Exception { 59 | // 对当前的原子条件拼接查询sql 60 | String sql = atomicParam.getCountQuerySql(); 61 | // 获取一个clickhouse 的jdbc连接 62 | PreparedStatement ps = conn.prepareStatement(sql); 63 | // 需要将sql中的deviceId占位符替换成真实deviceId 64 | ps.setString(1,deviceId); 65 | ps.setLong(2,atomicParam.getRangeStart()); 66 | ps.setLong(3,atomicParam.getRangeEnd()); 67 | 68 | ResultSet resultSet = ps.executeQuery(); 69 | 70 | /*sql = sql.replaceAll("\\$\\{did\\}",deviceId); 71 | Statement statement = conn.createStatement(); 72 | ResultSet resultSet = statement.executeQuery(sql);*/ 73 | 74 | /* * 75 | * deviceId,cnt 76 | * 000001 ,6 77 | **/ 78 | // resultSet只有一行,所以while只循环一次 79 | while (resultSet.next()) { 80 | // 从结果中取出cnt字段 81 | int realCnt = (int) resultSet.getLong(2); 82 | // 将查询结果赛回规则参数对象 83 | log.info("规则:{},用户:{},查询clickhouse次数条件,查询前cnt:{},本次cnt:{},累加后cnt:{}",ruleId,deviceId,atomicParam.getRealCnt(),realCnt,atomicParam.getRealCnt()+realCnt); 84 | atomicParam.setRealCnt(atomicParam.getRealCnt() + realCnt); 85 | } 86 | 87 | // 只要有一个原子条件查询结果不满足,则直接返回最终结果false 88 | if (atomicParam.getRealCnt() < atomicParam.getCnt()) { 89 | return false; 90 | } else { 91 | return true; 92 | } 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/UserActionCountQueryServiceStateImpl.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 5 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 6 | import cn.doitedu.dynamic_rule.utils.RuleCalcUtil; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.apache.flink.api.common.state.ListState; 9 | 10 | import java.util.List; 11 | 12 | /** 13 | * @author 涛哥 14 | * @nick_name "deep as the sea" 15 | * @contact qq:657270652 wx:doit_edu 16 | * @site www.doitedu.cn 17 | * @date 2021-03-28 18 | * @desc 用户行为次数类条件查询服务实现:在flink的state中统计行为次数 19 | */ 20 | @Slf4j 21 | public class UserActionCountQueryServiceStateImpl implements UserActionCountQueryService { 22 | 23 | ListState eventState; 24 | public UserActionCountQueryServiceStateImpl(ListState eventState){ 25 | 26 | this.eventState = eventState; 27 | } 28 | 29 | /** 30 | * 查询规则参数对象中,要求的用户行为次数类条件是否满足 31 | * 同时,将查询到的真实次数,set回 规则参数对象中 32 | * 33 | * @param ruleParam 规则整体参数对象 34 | * @return 条件是否满足 35 | */ 36 | @Override 37 | public boolean queryActionCounts(String deviceId,RuleParam ruleParam) throws Exception { 38 | 39 | // 取出各个用户行为次数原子条件 40 | List userActionCountParams = ruleParam.getUserActionCountParams(); 41 | 42 | // 取出历史明细数据 43 | Iterable logBeansIterable = eventState.get(); 44 | 45 | // 统计每一个原子条件所发生的真实次数,就在原子条件参数对象中:realCnts 46 | queryActionCountsHelper(logBeansIterable, userActionCountParams); 47 | 48 | // 经过上面的方法执行后,每一个原子条件中,都拥有了一个真实发生次数,我们在此判断是否每个原子条件都满足 49 | for (RuleAtomicParam userActionCountParam : userActionCountParams) { 50 | if (userActionCountParam.getRealCnt() < userActionCountParam.getCnt()) { 51 | return false; 52 | } 53 | } 54 | 55 | // 如果到达这一句话,说明上面的判断中,每个原子条件都满足,则返回整体结果true 56 | return true; 57 | } 58 | 59 | 60 | /** 61 | * 根据传入的历史明细,和规则条件 62 | * 
挨个统计每一个规则原子条件的真实发生次数,并将结果set回规则条件参数中 63 | * 64 | * @param logBeansIterable 65 | * @param userActionCountParams 66 | */ 67 | public void queryActionCountsHelper(Iterable logBeansIterable, List userActionCountParams) { 68 | for (LogBean logBean : logBeansIterable) { 69 | 70 | for (RuleAtomicParam userActionCountParam : userActionCountParams) { 71 | 72 | // 判断当前logbean 和当前 规则原子条件userActionCountParam 是否一致 73 | boolean isMatch = RuleCalcUtil.eventBeanMatchEventParam(logBean, userActionCountParam,true); 74 | log.debug("用户:{},查询了近期count条件,{},查询到的结果:{}",logBean.getDeviceId(),userActionCountParam.getEventId(),userActionCountParam.getRealCnt()); 75 | 76 | // 如果一致,则查询次数结果+1 77 | if (isMatch) { 78 | userActionCountParam.setRealCnt(userActionCountParam.getRealCnt() + 1); 79 | } 80 | } 81 | 82 | } 83 | 84 | } 85 | 86 | 87 | /** 88 | * 接收一个原子count类条件 89 | * 进行查询,并返回是否匹配 90 | * 并且,将查询到的realcount塞回参数对象 91 | * @param deviceId 92 | * @param atomicParam 93 | * @return 94 | * @throws Exception 95 | */ 96 | @Override 97 | public boolean queryActionCounts(String deviceId, RuleAtomicParam atomicParam,String ruleId) throws Exception { 98 | 99 | Iterable logBeans = eventState.get(); 100 | for (LogBean logBean : logBeans) { 101 | boolean b = RuleCalcUtil.eventBeanMatchEventParam(logBean, atomicParam, true); 102 | // 如果事件和条件匹配,则真实次数+1 103 | if(b) atomicParam.setRealCnt(atomicParam.getRealCnt()+1); 104 | } 105 | 106 | return atomicParam.getRealCnt() >= atomicParam.getCnt(); 107 | } 108 | 109 | 110 | } 111 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/UserActionSequenceQueryService.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | 4 | import cn.doitedu.dynamic_rule.pojo.LogBean; 5 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 6 | import org.apache.flink.api.common.state.ListState; 7 | 8 | /** 9 | * @author 涛哥 10 | * @nick_name "deep as the sea" 11 | * @contact qq:657270652 wx:doit_edu 12 | * @site www.doitedu.cn 13 | * @date 2021-03-28 14 | * @desc 用户行为次序列条件查询服务接口 15 | */ 16 | public interface UserActionSequenceQueryService { 17 | 18 | public boolean queryActionSequence(String deviceId, RuleParam ruleParam) throws Exception; 19 | } 20 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/UserActionSequenceQueryServiceClickhouseImpl.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 5 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 6 | import cn.doitedu.dynamic_rule.utils.ConnectionUtils; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.apache.flink.api.common.state.ListState; 9 | 10 | import java.sql.Connection; 11 | import java.sql.PreparedStatement; 12 | import java.sql.ResultSet; 13 | import java.sql.Statement; 14 | 15 | /** 16 | * @author 涛哥 17 | * @nick_name "deep as the sea" 18 | * @contact qq:657270652 wx:doit_edu 19 | * @site www.doitedu.cn 20 | * @date 2021-03-30 21 | * @desc 行为序列类路径匹配查询service,clickhouse实现 22 | */ 23 | @Slf4j 24 | public class UserActionSequenceQueryServiceClickhouseImpl implements UserActionSequenceQueryService{ 25 | 26 | private static Connection conn; 27 | 28 | 29 | static { 30 | try { 31 | conn = 
ConnectionUtils.getClickhouseConnection(); 32 | } catch (Exception e) { 33 | e.printStackTrace(); 34 | } 35 | } 36 | 37 | /** 38 | * 从clickhouse中查询行为序列条件是否满足 39 | * 40 | * @param deviceId 41 | * @param ruleParam 42 | * @return 43 | * @throws Exception 44 | */ 45 | @Override 46 | public boolean queryActionSequence(String deviceId, RuleParam ruleParam) throws Exception { 47 | // 获取规则中,路径模式的总步骤数 48 | int totalStep = ruleParam.getUserActionSequenceParams().size(); 49 | RuleAtomicParam ruleAtomicParam = ruleParam.getUserActionSequenceParams().get(0); 50 | 51 | // 取出查询sql 52 | String sql = ruleParam.getActionSequenceQuerySql(); 53 | PreparedStatement ps = conn.prepareStatement(sql); 54 | // 需要将sql中的deviceId占位符替换成真实deviceId 55 | ps.setString(1,deviceId); 56 | ps.setLong(2,ruleAtomicParam.getRangeStart()); 57 | ps.setLong(3,ruleAtomicParam.getRangeEnd()); 58 | ResultSet resultSet = ps.executeQuery(); 59 | 60 | /* 61 | sql = sql.replaceAll("\\$\\{did\\}",deviceId); 62 | Statement statement = conn.createStatement(); 63 | ResultSet resultSet = statement.executeQuery(sql); 64 | */ 65 | 66 | // 执行查询 67 | long s = System.currentTimeMillis(); 68 | 69 | 70 | 71 | /* 从返回结果中进行条件判断 72 | * ┌─deviceId─┬─isMatch3─┬─isMatch2─┬─isMatch1─┐ 73 | * │ 000001 │ 0 │ 0 │ 1 │ 74 | * └──────────┴──────────┴──────────┴──────────┘ 75 | * 重要逻辑: 查询结果中有几个1,就意味着最大完成步骤是几!!! 76 | */ 77 | int maxStep = 0; 78 | while(resultSet.next()){ // 返回结果最多就1行,这个while就走一次!!! 79 | // 对一行结果中的1进行累加,累加结果即完成的最大步骤数 80 | for(int i =2;i eventState; 28 | 29 | public UserActionSequenceQueryServiceStateImpl(ListState eventState){ 30 | this.eventState = eventState; 31 | } 32 | 33 | 34 | 35 | /** 36 | * 行为序列: A(p1=v2,p3=v8) B(p6=v9,p7=v7) C() 37 | * 意味着用户要依顺序做过上面的事件 38 | *
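     * (other events may be interleaved between the matched steps; only the relative order of the matches matters — see queryActionSequenceHelper2)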

39 | * 查询规则条件中的行为序列条件是否满足 40 | * 会将查询到的最大匹配步骤,set回 ruleParam对象中 41 | * @param ruleParam 规则参数对象 42 | * @return 条件成立与否 43 | */ 44 | @Override 45 | public boolean queryActionSequence(String deviceId,RuleParam ruleParam) throws Exception { 46 | 47 | 48 | Iterable logBeans = eventState.get(); 49 | List userActionSequenceParams = ruleParam.getUserActionSequenceParams(); 50 | 51 | // 调用helper统计实际匹配的最大步骤号 52 | int maxStep = queryActionSequenceHelper2(logBeans, userActionSequenceParams); 53 | 54 | // 将这个maxStep丢回规则参数对象,以便本服务的调用者可以根据需要获取到这个最大匹配步骤号 55 | ruleParam.setUserActionSequenceQueriedMaxStep(ruleParam.getUserActionSequenceQueriedMaxStep()+maxStep); 56 | 57 | // 然后判断整个序列条件是否满足:真实最大匹配步骤是否等于条件的步骤数 58 | log.debug("序列匹配:state,规则:{},用户:{},结果maxStep:{},条件步数:{}, ",ruleParam.getRuleName(),deviceId,maxStep,userActionSequenceParams.size()); 59 | return maxStep == userActionSequenceParams.size(); 60 | } 61 | 62 | 63 | /** 64 | * 统计明细事件中,与序列条件匹配到的最大步骤 65 | * 66 | * @param events 事件明细 67 | * @param userActionSequenceParams 行为序列规则条件 68 | * @return 行为次数 69 | */ 70 | public int queryActionSequenceHelper(Iterable events, List userActionSequenceParams) { 71 | 72 | ArrayList eventList = new ArrayList<>(); 73 | CollectionUtils.addAll(eventList, events.iterator()); 74 | 75 | // 外循环,遍历每一个条件 76 | int maxStep = 0; 77 | int index = 0; 78 | for (RuleAtomicParam userActionSequenceParam : userActionSequenceParams) { 79 | 80 | // 内循环,遍历每一个历史明细事件,看看能否找到与当前条件匹配的事件 81 | boolean isFind = false; 82 | for (int i = index; i < eventList.size(); i++) { 83 | LogBean logBean = eventList.get(i); 84 | // 判断当前的这个事件 logBean,是否满足当前规则条件 userActionSequenceParam 85 | boolean match = RuleCalcUtil.eventBeanMatchEventParam(logBean, userActionSequenceParam, true); 86 | // 如果匹配,则最大步骤号+1,且更新下一次内循环的起始位置,并跳出本轮内循环 87 | if (match) { 88 | maxStep++; 89 | index = i + 1; 90 | isFind = true; 91 | break; 92 | } 93 | } 94 | 95 | if (!isFind) break; 96 | 97 | } 98 | 99 | log.debug("在state中步骤匹配计算完成: 查询到的最大步骤号为: " + maxStep + ",条件中的步骤数为:" + userActionSequenceParams.size()); 100 | return maxStep; 101 | } 102 | 103 | /** 104 | * 序列匹配,性能改进版 105 | * @param events 事件明细 106 | * @param userActionSequenceParams 行为序列规则条件 107 | * @return 次数 108 | */ 109 | public int queryActionSequenceHelper2(Iterable events, List userActionSequenceParams) { 110 | 111 | int maxStep = 0; 112 | for (LogBean event : events) { 113 | if (RuleCalcUtil.eventBeanMatchEventParam(event, userActionSequenceParams.get(maxStep))) { 114 | maxStep++; 115 | } 116 | if (maxStep == userActionSequenceParams.size()) break; 117 | } 118 | return maxStep; 119 | } 120 | 121 | } 122 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/UserProfileQueryService.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 4 | 5 | import java.io.IOException; 6 | 7 | /** 8 | * @author 涛哥 9 | * @nick_name "deep as the sea" 10 | * @contact qq:657270652 wx:doit_edu 11 | * @site www.doitedu.cn 12 | * @date 2021-03-28 13 | * @desc 用户画像数据查询服务接口 14 | */ 15 | public interface UserProfileQueryService { 16 | 17 | public boolean judgeProfileCondition(String deviceId, RuleParam ruleParam); 18 | 19 | } 20 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/service/UserProfileQueryServiceHbaseImpl.java: 
-------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.service; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.hadoop.conf.Configuration; 6 | import org.apache.hadoop.hbase.TableName; 7 | import org.apache.hadoop.hbase.client.*; 8 | 9 | import java.io.IOException; 10 | import java.util.HashMap; 11 | import java.util.Set; 12 | 13 | /** 14 | * @author 涛哥 15 | * @nick_name "deep as the sea" 16 | * @contact qq:657270652 wx:doit_edu 17 | * @site www.doitedu.cn 18 | * @date 2021-03-28 19 | * @desc 用户画像查询服务,hbase查询实现类 20 | */ 21 | @Slf4j 22 | public class UserProfileQueryServiceHbaseImpl implements UserProfileQueryService { 23 | 24 | static Connection conn; 25 | static Table table; 26 | 27 | static { 28 | Configuration conf = new Configuration(); 29 | conf.set("hbase.zookeeper.quorum", "hdp01:2181,hdp02:2181,hdp03:2181"); 30 | 31 | log.debug("hbase连接准备创建"); 32 | try { 33 | conn = ConnectionFactory.createConnection(conf); 34 | table = conn.getTable(TableName.valueOf("yinew_profile")); 35 | log.debug("hbase连接创建完毕"); 36 | }catch (Exception e){ 37 | e.printStackTrace(); 38 | } 39 | } 40 | 41 | /** 42 | * 构造函数 43 | */ 44 | public UserProfileQueryServiceHbaseImpl() throws IOException { 45 | /*Configuration conf = new Configuration(); 46 | conf.set("hbase.zookeeper.quorum", "hdp01:2181,hdp02:2181,hdp03:2181"); 47 | 48 | log.debug("hbase连接准备创建"); 49 | conn = ConnectionFactory.createConnection(conf); 50 | table = conn.getTable(TableName.valueOf("yinew_profile")); 51 | log.debug("hbase连接创建完毕");*/ 52 | } 53 | 54 | /** 55 | * 传入一个用户号,以及要查询的条件 56 | * 返回这些条件是否满足 57 | * TODO 本查询只返回了成立与否,而查询到的画像数据值并没有返回 可能为将来的缓存模块带来不便,有待改造 58 | * @param deviceId 设备id 59 | * @param ruleParam 规则参数对象 60 | * @return 是否成立 61 | */ 62 | @Override 63 | public boolean judgeProfileCondition(String deviceId, RuleParam ruleParam){ 64 | 65 | // 从规则参数中取出画像标签属性条件 66 | HashMap userProfileParams = ruleParam.getUserProfileParams(); 67 | 68 | // 取出条件中所要求的所有待查询标签名 69 | Set tagNames = userProfileParams.keySet(); 70 | 71 | // 构造一个hbase的查询参数对象 72 | Get get = new Get(deviceId.getBytes()); 73 | // 把要查询的标签(hbase表中的列)逐一添加到get参数中 74 | for (String tagName : tagNames) { 75 | get.addColumn("f".getBytes(),tagName.getBytes()); 76 | } 77 | 78 | 79 | 80 | // 调用hbase的api执行查询 81 | String valueStr = ""; 82 | long ts =0; 83 | long te =0; 84 | try { 85 | ts = System.currentTimeMillis(); 86 | Result result = table.get(get); 87 | // 判断结果和条件中的要求是否一致 88 | for (String tagName : tagNames) { 89 | // 从查询结果中取出该标签的值 90 | byte[] valueBytes = result.getValue("f".getBytes(), tagName.getBytes()); 91 | // 判断查询到的value和条件中要求的value是否一致,如果不一致,方法直接返回:false 92 | te = System.currentTimeMillis(); 93 | if(valueBytes == null){ 94 | log.info("规则:{},用户:{},查询Hbase,要求的条件是:{},{},查询结果为:{},匹配失败,耗费时长:{}",ruleParam.getRuleName(), 95 | deviceId,tagName,userProfileParams.get(tagName),"null",te-ts); 96 | return false; 97 | } 98 | valueStr = new String(valueBytes); 99 | if(!valueStr.equals(userProfileParams.get(tagName))){ 100 | log.debug("规则:{},用户:{},查询Hbase,要求的条件是:{},{},查询结果为:{},匹配失败,耗费时长:{}",ruleParam.getRuleName(), 101 | deviceId,tagName,userProfileParams.get(tagName),new String(valueBytes),te-ts); 102 | return false; 103 | } 104 | } 105 | 106 | log.info("规则:{},用户:{},查询Hbase,要求的条件是:{},查询结果为:{},匹配成功,耗费时长:{}",ruleParam.getRuleName(), 107 | deviceId,userProfileParams,valueStr,te-ts); 108 | // 如果上面的for循环走完了,那说明每个标签的查询值都等于条件中要求的值,则可以返回true 109 | return true; 110 | } catch 
(IOException e) { 111 | e.printStackTrace(); 112 | } 113 | 114 | // 如果到了这,说明前面的查询出异常了,返回false即可 115 | return false; 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/utils/ClickhouseCountQuerySqlUtil.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.utils; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 4 | 5 | import java.util.HashMap; 6 | import java.util.List; 7 | import java.util.Map; 8 | import java.util.Set; 9 | 10 | /** 11 | * @author 涛哥 12 | * @nick_name "deep as the sea" 13 | * @contact qq:657270652 wx:doit_edu 14 | * @site www.doitedu.cn 15 | * @date 2021-03-30 16 | * @desc 行为次数类条件查询sql拼接工具 17 | */ 18 | public class ClickhouseCountQuerySqlUtil { 19 | 20 | public static String getSql(String deviceId,RuleAtomicParam atomicParam){ 21 | 22 | String templet1 = 23 | " select " + 24 | "\n deviceId,count() as cnt " + 25 | "\n from yinew_detail " + 26 | "\n where deviceId= '" +deviceId+"'" + 27 | "\n and " + 28 | "\n eventId = '" + atomicParam.getEventId()+"' " + 29 | "\n and " + 30 | "\n timeStamp >= "+atomicParam.getRangeStart()+" and timeStamp <=" + atomicParam.getRangeEnd(); 31 | 32 | 33 | String templet3 = "\n group by deviceId"; 34 | 35 | 36 | HashMap properties = atomicParam.getProperties(); 37 | Set> entries = properties.entrySet(); 38 | 39 | 40 | StringBuffer sb = new StringBuffer(); 41 | for (Map.Entry entry : entries) { 42 | // "and properties['pageId'] = 'page006'" 43 | sb.append("\n and properties['"+entry.getKey()+"'] = '"+entry.getValue()+"'"); 44 | 45 | } 46 | 47 | return templet1+sb.toString()+ templet3; 48 | } 49 | 50 | public static void main(String[] args) { 51 | 52 | RuleAtomicParam ruleAtomicParam = new RuleAtomicParam(); 53 | ruleAtomicParam.setEventId("R"); 54 | HashMap props = new HashMap<>(); 55 | props.put("p1","v2"); 56 | props.put("p3","v4"); 57 | props.put("p7","v5"); 58 | ruleAtomicParam.setProperties(props); 59 | ruleAtomicParam.setRangeStart(1616925000815L); 60 | ruleAtomicParam.setRangeEnd(1616925004815L); 61 | 62 | String sql = getSql("000001", ruleAtomicParam); 63 | System.out.println(sql); 64 | 65 | 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/utils/ConnectionUtils.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.utils; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | import java.sql.Connection; 6 | import java.sql.DriverManager; 7 | 8 | /** 9 | * @author 涛哥 10 | * @nick_name "deep as the sea" 11 | * @contact qq:657270652 wx:doit_edu 12 | * @site www.doitedu.cn 13 | * @date 2021-03-30 14 | * @desc 各类外部链接创建工具类 15 | */ 16 | @Slf4j 17 | public class ConnectionUtils { 18 | 19 | 20 | public static Connection getClickhouseConnection() throws Exception { 21 | //String ckDriver = "com.github.housepower.jdbc.ClickHouseDriver"; 22 | String ckDriver = "ru.yandex.clickhouse.ClickHouseDriver"; 23 | String ckUrl = "jdbc:clickhouse://192.168.77.63:8123/default"; 24 | String table = "yinew_detail"; 25 | 26 | Class.forName(ckDriver); 27 | Connection conn = DriverManager.getConnection(ckUrl); 28 | log.debug("clickhouse jdbc 连接创建完成"); 29 | return conn; 30 | } 31 | 32 | 33 | } 34 | -------------------------------------------------------------------------------- 
/dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/utils/RuleCalcUtil.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.utils; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 5 | import org.apache.commons.lang3.time.DateUtils; 6 | 7 | import java.util.*; 8 | 9 | /** 10 | * @author 涛哥 11 | * @nick_name "deep as the sea" 12 | * @contact qq:657270652 wx:doit_edu 13 | * @site www.doitedu.cn 14 | * @date 2021-03-28 15 | * @desc 规则计算中可能用到的各类工具方法 16 | */ 17 | public class RuleCalcUtil { 18 | /** 19 | * 工具方法,用于判断一个待判断事件和一个规则中的原子条件是否一致 20 | * @param eventBean 21 | * @param eventParam 22 | * @return 23 | */ 24 | public static boolean eventBeanMatchEventParam(LogBean eventBean, RuleAtomicParam eventParam){ 25 | // 如果传入的一个事件的事件id与参数中的事件id相同,才开始进行属性判断 26 | if(eventBean.getEventId().equals(eventParam.getEventId())){ 27 | 28 | // 取出待判断事件中的属性 29 | Map eventProperties = eventBean.getProperties(); 30 | 31 | // 取出条件中的事件属性 32 | HashMap paramProperties = eventParam.getProperties(); 33 | Set> entries = paramProperties.entrySet(); 34 | // 遍历条件中的每个属性及值 35 | for (Map.Entry entry : entries) { 36 | if(!entry.getValue().equals(eventProperties.get(entry.getKey()))){ 37 | return false; 38 | } 39 | } 40 | 41 | return true; 42 | } 43 | 44 | return false; 45 | } 46 | 47 | public static boolean eventBeanMatchEventParam(LogBean eventBean, RuleAtomicParam eventParam,boolean neeTimeCompare){ 48 | boolean b = eventBeanMatchEventParam(eventBean, eventParam); 49 | 50 | // 要考虑一点,外部传入的条件中,时间范围条件,如果起始、结束没有约束,应该传入一个 -1 51 | long start = eventParam.getRangeStart(); 52 | long end = eventParam.getRangeEnd(); 53 | 54 | return b && eventBean.getTimeStamp()>= (start==-1?0:start) && eventBean.getTimeStamp()<= (end==-1?Long.MAX_VALUE:end); 55 | 56 | } 57 | 58 | 59 | public static String getBufferKey(String deviceId,RuleAtomicParam atomicParam){ 60 | // deviceId-EVENT-p1-v1-p2-v2 61 | StringBuffer sb = new StringBuffer(); 62 | sb.append(deviceId).append("-").append(atomicParam.getEventId()); 63 | 64 | HashMap properties = atomicParam.getProperties(); 65 | Set> entries = properties.entrySet(); 66 | for (Map.Entry entry : entries) { 67 | sb.append("-").append(entry.getKey()).append("-").append(entry.getValue()); 68 | } 69 | 70 | return sb.toString(); 71 | } 72 | 73 | public static String getBufferKey(String deviceId, List paramList){ 74 | // deviceId-EVENT1-p1-v1-p2-v2-EVENT2-p1-v1-p2-v2 75 | StringBuffer sb = new StringBuffer(); 76 | sb.append(deviceId); 77 | 78 | for (RuleAtomicParam ruleAtomicParam : paramList) { 79 | sb.append("-").append(ruleAtomicParam.getEventId()); 80 | HashMap properties = ruleAtomicParam.getProperties(); 81 | Set> entries = properties.entrySet(); 82 | for (Map.Entry entry : entries) { 83 | sb.append("-").append(entry.getKey()).append("-").append(entry.getValue()); 84 | } 85 | } 86 | 87 | return sb.toString(); 88 | } 89 | 90 | 91 | public static long getQuerySplit(){ 92 | return DateUtils.addHours(DateUtils.ceiling(new Date(), Calendar.HOUR), -2).getTime(); 93 | } 94 | 95 | 96 | } 97 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/utils/RuleOperationHandler.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.utils; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleCanalBean; 4 | import 
cn.doitedu.dynamic_rule.pojo.RuleStateBean; 5 | import cn.doitedu.dynamic_rule.pojo.RuleTableRecord; 6 | import lombok.extern.slf4j.Slf4j; 7 | import org.apache.flink.api.common.state.BroadcastState; 8 | import org.apache.flink.api.common.state.MapState; 9 | import org.kie.api.io.ResourceType; 10 | import org.kie.api.runtime.KieSession; 11 | import org.kie.internal.utils.KieHelper; 12 | 13 | import java.util.List; 14 | 15 | @Slf4j 16 | public class RuleOperationHandler { 17 | 18 | 19 | /** 20 | * 规则操作处理入口方法 21 | * 22 | * @param ruleCanalBean 23 | * @param mapState 24 | */ 25 | public static boolean handleRuleOper(RuleCanalBean ruleCanalBean, BroadcastState mapState) { 26 | 27 | try { 28 | // 从canal信息中,取到规则表的行数据List(id,规则名,规则代码,cntsql,seqsql) 29 | List dataList = ruleCanalBean.getData(); 30 | if (dataList == null || dataList.size() < 1) return true; 31 | 32 | // 从行数据List中取到第一行(其实就只有一行)(id,规则名,规则代码,cntsql,seqsql) 33 | RuleTableRecord ruleTableRecord = dataList.get(0); 34 | String ruleName = ruleTableRecord.getRule_name(); 35 | 36 | // 如果status=1,则做新增规则的操作 37 | if (ruleTableRecord.getRule_status() == 1) { 38 | 39 | // status =1 表示,有一条规则要使用,则往state中插入该规则信息 40 | RuleStateBean ruleStateBean = new RuleStateBean(); 41 | ruleStateBean.setRuleName(ruleName); 42 | 43 | // 往statebean中cntsql 44 | ruleStateBean.setCntSqls(ruleTableRecord.getCnt_sqls()); 45 | 46 | // 往statebean中seqsql 47 | ruleStateBean.setSeqSqls(ruleTableRecord.getSeq_sqls()); 48 | 49 | // 往statebean中放kiesession 50 | KieHelper kieHelper = new KieHelper(); 51 | kieHelper.addContent(ruleTableRecord.getRule_code(), ResourceType.DRL); 52 | KieSession kieSession = kieHelper.build().newKieSession(); 53 | 54 | ruleStateBean.setKieSession(kieSession); 55 | 56 | 57 | // 把statebean放入state 58 | mapState.put(ruleName, ruleStateBean); 59 | 60 | } 61 | // 否则,只有删除这种情况,则做删除操作 62 | else { 63 | mapState.remove(ruleName); 64 | } 65 | return true; 66 | } catch (Exception e) { 67 | 68 | log.error("规则处理出现异常,异常信息: \n {}",e.getMessage()); 69 | 70 | return false; 71 | } 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/utils/RuleSimulator.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.utils; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam; 4 | import cn.doitedu.dynamic_rule.pojo.RuleParam; 5 | 6 | import java.util.ArrayList; 7 | import java.util.HashMap; 8 | 9 | /** 10 | * @author 涛哥 11 | * @nick_name "deep as the sea" 12 | * @contact qq:657270652 wx:doit_edu 13 | * @site www.doitedu.cn 14 | * @date 2021-03-28 15 | * @desc 规则模拟器 16 | */ 17 | public class RuleSimulator { 18 | 19 | public static RuleParam getRuleParam(){ 20 | 21 | RuleParam ruleParam = new RuleParam(); 22 | ruleParam.setRuleName("test_rule_1"); 23 | 24 | // 构造触发条件 25 | RuleAtomicParam trigger = new RuleAtomicParam(); 26 | trigger.setEventId("E"); 27 | ruleParam.setTriggerParam(trigger); 28 | 29 | 30 | // 构造画像条件 31 | HashMap userProfileParams = new HashMap<>(); 32 | userProfileParams.put("tag5","v1"); 33 | //userProfileParams.put("tag6","v2"); 34 | ruleParam.setUserProfileParams(userProfileParams); 35 | 36 | 37 | // 行为次数条件 38 | RuleAtomicParam count1 = new RuleAtomicParam(); 39 | count1.setEventId("B"); 40 | HashMap paramProps1 = new HashMap<>(); 41 | paramProps1.put("p1","v1"); 42 | count1.setProperties(paramProps1); 43 | count1.setOriginStart(0); 44 | count1.setOriginEnd(Long.MAX_VALUE); 45 | 
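        // threshold for this count condition: event B with p1=v1 must have occurred at least 10 times within [originStart, originEnd]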
count1.setCnt(10); 46 | String sql1 = "select\n" + 47 | " deviceId,\n" + 48 | " count(1) as cnt\n" + 49 | "from yinew_detail\n" + 50 | "where deviceId= '${did}' and eventId='B' and properties['p1']='v1'\n" + 51 | " and timeStamp between 0 and 6615900580000\n" + 52 | "group by deviceId\n" + 53 | ";"; 54 | count1.setCountQuerySql(sql1); 55 | 56 | RuleAtomicParam count2 = new RuleAtomicParam(); 57 | count2.setEventId("D"); 58 | HashMap paramProps2 = new HashMap<>(); 59 | paramProps2.put("p2","v2"); 60 | count2.setProperties(paramProps2); 61 | count2.setOriginStart(1617094800000L); 62 | count2.setOriginEnd(Long.MAX_VALUE); 63 | count2.setCnt(22); 64 | String sql2 = "select\n" + 65 | " deviceId,\n" + 66 | " count(1) as cnt\n" + 67 | "from yinew_detail\n" + 68 | "where deviceId= '${did}' and eventId='D' and properties['p2']='v2'\n" + 69 | " and timeStamp between 1617094800000 and 6615900580000\n" + 70 | "group by deviceId \n" + 71 | ";"; 72 | count2.setCountQuerySql(sql2); 73 | 74 | 75 | ArrayList countParams = new ArrayList<>(); 76 | countParams.add(count1); 77 | countParams.add(count2); 78 | ruleParam.setUserActionCountParams(countParams); 79 | 80 | 81 | // 行为序列(行为路径)条件(2个事件的序列) 82 | RuleAtomicParam param1 = new RuleAtomicParam(); 83 | param1.setEventId("A"); 84 | HashMap seqProps1 = new HashMap<>(); 85 | seqProps1.put("p1","v1"); 86 | param1.setProperties(seqProps1); 87 | param1.setOriginStart(0); 88 | param1.setOriginEnd(Long.MAX_VALUE); 89 | 90 | RuleAtomicParam param2 = new RuleAtomicParam(); 91 | param2.setEventId("C"); 92 | HashMap seqProps2 = new HashMap<>(); 93 | seqProps2.put("p2","v2"); 94 | param2.setProperties(seqProps2); 95 | param2.setOriginStart(0); 96 | param2.setOriginEnd(Long.MAX_VALUE); 97 | 98 | 99 | ArrayList ruleParams = new ArrayList<>(); 100 | ruleParams.add(param1); 101 | ruleParams.add(param2); 102 | 103 | ruleParam.setUserActionSequenceParams(ruleParams); 104 | String sql = "SELECT\n" + 105 | " deviceId,\n" + 106 | " sequenceMatch('.*(?1).*(?2).*')(\n" + 107 | " toDateTime(`timeStamp`),\n" + 108 | " eventId = 'A' and properties['p1']='v1',\n" + 109 | " eventId = 'C' and properties['p2']='v2'\n" + 110 | " ) as isMatch2,\n" + 111 | "\n" + 112 | " sequenceMatch('.*(?1).*')(\n" + 113 | " toDateTime(`timeStamp`),\n" + 114 | " eventId = 'A' and properties['p1']='v1',\n" + 115 | " eventId = 'C' and properties['p2']='v2'\n" + 116 | " ) as isMatch1\n" + 117 | "\n" + 118 | "from yinew_detail\n" + 119 | "where\n" + 120 | " deviceId = '${did}' \n" + 121 | " and\n" + 122 | " timeStamp >= 0\n" + 123 | " and\n" + 124 | " timeStamp <= 6235295739479\n" + 125 | " and\n" + 126 | " (\n" + 127 | " (eventId='A' and properties['p1']='v1')\n" + 128 | " or (eventId = 'C' and properties['p2']='v2')\n" + 129 | " )\n" + 130 | "group by deviceId;"; 131 | ruleParam.setActionSequenceQuerySql(sql); 132 | 133 | return ruleParam; 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/java/cn/doitedu/dynamic_rule/utils/StateDescUtil.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.dynamic_rule.utils; 2 | 3 | import cn.doitedu.dynamic_rule.pojo.LogBean; 4 | import cn.doitedu.dynamic_rule.pojo.RuleStateBean; 5 | import org.apache.flink.api.common.state.ListStateDescriptor; 6 | import org.apache.flink.api.common.state.MapStateDescriptor; 7 | import org.kie.api.runtime.KieSession; 8 | 9 | public class StateDescUtil { 10 | 11 | /** 12 | * 存放drools规则容器session的state定义器 13 
| */ 14 | public static final MapStateDescriptor ruleKieStateDesc = new MapStateDescriptor("ruleKieState",String.class,RuleStateBean.class); 15 | 16 | public static final ListStateDescriptor eventStateDesc = new ListStateDescriptor<>("eventState", LogBean.class); 17 | 18 | 19 | } 20 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/resources/META-INF/kmodule.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/resources/bak/log4j.properties: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/dynamic_rule_engine/src/main/resources/bak/log4j.properties -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/resources/bak/log4j2.properties: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | ################################################################################ 18 | 19 | # Allows this configuration to be modified at runtime. The file will be checked every 30 seconds. 20 | monitorInterval=30 21 | 22 | # This affects logging for both user code and Flink 23 | rootLogger.level = INFO 24 | # rootLogger.appenderRef.file.ref = MainAppender 25 | rootLogger.appenderRef.console.ref = Console_OUT 26 | 27 | # Uncomment this if you want to _only_ change Flink's logging 28 | logger.flink.name = org.apache.flink 29 | logger.flink.level = ERROR 30 | 31 | appender.console.type = Console 32 | appender.console.name = Console_OUT 33 | appender.console.layout.type = PatternLayout 34 | appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} [%p] %c: %msg%n 35 | appender.console.filter.threshold.type = ThresholdFilter 36 | appender.console.filter.threshold.level = INFO 37 | 38 | 39 | # The following lines keep the log level of common libraries/connectors on 40 | # log level INFO. The root logger does not override this. You have to manually 41 | # change the log levels here. 
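# (third-party loggers below are raised to ERROR; the project logger cn.doitedu.dynamic_rule stays at INFO)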
42 | logger.akka.name = akka 43 | logger.akka.level = ERROR 44 | logger.kafka.name= org.apache.kafka 45 | logger.kafka.level = ERROR 46 | logger.hadoop.name = org.apache.hadoop 47 | logger.hadoop.level = ERROR 48 | logger.zookeeper.name = org.apache.zookeeper 49 | logger.zookeeper.level = ERROR 50 | 51 | logger.doit.name = cn.doitedu.dynamic_rule 52 | logger.doit.level = INFO 53 | #logger.doit.appenderRef.console.ref = Console_OUT 54 | logger.doit.appenderRef.file.ref = MainAppender 55 | logger.doit.additivity = false 56 | 57 | logger.clickhouse.name = ru.yandex.clickhouse 58 | logger.clickhouse.level = WARN 59 | 60 | # Log all infos in the given file 61 | appender.main.name = MainAppender 62 | appender.main.type = RollingFile 63 | appender.main.append = true 64 | #appender.main.fileName = ${sys:log.file} 65 | #appender.main.filePattern = ${sys:log.file}.%i 66 | appender.main.fileName = d:/yinew.log 67 | appender.main.filePattern = d:/yinew.log.%i 68 | appender.main.layout.type = PatternLayout 69 | appender.main.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n 70 | appender.main.policies.type = Policies 71 | appender.main.policies.size.type = SizeBasedTriggeringPolicy 72 | appender.main.policies.size.size = 100MB 73 | appender.main.policies.startup.type = OnStartupTriggeringPolicy 74 | appender.main.strategy.type = DefaultRolloverStrategy 75 | #appender.main.strategy.max = ${env:MAX_LOG_FILE_NUMBER:-10} 76 | appender.main.strategy.max = 10 77 | 78 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler 79 | logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline 80 | logger.netty.level = OFF 81 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/dynamic_rule_engine/src/main/resources/log4j.properties -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/resources/log4j2.properties: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | ################################################################################ 18 | 19 | # Allows this configuration to be modified at runtime. The file will be checked every 30 seconds. 
20 | monitorInterval=30 21 | 22 | # This affects logging for both user code and Flink 23 | rootLogger.level = INFO 24 | # rootLogger.appenderRef.file.ref = MainAppender 25 | rootLogger.appenderRef.console.ref = Console_OUT 26 | 27 | 28 | # Uncomment this if you want to _only_ change Flink's logging 29 | logger.flink.name = org.apache.flink 30 | logger.flink.level = ERROR 31 | 32 | appender.console.type = Console 33 | appender.console.name = Console_OUT 34 | appender.console.layout.type = PatternLayout 35 | appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} [%p] %c: %msg%n 36 | appender.console.filter.threshold.type = ThresholdFilter 37 | appender.console.filter.threshold.level = DEBUG 38 | 39 | 40 | # The following lines keep the log level of common libraries/connectors on 41 | # log level INFO. The root logger does not override this. You have to manually 42 | # change the log levels here. 43 | logger.akka.name = akka 44 | logger.akka.level = ERROR 45 | logger.kafka.name= org.apache.kafka 46 | logger.kafka.level = ERROR 47 | logger.hadoop.name = org.apache.hadoop 48 | logger.hadoop.level = ERROR 49 | logger.zookeeper.name = org.apache.zookeeper 50 | logger.zookeeper.level = ERROR 51 | 52 | logger.doit.name = cn.doitedu.dynamic_rule 53 | logger.doit.level = INFO 54 | logger.doit.appenderRef.console.ref = Console_OUT 55 | #logger.doit.appenderRef.file.ref = MainAppender 56 | logger.doit.additivity = false 57 | 58 | logger.clickhouse.name = ru.yandex.clickhouse 59 | logger.clickhouse.level = WARN 60 | 61 | logger.kie.name = org.kie 62 | logger.kie.level = ERROR 63 | 64 | logger.drools.name = org.drools 65 | logger.drools.level = ERROR 66 | 67 | # Log all infos in the given file 68 | appender.main.name = MainAppender 69 | appender.main.type = RollingFile 70 | appender.main.append = true 71 | #appender.main.fileName = ${sys:log.file} 72 | #appender.main.filePattern = ${sys:log.file}.%i 73 | appender.main.fileName = d:/yinew.log 74 | appender.main.filePattern = d:/yinew.log.%i 75 | appender.main.layout.type = PatternLayout 76 | appender.main.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n 77 | appender.main.policies.type = Policies 78 | appender.main.policies.size.type = SizeBasedTriggeringPolicy 79 | appender.main.policies.size.size = 100MB 80 | appender.main.policies.startup.type = OnStartupTriggeringPolicy 81 | appender.main.strategy.type = DefaultRolloverStrategy 82 | #appender.main.strategy.max = ${env:MAX_LOG_FILE_NUMBER:-10} 83 | appender.main.strategy.max = 10 84 | 85 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler 86 | logger.netty.name = org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline 87 | logger.netty.level = OFF 88 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/resources/rules/flink.drl: -------------------------------------------------------------------------------- 1 | package cn.doitedu.demo.droolsdemo.rules 2 | import cn.doitedu.dynamic_rule.demos.Action 3 | 4 | import cn.doitedu.dynamic_rule.demos.Applicant 5 | import cn.doitedu.dynamic_rule.demos.Action 6 | 7 | rule "age if valid" 8 | when 9 | $a: Applicant() 10 | then 11 | if($a.getAge() >=18){ 12 | $a.setValid(true); 13 | }else{ 14 | $a.setValid(false); 15 | } 16 | end 17 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/main/resources/rules/test.drl: 
-------------------------------------------------------------------------------- 1 | package cn.doitedu.demo.droolsdemo.rules 2 | import cn.doitedu.dynamic_rule.demos.Action 3 | 4 | // 导包 5 | import cn.doitedu.dynamic_rule.demos.Applicant 6 | import cn.doitedu.dynamic_rule.demos.Action 7 | 8 | // rule1 名称 9 | rule "age if valid" 10 | when // 触发判断条件 11 | $a: Applicant(age < 18) 12 | then // 满足条件后要执行的逻辑 13 | $a.setValid(false);// 可以执行任何java代码(对输入的对象调用方法) 14 | new Action("执行动作").doSomeThing(); //可以执行任何java代码(创建对象调方法) 15 | insert(new Action("不合法")); //可以继续给容器插入对象,以触发其他规则 16 | end 17 | 18 | // rule2 名称 19 | rule "send_act" 20 | when 21 | $a: Action() 22 | then 23 | $a.doSomeThing(); 24 | end -------------------------------------------------------------------------------- /dynamic_rule_engine/src/test/java/ListRemoveDemo.java: -------------------------------------------------------------------------------- 1 | import java.util.ArrayList; 2 | 3 | public class ListRemoveDemo { 4 | public static void main(String[] args) { 5 | 6 | ArrayList lst = new ArrayList<>(); 7 | lst.add("a"); 8 | /* lst.add("b"); 9 | lst.add("c"); 10 | lst.add("d"); 11 | lst.add("e");*/ 12 | 13 | /*for (String s : lst) { 14 | if(s.equals("c")) lst.remove("c"); 15 | }*/ 16 | 17 | 18 | for (int i = 0; i < lst.size(); i++) { 19 | 20 | String s = lst.get(i); 21 | if (s.equals("a") || s.equals("d")) { 22 | lst.remove(i); 23 | i--; 24 | } 25 | 26 | } 27 | 28 | System.out.println(lst); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/test/java/RuleTest.java: -------------------------------------------------------------------------------- 1 | import cn.doitedu.dynamic_rule.pojo.DroolFact; 2 | import cn.doitedu.dynamic_rule.pojo.LogBean; 3 | import org.apache.commons.io.FileUtils; 4 | import org.kie.api.io.ResourceType; 5 | import org.kie.api.runtime.KieSession; 6 | import org.kie.internal.utils.KieHelper; 7 | 8 | import java.io.File; 9 | import java.io.IOException; 10 | 11 | public class RuleTest { 12 | 13 | public static void main(String[] args) throws IOException { 14 | 15 | 16 | String s = FileUtils.readFileToString(new File("dynamic_rule_engine/rules_drl/rule1.drl"), "utf-8"); 17 | 18 | KieHelper kieHelper = new KieHelper(); 19 | KieSession kieSession = kieHelper.addContent(s, ResourceType.DRL).build().newKieSession(); 20 | 21 | 22 | DroolFact droolFact = new DroolFact(); 23 | 24 | LogBean logBean = new LogBean(); 25 | logBean.setEventId("D"); 26 | 27 | droolFact.setLogBean(logBean); 28 | 29 | 30 | kieSession.insert(droolFact); 31 | kieSession.fireAllRules(); 32 | 33 | 34 | kieSession.dispose(); 35 | 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /dynamic_rule_engine/src/test/java/Test.java: -------------------------------------------------------------------------------- 1 | import org.apache.commons.lang3.RandomStringUtils; 2 | import org.apache.commons.lang3.time.DateUtils; 3 | 4 | import java.util.Calendar; 5 | import java.util.Date; 6 | 7 | public class Test { 8 | public static void main(String[] args) { 9 | 10 | /*for(int i=0;i<100;i++){ 11 | System.out.println(RandomStringUtils.randomAlphabetic(1)); 12 | }*/ 13 | 14 | long x = System.currentTimeMillis() - 2*60*60*1000; 15 | System.out.println(x); 16 | 17 | 18 | Date date = DateUtils.addHours(DateUtils.ceiling(new Date(), Calendar.HOUR), -2); 19 | System.out.println(date); 20 | 21 | 22 | 23 | 24 | 25 | 26 | } 27 | } 28 | 
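The scratch code in Test above works out the same query split point that RuleCalcUtil.getQuerySplit() returns: the current time is ceiled up to the next full hour and then shifted back two hours. A minimal self-contained sketch of that computation follows; the class name is hypothetical, and the routing interpretation in the comments (recent events answered from Flink state, older history from ClickHouse) is an illustrative assumption rather than something stated in this file.

import org.apache.commons.lang3.time.DateUtils;

import java.util.Calendar;
import java.util.Date;

public class QuerySplitSketch {
    public static void main(String[] args) {
        // e.g. at 14:23, ceiling to the hour gives 15:00; minus 2 hours -> 13:00
        Date split = DateUtils.addHours(DateUtils.ceiling(new Date(), Calendar.HOUR), -2);
        long splitTs = split.getTime();

        // assumed usage (illustration only): a condition window [rangeStart, rangeEnd] could be cut at splitTs,
        // with [rangeStart, splitTs) served by the history store and [splitTs, rangeEnd] by recent in-memory state
        System.out.println("query split point = " + splitTs);
    }
}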
-------------------------------------------------------------------------------- /log4jdemo/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | yinew_marketing 7 | cn.doitedu 8 | 1.0 9 | 10 | 4.0.0 11 | 12 | log4jdemo 13 | 14 | 15 | 16 | log4j 17 | log4j 18 | 1.2.17 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /log4jdemo/src/main/java/cn/doitedu/log4j/Demo.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.log4j; 2 | 3 | import org.apache.log4j.Logger; 4 | 5 | public class Demo { 6 | 7 | public static void main(String[] args) { 8 | 9 | // 根logger 10 | Logger rootLogger = Logger.getRootLogger(); 11 | 12 | // 每一个logger对象都有自己的命名 13 | Logger logger1 = Logger.getLogger("cn"); 14 | 15 | // cn.doitedu这个logger是 cn这个logger的 子logger 16 | // 子logger会继承父logger的配置 17 | Logger logger2 = Logger.getLogger("cn.doitedu"); 18 | 19 | 20 | Logger demoLogger = Logger.getLogger(Demo.class); 21 | 22 | 23 | // debug的级别最低 24 | logger1.debug("这是一条debug级别的日志"); 25 | 26 | // info级别大于debug 27 | logger1.info("这是一条info级别的日志"); 28 | 29 | // warn级别大于info 30 | logger1.warn("这是一条warn级别的日志"); 31 | 32 | // error级别大于warn 33 | logger1.error("这是一条error级别的日志"); 34 | 35 | 36 | logger2.debug("这是cn.doitedu 这个 logger的debug日志"); 37 | logger2.info("这是cn.doitedu 这个 logger的info日志"); 38 | logger2.warn("这是cn.doitedu 这个 logger的warn日志"); 39 | logger2.error("这是cn.doitedu 这个 logger的error日志"); 40 | 41 | 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /log4jdemo/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coderblack/yinew_marketing/45ef4c47dec6f42c5e963a915ddaa88ad2a5fcaf/log4jdemo/src/main/resources/log4j.properties -------------------------------------------------------------------------------- /manageplatform/HELP.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | ### Reference Documentation 4 | For further reference, please consider the following sections: 5 | 6 | * [Official Apache Maven documentation](https://maven.apache.org/guides/index.html) 7 | * [Spring Boot Maven Plugin Reference Guide](https://docs.spring.io/spring-boot/docs/2.4.4/maven-plugin/reference/html/) 8 | * [Create an OCI image](https://docs.spring.io/spring-boot/docs/2.4.4/maven-plugin/reference/html/#build-image) 9 | * [Spring Configuration Processor](https://docs.spring.io/spring-boot/docs/2.4.4/reference/htmlsingle/#configuration-metadata-annotation-processor) 10 | * [Spring Web](https://docs.spring.io/spring-boot/docs/2.4.4/reference/htmlsingle/#boot-features-developing-web-applications) 11 | * [Spring Boot DevTools](https://docs.spring.io/spring-boot/docs/2.4.4/reference/htmlsingle/#using-boot-devtools) 12 | 13 | ### Guides 14 | The following guides illustrate how to use some features concretely: 15 | 16 | * [Building a RESTful Web Service](https://spring.io/guides/gs/rest-service/) 17 | * [Serving Web Content with Spring MVC](https://spring.io/guides/gs/serving-web-content/) 18 | * [Building REST services with Spring](https://spring.io/guides/tutorials/bookmarks/) 19 | 20 | -------------------------------------------------------------------------------- /manageplatform/mvnw.cmd: -------------------------------------------------------------------------------- 1 | 
@REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM https://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 
79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 124 | 125 | FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( 126 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 127 | ) 128 | 129 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 130 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 131 | if exist %WRAPPER_JAR% ( 132 | if "%MVNW_VERBOSE%" == "true" ( 133 | echo Found %WRAPPER_JAR% 134 | ) 135 | ) else ( 136 | if not "%MVNW_REPOURL%" == "" ( 137 | SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" 138 | ) 139 | if "%MVNW_VERBOSE%" == "true" ( 140 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 141 | echo Downloading from: %DOWNLOAD_URL% 142 | ) 143 | 144 | powershell -Command "&{"^ 145 | "$webclient = new-object System.Net.WebClient;"^ 146 | "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ 147 | "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ 148 | "}"^ 149 | "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ 150 | "}" 151 | if "%MVNW_VERBOSE%" == "true" ( 152 | echo Finished downloading %WRAPPER_JAR% 153 | ) 154 | ) 155 | @REM End of extension 156 | 157 | @REM Provide a "standardized" way to retrieve the CLI args that will 158 | @REM work with both Windows and non-Windows executions. 
159 | set MAVEN_CMD_LINE_ARGS=%* 160 | 161 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 162 | if ERRORLEVEL 1 goto error 163 | goto end 164 | 165 | :error 166 | set ERROR_CODE=1 167 | 168 | :end 169 | @endlocal & set ERROR_CODE=%ERROR_CODE% 170 | 171 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 172 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 173 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 174 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 175 | :skipRcPost 176 | 177 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 178 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 179 | 180 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 181 | 182 | exit /B %ERROR_CODE% 183 | -------------------------------------------------------------------------------- /manageplatform/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.4.4 9 | 10 | 11 | cn.doitedu.yinew 12 | manageplatform 13 | 0.0.1-SNAPSHOT 14 | manageplatform 15 | Demo project for Spring Boot 16 | 17 | 1.8 18 | 19 | 20 | 21 | org.springframework.boot 22 | spring-boot-starter-web 23 | 24 | 25 | 26 | org.springframework.boot 27 | spring-boot-starter-freemarker 28 | 29 | 30 | 31 | org.springframework.boot 32 | spring-boot-devtools 33 | runtime 34 | true 35 | 36 | 37 | org.springframework.boot 38 | spring-boot-configuration-processor 39 | true 40 | 41 | 42 | org.projectlombok 43 | lombok 44 | true 45 | 46 | 47 | org.springframework.boot 48 | spring-boot-starter-test 49 | test 50 | 51 | 52 | 53 | redis.clients 54 | jedis 55 | 2.9.0 56 | 57 | 58 | 59 | com.alibaba 60 | fastjson 61 | 1.2.75 62 | 63 | 64 | commons-io 65 | commons-io 66 | 2.7 67 | compile 68 | 69 | 70 | 71 | mysql 72 | mysql-connector-java 73 | 8.0.23 74 | 75 | 76 | 77 | 78 | 79 | 80 | org.springframework.boot 81 | spring-boot-maven-plugin 82 | 83 | 84 | 85 | org.projectlombok 86 | lombok 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/ManageplatformApplication.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class ManageplatformApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(ManageplatformApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/controller/FreeMarkerDemoController.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform.controller; 2 | 3 | import cn.doitedu.yinew.manageplatform.pojo.Animal; 4 | import cn.doitedu.yinew.manageplatform.pojo.RuleAtomicParam; 5 | import freemarker.template.Configuration; 6 | import freemarker.template.Template; 7 | import freemarker.template.TemplateExceptionHandler; 8 | import org.springframework.web.bind.annotation.RequestMapping; 9 | 
import org.springframework.web.bind.annotation.RestController; 10 | 11 | import java.io.File; 12 | import java.io.OutputStreamWriter; 13 | import java.io.Writer; 14 | import java.util.ArrayList; 15 | import java.util.HashMap; 16 | 17 | @RestController 18 | public class FreeMarkerDemoController { 19 | 20 | @RequestMapping("/demo") 21 | public String demo(String name, String animalStr) throws Exception { 22 | 23 | String[] split = animalStr.split("-"); 24 | ArrayList animals = new ArrayList<>(); 25 | for (String s : split) { 26 | String[] s1 = s.split("_"); 27 | Animal animal = new Animal(s1[0], Integer.parseInt(s1[1])); 28 | animals.add(animal); 29 | } 30 | 31 | // 数据封装 32 | HashMap data = new HashMap<>(); 33 | data.put("user", name); 34 | data.put("animals", animals); 35 | 36 | // 构造模板引擎 37 | Configuration cfg = new Configuration(Configuration.VERSION_2_3_22); 38 | cfg.setDirectoryForTemplateLoading(new File("manageplatform/src/main/resources/templates")); 39 | cfg.setDefaultEncoding("UTF-8"); 40 | cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER); 41 | Template temp = cfg.getTemplate("demo.ftl"); 42 | Writer out = new OutputStreamWriter(System.out); 43 | 44 | // 调用模板引擎渲染 45 | temp.process(data, out); 46 | 47 | return "ok"; 48 | } 49 | 50 | 51 | @RequestMapping("/cntsql") 52 | public String getRuleCountSql() throws Exception { 53 | ArrayList params = new ArrayList<>(); 54 | 55 | RuleAtomicParam param1 = new RuleAtomicParam(); 56 | param1.setEventId("H"); 57 | HashMap prop1 = new HashMap<>(); 58 | prop1.put("p1","v1"); 59 | prop1.put("p2","v3"); 60 | prop1.put("p4","v5"); 61 | param1.setProperties(prop1); 62 | 63 | 64 | RuleAtomicParam param2 = new RuleAtomicParam(); 65 | param2.setEventId("C"); 66 | HashMap prop2 = new HashMap<>(); 67 | prop2.put("p5","v6"); 68 | param2.setProperties(prop2); 69 | 70 | 71 | params.add(param1); 72 | params.add(param2); 73 | 74 | 75 | // 数据封装 76 | HashMap data = new HashMap<>(); 77 | data.put("events",params); 78 | 79 | Configuration cfg = new Configuration(Configuration.VERSION_2_3_22); 80 | cfg.setDirectoryForTemplateLoading(new File("manageplatform/src/main/resources/templates")); 81 | cfg.setDefaultEncoding("UTF-8"); 82 | cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER); 83 | Template temp = cfg.getTemplate("eventCountModel.ftl"); 84 | Writer out = new OutputStreamWriter(System.out); 85 | 86 | // 调用模板引擎渲染 87 | temp.process(data, out); 88 | 89 | return "ok"; 90 | 91 | } 92 | 93 | } 94 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/controller/RuleDashboardController.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform.controller; 2 | 3 | 4 | import cn.doitedu.yinew.manageplatform.pojo.RuleStatus; 5 | import com.alibaba.fastjson.JSON; 6 | import org.springframework.web.bind.annotation.*; 7 | import redis.clients.jedis.Jedis; 8 | 9 | import java.util.ArrayList; 10 | import java.util.List; 11 | import java.util.Map; 12 | import java.util.Set; 13 | 14 | @RestController 15 | public class RuleDashboardController { 16 | Jedis jedis = null; 17 | public RuleDashboardController(){ 18 | jedis = new Jedis("hdp02", 6379); 19 | } 20 | 21 | /** 22 | * 获取所有规则的状态信息 23 | * 24 | * { 25 | * ruleName: '运营公众号拉新', 26 | * ruleType: '触发型', 27 | * publishTime: '2021-04-01 12:30:45', 28 | * lastTrigTime: '2021-06-08 13:30:30', 29 | * trigCount: 80, 30 | * 
hitCount: 20, 31 | * hitRatio: '30%', 32 | * compareGroupRatio: '20%', 33 | * ruleGroupRation: '40%', 34 | * runStatus:"1" 35 | * } 36 | * 37 | * @return 38 | */ 39 | @RequestMapping(method = RequestMethod.POST,value = "/api/getrulestatus") 40 | @CrossOrigin(origins = "http://localhost:8000") 41 | public List getRuleStatus(@RequestBody String a){ 42 | 43 | Set ruleKeys = jedis.keys("rule_status_*"); 44 | ArrayList lst = new ArrayList<>(); 45 | for (String ruleKey : ruleKeys) { 46 | Map stringStringMap = jedis.hgetAll(ruleKey); 47 | String json = JSON.toJSONString(stringStringMap); 48 | RuleStatus ruleStatus = JSON.parseObject(json, RuleStatus.class); 49 | lst.add(ruleStatus); 50 | } 51 | 52 | 53 | /* 54 | ArrayList lst = new ArrayList(); 55 | RuleStatus r1 = new RuleStatus("运营公众号拉新", "001", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "1"); 56 | RuleStatus r2 = new RuleStatus("3H爆款激活用户", "002", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "1"); 57 | RuleStatus r3 = new RuleStatus("拉新促销", "003", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "0"); 58 | RuleStatus r4 = new RuleStatus("优惠关键词", "004", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "1"); 59 | RuleStatus r5 = new RuleStatus("高流失风险客户挽留", "005", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "1"); 60 | RuleStatus r6 = new RuleStatus("新客激活优惠券发送", "006", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "0"); 61 | RuleStatus r7 = new RuleStatus("双11爆款硬推", "007", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "1"); 62 | RuleStatus r8 = new RuleStatus("夏日饮品新上架普推", "008", "2021-06-10 12:30:30", "2021-06-10 12:30:30", "触发型", "800", "40%", "200", "20%", "35%", "1"); 63 | lst.add(r1); 64 | lst.add(r2); 65 | lst.add(r3); 66 | lst.add(r4); 67 | lst.add(r5); 68 | lst.add(r6); 69 | lst.add(r7); 70 | lst.add(r8);*/ 71 | 72 | return lst; 73 | } 74 | 75 | 76 | 77 | 78 | } 79 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/controller/RuleHandleController.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform.controller; 2 | 3 | import cn.doitedu.yinew.manageplatform.pojo.RuleDefine; 4 | import cn.doitedu.yinew.manageplatform.pojo.RuleStatus; 5 | import com.alibaba.fastjson.JSON; 6 | import org.apache.commons.io.FileUtils; 7 | import org.springframework.web.bind.annotation.*; 8 | import redis.clients.jedis.Jedis; 9 | 10 | import java.io.File; 11 | import java.io.IOException; 12 | import java.sql.Connection; 13 | import java.sql.DriverManager; 14 | import java.sql.PreparedStatement; 15 | import java.text.SimpleDateFormat; 16 | import java.util.*; 17 | 18 | /*** 19 | * @author hunter.d 20 | * @qq 657270652 21 | * @wx haitao-duan 22 | * @date 2021/5/9 23 | * 24 | * 仅为演示代码 25 | * 后端管理平台,开发工作量很大,会javaweb开发的同学看到演示代码即可知真实开发思路 26 | **/ 27 | @RestController 28 | public class RuleHandleController { 29 | 30 | Jedis jedis = null; 31 | public RuleHandleController(){ 32 | jedis = new Jedis("hdp02", 6379); 33 | } 34 | 35 | 36 | @RequestMapping(method = RequestMethod.POST, value = "/api/publishrule") 37 | @CrossOrigin(origins = "http://localhost:8000") 38 | public String publishRule(@RequestBody RuleDefine 
ruleDefine) throws Exception { 39 | 40 | // 接收到规则定义信息后 41 | // 利用freemarker或者velocity,生成规则所需的sql模板,和drools模板文件 42 | // 将规则定义信息及生成好的sql、drools代码,插入mysql数据库,以供canal监听并通过kafka传递给flink规则引擎 43 | insertRule2Mysql(ruleDefine.getRuleName()); 44 | 45 | // 直接将规则信息写入“监控平台”所要读取的redis库 46 | HashMap ruleStatus = new HashMap<>(); 47 | Long ruleId = jedis.incr("rule_num"); 48 | 49 | SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); 50 | String publishTime = sdf.format(new Date()); 51 | String ruleType = new Random().nextInt(10)%3 ==0 ?"触发型":"单次型"; 52 | 53 | ruleStatus.put("ruleName", ruleDefine.getRuleName()); 54 | RuleStatus r1 = new RuleStatus(ruleDefine.getRuleName(), ruleId + "", "", publishTime, ruleType, "0", "0%", "0", "0%", "0%", "1"); 55 | String s = JSON.toJSONString(r1); 56 | HashMap map = (HashMap) JSON.parseObject(s, Map.class); 57 | System.out.println(map); 58 | Set> entries = map.entrySet(); 59 | for (Map.Entry entry : entries) { 60 | jedis.hset("rule_status_" + ruleId, entry.getKey(),entry.getValue()); 61 | } 62 | return "ok"; 63 | } 64 | 65 | 66 | private void insertRule2Mysql(String ruleName) throws Exception { 67 | String ruleCode = FileUtils.readFileToString(new File("dynamic_rule_engine/rules_drl/rule2.drl"), "utf-8"); 68 | int ruleStatus = 1; 69 | String ruleType = "1"; 70 | String ruleVersion = "1"; 71 | String cntSqls = FileUtils.readFileToString(new File("dynamic_rule_engine/rules_drl/rule2_cnt.sql"), "utf-8"); 72 | String seqSqls = FileUtils.readFileToString(new File("dynamic_rule_engine/rules_drl/rule2_seq.sql"), "utf-8"); 73 | String ruleCreator = "doitedu"; 74 | String ruleAuditor = "hunter.d"; 75 | java.sql.Date createTime = new java.sql.Date(System.currentTimeMillis()); 76 | java.sql.Date updateTime = createTime; 77 | Connection conn = DriverManager.getConnection("jdbc:mysql://hdp01:3306/realtimedw?useUnicode=true&characterEncoding=utf8", "root", "ABC123abc.123"); 78 | PreparedStatement pst = conn.prepareStatement("insert into yinew_drl_rule (rule_name,rule_code,rule_status,rule_type,rule_version,cnt_sqls,seq_sqls,rule_creator,rule_auditor,create_time,update_time) " + 79 | "values (?,?,?,?,?,?,?,?,?,?,?)"); 80 | pst.setString(1,ruleName); 81 | pst.setString(2,ruleCode); 82 | pst.setInt(3,ruleStatus); 83 | pst.setString(4,ruleType); 84 | pst.setString(5,ruleVersion); 85 | pst.setString(6,cntSqls); 86 | pst.setString(7,seqSqls); 87 | pst.setString(8,ruleCreator); 88 | pst.setString(9,ruleAuditor); 89 | pst.setDate(10,createTime); 90 | pst.setDate(11,updateTime); 91 | 92 | boolean execute = pst.execute(); 93 | 94 | pst.close(); 95 | conn.close(); 96 | 97 | 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/pojo/Animal.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | @AllArgsConstructor 10 | public class Animal { 11 | 12 | private String name; 13 | private int price; 14 | } 15 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/pojo/RuleAtomicParam.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | 
import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | import java.io.Serializable; 8 | import java.util.HashMap; 9 | 10 | /** 11 | * @author 涛哥 12 | * @nick_name "deep as the sea" 13 | * @contact qq:657270652 wx:doit_edu 14 | * @site www.doitedu.cn 15 | * @date 2021-03-28 16 | * @desc 规则参数中的原子条件封装实体 17 | */ 18 | @Data 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | public class RuleAtomicParam implements Serializable { 22 | 23 | // 事件的类型要求 24 | private String eventId; 25 | 26 | // 事件的属性要求 27 | private HashMap properties; 28 | 29 | // 规则要求的阈值 30 | private int cnt; 31 | 32 | // 要求的事件发生时间段起始 33 | private long rangeStart; 34 | 35 | // 要求的事件发生时间段结束 36 | private long rangeEnd; 37 | 38 | // 条件对应的clickhouse查询sql 39 | private String countQuerySql; 40 | 41 | // 用于记录查询服务所返回的查询值 42 | private int realCnt; 43 | 44 | // 用于记录初始 range 45 | private long originStart; 46 | public void setOriginStart(long originStart){ 47 | this.originStart = originStart; 48 | this.rangeStart = originStart; 49 | } 50 | private long originEnd; 51 | public void setOriginEnd(long originEnd){ 52 | this.originEnd = originEnd; 53 | this.rangeEnd = originEnd; 54 | } 55 | 56 | 57 | } 58 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/pojo/RuleDefine.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform.pojo; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import org.springframework.web.context.annotation.ApplicationScope; 7 | 8 | import java.io.Serializable; 9 | import java.util.List; 10 | 11 | /*** 12 | * @author hunter.d 13 | * @qq 657270652 14 | * @wx haitao-duan 15 | * @date 2021/5/9 16 | **/ 17 | @Data 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | public class RuleDefine implements Serializable { 21 | private String ruleName; 22 | } 23 | -------------------------------------------------------------------------------- /manageplatform/src/main/java/cn/doitedu/yinew/manageplatform/pojo/RuleStatus.java: -------------------------------------------------------------------------------- 1 | package cn.doitedu.yinew.manageplatform.pojo; 2 | 3 | 4 | import lombok.AllArgsConstructor; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | import java.io.Serializable; 9 | 10 | @Data 11 | @NoArgsConstructor 12 | @AllArgsConstructor 13 | public class RuleStatus implements Serializable { 14 | /** 15 | * { 16 | * ruleName: '运营公众号拉新', 17 | * ruleType: '触发型', 18 | * publishTime: '2021-04-01 12:30:45', 19 | * lastTrigTime: '2021-06-08 13:30:30', 20 | * trigCount: 80, 21 | * hitCount: 20, 22 | * hitRatio: '30%', 23 | * compareGroupRatio: '20%', 24 | * ruleGroupRatio: '40%', 25 | * runStatus:true 26 | * } 27 | */ 28 | 29 | private String ruleName; 30 | private String ruleId; 31 | private String lastTrigTime; 32 | private String publishTime; 33 | private String ruleType; 34 | private String trigCount; 35 | private String hitRatio; 36 | private String hitCount; 37 | private String compareGroupRatio; 38 | private String ruleGroupRatio; 39 | private String runStatus; 40 | 41 | } 42 | -------------------------------------------------------------------------------- /manageplatform/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- 
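(Note on the RuleAtomicParam pojo above: its setOriginStart/setOriginEnd setters record the rule's original time window and at the same time initialize the working rangeStart/rangeEnd to that window, presumably so that later query routing can narrow the working range while still remembering the originally requested window. The short sketch below only illustrates how one atomic count condition could be assembled with this class; the sketch class name, the event id "H", the property values and the threshold are made-up sample values, not data from the project.)

    import java.util.HashMap;
    import cn.doitedu.yinew.manageplatform.pojo.RuleAtomicParam;

    public class RuleAtomicParamSketch {
        public static void main(String[] args) {
            // one atomic count condition: event "H" with property p1=v1, required at least 3 times
            RuleAtomicParam param = new RuleAtomicParam();
            param.setEventId("H");

            HashMap<String, String> props = new HashMap<>();
            props.put("p1", "v1");
            param.setProperties(props);
            param.setCnt(3);

            // setting the origin window also resets the working query window:
            // after these calls rangeStart == originStart and rangeEnd == originEnd
            param.setOriginStart(0L);
            param.setOriginEnd(Long.MAX_VALUE);
        }
    }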
/manageplatform/src/main/resources/templates/demo.ftl:
--------------------------------------------------------------------------------
<html>
<head>
<title>Welcome!</title>
</head>
<body>
<#-- Greet the user with his/her name -->
<h1>Welcome ${user}!</h1>
<p>We have these animals:
<ul>
<#list animals as animal>
  <li>${animal.name} for ${animal.price} Euros
</#list>
</ul>
</body>
</html>
--------------------------------------------------------------------------------
/manageplatform/src/main/resources/templates/eventCountModel.ftl:
--------------------------------------------------------------------------------
<#list events as eparam >
select
  deviceId,
  count(1) as cnt
from yinew_detail
where
  deviceId = ?
  and eventId='${eparam.eventId}'
  <#list eparam.properties?keys as key>
  and properties['${key}']='${eparam.properties[key]}'
  </#list>
  and timeStamp>= ?
  and timeStamp <= ?
group by deviceId;
</#list>
--------------------------------------------------------------------------------
/manageplatform/src/test/java/cn/doitedu/yinew/manageplatform/ManageplatformApplicationTests.java:
--------------------------------------------------------------------------------
package cn.doitedu.yinew.manageplatform;

import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;

@SpringBootTest
class ManageplatformApplicationTests {

    @Test
    void contextLoads() {
    }

}
--------------------------------------------------------------------------------
/manageplatform/template/rule2.drl:
--------------------------------------------------------------------------------
package cn.doitedu.dynamic_rules.rules
import cn.doitedu.dynamic_rule.pojo.DroolFact
import cn.doitedu.dynamic_rule.pojo.LogBean
import cn.doitedu.dynamic_rule.pojo.RuleParam
import cn.doitedu.dynamic_rule.service.QueryRouterV4
import java.util.HashMap
import cn.doitedu.dynamic_rule.pojo.RuleAtomicParam
import java.util.List
import java.util.ArrayList

rule "rule2"

when
    $d:DroolFact()
    $b:LogBean(eventId == "F") from $d.logBean
then
    RuleParam ruleParam = $d.getRuleParam();
    ruleParam.setRuleId("rule2");
    QueryRouterV4 queryRouter = $d.getQueryRouterV4();

    // fill in this rule's user-profile conditions
    HashMap<String, String> profileParams = new HashMap<>();
    profileParams.put("tag7","v2");
    profileParams.put("tag3","v1");
    ruleParam.setUserProfileParams(profileParams);

    // fill in this rule's count-type conditions (take the cnt condition list straight from the ruleParam; it already carries the generated SQL)
    List<RuleAtomicParam> countParams = ruleParam.getUserActionCountParams();

    RuleAtomicParam param0 = countParams.get(0);
    param0.setEventId("H");
    HashMap<String, String> props0 = new HashMap<>();
    props0.put("p1","v1");
    props0.put("p2","v3");
    param0.setProperties(props0);
    param0.setCnt(8);
    param0.setOriginStart(0);
    param0.setOriginEnd(Long.MAX_VALUE);


    // fill in this rule's action-sequence conditions
    ArrayList<RuleAtomicParam> seqParams = new ArrayList<>();

    RuleAtomicParam seq0 = new RuleAtomicParam();
    seq0.setEventId("A");
    HashMap<String, String> seq0prop = new HashMap<>();
    seq0prop.put("p1","v1");
    seq0.setProperties(seq0prop);
    seq0.setOriginStart(0);
    seq0.setOriginEnd(Long.MAX_VALUE);


    RuleAtomicParam seq1 = new RuleAtomicParam();
    seq1.setEventId("C");
    HashMap<String, String> seq1prop = new HashMap<>();
    seq1prop.put("p2","v2");
    seq1.setProperties(seq1prop);
    seq1.setOriginStart(0);
    seq1.setOriginEnd(Long.MAX_VALUE);

    seqParams.add(seq0);
    seqParams.add(seq1);

    ruleParam.setUserActionSequenceParams(seqParams);

    // run the matching queries
    if(
        queryRouter.profileQuery($b, ruleParam)
        &&
        queryRouter.sequenceConditionQuery($b, ruleParam)
        &&
        queryRouter.countConditionQuery($b, ruleParam)
    ){
        // set the match result
        $d.setMatch(true);
    }

end
--------------------------------------------------------------------------------
/manageplatform/template/rule2_cnt.sql:
--------------------------------------------------------------------------------
select
  deviceId,
  count(1) as cnt
from yinew_detail
where deviceId = ? and eventId = 'H' and properties['p1']='v1'
  and timeStamp between ? and ?
group by deviceId
--------------------------------------------------------------------------------
/manageplatform/template/rule2_seq.sql:
--------------------------------------------------------------------------------
SELECT
  deviceId,
  sequenceMatch('.*(?1).*(?2).*')(
    toDateTime(`timeStamp`),
    eventId = 'A' and properties['p1']='v1',
    eventId = 'C' and properties['p2']='v2'
  ) as isMatch2,
  sequenceMatch('.*(?1).*')(
    toDateTime(`timeStamp`),
    eventId = 'A' and properties['p1']='v1',
    eventId = 'C' and properties['p2']='v2'
  ) as isMatch1
from yinew_detail
where
  deviceId = ?
  and
  timeStamp BETWEEN ? AND ?
  and
  (
    (eventId='A' and properties['p1']='v1')
    or (eventId = 'C' and properties['p2']='v2')
  )
group by deviceId;
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>cn.doitedu</groupId>
    <artifactId>yinew_marketing</artifactId>
    <packaging>pom</packaging>
    <version>1.0</version>

    <modules>
        <module>dynamic_rule_engine</module>
        <module>user_profile</module>
        <module>data_analysis</module>
        <module>log4jdemo</module>
    </modules>

    <repositories>
        <repository>
            <id>nexus-aliyun</id>
            <name>Nexus aliyun</name>
            <layout>default</layout>
            <url>http://maven.aliyun.com/nexus/content/groups/public</url>
            <snapshots>
                <enabled>false</enabled>
                <updatePolicy>never</updatePolicy>
            </snapshots>
            <releases>
                <enabled>true</enabled>
                <updatePolicy>never</updatePolicy>
            </releases>
        </repository>
    </repositories>

    <pluginRepositories>
        <pluginRepository>
            <id>ali-plugin</id>
            <url>http://maven.aliyun.com/nexus/content/groups/public/</url>
            <snapshots>
                <enabled>false</enabled>
                <updatePolicy>never</updatePolicy>
            </snapshots>
            <releases>
                <enabled>true</enabled>
                <updatePolicy>never</updatePolicy>
            </releases>
        </pluginRepository>
    </pluginRepositories>

    <dependencies>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.18</version>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.75</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.5.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>
--------------------------------------------------------------------------------
/user_profile/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>yinew_marketing</artifactId>
        <groupId>cn.doitedu</groupId>
        <version>1.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>user_profile</artifactId>

</project>
--------------------------------------------------------------------------------
/user_profile/src/main/java/cn/doitedu/userprofile/demo/Demo.java:
--------------------------------------------------------------------------------
package cn.doitedu.userprofile.demo;

public class Demo {
}
--------------------------------------------------------------------------------
/项目介绍.MD:
--------------------------------------------------------------------------------
# Background
- This project is a classic case of applying big data technologies to a core business system
- Target capability: real-time marketing message push driven by dynamically defined rules
- Scenarios the design extends to: real-time recommendation, real-time risk control, real-time targeted ad delivery, and so on


# Requirement examples
The following examples give an intuitive picture of the requirements this project is built for.

## Operational message push (marketing)
Scenario (1): the company wants to reactivate dormant users by sending them a coupon

> - between 2021-02 and 2021-05
> - total spend < 500
> - and average monthly visits < 4
> - and gender is male
> - members aged between 20 and 40

As soon as such a user is seen logging into the app, immediately pop up a "spend 100, save 60" message in the app and grant that coupon.

----


Scenario (2): the company wants to nudge hesitant potential buyers in a certain category toward placing an order

> within the last week, the user has performed the following actions, in this order
> - browsed products in the portable-hard-drive category
> - favorited products in the portable-hard-drive category
> - clicked an operations banner ad for the portable-hard-drive category
> - added a portable-hard-drive product to the shopping cart

As soon as the user is seen browsing the portable-hard-drive category again, immediately push an SMS such as "limited-time discount on Seagate portable hard drives". (An illustrative SQL sketch of how such a sequence rule maps onto the engine's ClickHouse query is appended at the end of this document.)

----

Other similar scenarios, for illustration:

![图片1.png](https://i.loli.net/2021/05/10/f9XvdQz4RtZaWKh.png)

![图片2.png](https://i.loli.net/2021/05/10/p85JQlxecNw6y1F.png)

![图片3.png](https://i.loli.net/2021/05/10/pksPeLqiorhOjIK.png)

![图片4.png](https://i.loli.net/2021/05/10/yH42XBcKa9zmuh8.png)

![图片5.png](https://i.loli.net/2021/05/10/Q1txel4viMJZoYV.png)



## Real-time rule-based risk control (risk control system)

- the user name does not match the name on the ID card;

- a single IP has registered more than 10 accounts within the last hour;

- a single account has logged in more than 5 times within the last 3 minutes;

- a group of accounts has bought more than 100 discounted items within the last hour;

- a single account has claimed more than 3 coupons within the last 3 minutes;



## Real-time rule-based recommendation (recommender system)



## Real-time intelligent ad delivery (ad push)




> In short: everything is a rule, and every rule is a dynamic rule




# Overall project structure

![图片6.png](https://i.loli.net/2021/05/10/Bq2pLjtOvJ4FYx7.png)


--------------------------------------------------------------------------------
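Appendix (illustrative): how a sequence-type rule maps to the engine's ClickHouse query.
A sequence rule such as scenario (2) above is ultimately evaluated as a sequenceMatch query against the event-detail table, in the same shape as manageplatform/template/rule2_seq.sql. The sketch below only illustrates that mapping under assumed identifiers: the event ids "browse", "fav", "adClick", "addCart" and the property key "category" are made-up placeholders, while the table yinew_detail and the columns deviceId, eventId, properties and timeStamp are taken from the SQL already in this repo.

    SELECT
      deviceId,
      -- 1 only if the four steps occurred in the required order within the queried window
      sequenceMatch('.*(?1).*(?2).*(?3).*(?4).*')(
        toDateTime(`timeStamp`),
        eventId = 'browse'  and properties['category'] = 'portable_hdd',
        eventId = 'fav'     and properties['category'] = 'portable_hdd',
        eventId = 'adClick' and properties['category'] = 'portable_hdd',
        eventId = 'addCart' and properties['category'] = 'portable_hdd'
      ) as isMatch4
    FROM yinew_detail
    WHERE deviceId = ?
      AND timeStamp BETWEEN ? AND ?
      AND (
            (eventId = 'browse'  and properties['category'] = 'portable_hdd')
         or (eventId = 'fav'     and properties['category'] = 'portable_hdd')
         or (eventId = 'adClick' and properties['category'] = 'portable_hdd')
         or (eventId = 'addCart' and properties['category'] = 'portable_hdd')
      )
    GROUP BY deviceId;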