├── .gitignore
├── RiskEngine-api
├── pom.xml
└── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── linqi
│ │ └── RiskCtrlSys
│ │ └── api
│ │ ├── Application.java
│ │ ├── controller
│ │ └── HelloController.java
│ │ └── exception
│ │ └── ApiExceptionHandler.java
│ └── test
│ └── java
│ └── com
│ └── imooc
│ └── RiskCtrl
│ └── api
│ └── controller
│ └── HelloControllerTest.java
├── RiskEngine-commons
└── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── linqi
│ │ └── RiskCtrlSys
│ │ └── commons
│ │ ├── constants
│ │ └── ConstantsUtil.java
│ │ └── exception
│ │ ├── BizRuntimeException.java
│ │ ├── CustomExceptionDemo.java
│ │ ├── Log4j2AndSlf4jDemo.java
│ │ ├── custom
│ │ ├── FlinkPropertiesException.java
│ │ ├── RedisException.java
│ │ └── UtilException.java
│ │ └── enums
│ │ ├── BizExceptionInfo.java
│ │ ├── FlinkPropertiesExceptionInfo.java
│ │ ├── RedisExceptionInfo.java
│ │ └── UtilExceptionInfo.java
│ └── test
│ └── java
│ └── com
│ └── linqi
│ └── RiskCtrlSys
│ └── commons
│ └── exception
│ ├── CustomExceptionTest.java
│ └── Log4j2AndSlf4jTest.java
├── RiskEngine-dao
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── com
│ │ └── linqi
│ │ └── RiskCtrlSys
│ │ └── dao
│ │ ├── CodeGenerator
│ │ ├── GlobalConfigGenerator.java
│ │ ├── PackageConfigGenerator.java
│ │ └── StrategyConfigGenerator.java
│ │ └── MybatisPlusCodeGenerator.java
│ └── resources
│ └── application.yml
├── RiskEngine-flink
├── pom.xml
└── src
│ └── main
│ ├── groovy
│ └── scripts
│ │ └── cep
│ │ ├── LoginFailBySingleton.groovy
│ │ └── template
│ │ ├── Circulate.groovy
│ │ ├── CirculateWithConsecutive.groovy
│ │ ├── CompositeBy2.groovy
│ │ ├── CompositeBy2WithConsecutive.groovy
│ │ └── CompositeBy3WithConsecutive.groovy
│ ├── java
│ ├── flink
│ │ ├── CepJudgeJob.java
│ │ ├── GeneralMetricJob.java
│ │ ├── RuleJudgeJob.java
│ │ ├── clickhouse
│ │ │ ├── sink
│ │ │ │ ├── ClickHouseJdbcSink.java
│ │ │ │ └── ClickHouseJdbcStatementBuilder.java
│ │ │ └── source
│ │ │ │ └── ClickHouseSource.java
│ │ ├── redis
│ │ │ ├── conf
│ │ │ │ ├── ImoocRedisCommand.java
│ │ │ │ ├── ImoocRedisDataType.java
│ │ │ │ ├── JedisBuilder.java
│ │ │ │ └── JedisConf.java
│ │ │ ├── sink
│ │ │ │ └── RedisSinkByBahirWithString.java
│ │ │ └── source
│ │ │ │ └── ImoocRedisSource.java
│ │ └── utils
│ │ │ ├── AviatorUtil.java
│ │ │ ├── ClickHouseUtil.java
│ │ │ ├── DataStreamUtil.java
│ │ │ ├── EventConstantUtil.java
│ │ │ ├── FlinkCDCUtil.java
│ │ │ ├── GroovyUtil.java
│ │ │ ├── JoinUtil.java
│ │ │ ├── KafkaUtil.java
│ │ │ ├── MysqlUtil.java
│ │ │ ├── ParameterConstantsUtil.java
│ │ │ ├── ParameterUtil.java
│ │ │ ├── RedisKeyUtil.java
│ │ │ ├── RedisReadUtil.java
│ │ │ ├── RedisWriteUtil.java
│ │ │ └── RuleUtil.java
│ └── org
│ │ └── apache
│ │ └── flink
│ │ └── cep
│ │ └── time
│ │ ├── TimeContext.java
│ │ └── TimerService.java
│ └── resources
│ ├── flink-dev.properties
│ ├── flink-prod.properties
│ ├── flink.properties
│ └── log4j2.xml
├── RiskEngine-model
├── pom.xml
└── src
│ └── main
│ └── java
│ └── com
│ └── linqi
│ └── RiskCtrlSys
│ └── model
│ ├── ActionPO.java
│ ├── ActivityPO.java
│ ├── CHTestPO.java
│ ├── DevicePO.java
│ ├── EventContextPO.java
│ ├── EventPO.java
│ ├── KafkaMessagePO.java
│ ├── MetricsConfPO.java
│ ├── ProductPO.java
│ ├── ProfilePO.java
│ ├── RedisPO.java
│ ├── RiskInfoPO.java
│ ├── RulesPO.java
│ └── SingleRulePO.java
├── RiskEngine-service
├── pom.xml
└── src
│ └── main
│ └── java
│ └── com
│ └── linqi
│ └── RiskCtrlSys
│ └── service
│ ├── actions
│ ├── Action.java
│ ├── EmailAction.java
│ ├── InclusionAction.java
│ ├── MarkAction.java
│ ├── RiskWeightAction.java
│ └── WarnAlertAction.java
│ └── activity
│ ├── Activity.java
│ ├── InclusionActivity.java
│ └── MarkActivity.java
├── RiskEngine-utils
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ └── com
│ │ │ └── linqi
│ │ │ └── RiskCtrlSys
│ │ │ └── utils
│ │ │ ├── Application.java
│ │ │ ├── common
│ │ │ └── CommonUtil.java
│ │ │ ├── date
│ │ │ └── DateUtil.java
│ │ │ ├── hbase
│ │ │ ├── HbaseConf.java
│ │ │ ├── HbaseProperties.java
│ │ │ └── HbaseUtil.java
│ │ │ ├── json
│ │ │ └── JsonUtil.java
│ │ │ └── redis
│ │ │ ├── RedisConfig.java
│ │ │ ├── RedisPoolProperties.java
│ │ │ ├── RedisProperties.java
│ │ │ └── RedisUtil.java
│ └── resources
│ │ └── application.yml
│ └── test
│ └── java
│ └── com
│ └── linqi
│ └── RiskCtrlSys
│ └── utils
│ ├── common
│ └── CommonUtilTest.java
│ ├── date
│ ├── DateUtilTest.java
│ └── DateUtilTest11.java
│ ├── hbase
│ └── HbaseUtilTest.java
│ ├── json
│ └── JsonUtilTest.java
│ └── redis
│ └── RedisUtilTest.java
└── pom.xml
/.gitignore:
--------------------------------------------------------------------------------
1 | ### Java template
2 | # Compiled class file
3 | *.class
4 |
5 | # Log file
6 | *.log
7 |
8 | # BlueJ files
9 | *.ctxt
10 |
11 | # Mobile Tools for Java (J2ME)
12 | .mtj.tmp/
13 |
14 | # Package Files #
15 | *.jar
16 | *.war
17 | *.nar
18 | *.ear
19 | *.zip
20 | *.tar.gz
21 | *.rar
22 |
23 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
24 | hs_err_pid*
25 |
26 | ### Example user template template
27 | ### Example user template
28 |
29 | # IntelliJ project files
30 | .idea
31 | *.iml
32 | out
33 | gen
34 |
--------------------------------------------------------------------------------
/RiskEngine-api/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>LinQiRiskCtrlSys</artifactId>
7 |         <groupId>com.linqi</groupId>
8 |         <version>1.0-SNAPSHOT</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 |
12 |     <artifactId>RiskEngine-api</artifactId>
13 |     <packaging>jar</packaging>
14 |
15 |     <dependencies>
16 |         <!-- Spring Boot -->
17 |         <dependency>
18 |             <groupId>org.springframework.boot</groupId>
19 |             <artifactId>spring-boot-starter-web</artifactId>
20 |         </dependency>
21 |         <dependency>
22 |             <groupId>org.springframework.boot</groupId>
23 |             <artifactId>spring-boot-starter</artifactId>
24 |         </dependency>
25 |
26 |         <!-- project modules -->
27 |         <dependency>
28 |             <groupId>com.linqi</groupId>
29 |             <artifactId>RiskEngine-commons</artifactId>
30 |             <version>1.0-SNAPSHOT</version>
31 |         </dependency>
32 |         <dependency>
33 |             <groupId>com.linqi</groupId>
34 |             <artifactId>RiskEngine-model</artifactId>
35 |             <version>1.0-SNAPSHOT</version>
36 |         </dependency>
37 |         <dependency>
38 |             <groupId>com.linqi</groupId>
39 |             <artifactId>RiskEngine-service</artifactId>
40 |             <version>1.0-SNAPSHOT</version>
41 |         </dependency>
42 |     </dependencies>
43 |
44 | </project>
--------------------------------------------------------------------------------
/RiskEngine-api/src/main/java/com/linqi/RiskCtrlSys/api/Application.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.api;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 |
6 | /**
7 | * author: linqi
8 | * description: Spring Boot bootstrap (application entry) class
9 | * date: 2024
10 | */
11 |
12 | @SpringBootApplication
13 | public class Application {
14 | public static void main(String[] args) {
15 | SpringApplication.run(Application.class);
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/RiskEngine-api/src/main/java/com/linqi/RiskCtrlSys/api/controller/HelloController.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.api.controller;
2 |
3 | import org.springframework.beans.factory.annotation.Autowired;
4 | import org.springframework.web.bind.annotation.RequestMapping;
5 | import org.springframework.web.bind.annotation.RequestMethod;
6 | import org.springframework.web.bind.annotation.RestController;
7 |
8 | import javax.annotation.Resource;
9 |
10 | /**
11 | * author: linqi
12 | * description: Controller Demo
13 | * date: 2024
14 | */
15 |
16 | @RestController
17 | @RequestMapping(value = "/hello")
18 | public class HelloController {
19 |
20 | @RequestMapping(value = "/test",method = RequestMethod.POST)
21 | public String testHello() {
22 | return "this junit5 MockMvc Test";
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/RiskEngine-api/src/main/java/com/linqi/RiskCtrlSys/api/exception/ApiExceptionHandler.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.api.exception;
2 |
3 | import com.linqi.RiskCtrlSys.commons.exception.custom.RedisException;
4 | import org.springframework.web.bind.annotation.ExceptionHandler;
5 | import org.springframework.web.bind.annotation.RestControllerAdvice;
6 |
7 | /**
8 | * author: linqi
9 | * description: Global exception handler (API layer)
10 | * date: 2024
11 | */
12 |
13 | @RestControllerAdvice
14 | public class ApiExceptionHandler {
15 |
16 | @ExceptionHandler(value = RedisException.class)
17 | public void RedisExceptionHandler(RedisException e) {
18 | System.out.println("RedisExceptionHandler!!!!!!!");
19 | //TODO error handling
20 | }
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/RiskEngine-api/src/test/java/com/imooc/RiskCtrl/api/controller/HelloControllerTest.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrl.api.controller;
2 |
3 | import com.linqi.RiskCtrlSys.api.Application;
4 | import org.junit.jupiter.api.*;
5 | import org.springframework.beans.factory.annotation.Autowired;
6 | import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
7 | import org.springframework.boot.test.context.SpringBootTest;
8 | import org.springframework.http.MediaType;
9 | import org.springframework.test.web.servlet.MockMvc;
10 | import org.springframework.test.web.servlet.MvcResult;
11 |
12 |
13 | import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
14 | import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
15 | import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
16 |
17 | /**
18 | * author: linqi
19 | * description: Testing the web API with JUnit 5
20 | * date: 2024
21 | */
22 |
23 | /* **********************
24 | * JUnit 4 needs @RunWith(SpringRunner.class);
25 | * JUnit 5 replaces @RunWith() with @ExtendWith(SpringExtension.class), and @SpringBootTest already includes @ExtendWith()
26 | *
27 | * JUnit 4 has @Before; JUnit 5 replaces it with @BeforeEach
28 | *
29 | * *********************/
30 |
31 | /* **********************
32 | *
33 | * Mock object:
34 | * a simulated object that mimics the behavior of a real object;
35 | * use a mock when the real object cannot be used in a unit test
36 | *
37 | * MockMvc object:
38 | * simulates HTTP requests, so Controller web APIs can be unit tested
39 | *
40 | * *********************/
41 |
42 | /* **********************
43 | * Note:
44 | *
45 | * @SpringBootTest looks for a Spring bootstrap class by default;
46 | * if the module under test declares no bootstrap class (no @SpringBootApplication),
47 | * you must tell @SpringBootTest explicitly which class to start
48 | *
49 | * *********************/
50 |
51 | //Note: the RiskEngine-api bootstrap class must be started here, otherwise MockMvc cannot reach the URL
52 | @SpringBootTest(classes = Application.class)
53 | /* **********************
54 | *
55 | * @TestMethodOrder:
56 | * controls the execution priority of test methods,
57 | * used together with @Order
58 | *
59 | * *********************/
60 | @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
61 | /* **********************
62 | *
63 | * @AutoConfigureMockMvc:
64 | *
65 | * auto-configures MockMvc,
66 | * used together with @Autowired
67 | *
68 | * *********************/
69 | @AutoConfigureMockMvc
70 | public class HelloControllerTest {
71 |
72 | @Autowired
73 | private MockMvc mockMvc;
74 |
75 | @DisplayName("测试Junit5 Hello World")
76 | @Test
77 | @Order(2)
78 | public void testHello2() {
79 | System.out.println("linqi-hello2");
80 | }
81 |
82 | @DisplayName("测试方法优先级")
83 | @Test
84 | @Order(1)
85 | public void testHello1() {
86 | System.out.println("linqi-hello1");
87 | }
88 |
89 | @BeforeEach
90 | public void setup(){
91 | System.out.println("====== start =======");
92 | }
93 |
94 | @DisplayName("测试MockMvc模拟Api调用")
95 | @Test
96 | @Order(3)
97 | public void testMockMvc() throws Exception {
98 | //build the request
99 | MvcResult result = mockMvc.perform(
100 | post("/hello/test")
101 | //set the content type
102 | .contentType(MediaType.APPLICATION_JSON)
103 | .accept(MediaType.APPLICATION_JSON)
104 | )
105 | //assert the response status
106 | .andExpect(status().isOk())
107 | //print the request/response details
108 | .andDo(print())
109 | .andReturn();
110 |
111 | //print the response body
112 | System.out.println(result.getResponse().getContentAsString());
113 |
114 |
115 | }
116 |
117 |
118 | }
119 |
--------------------------------------------------------------------------------
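Note on the test above: it only prints the response body. If you also want to assert it, MockMvc's content() matcher can be chained onto the existing expectations inside testMockMvc(). A small sketch (the expected string is an assumption and has to match whatever HelloController actually returns):

    // additional static import
    import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;

    mockMvc.perform(post("/hello/test")
            .contentType(MediaType.APPLICATION_JSON)
            .accept(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
        .andExpect(content().string("this junit5 MockMvc Test"));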
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/constants/ConstantsUtil.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.constants;
2 |
3 | /**
4 | * author: linqi
5 | * description: Constants utility class
6 | * date: 2024
7 | */
8 |
9 | public class ConstantsUtil {
10 |
11 | /**
12 | * Project root path
13 | */
14 | public static final String PROJECT_DIR = System.getProperty("user.dir");
15 | /**
16 | * Package path
17 | */
18 | public static final String PACKAGE_DIR = "com/linqi/RiskCtrlSys";
19 |
20 | /**
21 | * src path
22 | */
23 | public static final String SRC_MAIN_JAVA = "/src/main/java/";
24 | /**
25 | * Mapper XML output path
26 | */
27 | public static final String XML_PATH = PROJECT_DIR + "/RiskEngine-dao/src/main/resources/mapper";
28 | /**
29 | * Entity output path
30 | */
31 | public static final String ENTITY_PATH = PROJECT_DIR + "/RiskEngine-model/src/main/java/"+ PACKAGE_DIR +"/model/mapper";
32 | /**
33 | * Mapper output path
34 | */
35 | public static final String MAPPER_PATH = PROJECT_DIR + "/RiskEngine-dao/src/main/java/"+ PACKAGE_DIR +"/dao/mapper";
36 | /**
37 | * Service output path
38 | */
39 | public static final String SERVICE_PATH = PROJECT_DIR + "/RiskEngine-service/src/main/java/"+ PACKAGE_DIR +"/service/mapper";
40 | /**
41 | * ServiceImpl output path
42 | */
43 | public static final String SERVICE_IMPL_PATH = PROJECT_DIR + "/RiskEngine-service/src/main/java/"+ PACKAGE_DIR +"/service/impl";
44 | /**
45 | * Controller output path
46 | */
47 | public static final String CONTROLLER_PATH = PROJECT_DIR + "/RiskEngine-api/src/main/java/"+ PACKAGE_DIR +"/api/controller";
48 |
49 | /**
50 | * Parent package
51 | */
52 | public static final String PARENT_PACKAGE = "com.linqi.RiskCtrlSys";
53 |
54 | /**
55 | * Controller package
56 | */
57 | public static final String CONTROLLER_PACKAGE = "api.controller";
58 |
59 | /**
60 | * Entity package
61 | */
62 | public static final String ENTITY_PACKAGE = "model.mapper";
63 |
64 | /**
65 | * Mapper (DAO) package
66 | */
67 | public static final String MAPPER_PACKAGE = "dao.mapper";
68 |
69 | /**
70 | * Service package
71 | */
72 | public static final String SERVICE_PACKAGE = "service.mapper";
73 | public static final String SERVICE_IMPL_PACKAGE = "service.impl";
74 |
75 |
76 | public static final String MAPPER_XML_PACKAGE = "dao.mapper";
77 |
78 |
79 | /**
80 | * Database URL
81 | */
82 | public static final String MYSQL_URL = "jdbc:mysql://mysql:3306/linqi?serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8&useSSL=false"
83 | ;
84 | /**
85 | * Database username
86 | */
87 | public static final String USERNAME = "root";
88 | /**
89 | * Database password
90 | */
91 | public static final String PASSWORD = "123456";
92 | /**
93 | * Author
94 | */
95 | public static final String AUTHOR = "linqi";
96 | /**
97 | * Date format
98 | */
99 | public static final String DATE = "yyyy-MM-dd";
100 | /**
101 | * Utility module name
102 | */
103 | public static final String MODULE_UTILS = "RiskEngine-utils";
104 |
105 | /**
106 | * Flink module name
107 | */
108 | public static final String MODULE_FLINK = "RiskEngine-flink";
109 | /**
110 | * Groovy script path
111 | */
112 | public static final String GROOVY_SCRIPTS_PATH = "\\src\\main\\groovy\\scripts\\cep\\";
113 |
114 | /**
115 | * Groovy script template path
116 | */
117 | public static final String GROOVY_SCRIPTS_TEMP_PATH = "\\src\\main\\groovy\\scripts\\cep\\template\\";
118 |
119 |
120 |
121 | /**
122 | * DDL_METRIC_ATTR holds the column definitions of a metric attribute.
123 | * metric_id: metric id, type BIGINT.
124 | * metric_name: metric name, type STRING.
125 | * metric_sign: metric sign, type STRING.
126 | * metric_code: metric code, type STRING.
127 | * scene: business scene of the metric, type STRING.
128 | * event: event of the metric, type STRING.
129 | * main_dim: main dimension of the metric, type STRING.
130 | * aggregation: aggregation field of the metric, type STRING.
131 | * is_enable: whether the metric is enabled, type STRING.
132 | * window_size: window size of the metric, type STRING.
133 | * window_step: window slide (step) of the metric, type STRING.
134 | * window_type: window type of the metric, type STRING.
135 | * flink_filter: filter condition used in Flink, type STRING.
136 | * flink_keyby: keyBy field used in Flink, type STRING.
137 | * flink_watermark: watermark strategy used in Flink, type STRING.
138 | * metric_agg_type: aggregation type of the metric, type STRING.
139 | * metric_store: storage target of the metric, type STRING.
140 | * datasource: data source of the metric, type STRING.
141 | * rule_id: id of the rule the metric belongs to, type BIGINT.
142 | */
143 | public static final String DDL_METRIC_ATTR = ""+
144 | "metric_id BIGINT,\n"+
145 | "metric_name STRING, \n" +
146 | "metric_code STRING, \n" +
147 | "scene STRING, \n" +
148 | "event STRING, \n" +
149 | "main_dim STRING, \n" +
150 | "aggregation STRING, \n" +
151 | "is_enable STRING, \n" +
152 | "window_size STRING, \n" +
153 | "window_step STRING, \n" +
154 | "window_type STRING, \n" +
155 | "flink_filter STRING, \n" +
156 | "flink_keyby STRING, \n" +
157 | "flink_watermark STRING, \n" +
158 | "metric_agg_type STRING, \n" +
159 | "metric_store STRING, \n" +
160 | "datasource STRING, \n" +
161 | "rule_id BIGINT, \n" +
162 | "PRIMARY KEY (metric_id) NOT ENFORCED \n"
163 | ;
164 |
165 | /**
166 | * Table name: risk metric attributes
167 | */
168 | public static final String TABLE_NAME_METRIC_ATTR = "metric_attr";
169 |
170 | /**
171 | * Flink job submission parameter key: unique code of the rule set
172 | */
173 | public static final String ARGS_SET_CODE = "set.code";
174 | /**
175 | * Flink job submission parameter key: unique code of the rule
176 | */
177 | public static final String ARGS_RULE_CODE = "rule.code";
178 | /**
179 | * Flink job submission parameter key: Groovy template name
180 | */
181 | public static final String ARGS_GROOVY_NAME = "groovy.name";
182 |
183 | /**
184 | * Table name: risk rule-set table
185 | */
186 | public static final String TABLE_NAME_RULE_SET = "rule_set";
187 |
188 |
189 | /**
190 | * DDL definition of the rule set
191 | */
192 | public static final String DDL_RULE_SET = ""
193 | + "auto_id BIGINT, \n" // auto-increment id
194 | + "set_code STRING, \n" // rule-set code
195 | + "rule_code STRING, \n" // rule code
196 | + "rule_set_name STRING, \n" // rule-set name
197 | + "PRIMARY KEY (auto_id) NOT ENFORCED \n" // primary key (auto_id), not enforced
198 | ;
199 |
200 |
201 | /**
202 | * Table name: atomic rule table
203 | */
204 | public static final String TABLE_NAME_RULE = "rule";
205 | /**
206 | * Path of the MetricRedisFunction class
207 | */
208 | public static final String PATH_CLASS_METRIC_REDIS_FUNCTION = "com.linqi.RiskCtrlSys.flink.job.aviator.MetricRedisFunction";
209 |
210 |
211 | }
212 |
--------------------------------------------------------------------------------
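For context: DDL_METRIC_ATTR and TABLE_NAME_METRIC_ATTR are meant to be combined into a Flink SQL CREATE TABLE statement elsewhere in the project (e.g. in MysqlUtil, which is not shown in this export). A rough sketch of that idea; the connector options below are placeholders/assumptions, not the project's real configuration:

    // Sketch only: turning the DDL fragment into a CREATE TABLE statement.
    String createTable =
        "CREATE TABLE " + ConstantsUtil.TABLE_NAME_METRIC_ATTR + " (\n"
        + ConstantsUtil.DDL_METRIC_ATTR
        + ") WITH (\n"
        + "  'connector' = 'mysql-cdc',\n"          // placeholder connector
        + "  'hostname' = 'mysql',\n"
        + "  'port' = '3306',\n"
        + "  'username' = 'root',\n"
        + "  'password' = '123456',\n"
        + "  'database-name' = 'linqi',\n"
        + "  'table-name' = '" + ConstantsUtil.TABLE_NAME_METRIC_ATTR + "'\n"
        + ")";
    tableEnv.executeSql(createTable);   // tableEnv: a StreamTableEnvironment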
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/BizRuntimeException.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception;
2 |
3 | import com.linqi.RiskCtrlSys.commons.exception.enums.BizExceptionInfo;
4 | import lombok.extern.slf4j.Slf4j;
5 |
6 | /**
7 | * author: linqi
8 | * description: Root class of all custom exception classes
9 | * date: 2024
10 | */
11 |
12 | /* **********************
13 | *
14 | *
15 | *
16 | * 1.
17 | *
18 | * Java wraps every error in an object.
19 | * The root of all errors is Throwable, which has two subclasses: Error and Exception.
20 | * Error: errors the application cannot handle.
21 | * Exception: errors the application can handle; Exception splits into two kinds:
22 | * a. RuntimeException: runtime (unchecked) errors, not checked by the compiler.
23 | * b. non-RuntimeException: checked errors, caught at compile time; if unhandled, compilation fails.
24 | *
25 | * *********************/
26 |
27 | @Slf4j
28 | public class BizRuntimeException extends RuntimeException {
29 |
30 | /**
31 | * author: linqi
32 | * description: constructor of the custom exception class
33 | * @param info: custom exception info enum
34 | * @return null
35 | */
36 | public BizRuntimeException(BizExceptionInfo info) {
37 |
38 | log.error(info.getExceptionMsg());
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/CustomExceptionDemo.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception;
2 |
3 | import com.linqi.RiskCtrlSys.commons.exception.custom.RedisException;
4 | import com.linqi.RiskCtrlSys.commons.exception.enums.RedisExceptionInfo;
5 |
6 | /**
7 | * author: linqi
8 | * description: Custom exception demo
9 | * date: 2024
10 | */
11 |
12 | public class CustomExceptionDemo {
13 |
14 | /**
15 | * author: linqi
16 | * description: throws a custom exception
17 | * @param :
18 | * @return void
19 | */
20 | public static void throwCustomException() throws RedisException {
21 | throw new RedisException(RedisExceptionInfo.REDISTEMPLATE_NULL);
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/Log4j2AndSlf4jDemo.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception;
2 |
3 | import lombok.extern.slf4j.Slf4j;
4 | import org.apache.logging.log4j.LogManager;
5 | import org.apache.logging.log4j.Logger;
6 |
7 | /**
8 | * author: linqi
9 | * description: Log4j2 and Slf4j output demo
10 | * date: 2024
11 | */
12 |
13 | @Slf4j
14 | public class Log4j2AndSlf4jDemo {
15 |
16 | private static final Logger logger = LogManager.getLogger(Log4j2AndSlf4jDemo.class);
17 |
18 | /**
19 | * author: linqi
20 | * description: slf4j output
21 | * @param :
22 | * @return void
23 | */
24 | public static void slf4jOutput() {
25 | log.warn("this is slf4j output");
26 | }
27 |
28 | /**
29 | * author: linqi
30 | * description: log4j2 output
31 | * @param :
32 | * @return void
33 | */
34 | public static void log4j2Output() {
35 | logger.error("this is log4j2 error output");
36 | logger.info("this is log4j2 info output");
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/custom/FlinkPropertiesException.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception.custom;
2 |
3 | import com.linqi.RiskCtrlSys.commons.exception.BizRuntimeException;
4 | import com.linqi.RiskCtrlSys.commons.exception.enums.BizExceptionInfo;
5 | import lombok.extern.slf4j.Slf4j;
6 |
7 | /**
8 | * author: linqi
9 | * description: Custom exception for Flink configuration properties
10 | * date: 2024
11 | */
12 |
13 | @Slf4j
14 | public class FlinkPropertiesException extends BizRuntimeException {
15 |
16 | public FlinkPropertiesException(BizExceptionInfo info) {
17 | super(info);
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/custom/RedisException.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception.custom;
2 |
3 | import com.linqi.RiskCtrlSys.commons.exception.BizRuntimeException;
4 | import com.linqi.RiskCtrlSys.commons.exception.enums.BizExceptionInfo;
5 |
6 | /**
7 | * author: linqi
8 | * description: Redis custom exception class
9 | * date: 2024
10 | */
11 |
12 | public class RedisException extends BizRuntimeException {
13 |
14 |
15 | public RedisException(BizExceptionInfo info) {
16 | super(info);
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/custom/UtilException.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception.custom;
2 |
3 | import com.linqi.RiskCtrlSys.commons.exception.BizRuntimeException;
4 | import com.linqi.RiskCtrlSys.commons.exception.enums.BizExceptionInfo;
5 |
6 | /**
7 | * author: linqi
8 | * description: Custom exception for utility classes
9 | * date: 2024
10 | */
11 |
12 | public class UtilException extends BizRuntimeException {
13 | /**
14 | * author: linqi
15 | * description: constructor of the custom exception class
16 | *
17 | * @param info : custom exception info enum
18 | * @return null
19 | */
20 | public UtilException(BizExceptionInfo info) {
21 | super(info);
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/enums/BizExceptionInfo.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception.enums;
2 |
3 | /**
4 | * author: linqi
5 | * description: Interface for exception info enums
6 | * date: 2024
7 | */
8 |
9 | public interface BizExceptionInfo {
10 |
11 | /**
12 | * author: linqi
13 | * description: get the exception error code
14 | * @param :
15 | * @return java.lang.String
16 | */
17 | String getExceptionCode();
18 |
19 | /**
20 | * author: linqi
21 | * description: get the exception message
22 | * @param :
23 | * @return java.lang.String
24 | */
25 | String getExceptionMsg();
26 | }
27 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/enums/FlinkPropertiesExceptionInfo.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception.enums;
2 |
3 | import lombok.Getter;
4 |
5 | /**
6 | * author: linqi
7 | * description: Flink configuration properties exception info enum
8 | * date: 2024
9 | */
10 |
11 | @Getter
12 | public enum FlinkPropertiesExceptionInfo implements BizExceptionInfo {
13 |
14 | PROPERTIES_NULL("-300", "配置参数不存在");
15 |
16 | private String exceptionCode;
17 | private String exceptionMsg;
18 |
19 | FlinkPropertiesExceptionInfo(
20 | String exceptionCode,
21 | String exceptionMsg) {
22 | this.exceptionCode = exceptionCode;
23 | this.exceptionMsg = exceptionMsg;
24 | }
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/enums/RedisExceptionInfo.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception.enums;
2 |
3 | import lombok.Getter;
4 |
5 | /**
6 | * author: linqi
7 | * description: Redis exception info enum
8 | * date: 2024
9 | */
10 |
11 | @Getter
12 | public enum RedisExceptionInfo implements BizExceptionInfo {
13 |
14 | REDISTEMPLATE_NULL("-300", "RedisTemplate对象为null");
15 |
16 | private String exceptionCode;
17 | private String exceptionMsg;
18 |
19 | RedisExceptionInfo(String exceptionCode,
20 | String exceptionMsg) {
21 | this.exceptionCode = exceptionCode;
22 | this.exceptionMsg = exceptionMsg;
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/main/java/com/linqi/RiskCtrlSys/commons/exception/enums/UtilExceptionInfo.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception.enums;
2 |
3 | import lombok.Getter;
4 |
5 | /**
6 | * author: linqi
7 | * description: Utility exception info enum
8 | * date: 2024
9 | */
10 | @Getter
11 | public enum UtilExceptionInfo implements BizExceptionInfo {
12 |
13 | INVOKE_METHOD_NULL("-100", "反射方法执行错误");
14 |
15 | private String exceptionCode;
16 | private String exceptionMsg;
17 |
18 | UtilExceptionInfo(
19 | String exceptionCode,
20 | String exceptionMsg) {
21 | this.exceptionCode = exceptionCode;
22 | this.exceptionMsg = exceptionMsg;
23 | }
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/test/java/com/linqi/RiskCtrlSys/commons/exception/CustomExceptionTest.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception;
2 |
3 | import com.linqi.RiskCtrlSys.commons.exception.custom.RedisException;
4 | import org.junit.jupiter.api.DisplayName;
5 | import org.junit.jupiter.api.Test;
6 | import org.springframework.boot.test.context.SpringBootTest;
7 |
8 | import static org.junit.jupiter.api.Assertions.assertThrows;
9 |
10 | /**
11 | * author: linqi
12 | * description: Unit tests for the custom exceptions
13 | * date: 2024
14 | */
15 | @SpringBootTest(classes = CustomExceptionTest.class)
16 | public class CustomExceptionTest {
17 |
18 | @DisplayName("测试自定义异常捕捉")
19 | @Test
20 | public void testThrowCustomException(){
21 | Throwable thrown =
22 | assertThrows(
23 | RedisException.class,
24 | () -> CustomExceptionDemo.throwCustomException()
25 | );
26 |
27 | // System.out.println("thrown数据类型: "+thrown);
28 | }
29 |
30 | }
31 |
--------------------------------------------------------------------------------
/RiskEngine-commons/src/test/java/com/linqi/RiskCtrlSys/commons/exception/Log4j2AndSlf4jTest.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.commons.exception;
2 |
3 | import org.junit.jupiter.api.DisplayName;
4 | import org.junit.jupiter.api.Test;
5 | import org.springframework.boot.test.context.SpringBootTest;
6 |
7 | /**
8 | * author: linqi
9 | * description: Tests for Log4j2 and Slf4j log output
10 | * date: 2024
11 | */
12 |
13 | @SpringBootTest(classes = Log4j2AndSlf4jTest.class)
14 | public class Log4j2AndSlf4jTest {
15 |
16 |
17 | @DisplayName("测试Slf4j日志输出")
18 | @Test
19 | void testSlf4jOutput() {
20 | Log4j2AndSlf4jDemo.slf4jOutput();
21 | }
22 |
23 | @DisplayName("测试log4j2日志输出")
24 | @Test
25 | void testLog4j2Output() {
26 | Log4j2AndSlf4jDemo.log4j2Output();
27 | }
28 | }
--------------------------------------------------------------------------------
/RiskEngine-dao/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>LinQiRiskCtrlSys</artifactId>
7 |         <groupId>com.linqi</groupId>
8 |         <version>1.0-SNAPSHOT</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 |
12 |     <artifactId>RiskEngine-dao</artifactId>
13 |
14 |     <properties>
15 |         <maven.compiler.source>8</maven.compiler.source>
16 |         <maven.compiler.target>8</maven.compiler.target>
17 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
18 |     </properties>
19 |
20 |     <dependencies>
21 |
22 |         <!-- mybatis-plus -->
23 |         <dependency>
24 |             <groupId>com.baomidou</groupId>
25 |             <artifactId>mybatis-plus-boot-starter</artifactId>
26 |         </dependency>
27 |
28 |         <!-- mybatis-plus code generator -->
29 |         <dependency>
30 |             <groupId>com.baomidou</groupId>
31 |             <artifactId>mybatis-plus-generator</artifactId>
32 |         </dependency>
33 |
34 |         <!-- velocity template engine (default engine of the generator) -->
35 |         <dependency>
36 |             <groupId>org.apache.velocity</groupId>
37 |             <artifactId>velocity-engine-core</artifactId>
38 |         </dependency>
39 |
40 |         <!-- mysql driver -->
41 |         <dependency>
42 |             <groupId>mysql</groupId>
43 |             <artifactId>mysql-connector-java</artifactId>
44 |         </dependency>
45 |
46 |         <dependency>
47 |             <groupId>com.linqi</groupId>
48 |             <artifactId>RiskEngine-commons</artifactId>
49 |             <version>1.0-SNAPSHOT</version>
50 |         </dependency>
51 |
52 |         <dependency>
53 |             <groupId>com.linqi</groupId>
54 |             <artifactId>RiskEngine-model</artifactId>
55 |             <version>1.0-SNAPSHOT</version>
56 |         </dependency>
57 |
58 |     </dependencies>
59 |
60 | </project>
--------------------------------------------------------------------------------
/RiskEngine-dao/src/main/java/com/linqi/RiskCtrlSys/dao/CodeGenerator/GlobalConfigGenerator.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.dao.CodeGenerator;
2 |
3 | import com.baomidou.mybatisplus.generator.config.GlobalConfig;
4 | import com.linqi.RiskCtrlSys.commons.constants.ConstantsUtil;
5 |
6 | import java.util.function.Consumer;
7 |
8 | /**
9 | * author: linqi
10 | * description: MyBatis-Plus code generator: global configuration
11 | * date: 2024
12 | */
13 |
14 | public class GlobalConfigGenerator implements Consumer<GlobalConfig.Builder> {
15 | @Override
16 | public void accept(GlobalConfig.Builder builder) {
17 | builder
18 | //author
19 | .author(ConstantsUtil.AUTHOR)
20 | //comment date
21 | .commentDate(ConstantsUtil.DATE)
22 | //output directory (absolute path of the src/main/java directory)
23 | //System.getProperty("user.dir") is the user's current working directory
24 | .outputDir(System.getProperty("user.dir"))
25 | // enable swagger mode (requires an extra dependency)
26 | //.enableSwagger()
27 | //do not open the output directory when generation finishes
28 | .disableOpenDir()
29 | ;
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/RiskEngine-dao/src/main/java/com/linqi/RiskCtrlSys/dao/CodeGenerator/PackageConfigGenerator.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.dao.CodeGenerator;
2 |
3 | import com.baomidou.mybatisplus.generator.config.OutputFile;
4 | import com.baomidou.mybatisplus.generator.config.PackageConfig;
5 | import com.linqi.RiskCtrlSys.commons.constants.ConstantsUtil;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 | import java.util.function.Consumer;
10 |
11 | /**
12 | * author: linqi
13 | * description: MyBatis-Plus code generator: package configuration
14 | * date: 2024
15 | */
16 |
17 | public class PackageConfigGenerator implements Consumer<PackageConfig.Builder> {
18 | /**
19 | * Accepts the package config builder and sets each of its properties
20 | *
21 | * @param builder package configuration builder
22 | */
23 | @Override
24 | public void accept(PackageConfig.Builder builder) {
25 | builder
26 | .parent(ConstantsUtil.PARENT_PACKAGE) // set the parent package
27 | .controller(ConstantsUtil.CONTROLLER_PACKAGE) // set the controller package
28 | .mapper(ConstantsUtil.MAPPER_PACKAGE) // set the mapper package
29 | .service(ConstantsUtil.SERVICE_PACKAGE) // set the service package
30 | .serviceImpl(ConstantsUtil.SERVICE_IMPL_PACKAGE) // set the service impl package
31 | .entity(ConstantsUtil.ENTITY_PACKAGE) // set the entity package
32 | .xml(ConstantsUtil.MAPPER_XML_PACKAGE) // set the mapper XML package
33 |
34 | .pathInfo(getPathInfo()) // set the path info (absolute paths)
35 | ;
36 | }
37 |
38 |
39 |
40 | /**
41 | * Builds the path info
42 | *
43 | * @return a Map of OutputFile to the corresponding output path
44 | */
45 | private Map<OutputFile, String> getPathInfo() {
46 | Map<OutputFile, String> pathInfo = new HashMap<>(5);
47 | pathInfo.put(OutputFile.entity, ConstantsUtil.ENTITY_PATH);
48 | pathInfo.put(OutputFile.mapper, ConstantsUtil.MAPPER_PATH);
49 | pathInfo.put(OutputFile.xml, ConstantsUtil.XML_PATH);
50 | pathInfo.put(OutputFile.controller, ConstantsUtil.CONTROLLER_PATH);
51 | pathInfo.put(OutputFile.service, ConstantsUtil.SERVICE_PATH);
52 | pathInfo.put(OutputFile.serviceImpl, ConstantsUtil.SERVICE_IMPL_PATH);
53 |
54 | return pathInfo;
55 | }
56 |
57 |
58 | }
59 |
--------------------------------------------------------------------------------
/RiskEngine-dao/src/main/java/com/linqi/RiskCtrlSys/dao/CodeGenerator/StrategyConfigGenerator.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.dao.CodeGenerator;
2 |
3 |
4 | import com.baomidou.mybatisplus.generator.config.StrategyConfig;
5 | import com.baomidou.mybatisplus.generator.config.rules.NamingStrategy;
6 |
7 | import java.util.function.Consumer;
8 |
9 | /**
10 | * author: linqi
11 | * description: MyBatis-Plus code generator: strategy configuration
12 | * date: 2024
13 | */
14 |
15 | public class StrategyConfigGenerator implements Consumer<StrategyConfig.Builder> {
16 |
17 | private String table;
18 |
19 | public StrategyConfigGenerator(String table) {
20 | this.table = table;
21 | }
22 |
23 | @Override
24 | public void accept(StrategyConfig.Builder builder) {
25 | builder
26 | // table(s) to generate code for
27 | .addInclude(table)
28 |
29 | // Entity strategy configuration
30 | .entityBuilder()
31 | .formatFileName("%sEntity")
32 | //enable Lombok
33 | .enableLombok()
34 | // overwrite previously generated files
35 | .enableFileOverride()
36 | //naming strategy for mapping table names to entity names
37 | .naming(NamingStrategy.no_change)
38 | //naming strategy for mapping table columns to entity fields
39 | .columnNaming(NamingStrategy.no_change)
40 |
41 | // Mapper strategy configuration
42 | .mapperBuilder()
43 | // overwrite previously generated files
44 | .enableFileOverride()
45 |
46 | // Service strategy configuration
47 | .serviceBuilder()
48 | // overwrite previously generated files
49 | .enableFileOverride()
50 | //format of the service interface file name
51 | .formatServiceFileName("%sService")
52 | //format of the service implementation file name
53 | .formatServiceImplFileName("%sServiceImpl")
54 |
55 | // Controller strategy configuration
56 | .controllerBuilder()
57 | .formatFileName("%sController")
58 | // overwrite previously generated files
59 | .enableFileOverride()
60 | ;
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/RiskEngine-dao/src/main/java/com/linqi/RiskCtrlSys/dao/MybatisPlusCodeGenerator.java:
--------------------------------------------------------------------------------
1 | package com.linqi.RiskCtrlSys.dao;
2 |
3 | import com.baomidou.mybatisplus.generator.FastAutoGenerator;
4 | import com.linqi.RiskCtrlSys.commons.constants.ConstantsUtil;
5 | import com.linqi.RiskCtrlSys.dao.CodeGenerator.GlobalConfigGenerator;
6 | import com.linqi.RiskCtrlSys.dao.CodeGenerator.PackageConfigGenerator;
7 | import com.linqi.RiskCtrlSys.dao.CodeGenerator.StrategyConfigGenerator;
8 | import org.apache.commons.lang3.StringUtils;
9 |
10 | import java.util.Scanner;
11 |
12 | /**
13 | * author: linqi
14 | * description: MyBatis-Plus code generator
15 | * date: 2024
16 | */
17 |
18 | public class MybatisPlusCodeGenerator {
19 | public static void main(String[] args) {
20 |
21 | /* **********************
22 | *
23 | * Note:
24 | *
25 | * 1.
26 | *
27 | * mybatis-plus 3.5.1 and above use
28 | * FastAutoGenerator to generate code
29 | *
30 | * 2.
31 | * the mybatis-plus code generator needs a template engine:
32 | * a. velocity
33 | * b. freemarker
34 | * velocity is the generator's default
35 | *
36 | * *********************/
37 |
38 | FastAutoGenerator
39 | //configure the data source
40 | .create(
41 | ConstantsUtil.MYSQL_URL,
42 | ConstantsUtil.USERNAME,
43 | ConstantsUtil.PASSWORD
44 | )
45 | // global configuration
46 | .globalConfig(new GlobalConfigGenerator())
47 | // package configuration
48 | .packageConfig(new PackageConfigGenerator())
49 | // strategy configuration
50 | .strategyConfig(new StrategyConfigGenerator(scanner("需要逆向的表名")))
51 |
52 | // use the Freemarker template engine (requires an extra dependency)
53 | //.templateEngine(new FreemarkerTemplateEngine())
54 | // execute
55 | .execute();
56 |
57 | }
58 |
59 | /**
60 | * author: linqi
61 | * description: reads the name of the table to reverse-engineer from stdin
62 | * @param tip: prompt text
63 | * @return java.lang.String
64 | */
65 | private static String scanner(String tip) {
66 | Scanner scanner = new Scanner(System.in);
67 | StringBuilder helper = new StringBuilder();
68 | helper.append(tip); // append the prompt
69 | System.out.println(helper.toString()); // print the prompt
70 | if (scanner.hasNext()) {
71 | String table = scanner.next(); // read the table name
72 | if (StringUtils.isNotBlank(table)) {
73 | return table; // return the entered table name
74 | }
75 | }
76 |
77 | throw new RuntimeException("请输入正确的 " + tip); // throw when no valid table name was entered
78 | }
79 |
80 |
81 | }
82 |
--------------------------------------------------------------------------------
/RiskEngine-dao/src/main/resources/application.yml:
--------------------------------------------------------------------------------
1 | #mysql
2 | spring:
3 | datasource:
4 | driver-class-name: com.mysql.cj.jdbc.Driver
5 | url: jdbc:mysql://mysql:3306/linqi?serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8&useSSL=false
6 | username: root
7 | password: 123456
8 |
9 | #mybatis-plus
10 | mybatis-plus:
11 | mapper-locations: classpath*:mapper/*Mapper.xml
12 | global-config:
13 | db-config:
14 | id-type: auto
15 | logic-delete-value: 1
16 | logic-not-delete-value: 0
17 | configuration:
18 | map-underscore-to-camel-case: true
19 | log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
--------------------------------------------------------------------------------
/RiskEngine-flink/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <parent>
6 |         <artifactId>LinQiRiskCtrlSys</artifactId>
7 |         <groupId>com.linqi</groupId>
8 |         <version>1.0-SNAPSHOT</version>
9 |     </parent>
10 |     <modelVersion>4.0.0</modelVersion>
11 |
12 |     <artifactId>RiskEngine-flink</artifactId>
13 |
14 |     <dependencies>
15 |
16 |         <dependency>
17 |             <groupId>org.apache.flink</groupId>
18 |             <artifactId>flink-java</artifactId>
19 |         </dependency>
20 |         <dependency>
21 |             <groupId>org.apache.flink</groupId>
22 |             <artifactId>flink-runtime-web_${scala.binary.version}</artifactId>
23 |         </dependency>
24 |         <dependency>
25 |             <groupId>org.apache.flink</groupId>
26 |             <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
27 |         </dependency>
28 |         <dependency>
29 |             <groupId>org.apache.flink</groupId>
30 |             <artifactId>flink-table-api-java</artifactId>
31 |         </dependency>
32 |         <dependency>
33 |             <groupId>org.apache.flink</groupId>
34 |             <artifactId>flink-table-common</artifactId>
35 |         </dependency>
36 |         <dependency>
37 |             <groupId>org.apache.flink</groupId>
38 |             <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
39 |         </dependency>
40 |         <dependency>
41 |             <groupId>org.apache.flink</groupId>
42 |             <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
43 |         </dependency>
44 |         <dependency>
45 |             <groupId>org.apache.flink</groupId>
46 |             <artifactId>flink-clients_${scala.binary.version}</artifactId>
47 |         </dependency>
48 |         <dependency>
49 |             <groupId>org.apache.bahir</groupId>
50 |             <artifactId>flink-connector-redis_${scala.binary.version}</artifactId>
51 |         </dependency>
52 |         <dependency>
53 |             <groupId>org.apache.flink</groupId>
54 |             <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
55 |         </dependency>
56 |         <dependency>
57 |             <groupId>org.apache.flink</groupId>
58 |             <artifactId>flink-connector-jdbc_${scala.binary.version}</artifactId>
59 |         </dependency>
60 |         <dependency>
61 |             <groupId>com.ververica</groupId>
62 |             <artifactId>flink-sql-connector-mysql-cdc</artifactId>
63 |         </dependency>
64 |         <dependency>
65 |             <groupId>org.apache.flink</groupId>
66 |             <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
67 |             <exclusions>
68 |                 <exclusion>
69 |                     <groupId>log4j</groupId>
70 |                     <artifactId>log4j</artifactId>
71 |                 </exclusion>
72 |             </exclusions>
73 |         </dependency>
74 |         <dependency>
75 |             <groupId>org.apache.flink</groupId>
76 |             <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
77 |             <classifier>tests</classifier>
78 |         </dependency>
79 |         <dependency>
80 |             <groupId>redis.clients</groupId>
81 |             <artifactId>jedis</artifactId>
82 |         </dependency>
83 |         <dependency>
84 |             <groupId>com.clickhouse</groupId>
85 |             <artifactId>clickhouse-jdbc</artifactId>
86 |         </dependency>
87 |         <dependency>
88 |             <groupId>mysql</groupId>
89 |             <artifactId>mysql-connector-java</artifactId>
90 |         </dependency>
91 |         <dependency>
92 |             <groupId>org.apache.groovy</groupId>
93 |             <artifactId>groovy</artifactId>
94 |         </dependency>
95 |         <dependency>
96 |             <groupId>com.googlecode.aviator</groupId>
97 |             <artifactId>aviator</artifactId>
98 |         </dependency>
99 |         <dependency>
100 |             <groupId>com.linqi</groupId>
101 |             <artifactId>RiskEngine-model</artifactId>
102 |             <version>1.0-SNAPSHOT</version>
103 |         </dependency>
104 |         <dependency>
105 |             <groupId>com.linqi</groupId>
106 |             <artifactId>RiskEngine-utils</artifactId>
107 |             <version>1.0-SNAPSHOT</version>
108 |         </dependency>
109 |         <dependency>
110 |             <groupId>com.linqi</groupId>
111 |             <artifactId>RiskEngine-commons</artifactId>
112 |             <version>1.0-SNAPSHOT</version>
113 |         </dependency>
114 |
115 |     </dependencies>
116 |
117 | </project>
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/groovy/scripts/cep/LoginFailBySingleton.groovy:
--------------------------------------------------------------------------------
1 | package scripts.cep
2 |
3 | import com.linqi.RiskCtrlSys.flink.job.groovy.GroovyRule
4 | import org.apache.flink.cep.pattern.Pattern
5 | /**
6 | * Groovy script: individual (singleton) pattern detecting users with more than 3 login failures within the last minute
7 | * @param
8 | */
9 | class LoginFailBySingleton implements GroovyRule {
10 | @Override
11 | Pattern getPattern() {
12 | return Pattern
13 | .begin("login_fail")
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/groovy/scripts/cep/template/Circulate.groovy:
--------------------------------------------------------------------------------
1 | package scripts.cep.template
2 |
3 | import com.linqi.RiskCtrlSys.flink.job.groovy.LoginFailBySingletonCondition
4 | import com.linqi.RiskCtrlSys.flink.job.groovy.GroovyRule
5 | import org.apache.flink.cep.pattern.Pattern
6 | import org.apache.flink.streaming.api.windowing.time.Time
7 |
8 | /**
9 | * Pattern template: looping pattern
10 | * @param
11 | */
12 | class Circulate implements GroovyRule {
13 | @Override
14 | Pattern getPattern() {
15 | return Pattern
16 | .begin("__START__")
17 | .where(new LoginFailBySingletonCondition("__START_FIELD__","__START_EXP__"))
18 | .times(Integer.parseInt("__TIMES__"))
19 | .within(Time.seconds(Integer.parseInt("__WITHIN__")))
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/groovy/scripts/cep/template/CirculateWithConsecutive.groovy:
--------------------------------------------------------------------------------
1 | package scripts.cep.template
2 |
3 | import com.linqi.RiskCtrlSys.flink.job.groovy.LoginFailBySingletonCondition
4 | import com.linqi.RiskCtrlSys.flink.job.groovy.GroovyRule
5 | import org.apache.flink.cep.pattern.Pattern
6 | import org.apache.flink.streaming.api.windowing.time.Time
7 |
8 | /**
9 | * Pattern template: looping pattern with strict contiguity (consecutive events)
10 | * @param
11 | */
12 | class CirculateWithConsecutive implements GroovyRule {
13 | @Override
14 | Pattern getPattern() {
15 | return Pattern
16 | .begin("__START__")
17 | .where(new LoginFailBySingletonCondition("__START_FIELD__","__START_EXP__"))
18 | .times(Integer.parseInt("__TIMES__"))
19 | .consecutive()
20 | .within(Time.seconds(Integer.parseInt("__WITHIN__")))
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/groovy/scripts/cep/template/CompositeBy2.groovy:
--------------------------------------------------------------------------------
1 | package scripts.cep.template
2 |
3 | import com.linqi.RiskCtrlSys.flink.job.groovy.LoginFailBySingletonCondition
4 | import com.linqi.RiskCtrlSys.flink.job.groovy.GroovyRule
5 | import org.apache.flink.cep.pattern.Pattern
6 | import org.apache.flink.streaming.api.windowing.time.Time
7 |
8 | /**
9 | * Pattern template: composite pattern (2 patterns) for relaxed-contiguity events
10 | * @param
11 | */
12 | class CompositeBy2 implements GroovyRule {
13 | @Override
14 | Pattern getPattern() {
15 | return Pattern
16 | .begin("__START__")
17 | .where(new LoginFailBySingletonCondition("__START_FIELD__","__START_EXP__"))
18 | .followedBy("__SECOND__")
19 | .where(new LoginFailBySingletonCondition("__SEC_FIELD__","__SEC_EXP__"))
20 | .within(Time.seconds(Integer.parseInt("__WITHIN__")))
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/groovy/scripts/cep/template/CompositeBy2WithConsecutive.groovy:
--------------------------------------------------------------------------------
1 | package scripts.cep.template
2 |
3 | import com.linqi.RiskCtrlSys.flink.job.groovy.LoginFailBySingletonCondition
4 | import com.linqi.RiskCtrlSys.flink.job.groovy.GroovyRule
5 | import org.apache.flink.cep.pattern.Pattern
6 | import org.apache.flink.streaming.api.windowing.time.Time
7 |
8 | /**
9 | * Pattern template: composite pattern (2 patterns) with strict contiguity
10 | * @param
11 | */
12 | class CompositeBy2WithConsecutive implements GroovyRule {
13 | @Override
14 | Pattern getPattern() {
15 | return Pattern
16 | .begin("__START__")
17 | .where(new LoginFailBySingletonCondition("__START_FIELD__","__START_EXP__"))
18 | .next("__SECOND__")
19 | .where(new LoginFailBySingletonCondition("__SEC_FIELD__","__SEC_EXP__"))
20 | .within(Time.seconds(Integer.parseInt("__WITHIN__")))
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/groovy/scripts/cep/template/CompositeBy3WithConsecutive.groovy:
--------------------------------------------------------------------------------
1 | package scripts.cep.template
2 |
3 | import com.linqi.RiskCtrlSys.flink.job.groovy.LoginFailBySingletonCondition
4 | import com.linqi.RiskCtrlSys.flink.job.groovy.GroovyRule
5 | import org.apache.flink.cep.pattern.Pattern
6 | import org.apache.flink.streaming.api.windowing.time.Time
7 |
8 | /**
9 | * Pattern template: composite pattern (3 patterns) with strict contiguity
10 | * @param
11 | */
12 | class CompositeBy3WithConsecutive implements GroovyRule {
13 | @Override
14 | Pattern getPattern() {
15 | return Pattern
16 | .begin("__START__")
17 | .where(new LoginFailBySingletonCondition("__START_FIELD__","__START_EXP__"))
18 | .next("__SECOND__")
19 | .where(new LoginFailBySingletonCondition("__SEC_FIELD__","__SEC_EXP__"))
20 | .next("__THIRD__")
21 | .where(new LoginFailBySingletonCondition("__TIR_FIELD__","__TIR_EXP__"))
22 | .within(Time.seconds(Integer.parseInt("__WITHIN__")))
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/CepJudgeJob.java:
--------------------------------------------------------------------------------
1 | package flink;
2 |
3 |
4 | import org.apache.flink.api.common.eventtime.WatermarkStrategy;
5 | import org.apache.flink.api.java.utils.ParameterTool;
6 | import org.apache.flink.cep.CEP;
7 | import org.apache.flink.cep.PatternSelectFunction;
8 | import org.apache.flink.cep.PatternStream;
9 | import org.apache.flink.streaming.api.datastream.DataStream;
10 | import org.apache.flink.streaming.api.datastream.KeyedStream;
11 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
12 |
13 | import java.time.Duration;
14 | import java.util.List;
15 | import java.util.Map;
16 |
17 | /**
18 | * @Author: 123
19 | * @Description:
20 | * @DateTime: 2024
21 | */
22 |
23 |
24 | public class CepJudgeJob {
25 | public static void main(String[] args) throws Exception {
26 | /* **********************
27 | *
28 | * Flink configuration (read the properties file and the job-submission arguments)
29 | *
30 | * *********************/
31 | //parameter tool object
32 | ParameterTool tool = ParameterUtil.getParameters(args);
33 | //unique rule code passed in as a job-submission argument
34 | String rule_code = tool.get(ConstantsUtil.ARGS_RULE_CODE);
35 | //groovy script name passed in as a job-submission argument
36 | String groovy_clazz = tool.get(ConstantsUtil.ARGS_GROOVY_NAME);
37 |
38 | /* **********************
39 | *
40 | * Consume Kafka, build the behavior event stream, and key it by user ID
41 | *
42 | * *********************/
43 | //consume Kafka
44 | DataStream eventStream = KafkaUtil.read(tool);
45 | //assign watermarks
46 | DataStream eventStreamWithWatermarks =eventStream.assignTimestampsAndWatermarks(WatermarkStrategy.forBoundedOutOfOrderness(Duration.ZERO).withTimestampAssigner(new MetricTimestampAssigner()));
47 | //key by user id
48 | KeyedStream keyedStream = eventStreamWithWatermarks.keyBy(EventPO::getUser_id_int);
49 | //env
50 | final StreamExecutionEnvironment env = KafkaUtil.env;
51 |
52 | /* **********************
53 | *
54 | * Prerequisites for hot-reloading CEP risk rules without stopping the Flink job:
55 | *
56 | * 1. Generate the Pattern dynamically (while the job is running, how do we trigger Groovy to re-parse the script and produce a new Pattern?)
57 | * 2. Have the Flink job load the new Pattern dynamically (while the job is running, how do we trigger CEP to reload the new Pattern?)
58 | *
59 | * Approach for dynamic CEP risk rules:
60 | *
61 | * 1. Propagate the Pattern's dynamic configuration (timer or OperatorState; OperatorState is recommended)
62 | * 2. Build the new Pattern
63 | * 3. Build a new NFA from the new Pattern
64 | * 4. Clean up the queued state (elementQueueState, ComputationState, PartialMatches)
65 | *
66 | *
67 | * *********************/
68 |
69 |
70 |
71 |
72 | /* **********************
73 | *
74 | * Background:
75 | *
76 | * 1. Underlying principle
77 | * CEP is implemented with an NFA (non-deterministic finite automaton); Pattern matching is a sequence of state transitions
78 | *
79 | * 2. Source-code implementation
80 | * a. NFACompiler compiles the Pattern object into states, i.e. every pattern in the Pattern object maps to a CEP State, which is put into a queue called ComputationState
81 | * b. For every incoming record, the ComputationState queue is traversed to see whether the record matches a state (CEP State) in the queue;
82 | * if it matches, the record is cached and the next state is tried
83 | * c. Events that match all states are put into the PartialMatches queue
84 | *
85 | * 3.
86 | * CEP source code:
87 | * focus on CepOperator.open(), which contains the NFA construction (important)
88 | * focus on CepOperator.processElement(), where events enter the matching process
89 | * focus on NFA.advanceTime(), which handles timed-out events
90 | * focus on NFA.process(), which updates the NFA state
91 | *
92 | *
93 | * *********************/
94 |
95 |
96 |
97 | /* **********************
98 | *
99 | * Modified CEP source: return a PatternStream that carries a custom method for fetching the Pattern dynamically
100 | * 4 custom methods and 2 custom code blocks were added
101 | *
102 | * *********************/
103 |
104 | PatternStream patternStream = CEP.jPatternStream(keyedStream, new DynamicPattern(groovy_clazz,rule_code)).inEventTimeJuege();
105 |
106 | /* **********************
107 | *
108 | * Modified CEP source: lets Flink load a new Pattern dynamically without stopping the job
109 | * 11 custom code blocks in total inside the CepOperator class
110 | *
111 | * *********************/
112 |
113 | patternStream.select(new PatternSelectFunction() {
114 | @Override
115 | public String select(Map> pattern) throws Exception {
116 | //TODO handle the risk event
117 | return "";
118 | }
119 | });
120 |
121 |
122 | env.execute();
123 | }
124 | }
125 |
--------------------------------------------------------------------------------
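DynamicPattern and the GroovyUtil helper referenced above are not part of this export. Conceptually, a GroovyRule script from src/main/groovy/scripts/cep is compiled at runtime and its getPattern() result is handed to CEP. A minimal sketch of that step (the file path and generic types are illustrative assumptions):

    // Sketch: compile a Groovy rule script and obtain its CEP Pattern at runtime.
    GroovyClassLoader loader = new GroovyClassLoader();
    Class<?> clazz = loader.parseClass(new File(
            "RiskEngine-flink/src/main/groovy/scripts/cep/LoginFailBySingleton.groovy"));
    GroovyRule rule = (GroovyRule) clazz.getDeclaredConstructor().newInstance();
    Pattern<EventPO, ?> pattern = rule.getPattern();

For the template scripts (Circulate, CompositeBy2, ...), placeholders such as __START__, __TIMES__ and __WITHIN__ would be substituted with the rule's configured values before the script text is compiled.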
/RiskEngine-flink/src/main/java/flink/GeneralMetricJob.java:
--------------------------------------------------------------------------------
1 | package flink;
2 |
3 |
4 | import org.apache.flink.api.common.eventtime.WatermarkStrategy;
5 | import org.apache.flink.api.java.tuple.Tuple2;
6 | import org.apache.flink.api.java.utils.ParameterTool;
7 | import org.apache.flink.streaming.api.datastream.DataStream;
8 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
9 |
10 | import java.time.Duration;
11 |
12 | /**
13 | * @Author: 123
14 | * @Description:
15 | * @DateTime: 2024
16 | */
17 |
18 | /* **********************
19 | *
20 | * Why do we need a generic aggregation framework for metrics?
21 | *
22 | * 1. Every metric would otherwise need its own aggregation Job, so the number of Jobs grows quickly.
23 | * 2. Every behavior record from Kafka would be consumed repeatedly by many Jobs.
24 | * 3. The aggregation flow is roughly the same for all metrics, so each Job repeats a lot of code and is hard to maintain.
25 | * 4. In a generic framework only the parameters differ; operations staff can configure them in the back office,
26 | * e.g. which fields to group by, which time field to window on, and so on,
27 | * so metric-computation Jobs can be generated automatically from the configured parameters.
28 | *
29 | *
30 | * Design of the generic aggregation framework
31 | * 1. The aggregation rules of a metric are configured in the operations back office.
32 | * 2. Each Job only calls the shared modules; these modules read the metric's configuration and computation rules from the back office and then aggregate.
33 | *
34 | *
35 | * Where does Flink read the metric aggregation rules?
36 | * Answer: in the open() of the FlatMap operator; the rules that are read are written into the data stream (DataStream).
37 | *
38 | *
39 | * Does Flink read the aggregation rules once, or repeatedly?
40 | * Answer: repeatedly, on a schedule, from a separate thread.
41 | *
42 | *
43 | * *********************/
44 |
45 |
46 | public class GeneralMetricJob {
47 |
48 | public static void main(String[] args) throws Exception {
49 |
50 | /* **********************
51 | *
52 | * Flink configuration (read the properties file and the job-submission arguments)
53 | *
54 | * *********************/
55 | //parameter tool object
56 | ParameterTool tool = ParameterUtil.getParameters(args);
57 |
58 |
59 | SingleOutputStreamOperator> dataStream =
60 | KafkaUtil
61 | //read from Kafka
62 | .read(tool)
63 | /* **********************
64 | *
65 | * Background:
66 | *
67 | * 1.
68 | * Before Flink 1.12 there were only 2 watermark generation strategies:
69 | * a. AssignerWithPunctuatedWatermarks
70 | * b. AssignerWithPeriodicWatermarks
71 | *
72 | * Using a different watermark generator meant implementing a different interface,
73 | * which led to duplicated and increasingly complex code,
74 | *
75 | * so Flink 1.12 reworked watermark generation
76 | *
77 | * 2.
78 | *
79 | * Since Flink 1.12, watermarks are generated with assignTimestampsAndWatermarks(),
80 | * which takes a WatermarkStrategy object as its parameter
81 | *
82 | * 3.
83 | *
84 | * What is a WatermarkStrategy object?
85 | * It provides many static methods for generating watermarks conveniently,
86 | * and also createWatermarkGenerator(),
87 | * which allows fully custom watermark generation.
88 | *
89 | * 4.
90 | *
91 | * A WatermarkStrategy object must include:
92 | * a. the watermark generation strategy
93 | * b. the watermark timestamp extraction (from the event stream)
94 | *
95 | * 5.
96 | * For the watermark strategy, the course uses the bounded-out-of-orderness (fixed delay) strategy.
97 | * Why this strategy?
98 | * It gives a rough estimate of how late data can be;
99 | * within that estimate, all late data can still be waited for
100 | *
101 | * 6.
102 | * The static method forBoundedOutOfOrderness() of WatermarkStrategy
103 | * creates such a fixed-delay watermark strategy;
104 | * it takes a Duration as the allowed lateness
105 | *
106 | *
107 | *
108 | * *********************/
109 |
110 | //register watermarks
111 | .assignTimestampsAndWatermarks(
112 | WatermarkStrategy
113 | //watermark generator: a fixed-delay watermark based on the configured max out-of-orderness
114 | .forBoundedOutOfOrderness(Duration.ofMillis(tool.getInt(ParameterConstantsUtil.FLINK_MAXOUTOFORDERNESS) * 1000L))
115 | //timestamp assigner: extract the event_time field from the event stream
116 | .withTimestampAssigner(new MetricTimestampAssigner())
117 | )
118 | // read MySQL, fetch the metric aggregation rules and write them into the event stream
119 | .flatMap(new MetricConfFlatMap())
120 | //filter the behavior events required by the metric aggregation rules
121 | .filter(new MetricFilter())
122 | //key the stream according to the metric aggregation rules
123 | .keyBy(new MetricKeyBy())
124 | /* **********************
125 | *
126 | * Note:
127 | *
128 | * Because metrics are computed over "the most recent xx period",
129 | * the custom window is restricted to sliding windows only,
130 | *
131 | * *********************/
132 | //set the window type, size and slide according to the metric aggregation rules,
133 | //and assign each behavior event to its window(s)
134 | .window(new MetricWindowAssigner())
135 | //set the computation trigger according to the metric aggregation rules
136 | //.trigger(new MetricTrigger())
137 |
138 | /* **********************
139 | *
140 | * Background:
141 | *
142 | * 1.
143 | * Why does the aggregation operator use aggregate() rather than process()?
144 | * a.
145 | * aggregate() can work with 4 kinds of computation APIs,
146 | * process() only with 1, but it can use timers etc., needed e.g. for TopN statistics
147 | * b.
148 | * aggregate() does incremental (running) aggregation
149 | * process() does full-window computation, e.g. TopN
150 | *
151 | *
152 | * 2.
153 | * How aggregate() computes:
154 | * every record entering the window is aggregated immediately, until the window ends;
155 | * when the window fires, the result is emitted
156 | *
157 | *
158 | * 3.
159 | * Why does aggregate() take 2 arguments here?
160 | *
161 | * With only 1 argument, an AggregateFunction object,
162 | * the AggregateFunction only sees the current record;
163 | * to get window information it would have to cache all records of the window
164 | *
165 | * With 2 arguments, an AggregateFunction plus a WindowFunction,
166 | * the WindowFunction receives the AggregateFunction's output
167 | * and also the keyBy key and window metadata such as the window size
168 | *
169 | *
170 | * *********************/
171 |
172 | /* **********************
173 | *
174 | * Note:
175 | *
176 | * The risk metrics in this project are all sums or averages,
177 | * so only the sum logic and the average logic are implemented,
178 | *
179 | *
180 | * *********************/
181 |
182 | //incremental aggregation according to the metric aggregation rules
183 | .aggregate(new MetricAggFunction(), new MetricWindowFunction());
184 |
185 | /* **********************
186 | *
187 | * Why write to Redis?
188 | * Flink only computes the login count of the most recent 1 hour here,
189 | * but what if a metric needs the login count of the most recent 2 hours? Or 3 hours?
190 | * How do we get that quickly? Should Flink recompute it from scratch?
191 | *
192 | * Idea:
193 | * in Redis, add up 3 consecutive "most recent 1 hour" login counts
194 | * to obtain the login count of the most recent 3 hours,
195 | * and so on.
196 | *
197 | * The key to making this work is the design of the Redis key:
198 | * the key must allow fast lookup of a given "most recent 1 hour" login count
199 | *
200 | * So the Redis key format is:
201 | * metric id + uid + metric main dimension + aggregation type + metric version + sequence number
202 | *
203 | * The sequence number identifies
204 | * which "most recent 1 hour login count" really is the most recent 1 hour,
205 | * because as time goes on,
206 | * the current "most recent 1 hour"
207 | * is no longer "the most recent 1 hour" relative to the future
208 | *
209 | * So, given the sequence number, at the moment the metric value is read,
210 | * which stored value is "the most recent 1 hour" relative to now?
211 | * Redis holds many "most recent 1 hour" metric values,
212 | * which makes this a tricky point.
213 | *
214 | *
215 | * *********************/
216 |
217 | /* **********************
218 | *
219 | * Note:
220 | *
221 | * In this project the risk metric results are only written to Redis;
222 | * the ClickHouse write path is not implemented
223 | *
224 | *
225 | * *********************/
226 |
227 | //assemble the Redis key
228 | DataStream> redisKeyStream =
229 | dataStream.map(new MetricMapForRedisKey());
230 | //write to Redis
231 | RedisWriteUtil.writeByBahirWithString(redisKeyStream);
232 |
233 | KafkaUtil.env.execute();
234 |
235 |
236 | /* **********************
237 | *
238 | * Further notes:
239 | *
240 | * The usual approach is
241 | * one jar per Job,
242 | * so there are as many jars as there are Jobs.
243 | * Even with a generic aggregation framework this stays the same,
244 | * since the framework only factors out the duplicated code,
245 | *
246 | * That means many jars, but that is not the main issue;
247 | * the main issue is that starting each Job costs resources,
248 | * and these Jobs are metric computations with essentially the same business logic,
249 | * so is there a way to merge these Jobs?
250 | *
251 | * 2 options:
252 | * 1. merge multiple Jobs at submission time
253 | * Pros:
254 | * the jobs share the same code,
255 | * e.g. the generic aggregation framework, differing only in parameters;
256 | * merging them into 1 Job
257 | * saves the startup resource overhead
258 | * Cons:
259 | * requires modifying the Flink source code, which is fairly hard
260 | *
261 | * 2.
262 | * call the corresponding Job class via a factory pattern
263 | *
264 | *
265 | * *********************/
266 |
267 | }
268 | }
269 |
--------------------------------------------------------------------------------
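The Redis key layout described in the comments above (metric id + uid + main dimension + aggregation type + metric version + sequence number) is produced by MetricMapForRedisKey / RedisKeyUtil, which are not included in this export. A rough sketch of the idea, with made-up field names:

    // Sketch: assembling the per-window Redis key for one metric value.
    String redisKey = String.join(":",
            String.valueOf(metricId),    // metric id
            String.valueOf(userId),      // uid
            mainDim,                     // metric main dimension, e.g. "login"
            aggType,                     // aggregation type, e.g. "sum"
            metricVersion,               // metric version
            String.valueOf(windowSeq));  // sequence number of this "most recent 1 hour" window

Reading "the most recent N hours" then amounts to summing the values of the N keys whose sequence numbers are closest to the current time.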
/RiskEngine-flink/src/main/java/flink/RuleJudgeJob.java:
--------------------------------------------------------------------------------
1 | package flink;
2 |
3 |
4 | import org.apache.flink.api.java.utils.ParameterTool;
5 | import org.apache.flink.streaming.api.datastream.DataStream;
6 | import org.apache.flink.streaming.api.datastream.KeyedStream;
7 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
8 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
9 |
10 | import java.util.Properties;
11 |
12 | /**
13 | * @Author: 123
14 | * @Description:
15 | * @DateTime: 2024
16 | */
17 |
18 | public class RuleJudgeJob {
19 | public static void main(String[] args) throws Exception {
20 | /* **********************
21 | *
22 | * Flink配置 (获取配置文件以及任务提交参数)
23 | *
24 | * *********************/
25 | //参数工具对象
26 | ParameterTool tool = ParameterUtil.getParameters(args);
27 | //获取任务提交参数传入的规则组唯一编码
28 | String set_code = tool.get(ConstantsUtil.ARGS_SET_CODE);
29 | //Properties属性对象 (Jdbc和debezium配置)
30 | Properties p = new Properties();
31 |
32 | /* **********************
33 | *
34 | * Consume Kafka, build the behavior-event stream, and key it by user ID
35 | *
36 | * *********************/
37 | //consume Kafka
38 | DataStream<EventPO> eventStream = KafkaUtil.read(tool);
39 | //key by user ID
40 | KeyedStream<EventPO, Integer> keyedStream = eventStream.keyBy(EventPO::getUser_id_int);
41 | //env
42 | StreamExecutionEnvironment env = KafkaUtil.env;
43 |
44 | /* **********************
45 | *
46 | * Flink-CDC: listen to the atomic-rule table
47 | *
48 | * *********************/
49 | //table name
50 | String ruleTableN = ConstantsUtil.TABLE_NAME_RULE;
51 | //source name
52 | String ruleSourceN = ParameterConstantsUtil.FLINK_CDC_SINGLE_RULE_SOURCE_NAME;
53 | DataStream<SingleRulePO> ruleStream = DataStreamUtil.buildMysqlCDCStream(env,tool,p,ruleTableN,new RuleDebeziumDeserializer(),ruleSourceN,"1",new SingleRuleSerializableTimestampAssigner());
54 |
55 | /* **********************
56 | *
57 | * Flink-CDC: listen to the rule-set table
58 | *
59 | * *********************/
60 | //table name
61 | String rulesTableN = ConstantsUtil.TABLE_NAME_RULE_SET;
62 | //source name
63 | String rulesSourceN = ParameterConstantsUtil.FLINK_CDC_RULES_SOURCE_NAME;
64 | DataStream<RulesPO> rulesStream = DataStreamUtil.buildMysqlCDCStream(env,tool,p,rulesTableN,new RulesDebeziumDeserializer(),rulesSourceN,"2",new RulesSerializableTimestampAssigner());
65 |
66 |
67 | /* **********************
68 | *
69 | * Two-stream join of atomic rules and rule sets
70 | *
71 | * *********************/
72 | //key the rule stream by rule_code for the join
73 | KeyedStream<SingleRulePO, String> ruleKeyedStream = ruleStream.keyBy(SingleRulePO::getRule_code);
74 | //key the rule-set stream by rule_code for the join
75 | KeyedStream<RulesPO, String> rulesKeyedStream = rulesStream.keyBy(RulesPO::getRule_code);
76 | //use an interval join
77 | DataStream joinStream = JoinUtil.intervalJoinStream(
78 | rulesKeyedStream,
79 | ruleKeyedStream,
80 | //adjust the bounds to the actual situation
81 | -5,5,
82 | new RulesProcessJoinFunction());
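/* **********************
 * A rough sketch (an assumption about JoinUtil, which is not shown here) of
 * what an interval join with the bounds above usually expands to:
 *
 *   rulesKeyedStream
 *       .intervalJoin(ruleKeyedStream)
 *       .between(Time.seconds(-5), Time.seconds(5))
 *       .process(new RulesProcessJoinFunction());
 * *********************/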
83 |
84 | /* **********************
85 | *
86 | * Connect the rule broadcast stream with the behavior-event stream and attach the rule set to each event
87 | *
88 | * *********************/
89 | //the connected event stream (now carrying the specified rule set)
90 | SingleOutputStreamOperator theRulesStream = RuleUtil.doRuleBroadcastStream(env,tool,joinStream,keyedStream,set_code);
91 | /* **********************
92 | *
93 | * Loop over the rule set and attach each rule to the matching behavior events (matched by event_name).
94 | * This produces redundant event records, because one behavior event can match several rules.
95 | *
96 | * *********************/
97 | SingleOutputStreamOperator eventRuleStream = theRulesStream.flatMap(new RulesFlatMap());
98 |
99 | /* **********************
100 | *
101 | * Rule evaluation based on Aviator
102 | *
103 | * *********************/
104 | SingleOutputStreamOperator actionStream =
105 | eventRuleStream
106 | .keyBy(EventPO::getUser_id_int)
107 | //if window aggregation is needed, configure the window here
108 | //.window()
109 | .process(new WarningKeyedProcessFunction());
110 | /* **********************
111 | *
112 | * Alerting action after a rule is hit
113 | * (in this project the risky users are only printed out)
114 | *
115 | * *********************/
116 | //TODO
117 |
118 | env.execute();
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/clickhouse/sink/ClickHouseJdbcSink.java:
--------------------------------------------------------------------------------
1 | package flink.clickhouse.sink;
2 |
3 | import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
4 | import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
5 | import org.apache.flink.connector.jdbc.JdbcSink;
6 | import org.apache.flink.streaming.api.functions.sink.SinkFunction;
7 |
8 | import java.lang.reflect.Field;
9 | import java.sql.PreparedStatement;
10 | import java.sql.SQLException;
11 |
12 | /**
13 | * @Author: 123
14 | * @Description: ClickHouseJdbcSink
15 | * @DateTime: 2025/2/22 15:22
16 | */
17 |
18 | /* **********************
19 | *
20 | * Knowledge points:
21 | *
22 | * 1.
23 | *
24 | * flink-connector-jdbc: with this connector, databases (ClickHouse, MySQL) can be operated on in a streaming fashion
25 | *
26 | * 2.
27 | *
28 | * Required dependencies:
29 | * 1. the flink-connector-jdbc dependency
30 | * 2. the JDBC driver of the target database
31 | *
32 | * *********************/
33 | public class ClickHouseJdbcSink {
34 |
35 | private final SinkFunction sink;
36 | private final static String NA = "null";
37 |
38 | public ClickHouseJdbcSink(String sql,int batchSize,String url) {
39 |
40 | sink = JdbcSink.sink(
41 | sql,
42 | //precompile the sql statement
43 | new ClickHouseJdbcStatementBuilder(),
44 | //configure batched inserts
45 | new JdbcExecutionOptions.Builder().withBatchSize(batchSize).build(),
46 | //configure the ClickHouse connection
47 | new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
48 | .withUrl(url)
49 | .build()
50 | );
51 |
52 | }
53 |
54 | public SinkFunction getSink() {
55 | return this.sink;
56 | }
57 |
58 |
59 | /* **********************
60 | *
61 | * Knowledge points:
62 | *
63 | * The two Java objects for handling sql statements:
64 | *
65 | * 1. PreparedStatement: works with precompiled sql statements
66 | * 2. Statement: can only work with static sql statements
67 | *
68 | * *********************/
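/* **********************
 * Illustrative only (table and column names are assumptions): the sql passed to
 * this sink is a precompiled insert whose '?' placeholders are filled per record,
 * e.g.
 *
 *   PreparedStatement ps = conn.prepareStatement("INSERT INTO risk_table (name) VALUES (?)");
 *   ps.setObject(1, "foo");
 *   ps.addBatch();
 *   ps.executeBatch();
 *
 * setPreparedStatement() below does the setObject/setNull part for every field.
 * *********************/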
69 |
70 | /**
71 | * @Author: 123
72 | * @Description: setPreparedStatement
73 | * @DateTime: 2025/2/22 15:22
74 | */
75 | public static void setPreparedStatement(
76 | PreparedStatement ps,
77 | Field[] fields,
78 | Object object) throws IllegalAccessException, SQLException {
79 |
80 | //iterate over Field[]
81 | for (int i = 1; i <= fields.length; i++) {
82 | //take each Field instance
83 | Field field = fields[i - 1];
84 | //suppress Java language access checks when using this reflective object
85 | field.setAccessible(true);
86 | //read the field's value from the given object via Field.get
87 | Object o = field.get(object);
88 | if (o == null) {
89 | ps.setNull(i, 0);
90 | continue;
91 | }
92 |
93 | //treat the value uniformly as a string here
94 | String fieldValue = o.toString();
95 |
96 | //check whether the string is "null" or empty
97 | /* **********************
98 | * Knowledge points:
99 | * 1.
100 | * When comparing a variable with a constant, put the constant first to avoid NPEs.
101 | *
102 | * 2.
103 | * equals compares content;
104 | * == compares memory addresses (references).
105 | *
106 | * *********************/
107 | if (!NA.equals(fieldValue) && !"".equals(fieldValue)) {
108 | //fill the placeholder at the corresponding position
109 | ps.setObject(i, fieldValue);
110 | } else {
111 | ps.setNull(i, 0);
112 | }
113 | }
114 | }
115 |
116 | }
117 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/clickhouse/sink/ClickHouseJdbcStatementBuilder.java:
--------------------------------------------------------------------------------
1 | package flink.clickhouse.sink;
2 |
3 | import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
4 |
5 | import java.lang.reflect.Field;
6 | import java.sql.PreparedStatement;
7 | import java.sql.SQLException;
8 |
9 | /**
10 | * @Author: 123
11 | * @Description: ClickHouseJdbcStatementBuilder
12 | * @DateTime: 2025/2/22 15:21
13 | */
14 |
15 | public class ClickHouseJdbcStatementBuilder<T> implements JdbcStatementBuilder<T> {
16 |
17 | /**
18 | * @Author: 123
19 | * @Description: accept
20 | * @DateTime: 2025/2/22 15:22
21 | */
22 | @Override
23 | public void accept(PreparedStatement preparedStatement, T t) throws SQLException {
24 |
25 | /* **********************
26 | *
27 | * Knowledge points:
28 | *
29 | * 1.
30 | *
31 | * SQL statement precompilation is implemented via placeholders.
32 | *
33 | * 2.
34 | *
35 | * Obtaining a class's fields via Java reflection:
36 | *
37 | * 1. getDeclaredFields(): returns all of the class's own fields, but not the parent class's fields
38 | * 2. getFields(): returns only public fields, including those of the parent class
39 | *
40 | * *********************/
41 |
42 | Field[] fields = t.getClass().getDeclaredFields();
43 |
44 | //use the obtained field values to fill the placeholders of the precompiled sql
45 | try {
46 | ClickHouseJdbcSink.setPreparedStatement(preparedStatement, fields, t);
47 | } catch (IllegalAccessException e) {
48 | e.printStackTrace();
49 | }
50 |
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/clickhouse/source/ClickHouseSource.java:
--------------------------------------------------------------------------------
1 | package flink.clickhouse.source;
2 |
3 | import com.clickhouse.jdbc.ClickHouseConnection;
4 | import com.clickhouse.jdbc.ClickHouseDataSource;
5 | import org.apache.flink.streaming.api.functions.source.SourceFunction;
6 |
7 | import java.sql.ResultSet;
8 | import java.sql.Statement;
9 | import java.util.Properties;
10 |
11 | /**
12 | * @Author: 123
13 | * @Description: ClickHouseSource
14 | * @DateTime: 2025/2/22 15:22
15 | */
16 |
17 | /* **********************
18 | *
19 | * Note:
20 | *
21 | * Flink reads ClickHouse data via ClickHouse JDBC.
22 | * There are two ClickHouse JDBC drivers:
23 | * 1. the official ClickHouse JDBC driver;
24 | * 2. a third-party ClickHouse JDBC driver, ru.yandex.clickhouse.ClickHouseDriver,
25 | *    which is no longer maintained.
26 | *
27 | * The official ClickHouse JDBC driver lives in the com.clickhouse.jdbc.* packages,
28 | * and some versions of com.clickhouse.jdbc.* also bundle ru.yandex.clickhouse.ClickHouseDriver,
29 | * so pay attention to which package you import.
30 | *
31 | * *********************/
32 |
33 | public class ClickHouseSource implements SourceFunction<CHTestPO> {
34 |
35 | private String URL;
36 | private String SQL;
37 |
38 | public ClickHouseSource(String URL, String SQL) {
39 | this.URL = URL;
40 | this.SQL = SQL;
41 | }
42 |
43 | @Override
44 | public void run(SourceContext<CHTestPO> output) throws Exception {
45 |
46 | /* **********************
47 | *
48 | * Knowledge points:
49 | *
50 | * Properties is a persistent property set;
51 | * both its keys and values are strings.
52 | *
53 | * *********************/
54 | Properties properties = new Properties();
55 | ClickHouseDataSource clickHouseDataSource = new ClickHouseDataSource(URL,properties);
56 |
57 | /* **********************
58 | *
59 | * Knowledge points:
60 | * close the JDBC connection via try-with-resources;
61 | * no manual close is needed.
62 | *
63 | * *********************/
64 | try(ClickHouseConnection conn = clickHouseDataSource.getConnection()) {
65 | /* **********************
66 | *
67 | * ClickHouse data is read through a cursor (the ResultSet below)
68 | *
69 | * *********************/
70 |
71 |
72 | Statement stmt = conn.createStatement();
73 | ResultSet rs = stmt.executeQuery(SQL);
74 | while (rs.next()) {
75 | String name = rs.getString(1);
76 | output.collect(new CHTestPO(name));
77 | }
78 | }
79 |
80 | }
81 |
82 | @Override
83 | public void cancel() {
84 |
85 | }
86 | }
87 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/redis/conf/ImoocRedisCommand.java:
--------------------------------------------------------------------------------
1 | package flink.redis.conf;
2 |
3 | import lombok.Getter;
4 |
5 | /**
6 | * @Author: 123
7 | * @Description: JRedisCommand
8 | * @DateTime: 2025/2/22 15:23
9 | */
10 |
11 | @Getter
12 | public enum JRedisCommand {
13 |
14 | GET(JRedisDataType.STRING);
15 |
16 | private JRedisDataType jRedisDataType;
17 |
18 | JRedisCommand(JRedisDataType jRedisDataType) {
19 | this.jRedisDataType = jRedisDataType;
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/redis/conf/ImoocRedisDataType.java:
--------------------------------------------------------------------------------
1 | package flink.redis.conf;
2 |
3 | import lombok.Getter;
4 |
5 | /**
6 | * @Author: 123
7 | * @Description: JRedisDataType
8 | * @DateTime: 2025/2/22
9 | */
10 |
11 | @Getter
12 | public enum JRedisDataType {
13 |
14 | STRING,
15 | HASH,
16 | LIST,
17 | SET,
18 | SORTED_SET,
19 | ;
20 |
21 | JRedisDataType() {
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/redis/conf/JedisBuilder.java:
--------------------------------------------------------------------------------
1 | package flink.redis.conf;
2 |
3 | import redis.clients.jedis.JedisCluster;
4 |
5 | /**
6 | * @Author: 123
7 | * @Description: Builder
8 | * @DateTime: 2025/2/22
9 | */
10 |
11 | public class JBuilder {
12 |
13 | private JedisCluster jedis = null;
14 |
15 | public JBuilder(JedisCluster jedisCluster) {
16 | this.jedis = jedisCluster;
17 | }
18 |
19 | public void close() {
20 | if (this.jedis != null) {
21 | this.jedis.close();
22 | }
23 | }
24 |
25 | /**
26 | * author: 123
27 | * description: Redis GET operation
28 | * @param key: redis key
29 | * @return java.lang.String
30 | */
31 | public String get(String key) {
32 | return jedis.get(key);
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/redis/conf/JedisConf.java:
--------------------------------------------------------------------------------
1 | package flink.redis.conf;
2 |
3 |
4 | import org.apache.flink.api.java.utils.ParameterTool;
5 | import redis.clients.jedis.HostAndPort;
6 | import redis.clients.jedis.JedisCluster;
7 |
8 | import java.io.IOException;
9 | import java.util.HashSet;
10 | import java.util.Set;
11 |
12 | /**
13 | * @Author: 123
14 | * @Description: JedisConf
15 | * @DateTime: 2025/2/22
16 | */
17 |
18 | public class JedisConf {
19 |
20 | public static JedisCluster getJedisCluster() throws IOException {
21 |
22 | ParameterTool parameterTool =
23 | ParameterUtil.getParameters();
24 | String host = parameterTool.get("redis.host");
25 | String port = parameterTool.get("redis.port");
26 |
27 | /* **********************
28 | *
29 | * Knowledge points:
30 | *
31 | * Jedis objects:
32 | *
33 | * JedisPool: for standalone Redis
34 | * JedisCluster: for a Redis cluster
35 | *
36 | * A JedisCluster object automatically discovers the healthy Redis nodes
37 | *
38 | * *********************/
39 |
40 | HostAndPort hostAndPort = new HostAndPort(
41 | host,
42 | Integer.parseInt(port)
43 | );
44 | Set<HostAndPort> nodes = new HashSet<>();
45 | nodes.add(hostAndPort);
46 |
47 | return new JedisCluster(nodes);
48 |
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/redis/sink/RedisSinkByBahirWithString.java:
--------------------------------------------------------------------------------
1 | package flink.redis.sink;
2 |
3 | import org.apache.flink.api.java.tuple.Tuple2;
4 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
5 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
6 | import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
7 |
8 | /**
9 | * @Author: 123
10 | * @Description:
11 | * @DateTime: 2025
12 | */
13 |
14 | /* **********************
15 | *
16 | * Redis data types:
17 | * 1. String
18 | * 2. Hash
19 | * 3. List
20 | * 4. Set
21 | * 5. Sorted Set (zset)
22 | *
23 | * *********************/
24 | public class RedisSinkByBahirWithString implements RedisMapper<Tuple2<String, String>> {
25 |
26 | /**
27 | * @Author: 123
28 | * @Description: getCommandDescription
29 | * @DateTime: 2025
30 | */
31 | @Override
32 | public RedisCommandDescription getCommandDescription() {
33 | /* **********************
34 | *
35 | * If the Redis data type is hash or sorted set (zset),
36 | * the RedisCommandDescription constructor must be given an additionalKey,
37 | * which is the Redis key of the hash/zset
38 | *
39 | * *********************/
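//e.g. for a hash one would instead return (additionalKey is a hypothetical variable holding the hash's Redis key):
//return new RedisCommandDescription(RedisCommand.HSET, additionalKey);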
40 | return new RedisCommandDescription(RedisCommand.SET);
41 | }
42 |
43 | /**
44 | * @Author: 123
45 | * @Description: getKeyFromData
46 | * @DateTime: 2025
47 | */
48 | @Override
49 | public String getKeyFromData(Tuple2<String, String> input) {
50 | return input.f0;
51 | }
52 |
53 | /**
54 | * @Author: 123
55 | * @Description: getValueFromData
56 | * @DateTime: 2025
57 | */
58 | @Override
59 | public String getValueFromData(Tuple2<String, String> input) {
60 | return input.f1;
61 | }
62 | }
63 |
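A rough wiring sketch (host, port, and the demo class are assumptions; the project's actual wiring lives in RedisWriteUtil, which is not shown here) of how a RedisMapper like the one above is plugged into Bahir's RedisSink:

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;

import flink.redis.sink.RedisSinkByBahirWithString;

public class RedisSinkWiringDemo {

    public static void sink(DataStream<Tuple2<String, String>> keyValueStream) {
        // Jedis connection settings (placeholder host/port)
        FlinkJedisPoolConfig jedisConfig = new FlinkJedisPoolConfig.Builder()
                .setHost("127.0.0.1")
                .setPort(6379)
                .build();
        // the RedisMapper above tells the sink how to extract the key and the value
        keyValueStream.addSink(new RedisSink<>(jedisConfig, new RedisSinkByBahirWithString()));
    }
}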
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/redis/source/ImoocRedisSource.java:
--------------------------------------------------------------------------------
1 | package flink.redis.source;
2 |
3 | import org.apache.flink.api.common.state.ListState;
4 | import org.apache.flink.configuration.Configuration;
5 | import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
6 | import redis.clients.jedis.JedisCluster;
7 |
8 | /**
9 | * @Author: 123
10 | * @Description: JRedisSource
11 | * @DateTime: 2024
12 | */
13 |
14 | /* **********************
15 | *
16 | * Knowledge points:
17 | *
18 | * 1.
19 | *
20 | * RichSourceFunction, RichSinkFunction, RichMapFunction are "rich" function classes.
21 | * Flink's function APIs fall into 2 groups:
22 | * 1. plain function classes (e.g. MapFunction)
23 | * 2. rich function classes (e.g. RichMapFunction)
24 | *
25 | * Rich function classes add lifecycle methods and a way to access the runtime context;
26 | * rich function classes are usually abstract classes.
27 | *
28 | * 2.
29 | *
30 | * Differences between RichParallelSourceFunction and RichSourceFunction:
31 | * RichParallelSourceFunction supports setting a parallelism,
32 | * and code written for one can generally be reused for the other.
33 | *
34 | * RichParallelSourceFunction defaults to a parallelism equal to the number of CPU cores;
35 | * RichSourceFunction can only run with a parallelism of 1.
36 | *
37 | * *********************/
38 | public class JRedisSource extends RichSourceFunction<RedisPO> {
39 |
40 | /**
41 | * Jedis wrapper object
42 | */
43 | private JBuilder jedisBuilder;
44 |
45 | /**
46 | * Redis command enum object
47 | */
48 | private final JRedisCommand jRedisCommand;
49 |
50 | /**
51 | * redis key
52 | */
53 | private final String key;
54 |
55 |
56 | /**
57 | * pattern state (of OperatorState type)
58 | */
59 | private transient ListState patternState;
60 |
61 | public JRedisSource(JRedisCommand jRedisCommand, String key) {
62 | this.jRedisCommand = jRedisCommand;
63 | this.key = key;
64 | }
65 |
66 | /* **********************
67 | *
68 | * Knowledge points:
69 | *
70 | * Updates to a volatile variable are visible to other threads.
71 | *
72 | * *********************/
73 | private volatile boolean isRunning = true;
74 |
75 | /**
76 | * author: 123
77 | * description: read data from Redis
78 | * @param output:
79 | * @return void
80 | */
81 | @Override
82 | public void run(SourceContext<RedisPO> output) throws Exception {
83 |
84 | /* **********************
85 | *
86 | * Continuously listen for and read Redis data
87 | *
88 | * *********************/
89 |
90 | String data = null;
91 | //while (isRunning) {
92 |
93 | switch (jRedisCommand.getjRedisDataType()) {
94 | case STRING :
95 | data = jedisBuilder.get(key);
96 | }
97 | RedisPO redisPO = new RedisPO(data);
98 |
99 | output.collect(redisPO);
100 | //}
101 |
102 | }
103 |
104 |
105 | @Override
106 | public void cancel() {
107 | this.isRunning = false;
108 | }
109 |
110 | /**
111 | * author: 123
112 | * description: initialize the Redis connection
113 | * @param parameters:
114 | * @return void
115 | */
116 | @Override
117 | public void open(Configuration parameters) throws Exception {
118 | JedisCluster jedisCluster = JedisConf.getJedisCluster();
119 | jedisBuilder = new JBuilder(jedisCluster);
120 |
121 | }
122 |
123 |
124 |
125 | }
126 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/utils/AviatorUtil.java:
--------------------------------------------------------------------------------
1 | package flink.utils;
2 |
3 | import com.googlecode.aviator.AviatorEvaluator;
4 | import com.googlecode.aviator.Expression;
5 | import com.googlecode.aviator.runtime.function.AbstractFunction;
6 |
7 | /**
8 | * @Author: 123
9 | * @Description: AviatorUtil
10 | * @DateTime: 2025/2/22
11 | */
12 |
13 | public class AviatorUtil {
14 |
15 | /**
16 | * @Author: 123
17 | * @Description: execute
18 | * @DateTime: 2024
19 | */
20 | public static Object execute(String str) {
21 |
22 | //call AviatorEvaluator.execute() to evaluate the expression string and return the result
23 | return AviatorEvaluator.execute(str);
24 |
25 | }
26 |
27 | /**
28 | * @Author: 123
29 | * @Description:
30 | * @DateTime: 2024
31 | */
32 | // public static Object execute(
33 | // String str,
34 | // Map map) {
35 | //
36 | // //compile the expression string into an Expression object
37 | // Expression compileExp = AviatorEvaluator.compile(str,true);
38 | // //call Expression.execute() to evaluate the expression and return the result
39 | // return compileExp.execute(map);
40 | //
41 | // }
42 |
43 | /**
44 | * @Author: 123
45 | * @Description: execute
46 | * @DateTime: 2024
47 | */
48 | public static Object execute(
49 | String str,
50 | AbstractFunction func) {
51 |
52 | //register the custom function
53 | AviatorEvaluator.addFunction(func);
54 | //compile the expression string into an Expression object
55 | Expression compileExp = AviatorEvaluator.compile(str,true);
56 | //call Expression.execute() to evaluate the expression and return the result
57 | return compileExp.execute();
58 |
59 | }
60 |
61 | }
62 |
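A minimal usage sketch of the utility above (the expressions and the custom function are illustrative assumptions, not part of the project):

import com.googlecode.aviator.runtime.function.AbstractFunction;
import com.googlecode.aviator.runtime.type.AviatorLong;
import com.googlecode.aviator.runtime.type.AviatorObject;

import java.util.Map;

public class AviatorUtilDemo {

    // hypothetical custom function: plusOne(n) returns n + 1
    static class PlusOneFunction extends AbstractFunction {
        @Override
        public String getName() { return "plusOne"; }

        @Override
        public AviatorObject call(Map<String, Object> env, AviatorObject arg) {
            long n = ((Number) arg.getValue(env)).longValue();
            return AviatorLong.valueOf(n + 1);
        }
    }

    public static void main(String[] args) {
        // plain expression evaluation
        System.out.println(AviatorUtil.execute("1 + 2 * 3"));                           // 7
        // expression using the registered custom function
        System.out.println(AviatorUtil.execute("plusOne(41)", new PlusOneFunction()));  // 42
    }
}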
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/utils/ClickHouseUtil.java:
--------------------------------------------------------------------------------
1 | package flink.utils;
2 |
3 |
4 | import org.apache.flink.api.java.utils.ParameterTool;
5 | import org.apache.flink.streaming.api.datastream.DataStream;
6 | import org.apache.flink.streaming.api.datastream.DataStreamSink;
7 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
8 |
9 | /**
10 | * @Author: 123
11 | * @Description: ClickHouseUtil
12 | * @DateTime: 2024
13 | */
14 |
15 | public class ClickHouseUtil {
16 |
17 | private static String URL = null;
18 |
19 | static {
20 | ParameterTool parameterTool = ParameterUtil.getParameters();
21 | URL = parameterTool.get("clickhouse.url");
22 |
23 | }
24 |
25 | /**
26 | * @Author: 123
27 | * @Description: read
28 | * @DateTime: 2024
29 | */
30 | public static DataStream read(StreamExecutionEnvironment env, String sql) {
31 | return env.addSource(new ClickHouseSource(URL,sql));
32 | }
33 |
34 |
35 | /**
36 | * @Author: 123
37 | * @Description: batchWrite
38 | * @DateTime: 2024
39 | */
40 | public static DataStreamSink batchWrite(
41 | DataStream dataStream,
42 | String sql,
43 | int batchSize) {
44 |
45 | //build the SinkFunction
46 |
47 | ClickHouseJdbcSink clickHouseJdbcSink =
48 | new ClickHouseJdbcSink(sql,batchSize,URL);
49 |
50 | return dataStream.addSink(clickHouseJdbcSink.getSink());
51 | }
52 |
53 | }
54 |
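A minimal usage sketch of the utility above (the SQL statements, table names, and batch size are illustrative assumptions):

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ClickHouseUtilDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // read rows from ClickHouse as a stream
        DataStream chStream = ClickHouseUtil.read(env, "SELECT name FROM test_table");
        // write the stream back, batching 100 rows per insert; each '?' is filled per record
        ClickHouseUtil.batchWrite(chStream, "INSERT INTO test_sink (name) VALUES (?)", 100);
        env.execute();
    }
}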
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/utils/DataStreamUtil.java:
--------------------------------------------------------------------------------
1 | package flink.utils;
2 |
3 | import com.ververica.cdc.connectors.mysql.source.MySqlSource;
4 | import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
5 | import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
6 | import org.apache.flink.api.common.eventtime.WatermarkStrategy;
7 | import org.apache.flink.api.common.state.MapStateDescriptor;
8 | import org.apache.flink.api.java.utils.ParameterTool;
9 | import org.apache.flink.streaming.api.datastream.*;
10 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
11 | import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
12 | import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
13 | import org.apache.flink.table.api.EnvironmentSettings;
14 | import org.apache.flink.table.api.SqlDialect;
15 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
16 |
17 | import java.time.Duration;
18 | import java.util.Properties;
19 |
20 | /**
21 | * @Author: 123
22 | * @Description:
23 | * @DateTime: 2024
24 | */
25 |
26 | public class DataStreamUtil {
27 |
28 | //stream execution environment
29 | //declared public and final
30 | public static final StreamExecutionEnvironment env =
31 | StreamExecutionEnvironment.getExecutionEnvironment();
32 |
33 |
34 | //parameter tool
35 | private static ParameterTool parameterTool =
36 | ParameterUtil.getParameters();
37 |
38 | /**
39 | * Initialize the StreamExecutionEnvironment (overloaded initEnv method).
40 | *
41 | * @return the initialized StreamExecutionEnvironment instance.
42 | */
43 | public static StreamExecutionEnvironment initEnv() {
44 |
45 | //register the ParameterTool as global job parameters
46 | env.getConfig().setGlobalJobParameters(parameterTool);
47 | // configure the execution environment
48 | ParameterUtil.envWithConfig(env,parameterTool);
49 |
50 | return env;
51 | }
52 |
53 | /**
54 | * Initialize the StreamExecutionEnvironment and return it (overloaded initEnv method)
55 | * @param args command-line arguments
56 | * @return the StreamExecutionEnvironment instance
57 | */
58 | public static StreamExecutionEnvironment initEnv(String[] args) {
59 |
60 | //register the ParameterTool as global job parameters
61 | parameterTool = ParameterUtil.getParameters(args);
62 | env.getConfig().setGlobalJobParameters(parameterTool);
63 | // configure the execution environment
64 | ParameterUtil.envWithConfig(env,parameterTool);
65 |
66 | return env;
67 |
68 | }
69 |
70 | /**
71 | * Get the StreamTableEnvironment
72 | * @param env StreamExecutionEnvironment
73 | * @return StreamTableEnvironment
74 | */
75 | public static StreamTableEnvironment getTableEnv(
76 | StreamExecutionEnvironment env
77 | ) {
78 |
79 | //create the Table API environment settings
80 | EnvironmentSettings bsSettings =
81 | EnvironmentSettings.newInstance()
82 | // Flink 1.14 no longer requires setting the Planner
83 | //.useBlinkPlanner()
84 | // use streaming mode
85 | .inStreamingMode()
86 | .build();
87 |
88 | // create the StreamTableEnvironment instance
89 | StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env,bsSettings);
90 |
91 | //set the SQL dialect (default SQL syntax vs. Hive QL syntax)
92 | tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
93 |
94 | return tableEnv;
95 | }
96 |
97 | /**
98 | * Create a DataStream of type T from the given rich source function (overloaded streamBuilder)
99 | *
100 | * @param sourceFunc the rich source function that produces the stream's events
101 | * @param <T> the type of the stream elements
102 | * @return a DataStream of type T
103 | */
104 | public static <T> DataStream<T> streamBuilder(
105 | RichSourceFunction<T> sourceFunc
106 | ) {
107 |
108 | //initialize the stream execution environment
109 | initEnv();
110 | //return an event stream of type T
111 | return env.addSource(sourceFunc);
112 | }
113 |
114 | /**
115 | * Create a DataStream of type T from the given arguments and rich source function (overloaded streamBuilder)
116 | *
117 | * @param args the program argument array
118 | * @param sourceFunc the rich source function that produces the stream's events
119 | * @param <T> the type of the stream elements
120 | * @return a DataStream of type T
121 | */
122 | public static <T> DataStream<T> streamBuilder(
123 | String[] args,
124 | RichSourceFunction<T> sourceFunc
125 | ) {
126 |
127 | //initialize the stream execution environment
128 | initEnv(args);
129 | //return an event stream of type T
130 | return env.addSource(sourceFunc);
131 | }
132 |
133 |
134 |
135 | /**
136 | * @Author: 123
137 | * @Description: broadcastStreamBuilder
138 | * @DateTime: 2024
139 | */
140 | public static BroadcastStream broadcastStreamBuilder(
141 | DataStream dataStream,
142 | MapStateDescriptor mapState
143 | ){
144 |
145 | /* **********************
146 | *
147 | * Knowledge points:
148 | *
149 | * 1.
150 | * What is a broadcast stream?
151 | * A broadcast stream is an event stream, typically a low-throughput one.
152 | *
153 | * 2.
154 | * When is a broadcast stream used?
155 | * To apply configuration or rules to another event stream:
156 | * the broadcast stream delivers the configuration or rules to the downstream tasks,
157 | * which store them as broadcast state
158 | * and then apply that broadcast state to the other event stream.
159 | *
160 | * 3.
161 | * Broadcast state
162 | * What is broadcast state?
163 | * Broadcast state is a Map (key-value) structure
164 | * and is one kind of operator state (Operator State).
165 | *
166 | *
167 | *
168 | *
169 | * *********************/
170 |
171 | return dataStream.broadcast(mapState);
172 | }
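/* **********************
 * A minimal usage sketch (descriptor name and types are assumptions) of how a
 * caller typically prepares the MapStateDescriptor and builds the broadcast stream:
 *
 *   MapStateDescriptor<String, RulesPO> ruleStateDesc =
 *           new MapStateDescriptor<>("rules-broadcast-state", String.class, RulesPO.class);
 *   BroadcastStream<RulesPO> ruleBroadcast =
 *           DataStreamUtil.broadcastStreamBuilder(rulesStream, ruleStateDesc);
 * *********************/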
173 |
174 |
175 | /**
176 | * @Author: 123
177 | * @Description: streamConnect
178 | * @DateTime: 2024
179 | */
180 | public static BroadcastConnectedStream streamConnect(
181 | KeyedStream keyedStream,
182 | BroadcastStream broadcastStream
183 | ) {
184 | return keyedStream.connect(broadcastStream);
185 | }
186 |
187 |
188 | /**
189 | * @Author: 123
190 | * @Description: processFuncWithKey
191 | * @DateTime: 2024
192 | */
193 | public static SingleOutputStreamOperator processFuncWithKey(
194 | BroadcastConnectedStream broadcastConnectedStream,
195 | KeyedBroadcastProcessFunction keyedBroadcastProcessFunction
196 | ) {
197 | /* **********************
198 | *
199 | * Knowledge points:
200 | * 4.
201 | * Flink provides a different process(ProcessFunction) for each type of DataStream:
202 | * a. a plain DataStream calls process() with a ProcessFunction instance
203 | * b. a KeyedStream calls process() with a KeyedProcessFunction instance
204 | *
205 | * c. a BroadcastConnectedStream calls process() with:
206 | * c1. a KeyedBroadcastProcessFunction, if the (non-broadcast) behavior-event stream
207 | * was keyed via keyBy() (i.e. it is a KeyedStream)
208 | *
209 | * c2. a BroadcastProcessFunction, if the (non-broadcast) behavior-event stream
210 | * was not keyed
211 | *
212 | * *********************/
213 |
214 | return broadcastConnectedStream.process(keyedBroadcastProcessFunction);
215 | }
216 |
217 |
218 |
219 | /**
220 | * @Author: 123
221 | * @Description: buildMysqlCDCStream
222 | * @DateTime: 2024
223 | */
224 | public static DataStream buildMysqlCDCStream(
225 | StreamExecutionEnvironment env,
226 | ParameterTool parameterTool,
227 | Properties properties,
228 | String table_name,
229 | DebeziumDeserializationSchema deserializer,
230 | String source_name,
231 | String serverId,
232 | SerializableTimestampAssigner serializableTimestampAssigner
233 | ) {
234 |
235 | //the database being monitored (restricted to a single database here)
236 | String cdc_database_name = parameterTool.get(ParameterConstantsUtil.FLINK_CDC_MYSQL_DATABASE);
237 | //the monitored table must be prefixed with the database name
238 | String cdc_table_name = cdc_database_name+"."+table_name;
239 |
240 | // create the CDC source
241 | MySqlSource mySqlSource = FlinkCDCUtil.getMysqlCDCSource(
242 | parameterTool,
243 | properties,
244 | deserializer,
245 | serverId,
246 | cdc_table_name
247 | );
248 |
249 | // build an event stream with watermarks from the CDC source
250 | return mysqlCDCFromSourceWithWatermark(env,mySqlSource,serializableTimestampAssigner,source_name);
251 |
252 | }
253 |
254 | /**
255 | * @Author: 123
256 | * @Description: mysqlCDCFromSourceWithWatermark
257 | * @DateTime: 2024
258 | */
259 | public static DataStream mysqlCDCFromSourceWithWatermark(
260 | StreamExecutionEnvironment env,
261 | MySqlSource source,
262 | SerializableTimestampAssigner serializableTimestampAssigner,
263 | String source_name
264 | ) {
265 | return env.fromSource(
266 | source,
267 | WatermarkStrategy
268 | .forBoundedOutOfOrderness(Duration.ofSeconds(1L))
269 | .withTimestampAssigner(serializableTimestampAssigner),
270 | source_name);
271 | }
272 |
273 |
274 | /**
275 | * @Author: 123
276 | * @Description: mysqlCDCFromSourceNoWatermark
277 | * @DateTime: 2024
278 | */
279 | public static DataStream mysqlCDCFromSourceNoWatermark(
280 | StreamExecutionEnvironment env,
281 | MySqlSource source,
282 | String source_name
283 | ) {
284 | return env.fromSource(
285 | source,
286 | WatermarkStrategy.noWatermarks(),
287 | source_name);
288 | }
289 |
290 |
291 | }
292 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/utils/EventConstantUtil.java:
--------------------------------------------------------------------------------
1 | package flink.utils;
2 |
3 | /**
4 | * @Author: 123
5 | * @Description:
6 | * @DateTime: 2024
7 | */
8 |
9 | public class EventConstantUtil {
10 |
11 | //login success event
12 | public static final String LOGIN_SUCCESS = "login_success";
13 | //login failure event
14 | public static final String LOGIN_FAIL = "login_fail";
15 | //order placement event
16 | public static final String ORDER = "order";
17 | //payment event
18 | public static final String PAY = "pay";
19 | //user profile modification event
20 | public static final String USER_PROFILE_MODIFY = "user_profile_modify";
21 | //coupon claim event
22 | public static final String COUPON_RECEIVE = "coupons_receive";
23 | //coupon use event
24 | public static final String COUPON_USE = "coupons_use";
25 | //comment event
26 | public static final String COMMENT = "comment";
27 | //add-to-favorites event
28 | public static final String FAVORITES = "favorites";
29 | //browse event
30 | public static final String BROWSE = "browse";
31 | //add-to-cart event
32 | public static final String CART_ADD = "cart_add";
33 | //registration event
34 | public static final String REGISTER = "register";
35 | //whitelist
36 | public static final String WHITE_LIST = "white";
37 | //blacklist
38 | public static final String BLACK_LIST = "black";
39 | //graylist
40 | public static final String GRAY_LIST = "gray";
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/utils/FlinkCDCUtil.java:
--------------------------------------------------------------------------------
1 | package flink.utils;
2 |
3 | import com.ververica.cdc.connectors.mysql.source.MySqlSource;
4 | import com.ververica.cdc.connectors.mysql.table.StartupOptions;
5 | import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.Field;
6 | import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.Schema;
7 | import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.Struct;
8 | import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
9 | import org.apache.flink.api.java.utils.ParameterTool;
10 |
11 | import java.util.HashMap;
12 | import java.util.Map;
13 | import java.util.Properties;
14 |
15 | /**
16 | * author: 123
17 | * description: Flink CDC utility class
18 | * date: 2023
19 | */
20 |
21 | /* **********************
22 | *
23 | *
24 | * Knowledge points:
25 | *
26 | * 1.
27 | * Flink-CDC use cases:
28 | * a. capturing MySQL data changes in real time
29 | * b. MySQL data synchronization
30 | *
31 | * 2.
32 | * CDC: change data capture
33 | * Debezium: a distributed service that turns databases into event streams and can capture every row-level change of a table
34 | *
35 | * 3.
36 | * How Flink-CDC works underneath:
37 | * a. Flink-CDC wraps Debezium
38 | * b. Debezium turns the database into an event stream
39 | * c. Flink consumes the event stream produced by Debezium and thus captures every row-level change
40 | * d. Debezium reads the MySQL binlog and converts it into an event stream
41 | * e. Debezium takes a global lock while reading the MySQL binlog
42 | *
43 | * 4.
44 | * Flink-CDC compared with a Canal-based synchronization setup:
45 | * a. Canal only supports MySQL
46 | * b. Canal only supports incremental synchronization
47 | * c. for full synchronization Canal needs DataX or Sqoop
48 | * d. Canal does not support a distributed deployment
49 | *
50 | * 5.
51 | * Flink-CDC reads the MySQL binlog,
52 | * and the binlog format must be set to Row,
53 | * because Flink-CDC pretends to be a replica that pulls the binlog
54 | * but has no SQL execution engine,
55 | * so the binlog format has to be Row.
56 | *
57 | * In Row mode the binlog only records which row was changed and what it looks like afterwards;
58 | * it does not record the context of the SQL statement that made the change.
59 | *
60 | *
61 | * *********************/
62 |
63 | public class FlinkCDCUtil {
64 |
65 |
66 | /**
67 | * Get the MySQL CDC source
68 | *
69 | * @param parameterTool parameter tool
70 | * @param p Properties object
71 | * @param tableList the tables to monitor
72 | * @return MySqlSource the MySQL CDC source
73 | */
74 | public static MySqlSource getMysqlCDCSource(
75 | ParameterTool parameterTool,
76 | Properties p,
77 | DebeziumDeserializationSchema deserializer,
78 | String serverId,
79 | String... tableList
80 | ) {
81 |
82 | /* **********************
83 | *
84 | * Note:
85 | *
86 | * If MySQL has SSL enabled,
87 | * the following must be configured:
88 | * jdbc.properties.useSSL=true
89 | * debezium.database.ssl.mode=required
90 | *
91 | * e.g.
92 | * Properties p = new Properties();
93 | * p.setProperty("jdbc.properties.useSSL","true");
94 | * p.setProperty("debezium.database.ssl.mode","required");
95 | *
96 | * *********************/
97 |
98 | return MySqlSource.builder()
99 | .serverTimeZone("Asia/Shanghai")
100 | .hostname(parameterTool.get(ParameterConstantsUtil.Mysql_HOST))
101 | .port(parameterTool.getInt(ParameterConstantsUtil.Mysql_PORT))
102 | .username(parameterTool.get(ParameterConstantsUtil.Mysql_USERNAME))
103 | .password(parameterTool.get(ParameterConstantsUtil.Mysql_PASSWD))
104 | //binlog start position
105 | /* **********************
106 | *
107 | * Knowledge points:
108 | *
109 | * 6.
110 | * StartupOptions values:
111 | * a. initial:
112 | * on first start the Flink program does a full snapshot read,
113 | * then switches to incremental reading
114 | * b. earliest: read from the first binlog entry
115 | * c. latest: read only the newest data, i.e. changes after the Flink program started
116 | * d. timestamp: start from a given timestamp
117 | * e. specificOffset: start from a given offset
118 | *
119 | * Note:
120 | * mysql-cdc-connector 2.2 only supports initial and latest for startupOptions
121 | *
122 | * *********************/
123 | .startupOptions(StartupOptions.initial())
124 | //whether to scan newly added tables (supported only in Flink-CDC 2.2+)
125 | .scanNewlyAddedTableEnabled(true)
126 | //the database that contains the monitored tables
127 | /* **********************
128 | *
129 | * Note:
130 | *
131 | * databaseList() takes varargs
132 | *
133 | * *********************/
134 | .databaseList(parameterTool.get(ParameterConstantsUtil.FLINK_CDC_MYSQL_DATABASE))
135 | //the tables to monitor; separate multiple tables with commas
136 | /* **********************
137 | *
138 | * Note:
139 | *
140 | * 1. tableList() takes varargs
141 | * 2. table names must include the database name, e.g. "j.mysql_test","j2.mysql_tst2"
142 | *
143 | * *********************/
144 | .tableList(tableList)
145 | //JDBC configuration
146 | .jdbcProperties(p)
147 | //Debezium configuration
148 | .debeziumProperties(p)
149 | //deserializer (e.g. producing a JSON String)
150 | /* **********************
151 | *
152 | * For custom deserialization,
153 | * implement the DebeziumDeserializationSchema interface
154 | * and override deserialize()
155 | *
156 | * *********************/
157 | .deserializer(deserializer)
158 | //unique server id identifying this client to the MySQL database
159 | /* **********************
160 | *
161 | * When multiple Flink CDC instances connect to the same MySQL database,
162 | * each instance must use a different server-id to avoid conflicts
163 | *
164 | * *********************/
165 | .serverId(serverId)
166 | .build();
167 |
168 | }
169 |
170 |
171 | /**
172 | * Build the DDL statement for the Flink-CDC MySQL virtual table
173 | * @param parameterTool parameter tool
174 | * @param tableName the table to synchronize
175 | * @param databaseName the database name
176 | * @param ddlString the column-definition part of the table DDL
177 | * @param jdbcPropertiesString JDBC connection parameters
178 | */
179 | public static String buildMysqlCdcDdl(
180 | ParameterTool parameterTool,
181 | String tableName,
182 | String databaseName,
183 | String ddlString,
184 | String jdbcPropertiesString
185 | ) {
186 |
187 | // get the MySQL username from the parameter tool
188 | String username = parameterTool.get(ParameterConstantsUtil.Mysql_USERNAME);
189 | // get the MySQL password from the parameter tool
190 | String passwd = parameterTool.get(ParameterConstantsUtil.Mysql_PASSWD);
191 | String hostname = parameterTool.get(ParameterConstantsUtil.Mysql_HOST);
192 | String port = parameterTool.get(ParameterConstantsUtil.Mysql_PORT);
193 |
194 |
195 | /* **********************
196 | *
197 | *
198 | * Note:
199 | *
200 | * the MySQL DDL format expected by Flink-CDC
201 | *
202 | *
203 | * *********************/
204 |
205 | //compose the DDL (data definition language) statement
206 | return "" +
207 | "CREATE TABLE IF NOT EXISTS " +
208 | tableName +
209 | " (\n" +
210 | ddlString +
211 | ")" +
212 | " WITH (\n" +
213 | jdbcPropertiesString +
214 | "'connector' = 'mysql-cdc',\n" +
215 | "'hostname' = '" + hostname + "',\n" +
216 | "'port' = '" + port + "',\n" +
217 | "'username' = '" + username + "',\n" +
218 | "'password' = '" + passwd + "',\n" +
219 | "'database-name' = '" + databaseName + "',\n" +
220 | "'table-name' = '" + tableName + "'\n" +
221 | ")";
222 |
223 | }
224 |
225 |
226 | public static Map<String, String> deserialize(
227 | Struct value
228 | ) {
229 |
230 | // the data carried by the sourceRecord is a Struct
231 | /* **********************
232 | *
233 | * Note:
234 | * the Struct class here comes from the
235 | * com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data package,
236 | *
237 | * because
238 | * Flink CDC's shading bundles the Kafka Connect library
239 | *
240 | * *********************/
241 |
242 | // get the post-change data, named "after"
243 | Struct after = value.getStruct("after");
244 |
245 | Map<String, String> afterMap = new HashMap<>();
246 | //process the "after" data
247 | if (null != after) {
248 |
249 | // the Struct's schema lists all field names; iterate over them
250 | Schema afterSchema = after.schema();
251 | for (Field field : afterSchema.fields()) {
252 | String k = field.name();
253 | String v = after.get(field).toString();
254 | afterMap.put(k,v);
255 | }
256 | }
257 |
258 | //get the ts_ms timestamp
259 | Long ts_ms = (Long)value.get("ts_ms");
260 | afterMap.put("ts_ms",ts_ms.toString());
261 |
262 |
263 | return afterMap;
264 | }
265 |
266 | }
267 |
--------------------------------------------------------------------------------
/RiskEngine-flink/src/main/java/flink/utils/GroovyUtil.java:
--------------------------------------------------------------------------------
1 | package flink.utils;
2 |
3 | import groovy.lang.GroovyClassLoader;
4 | import groovy.lang.GroovyObject;
5 | import groovy.lang.Script;
6 | import org.codehaus.groovy.control.CompilerConfiguration;
7 |
8 | import java.io.File;
9 | import java.security.MessageDigest;
10 | import java.util.concurrent.ConcurrentHashMap;
11 |
12 | /**
13 | * @Author: 123
14 | * @Description:
15 | * @DateTime: 2024
16 | */
17 |
18 | public class GroovyUtil {
19 |
20 | /**
21 | * the Groovy GroovyClassLoader object (used to parse Groovy scripts)
22 | */
23 | private static GroovyClassLoader LOADER = null;
24 |
25 | /**
26 | * Holds the MD5 fingerprint of each Groovy script's content and the corresponding parsed groovy.lang.Script class
27 | * key: MD5 fingerprint
28 | * value: the parsed groovy.lang.Script class
29 | */
30 | private static ConcurrentHashMap<String, Class<Script>> clazzMaps
31 | = new ConcurrentHashMap<String, Class<Script>>();
32 |
33 |
34 |
35 | /**
36 | * @Author: 123
37 | * @Description: getCompilerConfiguration
38 | * @DateTime: 2024
39 | */
40 | private static CompilerConfiguration getCompilerConfiguration() {
41 | CompilerConfiguration config = new CompilerConfiguration();
42 | return config;
43 | }
44 |
45 | /**
46 | * @Author: 123
47 | * @Description: getEngineByClassLoader
48 | * @DateTime: 2024
49 | */
50 | public static GroovyClassLoader getEngineByClassLoader(String key) {
51 |
52 | GroovyClassLoader groovyClassLoader = null;
53 | Class