├── src
│   └── main
│       ├── resources
│       │   ├── kafka.properties
│       │   ├── database.properties
│       │   ├── logback.xml
│       │   └── log4j.properties
│       └── java
│           └── com
│               ├── sinks
│               │   ├── SinkToHbase.java
│               │   ├── SinkToKafka.java
│               │   ├── SinkToGreenplum.java
│               │   └── SinkToMySQL.java
│               ├── sqlquery
│               │   ├── DimensionSQLQuery.java
│               │   └── JoinedSQLQuery.java
│               ├── Seetings
│               │   ├── StreamTableSeetings.java
│               │   ├── DimensionTableSeetings.java
│               │   ├── ReadJDBCPro.java
│               │   └── CreateJDBCInputFormat.java
│               ├── model
│               │   └── Meeting.java
│               ├── utils
│               │   ├── BinLogBean.java
│               │   ├── KafkaConfigUtil.java
│               │   ├── Tuple2ToMeeting.java
│               │   └── JsonFilter.java
│               └── Main.java
├── 会议管理系统实时分析总结.assets
│   ├── 总体方案.png
│   ├── raHGlh.jpg
│   ├── siWsAK.jpg
│   ├── 20200713145329100.png
│   ├── image-20200710154201590.png
│   ├── image-20200710154343692.png
│   └── image-20200710154514531.png
├── .idea
│   ├── codeStyles
│   │   ├── codeStyleConfig.xml
│   │   └── Project.xml
│   ├── .gitignore
│   ├── vcs.xml
│   ├── modules.xml
│   ├── encodings.xml
│   ├── misc.xml
│   ├── libraries (IntelliJ-generated Maven dependency descriptors, one XML stub per jar)
│   ├── compiler.xml
│   ├── jarRepositories.xml
│   └── uiDesigner.xml
├── pom.xml
├── flink.iml
└── README.md
--------------------------------------------------------------------------------
/src/main/resources/kafka.properties:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/src/main/java/com/sinks/SinkToHbase.java:
--------------------------------------------------------------------------------
package com.sinks;

public class SinkToHbase {
}

--------------------------------------------------------------------------------
/src/main/java/com/sinks/SinkToKafka.java:
--------------------------------------------------------------------------------
package com.sinks;

public class SinkToKafka {
}

--------------------------------------------------------------------------------
/会议管理系统实时分析总结.assets/总体方案.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/liwei199411/FlinkStreamETL/HEAD/会议管理系统实时分析总结.assets/总体方案.png

--------------------------------------------------------------------------------
/会议管理系统实时分析总结.assets/raHGlh.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/liwei199411/FlinkStreamETL/HEAD/会议管理系统实时分析总结.assets/raHGlh.jpg

--------------------------------------------------------------------------------
/会议管理系统实时分析总结.assets/siWsAK.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/liwei199411/FlinkStreamETL/HEAD/会议管理系统实时分析总结.assets/siWsAK.jpg

--------------------------------------------------------------------------------
/会议管理系统实时分析总结.assets/20200713145329100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/liwei199411/FlinkStreamETL/HEAD/会议管理系统实时分析总结.assets/20200713145329100.png

--------------------------------------------------------------------------------
/会议管理系统实时分析总结.assets/image-20200710154201590.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/liwei199411/FlinkStreamETL/HEAD/会议管理系统实时分析总结.assets/image-20200710154201590.png

--------------------------------------------------------------------------------
/会议管理系统实时分析总结.assets/image-20200710154343692.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/liwei199411/FlinkStreamETL/HEAD/会议管理系统实时分析总结.assets/image-20200710154343692.png

--------------------------------------------------------------------------------
/会议管理系统实时分析总结.assets/image-20200710154514531.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/liwei199411/FlinkStreamETL/HEAD/会议管理系统实时分析总结.assets/image-20200710154514531.png
--------------------------------------------------------------------------------
/src/main/java/com/sqlquery/DimensionSQLQuery.java:
--------------------------------------------------------------------------------
package com.sqlquery;
/**
 * Query for the dimension table
 * */
public class DimensionSQLQuery {
    public static String Query = "SELECT * FROM meeting_address";
}

--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/

--------------------------------------------------------------------------------
/src/main/java/com/Seetings/StreamTableSeetings.java:
--------------------------------------------------------------------------------
package com.Seetings;
/**
 * Settings for the streaming (fact) table
 * */
public class StreamTableSeetings {
    public static String streamField = "meeting_id, meeting_code,address_id,mstart_date,mend_date,proctime.proctime";
    public static String streamTableName = "meeting_info";
}

--------------------------------------------------------------------------------
/src/main/resources/database.properties:
--------------------------------------------------------------------------------
## greenplum
driver=org.postgresql.Driver
url=jdbc:postgresql://ip_address/schema?serverTimezone=GMT+8
Username=***
Password=***

## mysql
mysql_driver=com.mysql.jdbc.Driver
mysql_url=jdbc:mysql://master:3306/********
mysql_Username=****
mysql_Password=****

--------------------------------------------------------------------------------
/src/main/java/com/Seetings/DimensionTableSeetings.java:
--------------------------------------------------------------------------------
package com.Seetings;
/**
 * Settings for the dimension table
 * */
public class DimensionTableSeetings {
    public static String DimensionTableName = "meeting_address";
    public static String DimensionTableField = "meetingroom_id,meetingroom_name,location_id,location_name,city";
}
--------------------------------------------------------------------------------
/src/main/java/com/sqlquery/JoinedSQLQuery.java:
--------------------------------------------------------------------------------
package com.sqlquery;
/**
 * Join between the stream table and the dimension table
 * */
public class JoinedSQLQuery {
    public static String Query = "SELECT mi.meeting_id, mi.meeting_code,ma.meetingroom_id,ma.meetingroom_name,ma.location_name,ma.city" +
            " FROM meeting_info AS mi " +
            "LEFT JOIN " +
            "meeting_address AS ma " +
            "ON mi.address_id=ma.meetingroom_id";
}

--------------------------------------------------------------------------------
/src/main/java/com/model/Meeting.java:
--------------------------------------------------------------------------------
package com.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Meeting entity class
 * */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Meeting {
    public int meeting_id;
    public String meeting_code;
    public int meetingroom_id;
    public String meetingroom_name;
    public String location_name;
    public String city;
}

--------------------------------------------------------------------------------
/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
(XML markup lost in extraction; only the console appender's log pattern survives:
%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n)
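Taken together, StreamTableSeetings, DimensionTableSeetings and JoinedSQLQuery only hold the strings a Flink Table API job needs; the actual wiring lives in Main.java, whose body is not part of this dump. Below is a minimal sketch of how these pieces could be combined on Flink 1.9. The class name, method shape and variable names are illustrative assumptions, not code from this repository.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import com.Seetings.DimensionTableSeetings;
import com.Seetings.StreamTableSeetings;
import com.sqlquery.JoinedSQLQuery;

public class JoinJobSketch {
    /**
     * Sketch only. meetingStream: the Tuple5 records extracted from the canal binlog (see JsonFilter
     * further below); dimensionStream: the rows read once via CreateJDBCInputFormat.
     */
    public static DataStream<Tuple2<Boolean, Row>> joinMeetingWithRooms(
            StreamExecutionEnvironment env,
            DataStream<Tuple5<Integer, String, Integer, String, String>> meetingStream,
            DataStream<Row> dimensionStream) {

        EnvironmentSettings settings =
                EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Register the binlog-driven stream as "meeting_info", appending a processing-time attribute.
        tableEnv.registerDataStream(StreamTableSeetings.streamTableName, meetingStream,
                StreamTableSeetings.streamField);

        // Register the dimension rows as "meeting_address".
        tableEnv.registerDataStream(DimensionTableSeetings.DimensionTableName, dimensionStream,
                DimensionTableSeetings.DimensionTableField);

        // Run the LEFT JOIN and hand back a retract stream of (changeFlag, row) pairs.
        Table joined = tableEnv.sqlQuery(JoinedSQLQuery.Query);
        return tableEnv.toRetractStream(joined, Row.class);
    }
}

The returned retract stream is what Tuple2ToMeeting (further below) converts into Meeting objects before they are written to Greenplum.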
--------------------------------------------------------------------------------
/src/main/java/com/utils/BinLogBean.java:
--------------------------------------------------------------------------------
package com.utils;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.io.Serializable;

/**
 * Not used for now
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class BinLogBean implements Serializable {
    public String data;      // latest data as a JSON array: inserted rows for INSERT, post-update rows for UPDATE, deleted rows for DELETE
    public String database;  // database name
    public Long es;          // event time, 13-digit timestamp
    public Long id;          // sequence number of the event: 1, 2, 3, ...
    public Boolean isDdl;    // whether the event is a DDL operation
    public String mysqlType; // column types
    public String old;       // old (pre-change) data
    public String sql;       // SQL statement
    public String sqlType;   // converted by Canal: unsigned int becomes Long, unsigned long becomes BigDecimal
    public String table;     // table name
    public Long ts;          // log timestamp
    public String type;      // operation type: INSERT, DELETE or UPDATE
}

--------------------------------------------------------------------------------
/src/main/java/com/Seetings/ReadJDBCPro.java:
--------------------------------------------------------------------------------
package com.Seetings;

import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;

public class ReadJDBCPro {
    public static Properties buildGreenPlumJDBCProps() {
        Properties properties = new Properties();
        try {
            properties.load(new FileReader("database.properties"));
        } catch (IOException e) {
            e.printStackTrace();
        }
        properties.setProperty("url", "jdbc:postgresql://******:5432/datahub?serverTimezone=GMT+8");
        properties.setProperty("Username", "******");
        properties.setProperty("Password", "******");
        return properties;
    }

    public static Properties buildMysqlJDBCProps() {
        Properties properties = new Properties();
        properties.setProperty("url", "jdbc:mysql://******:3306/canal_destination?useUnicode=true&characterEncoding=UTF-8");
        properties.setProperty("Username", "*****");
        properties.setProperty("Password", "*****");
        return properties;
    }
}
--------------------------------------------------------------------------------
/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

log4j.rootLogger=ERROR, console
#log4j.rootLogger=WARN, console

log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n

--------------------------------------------------------------------------------
/src/main/java/com/utils/KafkaConfigUtil.java:
--------------------------------------------------------------------------------
package com.utils;
import lombok.extern.log4j.Log4j;
import java.util.Properties;
/**
 * Kafka configuration
 * */
@Log4j
public class KafkaConfigUtil {
    public static String topic = "example";        // Kafka topic
    public static String fieldDelimiter = ",";     // delimiter separating the fields after the JSON has been flattened

    public static Properties buildKafkaProps() {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "master:9092,slave01:9092,slave02:9092");
        properties.setProperty("zookeeper.connect", "master:2181,slave01:2181,slave02:2181");
        properties.setProperty("group.id", "meeting_group3");
        properties.put("auto.offset.reset", "latest");
        /** earliest
              if a partition has a committed offset, consume from it; otherwise consume from the beginning
            latest
              if a partition has a committed offset, consume from it; otherwise consume only newly produced records
            none
              if every partition has a committed offset, consume from those offsets; throw an exception as soon as one partition has none
        */
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        log.info("get kafka config, config map-> " + properties.toString());
        return properties;
    }
}
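Main.java is listed in the project tree but not included in this dump. The sketch below shows one plausible way KafkaConfigUtil is consumed: a FlinkKafkaConsumer reads the canal JSON from Kafka, and the JsonFilter steps (shown further below) reduce each message to the Tuple5 expected by the stream table. The job wiring, the class name and the type hints are assumptions, not the repository's actual Main.

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import com.utils.JsonFilter;
import com.utils.KafkaConfigUtil;

public class KafkaSourceSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Consume the canal binlog topic as raw JSON strings, using the properties built above.
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(
                KafkaConfigUtil.topic, new SimpleStringSchema(), KafkaConfigUtil.buildKafkaProps());

        // Keep only non-DDL INSERTs on t_meeting_info, flatten the "data" array,
        // then pick out the columns the stream table needs (JsonFilter is shown further below).
        // JsonFilter is instantiated inside each lambda so the closures stay serializable.
        DataStream<Tuple5<Integer, String, Integer, String, String>> meetingStream = env
                .addSource(consumer)
                .filter(json -> new JsonFilter().getJsonFilter(json))
                .map(json -> new JsonFilter().dataMap(json))
                .map(fields -> new JsonFilter().fieldMap(fields))
                .returns(Types.TUPLE(Types.INT, Types.STRING, Types.INT, Types.STRING, Types.STRING));

        env.execute("meeting binlog ETL (sketch)");
    }
}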
--------------------------------------------------------------------------------
/src/main/java/com/utils/Tuple2ToMeeting.java:
--------------------------------------------------------------------------------
package com.utils;

import com.model.Meeting;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.types.Row;
/**
 * Convert a retract-stream Tuple2<Boolean, Row> record into a Meeting entity
 * */
public class Tuple2ToMeeting {
    public Meeting getTuple2ToMeeting(Tuple2<Boolean, Row> booleanRowTuple2) throws Exception {
        Meeting meeting = new Meeting();
        meeting.setMeeting_id((Integer) booleanRowTuple2.f1.getField(0));
        System.out.println("meeting_id:" + booleanRowTuple2.f1.getField(0));

        meeting.setMeeting_code((String) booleanRowTuple2.f1.getField(1));
        System.out.println("meeting_code:" + booleanRowTuple2.f1.getField(1));

        meeting.setMeetingroom_id((Integer) booleanRowTuple2.f1.getField(2));
        System.out.println("meetingroom_id:" + booleanRowTuple2.f1.getField(2));

        meeting.setMeetingroom_name((String) booleanRowTuple2.f1.getField(3));
        System.out.println("meetingroom_name:" + booleanRowTuple2.f1.getField(3));

        meeting.setLocation_name((String) booleanRowTuple2.f1.getField(4));
        System.out.println("location_name:" + booleanRowTuple2.f1.getField(4));

        meeting.setCity((String) booleanRowTuple2.f1.getField(5));
        System.out.println("city:" + booleanRowTuple2.f1.getField(5));

        return meeting;
    }
}

--------------------------------------------------------------------------------
/src/main/java/com/Seetings/CreateJDBCInputFormat.java:
--------------------------------------------------------------------------------
package com.Seetings;

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
/**
 * Synchronizes the meeting-room dimension table.
 * Known problem: JDBCInputFormat loads the full table into state in one shot, so it cannot see later
 * changes to the MySQL dimension table and it occupies a lot of state space.
 * A later version should use async I/O + a cache + asynchronous JDBC instead.
 */
public class CreateJDBCInputFormat {
    TypeInformation<?>[] fieldTypes = new TypeInformation<?>[]{
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO
    };
    RowTypeInfo rowTypeInfo = new RowTypeInfo(fieldTypes);

    public JDBCInputFormat createJDBCInputFormat() {
        JDBCInputFormat jdbcInputFormat = JDBCInputFormat.buildJDBCInputFormat()
                .setDrivername("com.mysql.jdbc.Driver")
                .setDBUrl("jdbc:mysql://master/canal_test")
                .setUsername("root")
                .setPassword("root")
                .setQuery("SELECT tma.id AS meetingroom_id,tma.name as meetingroom_name,tma.location as location_id,tml.full_name as location_name,tmr.`name` AS city\n" +
                        "FROM t_meeting_address as tma LEFT JOIN t_meeting_location AS tml \n" +
                        "ON tma.location=tml.code \n" +
                        "LEFT JOIN t_meeting_region AS tmr ON tml.region_id=tmr.id") // dimension-table query
                .setRowTypeInfo(rowTypeInfo)
                .finish();
        return jdbcInputFormat;
    }
}
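The limitation noted above, a one-shot full load of the dimension table, is exactly what the planned async I/O + cache + asynchronous JDBC approach would address. The following is a rough sketch of that direction using Flink's AsyncDataStream, a plain JDBC lookup on a background executor and a Guava cache; the class, the lookup SQL and all parameter values are illustrative assumptions, not code from this repository.

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.flink.types.Row;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/** Sketch: look the meeting-room dimension row up per event instead of loading the whole table. */
public class AsyncRoomLookup extends RichAsyncFunction<Integer, Row> {
    private transient Connection connection;
    private transient ExecutorService executor;
    private transient Cache<Integer, Row> cache;

    @Override
    public void open(Configuration parameters) throws Exception {
        connection = DriverManager.getConnection("jdbc:mysql://master/canal_test", "root", "root");
        // Single thread so the shared JDBC connection is never used concurrently.
        executor = Executors.newSingleThreadExecutor();
        cache = CacheBuilder.newBuilder().maximumSize(10_000).expireAfterWrite(10, TimeUnit.MINUTES).build();
    }

    @Override
    public void asyncInvoke(Integer addressId, ResultFuture<Row> resultFuture) {
        Row cached = cache.getIfPresent(addressId);
        if (cached != null) {
            resultFuture.complete(Collections.singleton(cached));
            return;
        }
        executor.submit(() -> {
            try (PreparedStatement ps = connection.prepareStatement(
                    "SELECT id, name, location FROM t_meeting_address WHERE id = ?")) {
                ps.setInt(1, addressId);
                try (ResultSet rs = ps.executeQuery()) {
                    Row row = new Row(3);          // stays empty when no match is found
                    if (rs.next()) {
                        row.setField(0, rs.getInt(1));
                        row.setField(1, rs.getString(2));
                        row.setField(2, rs.getString(3));
                    }
                    cache.put(addressId, row);
                    resultFuture.complete(Collections.singleton(row));
                }
            } catch (Exception e) {
                resultFuture.completeExceptionally(e);
            }
        });
    }

    @Override
    public void close() throws Exception {
        if (connection != null) connection.close();
        if (executor != null) executor.shutdown();
    }
}

// Wiring (sketch):
// DataStream<Row> enriched =
//     AsyncDataStream.unorderedWait(addressIdStream, new AsyncRoomLookup(), 5000, TimeUnit.MILLISECONDS, 100);

A production version would replace the single shared JDBC connection with a non-blocking client or a connection pool (the project already depends on vertx-jdbc-client and c3p0) and tune the cache size, TTL and timeout to the actual load.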
0; i < data.size(); i++) { 35 | //获取data数组的所有字段 36 | JSONObject obj = data.getJSONObject(i); 37 | if (obj != null) { 38 | for (Map.Entry entry : obj.entrySet()) { 39 | fieldValue.append(entry.getValue()); 40 | fieldValue.append(fieldDelimiter); 41 | } 42 | } 43 | } 44 | return fieldValue.toString(); 45 | } 46 | 47 | public Tuple5 fieldMap(String datafield) throws Exception { 48 | Integer meeting_id= Integer.valueOf(datafield.split("[\\,]")[0]); 49 | String meeting_code=datafield.split("[\\,]")[1]; 50 | Integer address_id= Integer.valueOf(datafield.split("[\\,]")[7]); 51 | String mstart_date=datafield.split("[\\,]")[13]; 52 | String mend_date=datafield.split("[\\,]")[14]; 53 | return new Tuple5(meeting_id, meeting_code,address_id,mstart_date,mend_date) ; 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/main/java/com/sinks/SinkToGreenplum.java: -------------------------------------------------------------------------------- 1 | package com.sinks; 2 | import com.Seetings.ReadJDBCPro; 3 | import com.model.Meeting; 4 | import org.apache.commons.dbcp2.BasicDataSource; 5 | import org.apache.flink.configuration.Configuration; 6 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 7 | 8 | import java.io.FileInputStream; 9 | import java.io.IOException; 10 | import java.sql.Connection; 11 | import java.sql.PreparedStatement; 12 | import java.util.Properties; 13 | 14 | /** 15 | * sink to Greenplum 16 | * */ 17 | 18 | public class SinkToGreenplum extends RichSinkFunction{ 19 | PreparedStatement ps; 20 | BasicDataSource dataSource; 21 | private Connection connection; 22 | /** 23 | * open() 方法中建立连接,这样不用每次invoke的时候都要建立连接和释放连接 24 | * @param parameters 25 | * @throws Exception 26 | * */ 27 | 28 | @Override 29 | public void open(Configuration parameters) throws Exception{ 30 | super.open(parameters); 31 | dataSource=new BasicDataSource(); 32 | connection=getConnection(dataSource); 33 | String sql="INSERT INTO public .meeting_result(meeting_id, meeting_code, meetingroom_id,meetingroom_name,location_name,city) values(?, ?, ?,?,?,?);"; 34 | ps=this.connection.prepareStatement(sql); 35 | } 36 | @Override 37 | public void close() throws Exception { 38 | super.close(); 39 | //关闭连接和释放资源 40 | if(connection!=null){ 41 | connection.close(); 42 | } 43 | if(ps!=null){ 44 | connection.close(); 45 | } 46 | } 47 | 48 | /** 49 | * 每条数据的插入都需要调用一次invoke()方法 50 | * @param meeting 51 | * @param context 52 | * @throws Exception 53 | * */ 54 | @Override 55 | public void invoke(Meeting meeting,Context context) throws Exception{ 56 | ps.setInt(1,meeting.getMeeting_id()); 57 | ps.setString(2,meeting.getMeeting_code()); 58 | ps.setInt(3,meeting.getMeetingroom_id()); 59 | ps.setString(4,meeting.getMeetingroom_name()); 60 | ps.setString(5,meeting.getLocation_name()); 61 | ps.setString(6,meeting.getCity()); 62 | ps.executeUpdate(); 63 | System.out.println("插入成功:"+meeting.toString()); 64 | } 65 | 66 | private static Connection getConnection(BasicDataSource dataSource) { 67 | Properties prop=new Properties(); 68 | try { 69 | prop.load(new FileInputStream("D:\\flink\\src\\main\\resources\\database.properties")); 70 | String driver=prop.getProperty("driver"); 71 | String url=prop.getProperty("url"); 72 | String username=prop.getProperty("Username"); 73 | String password=prop.getProperty("Password"); 74 | 75 | dataSource.setDriverClassName(driver); 76 | dataSource.setUrl(url); 77 | dataSource.setUsername(username); 78 | dataSource.setPassword(password); 79 | } catch 
(IOException e) { 80 | e.printStackTrace(); 81 | } 82 | 83 | //设置连接池的参数 84 | dataSource.setInitialSize(10); 85 | dataSource.setMaxTotal(50); 86 | dataSource.setMinIdle(2); 87 | 88 | Connection con=null; 89 | try{ 90 | con=dataSource.getConnection(); 91 | System.out.println("创建连接池:"+con); 92 | } catch (Exception e) { 93 | System.out.println("-----------greenplum get connection has exception,msg=" +e.getMessage()); 94 | } 95 | return con; 96 | } 97 | } -------------------------------------------------------------------------------- /src/main/java/com/sinks/SinkToMySQL.java: -------------------------------------------------------------------------------- 1 | package com.sinks; 2 | 3 | import com.Seetings.ReadJDBCPro; 4 | import com.model.Meeting; 5 | import org.apache.commons.dbcp2.BasicDataSource; 6 | import org.apache.flink.configuration.Configuration; 7 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 8 | 9 | import java.io.FileInputStream; 10 | import java.io.IOException; 11 | import java.sql.Connection; 12 | import java.sql.PreparedStatement; 13 | import java.util.Properties; 14 | 15 | /** 16 | * sinktoMysql 17 | * 另外一种实现方法 18 | * */ 19 | 20 | public class SinkToMySQL extends RichSinkFunction{ 21 | PreparedStatement ps; 22 | BasicDataSource dataSource; 23 | private Connection connection; 24 | /** 25 | * open() 方法中建立连接,这样不用每次invoke的时候都要建立连接和释放连接 26 | * @param parameters 27 | * @throws Exception 28 | * */ 29 | 30 | @Override 31 | public void open(Configuration parameters) throws Exception{ 32 | super.open(parameters); 33 | dataSource=new BasicDataSource(); 34 | connection=getConnection(dataSource); 35 | String sql="replace into meeting_result(meeting_id, meeting_code, meetingroom_id,meetingroom_name,location_name,city) values(?, ?, ?,?,?,?);"; 36 | ps=this.connection.prepareStatement(sql); 37 | 38 | } 39 | @Override 40 | public void close() throws Exception { 41 | super.close(); 42 | //关闭连接和释放资源 43 | if(connection!=null){ 44 | connection.close(); 45 | } 46 | if(ps!=null){ 47 | connection.close(); 48 | } 49 | } 50 | /** 51 | * 每条数据的插入都需要调用一次invoke()方法 52 | * @param meeting 53 | * @param context 54 | * @throws Exception 55 | * */ 56 | @Override 57 | public void invoke(Meeting meeting,Context context) throws Exception{ 58 | ps.setInt(1,meeting.getMeeting_id()); 59 | ps.setString(2,meeting.getMeeting_code()); 60 | ps.setInt(3,meeting.getMeetingroom_id()); 61 | ps.setString(4,meeting.getMeetingroom_name()); 62 | ps.setString(5,meeting.getLocation_name()); 63 | ps.setString(6,meeting.getCity()); 64 | ps.executeUpdate(); 65 | } 66 | 67 | private static Connection getConnection(BasicDataSource dataSource) { 68 | Properties mysqlprop=new Properties(); 69 | try { 70 | mysqlprop.load(new FileInputStream("D:\\flink\\src\\main\\java\\com\\sinks\\database.properties")); 71 | String mysqldriver=mysqlprop.getProperty("mysql_driver"); 72 | String mysqlurl=mysqlprop.getProperty("mysql_url"); 73 | String mysqlusername=mysqlprop.getProperty("mysql_Username"); 74 | String mysqlpassword=mysqlprop.getProperty("mysql_Password"); 75 | 76 | dataSource.setDriverClassName(mysqldriver); 77 | dataSource.setUrl(mysqlurl); 78 | dataSource.setUsername(mysqlusername); 79 | dataSource.setPassword(mysqlpassword); 80 | } catch (IOException e) { 81 | e.printStackTrace(); 82 | } 83 | 84 | //设置连接池的参数 85 | dataSource.setInitialSize(10); 86 | dataSource.setMaxTotal(50); 87 | dataSource.setMinIdle(2); 88 | 89 | Connection con=null; 90 | try{ 91 | con=dataSource.getConnection(); 92 | 
System.out.println("创建连接池:"+con); 93 | } catch (Exception e) { 94 | System.out.println("-----------mysql get connection has exception,msg=" +e.getMessage()); 95 | } 96 | return con; 97 | } 98 | } -------------------------------------------------------------------------------- /src/main/java/com/Main.java: -------------------------------------------------------------------------------- 1 | package com; 2 | 3 | import com.Seetings.DimensionTableSeetings; 4 | import com.alibaba.fastjson.JSON; 5 | import com.model.Meeting; 6 | import com.sinks.SinkToGreenplum; 7 | import com.Seetings.CreateJDBCInputFormat; 8 | import com.sqlquery.DimensionSQLQuery; 9 | import com.sqlquery.JoinedSQLQuery; 10 | import com.utils.JsonFilter; 11 | import com.utils.KafkaConfigUtil; 12 | import com.Seetings.StreamTableSeetings; 13 | import com.utils.Tuple2ToMeeting; 14 | import org.apache.flink.api.common.functions.FilterFunction; 15 | import org.apache.flink.api.common.functions.MapFunction; 16 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 17 | import org.apache.flink.api.common.time.Time; 18 | import org.apache.flink.api.java.io.jdbc.JDBCInputFormat; 19 | import org.apache.flink.api.java.tuple.Tuple2; 20 | import org.apache.flink.api.java.tuple.Tuple5; 21 | import org.apache.flink.streaming.api.CheckpointingMode; 22 | import org.apache.flink.streaming.api.TimeCharacteristic; 23 | import org.apache.flink.streaming.api.datastream.DataStream; 24 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 25 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 26 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 27 | import org.apache.flink.streaming.api.functions.ProcessFunction; 28 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; 29 | import org.apache.flink.table.api.EnvironmentSettings; 30 | import org.apache.flink.table.api.Table; 31 | import org.apache.flink.table.api.java.StreamTableEnvironment; 32 | import org.apache.flink.types.Row; 33 | import org.apache.flink.util.Collector; 34 | import org.slf4j.Logger; 35 | import org.slf4j.LoggerFactory; 36 | import java.util.Properties; 37 | 38 | /** 39 | * Flink 实时计算MysqlBinLog日志,并写入数据库 40 | * */ 41 | public class Main { 42 | private static Logger log = LoggerFactory.getLogger(Main.class); 43 | public static void main(String[] args) throws Exception { 44 | /** 45 | * Flink 配置 46 | * */ 47 | StreamExecutionEnvironment env=StreamExecutionEnvironment.getExecutionEnvironment(); 48 | env.getConfig().disableSysoutLogging(); //设置此可以屏蔽掉日记打印情况 49 | env.enableCheckpointing(1000);////非常关键,一定要设置启动检查点 50 | env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);//设置事件时间 51 | env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE); 52 | EnvironmentSettings bsSettings=EnvironmentSettings.newInstance()//使用Blink planner、创建TableEnvironment,并且设置状态过期时间,避免Job OOM 53 | .useBlinkPlanner() 54 | .inStreamingMode() 55 | .build(); 56 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env,bsSettings); 57 | tEnv.getConfig().setIdleStateRetentionTime(Time.days(1),Time.days(2)); 58 | /** 59 | * Kafka配置 60 | * */ 61 | Properties properties = KafkaConfigUtil.buildKafkaProps();//kafka参数配置 62 | FlinkKafkaConsumer consumer = new FlinkKafkaConsumer<>(KafkaConfigUtil.topic, new SimpleStringSchema(), properties); 63 | /** 64 | * 将Kafka-consumer的数据作为源 65 | * 并对Json格式进行解析 66 | * */ 67 | SingleOutputStreamOperator> meeting_stream=env.addSource(consumer) 68 | 
.filter(new FilterFunction() { //过滤掉JSON格式中的DDL操作 69 | @Override 70 | public boolean filter(String jsonVal) throws Exception { 71 | //json格式解析:"isDdl":false,"table":t_meeting_info,"type":"INSERT" 72 | return new JsonFilter().getJsonFilter(jsonVal); 73 | } 74 | }) 75 | .map(new MapFunction() { 76 | @Override 77 | //获取字段数据 78 | public String map(String jsonvalue) throws Exception { 79 | return new JsonFilter().dataMap(jsonvalue); 80 | } 81 | }).map(new MapFunction>() { 82 | @Override 83 | public Tuple5 map(String dataField) throws Exception { 84 | return new JsonFilter().fieldMap(dataField); 85 | } 86 | }); 87 | /** 88 | * 将流式数据(元组类型)注册为表 89 | * 会议室维表同步 90 | */ 91 | tEnv.registerDataStream(StreamTableSeetings.streamTableName,meeting_stream,StreamTableSeetings.streamField); 92 | CreateJDBCInputFormat createJDBCFormat=new CreateJDBCInputFormat(); 93 | JDBCInputFormat jdbcInputFormat=createJDBCFormat.createJDBCInputFormat(); 94 | DataStreamSource dataStreamSource=env.createInput(jdbcInputFormat);//字段类型 95 | tEnv.registerDataStream(DimensionTableSeetings.DimensionTableName,dataStreamSource,DimensionTableSeetings.DimensionTableField); 96 | 97 | //流表与维表join,并对结果表进行查询 98 | Table meeting_info=tEnv.scan(StreamTableSeetings.streamTableName); 99 | Table meeting_address=tEnv.sqlQuery(DimensionSQLQuery.Query); 100 | Table joined=tEnv.sqlQuery(JoinedSQLQuery.Query); 101 | /** 102 | 对结果表进行查询,TO_TIMESTAMP是Flink的时间函数,对时间格式进行转换,具体请看官网 103 | 只对开始的会议进行转换。 统计空置率指的是统计当下时间里,已经在会议中的会议室,还是已经预定的呢 104 | Table joined=tEnv.sqlQuery("select meeting_id, meeting_code,TO_TIMESTAMP(mstart_date),TO_TIMESTAMP(mend_date),proctime.proctime " + 105 | "from meeting_info " + 106 | "where TO_TIMESTAMP(mstart_date)> stream1 =tEnv.toRetractStream(joined,Row.class).filter(new FilterFunction>() { 114 | @Override 115 | public boolean filter(Tuple2 booleanRowTuple2) throws Exception { 116 | return booleanRowTuple2.f0; 117 | } 118 | }); 119 | stream1.print(); 120 | */ 121 | //适用于维表查询的情况2 122 | DataStream> stream_tosink =tEnv.toRetractStream(joined,Row.class); 123 | stream_tosink.process(new ProcessFunction, Object>() { 124 | private Tuple2 booleanRowTuple2; 125 | private ProcessFunction, Object>.Context context; 126 | private Collector collector; 127 | @Override 128 | public void processElement(Tuple2 booleanRowTuple2, Context context, Collector collector) throws Exception { 129 | if(booleanRowTuple2.f0){ 130 | System.out.println(JSON.toJSONString(booleanRowTuple2.f1)); 131 | } 132 | } 133 | }); 134 | stream_tosink.print();//测试输出 135 | 136 | //转换Tuple元组到实体类对象 137 | DataStream dataStream=stream_tosink.map(new MapFunction, Meeting>() { 138 | @Override 139 | public Meeting map(Tuple2 booleanRowTuple2) throws Exception { 140 | return new Tuple2ToMeeting().getTuple2ToMeeting(booleanRowTuple2); 141 | } 142 | }); 143 | /** 144 | * Sink 145 | * */ 146 | dataStream.print(); 147 | //dataStream.addSink(new SinkToMySQL());//测试ok 148 | dataStream.addSink(new SinkToGreenplum());//测试ok 149 | //执行 150 | env.execute("Meeting Streaming job"); 151 | } 152 | } -------------------------------------------------------------------------------- /.idea/uiDesigner.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 
| 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 19 | 21 | 4.0.0 22 | 23 | com.test 24 | flink 25 | 1.0.0 26 | jar 27 | 28 | Flink Quickstart Job 29 | http://www.myorganization.org 30 | 31 | 32 | UTF-8 33 | 1.9.0 34 | 1.8 35 | 2.12 36 | ${java.version} 37 | ${java.version} 38 | 1.16.20 39 | 42.1.4 40 | 41 | 42 | 43 | 44 | 45 | apache.snapshots 46 | Apache Development Snapshot Repository 47 | https://repository.apache.org/content/repositories/snapshots/ 48 | 49 | false 50 | 51 | 52 | true 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | org.apache.flink 62 | flink-java 63 | ${flink.version} 64 | 65 | 66 | org.apache.flink 67 | flink-scala_${scala.binary.version} 68 | ${flink.version} 69 | 70 | 71 | org.apache.flink 72 | flink-streaming-java_${scala.binary.version} 73 | ${flink.version} 74 | 75 | 76 | 77 | org.apache.flink 78 | flink-core 79 | ${flink.version} 80 | 81 | 82 | 83 | 84 | org.apache.flink 85 | flink-table-planner-blink_${scala.binary.version} 86 | ${flink.version} 87 | 88 | 89 | 90 | 91 | org.apache.flink 92 | flink-connector-kafka_2.12 93 | ${flink.version} 94 | 95 | 96 | org.apache.flink 97 | flink-connector-kafka-0.10_${scala.binary.version} 98 | ${flink.version} 99 | 100 | 101 | org.apache.flink 102 | flink-connector-kafka-0.11_2.12 103 | 1.9.0 104 | 105 | 106 | 107 | 108 | com.alibaba.otter 109 | canal.protocol 110 | 1.1.2 111 | 112 | 113 | 114 | 115 | 116 | 117 | org.slf4j 118 | slf4j-log4j12 119 | 1.7.7 120 | 121 | 122 | log4j 123 | log4j 124 | 1.2.17 125 | 126 | 127 | 128 | com.alibaba 129 | fastjson 130 | 1.2.69 131 | 132 | 133 | org.projectlombok 134 | lombok 135 | ${lombok.version} 136 | provided 137 | 138 | 139 | org.apache.flink 140 | flink-jdbc_${scala.binary.version} 141 | ${flink.version} 142 | 143 | 144 | 145 | io.vertx 146 | vertx-jdbc-client 147 | 3.7.0 148 | 149 | 150 | 151 | mysql 152 | mysql-connector-java 153 | 5.1.34 154 | 155 | 156 | org.postgresql 157 | postgresql 158 | ${postgresql-jdbc.version} 159 | 160 | 161 | 162 | org.apache.commons 163 | commons-dbcp2 164 | 2.1.1 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | org.apache.maven.plugins 175 | maven-compiler-plugin 176 | 3.1 177 | 178 | ${java.version} 179 | ${java.version} 180 | 181 | 182 | 183 | 184 | 185 | 186 | org.apache.maven.plugins 187 | maven-shade-plugin 188 | 3.0.0 189 | 190 | 191 | 192 | package 193 | 194 | shade 195 | 196 | 197 | 198 | 199 | org.apache.flink:force-shading 200 | com.google.code.findbugs:jsr305 201 | org.slf4j:* 202 | log4j:* 203 | 204 | 205 | 206 | 207 | 209 | *:* 210 | 211 | META-INF/*.SF 212 | META-INF/*.DSA 213 | META-INF/*.RSA 214 | 215 | 216 | 217 | 218 | 219 | com.test.TestFlinkKafka 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | org.eclipse.m2e 234 | lifecycle-mapping 235 | 1.0.0 236 | 237 | 238 | 239 | 240 | 241 | org.apache.maven.plugins 242 | maven-shade-plugin 243 | [3.0.0,) 244 | 245 | shade 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | org.apache.maven.plugins 255 | maven-compiler-plugin 256 | [3.1,) 257 | 258 | testCompile 259 | compile 260 | 261 | 262 | 263 | 264 | 265 | 
266 | 267 | 268 | 269 | 270 | 271 | 272 | 273 | 274 | 275 | 276 | 277 | 278 | 279 | add-dependencies-for-IDEA 280 | 281 | 282 | 283 | idea.version 284 | 285 | 286 | 287 | 288 | 289 | org.apache.flink 290 | flink-java 291 | ${flink.version} 292 | compile 293 | 294 | 295 | org.apache.flink 296 | flink-streaming-java_${scala.binary.version} 297 | ${flink.version} 298 | compile 299 | 300 | 301 | 302 | 303 | 304 | 305 | -------------------------------------------------------------------------------- /flink.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FlinkStreamETL 2 | 3 | ## 0.功能说明 4 | **概括**:利用Flink实时统计Mysql数据库BinLog日志数据,并将流式数据注册为流表,利用Flink SQL将流表与Mysql的维表进行JOIN,最后将计算结果实时写入Greenplum/Mysql。 5 | 6 | ## 1.需求分析 7 | 8 | ### **1.1需求** 9 | 10 | 实时统计各个地区会议室的空置率,预定率,并在前端看板上实时展示。源系统的数据库是`Mysql`,它有三张表,分别是:t_meeting_info(会议室预定信息表)、t_meeting_location(属地表,维度表)、t_meeting_address(会议室属地表,维度表)。 11 | 12 | ### **1.2说明** 13 | 14 | `t_meeting_info`表中的数据每时每刻都在更新数据,若通过**`JDBC`**方式定时查询`Mysql`,会给源系统数据库造成大量无形的压力,甚至会影响正常业务的使用,并且时效性也不高。需要在基本不影响**`Mysql`**正常使用的情况下完成对增量数据的处理。 15 | 16 | 上面三张表的`DDL`语句如下: 17 | 18 | - t_meeting_info(会议室预定信息表,这张表数据会实时更新) 19 | 20 | ```sql 21 | CREATE TABLE `t_meeting_info` ( 22 | `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键id', 23 | `meeting_code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '会议业务唯一编号', 24 | `msite` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议名称', 25 | `mcontent` varchar(4096) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议内容', 26 | `attend_count` int(5) DEFAULT NULL COMMENT '参会人数', 27 | `type` int(5) DEFAULT NULL COMMENT '会议类型 1 普通会议 2 融合会议 3 视频会议 4 电话会议', 28 | `status` int(255) DEFAULT NULL COMMENT '会议状态 ', 29 | `address_id` int(11) DEFAULT NULL COMMENT '会议室id', 30 | `email` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人邮箱', 31 | `contact_tel` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '联系电话', 32 | `create_user_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人姓名', 33 | `create_user_id` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人工号', 34 | `creator_org` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人组织', 35 | `mstart_date` datetime DEFAULT NULL COMMENT '会议开始时间', 36 | `mend_date` datetime DEFAULT NULL COMMENT '会议结束时间', 37 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 38 | `update_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci 
DEFAULT NULL COMMENT '更新人', 39 | `update_time` datetime DEFAULT NULL COMMENT '更新时间', 40 | `company` int(10) DEFAULT NULL COMMENT '会议所在属地code', 41 | `sign_status` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '预留字段', 42 | PRIMARY KEY (`id`) USING BTREE, 43 | KEY `t_meeting_info_meeting_code_index` (`meeting_code`) USING BTREE, 44 | KEY `t_meeting_info_address_id_index` (`address_id`) USING BTREE, 45 | KEY `t_meeting_info_create_user_id_index` (`create_user_id`) 46 | ) ENGINE=InnoDB AUTO_INCREMENT=65216 DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC COMMENT='会议主表'; 47 | ``` 48 | 49 | - t_meeting_location(属地表,地区维表) 50 | 51 | ```sql 52 | CREATE TABLE `t_meeting_location` ( 53 | `id` int(11) NOT NULL AUTO_INCREMENT, 54 | `short_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '属地简称', 55 | `full_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '属地全称', 56 | `code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '属地code', 57 | `region_id` int(11) DEFAULT NULL COMMENT '地区id', 58 | `create_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人', 59 | `update_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '更新人', 60 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 61 | `update_time` datetime DEFAULT NULL COMMENT '更新时间', 62 | PRIMARY KEY (`id`) USING BTREE, 63 | UNIQUE KEY `t_meeting_location_code_uindex` (`code`) USING BTREE 64 | ) ENGINE=InnoDB AUTO_INCREMENT=103 DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC COMMENT='属地表'; 65 | ``` 66 | 67 | - t_meeting_address(会议室属地表,会议室维表) 68 | 69 | ```sql 70 | CREATE TABLE `t_meeting_address` ( 71 | `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键id', 72 | `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议室名称', 73 | `location` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '所在属地', 74 | `shared` int(3) DEFAULT NULL COMMENT '是否共享 0 默认不共享 1 全部共享 2 选择性共享', 75 | `cost` int(10) DEFAULT NULL COMMENT '每小时成本', 76 | `size` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议室容量大小', 77 | `bvm_ip` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT 'BVM IP', 78 | `type` int(2) DEFAULT NULL COMMENT '会议室类型 1 普通会议室 2 视频会议室', 79 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 80 | `create_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人', 81 | `update_time` datetime DEFAULT NULL COMMENT '更新时间', 82 | `update_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '更新人', 83 | `status` int(2) DEFAULT NULL COMMENT '是否启用 ,0 未启用 1已启用 2已删除', 84 | `order` int(5) DEFAULT NULL COMMENT '排序', 85 | `approve` int(2) DEFAULT NULL COMMENT '是否审批 0 不审批 1 审批', 86 | PRIMARY KEY (`id`) USING BTREE, 87 | KEY `t_meeting_address_location_index` (`location`) USING BTREE, 88 | KEY `order` (`order`) USING BTREE 89 | ) ENGINE=InnoDB AUTO_INCREMENT=554 DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC COMMENT='会议室表'; 90 | ``` 91 | 92 | ## 2.实现方案 93 | 94 | 方案如下图所示: 95 | 96 | - 利用**Canal**监听**`Mysql`**数据库的增量`BinLog`日志数据(`JSON格式`) 97 | - 将增量日志数据作为**Kafka**的生产者,Flink解析**Kafka**的`Topic` 中的数据并消费 98 | - 将计算后的流式数据(Stream)注册为Flink 中的表(Table) 99 | - 最后利用Flink与t_meeting_location、t_meeting_address维表进行JOIN,将最终的结果写入数据库。 100 | 101 | 
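其中,各属地空置率/预定率的统计可以在流表与维表 JOIN 之后用一小段 Flink SQL 表达。下面是一段仅作示意的查询草稿(不是仓库中的实际代码):假设 JOIN 后的结果已注册为表 `joined_result`,并带有 `city`、`meetingroom_id`、`mstart_date`、`mend_date` 等字段(字段命名沿用本项目的流表定义与 Meeting 实体类),`tEnv` 即后文 Main.java 中创建的 StreamTableEnvironment:

```java
// 示意代码:表名 joined_result 为假设;NOW() 等时间函数的可用性请以所用 Flink 版本的文档为准
Table occupiedByCity = tEnv.sqlQuery(
        "SELECT city, COUNT(DISTINCT meetingroom_id) AS occupied_rooms " +
        "FROM joined_result " +
        "WHERE TO_TIMESTAMP(mstart_date) <= NOW() AND TO_TIMESTAMP(mend_date) >= NOW() " +
        "GROUP BY city");
```

这里以"当前时刻正处于会议中"作为占用口径;若按"已被预定"口径统计,把时间条件改为 mstart_date 晚于当前时间即可。空置率可用各属地会议室总数(对维表按 city 聚合)减去占用数再除以总数得到,具体时间函数与精度请参考 Flink 官方文档。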
![img](%E4%BC%9A%E8%AE%AE%E7%AE%A1%E7%90%86%E7%B3%BB%E7%BB%9F%E5%AE%9E%E6%97%B6%E5%88%86%E6%9E%90%E6%80%BB%E7%BB%93.assets/%E6%80%BB%E4%BD%93%E6%96%B9%E6%A1%88.png) 102 | 103 | 需要**服务器:CentOS7,JDK8、Scala 2.12.6、Mysql、Canal、Flink1.9、Zookkeeper、Kafka** 104 | 105 | ### 2.1 Canal简介 106 | 107 | **Canal**是阿里巴巴开源的纯`java`开发的基于数据库`binlog`的增量订阅&消费组件。Canal的原理是模拟为一个`Mysql slave`的交互协议,伪装为`MySQL slave`,向`Mysql Master`发送dump协议,然后`Mysql master`接收到这个请求后将`binary log`推送给slave(也就是Canal),Canal解析binary log对象。 108 | 109 | #### 2.1 Canal安装并配置(Cent Os服务器上) 110 | 111 | **`Mysql`数据库配置** 112 | 113 | - a. 开启`Mysql`的`Binlog`,修改`/etc/my.cnf`,在`[mysqld]`下添加如下配置,改完之后重启 `Mysql`,命令是: /etc/init.d/mysql restart。 114 | 115 | ```shell 116 | [mysqld] 117 | #添加这一行就ok 118 | log-bin=mysql-bin 119 | #选择row模式 120 | binlog-format=ROW 121 | #配置mysql replaction需要定义,不能和canal的slaveId重复 122 | server_id=1 123 | ``` 124 | 125 | - b.创建一个`Mysql`用户并赋予相应的权限,用于**Canal**使用 126 | 127 | ```sql 128 | mysql> CREATE USER canal IDENTIFIED BY 'canal'; 129 | mysql> GRANT SELECT, REPLICATION SLAVE, REPLICATION CLIENT ON *.* TO 'canal'@'%'; 130 | mysql> GRANT ALL PRIVILEGES ON *.* TO 'canal'@'%' ; 131 | mysql> FLUSH PRIVILEGES; 132 | ``` 133 | 134 | - c.`Zookeeper` 135 | 136 | 安装Kafka时需要依赖于`Zookeeper`(CDH 6.2版本已安装) 137 | 138 | - d.安装Kafka,创建一个Topic 139 | 140 | ```shell 141 | kafka-topics.sh --create --zookeeper master:2181,slave01:2181,slave02:2181 --partitions 2 --replication-factor 1 --topic example 142 | ``` 143 | 144 | **Canal安装**: 145 | 146 | - Canal下载地址 147 | 148 | ```shell 149 | https://github.com/alibaba/canal/releases/tag/canal-1.1.2 150 | ``` 151 | 152 | - 解压(在解压之前创建一个canal目录,直接解压会覆盖文件) 153 | 154 | ```shell 155 | mkdir -p /usr/local/canal 156 | mv canal.deployer-1.1.2.tar.gz /usr/local/canal/ 157 | tar -zxvf canal.deployer-1.1.2.tar.gz 158 | ``` 159 | 160 | - 修改instance配置文件(在`/usr/local/canal/conf/example/instance.properties`下) 161 | 162 | ```shell 163 | ## mysql serverId , v1.0.26+ will autoGen , 不要和server_id重复 164 | canal.instance.mysql.slaveId=3 165 | 166 | # position info。设置要监听的Mysql数据库的url 167 | canal.instance.master.address=10.252.70.6:3306 168 | 169 | # table meta tsdb info 170 | canal.instance.tsdb.enable=false 171 | 172 | # 这里配置前面在Mysql分配的用户名和密码 173 | canal.instance.dbUsername=canal 174 | canal.instance.dbPassword=canal 175 | canal.instance.connectionCharset=UTF-8 176 | # 配置需要检测的库名,可以不配置,这里只检测canal_test库 177 | canal.instance.defaultDatabaseName=canal_test 178 | # enable druid Decrypt database password 179 | canal.instance.enableDruid=false 180 | 181 | # 配置过滤的正则表达式,监测canal_test库下的所有表 182 | canal.instance.filter.regex=canal_test\\..* 183 | 184 | # 配置MQ 185 | ## 配置上在Kafka创建的那个Topic名字 186 | canal.mq.topic=example 187 | ## 配置分区编号为1() 188 | canal.mq.partition=1 189 | ``` 190 | 191 | - 修改canal.properties配置文件 192 | 193 | ```shell 194 | 195 | vim $CANAL_HOME/conf/canal.properties,修改如下项,其他默认即可 196 | # 这个是如果开启的是tcp模式,会占用这个11111端口,canal客户端通过这个端口获取数据 197 | canal.port = 11111 198 | 199 | # 可以配置为:tcp, kafka, RocketMQ,这里配置为kafka 200 | canal.serverMode = kafka 201 | 202 | ################################################## 203 | ######### destinations ############# 204 | ################################################## 205 | 206 | # 这里将这个注释掉,否则启动会有一个警告 207 | #canal.instance.tsdb.spring.xml = classpath:spring/tsdb/h2-tsdb.xml 208 | 209 | ################################################## 210 | ######### MQ ############# 211 | ################################################## 212 | ##Kafka集群 213 | canal.mq.servers = master:9092,slave01:9092,slave02:9092 
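# 下面这些 canal.mq.* 投递参数与 Kafka producer 的 retries、batch.size、max.request.size、linger.ms、buffer.memory 等配置一一对应,用于控制向 Kafka 投递 binlog 消息时的重试与批量行为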
214 | canal.mq.retries = 0 215 | canal.mq.batchSize = 16384 216 | canal.mq.maxRequestSize = 1048576 217 | canal.mq.lingerMs = 1 218 | canal.mq.bufferMemory = 33554432 219 | # Canal的batch size, 默认50K, 由于kafka最大消息体限制请勿超过1M(900K以下) 220 | canal.mq.canalBatchSize = 50 221 | # Canal get数据的超时时间, 单位: 毫秒, 空为不限超时 222 | canal.mq.canalGetTimeout = 100 223 | # 是否为flat json格式对象 224 | canal.mq.flatMessage = true 225 | canal.mq.compressionType = none 226 | canal.mq.acks = all 227 | # kafka消息投递是否使用事务 228 | #canal.mq.transaction = false 229 | ``` 230 | 231 | **启动Canal** 232 | 233 | ```shell 234 | $CANAL_HOME/bin/startup.sh 235 | ``` 236 | 237 | logs下会生成两个日志文件:logs/canal/canal.log、logs/example/example.log,查看这两个日志,保证没有报错日志。 238 | 239 | ```shell 240 | tail -f $CANAL_HOME/logs/example/example.log 241 | tail -f $CANAL_HOME/logs/canal/canal.log 242 | ``` 243 | 244 | **测试一下** 245 | 246 | 在`Mysql`数据库中进行增删改查的操作,然后查看Kafka的topic为 example 的数据 247 | 248 | ```shell 249 | kafka-console-consumer.sh --bootstrap-server master:9092,slave01:9092,slave02:9092 --from-beginning --topic example 250 | ``` 251 | 252 | - 向Mysql数据库中插入几条数据 253 | 254 | ![image-20200710154201590](%E4%BC%9A%E8%AE%AE%E7%AE%A1%E7%90%86%E7%B3%BB%E7%BB%9F%E5%AE%9E%E6%97%B6%E5%88%86%E6%9E%90%E6%80%BB%E7%BB%93.assets/image-20200710154201590.png) 255 | 256 | - 在Kafka中查看这些插入的数据 257 | 258 | ![image-20200710154343692](%E4%BC%9A%E8%AE%AE%E7%AE%A1%E7%90%86%E7%B3%BB%E7%BB%9F%E5%AE%9E%E6%97%B6%E5%88%86%E6%9E%90%E6%80%BB%E7%BB%93.assets/image-20200710154343692.png) 259 | 260 | - 删除几条数据 261 | 262 | ![image-20200710154514531](%E4%BC%9A%E8%AE%AE%E7%AE%A1%E7%90%86%E7%B3%BB%E7%BB%9F%E5%AE%9E%E6%97%B6%E5%88%86%E6%9E%90%E6%80%BB%E7%BB%93.assets/image-20200710154514531.png) 263 | 264 | **关闭Canal** 265 | 266 | ```shell 267 | # 不用的时候一定要通过这个命令关闭,如果是用kill或者关机,当再次启动依然会提示要先执行stop.sh脚本后才能再启动。 268 | $CANAL_HOME/bin/stop.sh 269 | ``` 270 | 271 | **备注:**如果我们不使用Kafka作为Canal客户端,我们也可以用代码编写自己的Canal客户端,然后在代码中指定我们的数据去向。此时只需要将canal.properties配置文件中的`canal.serverMode`值改为`tcp`。**编写我们的客户端代码。** 272 | 273 | ### 2.2.实时计算框架去消费Kafka中的数据(Flink) 274 | 275 | 通过上一步已经可以获取到`cannal_test`库中的增量数据,并且可以将变化的数据实时推送到Kafka中。Kafka接收到的数据是一条`Json`格式的数据。我们需要对 INSERT 和 UPDATE 类型的数据处理。 276 | 277 | #### 2.2.1 源系统表信息 278 | 279 | `Mysql`数据建表语句如下: 280 | 281 | - t_meeting_info(会议室预定信息表,这张表数据会实时更新) 282 | 283 | ```sql 284 | CREATE TABLE `t_meeting_info` ( 285 | `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键id', 286 | `meeting_code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '会议业务唯一编号', 287 | `msite` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议名称', 288 | `mcontent` varchar(4096) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议内容', 289 | `attend_count` int(5) DEFAULT NULL COMMENT '参会人数', 290 | `type` int(5) DEFAULT NULL COMMENT '会议类型 1 普通会议 2 融合会议 3 视频会议 4 电话会议', 291 | `status` int(255) DEFAULT NULL COMMENT '会议状态 ', 292 | `address_id` int(11) DEFAULT NULL COMMENT '会议室id', 293 | `email` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人邮箱', 294 | `contact_tel` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '联系电话', 295 | `create_user_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人姓名', 296 | `create_user_id` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人工号', 297 | `creator_org` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人组织', 298 | `mstart_date` datetime DEFAULT NULL COMMENT 
'会议开始时间', 299 | `mend_date` datetime DEFAULT NULL COMMENT '会议结束时间', 300 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 301 | `update_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '更新人', 302 | `update_time` datetime DEFAULT NULL COMMENT '更新时间', 303 | `company` int(10) DEFAULT NULL COMMENT '会议所在属地code', 304 | `sign_status` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '预留字段', 305 | PRIMARY KEY (`id`) USING BTREE, 306 | KEY `t_meeting_info_meeting_code_index` (`meeting_code`) USING BTREE, 307 | KEY `t_meeting_info_address_id_index` (`address_id`) USING BTREE, 308 | KEY `t_meeting_info_create_user_id_index` (`create_user_id`) 309 | ) ENGINE=InnoDB AUTO_INCREMENT=65216 DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC COMMENT='会议主表'; 310 | ``` 311 | 312 | - t_meeting_location(属地表,地区维表) 313 | 314 | ```sql 315 | CREATE TABLE `t_meeting_location` ( 316 | `id` int(11) NOT NULL AUTO_INCREMENT, 317 | `short_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '属地简称', 318 | `full_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '属地全称', 319 | `code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '属地code', 320 | `region_id` int(11) DEFAULT NULL COMMENT '地区id', 321 | `create_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人', 322 | `update_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '更新人', 323 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 324 | `update_time` datetime DEFAULT NULL COMMENT '更新时间', 325 | PRIMARY KEY (`id`) USING BTREE, 326 | UNIQUE KEY `t_meeting_location_code_uindex` (`code`) USING BTREE 327 | ) ENGINE=InnoDB AUTO_INCREMENT=103 DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC COMMENT='属地表'; 328 | ``` 329 | 330 | - t_meeting_address(会议室属地表,会议室维表) 331 | 332 | ```sql 333 | CREATE TABLE `t_meeting_address` ( 334 | `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键id', 335 | `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议室名称', 336 | `location` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '所在属地', 337 | `shared` int(3) DEFAULT NULL COMMENT '是否共享 0 默认不共享 1 全部共享 2 选择性共享', 338 | `cost` int(10) DEFAULT NULL COMMENT '每小时成本', 339 | `size` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '会议室容量大小', 340 | `bvm_ip` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT 'BVM IP', 341 | `type` int(2) DEFAULT NULL COMMENT '会议室类型 1 普通会议室 2 视频会议室', 342 | `create_time` datetime DEFAULT NULL COMMENT '创建时间', 343 | `create_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '创建人', 344 | `update_time` datetime DEFAULT NULL COMMENT '更新时间', 345 | `update_user` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci DEFAULT NULL COMMENT '更新人', 346 | `status` int(2) DEFAULT NULL COMMENT '是否启用 ,0 未启用 1已启用 2已删除', 347 | `order` int(5) DEFAULT NULL COMMENT '排序', 348 | `approve` int(2) DEFAULT NULL COMMENT '是否审批 0 不审批 1 审批', 349 | PRIMARY KEY (`id`) USING BTREE, 350 | KEY `t_meeting_address_location_index` (`location`) USING BTREE, 351 | KEY `order` (`order`) USING BTREE 352 | ) ENGINE=InnoDB AUTO_INCREMENT=554 DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC COMMENT='会议室表'; 353 | ``` 354 | 355 | #### 2.2.2 解析`JSON`格式日志 356 | 357 | **`Json`格式示例** 358 | 359 | - Insert 360 | 361 | ```json 362 | { 363 | "data":[{ 364 | "id":"18", 365 | 
"meeting_code":"M201907080071", 366 | "msite":"项目会议", 367 | "mcontent":"1", 368 | "attend_count":"5", 369 | "type":"1", 370 | "status":"5", 371 | "address_id":"72", 372 | "email":"*******", 373 | "contact_tel":"+86 *******", 374 | "create_user_name":"*******", 375 | "create_user_id":"*******", 376 | "creator_org":"*******", 377 | "mstart_date":"2019-07-19 08:30:00", 378 | "mend_date":"2019-07-19 18:00:00", 379 | "create_time":"2019-07-08 08:37:07", 380 | "update_user":null, 381 | "update_time":null, 382 | "company":"100", 383 | "sign_status":null 384 | }], 385 | "database":"canal_test", 386 | "es":1595491574000, 387 | "id":41327, 388 | "isDdl":false, 389 | "mysqlType":{ 390 | "id":"int(11)", 391 | "meeting_code":"varchar(255)", 392 | "msite":"varchar(255)", 393 | "mcontent":"varchar(4096)", 394 | "attend_count":"int(5)", 395 | "type":"int(5)", 396 | "status":"int(255)", 397 | "address_id":"int(11)", 398 | "email":"varchar(255)", 399 | "contact_tel":"varchar(255)", 400 | "create_user_name":"varchar(255)", 401 | "create_user_id":"varchar(100)", 402 | "creator_org":"varchar(255)", 403 | "mstart_date":"datetime", 404 | "mend_date":"datetime", 405 | "create_time":"datetime", 406 | "update_user":"varchar(255)", 407 | "update_time":"datetime", 408 | "company":"int(10)", 409 | "sign_status":"varchar(255)" 410 | }, 411 | "old":null, 412 | "sql":"", 413 | "sqlType":{ "id":4,"meeting_code":12, 414 | "msite":12, 415 | "mcontent":12, 416 | "attend_count":4, 417 | "type":4,"status":4, 418 | "address_id":4, 419 | "email":12, 420 | "contact_tel":12, 421 | "create_user_name":12, 422 | "create_user_id":12, 423 | "creator_org":12, 424 | "mstart_date":93, 425 | "mend_date":93, 426 | "create_time":93, 427 | "update_user":12, 428 | "update_time":93, 429 | "company":4, 430 | "sign_status":12 431 | }, 432 | "table":"t_meeting_info", 433 | "ts":1595491574978, 434 | "type":"INSERT" 435 | } 436 | ``` 437 | 438 | - Update 439 | 440 | ```json 441 | {"data":[{ 442 | "id":"18", 443 | "meeting_code":"M201907080071", 444 | "msite":"项目会议", 445 | "mcontent":"1", 446 | "attend_count":"5", 447 | "type":"1", 448 | "status":"5", 449 | "address_id":"72", 450 | "email":"*******", 451 | "contact_tel":"+86 *******", 452 | "create_user_name":"*******", 453 | "create_user_id":"*******", 454 | "creator_org":"*******", 455 | "mstart_date":"2019-07-20 08:30:00", 456 | "mend_date":"2019-07-20 18:00:00", 457 | "create_time":"2019-07-08 08:37:07", 458 | "update_user":null, 459 | "update_time":null, 460 | "company":"100", 461 | "sign_status":null}], 462 | "database":"canal_test", 463 | "es":1595492169000, 464 | "id":41368, 465 | "isDdl":false, 466 | "mysqlType":{ 467 | "id":"int(11)", 468 | "meeting_code":"varchar(255)", 469 | "msite":"varchar(255)", 470 | "mcontent":"varchar(4096)", 471 | "attend_count":"int(5)", 472 | "type":"int(5)", 473 | "status":"int(255)", 474 | "address_id":"int(11)", 475 | "email":"varchar(255)", 476 | "contact_tel":"varchar(255)", 477 | "create_user_name":"varchar(255)", 478 | "create_user_id":"varchar(100)", 479 | "creator_org":"varchar(255)", 480 | "mstart_date":"datetime", 481 | "mend_date":"datetime", 482 | "create_time":"datetime", 483 | "update_user":"varchar(255)", 484 | "update_time":"datetime", 485 | "company":"int(10)", 486 | "sign_status":"varchar(255)" 487 | }, 488 | "old":[{ 489 | "mstart_date":"2019-07-19 08:30:00", 490 | "mend_date":"2019-07-19 18:00:00"}], 491 | "sql":"", 492 | "sqlType":{ 493 | "id":4,"meeting_code":12, 494 | "msite":12, 495 | "mcontent":12, 496 | "attend_count":4, 497 | "type":4, 
498 | "status":4, 499 | "address_id":4, 500 | "email":12, 501 | "contact_tel":12, 502 | "create_user_name":12, 503 | "create_user_id":12, 504 | "creator_org":12, 505 | "mstart_date":93, 506 | "mend_date":93, 507 | "create_time":93, 508 | "update_user":12, 509 | "update_time":93, 510 | "company":4, 511 | "sign_status":12}, 512 | "table":"t_meeting_info", 513 | "ts":1595492169315, 514 | "type":"UPDATE"} 515 | ``` 516 | 517 | - Delete 518 | 519 | ```json 520 | {"data":[{ 521 | "id":"18", 522 | "meeting_code":"M201907080071", 523 | "msite":"项目会议", 524 | "mcontent":"1", 525 | "attend_count":"5", 526 | "type":"1", 527 | "status":"5", 528 | "address_id":"72", 529 | "email":"*******", 530 | "contact_tel":"+86 *******", 531 | "create_user_name":"*******", 532 | "create_user_id":"*******", 533 | "creator_org":"*******", 534 | "mstart_date":"2019-07-20 08:30:00", 535 | "mend_date":"2019-07-20 18:00:00", 536 | "create_time":"2019-07-08 08:37:07", 537 | "update_user":null, 538 | "update_time":null, 539 | "company":"100", 540 | "sign_status":null 541 | }], 542 | "database":"canal_test", 543 | "es":1595492208000, 544 | "id":41372, 545 | "isDdl":false, 546 | "mysqlType":{ 547 | "id":"int(11)", 548 | "meeting_code":"varchar(255)", 549 | "msite":"varchar(255)", 550 | "mcontent":"varchar(4096)", 551 | "attend_count":"int(5)", 552 | "type":"int(5)", 553 | "status":"int(255)", 554 | "address_id":"int(11)", 555 | "email":"varchar(255)", 556 | "contact_tel":"varchar(255)", 557 | "create_user_name":"varchar(255)", 558 | "create_user_id":"varchar(100)", 559 | "creator_org":"varchar(255)", 560 | "mstart_date":"datetime", 561 | "mend_date":"datetime", 562 | "create_time":"datetime", 563 | "update_user":"varchar(255)", 564 | "update_time":"datetime", 565 | "company":"int(10)", 566 | "sign_status":"varchar(255)" 567 | }, 568 | "old":null, 569 | "sql":"", 570 | "sqlType":{ 571 | "id":4, 572 | "meeting_code":12, 573 | "msite":12, 574 | "mcontent":12, 575 | "attend_count":4, 576 | "type":4, 577 | "status":4, 578 | "address_id":4, 579 | "email":12, 580 | "contact_tel":12, 581 | "create_user_name":12, 582 | "create_user_id":12, 583 | "creator_org":12, 584 | "mstart_date":93, 585 | "mend_date":93, 586 | "create_time":93, 587 | "update_user":12, 588 | "update_time":93, 589 | "company":4, 590 | "sign_status":12 591 | }, 592 | "table":"t_meeting_info", 593 | "ts":1595492208356, 594 | "type":"DELETE"} 595 | ``` 596 | 597 | **`Json`格式解释** 598 | 599 | - **data**:最新的数据,为JSON数组,如果是插入则表示最新插入的数据;如果是更新,则表示更新后的最新数据;如果是删除,则表示被删除的数据 600 | - **database**:数据库名称 601 | - **es**:事件时间,13位的时间戳 602 | - **id**:事件操作的序列号,1,2,3 603 | - **isDdl**:是否是DDL操作 604 | - **mysql Type**:字段类型 605 | - **old**:旧数据 606 | - **pkNames**:主键名称 607 | - **sql**:SQL语句 608 | - **sqlType**:经过Canal转换处理的,unsigned int 会被转化为Long,unsigned long会被转换为BigDecimal 609 | - **table**:表名 610 | - **ts**:日志时间 611 | - **type**:操作类型,例如DELETE、UPDATE、INSERT 612 | 613 | **解析代码** 614 | 615 | 需要从info这个表里取:`id(int)`,`meeting_code(varchar)`,`address_id(int)`,`mstart_date(datetime)`,`mend_date(datetime)` 616 | 617 | 地区维表: 618 | 619 | ```sql 620 | SELECT tma.id AS meetingroom_id,tma.name as meetingroom_name,tma.location as location_id,tml.full_name as location_name,tmr.`name` AS city 621 | FROM t_meeting_address as tma 622 | LEFT JOIN t_meeting_location AS tml 623 | ON tma.location=tml.code 624 | LEFT JOIN t_meeting_region AS tmr ON tml.region_id=tmr.id 625 | ``` 626 | 627 | ```java 628 | SingleOutputStreamOperator> meeting_stream=env.addSource(consumer) 629 | .filter(new FilterFunction() { 
//过滤掉JSON格式中的DDL操作 630 | @Override 631 | public boolean filter(String jsonVal) throws Exception { 632 | JSONObject record= JSON.parseObject(jsonVal, Feature.OrderedField); 633 | //json格式:"isDdl":false 634 | return record.getString("isDdl").equals("false"); 635 | } 636 | }) 637 | .map(new MapFunction() { 638 | @Override 639 | public String map(String jsonvalue) throws Exception { 640 | StringBuilder fieldsBuilder=new StringBuilder(); 641 | StringBuilder fieldValue=new StringBuilder(); 642 | //解析Json数据 643 | JSONObject record=JSON.parseObject(jsonvalue,Feature.OrderedField); 644 | //获取最新的字段值 645 | JSONArray data=record.getJSONArray("data"); 646 | //遍历,字段值得JSON数组,只有一个元素 647 | for (int i = 0; i entry:obj.entrySet()){ 659 | fieldValue.append(entry.getValue()); 660 | fieldValue.append(fieldDelimiter); 661 | // fieldsBuilder.append(fieldDelimiter); 662 | // fieldsBuilder.append(entry.getValue());//获取表字段数据 663 | } 664 | } 665 | } 666 | return fieldValue.toString(); 667 | } 668 | }).map(new MapFunction>() { 669 | @Override 670 | public Tuple5 map(String field) throws Exception { 671 | Integer meeting_id= Integer.valueOf(field.split("[\\,]")[0]); 672 | String meeting_code=field.split("[\\,]")[1]; 673 | Integer address_id= Integer.valueOf(field.split("[\\,]")[7]); 674 | String mstart_date=field.split("[\\,]")[13]; 675 | String mend_date=field.split("[\\,]")[14]; 676 | return new Tuple5(meeting_id, meeting_code,address_id,mstart_date,mend_date) ; 677 | } 678 | }); 679 | ``` 680 | 681 | #### 2.2.3 Flink计算逻辑 682 | 683 | ```java 684 | package com; 685 | 686 | import com.Seetings.DimensionTableSeetings; 687 | import com.alibaba.fastjson.JSON; 688 | import com.model.Meeting; 689 | import com.sinks.SinkToGreenplum; 690 | import com.Seetings.CreateJDBCInputFormat; 691 | import com.sqlquery.DimensionSQLQuery; 692 | import com.sqlquery.JoinedSQLQuery; 693 | import com.utils.JsonFilter; 694 | import com.utils.KafkaConfigUtil; 695 | import com.Seetings.StreamTableSeetings; 696 | import com.utils.Tuple2ToMeeting; 697 | import org.apache.flink.api.common.functions.FilterFunction; 698 | import org.apache.flink.api.common.functions.MapFunction; 699 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 700 | import org.apache.flink.api.common.time.Time; 701 | import org.apache.flink.api.java.io.jdbc.JDBCInputFormat; 702 | import org.apache.flink.api.java.tuple.Tuple2; 703 | import org.apache.flink.api.java.tuple.Tuple5; 704 | import org.apache.flink.streaming.api.CheckpointingMode; 705 | import org.apache.flink.streaming.api.TimeCharacteristic; 706 | import org.apache.flink.streaming.api.datastream.DataStream; 707 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 708 | import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 709 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 710 | import org.apache.flink.streaming.api.functions.ProcessFunction; 711 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; 712 | import org.apache.flink.table.api.EnvironmentSettings; 713 | import org.apache.flink.table.api.Table; 714 | import org.apache.flink.table.api.java.StreamTableEnvironment; 715 | import org.apache.flink.types.Row; 716 | import org.apache.flink.util.Collector; 717 | import org.slf4j.Logger; 718 | import org.slf4j.LoggerFactory; 719 | import java.util.Properties; 720 | 721 | /** 722 | * Flink 实时计算MysqlBinLog日志,并写入数据库 723 | * */ 724 | public class Main { 725 | private static Logger log = 
LoggerFactory.getLogger(Main.class); 726 | public static void main(String[] args) throws Exception { 727 | /** 728 | * Flink 配置 729 | * */ 730 | StreamExecutionEnvironment env=StreamExecutionEnvironment.getExecutionEnvironment(); 731 | env.getConfig().disableSysoutLogging(); //设置此可以屏蔽掉日记打印情况 732 | env.enableCheckpointing(1000);////非常关键,一定要设置启动检查点 733 | env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);//设置事件时间 734 | env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE); 735 | EnvironmentSettings bsSettings=EnvironmentSettings.newInstance()//使用Blink planner、创建TableEnvironment,并且设置状态过期时间,避免Job OOM 736 | .useBlinkPlanner() 737 | .inStreamingMode() 738 | .build(); 739 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env,bsSettings); 740 | tEnv.getConfig().setIdleStateRetentionTime(Time.days(1),Time.days(2)); 741 | /** 742 | * Kafka配置 743 | * */ 744 | Properties properties = KafkaConfigUtil.buildKafkaProps();//kafka参数配置 745 | FlinkKafkaConsumer consumer = new FlinkKafkaConsumer<>(KafkaConfigUtil.topic, new SimpleStringSchema(), properties); 746 | /** 747 | * 将Kafka-consumer的数据作为源 748 | * 并对Json格式进行解析 749 | * */ 750 | SingleOutputStreamOperator> meeting_stream=env.addSource(consumer) 751 | .filter(new FilterFunction() { //过滤掉JSON格式中的DDL操作 752 | @Override 753 | public boolean filter(String jsonVal) throws Exception { 754 | //json格式解析:"isDdl":false,"table":t_meeting_info,"type":"INSERT" 755 | return new JsonFilter().getJsonFilter(jsonVal); 756 | } 757 | }) 758 | .map(new MapFunction() { 759 | @Override 760 | //获取字段数据 761 | public String map(String jsonvalue) throws Exception { 762 | return new JsonFilter().dataMap(jsonvalue); 763 | } 764 | }).map(new MapFunction>() { 765 | @Override 766 | public Tuple5 map(String dataField) throws Exception { 767 | return new JsonFilter().fieldMap(dataField); 768 | } 769 | }); 770 | /** 771 | * 将流式数据(元组类型)注册为表 772 | * 会议室维表同步 773 | */ 774 | tEnv.registerDataStream(StreamTableSeetings.streamTableName,meeting_stream,StreamTableSeetings.streamField); 775 | CreateJDBCInputFormat createJDBCFormat=new CreateJDBCInputFormat(); 776 | JDBCInputFormat jdbcInputFormat=createJDBCFormat.createJDBCInputFormat(); 777 | DataStreamSource dataStreamSource=env.createInput(jdbcInputFormat);//字段类型 778 | tEnv.registerDataStream(DimensionTableSeetings.DimensionTableName,dataStreamSource,DimensionTableSeetings.DimensionTableField); 779 | 780 | //流表与维表join,并对结果表进行查询 781 | Table meeting_info=tEnv.scan(StreamTableSeetings.streamTableName); 782 | Table meeting_address=tEnv.sqlQuery(DimensionSQLQuery.Query); 783 | Table joined=tEnv.sqlQuery(JoinedSQLQuery.Query); 784 | /** 785 | 对结果表进行查询,TO_TIMESTAMP是Flink的时间函数,对时间格式进行转换,具体请看官网 786 | 只对开始的会议进行转换。 统计空置率指的是统计当下时间里,已经在会议中的会议室,还是已经预定的呢 787 | Table joined=tEnv.sqlQuery("select meeting_id, meeting_code,TO_TIMESTAMP(mstart_date),TO_TIMESTAMP(mend_date),proctime.proctime " + 788 | "from meeting_info " + 789 | "where TO_TIMESTAMP(mstart_date)> stream1 =tEnv.toRetractStream(joined,Row.class).filter(new FilterFunction>() { 797 | @Override 798 | public boolean filter(Tuple2 booleanRowTuple2) throws Exception { 799 | return booleanRowTuple2.f0; 800 | } 801 | }); 802 | stream1.print(); 803 | */ 804 | //适用于维表查询的情况2 805 | DataStream> stream_tosink =tEnv.toRetractStream(joined,Row.class); 806 | stream_tosink.process(new ProcessFunction, Object>() { 807 | private Tuple2 booleanRowTuple2; 808 | private ProcessFunction, Object>.Context context; 809 | private Collector collector; 810 | @Override 811 | public void 
processElement(Tuple2 booleanRowTuple2, Context context, Collector collector) throws Exception { 812 | if(booleanRowTuple2.f0){ 813 | System.out.println(JSON.toJSONString(booleanRowTuple2.f1)); 814 | } 815 | } 816 | }); 817 | stream_tosink.print();//测试输出 818 | 819 | //转换Tuple元组到实体类对象 820 | DataStream dataStream=stream_tosink.map(new MapFunction, Meeting>() { 821 | @Override 822 | public Meeting map(Tuple2 booleanRowTuple2) throws Exception { 823 | return new Tuple2ToMeeting().getTuple2ToMeeting(booleanRowTuple2); 824 | } 825 | }); 826 | /** 827 | * Sink 828 | * */ 829 | dataStream.print(); 830 | //dataStream.addSink(new SinkMeetingToMySQL()); //测试ok 831 | //dataStream.addSink(new SinkToMySQL());//测试ok 832 | dataStream.addSink(new SinkToGreenplum());//测试ok 833 | //执行 834 | env.execute("Meeting Streaming job"); 835 | } 836 | } 837 | ``` 838 | 839 | ### 2.3 将结果写入数据库 840 | 841 | ![img](%E4%BC%9A%E8%AE%AE%E7%AE%A1%E7%90%86%E7%B3%BB%E7%BB%9F%E5%AE%9E%E6%97%B6%E5%88%86%E6%9E%90%E6%80%BB%E7%BB%93.assets/raHGlh.jpg) 842 | 843 | **sink** 的意思也不一定非得说成要把数据存储到某个地方去。其实官网用的 **Connector** 来形容要去的地方更合适,这个 Connector 可以有 MySQL、ElasticSearch、Kafka、Cassandra RabbitMQ 、HDFS等,请看下面这张图片: 844 | 845 | ![img](%E4%BC%9A%E8%AE%AE%E7%AE%A1%E7%90%86%E7%B3%BB%E7%BB%9F%E5%AE%9E%E6%97%B6%E5%88%86%E6%9E%90%E6%80%BB%E7%BB%93.assets/siWsAK.jpg) 846 | 847 | #### 2.3.1 SinkToMysql 848 | 849 | ```java 850 | package com.sinks; 851 | 852 | import com.Seetings.ReadJDBCPro; 853 | import com.model.Meeting; 854 | import org.apache.commons.dbcp2.BasicDataSource; 855 | import org.apache.flink.configuration.Configuration; 856 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 857 | 858 | import java.io.FileInputStream; 859 | import java.io.IOException; 860 | import java.sql.Connection; 861 | import java.sql.PreparedStatement; 862 | import java.util.Properties; 863 | 864 | /** 865 | * sinktoMysql 866 | * 另外一种实现方法 867 | * */ 868 | 869 | public class SinkToMySQL extends RichSinkFunction{ 870 | PreparedStatement ps; 871 | BasicDataSource dataSource; 872 | private Connection connection; 873 | /** 874 | * open() 方法中建立连接,这样不用每次invoke的时候都要建立连接和释放连接 875 | * @param parameters 876 | * @throws Exception 877 | * */ 878 | 879 | @Override 880 | public void open(Configuration parameters) throws Exception{ 881 | super.open(parameters); 882 | dataSource=new BasicDataSource(); 883 | connection=getConnection(dataSource); 884 | String sql="replace into meeting_result(meeting_id, meeting_code, meetingroom_id,meetingroom_name,location_name,city) values(?, ?, ?,?,?,?);"; 885 | ps=this.connection.prepareStatement(sql); 886 | 887 | } 888 | @Override 889 | public void close() throws Exception { 890 | super.close(); 891 | //关闭连接和释放资源 892 | if(connection!=null){ 893 | connection.close(); 894 | } 895 | if(ps!=null){ 896 | connection.close(); 897 | } 898 | } 899 | /** 900 | * 每条数据的插入都需要调用一次invoke()方法 901 | * @param meeting 902 | * @param context 903 | * @throws Exception 904 | * */ 905 | @Override 906 | public void invoke(Meeting meeting,Context context) throws Exception{ 907 | ps.setInt(1,meeting.getMeeting_id()); 908 | ps.setString(2,meeting.getMeeting_code()); 909 | ps.setInt(3,meeting.getMeetingroom_id()); 910 | ps.setString(4,meeting.getMeetingroom_name()); 911 | ps.setString(5,meeting.getLocation_name()); 912 | ps.setString(6,meeting.getCity()); 913 | ps.executeUpdate(); 914 | } 915 | 916 | private static Connection getConnection(BasicDataSource dataSource) { 917 | Properties mysqlprop=new Properties(); 918 | try { 919 | mysqlprop.load(new 
FileInputStream("D:\\flink\\src\\main\\java\\com\\sinks\\database.properties")); 920 | String mysqldriver=mysqlprop.getProperty("mysql_driver"); 921 | String mysqlurl=mysqlprop.getProperty("mysql_url"); 922 | String mysqlusername=mysqlprop.getProperty("mysql_Username"); 923 | String mysqlpassword=mysqlprop.getProperty("mysql_Password"); 924 | 925 | dataSource.setDriverClassName(mysqldriver); 926 | dataSource.setUrl(mysqlurl); 927 | dataSource.setUsername(mysqlusername); 928 | dataSource.setPassword(mysqlpassword); 929 | } catch (IOException e) { 930 | e.printStackTrace(); 931 | } 932 | 933 | //设置连接池的参数 934 | dataSource.setInitialSize(10); 935 | dataSource.setMaxTotal(50); 936 | dataSource.setMinIdle(2); 937 | 938 | Connection con=null; 939 | try{ 940 | con=dataSource.getConnection(); 941 | System.out.println("创建连接池:"+con); 942 | } catch (Exception e) { 943 | System.out.println("-----------mysql get connection has exception,msg=" +e.getMessage()); 944 | } 945 | return con; 946 | } 947 | } 948 | ``` 949 | 950 | #### 2.3.2 SinkToGreenplum 951 | 952 | ```java 953 | package com.sinks; 954 | import com.Seetings.ReadJDBCPro; 955 | import com.model.Meeting; 956 | import org.apache.commons.dbcp2.BasicDataSource; 957 | import org.apache.flink.configuration.Configuration; 958 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 959 | 960 | import java.io.FileInputStream; 961 | import java.io.IOException; 962 | import java.sql.Connection; 963 | import java.sql.PreparedStatement; 964 | import java.util.Properties; 965 | 966 | /** 967 | * sink to Greenplum 968 | * */ 969 | 970 | public class SinkToGreenplum extends RichSinkFunction{ 971 | PreparedStatement ps; 972 | BasicDataSource dataSource; 973 | private Connection connection; 974 | /** 975 | * open() 方法中建立连接,这样不用每次invoke的时候都要建立连接和释放连接 976 | * @param parameters 977 | * @throws Exception 978 | * */ 979 | 980 | @Override 981 | public void open(Configuration parameters) throws Exception{ 982 | super.open(parameters); 983 | dataSource=new BasicDataSource(); 984 | connection=getConnection(dataSource); 985 | String sql="INSERT INTO public .meeting_result(meeting_id, meeting_code, meetingroom_id,meetingroom_name,location_name,city) values(?, ?, ?,?,?,?);"; 986 | ps=this.connection.prepareStatement(sql); 987 | } 988 | @Override 989 | public void close() throws Exception { 990 | super.close(); 991 | //关闭连接和释放资源 992 | if(connection!=null){ 993 | connection.close(); 994 | } 995 | if(ps!=null){ 996 | connection.close(); 997 | } 998 | } 999 | 1000 | /** 1001 | * 每条数据的插入都需要调用一次invoke()方法 1002 | * @param meeting 1003 | * @param context 1004 | * @throws Exception 1005 | * */ 1006 | @Override 1007 | public void invoke(Meeting meeting,Context context) throws Exception{ 1008 | ps.setInt(1,meeting.getMeeting_id()); 1009 | ps.setString(2,meeting.getMeeting_code()); 1010 | ps.setInt(3,meeting.getMeetingroom_id()); 1011 | ps.setString(4,meeting.getMeetingroom_name()); 1012 | ps.setString(5,meeting.getLocation_name()); 1013 | ps.setString(6,meeting.getCity()); 1014 | ps.executeUpdate(); 1015 | System.out.println("插入成功:"+meeting.toString()); 1016 | } 1017 | 1018 | private static Connection getConnection(BasicDataSource dataSource) { 1019 | Properties prop=new Properties(); 1020 | try { 1021 | prop.load(new FileInputStream("D:\\flink\\src\\main\\resources\\database.properties")); 1022 | String driver=prop.getProperty("driver"); 1023 | String url=prop.getProperty("url"); 1024 | String username=prop.getProperty("Username"); 1025 | String 
password=prop.getProperty("Password"); 1026 | 1027 | dataSource.setDriverClassName(driver); 1028 | dataSource.setUrl(url); 1029 | dataSource.setUsername(username); 1030 | dataSource.setPassword(password); 1031 | } catch (IOException e) { 1032 | e.printStackTrace(); 1033 | } 1034 | 1035 | //设置连接池的参数 1036 | dataSource.setInitialSize(10); 1037 | dataSource.setMaxTotal(50); 1038 | dataSource.setMinIdle(2); 1039 | 1040 | Connection con=null; 1041 | try{ 1042 | con=dataSource.getConnection(); 1043 | System.out.println("创建连接池:"+con); 1044 | } catch (Exception e) { 1045 | System.out.println("-----------greenplum get connection has exception,msg=" +e.getMessage()); 1046 | } 1047 | return con; 1048 | } 1049 | } 1050 | ``` 1051 | 1052 | ## 3.可视化方案 1053 | 1054 | - Tableau实时刷新Greenplum,FineBI也可以(秒级) 1055 | - DataV也可以每几秒刷新一次 1056 | - Flink计算后的结果,写入到缓存,前端开发可视化组件进行展示(实时展示)。 1057 | 1058 | ## 4.项目地址 1059 | 1060 | https://github.com/liwei199411/FlinkStreamETL/tree/master 1061 | 1062 | ## 5.参考目录 1063 | 1064 | [1].[基于Spark Streaming + Canal + Kafka对Mysql增量数据实时进行监测分析](https://www.cnblogs.com/itboys/p/10624670.html) 1065 | 1066 | [2].[Canal](https://github.com/alibaba/canal) 1067 | 1068 | [3].[Canal 的 .NET 客户端](https://github.com/dotnetcore/CanalSharp) 1069 | 1070 | [4].[如何基于`MYSQL`做实时计算?](https://www.jianshu.com/p/19ab2cd28c63) 1071 | 1072 | [5].[基于Canal与`Flink`实现数据实时增量同步(一)](https://jiamaoxiang.top/2020/03/05/基于Canal与Flink实现数据实时增量同步-一/) 1073 | 1074 | [6].[美团DB数据同步到数据仓库的架构与实践](https://tech.meituan.com/2018/12/06/binlog-dw.html) 1075 | 1076 | [7].[处理`JSON`格式的日志数据,然后进行流式Join](https://github.com/linweijiang/Flink-Demo/blob/master/src/main/java/utils/BinLogUtil.java) 1077 | 1078 | [8].[`Flink`继续实践:从日志清洗到实时统计内容`PV`等多个指标](https://www.jianshu.com/p/52787491ea23) 1079 | 1080 | [9].[实时数据架构体系建设思路](https://dbaplus.cn/news-73-3184-1.html) 1081 | 1082 | [10].[Flink 流与维表的关联](https://liurio.github.io/2020/03/28/Flink流与维表的关联/) 1083 | 1084 | [11].[Flink DataStream 流表与维表Join(Async I/O)](https://blog.csdn.net/wangpei1949/article/details/96634493) 1085 | 1086 | [12].flink 流表join mysql表 1087 | 1088 | 作者:岳过山丘 1089 | 链接:https://www.jianshu.com/p/44583b98ecbb 1090 | 1091 | [13].flink1.9 使用LookupableTableSource实现异步维表关联 1092 | 1093 | 作者:todd5167 1094 | 链接:https://www.jianshu.com/p/7ebe1ec8aa7c 1095 | 1096 | [14].Flink异步之矛盾-锋利的Async I/O 1097 | 1098 | 作者:王知无 1099 | 链接:https://www.jianshu.com/p/85ee258aa41f 1100 | 1101 | [15].Flink 的时间属性及原理解析 1102 | 1103 | https://blog.csdn.net/zhengzhaoyang122/article/details/107352934?utm_medium=distribute.pc_relevant.none-task-blog-baidujs-3 1104 | 1105 | [16].大屏数据可视化 1106 | https://yyhsong.github.io/iDataV/ 1107 | 1108 | 1109 | 1110 | 1111 | 1112 | 1113 | --------------------------------------------------------------------------------