├── .gitignore ├── Flink-Forward-2020 └── README.md ├── Flink-Forward-Asia-2019-PPT └── README.md ├── Flink-Forward-Asia-2020-PPT └── README.md ├── Flink-Forward-Asia-2021-PPT └── README.md ├── LICENSE ├── README.md ├── books ├── README.md ├── flink-in-action-1.1.md ├── flink-in-action-1.2.md ├── flink-in-action-1.3.md ├── flink-in-action-10.1.md ├── flink-in-action-10.2.md ├── flink-in-action-11.1.md ├── flink-in-action-11.2.md ├── flink-in-action-11.3.md ├── flink-in-action-11.4.md ├── flink-in-action-11.5.md ├── flink-in-action-12.1.md ├── flink-in-action-12.2.md ├── flink-in-action-12.3.md ├── flink-in-action-2.1.md ├── flink-in-action-2.2.md ├── flink-in-action-2.3.md ├── flink-in-action-2.4.md ├── flink-in-action-3.1.md ├── flink-in-action-3.10.md ├── flink-in-action-3.11.md ├── flink-in-action-3.12.md ├── flink-in-action-3.2.md ├── flink-in-action-3.3.md ├── flink-in-action-3.4.md ├── flink-in-action-3.5.md ├── flink-in-action-3.6.md ├── flink-in-action-3.7.md ├── flink-in-action-3.8.md ├── flink-in-action-3.9.md ├── flink-in-action-4.1.md ├── flink-in-action-4.2.md ├── flink-in-action-4.3.md ├── flink-in-action-5.1.md ├── flink-in-action-5.2.md ├── flink-in-action-6.1.md ├── flink-in-action-6.2.md ├── flink-in-action-6.3.md ├── flink-in-action-6.4.md ├── flink-in-action-6.5.md ├── flink-in-action-7.1.md ├── flink-in-action-7.2.md ├── flink-in-action-8.1.md ├── flink-in-action-8.2.md ├── flink-in-action-9.1.md ├── flink-in-action-9.2.md ├── flink-in-action-9.3.md ├── flink-in-action-9.4.md ├── flink-in-action-9.5.md └── flink-in-action-9.6.md ├── flink-learning-basic ├── README.md ├── flink-learning-data-sinks │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── data │ │ │ └── sinks │ │ │ ├── Main.java │ │ │ ├── Main2.java │ │ │ ├── model │ │ │ └── Student.java │ │ │ ├── sinks │ │ │ ├── MySink.java │ │ │ └── SinkToMySQL.java │ │ │ └── utils │ │ │ └── KafkaUtil.java │ │ └── resources │ │ ├── application.properties │ │ ├── logback.xml │ │ └── student.sql ├── flink-learning-data-sources │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── data │ │ │ └── sources │ │ │ ├── Main.java │ │ │ ├── Main2.java │ │ │ ├── ScheduleMain.java │ │ │ ├── model │ │ │ ├── Rule.java │ │ │ └── Student.java │ │ │ ├── sources │ │ │ └── SourceFromMySQL.java │ │ │ └── utils │ │ │ ├── KafkaUtil.java │ │ │ └── MySQLUtil.java │ │ └── resources │ │ ├── application.properties │ │ ├── logback.xml │ │ ├── rule.sql │ │ └── student.sql ├── flink-learning-libraries │ ├── README.md │ ├── flink-learning-libraries-cep │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── libraries │ │ │ │ └── cep │ │ │ │ ├── CEPMain.java │ │ │ │ ├── CombinePatternMain.java │ │ │ │ ├── IndividualPatternQuantifier.java │ │ │ │ └── model │ │ │ │ ├── Alert.java │ │ │ │ ├── Event.java │ │ │ │ └── SubEvent.java │ │ │ └── resources │ │ │ ├── application.properties │ │ │ └── logback.xml │ ├── flink-learning-libraries-state-processor-api │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── libraries │ │ │ │ └── stateProcessApi │ │ │ │ ├── Main.java │ │ │ │ └── StatefulFunctionWithTime.java │ │ │ └── resources │ │ │ ├── application.properties │ │ │ └── logback.xml │ └── pom.xml ├── flink-learning-metrics │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── zhisheng │ │ └── metrics │ │ └── custom │ │ ├── 
CustomCounterMetrics.java │ │ ├── CustomCounterMetrics2.java │ │ ├── CustomCounterMetrics3.java │ │ ├── CustomGaugeMetrics.java │ │ ├── CustomHistogramMetrics.java │ │ └── CustomMeterMetrics.java ├── flink-learning-state │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── state │ │ │ ├── Main.java │ │ │ ├── metadata │ │ │ └── MetadataSerializer.java │ │ │ ├── operator │ │ │ └── state │ │ │ │ ├── UnionListStateExample.java │ │ │ │ └── util │ │ │ │ └── UnionListStateUtil.java │ │ │ └── queryablestate │ │ │ ├── ClimateLog.java │ │ │ ├── QueryClient.java │ │ │ └── QuerybleStateStream.java │ │ └── resources │ │ └── _metadata ├── flink-learning-window │ ├── README.md │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ ├── constant │ │ │ │ └── WindowConstant.java │ │ │ │ ├── function │ │ │ │ ├── CustomSource.java │ │ │ │ ├── CustomTrigger.java │ │ │ │ └── LineSplitter.java │ │ │ │ └── window │ │ │ │ ├── CustomTriggerMain.java │ │ │ │ ├── Main.java │ │ │ │ ├── Main2.java │ │ │ │ ├── Main3.java │ │ │ │ ├── Main4.java │ │ │ │ ├── Main5.java │ │ │ │ └── WindowAll.java │ │ └── resources │ │ │ ├── application.properties │ │ │ └── logback.xml │ │ └── test │ │ └── java │ │ └── TestWindowSize.java └── pom.xml ├── flink-learning-cdc ├── README.md ├── flink-db2-cdc │ └── pom.xml ├── flink-mongodb-cdc │ └── pom.xml ├── flink-mysql-cdc │ └── pom.xml ├── flink-oceanbase-cdc │ └── pom.xml ├── flink-oracle-cdc │ └── pom.xml ├── flink-postgres-cdc │ └── pom.xml ├── flink-sqlserver-cdc │ └── pom.xml ├── flink-tidb-cdc │ └── pom.xml └── pom.xml ├── flink-learning-common ├── README.md ├── pom.xml └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── common │ │ │ ├── constant │ │ │ ├── MachineConstant.java │ │ │ └── PropertiesConstants.java │ │ │ ├── model │ │ │ ├── LogEvent.java │ │ │ ├── MetricEvent.java │ │ │ ├── OrderEvent.java │ │ │ ├── OrderLineEvent.java │ │ │ ├── ProductEvent.java │ │ │ ├── ShopEvent.java │ │ │ ├── UserEvent.java │ │ │ └── WordEvent.java │ │ │ ├── schemas │ │ │ ├── KafkaMetricSchema.java │ │ │ ├── LogSchema.java │ │ │ ├── MetricSchema.java │ │ │ ├── OrderLineSchema.java │ │ │ ├── OrderSchema.java │ │ │ ├── ProductSchema.java │ │ │ ├── ShopSchema.java │ │ │ └── UserSchema.java │ │ │ ├── utils │ │ │ ├── CheckPointUtil.java │ │ │ ├── DateUtil.java │ │ │ ├── ExecutionEnvUtil.java │ │ │ ├── GsonUtil.java │ │ │ ├── HttpUtil.java │ │ │ └── KafkaConfigUtil.java │ │ │ └── watermarks │ │ │ └── MetricWatermark.java │ └── resources │ │ └── product.sql │ └── test │ └── java │ └── com │ └── zhisheng │ └── common │ └── utils │ └── DateUtilTests.java ├── flink-learning-configuration-center ├── flink-learning-configuration-center-apollo │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── configuration │ │ │ └── apollo │ │ │ └── FlinkApolloTest.java │ │ └── resources │ │ └── META-INF │ │ └── app.properties ├── flink-learning-configuration-center-nacos │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── zhisheng │ │ └── configuration │ │ └── nacos │ │ ├── FlinkNacosTest.java │ │ └── FlinkNacosTest2.java └── pom.xml ├── flink-learning-connectors ├── README.md ├── flink-learning-connectors-activemq │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── activemq │ │ │ └── Main.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── 
flink-learning-connectors-cassandra │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── cassandra │ │ │ ├── batch │ │ │ ├── BatchExample.java │ │ │ ├── BatchPojoExample.java │ │ │ └── CustomCassandraAnnotatedPojo.java │ │ │ └── streaming │ │ │ ├── CassandraPojoSinkExample.java │ │ │ ├── CassandraTupleSinkExample.java │ │ │ ├── CassandraTupleWriteAheadSinkExample.java │ │ │ └── Message.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-clickhouse │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── clickhouse │ │ │ ├── ClickhouseSink.java │ │ │ ├── applied │ │ │ ├── ClickhouseSinkBuffer.java │ │ │ ├── ClickhouseSinkManager.java │ │ │ ├── ClickhouseSinkScheduledChecker.java │ │ │ └── ClickhouseWriter.java │ │ │ ├── model │ │ │ ├── ClickhouseClusterSettings.java │ │ │ ├── ClickhouseRequestBlank.java │ │ │ ├── ClickhouseSinkCommonParams.java │ │ │ └── ClickhouseSinkConsts.java │ │ │ └── util │ │ │ ├── ConfigUtil.java │ │ │ └── ThreadUtil.java │ │ └── resources │ │ ├── application.properties │ │ ├── logback.xml │ │ └── reference.conf ├── flink-learning-connectors-es │ ├── flink-learning-connectors-es-common │ │ └── pom.xml │ ├── flink-learning-connectors-es-universal │ │ ├── README.md │ │ └── pom.xml │ ├── flink-learning-connectors-es5 │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── connectors │ │ │ │ └── es5 │ │ │ │ └── Sink2ES5Main.java │ │ │ └── resources │ │ │ └── logback.xml │ ├── flink-learning-connectors-es6 │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── connectors │ │ │ │ └── es6 │ │ │ │ ├── Sink2ES6Main.java │ │ │ │ └── utils │ │ │ │ ├── ESSinkUtil.java │ │ │ │ └── RetryRequestFailureHandler.java │ │ │ └── resources │ │ │ ├── application.properties │ │ │ ├── es_index_template.json │ │ │ └── logback.xml │ ├── flink-learning-connectors-es7 │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── connectors │ │ │ │ └── es7 │ │ │ │ ├── Sink2ES7Main.java │ │ │ │ └── util │ │ │ │ ├── ESSinkUtil.java │ │ │ │ └── RetryRequestFailureHandler.java │ │ │ └── resources │ │ │ ├── application.properties │ │ │ ├── es_index_template.json │ │ │ └── logback.xml │ └── pom.xml ├── flink-learning-connectors-flume │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── flume │ │ │ ├── FlumeEventBuilder.java │ │ │ ├── FlumeSink.java │ │ │ ├── Main.java │ │ │ └── utils │ │ │ └── FlumeUtil.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-gcp-pubsub │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── gcp │ │ │ └── pubsub │ │ │ ├── IntegerSerializer.java │ │ │ ├── Main.java │ │ │ └── PubSubPublisherUtil.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-hbase │ ├── README.md │ ├── flink-learning-connectors-hbase-1.4 │ │ └── pom.xml │ ├── flink-learning-connectors-hbase-2.2 │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── hbase │ │ │ │ ├── HBaseStreamWriteMain.java │ │ │ │ ├── Main.java │ │ │ │ └── constant │ │ │ │ └── 
HBaseConstant.java │ │ │ └── resources │ │ │ ├── application.properties │ │ │ └── logback.xml │ └── pom.xml ├── flink-learning-connectors-hdfs │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── hdfs │ │ │ └── Main.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-hive │ ├── README.md │ ├── flink-learning-connectors-hive-1.2.2 │ │ └── pom.xml │ ├── flink-learning-connectors-hive-2.2.0 │ │ └── pom.xml │ ├── flink-learning-connectors-hive-2.3.6 │ │ └── pom.xml │ ├── flink-learning-connectors-hive-3.1.2 │ │ └── pom.xml │ └── pom.xml ├── flink-learning-connectors-influxdb │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── influxdb │ │ │ ├── InfluxDBConfig.java │ │ │ ├── InfluxDBSink.java │ │ │ └── Main.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-jdbc │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── jdbc │ │ │ └── Main.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-kafka │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── kafka │ │ │ ├── FlinkKafkaConsumerTest1.java │ │ │ ├── FlinkKafkaConsumerTest2.java │ │ │ ├── FlinkKafkaProducerTest1.java │ │ │ ├── FlinkKafkaSchemaTest1.java │ │ │ ├── JSONKeyValueDeserializationSchemaTest.java │ │ │ ├── KafkaDeserializationSchemaTest.java │ │ │ └── Main.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-kudu │ ├── README.md │ └── pom.xml ├── flink-learning-connectors-mysql │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── mysql │ │ │ ├── Main.java │ │ │ ├── model │ │ │ └── Student.java │ │ │ ├── sinks │ │ │ └── SinkToMySQL.java │ │ │ └── utils │ │ │ └── KafkaUtil.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-netty │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── zhisheng │ │ └── connectors │ │ └── netty │ │ └── Main.java ├── flink-learning-connectors-nifi │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── nifi │ │ │ ├── NiFiSinkMain.java │ │ │ └── NiFiSourceMain.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-pulsar │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── pulsar │ │ │ ├── PulsarSinkMain.java │ │ │ └── PulsarSourceMain.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-rabbitmq │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── rabbitmq │ │ │ ├── Main.java │ │ │ ├── Main1.java │ │ │ ├── model │ │ │ └── EndPoint.java │ │ │ └── utils │ │ │ └── RabbitMQProducerUtil.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml ├── flink-learning-connectors-redis │ ├── README.md │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── connectors │ │ │ │ └── redis │ │ │ │ ├── Main.java │ │ │ │ └── utils │ │ │ 
│ └── ProductUtil.java │ │ └── resources │ │ │ ├── application.properties │ │ │ └── logback.xml │ │ └── test │ │ └── java │ │ └── RedisTest.java ├── flink-learning-connectors-rocketmq │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── connectors │ │ │ └── rocketmq │ │ │ ├── RocketMQConfig.java │ │ │ ├── RocketMQSink.java │ │ │ ├── RocketMQSource.java │ │ │ ├── RocketMQUtils.java │ │ │ ├── RunningChecker.java │ │ │ ├── common │ │ │ ├── selector │ │ │ │ ├── DefaultTopicSelector.java │ │ │ │ ├── SimpleTopicSelector.java │ │ │ │ └── TopicSelector.java │ │ │ └── serialization │ │ │ │ ├── KeyValueDeserializationSchema.java │ │ │ │ ├── KeyValueSerializationSchema.java │ │ │ │ ├── SimpleKeyValueDeserializationSchema.java │ │ │ │ └── SimpleKeyValueSerializationSchema.java │ │ │ └── example │ │ │ ├── RocketMQFlinkExample.java │ │ │ ├── SimpleConsumer.java │ │ │ └── SimpleProducer.java │ │ └── resources │ │ ├── application.properties │ │ └── logback.xml └── pom.xml ├── flink-learning-core ├── pom.xml └── src │ └── main │ └── java │ └── com │ └── zhisheng │ └── core │ ├── exception │ └── FlinkRuntimeException.java │ ├── factory │ ├── DeserializerFactory.java │ ├── SerializerFactory.java │ ├── SinkFactory.java │ └── SourceFactory.java │ └── utils │ ├── ArrayUtils.java │ ├── CollectionUtil.java │ ├── ExecutorUtils.java │ ├── StringUtils.java │ └── TimeUtils.java ├── flink-learning-datalake ├── README.md ├── flink-learning-datalake-deltalake │ └── pom.xml ├── flink-learning-datalake-hudi │ └── pom.xml ├── flink-learning-datalake-iceberg │ └── pom.xml ├── flink-learning-table-store │ ├── README.md │ └── pom.xml └── pom.xml ├── flink-learning-examples ├── README.md ├── pom.xml └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── examples │ │ │ ├── batch │ │ │ ├── accumulator │ │ │ │ ├── Main.java │ │ │ │ └── Main2.java │ │ │ └── wordcount │ │ │ │ └── Main.java │ │ │ ├── streaming │ │ │ ├── async │ │ │ │ └── AsyncIOExample.java │ │ │ ├── broadcast │ │ │ │ ├── BroadcastAlertRule.java │ │ │ │ ├── DataSetBrocastMain.java │ │ │ │ ├── GetAlarmNotifyData.java │ │ │ │ ├── Main.java │ │ │ │ ├── Main2.java │ │ │ │ └── MyBroadcastProcessFunction.java │ │ │ ├── chain │ │ │ │ ├── DefaultChainMain.java │ │ │ │ ├── DisableChainMain.java │ │ │ │ ├── DisableChainMain1.java │ │ │ │ ├── DisableChainMain3.java │ │ │ │ ├── ExecutionPlanMain.java │ │ │ │ ├── SharingGroupMain.java │ │ │ │ └── StartNewChainMain.java │ │ │ ├── checkpoint │ │ │ │ ├── Main.java │ │ │ │ ├── PvStatExactlyOnce.java │ │ │ │ ├── PvStatLocalKeyByExactlyOnce.java │ │ │ │ └── util │ │ │ │ │ └── PvStatExactlyOnceKafkaUtil.java │ │ │ ├── config │ │ │ │ ├── ConfigurationMain.java │ │ │ │ ├── ConfigurationMain1.java │ │ │ │ ├── ParameterToolGetArgsMain.java │ │ │ │ ├── ParameterToolGetPropertiesMain.java │ │ │ │ └── ParameterToolGetSystemMain.java │ │ │ ├── file │ │ │ │ └── Main.java │ │ │ ├── join │ │ │ │ ├── WindowJoin.java │ │ │ │ └── WindowJoinSampleData.java │ │ │ ├── ml │ │ │ │ ├── IncrementalLearningSkeleton.java │ │ │ │ └── IncrementalLearningSkeletonData.java │ │ │ ├── parallelism │ │ │ │ └── Main.java │ │ │ ├── processFunction │ │ │ │ ├── KeyedProcessFunctionMain.java │ │ │ │ └── ProcessFunctionMain.java │ │ │ ├── remote │ │ │ │ └── Main.java │ │ │ ├── restartStrategy │ │ │ │ ├── AEMain.java │ │ │ │ ├── DefaultRestartStrategyMain.java │ │ │ │ ├── EnableCheckpointMain.java │ │ │ │ ├── FailureRateRestartStrategyMain.java │ │ │ │ ├── FixedDelayRestartStrategyMain.java │ │ │ │ └── 
NoRestartStrategyMain.java │ │ │ ├── sideoutput │ │ │ │ ├── FilterEvent.java │ │ │ │ ├── Main.java │ │ │ │ └── SideOutputEvent.java │ │ │ ├── socket │ │ │ │ ├── LambdaMain.java │ │ │ │ └── Main.java │ │ │ ├── state │ │ │ │ └── StateMain.java │ │ │ ├── watermark │ │ │ │ ├── Main.java │ │ │ │ ├── Main1.java │ │ │ │ ├── Main2.java │ │ │ │ ├── Main3.java │ │ │ │ ├── Main4.java │ │ │ │ ├── Word.java │ │ │ │ ├── WordPeriodicWatermark.java │ │ │ │ └── WordPunctuatedWatermark.java │ │ │ └── wordcount │ │ │ │ └── Main.java │ │ │ └── util │ │ │ ├── MySQLUtil.java │ │ │ └── ThrottledIterator.java │ └── resources │ │ ├── alarm-notify.sql │ │ ├── application.properties │ │ └── logback.xml │ └── test │ └── java │ └── Test1.java ├── flink-learning-extends ├── FlinkLogKafkaAppender │ ├── KafkaAppenderCommon │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ └── java │ │ │ └── com │ │ │ └── zhisheng │ │ │ └── flink │ │ │ ├── model │ │ │ └── LogEvent.java │ │ │ └── util │ │ │ ├── ExceptionUtil.java │ │ │ └── JacksonUtil.java │ ├── Log4j2KafkaAppender │ │ ├── pom.xml │ │ └── src │ │ │ ├── main │ │ │ ├── java │ │ │ │ └── com │ │ │ │ │ └── zhisheng │ │ │ │ │ └── log │ │ │ │ │ └── appender │ │ │ │ │ └── KafkaLog4j2Appender.java │ │ │ └── resources │ │ │ │ └── log4j2-example.properties │ │ │ └── test │ │ │ └── java │ │ │ └── ExceptionUtilTest.java │ ├── Log4jKafkaAppender │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── log │ │ │ │ └── appender │ │ │ │ └── KafkaLog4jAppender.java │ │ │ └── resources │ │ │ └── log4j-example.properties │ ├── README.md │ └── pom.xml ├── README.md ├── flink-metrics │ ├── flink-metrics-kafka │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── org │ │ │ │ └── apache │ │ │ │ └── flink │ │ │ │ └── metrics │ │ │ │ └── kafka │ │ │ │ ├── KafkaReporter.java │ │ │ │ ├── KafkaReporterFactory.java │ │ │ │ ├── KafkaReporterOptions.java │ │ │ │ ├── MetricEvent.java │ │ │ │ └── util │ │ │ │ └── JacksonUtil.java │ │ │ └── resources │ │ │ └── META-INF │ │ │ └── services │ │ │ └── org.apache.flink.metrics.reporter.MetricReporterFactory │ ├── flink-metrics-prometheus │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ ├── main │ │ │ ├── java │ │ │ │ └── org │ │ │ │ │ └── apache │ │ │ │ │ └── flink │ │ │ │ │ └── metrics │ │ │ │ │ └── prometheus │ │ │ │ │ ├── AbstractPrometheusReporter.java │ │ │ │ │ ├── ClusterMode.java │ │ │ │ │ ├── PrometheusPushGatewayReporter.java │ │ │ │ │ ├── PrometheusPushGatewayReporterFactory.java │ │ │ │ │ ├── PrometheusPushGatewayReporterOptions.java │ │ │ │ │ ├── PrometheusReporter.java │ │ │ │ │ └── PrometheusReporterFactory.java │ │ │ └── resources │ │ │ │ └── META-INF │ │ │ │ ├── NOTICE │ │ │ │ └── services │ │ │ │ └── org.apache.flink.metrics.reporter.MetricReporterFactory │ │ │ └── test │ │ │ └── resources │ │ │ └── log4j2-test.properties │ └── pom.xml └── pom.xml ├── flink-learning-k8s ├── README.md ├── blogs │ ├── Flink HA 配置.md │ ├── Flink K8s Pod 增加环境变量.md │ ├── Kubernetes 入门之知识点梳理.md │ ├── Pod 异常问题排查.md │ └── 合理设置 Request 与 Limit.md ├── dockerfile │ ├── Dockerfile-Hadoop-Hive │ ├── Dockerfile-example-statemachine │ ├── Dockerfile-flink-1.12.0-jar │ ├── Dockerfile-flink-1.12.0-sql │ ├── build_flink_docker_images.sh │ ├── build_ingress.sh │ ├── docker-entrypoint.sh │ └── ingress_template.yaml ├── flink-k8s │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── org │ │ │ └── apache │ │ │ └── flink │ │ │ └── kubernetes │ │ │ ├── 
KubernetesClusterClientFactory.java │ │ │ ├── KubernetesClusterDescriptor.java │ │ │ ├── KubernetesResourceManagerDriver.java │ │ │ ├── KubernetesWorkerNode.java │ │ │ ├── cli │ │ │ └── KubernetesSessionCli.java │ │ │ ├── configuration │ │ │ ├── KubernetesConfigOptions.java │ │ │ ├── KubernetesConfigOptionsInternal.java │ │ │ ├── KubernetesDeploymentTarget.java │ │ │ ├── KubernetesHighAvailabilityOptions.java │ │ │ ├── KubernetesLeaderElectionConfiguration.java │ │ │ └── KubernetesResourceManagerDriverConfiguration.java │ │ │ ├── entrypoint │ │ │ ├── KubernetesApplicationClusterEntrypoint.java │ │ │ ├── KubernetesEntrypointUtils.java │ │ │ ├── KubernetesResourceManagerFactory.java │ │ │ ├── KubernetesSessionClusterEntrypoint.java │ │ │ └── KubernetesWorkerResourceSpecFactory.java │ │ │ ├── executors │ │ │ ├── KubernetesSessionClusterExecutor.java │ │ │ └── KubernetesSessionClusterExecutorFactory.java │ │ │ ├── highavailability │ │ │ ├── KubernetesCheckpointIDCounter.java │ │ │ ├── KubernetesCheckpointRecoveryFactory.java │ │ │ ├── KubernetesCheckpointStoreUtil.java │ │ │ ├── KubernetesHaServices.java │ │ │ ├── KubernetesHaServicesFactory.java │ │ │ ├── KubernetesJobGraphStoreUtil.java │ │ │ ├── KubernetesLeaderElectionDriver.java │ │ │ ├── KubernetesLeaderElectionDriverFactory.java │ │ │ ├── KubernetesLeaderRetrievalDriver.java │ │ │ ├── KubernetesLeaderRetrievalDriverFactory.java │ │ │ ├── KubernetesRunningJobsRegistry.java │ │ │ └── KubernetesStateHandleStore.java │ │ │ ├── kubeclient │ │ │ ├── Endpoint.java │ │ │ ├── Fabric8FlinkKubeClient.java │ │ │ ├── FlinkKubeClient.java │ │ │ ├── FlinkKubeClientFactory.java │ │ │ ├── FlinkPod.java │ │ │ ├── KubeClientFactory.java │ │ │ ├── KubernetesJobManagerSpecification.java │ │ │ ├── decorators │ │ │ │ ├── AbstractKubernetesStepDecorator.java │ │ │ │ ├── EnvSecretsDecorator.java │ │ │ │ ├── ExternalServiceDecorator.java │ │ │ │ ├── FlinkConfMountDecorator.java │ │ │ │ ├── HadoopConfMountDecorator.java │ │ │ │ ├── HiveConfMountDecorator.java │ │ │ │ ├── InitJobManagerDecorator.java │ │ │ │ ├── InitTaskManagerDecorator.java │ │ │ │ ├── InternalServiceDecorator.java │ │ │ │ ├── JavaCmdJobManagerDecorator.java │ │ │ │ ├── JavaCmdTaskManagerDecorator.java │ │ │ │ ├── KerberosMountDecorator.java │ │ │ │ ├── KubernetesStepDecorator.java │ │ │ │ └── MountSecretsDecorator.java │ │ │ ├── factory │ │ │ │ ├── KubernetesJobManagerFactory.java │ │ │ │ └── KubernetesTaskManagerFactory.java │ │ │ ├── parameters │ │ │ │ ├── AbstractKubernetesParameters.java │ │ │ │ ├── KubernetesJobManagerParameters.java │ │ │ │ ├── KubernetesParameters.java │ │ │ │ └── KubernetesTaskManagerParameters.java │ │ │ └── resources │ │ │ │ ├── AbstractKubernetesWatcher.java │ │ │ │ ├── KubernetesConfigMap.java │ │ │ │ ├── KubernetesConfigMapWatcher.java │ │ │ │ ├── KubernetesException.java │ │ │ │ ├── KubernetesLeaderElector.java │ │ │ │ ├── KubernetesPod.java │ │ │ │ ├── KubernetesPodsWatcher.java │ │ │ │ ├── KubernetesResource.java │ │ │ │ ├── KubernetesSecretEnvVar.java │ │ │ │ ├── KubernetesService.java │ │ │ │ ├── KubernetesToleration.java │ │ │ │ ├── KubernetesTooOldResourceVersionException.java │ │ │ │ └── KubernetesWatch.java │ │ │ ├── taskmanager │ │ │ └── KubernetesTaskExecutorRunner.java │ │ │ └── utils │ │ │ ├── Constants.java │ │ │ └── KubernetesUtils.java │ │ └── resources │ │ └── META-INF │ │ ├── NOTICE │ │ └── services │ │ ├── org.apache.flink.client.deployment.ClusterClientFactory │ │ └── org.apache.flink.core.execution.PipelineExecutorFactory └── pom.xml ├── 
flink-learning-monitor ├── README.md ├── flink-learning-monitor-alert │ ├── README.md │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── alert │ │ │ │ ├── alert │ │ │ │ ├── AsyncIOAlert.java │ │ │ │ ├── BroadcastUpdateAlertRule.java │ │ │ │ ├── LogEventAlert.java │ │ │ │ └── OutageAlert.java │ │ │ │ ├── function │ │ │ │ ├── AlertRuleAsyncIOFunction.java │ │ │ │ ├── GetAlertRuleSourceFunction.java │ │ │ │ └── OutageProcessFunction.java │ │ │ │ ├── model │ │ │ │ ├── AlertEvent.java │ │ │ │ ├── AlertRule.java │ │ │ │ ├── AtMobiles.java │ │ │ │ ├── BaseMessage.java │ │ │ │ ├── Email.java │ │ │ │ ├── LinkMessage.java │ │ │ │ ├── MarkDownMessage.java │ │ │ │ ├── MessageType.java │ │ │ │ ├── OutageMetricEvent.java │ │ │ │ ├── TextMessage.java │ │ │ │ └── WorkNotify.java │ │ │ │ ├── utils │ │ │ │ ├── DingDingAccessTokenUtil.java │ │ │ │ ├── DingDingGroupMsgUtil.java │ │ │ │ ├── DingDingWorkspaceNoticeUtil.java │ │ │ │ ├── EmailNoticeUtil.java │ │ │ │ ├── PhoneNoticeUtil.java │ │ │ │ └── SMSNoticeUtil.java │ │ │ │ └── watermark │ │ │ │ └── OutageMetricWaterMark.java │ │ └── resources │ │ │ ├── LogEventDataExample.json │ │ │ ├── application.properties │ │ │ └── logback.xml │ │ └── test │ │ └── java │ │ ├── BuildLogEventDataUtil.java │ │ ├── BuildMachineMetricDataUtil.java │ │ ├── DingDingMsgTest.java │ │ └── LogEventDataExample.java ├── flink-learning-monitor-collector │ ├── README.md │ ├── flink_log_event.json │ ├── flink_metrics_event.json │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── zhisheng │ │ └── collector │ │ └── FlinkJobMetricCollect.java ├── flink-learning-monitor-common │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── zhisheng │ │ └── common │ │ ├── model │ │ ├── Job.java │ │ ├── JobStatus.java │ │ └── Task.java │ │ └── utils │ │ └── PropertiesUtil.java ├── flink-learning-monitor-dashboard │ ├── README.md │ └── pom.xml ├── flink-learning-monitor-log │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── zhisheng │ │ │ └── log │ │ │ ├── LogAlert.java │ │ │ ├── LogMain.java │ │ │ ├── LogSink2ES.java │ │ │ ├── function │ │ │ └── OriLog2LogEventFlatMapFunction.java │ │ │ ├── model │ │ │ └── OriginalLogEvent.java │ │ │ ├── schema │ │ │ └── OriginalLogEventSchema.java │ │ │ └── utils │ │ │ ├── ESSinkUtil.java │ │ │ ├── GrokUtil.java │ │ │ └── RetryRequestFailureHandler.java │ │ └── resources │ │ ├── application.properties │ │ └── patterns │ │ └── patterns ├── flink-learning-monitor-pvuv │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── zhisheng │ │ └── monitor │ │ └── pvuv │ │ ├── HyperLogLogUvExample.java │ │ ├── MapStateUvExample.java │ │ ├── RedisSetUvExample.java │ │ ├── model │ │ └── UserVisitWebEvent.java │ │ └── utils │ │ └── UvExampleUtil.java ├── flink-learning-monitor-storage │ ├── README.md │ ├── flink_log_2es.sql │ ├── flink_metrics_2es.sql │ └── pom.xml ├── flink_monitor_measurements.md └── pom.xml ├── flink-learning-project ├── README.md ├── flink-learning-project-common │ └── pom.xml ├── flink-learning-project-deduplication │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── zhisheng │ │ └── project │ │ └── deduplication │ │ ├── KeyedStateDeduplication.java │ │ ├── TuningKeyedStateDeduplication.java │ │ ├── model │ │ └── UserVisitWebEvent.java │ │ └── utils │ │ └── DeduplicationExampleUtil.java ├── flink-learning-project-flink-job-scaffold │ └── pom.xml ├── flink-learning-project-log │ └── 
pom.xml ├── flink-learning-project-monitor-alert │ └── pom.xml ├── flink-learning-project-monitor-dashboard │ └── pom.xml ├── flink-learning-project-real-time-computing-platform │ └── pom.xml ├── flink-learning-project-real-time-data-warehouse │ └── pom.xml ├── flink-learning-project-risk-management │ └── pom.xml └── pom.xml ├── flink-learning-sql ├── README.md ├── flink-learning-sql-blink │ ├── README.md │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── sql │ │ │ │ └── blink │ │ │ │ └── stream │ │ │ │ ├── catalog │ │ │ │ ├── CatalogAPI.java │ │ │ │ └── CatalogTypes.java │ │ │ │ └── example │ │ │ │ ├── FlinkSQLDistinctExample.java │ │ │ │ ├── SQLExampleData2PG.java │ │ │ │ ├── SQLExampleKafkaData2ES.java │ │ │ │ ├── SQLExampleKafkaData2HBase.java │ │ │ │ ├── SQLExampleKafkaData2Kafka.java │ │ │ │ ├── SQLExampleKafkaRowData2ES.java │ │ │ │ └── StreamWindowSQLExample.java │ │ └── resources │ │ │ ├── application.properties │ │ │ └── words.txt │ │ └── test │ │ └── java │ │ └── test │ │ └── TableEnvironmentExample1.java ├── flink-learning-sql-client │ ├── README.md │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── zhisheng │ │ │ │ └── sql │ │ │ │ ├── SqlSubmit.java │ │ │ │ ├── cli │ │ │ │ ├── CliOptions.java │ │ │ │ ├── CliOptionsParser.java │ │ │ │ └── SqlCommandParser.java │ │ │ │ ├── constant │ │ │ │ ├── Constant.java │ │ │ │ └── UnitEnum.java │ │ │ │ ├── exception │ │ │ │ └── SqlParserException.java │ │ │ │ ├── planner │ │ │ │ ├── BatchPlanner.java │ │ │ │ ├── Planner.java │ │ │ │ └── StreamingPlanner.java │ │ │ │ └── utils │ │ │ │ ├── CloseableRowIteratorWrapper.java │ │ │ │ ├── Config.java │ │ │ │ └── HttpClient.java │ │ └── resources │ │ │ ├── dev │ │ │ ├── conf.properties │ │ │ └── logback.xml │ │ │ ├── pre │ │ │ ├── conf.properties │ │ │ └── logback.xml │ │ │ ├── prod │ │ │ ├── conf.properties │ │ │ └── logback.xml │ │ │ └── sql │ │ │ └── 124563.sql │ │ └── test │ │ ├── java │ │ └── SqlSubmitTest.java │ │ └── resources │ │ ├── dev │ │ ├── conf.properties │ │ └── logback.xml │ │ └── sql │ │ └── test.sql ├── flink-learning-sql-common │ ├── README.md │ └── pom.xml └── pom.xml ├── paper └── paper.md ├── pics └── Flink-code.png ├── pom.xml └── tree.md /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .pampas/ 3 | 4 | **/.idea/* 5 | **/target/* 6 | 7 | .idea 8 | *.iml 9 | *.class 10 | 11 | .project 12 | **/.settings/* 13 | 14 | **/*/dependency-reduced-pom.xml 15 | 16 | # front dependencies 17 | endpoints/**/node_modules 18 | 19 | # production 20 | endpoints/**/dist 21 | 22 | endpoints/**/public/ 23 | endpoints/**/vendor/ 24 | endpoints/**/vendor_modules/ 25 | endpoints/**/components_vendor/ 26 | endpoints/**/components_eevee/ 27 | endpoints/**/app/components_vendor 28 | endpoints/**/lib/server/ 29 | endpoints/**/npm-debug.log* 30 | endpoints/**/yarn-error.log* 31 | 32 | debug.properties 33 | 34 | /dist/ 35 | 36 | .classpath 37 | .factorypath 38 | .vscode/ -------------------------------------------------------------------------------- /Flink-Forward-Asia-2020-PPT/README.md: -------------------------------------------------------------------------------- 1 | 2 | Flink Forward Asia 2020 在北京召开的,有主会场和几个分会场(企业实践、Apache Flink 核心技术、开源大数据生态、实时数仓、人工智能),内容涉及很多,可以查看下面图片介绍。 3 | 4 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2020-12-21-142353.png) 5 | 6 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2020-12-21-142431.png) 7 | 8 | 
![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2020-12-21-142511.png) 9 | 10 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2020-12-21-142538.png) 11 | 12 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2020-12-21-142616.png) 13 | 14 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2020-12-21-142643.png) 15 | 16 | 17 | ### 如何获取上面这些 PPT? 18 | 19 | 上面的这些 PPT 本人已经整理好了,你可以扫描下面二维码,关注微信公众号:zhisheng,然后在里面回复关键字: **ffa2020** 即可获取已放出的 PPT。 20 | 21 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2019-12-28-144329.jpg) -------------------------------------------------------------------------------- /Flink-Forward-Asia-2021-PPT/README.md: -------------------------------------------------------------------------------- 1 | 2 | Flink Forward Asia 2021 在线上召开的,有企业实践、Apache Flink 核心技术、开源大数据生态、实时数仓、人工智能、流批一体、数据湖等会场,内容涉及很多,可以查看下面图片介绍。 3 | 4 | ![](https://tva1.sinaimg.cn/large/008i3skNly1gyr1wen0rnj31g00u0jvn.jpg) 5 | 6 | ![](https://tva1.sinaimg.cn/large/008i3skNly1gyr1y1zdupj31r50lnjuk.jpg) 7 | 8 | ![](https://tva1.sinaimg.cn/large/008i3skNly1gyr1yk6277j31s20h376q.jpg) 9 | 10 | ![](https://tva1.sinaimg.cn/large/008i3skNly1gyr1ytnwgfj31r90kaacx.jpg) 11 | 12 | ![](https://tva1.sinaimg.cn/large/008i3skNly1gyr1z97yb7j31qh0kkdj0.jpg) 13 | 14 | ![](https://tva1.sinaimg.cn/large/008i3skNly1gyr1zmyo5ej31q70jmju2.jpg) 15 | 16 | ![](https://tva1.sinaimg.cn/large/008i3skNly1gyr1zuw6fpj31qb0kq778.jpg) 17 | 18 | 19 | 20 | ### 如何获取上面这些 PPT? 21 | 22 | 上面的这些 PPT 本人已经整理好了,你可以扫描下面二维码,关注微信公众号:zhisheng,然后在里面回复关键字: **ffa2021** 即可获取已放出的 PPT。 23 | 24 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2019-12-28-144329.jpg) -------------------------------------------------------------------------------- /flink-learning-basic/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-basic 2 | 3 | 该项目存放 Flink 基础功能的一些学习案例 -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sinks/README.md: -------------------------------------------------------------------------------- 1 | ## Flink data sink 2 | 3 | [http://www.54tianzhisheng.cn/2018/10/29/flink-sink/](http://www.54tianzhisheng.cn/2018/10/29/flink-sink/) 4 | 5 | [http://www.54tianzhisheng.cn/2018/10/31/flink-create-sink/](http://www.54tianzhisheng.cn/2018/10/31/flink-create-sink/) -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sinks/src/main/java/com/zhisheng/data/sinks/Main2.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.data.sinks; 2 | 3 | import com.zhisheng.data.sinks.sinks.MySink; 4 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 5 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 6 | 7 | /** 8 | * Desc: test RichSink function construction method and open function 9 | * https://t.zsxq.com/EIiyjeU 10 | * Created by zhisheng on 2019-09-26 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | public class Main2 { 15 | public static void main(String[] args) throws Exception { 16 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 17 | 18 | DataStreamSource source = env.socketTextStream("127.0.0.1", 9000); 19 | source.addSink(new MySink("6")).setParallelism(5); 20 | env.execute("xxxx"); 21 | } 22 | } 23 | 
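The data-sinks README above links to the blog posts behind `SinkToMySQL.java`, but that class itself is not reproduced in this dump. Below is a minimal sketch of what such a `RichSinkFunction`-based JDBC sink usually looks like; the class name, JDBC URL, credentials and SQL statement are illustrative assumptions, not the repository's actual code.

```java
package com.zhisheng.data.sinks.sinks;

import com.zhisheng.data.sinks.model.Student;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

/**
 * Illustrative sketch only: URL, credentials, table and column names are assumptions.
 */
public class SinkToMySQLSketch extends RichSinkFunction<Student> {

    private transient Connection connection;
    private transient PreparedStatement ps;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        // one connection per parallel sink instance, created on the task manager
        connection = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/test", "root", "root");
        ps = connection.prepareStatement(
                "INSERT INTO student (id, name, password, age) VALUES (?, ?, ?, ?)");
    }

    @Override
    public void invoke(Student value, Context context) throws Exception {
        // write one row per incoming Student record
        ps.setInt(1, value.getId());
        ps.setString(2, value.getName());
        ps.setString(3, value.getPassword());
        ps.setInt(4, value.getAge());
        ps.executeUpdate();
    }

    @Override
    public void close() throws Exception {
        // release JDBC resources when the sink is torn down
        if (ps != null) {
            ps.close();
        }
        if (connection != null) {
            connection.close();
        }
        super.close();
    }
}
```

As `MySink` below also illustrates, the connection belongs in `open()` rather than in the constructor: the constructor runs on the client when the job graph is built, while `open()` runs on the task managers after the function has been deserialized.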
-------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sinks/src/main/java/com/zhisheng/data/sinks/model/Student.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.data.sinks.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019-02-17 10 | * Blog: http://www.54tianzhisheng.cn/tags/Flink/ 11 | */ 12 | 13 | @Data 14 | @AllArgsConstructor 15 | @NoArgsConstructor 16 | public class Student { 17 | public int id; 18 | public String name; 19 | public String password; 20 | public int age; 21 | } 22 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sinks/src/main/java/com/zhisheng/data/sinks/sinks/MySink.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.data.sinks.sinks; 2 | 3 | import org.apache.flink.configuration.Configuration; 4 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 5 | 6 | /** 7 | * Desc: 8 | * Created by zhisheng on 2019-09-26 9 | * blog:http://www.54tianzhisheng.cn/ 10 | * 微信公众号:zhisheng 11 | */ 12 | public class MySink extends RichSinkFunction { 13 | private String tx; 14 | 15 | public MySink(String tx) { 16 | System.out.println("+++++++++++++" + tx); 17 | this.tx = tx; 18 | } 19 | 20 | @Override 21 | public void open(Configuration parameters) throws Exception { 22 | tx = "5"; 23 | System.out.println("========"); 24 | super.open(parameters); 25 | } 26 | 27 | @Override 28 | public void invoke(String value, Context context) throws Exception { 29 | System.out.println(value + " " + tx); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sinks/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metrics-group 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=student 5 | stream.parallelism=5 6 | stream.checkpoint.interval=1000 7 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sinks/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sinks/src/main/resources/student.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS `student`; 2 | CREATE TABLE `student` ( 3 | `id` int(11) unsigned NOT NULL AUTO_INCREMENT, 4 | `name` varchar(25) COLLATE utf8_bin DEFAULT NULL, 5 | `password` varchar(25) COLLATE utf8_bin DEFAULT NULL, 6 | `age` int(10) DEFAULT NULL, 7 | PRIMARY KEY (`id`) 8 | ) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 COLLATE=utf8_bin; 9 | 10 | 11 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sources/README.md: -------------------------------------------------------------------------------- 1 | ## Flink data source 2 | 3 | 
[http://www.54tianzhisheng.cn/2018/10/28/flink-sources/](http://www.54tianzhisheng.cn/2018/10/28/flink-sources/) 4 | 5 | [http://www.54tianzhisheng.cn/2018/10/30/flink-create-source/](http://www.54tianzhisheng.cn/2018/10/30/flink-create-source/) 6 | 7 | 8 | 9 | 定时任务捞取 MySQL 数据:可以查看 ScheduleMain 类的实现 10 | 11 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/img/2019-05-24-124853.jpg) -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sources/src/main/java/com/zhisheng/data/sources/Main2.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.data.sources; 2 | 3 | import com.zhisheng.data.sources.sources.SourceFromMySQL; 4 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 5 | 6 | /** 7 | * Desc: 自定义 source,从 mysql 中读取数据 8 | * Created by zhisheng on 2019-02-17 9 | * Blog: http://www.54tianzhisheng.cn/tags/Flink/ 10 | */ 11 | public class Main2 { 12 | public static void main(String[] args) throws Exception { 13 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 14 | 15 | env.addSource(new SourceFromMySQL()).print(); 16 | 17 | env.execute("Flink add data sourc"); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sources/src/main/java/com/zhisheng/data/sources/model/Student.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.data.sources.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019-02-17 10 | * Blog: http://www.54tianzhisheng.cn/tags/Flink/ 11 | */ 12 | 13 | @Data 14 | @AllArgsConstructor 15 | @NoArgsConstructor 16 | public class Student { 17 | public int id; 18 | public String name; 19 | public String password; 20 | public int age; 21 | } 22 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sources/src/main/java/com/zhisheng/data/sources/utils/MySQLUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.data.sources.utils; 2 | 3 | import java.sql.Connection; 4 | import java.sql.DriverManager; 5 | 6 | /** 7 | * Desc: MySQL 工具类 8 | * Created by zhisheng on 2019-05-24 9 | * blog:http://www.54tianzhisheng.cn/ 10 | * 微信公众号:zhisheng 11 | */ 12 | public class MySQLUtil { 13 | 14 | public static Connection getConnection(String driver, String url, String user, String password) { 15 | Connection con = null; 16 | try { 17 | Class.forName(driver); 18 | //注意,这里替换成你自己的mysql 数据库路径和用户名、密码 19 | con = DriverManager.getConnection(url, user, password); 20 | } catch (Exception e) { 21 | System.out.println("-----------mysql get connection has exception , msg = "+ e.getMessage()); 22 | } 23 | return con; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sources/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metrics-group 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=alert-metrics 5 | stream.parallelism=5 6 | stream.checkpoint.interval=1000 7 | stream.checkpoint.enable=false 
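The data-sources README above points at `SourceFromMySQL` and a scheduled MySQL polling job (`ScheduleMain`), but neither class is included in this dump. Below is a minimal sketch of a polling `RichSourceFunction` in the same style, reusing the `MySQLUtil` helper shown above; the class name, connection settings and the 10-second interval are assumptions for illustration.

```java
package com.zhisheng.data.sources.sources;

import com.zhisheng.data.sources.model.Student;
import com.zhisheng.data.sources.utils.MySQLUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

/**
 * Illustrative sketch only: URL, credentials and polling interval are assumptions.
 */
public class MySQLPollingSourceSketch extends RichSourceFunction<Student> {

    private volatile boolean running = true;
    private transient Connection connection;
    private transient PreparedStatement ps;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        connection = MySQLUtil.getConnection("com.mysql.jdbc.Driver",
                "jdbc:mysql://localhost:3306/test", "root", "root");
        ps = connection.prepareStatement("SELECT id, name, password, age FROM student");
    }

    @Override
    public void run(SourceContext<Student> ctx) throws Exception {
        while (running) {
            // re-read the table and emit every row downstream
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    ctx.collect(new Student(rs.getInt("id"), rs.getString("name"),
                            rs.getString("password"), rs.getInt("age")));
                }
            }
            Thread.sleep(10_000L); // poll again after 10 seconds
        }
    }

    @Override
    public void cancel() {
        running = false;
    }

    @Override
    public void close() throws Exception {
        // clean up JDBC resources when the source stops
        if (ps != null) {
            ps.close();
        }
        if (connection != null) {
            connection.close();
        }
        super.close();
    }
}
```

`Main2` above wires such a source into a job with `env.addSource(...)` and prints the emitted records.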
-------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sources/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-data-sources/src/main/resources/student.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS `student`; 2 | CREATE TABLE `student` ( 3 | `id` int(11) unsigned NOT NULL AUTO_INCREMENT, 4 | `name` varchar(25) COLLATE utf8_bin DEFAULT NULL, 5 | `password` varchar(25) COLLATE utf8_bin DEFAULT NULL, 6 | `age` int(10) DEFAULT NULL, 7 | PRIMARY KEY (`id`) 8 | ) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 COLLATE=utf8_bin; 9 | 10 | 11 | 12 | INSERT INTO `student` VALUES ('1', 'zhisheng01', '123456', '18'), ('2', 'zhisheng02', '123', '17'), ('3', 'zhisheng03', '1234', '18'), ('4', 'zhisheng04', '12345', '16'); 13 | COMMIT; 14 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-libraries 2 | 3 | + [CEP](flink-learning-libraries-cep) 4 | + [Gelly](./flink-learning-libraries-gelly) 5 | + [Machine Learning](flink-learning-libraries-machine-learning) 6 | + [State Processor API](flink-learning-libraries-state-processor-api) -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-cep/src/main/java/com/zhisheng/libraries/cep/model/Alert.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.libraries.cep.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * Desc: 10 | * Created by zhisheng on 2019/10/29 上午10:34 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Data 15 | @NoArgsConstructor 16 | @AllArgsConstructor 17 | @Builder 18 | public class Alert { 19 | private String message; 20 | } 21 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-cep/src/main/java/com/zhisheng/libraries/cep/model/Event.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.libraries.cep.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * Desc: 10 | * Created by zhisheng on 2019/10/29 上午10:33 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Data 15 | @AllArgsConstructor 16 | @NoArgsConstructor 17 | @Builder 18 | public class Event { 19 | private Integer id; 20 | private String name; 21 | } 22 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-cep/src/main/java/com/zhisheng/libraries/cep/model/SubEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.libraries.cep.model; 2 | 3 | 
import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * Desc: 10 | * Created by zhisheng on 2019/10/29 上午10:34 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Data 15 | @NoArgsConstructor 16 | @AllArgsConstructor 17 | @Builder 18 | public class SubEvent { 19 | private Integer volume; 20 | } 21 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-cep/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng_metrics 5 | stream.parallelism=1 6 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-cep/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-state-processor-api/src/main/java/com/zhisheng/libraries/stateProcessApi/StatefulFunctionWithTime.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.libraries.stateProcessApi; 2 | 3 | import org.apache.flink.api.common.state.ValueState; 4 | import org.apache.flink.api.common.state.ValueStateDescriptor; 5 | import org.apache.flink.api.common.typeinfo.Types; 6 | import org.apache.flink.configuration.Configuration; 7 | import org.apache.flink.streaming.api.functions.KeyedProcessFunction; 8 | import org.apache.flink.util.Collector; 9 | 10 | /** 11 | * Desc: 12 | * Created by zhisheng on 2020-01-06 17:33 13 | * blog:http://www.54tianzhisheng.cn/ 14 | * 微信公众号:zhisheng 15 | */ 16 | public class StatefulFunctionWithTime extends KeyedProcessFunction { 17 | 18 | ValueState state; 19 | 20 | @Override 21 | public void open(Configuration parameters) { 22 | ValueStateDescriptor stateDescriptor = new ValueStateDescriptor<>("state", Types.INT); 23 | state = getRuntimeContext().getState(stateDescriptor); 24 | } 25 | 26 | @Override 27 | public void processElement(Integer value, Context ctx, Collector out) throws Exception { 28 | state.update(value + 1); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-state-processor-api/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metrics-group 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=alert-metrics 5 | stream.parallelism=5 6 | stream.checkpoint.interval=1000 7 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/flink-learning-libraries-state-processor-api/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} 
- %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-libraries/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-basic 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-libraries 13 | pom 14 | 15 | flink-learning-libraries-cep 16 | flink-learning-libraries-state-processor-api 17 | 18 | 19 | 20 | 21 | com.zhisheng.flink 22 | flink-learning-common 23 | ${project.version} 24 | 25 | 26 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-metrics/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-metrics 2 | 3 | **Metrics 类型**: 4 | 5 | + Counter 6 | + Gauge 7 | + Histogram 8 | + Meter 9 | 10 | 在 `com.zhisheng.metrics.custom` 包下面有上面四种 Metrics 类型的自定义测试类,其中自定义 Histogram 和 Meter 需要引入依赖: 11 | 12 | ```xml 13 | 14 | org.apache.flink 15 | flink-metrics-dropwizard 16 | ${flink.version} 17 | 18 | ``` 19 | 20 | 关于 Flink Metrics 的源码解析可以参考我的博客: 21 | 22 | [Flink Metrics 源码解析](http://www.54tianzhisheng.cn/2019/07/02/Flink-code-metrics/) 23 | 24 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/img/2019-07-26-150037.jpg) -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-state/README.md: -------------------------------------------------------------------------------- 1 | 模版项目,不做任何代码编写,方便创建新的 module 时复制 -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-state/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-basic 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-state 13 | 14 | 15 | 16 | org.apache.flink 17 | flink-queryable-state-client-java 18 | ${flink.version} 19 | 20 | 21 | com.zhisheng.flink 22 | flink-learning-common 23 | ${project.version} 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-state/src/main/java/com/zhisheng/state/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.state; 2 | 3 | /** 4 | * Desc: 5 | * Created by zhisheng on 2019-04-18 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class Main { 10 | public static void main(String[] args) { 11 | 12 | 13 | 14 | 15 | //file:///netdata/addon/flink/data/state/log 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-state/src/main/java/com/zhisheng/state/queryablestate/ClimateLog.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.state.queryablestate; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | 6 | /** 7 | * Desc: 8 | * Created by zhisheng on 2019-07-05 9 | * blog:http://www.54tianzhisheng.cn/ 10 | * 微信公众号:zhisheng 11 | */ 12 | @Data 13 | @AllArgsConstructor 14 | public class ClimateLog { 15 | private String country; 16 | private String state; 17 | private float temperature; 18 | private float humidity; 19 | } 20 | -------------------------------------------------------------------------------- 
/flink-learning-basic/flink-learning-state/src/main/java/com/zhisheng/state/queryablestate/QueryClient.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.state.queryablestate; 2 | 3 | /** 4 | * Desc: QueryClient 5 | * Created by zhisheng on 2019-07-05 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class QueryClient { 10 | public static void main(String[] args) { 11 | 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-state/src/main/resources/_metadata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zhisheng17/flink-learning/f56062702aeb408e3168da4ae4deb16d97d8b5b7/flink-learning-basic/flink-learning-state/src/main/resources/_metadata -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-window/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-window 2 | 3 | Flink Window 机制学习:https://t.zsxq.com/byZbyrb 4 | 5 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/img/2019-11-06-133449.png) 6 | 7 | #### Time Window 8 | 9 | #### Count Window 10 | 11 | #### Session Window -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-window/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-basic 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-window 13 | 14 | 15 | 16 | com.zhisheng.flink 17 | flink-learning-common 18 | ${project.version} 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-window/src/main/java/com/zhisheng/constant/WindowConstant.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.constant; 2 | 3 | /** 4 | * Desc: Flink Window 案例用到的常量 5 | * Created by zhisheng on 2019-05-14 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class WindowConstant { 10 | public static final String HOST_NAME = "hostName"; 11 | public static final String PORT = "port"; 12 | } 13 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-window/src/main/java/com/zhisheng/function/LineSplitter.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.function; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.apache.flink.api.common.functions.FlatMapFunction; 5 | import org.apache.flink.api.java.tuple.Tuple2; 6 | import org.apache.flink.util.Collector; 7 | 8 | /** 9 | * Desc: 10 | * Created by zhisheng on 2019-08-06 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Slf4j 15 | public class LineSplitter implements FlatMapFunction> { 16 | @Override 17 | public void flatMap(String s, Collector> collector) { 18 | String[] tokens = s.split(" "); 19 | 20 | if (tokens.length >= 2 && isValidLong(tokens[0])) { 21 | collector.collect(new Tuple2<>(Long.valueOf(tokens[0]), tokens[1])); 22 | } 23 | } 24 | 25 | private static boolean isValidLong(String str) { 26 | try { 27 | long _v = Long.parseLong(str); 28 | return true; 29 | } catch (NumberFormatException e) { 30 | 
log.info("the str = {} is not a number", str); 31 | return false; 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-window/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | hostName: localhost 2 | port: 9000 -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-window/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-basic/flink-learning-window/src/test/java/TestWindowSize.java: -------------------------------------------------------------------------------- 1 | import org.apache.flink.api.common.time.Time; 2 | 3 | /** 4 | * Desc: 5 | * Created by zhisheng on 2019/11/4 上午12:11 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class TestWindowSize { 10 | public static void main(String[] args) { 11 | long l = System.currentTimeMillis(); 12 | //timestamp - (timestamp - offset + slide) % slide; 13 | System.out.println(l - (l + 60 * 1000) % 60000); 14 | 15 | long size = Time.hours(24).toMilliseconds(); 16 | long slide = Time.hours(1).toMilliseconds(); 17 | long lastStart = (1572794063000l - (1572794063000l + slide) % slide); 18 | for (long start = lastStart; start > 1572794063000l - size; start -= slide) { 19 | System.out.println(start + " " + (start + size)); 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /flink-learning-basic/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-basic 13 | pom 14 | 15 | 16 | flink-learning-data-sources 17 | flink-learning-data-sinks 18 | flink-learning-window 19 | flink-learning-state 20 | flink-learning-metrics 21 | flink-learning-libraries 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-cdc/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-cdc 2 | 3 | [Flink CDC](https://ccgithub.com/ververica/flink-cdc-connectors) 基础、原理、实战、应用、源码相关的内容 4 | 5 | ### Flink CDC 学习资料 6 | 7 | + [Flink CDC 文档](https://ververica.github.io/flink-cdc-connectors/master/) 8 | 9 | 10 | ### Flink CDC 相关论文 11 | 12 | + [DBLog: A Watermark Based Change-Data-Capture Framework](https://arxiv.org/pdf/2010.12597v1.pdf) 13 | 14 | 15 | ### Flink 实战案例: 16 | 17 | + [https://github.com/morsapaes/flink-sql-CDC]() 18 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-db2-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | flink-learning-cdc 5 | com.zhisheng.flink 6 | 1.0-SNAPSHOT 7 | 8 | 4.0.0 9 | 10 | flink-db2-cdc 11 | jar 12 | 13 | flink-db2-cdc 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-db2-cdc 19 | 2.3.0 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-mongodb-cdc/pom.xml: 
-------------------------------------------------------------------------------- 1 | 3 | 4 | flink-learning-cdc 5 | com.zhisheng.flink 6 | 1.0-SNAPSHOT 7 | 8 | 4.0.0 9 | 10 | flink-mongodb-cdc 11 | jar 12 | 13 | flink-mongodb-cdc 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-mongodb-cdc 19 | 2.3.0 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-mysql-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-cdc 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-mysql-cdc 13 | jar 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-mysql-cdc 19 | 2.3.0 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-oceanbase-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | flink-learning-cdc 5 | com.zhisheng.flink 6 | 1.0-SNAPSHOT 7 | 8 | 4.0.0 9 | 10 | flink-oceanbase-cdc 11 | jar 12 | 13 | flink-oceanbase-cdc 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-oceanbase-cdc 19 | 2.3.0 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-oracle-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-cdc 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-oracle-cdc 13 | jar 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-oracle-cdc 19 | 2.3.0 20 | 21 | 22 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-postgres-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-cdc 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-postgres-cdc 13 | jar 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-postgres-cdc 19 | 2.3.0 20 | 21 | 22 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-sqlserver-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | flink-learning-cdc 5 | com.zhisheng.flink 6 | 1.0-SNAPSHOT 7 | 8 | 4.0.0 9 | 10 | flink-sqlserver-cdc 11 | jar 12 | 13 | flink-sqlserver-cdc 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-sqlserver-cdc 19 | 2.3.0 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-cdc/flink-tidb-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | flink-learning-cdc 5 | com.zhisheng.flink 6 | 1.0-SNAPSHOT 7 | 8 | 4.0.0 9 | 10 | flink-tidb-cdc 11 | jar 12 | 13 | flink-tidb-cdc 14 | 15 | 16 | 17 | com.ververica 18 | flink-sql-connector-tidb-cdc 19 | 2.3.0 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-cdc/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-cdc 13 | pom 14 | 15 | flink-mysql-cdc 16 | flink-postgres-cdc 17 | flink-oracle-cdc 18 | flink-tidb-cdc 19 | flink-sqlserver-cdc 20 | flink-oceanbase-cdc 21 | flink-mongodb-cdc 22 | flink-db2-cdc 23 | 24 | 25 | 
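The CDC modules above only declare the `flink-sql-connector-*-cdc` dependencies and do not ship any example code yet. Below is a minimal, hypothetical sketch of how the mysql-cdc connector pulled in by `flink-mysql-cdc` is typically used from the Table API; it assumes a Flink 1.13+ runtime on the classpath, and the hostname, credentials and the `inventory.products` table are placeholders:

```java
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class MySqlCdcSqlExample {
    public static void main(String[] args) {
        // The mysql-cdc connector produces a changelog stream, so use streaming mode
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // Register a CDC source over a placeholder `inventory.products` MySQL table
        tEnv.executeSql(
                "CREATE TABLE products (" +
                "  id INT," +
                "  name STRING," +
                "  PRIMARY KEY (id) NOT ENFORCED" +
                ") WITH (" +
                "  'connector' = 'mysql-cdc'," +
                "  'hostname' = 'localhost'," +
                "  'port' = '3306'," +
                "  'username' = 'flink'," +
                "  'password' = 'flink'," +
                "  'database-name' = 'inventory'," +
                "  'table-name' = 'products'" +
                ")");

        // Print the full changelog (initial snapshot + binlog changes) to stdout
        tEnv.executeSql("SELECT * FROM products").print();
    }
}
```

The other modules (postgres-cdc, oracle-cdc, sqlserver-cdc, ...) follow the same pattern with their own `'connector'` value and connection options.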
-------------------------------------------------------------------------------- /flink-learning-common/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-common 2 | 3 | 这个模块存放通用的代码(实体类、工具类、常量类) -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/constant/MachineConstant.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.constant; 2 | 3 | /** 4 | * Desc: 5 | * Created by zhisheng on 2019/10/15 上午12:20 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class MachineConstant { 10 | public static final String CLUSTER_NAME = "cluster_name"; 11 | public static final String HOST_IP = "host_ip"; 12 | public static final String LOAD5 = "load5"; 13 | public static final String USED_PERCENT = "usedPercent"; 14 | public static final String CPU = "cpu"; 15 | public static final String MEM = "mem"; 16 | public static final String LOAD = "load"; 17 | public static final String SWAP = "swap"; 18 | 19 | } 20 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/model/LogEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | import java.util.HashMap; 9 | import java.util.Map; 10 | 11 | /** 12 | * Desc: log event 13 | * Created by zhisheng on 2019/10/13 上午10:07 14 | * blog:http://www.54tianzhisheng.cn/ 15 | * 微信公众号:zhisheng 16 | */ 17 | @Data 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | @Builder 21 | public class LogEvent { 22 | //the type of log(app、docker、...) 23 | private String type; 24 | 25 | // the timestamp of log 26 | private Long timestamp; 27 | 28 | //the level of log(debug/info/warn/error) 29 | private String level; 30 | 31 | //the message of log 32 | private String message; 33 | 34 | //the tag of log(appId、dockerId、machine hostIp、machine clusterName、...) 
35 | private Map tags = new HashMap<>(); 36 | } -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/model/MetricEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | import java.util.Map; 9 | 10 | /** 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | 15 | @Data 16 | @Builder 17 | @AllArgsConstructor 18 | @NoArgsConstructor 19 | public class MetricEvent { 20 | 21 | /** 22 | * Metric name 23 | */ 24 | private String name; 25 | 26 | /** 27 | * Metric timestamp 28 | */ 29 | private Long timestamp; 30 | 31 | /** 32 | * Metric fields 33 | */ 34 | private Map fields; 35 | 36 | /** 37 | * Metric tags 38 | */ 39 | private Map tags; 40 | } 41 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/model/ShopEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * Desc: Shop 10 | * Created by zhisheng on 2019-04-18 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Data 15 | @Builder 16 | @NoArgsConstructor 17 | @AllArgsConstructor 18 | public class ShopEvent { 19 | 20 | /** 21 | * Shop Id 22 | */ 23 | private Long id; 24 | 25 | /** 26 | * Shop name 27 | */ 28 | private String name; 29 | 30 | /** 31 | * shop owner Id 32 | */ 33 | private Long ownerId; 34 | 35 | /** 36 | * shop owner name 37 | */ 38 | private String ownerName; 39 | 40 | /** 41 | * shop status: (1:正常, -1:关闭, -2:冻结) 42 | */ 43 | private int status; 44 | 45 | /** 46 | * shop type: (1:门店 2:商家 3:出版社) 47 | */ 48 | private int type; 49 | 50 | /** 51 | * shop phone 52 | */ 53 | private String phone; 54 | 55 | /** 56 | * shop email 57 | */ 58 | private String email; 59 | 60 | /** 61 | * shop address 62 | */ 63 | private String address; 64 | 65 | /** 66 | * shop image url 67 | */ 68 | private String imageUrl; 69 | } 70 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/model/UserEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * Desc: User 10 | * Created by zhisheng on 2019-04-18 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Data 15 | @Builder 16 | @AllArgsConstructor 17 | @NoArgsConstructor 18 | public class UserEvent { 19 | /** 20 | * user Id 21 | */ 22 | private Long id; 23 | 24 | /** 25 | * User Name 26 | */ 27 | private String userName; 28 | 29 | /** 30 | * User email 31 | */ 32 | private String email; 33 | 34 | /** 35 | * User phone number 36 | */ 37 | private String phoneNumber; 38 | 39 | /** 40 | * User 真实姓名 41 | */ 42 | private String realName; 43 | 44 | /** 45 | * User 展示名称 46 | */ 47 | private String displayName; 48 | 49 | /** 50 | * User 头像 Url 51 | */ 52 | private String avatarUrl; 53 | 54 | /** 55 | * User 密码(加密后) 56 | */ 57 | private 
String password; 58 | 59 | /** 60 | * User 地址 61 | */ 62 | private String address; 63 | 64 | /** 65 | * User 注册来源(1:IOS、2:PC、3:Android) 66 | */ 67 | private int deviceSource; 68 | } 69 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/model/WordEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * Desc: 10 | * Created by zhisheng on 2019-08-07 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Data 15 | @AllArgsConstructor 16 | @NoArgsConstructor 17 | @Builder 18 | public class WordEvent { 19 | private String word; 20 | private int count; 21 | private long timestamp; 22 | } 23 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/schemas/LogSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.schemas; 2 | 3 | import com.google.gson.Gson; 4 | import com.zhisheng.common.model.LogEvent; 5 | import org.apache.flink.api.common.serialization.DeserializationSchema; 6 | import org.apache.flink.api.common.serialization.SerializationSchema; 7 | import org.apache.flink.api.common.typeinfo.TypeInformation; 8 | 9 | import java.io.IOException; 10 | import java.nio.charset.StandardCharsets; 11 | 12 | /** 13 | * Log Schema ,支持序列化和反序列化 14 | * 15 | * blog:http://www.54tianzhisheng.cn/ 16 | * 微信公众号:zhisheng 17 | * 18 | */ 19 | public class LogSchema implements DeserializationSchema, SerializationSchema { 20 | 21 | private static final Gson gson = new Gson(); 22 | 23 | @Override 24 | public LogEvent deserialize(byte[] bytes) throws IOException { 25 | return gson.fromJson(new String(bytes), LogEvent.class); 26 | } 27 | 28 | @Override 29 | public boolean isEndOfStream(LogEvent logEvent) { 30 | return false; 31 | } 32 | 33 | @Override 34 | public byte[] serialize(LogEvent logEvent) { 35 | return gson.toJson(logEvent).getBytes(StandardCharsets.UTF_8); 36 | } 37 | 38 | @Override 39 | public TypeInformation getProducedType() { 40 | return TypeInformation.of(LogEvent.class); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/schemas/MetricSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.schemas; 2 | 3 | import com.google.gson.Gson; 4 | import com.zhisheng.common.model.MetricEvent; 5 | import org.apache.flink.api.common.serialization.DeserializationSchema; 6 | import org.apache.flink.api.common.serialization.SerializationSchema; 7 | import org.apache.flink.api.common.typeinfo.TypeInformation; 8 | 9 | import java.io.IOException; 10 | import java.nio.charset.StandardCharsets; 11 | 12 | /** 13 | * Metric Schema ,支持序列化和反序列化 14 | *
15 | * blog:http://www.54tianzhisheng.cn/ 16 | * 微信公众号:zhisheng 17 | */ 18 | public class MetricSchema implements DeserializationSchema, SerializationSchema { 19 | 20 | private static final Gson gson = new Gson(); 21 | 22 | @Override 23 | public MetricEvent deserialize(byte[] bytes) throws IOException { 24 | return gson.fromJson(new String(bytes), MetricEvent.class); 25 | } 26 | 27 | @Override 28 | public boolean isEndOfStream(MetricEvent metricEvent) { 29 | return false; 30 | } 31 | 32 | @Override 33 | public byte[] serialize(MetricEvent metricEvent) { 34 | return gson.toJson(metricEvent).getBytes(StandardCharsets.UTF_8); 35 | } 36 | 37 | @Override 38 | public TypeInformation getProducedType() { 39 | return TypeInformation.of(MetricEvent.class); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/schemas/OrderSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.schemas; 2 | 3 | import com.google.gson.Gson; 4 | import com.zhisheng.common.model.OrderEvent; 5 | import org.apache.flink.api.common.serialization.DeserializationSchema; 6 | import org.apache.flink.api.common.serialization.SerializationSchema; 7 | import org.apache.flink.api.common.typeinfo.TypeInformation; 8 | 9 | import java.io.IOException; 10 | import java.nio.charset.StandardCharsets; 11 | 12 | /** 13 | * order Schema ,支持序列化和反序列化 14 | *
15 | * blog:http://www.54tianzhisheng.cn/ 16 | * 微信公众号:zhisheng 17 | */ 18 | public class OrderSchema implements DeserializationSchema, SerializationSchema { 19 | 20 | private static final Gson gson = new Gson(); 21 | 22 | @Override 23 | public OrderEvent deserialize(byte[] bytes) throws IOException { 24 | return gson.fromJson(new String(bytes), OrderEvent.class); 25 | } 26 | 27 | @Override 28 | public boolean isEndOfStream(OrderEvent orderEvent) { 29 | return false; 30 | } 31 | 32 | @Override 33 | public byte[] serialize(OrderEvent orderEvent) { 34 | return gson.toJson(orderEvent).getBytes(StandardCharsets.UTF_8); 35 | } 36 | 37 | @Override 38 | public TypeInformation getProducedType() { 39 | return TypeInformation.of(OrderEvent.class); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/schemas/ShopSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.schemas; 2 | 3 | import com.google.gson.Gson; 4 | import com.zhisheng.common.model.ShopEvent; 5 | import org.apache.flink.api.common.serialization.DeserializationSchema; 6 | import org.apache.flink.api.common.serialization.SerializationSchema; 7 | import org.apache.flink.api.common.typeinfo.TypeInformation; 8 | 9 | import java.io.IOException; 10 | import java.nio.charset.StandardCharsets; 11 | 12 | /** 13 | * Shop Schema ,支持序列化和反序列化 14 | *
15 | * blog:http://www.54tianzhisheng.cn/ 16 | * 微信公众号:zhisheng 17 | */ 18 | public class ShopSchema implements DeserializationSchema, SerializationSchema { 19 | 20 | private static final Gson gson = new Gson(); 21 | 22 | @Override 23 | public ShopEvent deserialize(byte[] bytes) throws IOException { 24 | return gson.fromJson(new String(bytes), ShopEvent.class); 25 | } 26 | 27 | @Override 28 | public boolean isEndOfStream(ShopEvent shopEvent) { 29 | return false; 30 | } 31 | 32 | @Override 33 | public byte[] serialize(ShopEvent shopEvent) { 34 | return gson.toJson(shopEvent).getBytes(StandardCharsets.UTF_8); 35 | } 36 | 37 | @Override 38 | public TypeInformation getProducedType() { 39 | return TypeInformation.of(ShopEvent.class); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/schemas/UserSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.schemas; 2 | 3 | import com.google.gson.Gson; 4 | import com.zhisheng.common.model.UserEvent; 5 | import org.apache.flink.api.common.serialization.DeserializationSchema; 6 | import org.apache.flink.api.common.serialization.SerializationSchema; 7 | import org.apache.flink.api.common.typeinfo.TypeInformation; 8 | 9 | import java.io.IOException; 10 | import java.nio.charset.StandardCharsets; 11 | 12 | /** 13 | * User Schema ,支持序列化和反序列化 14 | *
15 | * blog:http://www.54tianzhisheng.cn/ 16 | * 微信公众号:zhisheng 17 | */ 18 | public class UserSchema implements DeserializationSchema, SerializationSchema { 19 | 20 | private static final Gson gson = new Gson(); 21 | 22 | @Override 23 | public UserEvent deserialize(byte[] bytes) throws IOException { 24 | return gson.fromJson(new String(bytes), UserEvent.class); 25 | } 26 | 27 | @Override 28 | public boolean isEndOfStream(UserEvent userEvent) { 29 | return false; 30 | } 31 | 32 | @Override 33 | public byte[] serialize(UserEvent userEvent) { 34 | return gson.toJson(userEvent).getBytes(StandardCharsets.UTF_8); 35 | } 36 | 37 | @Override 38 | public TypeInformation getProducedType() { 39 | return TypeInformation.of(UserEvent.class); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/utils/GsonUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.utils; 2 | 3 | import com.google.gson.Gson; 4 | import com.google.gson.GsonBuilder; 5 | 6 | import java.lang.reflect.Type; 7 | import java.nio.charset.Charset; 8 | 9 | /** 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class GsonUtil { 14 | private final static Gson gson = new Gson(); 15 | 16 | private final static Gson disableHtmlEscapingGson = new GsonBuilder().disableHtmlEscaping().create(); 17 | 18 | public static T fromJson(String value, Class type) { 19 | return gson.fromJson(value, type); 20 | } 21 | 22 | public static T fromJson(String value, Type type) { 23 | return gson.fromJson(value, type); 24 | } 25 | 26 | public static String toJson(Object value) { 27 | return gson.toJson(value); 28 | } 29 | 30 | public static String toJsonDisableHtmlEscaping(Object value) { 31 | return disableHtmlEscapingGson.toJson(value); 32 | } 33 | 34 | public static byte[] toJSONBytes(Object value) { 35 | return gson.toJson(value).getBytes(Charset.forName("UTF-8")); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /flink-learning-common/src/main/java/com/zhisheng/common/watermarks/MetricWatermark.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.watermarks; 2 | 3 | import com.zhisheng.common.model.MetricEvent; 4 | import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks; 5 | import org.apache.flink.streaming.api.watermark.Watermark; 6 | 7 | import javax.annotation.Nullable; 8 | 9 | /** 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class MetricWatermark implements AssignerWithPeriodicWatermarks { 14 | 15 | private long currentTimestamp = Long.MIN_VALUE; 16 | 17 | @Override 18 | public long extractTimestamp(MetricEvent metricEvent, long previousElementTimestamp) { 19 | long timestamp = metricEvent.getTimestamp(); 20 | currentTimestamp = Math.max(timestamp, currentTimestamp); 21 | return timestamp; 22 | } 23 | 24 | @Nullable 25 | @Override 26 | public Watermark getCurrentWatermark() { 27 | long maxTimeLag = 5000; 28 | return new Watermark(currentTimestamp == Long.MIN_VALUE ? 
Long.MIN_VALUE : currentTimestamp - maxTimeLag); 29 | 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /flink-learning-configuration-center/flink-learning-configuration-center-apollo/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-configuration-center 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-configuration-center-apollo 13 | 14 | 15 | 16 | 17 | com.ctrip.framework.apollo 18 | apollo-client 19 | 1.5.1 20 | 21 | 22 | -------------------------------------------------------------------------------- /flink-learning-configuration-center/flink-learning-configuration-center-apollo/src/main/resources/META-INF/app.properties: -------------------------------------------------------------------------------- 1 | # test 2 | #app.id=flink-learning-configration-center-apollo 3 | app.id=SampleApp -------------------------------------------------------------------------------- /flink-learning-configuration-center/flink-learning-configuration-center-nacos/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-configuration-center 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-configuration-center-nacos 13 | 14 | 15 | 16 | com.alibaba.nacos 17 | nacos-client 18 | 1.1.4 19 | 20 | 21 | com.alibaba.nacos 22 | nacos-common 23 | 1.4.1 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /flink-learning-configuration-center/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-configuration-center 13 | pom 14 | 15 | flink-learning-configuration-center-nacos 16 | flink-learning-configuration-center-apollo 17 | 18 | 19 | 20 | 21 | com.zhisheng.flink 22 | flink-learning-common 23 | ${project.version} 24 | 25 | 26 | -------------------------------------------------------------------------------- /flink-learning-connectors/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connectors 2 | 3 | 暂时有这些 Cooncetor,其中这些并不是 Flink 自带的,需要自己定义,另外提供这些 Connector 的使用案例,大家可以参考。欢迎补充和点赞 4 | 5 | ```text 6 | . 
7 | ├── README.md 8 | ├── flink-learning-connectors-activemq 9 | ├── flink-learning-connectors-akka 10 | ├── flink-learning-connectors-cassandra 11 | ├── flink-learning-connectors-clickhouse 12 | ├── flink-learning-connectors-es2 13 | ├── flink-learning-connectors-es5 14 | ├── flink-learning-connectors-es6 15 | ├── flink-learning-connectors-es7 16 | ├── flink-learning-connectors-flume 17 | ├── flink-learning-connectors-gcp-pubsub 18 | ├── flink-learning-connectors-hbase 19 | ├── flink-learning-connectors-hdfs 20 | ├── flink-learning-connectors-hive 21 | ├── flink-learning-connectors-influxdb 22 | ├── flink-learning-connectors-kafka 23 | ├── flink-learning-connectors-kinesis 24 | ├── flink-learning-connectors-kudu 25 | ├── flink-learning-connectors-mysql 26 | ├── flink-learning-connectors-netty 27 | ├── flink-learning-connectors-nifi 28 | ├── flink-learning-connectors-pulsar 29 | ├── flink-learning-connectors-rabbitmq 30 | ├── flink-learning-connectors-redis 31 | └── flink-learning-connectors-rocketmq 32 | ``` -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-activemq/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connector ActiveMQ -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-activemq/src/main/java/com/zhisheng/connectors/activemq/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.activemq; 2 | 3 | 4 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 5 | 6 | public class Main { 7 | public static void main(String[] args) throws Exception { 8 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 9 | 10 | env.execute("flink learning project template"); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-activemq/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-activemq/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-cassandra/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-connectors-cassandra 2 | 3 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-cassandra/src/main/java/com/zhisheng/connectors/cassandra/batch/CustomCassandraAnnotatedPojo.java: -------------------------------------------------------------------------------- 1 | package 
com.zhisheng.connectors.cassandra.batch; 2 | 3 | import com.datastax.driver.mapping.annotations.Column; 4 | import com.datastax.driver.mapping.annotations.Table; 5 | import lombok.AllArgsConstructor; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | /** 10 | * Desc: 11 | * Created by zhisheng on 2019-08-04 12 | * blog:http://www.54tianzhisheng.cn/ 13 | * 微信公众号:zhisheng 14 | */ 15 | @Table(name = CustomCassandraAnnotatedPojo.TABLE_NAME, keyspace = "flink") 16 | @NoArgsConstructor 17 | @AllArgsConstructor 18 | @Data 19 | public class CustomCassandraAnnotatedPojo { 20 | public static final String TABLE_NAME = "zhisheng"; 21 | 22 | @Column(name = "id") 23 | private String id; 24 | @Column(name = "counter") 25 | private Integer counter; 26 | @Column(name = "batch_id") 27 | private Integer batchId; 28 | } 29 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-cassandra/src/main/java/com/zhisheng/connectors/cassandra/streaming/Message.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.cassandra.streaming; 2 | 3 | import com.datastax.driver.mapping.annotations.Column; 4 | import com.datastax.driver.mapping.annotations.Table; 5 | import lombok.AllArgsConstructor; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019-08-04 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | @Table(keyspace = "test", name = "message") 14 | @AllArgsConstructor 15 | public class Message { 16 | private static final long serialVersionUID = 1123119384361005680L; 17 | 18 | @Column(name = "body") 19 | private String message; 20 | 21 | public Message() { 22 | this(null); 23 | } 24 | 25 | public String getMessage() { 26 | return message; 27 | } 28 | 29 | public void setMessage(String word) { 30 | this.message = word; 31 | } 32 | 33 | public boolean equals(Object other) { 34 | if (other instanceof Message) { 35 | Message that = (Message) other; 36 | return this.message.equals(that.message); 37 | } 38 | return false; 39 | } 40 | 41 | @Override 42 | public int hashCode() { 43 | return message.hashCode(); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-cassandra/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metrics-group 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=alert-metrics 5 | stream.parallelism=5 6 | stream.checkpoint.interval=1000 7 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-cassandra/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-clickhouse/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connector Clickhouse 2 | 3 | https://github.com/ivi-ru/flink-clickhouse-sink -------------------------------------------------------------------------------- 
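The ClickHouse module's README above points at `ivi-ru/flink-clickhouse-sink`, and the `ClickhouseSinkConsts` class that follows mirrors that sink's configuration keys. A rough sketch (not the library's own bootstrap code) of how those keys can be read from `application.properties` with `ParameterTool` and exposed to the job as global parameters, which is how this kind of sink usually picks up its settings:

```java
import java.util.Properties;

import com.zhisheng.connectors.clickhouse.model.ClickhouseSinkConsts;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ClickhouseSinkConfigExample {
    public static void main(String[] args) throws Exception {
        // Merge application.properties with any command-line overrides
        ParameterTool parameterTool = ParameterTool
                .fromPropertiesFile("src/main/resources/application.properties")
                .mergeWith(ParameterTool.fromArgs(args));

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // ParameterTool is a GlobalJobParameters, so downstream operators and sinks can read it back
        env.getConfig().setGlobalJobParameters(parameterTool);

        // Collect the clickhouse.sink.* keys into a Properties object
        Properties sinkProps = new Properties();
        sinkProps.setProperty(ClickhouseSinkConsts.TARGET_TABLE_NAME,
                parameterTool.get(ClickhouseSinkConsts.TARGET_TABLE_NAME, "zhisheng"));
        sinkProps.setProperty(ClickhouseSinkConsts.MAX_BUFFER_SIZE,
                parameterTool.get(ClickhouseSinkConsts.MAX_BUFFER_SIZE, "10000"));

        // sinkProps would then be handed to the ClickHouse sink builder from the library above
    }
}
```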
/flink-learning-connectors/flink-learning-connectors-clickhouse/src/main/java/com/zhisheng/connectors/clickhouse/model/ClickhouseSinkConsts.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.clickhouse.model; 2 | 3 | /** 4 | * Desc: 5 | * Created by zhisheng on 2019/9/28 上午10:03 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public final class ClickhouseSinkConsts { 10 | private ClickhouseSinkConsts() { 11 | } 12 | 13 | public static final String TARGET_TABLE_NAME = "clickhouse.sink.target-table"; 14 | public static final String MAX_BUFFER_SIZE = "clickhouse.sink.max-buffer-size"; 15 | 16 | public static final String NUM_WRITERS = "clickhouse.sink.num-writers"; 17 | public static final String QUEUE_MAX_CAPACITY = "clickhouse.sink.queue-max-capacity"; 18 | public static final String TIMEOUT_SEC = "clickhouse.sink.timeout-sec"; 19 | public static final String NUM_RETRIES = "clickhouse.sink.retries"; 20 | public static final String FAILED_RECORDS_PATH = "clickhouse.sink.failed-records-path"; 21 | } 22 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-clickhouse/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-clickhouse/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-clickhouse/src/main/resources/reference.conf: -------------------------------------------------------------------------------- 1 | clickhouse { 2 | sink { 3 | num-writers = 3 4 | timeout-sec = 1 5 | retries = 10 6 | queue-max-capacity = 1000 7 | failed-records-path = "/tmp/failed_records" 8 | } 9 | } -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es-common/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-connectors-es 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-connectors-es-common 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es-universal/README.md: -------------------------------------------------------------------------------- 1 | 2 | ### 功能 3 | 4 | 一个项目支持通用的版本,可以写入 ES 5/6/7 多个版本 5 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es-universal/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-connectors-es 7 | 
com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-connectors-es-universal 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es5/README.md: -------------------------------------------------------------------------------- 1 | ## Flink connector ElasticSearch 5.x 2 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es5/src/main/java/com/zhisheng/connectors/es5/Sink2ES5Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.es5; 2 | 3 | /** 4 | * Desc: sink data to es5 5 | * Created by zhisheng on 2019/10/22 下午5:10 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class Sink2ES5Main { 10 | } 11 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es5/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es6/README.md: -------------------------------------------------------------------------------- 1 | ## Flink connector ElasticSearch 6.x 2 | 3 | [http://www.54tianzhisheng.cn/2018/12/30/Flink-ElasticSearch-Sink/](http://www.54tianzhisheng.cn/2018/12/30/Flink-ElasticSearch-Sink/) 4 | 5 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es6/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metrics-group-test 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=alert-metrics 5 | stream.parallelism=5 6 | stream.checkpoint.interval=1000 7 | stream.checkpoint.enable=false 8 | elasticsearch.hosts=localhost:9200,localhost:9202,localhost:9203 9 | elasticsearch.bulk.flush.max.actions=1000 10 | stream.sink.parallelism=5 11 | 12 | # \u6743\u9650 13 | es.security.enable=false 14 | es.security.username=zhisheng 15 | es.security.password=zhisheng -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es6/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es7/README.md: -------------------------------------------------------------------------------- 1 | ## Flink connector ElasticSearch 7.x 2 | 3 | 4 | 遇到过的问题有: 5 | 6 | 1、ElasticSearch 的分片和副本的调优 7 | 8 | 2、ElasticSearch 的线程队列的调优 9 | 10 | 3、ElasticSearch 的刷新时间的调整 11 | 12 | 4、ElasticSearch 磁盘到 85% 后出现不写入 13 | 14 | 5、ElasticSearch 某个节点挂了导致写入的请求丢失不会重试 15 | 16 | 6、ElasticSearch bulk 写 
17 | 18 | 7、权限认证的问题 19 | 20 | 8、Index template 初始化 21 | 22 | 9、使用 RestHighLevelClient 23 | 24 | 总之目的就是为了在能够高效的将数据写入进 ElasticSearch,还要保证 ElasticSearch 不挂 25 | 26 | 27 | ## TODO 28 | 29 | 1、X-Pack 权限认证 30 | 31 | 2、使用 RestHighLevelClient -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es7/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metrics-group-test 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=alert-metrics 5 | stream.parallelism=5 6 | stream.checkpoint.interval=1000 7 | stream.checkpoint.enable=false 8 | elasticsearch.hosts=localhost:9200 9 | elasticsearch.bulk.flush.max.actions=1000 10 | stream.sink.parallelism=1 11 | 12 | # \u6743\u9650 13 | es.security.enable=false 14 | es.security.username=zhisheng 15 | es.security.password=zhisheng -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/flink-learning-connectors-es7/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-es/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-connectors 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-connectors-es 13 | pom 14 | 15 | flink-learning-connectors-es-common 16 | flink-learning-connectors-es5 17 | flink-learning-connectors-es6 18 | flink-learning-connectors-es7 19 | flink-learning-connectors-es-universal 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-flume/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connector Flume -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-flume/src/main/java/com/zhisheng/connectors/flume/FlumeEventBuilder.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.flume; 2 | 3 | import org.apache.flink.api.common.functions.Function; 4 | import org.apache.flink.api.common.functions.RuntimeContext; 5 | import org.apache.flume.Event; 6 | 7 | import java.io.Serializable; 8 | 9 | /** 10 | * Desc: A function that can create a Event from an incoming instance of the given type. 
11 | * Created by zhisheng on 2019-05-04 12 | * blog:http://www.54tianzhisheng.cn/ 13 | * 微信公众号:zhisheng 14 | */ 15 | public interface FlumeEventBuilder extends Function, Serializable { 16 | 17 | Event createFlumeEvent(IN value, RuntimeContext ctx); 18 | 19 | } 20 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-flume/src/main/java/com/zhisheng/connectors/flume/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.flume; 2 | 3 | 4 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 5 | 6 | public class Main { 7 | public static void main(String[] args) throws Exception { 8 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 9 | 10 | env.execute("flink learning project template"); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-flume/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-flume/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-gcp-pubsub/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connector gcp pubsub 2 | 3 | 关于 gcp pubsub 的介绍 4 | 5 | https://cloud.google.com/pubsub/?hl=zh-cn 6 | 7 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/img/2019-11-23-130544.jpg) 8 | 9 | 添加依赖: 10 | 11 | ```xml 12 | 13 | org.apache.flink 14 | flink-connector-gcp-pubsub_${scala.binary.version} 15 | ${flink.version} 16 | 17 | ``` -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-gcp-pubsub/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | stream.parallelism=4 2 | stream.sink.parallelism=4 3 | stream.default.parallelism=4 4 | stream.checkpoint.interval=1000 5 | stream.checkpoint.enable=false 6 | 7 | 8 | stream.project.name=zhisheng 9 | stream.input.topicName=zhisheng 10 | stream.input.subscription=zhisheng 11 | stream.output.topicName=zhisheng -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-gcp-pubsub/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hbase/README.md: 
-------------------------------------------------------------------------------- 1 | ### Flink connector HBase 2 | 3 | https://blog.csdn.net/aA518189/article/details/86544844 4 | 5 | https://blog.csdn.net/aA518189/article/details/85298889 -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hbase/flink-learning-connectors-hbase-2.2/src/main/java/hbase/constant/HBaseConstant.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.hbase.constant; 2 | 3 | 4 | public class HBaseConstant { 5 | 6 | public static final String HBASE_ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum"; 7 | public static final String HBASE_CLIENT_RETRIES_NUMBER = "hbase.client.retries.number"; 8 | public static final String HBASE_MASTER_INFO_PORT = "hbase.master.info.port"; 9 | public static final String HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT = "hbase.zookeeper.property.clientPort"; 10 | public static final String HBASE_RPC_TIMEOUT = "hbase.rpc.timeout"; 11 | public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout"; 12 | public static final String HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD = "hbase.client.scanner.timeout.period"; 13 | 14 | public static final String HBASE_TABLE_NAME = "hbase.table.name"; 15 | public static final String HBASE_COLUMN_NAME = "hbase.column.name"; 16 | } 17 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hbase/flink-learning-connectors-hbase-2.2/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false 10 | 11 | # HBase 12 | hbase.zookeeper.quorum=localhost:2181 13 | hbase.client.retries.number=1 14 | hbase.master.info.port=-1 15 | hbase.zookeeper.property.clientPort=2081 16 | hbase.rpc.timeout=30000 17 | hbase.client.operation.timeout=30000 18 | hbase.client.scanner.timeout.period=30000 19 | 20 | # HBase table name 21 | hbase.table.name=zhisheng_stream 22 | hbase.column.name=info_stream -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hbase/flink-learning-connectors-hbase-2.2/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hdfs/README.md: -------------------------------------------------------------------------------- 1 | 模版项目,不做任何代码编写,方便创建新的 module 时复制 -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hdfs/src/main/java/com/zhisheng/connectors/hdfs/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.hdfs; 2 | 3 | 4 | import com.zhisheng.common.model.MetricEvent; 5 | import com.zhisheng.common.utils.ExecutionEnvUtil; 6 | import 
com.zhisheng.common.utils.KafkaConfigUtil; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.apache.flink.api.java.utils.ParameterTool; 9 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 10 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 11 | 12 | /** 13 | * blog:http://www.54tianzhisheng.cn/ 14 | * 微信公众号:zhisheng 15 | */ 16 | @Slf4j 17 | public class Main { 18 | public static void main(String[] args) throws Exception { 19 | final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args); 20 | StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool); 21 | DataStreamSource data = KafkaConfigUtil.buildSource(env); 22 | 23 | 24 | env.execute("flink learning connectors hdfs"); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hdfs/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hdfs/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hive/README.md: -------------------------------------------------------------------------------- 1 | ## Flink connector Hive 2 | 3 | Flink 1.9 版本开始支持 Hive Connector 4 | 5 | https://ci.apache.org/projects/flink/flink-docs-release-1.9/dev/table/hive/ -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-hive/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-connectors 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-connectors-hive 13 | pom 14 | 15 | flink-learning-connectors-hive-1.2.2 16 | flink-learning-connectors-hive-2.2.0 17 | flink-learning-connectors-hive-2.3.6 18 | flink-learning-connectors-hive-3.1.2 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-influxdb/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connector influxDB -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-influxdb/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false 10 | 11 | # influxDB 12 | 13 | influxdb.url=http://localhost:8086 
14 | influxdb.username=root 15 | influxdb.password=root 16 | influxdb.database=metric-db 17 | influxdb.batchActions=2000 18 | influxdb.flushDuration=100 19 | influxdb.flushDurationTimeUnit=MILLISECONDS 20 | influxdb.enableGzip=false 21 | influxdb.createDatabase=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-influxdb/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-jdbc/src/main/java/com/zhisheng/connectors/jdbc/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.jdbc; 2 | 3 | /** 4 | * Desc: sink to mysql 5 | * Created by zhisheng on 2019-10-28 18:49 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class Main { 10 | public static void main(String[] args) { 11 | 12 | 13 | 14 | 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-jdbc/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | stream.parallelism=1 5 | stream.checkpoint.interval=1000 6 | stream.checkpoint.enable=false 7 | 8 | # JDBC 9 | 10 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-jdbc/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-kafka/README.md: -------------------------------------------------------------------------------- 1 | ## Flink connector Kafka 2 | 3 | [http://www.54tianzhisheng.cn/2019/01/06/Flink-Kafka-sink/](http://www.54tianzhisheng.cn/2019/01/06/Flink-Kafka-sink/) -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-kafka/src/main/java/com/zhisheng/connectors/kafka/FlinkKafkaConsumerTest2.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.kafka; 2 | 3 | import com.zhisheng.common.utils.ExecutionEnvUtil; 4 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 5 | import org.apache.flink.api.java.utils.ParameterTool; 6 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 7 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; 8 | 9 | import java.util.Properties; 10 | 11 | import static com.zhisheng.common.utils.KafkaConfigUtil.buildKafkaProps; 12 | 13 | /** 14 | * Desc: 15 | * Created by zhisheng on 2020-03-13 15:20 16 | * blog:http://www.54tianzhisheng.cn/ 17 | * 微信公众号:zhisheng 18 | */ 19 | public class FlinkKafkaConsumerTest2 { 20 | public static void main(String[] args) throws Exception { 21 | final ParameterTool parameterTool 
= ExecutionEnvUtil.createParameterTool(args); 22 | StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool); 23 | env.setParallelism(1); 24 | Properties props = buildKafkaProps(parameterTool); 25 | 26 | FlinkKafkaConsumer consumer = new FlinkKafkaConsumer<>("user_behavior_sink", new SimpleStringSchema(), props); 27 | 28 | env.addSource(consumer).print(); 29 | 30 | env.execute("flink kafka connector test"); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-kafka/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metrics-group-test 3 | kafka.zookeeper.connect=xxx:2181 4 | metrics.topic=zhisheng 5 | logs.topic=xxx 6 | kafka.sink.brokers=localhost:9092 7 | kafka.sink.topic=metric-test 8 | stream.parallelism=5 9 | stream.checkpoint.interval=1000 10 | stream.checkpoint.enable=false 11 | stream.sink.parallelism=5 -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-kafka/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-kudu/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connectors Kudu 2 | 3 | https://github.com/apache/bahir-flink/blob/master/flink-connector-kudu/README.md 4 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-kudu/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-connectors 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-connectors-kudu 13 | 14 | 15 | 1.9.0 16 | 17 | 18 | 19 | 20 | 21 | org.apache.kudu 22 | kudu-client 23 | ${kudu.version} 24 | 25 | 26 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-mysql/README.md: -------------------------------------------------------------------------------- 1 | ## Flink connector MySQL 2 | 3 | [http://www.54tianzhisheng.cn/2019/01/15/Flink-MySQL-sink/](http://www.54tianzhisheng.cn/2019/01/15/Flink-MySQL-sink/) -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-mysql/src/main/java/com/zhisheng/connectors/mysql/model/Student.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.mysql.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019-02-17 10 | * Blog: http://www.54tianzhisheng.cn/tags/Flink/ 11 | */ 12 | 13 | @Data 14 | @AllArgsConstructor 15 | @NoArgsConstructor 16 | public class Student { 17 | public int id; 18 | public String name; 19 | public String password; 20 | public int age; 21 | } 22 | -------------------------------------------------------------------------------- 
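The MySQL connector module above only contains the `Student` model so far. A bare-bones sketch of a per-record JDBC sink for it — no batching, retries, or connection pooling, and the JDBC URL, credentials and table name are placeholders:

```java
package com.zhisheng.connectors.mysql;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

import com.zhisheng.connectors.mysql.model.Student;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

public class StudentSinkToMySQL extends RichSinkFunction<Student> {

    private transient Connection connection;
    private transient PreparedStatement statement;

    @Override
    public void open(Configuration parameters) throws Exception {
        // Placeholder connection settings; swap in your own host, database and credentials
        connection = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/test?useSSL=false", "root", "root");
        statement = connection.prepareStatement(
                "insert into student (id, name, password, age) values (?, ?, ?, ?)");
    }

    @Override
    public void invoke(Student student, Context context) throws Exception {
        // One insert per record; fine for a demo, not for production volumes
        statement.setInt(1, student.getId());
        statement.setString(2, student.getName());
        statement.setString(3, student.getPassword());
        statement.setInt(4, student.getAge());
        statement.executeUpdate();
    }

    @Override
    public void close() throws Exception {
        if (statement != null) {
            statement.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
}
```

Wiring it up is just `studentStream.addSink(new StudentSinkToMySQL())`.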
/flink-learning-connectors/flink-learning-connectors-mysql/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=metric-group 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=student 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-mysql/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-netty/src/main/java/com/zhisheng/connectors/netty/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.netty; 2 | 3 | import com.zhisheng.common.utils.ExecutionEnvUtil; 4 | import org.apache.flink.api.java.utils.ParameterTool; 5 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 6 | 7 | /** 8 | * Desc: Netty connector 9 | * Created by zhisheng on 2019-05-04 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class Main { 14 | public static void main(String[] args) throws Exception { 15 | ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL; 16 | StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool); 17 | 18 | 19 | 20 | env.execute("flink netty connector"); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-nifi/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-connectors-nifi 2 | 3 | **NiFi 介绍**:An easy to use, powerful, and reliable system to process and distribute data. 
4 | 5 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/img/2019-11-24-030837.jpg) 6 | 7 | 8 | [Apache NiFi的一些学习资源](https://zhuanlan.zhihu.com/p/58168227) -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-nifi/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | stream.parallelism=4 2 | stream.sink.parallelism=4 3 | stream.default.parallelism=4 4 | stream.checkpoint.interval=1000 5 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-nifi/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-pulsar/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-connectors-pulsar 2 | 3 | **Pulsar 介绍**: 4 | 5 | [Pulsar 官网](https://pulsar.apache.org/) 6 | 7 | [Introduction to the Apache Pulsar pub-sub messaging platform](https://streaml.io/blog/intro-to-pulsar) 8 | 9 | 10 | **Pulsar & Flink**: 11 | 12 | [pulsar-flink](https://github.com/streamnative/pulsar-flink) 13 | 14 | [When Flink & Pulsar Come Together](https://flink.apache.org/2019/05/03/pulsar-flink.html) 15 | 16 | [Flink Pulsar Connector](https://cwiki.apache.org/confluence/display/FLINK/FLIP-72%3A+Introduce+Pulsar+Connector) 17 | 18 | 19 | **Pulsar VS Kafka**: 20 | 21 | [雅虎日本如何用 Pulsar 构建日均千亿的消息平台?](https://www.infoq.cn/article/pcfrbUj7THZH_qs9E6ZV) -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-pulsar/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-connectors 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-connectors-pulsar 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-pulsar/src/main/java/com/zhisheng/connectors/pulsar/PulsarSinkMain.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.pulsar; 2 | 3 | /** 4 | * Desc: Pulsar sink 5 | * Created by zhisheng on 2019-11-30 10:16 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class PulsarSinkMain { 10 | public static void main(String[] args) { 11 | 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-pulsar/src/main/java/com/zhisheng/connectors/pulsar/PulsarSourceMain.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.pulsar; 2 | 3 | /** 4 | * Desc: Pulsar Source 5 | * Created by zhisheng on 2019-11-30 10:15 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class PulsarSourceMain { 10 | public static void main(String[] args) { 11 | 12 | } 13 | } 14 | -------------------------------------------------------------------------------- 
/flink-learning-connectors/flink-learning-connectors-pulsar/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | stream.parallelism=4 2 | stream.sink.parallelism=4 3 | stream.default.parallelism=4 4 | stream.checkpoint.interval=1000 5 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-pulsar/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rabbitmq/README.md: -------------------------------------------------------------------------------- 1 | ## Flink connector RabbitMQ 2 | 3 | [http://www.54tianzhisheng.cn/2019/01/20/Flink-RabbitMQ-sink/](http://www.54tianzhisheng.cn/2019/01/20/Flink-RabbitMQ-sink/) -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rabbitmq/src/main/java/com/zhisheng/connectors/rabbitmq/model/EndPoint.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rabbitmq.model; 2 | 3 | import com.rabbitmq.client.Channel; 4 | import com.rabbitmq.client.Connection; 5 | import com.rabbitmq.client.ConnectionFactory; 6 | 7 | import java.io.IOException; 8 | 9 | /** 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class EndPoint { 14 | protected Channel channel; 15 | protected Connection connection; 16 | protected String endPointName; 17 | 18 | public EndPoint(String endpointName) throws Exception { 19 | this.endPointName = endpointName; 20 | 21 | ConnectionFactory factory = new ConnectionFactory(); 22 | 23 | factory.setHost("127.0.0.1"); 24 | factory.setUsername("admin"); 25 | factory.setPassword("admin"); 26 | factory.setPort(5672); 27 | 28 | connection = factory.newConnection(); 29 | 30 | 31 | channel = connection.createChannel(); 32 | 33 | channel.queueDeclare(endpointName, false, false, false, null); 34 | } 35 | 36 | /** 37 | * 关闭channel和connection。并非必须,因为隐含是自动调用的 38 | * 39 | * @throws IOException 40 | */ 41 | public void close() throws Exception { 42 | this.channel.close(); 43 | this.connection.close(); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rabbitmq/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=student 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false 10 | rmq.host=localhost 11 | rmq.port=5672 12 | rmq.user=admin 13 | rmq.password=admin -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rabbitmq/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 
| 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-redis/README.md: -------------------------------------------------------------------------------- 1 | ### Flink connector Redis 2 | 3 | 利用自带的 Redis Connector 从 Kafka 中读取数据,然后写入到 Redis。 4 | 5 | Redis 分三种情况: 6 | 7 | + 单机 Redis 8 | 9 | + Redis 集群 10 | 11 | + Redis Sentinels -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-redis/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false 10 | redis.host=127.0.0.1 -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-redis/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-redis/src/test/java/RedisTest.java: -------------------------------------------------------------------------------- 1 | import redis.clients.jedis.Jedis; 2 | 3 | /** 4 | * Desc: 验证数据已经写入到 Redis 5 | * Created by zhisheng on 2019-04-29 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class RedisTest { 10 | public static void main(String[] args) { 11 | Jedis jedis = new Jedis("127.0.0.1"); 12 | System.out.println("Server is running: " + jedis.ping()); 13 | System.out.println("result:" + jedis.hgetAll("zhisheng")); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-connectors-rocketmq 2 | 3 | Flink 消费 RocketMQ 数据,转换后再将转换后到数据发送到 RocketMQ,demo 类可以参考 RocketMQFlinkExample 类。 -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/java/com/zhisheng/connectors/rocketmq/RocketMQUtils.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rocketmq; 2 | 3 | import java.util.Properties; 4 | 5 | /** 6 | * Desc: RocketMQ 工具类 7 | * Created by zhisheng on 2019-06-05 8 | * blog:http://www.54tianzhisheng.cn/ 9 | * 微信公众号:zhisheng 10 | */ 11 | public final class RocketMQUtils { 12 | 13 | public static int getInteger(Properties props, String key, int defaultValue) { 14 | return Integer.parseInt(props.getProperty(key, String.valueOf(defaultValue))); 15 | } 16 | 17 | public static long getLong(Properties props, String key, long defaultValue) { 18 | return Long.parseLong(props.getProperty(key, String.valueOf(defaultValue))); 19 | } 20 | 21 | public static boolean getBoolean(Properties props, String key, boolean defaultValue) { 22 | return Boolean.parseBoolean(props.getProperty(key, String.valueOf(defaultValue))); 23 | } 24 | 25 | } 
26 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/java/com/zhisheng/connectors/rocketmq/RunningChecker.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rocketmq; 2 | 3 | import java.io.Serializable; 4 | 5 | /** 6 | * Desc: 7 | * Created by zhisheng on 2019-06-05 8 | * blog:http://www.54tianzhisheng.cn/ 9 | * 微信公众号:zhisheng 10 | */ 11 | public class RunningChecker implements Serializable { 12 | private volatile boolean isRunning = false; 13 | 14 | public boolean isRunning() { 15 | return isRunning; 16 | } 17 | 18 | public void setRunning(boolean running) { 19 | isRunning = running; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/java/com/zhisheng/connectors/rocketmq/common/selector/DefaultTopicSelector.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rocketmq.common.selector; 2 | 3 | /** 4 | * Desc: 5 | * Created by zhisheng on 2019-06-05 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class DefaultTopicSelector implements TopicSelector { 10 | 11 | private final String topicName; 12 | private final String tagName; 13 | 14 | public DefaultTopicSelector(final String topicName) { 15 | this(topicName, ""); 16 | } 17 | 18 | public DefaultTopicSelector(String topicName, String tagName) { 19 | this.topicName = topicName; 20 | this.tagName = tagName; 21 | } 22 | 23 | @Override 24 | public String getTopic(T tuple) { 25 | return topicName; 26 | } 27 | 28 | @Override 29 | public String getTag(T tuple) { 30 | return tagName; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/java/com/zhisheng/connectors/rocketmq/common/selector/TopicSelector.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rocketmq.common.selector; 2 | 3 | import java.io.Serializable; 4 | 5 | /** 6 | * Desc: 7 | * Created by zhisheng on 2019-06-05 8 | * blog:http://www.54tianzhisheng.cn/ 9 | * 微信公众号:zhisheng 10 | */ 11 | public interface TopicSelector extends Serializable { 12 | 13 | String getTopic(T tuple); 14 | 15 | String getTag(T tuple); 16 | 17 | } -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/java/com/zhisheng/connectors/rocketmq/common/serialization/KeyValueDeserializationSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rocketmq.common.serialization; 2 | 3 | import org.apache.flink.api.java.typeutils.ResultTypeQueryable; 4 | 5 | import java.io.Serializable; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019-06-05 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public interface KeyValueDeserializationSchema extends ResultTypeQueryable, Serializable { 14 | T deserializeKeyAndValue(byte[] key, byte[] value); 15 | } 16 | -------------------------------------------------------------------------------- 
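The `KeyValueDeserializationSchema` interface above (generic over the produced type `T` in the original source, even though the type parameters are lost in this dump) is what the RocketMQ source uses to turn each message's key and value bytes into a record. A minimal sketch of an implementation that joins key and value into a `String` could look like the following; the delimiter and null handling are illustrative choices, not the module's actual schema.

```java
import com.zhisheng.connectors.rocketmq.common.serialization.KeyValueDeserializationSchema;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;

import java.nio.charset.StandardCharsets;

/**
 * Sketch of a KeyValueDeserializationSchema implementation that produces
 * "key:value" strings; delimiter and null handling are illustrative.
 */
public class SimpleStringKeyValueDeserializationSchema
        implements KeyValueDeserializationSchema<String> {

    private static final long serialVersionUID = 1L;

    @Override
    public String deserializeKeyAndValue(byte[] key, byte[] value) {
        String k = key == null ? "" : new String(key, StandardCharsets.UTF_8);
        String v = value == null ? "" : new String(value, StandardCharsets.UTF_8);
        return k + ":" + v;
    }

    @Override
    public TypeInformation<String> getProducedType() {
        // Required by ResultTypeQueryable so Flink knows the produced type.
        return BasicTypeInfo.STRING_TYPE_INFO;
    }
}
```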
/flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/java/com/zhisheng/connectors/rocketmq/common/serialization/KeyValueSerializationSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rocketmq.common.serialization; 2 | 3 | import java.io.Serializable; 4 | 5 | /** 6 | * Desc: 7 | * Created by zhisheng on 2019-06-05 8 | * blog:http://www.54tianzhisheng.cn/ 9 | * 微信公众号:zhisheng 10 | */ 11 | public interface KeyValueSerializationSchema extends Serializable { 12 | byte[] serializeKey(T tuple); 13 | 14 | byte[] serializeValue(T tuple); 15 | } 16 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/java/com/zhisheng/connectors/rocketmq/example/SimpleProducer.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.connectors.rocketmq.example; 2 | 3 | import org.apache.rocketmq.client.exception.MQClientException; 4 | import org.apache.rocketmq.client.producer.DefaultMQProducer; 5 | import org.apache.rocketmq.common.message.Message; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019-06-07 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class SimpleProducer { 14 | public static void main(String[] args) { 15 | DefaultMQProducer producer = new DefaultMQProducer("p001"); 16 | producer.setNamesrvAddr("localhost:9876"); 17 | try { 18 | producer.start(); 19 | } catch (MQClientException e) { 20 | e.printStackTrace(); 21 | } 22 | for (int i = 0; i < 10000; i++) { 23 | Message msg = new Message("zhisheng", "", "id_" + i, ("country_X province_" + i).getBytes()); 24 | try { 25 | producer.send(msg); 26 | } catch (Exception e) { 27 | e.printStackTrace(); 28 | } 29 | System.out.println("send " + i); 30 | try { 31 | Thread.sleep(10); 32 | } catch (InterruptedException e) { 33 | e.printStackTrace(); 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng 5 | stream.parallelism=4 6 | stream.sink.parallelism=4 7 | stream.default.parallelism=4 8 | stream.checkpoint.interval=1000 9 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-connectors/flink-learning-connectors-rocketmq/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-core/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-core 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-core/src/main/java/com/zhisheng/core/exception/FlinkRuntimeException.java: -------------------------------------------------------------------------------- 1 | package 
com.zhisheng.core.exception; 2 | 3 | /** 4 | * Desc: Base class of all Flink-specific unchecked exceptions. 5 | * Created by zhisheng on 2019-09-25 6 | * blog:http://www.54tianzhisheng.cn/ 7 | * 微信公众号:zhisheng 8 | */ 9 | public class FlinkRuntimeException extends RuntimeException { 10 | 11 | private static final long serialVersionUID = 193141189399279147L; 12 | 13 | /** 14 | * Creates a new Exception with the given message and null as the cause. 15 | * 16 | * @param message The exception message 17 | */ 18 | public FlinkRuntimeException(String message) { 19 | super(message); 20 | } 21 | 22 | /** 23 | * Creates a new exception with a null message and the given cause. 24 | * 25 | * @param cause The exception that caused this exception 26 | */ 27 | public FlinkRuntimeException(Throwable cause) { 28 | super(cause); 29 | } 30 | 31 | /** 32 | * Creates a new exception with the given message and cause. 33 | * 34 | * @param message The exception message 35 | * @param cause The exception that caused this exception 36 | */ 37 | public FlinkRuntimeException(String message, Throwable cause) { 38 | super(message, cause); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /flink-learning-core/src/main/java/com/zhisheng/core/factory/DeserializerFactory.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.core.factory; 2 | 3 | /** 4 | * Desc: 5 | * blog:http://www.54tianzhisheng.cn/ 6 | * 微信公众号:zhisheng 7 | */ 8 | public class DeserializerFactory { 9 | } 10 | -------------------------------------------------------------------------------- /flink-learning-core/src/main/java/com/zhisheng/core/factory/SerializerFactory.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.core.factory; 2 | 3 | /** 4 | * Desc: 5 | * blog:http://www.54tianzhisheng.cn/ 6 | * 微信公众号:zhisheng 7 | */ 8 | public class SerializerFactory { 9 | } 10 | -------------------------------------------------------------------------------- /flink-learning-core/src/main/java/com/zhisheng/core/factory/SinkFactory.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.core.factory; 2 | 3 | /** 4 | * Desc: 5 | * blog:http://www.54tianzhisheng.cn/ 6 | * 微信公众号:zhisheng 7 | */ 8 | public class SinkFactory { 9 | } 10 | -------------------------------------------------------------------------------- /flink-learning-core/src/main/java/com/zhisheng/core/factory/SourceFactory.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.core.factory; 2 | 3 | /** 4 | * Desc: 5 | * blog:http://www.54tianzhisheng.cn/ 6 | * 微信公众号:zhisheng 7 | */ 8 | public class SourceFactory { 9 | } 10 | -------------------------------------------------------------------------------- /flink-learning-datalake/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-datalake 2 | 3 | Flink Data Lake 项目里面包含了数据湖四大组件的基础、原理、实战、应用、源码相关内容 4 | 5 | + [Hudi](https://github.com/apache/hudi) 6 | 7 | + [Iceberg](https://github.com/apache/iceberg) 8 | 9 | + [Delta lake](https://github.com/delta-io/delta) 10 | 11 | + [Flink Table Store](https://nightlies.apache.org/flink/flink-table-store-docs-release-0.2/docs/try-table-store/quick-start/) 12 | 13 | 14 | 15 | ### 数据湖资料 16 | 17 | + [delta lake 书籍](https://books.japila.pl/delta-lake-internals/overview/) 18 | 19 | 20 | 
### 数据湖论文 21 | 22 | + [Lakehouse Architecture](http://cidrdb.org/cidr2021/papers/cidr2021_paper17.pdf) -------------------------------------------------------------------------------- /flink-learning-datalake/flink-learning-datalake-deltalake/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-datalake 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-datalake-deltalake 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-datalake/flink-learning-datalake-hudi/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-datalake 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-datalake-hudi 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-datalake/flink-learning-datalake-iceberg/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-datalake 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-datalake-iceberg 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-datalake/flink-learning-table-store/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-table-store 2 | 3 | + [Flink Table Store](https://nightlies.apache.org/flink/flink-table-store-docs-release-0.2/docs/try-table-store/quick-start/) 4 | 5 | + [GitHub](https://github.com/apache/flink-table-store) 6 | 7 | + [Flink Table Store 是什么?](https://www.yuque.com/lijinsongzhixin/qxwonh/iktr4c) 8 | 9 | + [Flink Table Store ——从计算到存储提升流批统一端到端用户体验](https://mp.weixin.qq.com/s/siMnKbWzVFU4fic5-XFoRw) 10 | 11 | + [ Demo 使用 Flink CDC 写入 Table Store,使用 Spark 查询 Table Store](https://www.yuque.com/lijinsongzhixin/qxwonh/yhktz8) 12 | 13 | ### 架构 14 | 15 | ![](https://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2022-12-16-075225.jpg) 16 | 17 | -------------------------------------------------------------------------------- /flink-learning-datalake/flink-learning-table-store/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-datalake 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-table-store 13 | 14 | 15 | 16 | org.apache.flink 17 | flink-table-store-dist 18 | 0.2.1 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /flink-learning-datalake/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-datalake 13 | pom 14 | 15 | flink-learning-datalake-hudi 16 | flink-learning-datalake-iceberg 17 | flink-learning-datalake-deltalake 18 | flink-learning-table-store 19 | 20 | 21 | 22 | 8 23 | 8 24 | 25 | 26 | -------------------------------------------------------------------------------- /flink-learning-examples/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-example 2 | 3 | 该 module 存放一些简单的测试用例。 4 | 5 | #### batch 6 | 7 | + accumulator 8 | + wordcount 9 | 10 | 11 | #### Streaming 12 | 13 | + async 14 | + broadcast 15 | + checkpoint 16 | + exception 17 | + file 18 | + iteration 19 | + join 20 | 
+ machine-learning 21 | + remote 22 | + sideoutput 23 | + socket 24 | + watermark 25 | + wordcount 26 | -------------------------------------------------------------------------------- /flink-learning-examples/src/main/java/com/zhisheng/examples/streaming/file/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.examples.streaming.file; 2 | 3 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 4 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 5 | 6 | /** 7 | * 从文件读取数据 & 数据写入到文件 8 | * blog:http://www.54tianzhisheng.cn/ 9 | * 微信公众号:zhisheng 10 | */ 11 | public class Main { 12 | public static void main(String[] args) throws Exception { 13 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 14 | env.setParallelism(1); 15 | 16 | DataStreamSource data = env.readTextFile("file:///usr/local/blink-1.5.1/README.txt"); 17 | data.print(); 18 | 19 | //两种格式都行,另外还支持写入到 hdfs 20 | // data.writeAsText("file:///usr/local/blink-1.5.1/README1.txt"); 21 | data.writeAsText("/usr/local/blink-1.5.1/README1.txt"); 22 | 23 | env.execute(); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /flink-learning-examples/src/main/java/com/zhisheng/examples/streaming/ml/IncrementalLearningSkeletonData.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.examples.streaming.ml; 2 | 3 | /** 4 | * Data for IncrementalLearningSkeletonITCase. 5 | * blog:http://www.54tianzhisheng.cn/ 6 | * 微信公众号:zhisheng 7 | */ 8 | public class IncrementalLearningSkeletonData { 9 | 10 | public static final String RESULTS = "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + 11 | "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "0\n" + 12 | "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + 13 | "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + 14 | "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + 15 | "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + "0\n" + 16 | "0\n" + "0\n" + "0\n" + "0\n" + "0\n"; 17 | 18 | private IncrementalLearningSkeletonData() { 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /flink-learning-examples/src/main/java/com/zhisheng/examples/streaming/remote/Main.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.examples.streaming.remote; 2 | 3 | import org.apache.flink.api.java.ExecutionEnvironment; 4 | 5 | /** 6 | * 向远程集群提交 job 7 | * blog:http://www.54tianzhisheng.cn/ 8 | * 微信公众号:zhisheng 9 | */ 10 | public class Main { 11 | public static void main(String[] args) throws Exception { 12 | ExecutionEnvironment env = ExecutionEnvironment.createRemoteEnvironment( 13 | "localhost", 14 | 6123, 15 | 1, 16 | "/usr/local/blink-1.5.1/examples/streaming/SessionWindowing.jar" 17 | ); 18 | 19 | 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /flink-learning-examples/src/main/java/com/zhisheng/examples/streaming/watermark/Word.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.examples.streaming.watermark; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import 
lombok.NoArgsConstructor; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019-07-07 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | @Data 14 | @AllArgsConstructor 15 | @NoArgsConstructor 16 | public class Word { 17 | private String word; 18 | private int count; 19 | private long timestamp; 20 | } 21 | -------------------------------------------------------------------------------- /flink-learning-examples/src/main/java/com/zhisheng/examples/streaming/watermark/WordPunctuatedWatermark.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.examples.streaming.watermark; 2 | 3 | import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks; 4 | import org.apache.flink.streaming.api.watermark.Watermark; 5 | 6 | import javax.annotation.Nullable; 7 | 8 | /** 9 | * Desc: Punctuated Watermark 10 | * Created by zhisheng on 2019-07-09 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | public class WordPunctuatedWatermark implements AssignerWithPunctuatedWatermarks { 15 | 16 | @Nullable 17 | @Override 18 | public Watermark checkAndGetNextWatermark(Word lastElement, long extractedTimestamp) { 19 | return extractedTimestamp % 3 == 0 ? new Watermark(extractedTimestamp) : null; 20 | // return new Watermark(extractedTimestamp); 21 | } 22 | 23 | @Override 24 | public long extractTimestamp(Word element, long previousElementTimestamp) { 25 | return element.getTimestamp(); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /flink-learning-examples/src/main/java/com/zhisheng/examples/util/MySQLUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.examples.util; 2 | 3 | import com.google.common.base.Throwables; 4 | import lombok.extern.slf4j.Slf4j; 5 | 6 | import java.sql.Connection; 7 | import java.sql.DriverManager; 8 | 9 | @Slf4j 10 | public class MySQLUtil { 11 | 12 | public static Connection getConnection(String driver, String url, String user, String password) { 13 | Connection con = null; 14 | try { 15 | Class.forName(driver); 16 | con = DriverManager.getConnection(url, user, password); 17 | } catch (Exception e) { 18 | log.error("-----------mysql get connection has exception , msg = " + Throwables.getStackTraceAsString(e)); 19 | } 20 | return con; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /flink-learning-examples/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng_metrics 5 | stream.parallelism=5 6 | stream.checkpoint.interval=1000 7 | stream.checkpoint.enable=false 8 | #stream.checkpoint.type=memory 9 | #stream.checkpoint.type=fs 10 | stream.checkpoint.type=rocksdb 11 | #stream.checkpoint.dir=file:///usr/local/state/ 12 | stream.checkpoint.dir=/Users/zhisheng/Desktop 13 | 14 | 15 | #mysql 16 | mysql.host=localhost 17 | mysql.port=3306 18 | mysql.database=test 19 | mysql.username=root 20 | mysql.password=root123456 -------------------------------------------------------------------------------- /flink-learning-examples/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - 
%msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-extends/FlinkLogKafkaAppender/KafkaAppenderCommon/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | FlinkLogKafkaAppender 5 | com.zhisheng.flink 6 | 1.0-SNAPSHOT 7 | 8 | 4.0.0 9 | 10 | KafkaAppenderCommon 11 | 12 | KafkaAppenderCommon 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /flink-learning-extends/FlinkLogKafkaAppender/KafkaAppenderCommon/src/main/java/com/zhisheng/flink/model/LogEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.flink.model; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | 8 | @Data 9 | public class LogEvent { 10 | 11 | private String source; // default is flink, maybe others will use this kafka appender in future 12 | 13 | private String id; // log id, default it is UUID 14 | 15 | private Long timestamp; 16 | 17 | private String content; // log message 18 | 19 | private Map tags = new HashMap<>(); // tags of the log, eg: host_name, application_id, job_name etc 20 | 21 | } 22 | -------------------------------------------------------------------------------- /flink-learning-extends/FlinkLogKafkaAppender/KafkaAppenderCommon/src/main/java/com/zhisheng/flink/util/JacksonUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.flink.util; 2 | 3 | 4 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; 5 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; 6 | 7 | public class JacksonUtil { 8 | 9 | private final static ObjectMapper mapper = new ObjectMapper(); 10 | 11 | /** 12 | * 将对象转换成普通的 JSON 数据 13 | * 14 | * @param value 15 | * @return 16 | * @throws JsonProcessingException 17 | */ 18 | public static String toJson(Object value) throws JsonProcessingException { 19 | return mapper.writeValueAsString(value); 20 | } 21 | 22 | 23 | /** 24 | * 将对象转换成结构化的 JSON 数据 25 | * 26 | * @param value 27 | * @return 28 | * @throws JsonProcessingException 29 | */ 30 | public static String toFormatJson(Object value) throws JsonProcessingException { 31 | return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(value); 32 | } 33 | 34 | 35 | } 36 | -------------------------------------------------------------------------------- /flink-learning-extends/FlinkLogKafkaAppender/README.md: -------------------------------------------------------------------------------- 1 | ## FlinkLogKafkaAppender 2 | 3 | + Log4jKafkaAppender: 适用 Flink 1.10 版本(使用的是 log4j) 4 | + Log4j2KafkaAppender:适用 Flink 1.10 之后版本(使用的是 log4j2) 5 | 6 | ### 使用方式 7 | 8 | 1、将项目打出来打 kafka appender jar 包放到 flink lib 目录 9 | 10 | 2、按照项目提示的 flink log4j 配置去配置 flink conf 下面的 log4j.properties 文件,其中 k8s 的要配置 log4j-console.properties 文件 -------------------------------------------------------------------------------- /flink-learning-extends/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-extends 2 | 3 | Flink 项目的扩展项目,比如自定义 Flink SQL Connector / Metrics Reporter / 日志收集 等扩展项目 -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-kafka/README.md: 
-------------------------------------------------------------------------------- 1 | ### flink-metrics-kafka 2 | 3 | compile the module and move the target `flink-metrics-kafka.jar` to flink lib folder, and add metrics reporter configuration in the `flink-config.xml`. eg: 4 | 5 | ```xml 6 | #============================================================================== 7 | #### Kafka Metrics Reporter 8 | ###============================================================================== 9 | 10 | metrics.reporter.kafka.class: org.apache.flink.metrics.kafka.KafkaReporter 11 | 12 | metrics.reporter.kafka.bootstrapServers: http://localhost:9092 13 | 14 | metrics.reporter.kafka.topic: metrics-flink-jobs 15 | 16 | metrics.reporter.kafka.acks: 0 17 | 18 | metrics.reporter.kafka.compressionType: lz4 19 | 20 | metrics.reporter.kafka.bufferMemory: 33554432 21 | 22 | metrics.reporter.kafka.retries: 0 23 | 24 | metrics.reporter.kafka.batchSize: 16384 25 | 26 | metrics.reporter.kafka.lingerMs: 5 27 | 28 | metrics.reporter.kafka.maxRequestSize: 1048576 29 | 30 | metrics.reporter.kafka.requestTimeoutMs: 30000 31 | 32 | ``` 33 | -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-kafka/src/main/java/org/apache/flink/metrics/kafka/KafkaReporterFactory.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.metrics.kafka; 2 | 3 | import org.apache.flink.metrics.reporter.InterceptInstantiationViaReflection; 4 | import org.apache.flink.metrics.reporter.MetricReporterFactory; 5 | 6 | import java.util.Properties; 7 | 8 | /** 9 | * {@link MetricReporterFactory} for {@link KafkaReporter}. 10 | */ 11 | @InterceptInstantiationViaReflection(reporterClassName = "org.apache.flink.metrics.kafka.KafkaReporter") 12 | public class KafkaReporterFactory implements MetricReporterFactory { 13 | 14 | @Override 15 | public KafkaReporter createMetricReporter(Properties properties) { 16 | return new KafkaReporter(); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-kafka/src/main/java/org/apache/flink/metrics/kafka/util/JacksonUtil.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.metrics.kafka.util; 2 | 3 | 4 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; 5 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; 6 | 7 | public class JacksonUtil { 8 | 9 | private final static ObjectMapper mapper = new ObjectMapper(); 10 | 11 | /** 12 | * 将对象转换成普通的 JSON 数据 13 | * 14 | * @param value 15 | * @return 16 | * @throws JsonProcessingException 17 | */ 18 | public static String toJson(Object value) throws JsonProcessingException { 19 | return mapper.writeValueAsString(value); 20 | } 21 | 22 | 23 | /** 24 | * 将对象转换成结构化的 JSON 数据 25 | * 26 | * @param value 27 | * @return 28 | * @throws JsonProcessingException 29 | */ 30 | public static String toFormatJson(Object value) throws JsonProcessingException { 31 | return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(value); 32 | } 33 | 34 | 35 | } 36 | -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-kafka/src/main/resources/META-INF/services/org.apache.flink.metrics.reporter.MetricReporterFactory: 
-------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | org.apache.flink.metrics.kafka.KafkaReporterFactory 17 | -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-prometheus/README.md: -------------------------------------------------------------------------------- 1 | ### flink-metrics-prometheus 2 | 3 | compile the module and move the target `flink-metrics-prometheus.jar` to flink lib folder, and add metrics reporter configuration in the `flink-config.xml`. eg: 4 | 5 | ```xml 6 | #============================================================================== 7 | # Metrics Reporter 8 | #============================================================================== 9 | 10 | metrics.reporter.promgateway.class: org.apache.flink.metrics.prometheus.PrometheusPushGatewayReporter 11 | 12 | metrics.reporter.promgateway.host: k8s 13 | # metrics.reporter.promgateway.host: localhost 14 | 15 | metrics.reporter.promgateway.port: 9091 16 | 17 | metrics.reporter.promgateway.clusterMode: k8s 18 | 19 | metrics.reporter.promgateway.jobName: flink-job 20 | 21 | metrics.reporter.promgateway.randomJobNameSuffix: false 22 | 23 | metrics.reporter.promgateway.deleteOnShutdown: true 24 | 25 | metrics.reporter.promgateway.interval: 5 SECONDS 26 | 27 | ``` 28 | 29 | ![](http://zhisheng-blog.oss-cn-hangzhou.aliyuncs.com/2022-05-08-074128.png) -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-prometheus/src/main/java/org/apache/flink/metrics/prometheus/ClusterMode.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.metrics.prometheus; 2 | 3 | /** 4 | * Desc: 5 | * Created by zhisheng on 2020-12-08 11:08 6 | */ 7 | public enum ClusterMode { 8 | 9 | /** 10 | * yarn mode. 11 | */ 12 | YARN, 13 | 14 | /** 15 | * K8s mode. 16 | */ 17 | K8S 18 | 19 | } 20 | -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-prometheus/src/main/resources/META-INF/NOTICE: -------------------------------------------------------------------------------- 1 | flink-metrics-prometheus 2 | Copyright 2014-2020 The Apache Software Foundation 3 | 4 | This product includes software developed at 5 | The Apache Software Foundation (http://www.apache.org/). 6 | 7 | This project bundles the following dependencies under the Apache Software License 2.0. 
(http://www.apache.org/licenses/LICENSE-2.0.txt) 8 | 9 | - io.prometheus:simpleclient:0.8.1 10 | - io.prometheus:simpleclient_common:0.8.1 11 | - io.prometheus:simpleclient_httpserver:0.8.1 12 | - io.prometheus:simpleclient_pushgateway:0.8.1 13 | -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/flink-metrics-prometheus/src/main/resources/META-INF/services/org.apache.flink.metrics.reporter.MetricReporterFactory: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | org.apache.flink.metrics.prometheus.PrometheusReporterFactory 17 | org.apache.flink.metrics.prometheus.PrometheusPushGatewayReporterFactory 18 | -------------------------------------------------------------------------------- /flink-learning-extends/flink-metrics/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-extends 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | pom 12 | 13 | flink-metrics 14 | 15 | 16 | flink-metrics-prometheus 17 | flink-metrics-kafka 18 | 19 | -------------------------------------------------------------------------------- /flink-learning-extends/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-extends 13 | pom 14 | 15 | 16 | flink-metrics 17 | FlinkLogKafkaAppender 18 | 19 | -------------------------------------------------------------------------------- /flink-learning-k8s/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-k8s 2 | 3 | 1、自定义构建 Flink 镜像 4 | 5 | ```shell 6 | ./build_flink_docker_images.sh flink $imageTag 7 | 8 | eg: ./build_flink_docker_images.sh flink 1.12.0-jar-pro-20220727 9 | ``` 10 | 11 | 2、Flink 任务提交到 K8s 集群,不同的运行模式: 12 | 13 | + Session mode 14 | + Native Application mode 15 | + Flink K8s Operator 16 | + Standalone mode 17 | 18 | 3、Ingress 19 | 20 | ```shell 21 | ./build_ingress.sh $cluster.id $namespace 22 | 23 | eg: ./build_ingress.sh statemachine-test1 namespace-flink 24 | ``` -------------------------------------------------------------------------------- /flink-learning-k8s/dockerfile/Dockerfile-example-statemachine: -------------------------------------------------------------------------------- 1 | FROM harbor.xxx.cn/flink/flink:1.12.0-20210625 2 | RUN mkdir -p $FLINK_HOME/usrlib 3 | COPY ./examples/streaming/StateMachineExample.jar $FLINK_HOME/usrlib/ -------------------------------------------------------------------------------- 
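With an image like the one above (the example job jar baked into `$FLINK_HOME/usrlib`), a native Application-mode submission typically looks like the sketch below. The cluster id and namespace reuse the values from the ingress example, the image tag comes from the Dockerfile's base image, and the `local://` jar path assumes `FLINK_HOME` resolves to `/opt/flink` inside the image; adjust all of them to your environment.

```shell
# Native Application mode on Kubernetes (Flink 1.12 CLI); names below are placeholders.
./bin/flink run-application \
    --target kubernetes-application \
    -Dkubernetes.cluster-id=statemachine-test1 \
    -Dkubernetes.namespace=namespace-flink \
    -Dkubernetes.container.image=harbor.xxx.cn/flink/flink:1.12.0-20210625 \
    local:///opt/flink/usrlib/StateMachineExample.jar
```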
/flink-learning-k8s/dockerfile/Dockerfile-flink-1.12.0-jar: -------------------------------------------------------------------------------- 1 | # 先构建好 Hadoop Hive 基础镜像 2 | FROM harbor.xxx.cn/flink/hadoop:2.9.2 3 | 4 | # 复制 Flink 1.12 jar 客户端 5 | 6 | WORKDIR $FLINK_HOME 7 | COPY flink-1.12.0-jar.tar.gz $FLINK_HOME 8 | 9 | RUN set -ex; \ 10 | tar -xf $FLINK_HOME/flink-1.12.0-jar.tar.gz --strip-components=1 -C $FLINK_HOME; \ 11 | rm $FLINK_HOME/flink-1.12.0-jar.tar.gz; \ 12 | chmod 777 $FLINK_HOME/log; 13 | 14 | 15 | # Configure container 16 | ENTRYPOINT ["/docker-entrypoint.sh"] 17 | EXPOSE 6123 8081 18 | CMD ["help"] -------------------------------------------------------------------------------- /flink-learning-k8s/dockerfile/Dockerfile-flink-1.12.0-sql: -------------------------------------------------------------------------------- 1 | FROM harbor.xxx.cn/flink/hadoop:2.9.2 2 | 3 | # 复制 Flink 1.12 SQL 客户端(含 SQL Connector 和 UDF) 4 | 5 | WORKDIR $FLINK_HOME 6 | COPY flink-1.12.0-sql.tar.gz $FLINK_HOME 7 | 8 | RUN set -ex; \ 9 | tar -xf $FLINK_HOME/flink-1.12.0-sql.tar.gz --strip-components=1 -C $FLINK_HOME; \ 10 | rm $FLINK_HOME/flink-1.12.0-sql.tar.gz; \ 11 | chmod 777 $FLINK_HOME/log; 12 | 13 | # Configure container 14 | ENTRYPOINT ["/docker-entrypoint.sh"] 15 | EXPOSE 6123 8081 16 | CMD ["help"] -------------------------------------------------------------------------------- /flink-learning-k8s/dockerfile/build_flink_docker_images.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | [ ! $1 ] && echo "未配置 镜像名" && exit 4 | [ ! $2 ] && echo "未配置 镜像版本" && exit 5 | 6 | docker login -u flink -p xxx harbor.xxx.cn/flink 7 | 8 | docker build -t harbor.xxx.cn/flink/$1:$2 . 9 | 10 | docker push harbor.xxx.cn/flink/$1:$2% -------------------------------------------------------------------------------- /flink-learning-k8s/dockerfile/build_ingress.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | [ ! $1 ] && echo "未配置 kubernetes.cluster-id" && exit 4 | 5 | echo "配置的 kubernetes.cluster-id 为 $1" 6 | 7 | [ ! 
$2 ] && echo "未配置 namespace" && exit 8 | 9 | echo "配置的 namespace 为 $2" 10 | 11 | cat ./ingress_template.yaml | sed 's/$K8S_CLUSTER_ID/'"$1"'/g' | sed 's/$K8S_NAMESPACE/'"$2"'/g' > $1-ingress.yaml 12 | kubectl apply -f $1-ingress.yaml 13 | -------------------------------------------------------------------------------- /flink-learning-k8s/dockerfile/ingress_template.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Ingress 3 | metadata: 4 | annotations: 5 | kubernetes.io/ingress.class: hke-nginx 6 | meta.helm.sh/release-name: $K8S_CLUSTER_ID 7 | meta.helm.sh/release-namespace: $K8S_NAMESPACE 8 | nginx.ingress.kubernetes.io/rewrite-target: /$2 9 | nginx.ingress.kubernetes.io/connection-proxy-header: close 10 | nginx.ingress.kubernetes.io/proxy-http-version: "1.0" 11 | labels: 12 | app.kubernetes.io/managed-by: Helm 13 | xxx.com/deployResource: HKE-FLINK-A-PROD-SH01 14 | xxx.com/env: PRO 15 | xxx.com/provider: xxx 16 | hke-app: $K8S_CLUSTER_ID 17 | name: $K8S_CLUSTER_ID 18 | namespace: $K8S_NAMESPACE 19 | spec: 20 | rules: 21 | - host: zhisheng-proxy-k8s.xxx.cn 22 | http: 23 | paths: 24 | - backend: 25 | serviceName: $K8S_CLUSTER_ID-rest 26 | servicePort: 8081 27 | path: /proxy/$K8S_CLUSTER_ID(/|$)(.*) 28 | pathType: Prefix% -------------------------------------------------------------------------------- /flink-learning-k8s/flink-k8s/README.md: -------------------------------------------------------------------------------- 1 | ## Flink-K8s 2 | 3 | 基于开源 1.12.0 版本添加一些自己改造的 feature -------------------------------------------------------------------------------- /flink-learning-k8s/flink-k8s/src/main/java/org/apache/flink/kubernetes/kubeclient/resources/KubernetesWatch.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.kubernetes.kubeclient.resources; 20 | 21 | import io.fabric8.kubernetes.client.Watch; 22 | 23 | /** 24 | * Watch resource in Kubernetes. 25 | */ 26 | public class KubernetesWatch extends KubernetesResource { 27 | 28 | public KubernetesWatch(Watch watch) { 29 | super(watch); 30 | } 31 | 32 | public void close() { 33 | getInternalResource().close(); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /flink-learning-k8s/flink-k8s/src/main/resources/META-INF/NOTICE: -------------------------------------------------------------------------------- 1 | flink-kubernetes 2 | Copyright 2014-2020 The Apache Software Foundation 3 | 4 | This project includes software developed at 5 | The Apache Software Foundation (http://www.apache.org/). 
6 | 7 | This project bundles the following dependencies under the Apache Software License 2.0 (http://www.apache.org/licenses/LICENSE-2.0.txt) 8 | 9 | - com.fasterxml.jackson.core:jackson-annotations:2.10.1 10 | - com.fasterxml.jackson.core:jackson-core:2.10.1 11 | - com.fasterxml.jackson.core:jackson-databind:2.10.1 12 | - com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.10.1 13 | - com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.10.1 14 | - com.squareup.okhttp3:logging-interceptor:3.12.6 15 | - com.squareup.okhttp3:okhttp:3.12.1 16 | - com.squareup.okio:okio:1.15.0 17 | - io.fabric8:kubernetes-client:4.9.2 18 | - io.fabric8:kubernetes-model:4.9.2 19 | - io.fabric8:kubernetes-model-common:4.9.2 20 | - io.fabric8:zjsonpatch:0.3.0 21 | - org.yaml:snakeyaml:1.27 22 | 23 | This project bundles the following dependencies under the BSD License. 24 | See bundled license files for details. 25 | 26 | - dk.brics.automaton:automaton:1.11-8 27 | -------------------------------------------------------------------------------- /flink-learning-k8s/flink-k8s/src/main/resources/META-INF/services/org.apache.flink.client.deployment.ClusterClientFactory: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | org.apache.flink.kubernetes.KubernetesClusterClientFactory -------------------------------------------------------------------------------- /flink-learning-k8s/flink-k8s/src/main/resources/META-INF/services/org.apache.flink.core.execution.PipelineExecutorFactory: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | org.apache.flink.kubernetes.executors.KubernetesSessionClusterExecutorFactory -------------------------------------------------------------------------------- /flink-learning-k8s/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-k8s 13 | pom 14 | 15 | 16 | flink-k8s 17 | 18 | 19 | -------------------------------------------------------------------------------- /flink-learning-monitor/README.md: -------------------------------------------------------------------------------- 1 | ## Flink 监控、告警、存储、可视化展示 2 | 3 | 1、[监控数据采集](./flink-learning-monitor-collector) 4 | 5 | 2、[告警](./flink-learning-monitor-alert) 6 | 7 | 3、[日志处理](./flink-learning-monitor-log) 8 | 9 | 4、[监控数据存储](./flink-learning-monitor-storage) 10 | 11 | 5、[PV/UV](./flink-learning-monitor-pvuv) 12 | 13 | 6、[监控数据可视化展示](./flink-learning-monitor-dashboard) -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/README.md: -------------------------------------------------------------------------------- 1 | ## Flink 监控告警 2 | 3 | ### Flink JobManager 4 | 5 | 6 | ### Flink TaskManager 7 | 8 | ### Flink Jobs 9 | 10 | 11 | ## 通知方式 12 | 13 | ### 钉钉 14 | 15 | + text 格式消息 16 | + markdown 格式消息 17 | + link 格式消息 18 | + 同一个钉钉地址发送多条消息 19 | + 同一条消息发送多个群 20 | + 钉钉工作通知 21 | 22 | 工具类 [DingDingGroupMsgUtil.java](./src/main/java/com/zhisheng/alert/utils/DingDingGroupMsgUtil.java) 23 | 24 | 如何使用,请参考 [DingDingMsgTest.java](./src/test/java/DingDingMsgTest.java) 25 | 26 | ### 邮件 27 | 28 | 29 | ### 短信 30 | 31 | 32 | ### 电话 -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/AlertEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import com.zhisheng.common.model.MetricEvent; 4 | import lombok.AllArgsConstructor; 5 | import lombok.Builder; 6 | import lombok.Data; 7 | import lombok.NoArgsConstructor; 8 | 9 | /** 10 | * Desc: alert event 11 | * Created by zhisheng on 2019/10/13 上午10:14 12 | * blog:http://www.54tianzhisheng.cn/ 13 | * 微信公众号:zhisheng 14 | */ 15 | @Data 16 | @NoArgsConstructor 17 | @AllArgsConstructor 18 | @Builder 19 | public class AlertEvent { 20 | 21 | private String type; 22 | 23 | private MetricEvent metricEvent; 24 | 25 | private boolean recover; 26 | 27 | private Long trigerTime; 28 | 29 | private Long recoverTime; 30 | } -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/AlertRule.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * Desc: alert rule 10 | * Created by zhisheng on 2019/10/16 下午5:07 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | @Data 15 | @AllArgsConstructor 16 | @NoArgsConstructor 17 | @Builder 18 | public class AlertRule { 19 | private Integer id; 20 | private String name; 21 | private String measurement; 22 | private String thresholds; 23 | } -------------------------------------------------------------------------------- 
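
A minimal sketch of how the AlertRule and AlertEvent models above could be wired together: the rule's thresholds string is parsed as a number and compared against a metric value, and an AlertEvent is built when the threshold is crossed. The RuleCheck class name, the externally supplied value parameter, and the single-number thresholds format are illustrative assumptions, not code from this repository.

```java
package com.zhisheng.alert.utils;

import com.zhisheng.alert.model.AlertEvent;
import com.zhisheng.alert.model.AlertRule;
import com.zhisheng.common.model.MetricEvent;

/**
 * Hypothetical helper: evaluates one already-extracted metric value against an AlertRule.
 * How the numeric value is pulled out of the MetricEvent is left to the caller.
 */
public class RuleCheck {

    public static AlertEvent check(AlertRule rule, MetricEvent metricEvent, double value) {
        // AlertRule stores thresholds as a String; assume it holds a single number here
        double threshold = Double.parseDouble(rule.getThresholds());
        if (value > threshold) {
            // build an alert event for the measurement that crossed its threshold
            return AlertEvent.builder()
                    .type(rule.getMeasurement())
                    .metricEvent(metricEvent)
                    .recover(false)
                    .trigerTime(System.currentTimeMillis())
                    .build();
        }
        return null; // threshold not crossed, no alert
    }
}
```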
/flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/AtMobiles.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.List; 6 | 7 | /** 8 | * @ 9 | * blog:http://www.54tianzhisheng.cn/ 10 | * 微信公众号:zhisheng 11 | */ 12 | @Data 13 | public class AtMobiles { 14 | /** 15 | * 被@人的手机号 16 | * 17 | * @return 18 | */ 19 | public List atMobiles; 20 | 21 | /** 22 | * @所有人时:true,否则为:false 23 | */ 24 | public Boolean isAtAll; 25 | } 26 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/BaseMessage.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import java.io.Serializable; 4 | 5 | /** 6 | * 请求消息的抽象类 7 | * blog:http://www.54tianzhisheng.cn/ 8 | * 微信公众号:zhisheng 9 | */ 10 | public abstract class BaseMessage implements Serializable { 11 | 12 | public BaseMessage() { 13 | init(); 14 | } 15 | 16 | /** 17 | * 消息类型 18 | */ 19 | protected MessageType msgtype; 20 | 21 | 22 | public MessageType getMsgtype() { 23 | return msgtype; 24 | } 25 | 26 | /** 27 | * 初始化 MessageType 方法 28 | */ 29 | protected abstract void init(); 30 | } 31 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/Email.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | import java.io.File; 8 | import java.util.Map; 9 | import java.util.Set; 10 | 11 | @Data 12 | @NoArgsConstructor 13 | @AllArgsConstructor 14 | public class Email { 15 | /** 16 | * 收件人 17 | */ 18 | private Set to; 19 | 20 | /** 21 | * 邮件主题 22 | */ 23 | private String subject; 24 | 25 | /** 26 | * 邮件正文 27 | */ 28 | private String content; 29 | 30 | /** 31 | * 正文是否是 HTML 32 | */ 33 | private boolean isHtml; 34 | 35 | /** 36 | * 附件路径 37 | */ 38 | private Map attachments; 39 | 40 | /** 41 | * 是否有附件 42 | */ 43 | private boolean isAttachment; 44 | } 45 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/LinkMessage.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | * 链接类型钉钉消息 9 | * blog:http://www.54tianzhisheng.cn/ 10 | * 微信公众号:zhisheng 11 | */ 12 | @Data 13 | @AllArgsConstructor 14 | @NoArgsConstructor 15 | public class LinkMessage extends BaseMessage { 16 | 17 | public Link link; 18 | 19 | @Override 20 | protected void init() { 21 | this.msgtype = MessageType.link; 22 | } 23 | 24 | @Data 25 | public static class Link { 26 | /** 27 | * 消息简介 28 | */ 29 | private String text; 30 | 31 | /** 32 | * 消息标题 33 | */ 34 | private String title; 35 | 36 | /** 37 | * 封面图片URL 38 | */ 39 | private String picUrl; 40 | 41 | /** 42 | * 消息跳转URL 43 | */ 44 | private String messageUrl; 45 | } 46 | } 47 | -------------------------------------------------------------------------------- 
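
For context on how these DingTalk message POJOs become a webhook payload, here is a hedged sketch that builds a LinkMessage and serializes it with GsonUtil from flink-learning-common. The sample title, text, and URLs are placeholders, and the actual HTTP POST to the robot webhook (handled by DingDingGroupMsgUtil) is not reproduced here.

```java
import com.zhisheng.alert.model.LinkMessage;
import com.zhisheng.common.utils.GsonUtil;

public class LinkMessageExample {
    public static void main(String[] args) {
        // fill the nested Link payload (Lombok @Data generates the setters)
        LinkMessage.Link link = new LinkMessage.Link();
        link.setTitle("Flink Job Alert");
        link.setText("A job switched to FAILED, click for details");
        link.setMessageUrl("http://www.54tianzhisheng.cn/");
        link.setPicUrl("");

        // the BaseMessage constructor calls init(), so msgtype is already MessageType.link
        LinkMessage message = new LinkMessage();
        message.setLink(link);

        // JSON body that would be POSTed to the DingTalk robot webhook
        System.out.println(GsonUtil.toJson(message));
    }
}
```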
/flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/MarkDownMessage.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | * markdown 类型钉钉消息 9 | * blog:http://www.54tianzhisheng.cn/ 10 | * 微信公众号:zhisheng 11 | */ 12 | @Data 13 | @AllArgsConstructor 14 | @NoArgsConstructor 15 | public class MarkDownMessage extends BaseMessage { 16 | 17 | public MarkDownContent markdown; 18 | 19 | public AtMobiles at; 20 | 21 | @Override 22 | protected void init() { 23 | this.msgtype = MessageType.markdown; 24 | } 25 | 26 | 27 | @Data 28 | public static class MarkDownContent { 29 | /** 30 | * 首屏会话透出的展示内容 31 | */ 32 | private String title; 33 | 34 | /** 35 | * markdown格式的消息 36 | */ 37 | private String text; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/MessageType.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | /** 4 | * 消息类型:文本、链接、MarkDown、跳转卡片、消息卡片五种枚举值 5 | * blog:http://www.54tianzhisheng.cn/ 6 | * 微信公众号:zhisheng 7 | */ 8 | public enum MessageType { 9 | /** 10 | * 文本类型 11 | */ 12 | text, 13 | 14 | /** 15 | * 链接类型 16 | */ 17 | link, 18 | 19 | /** 20 | * MarkDown类型 21 | */ 22 | markdown, 23 | 24 | /** 25 | * 跳转卡片类型 26 | */ 27 | actionCard, 28 | 29 | /** 30 | * 消息卡片类型 31 | */ 32 | feedCard; 33 | } 34 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/TextMessage.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | * 文本类型钉钉消息 9 | * blog:http://www.54tianzhisheng.cn/ 10 | * 微信公众号:zhisheng 11 | */ 12 | @Data 13 | @AllArgsConstructor 14 | @NoArgsConstructor 15 | public class TextMessage extends BaseMessage { 16 | 17 | /** 18 | * 消息内容 19 | */ 20 | public TextContent text; 21 | 22 | /** 23 | * @ 24 | */ 25 | public AtMobiles at; 26 | 27 | 28 | @Override 29 | protected void init() { 30 | this.msgtype = MessageType.text; 31 | } 32 | 33 | 34 | @Data 35 | public static class TextContent { 36 | /** 37 | * 消息内容 38 | */ 39 | private String content; 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/model/WorkNotify.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.model; 2 | 3 | 4 | import lombok.AllArgsConstructor; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * 钉钉工作通知 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | @Data 14 | @NoArgsConstructor 15 | public class WorkNotify { 16 | 17 | /** 18 | * 微应用 id 19 | */ 20 | private Integer agent_id; 21 | 22 | /** 23 | * 接收人列表,逗号分隔 24 | */ 25 | private String userid_list; 26 | 27 | /** 28 | * 部门 id 列表,可选 29 | */ 30 | private String dept_id_list; 31 | 32 | /** 33 | * 是否发送给所有 34 | */ 35 | private Boolean to_all_user; 36 | 37 | /** 38 | * 消息体 39 | */ 40 | 
private Msg msg; 41 | 42 | @Data 43 | @AllArgsConstructor 44 | public static class Msg { 45 | 46 | /** 47 | * 消息类型 48 | */ 49 | private String msgtype; 50 | 51 | private Text text; 52 | 53 | @Data 54 | @AllArgsConstructor 55 | public static class Text { 56 | /** 57 | * 消息内容 58 | */ 59 | private String content; 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/utils/DingDingWorkspaceNoticeUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.utils; 2 | 3 | 4 | /** 5 | * 钉钉工作通知工具类 6 | * https://open-doc.dingtalk.com/microapp/serverapi2/pgoxpy 7 | */ 8 | public class DingDingWorkspaceNoticeUtil { 9 | public static final String workNotifyUrl = "https://oapi.dingtalk.com/topapi/message/corpconversation/asyncsend_v2?access_token="; 10 | 11 | 12 | } 13 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/utils/EmailNoticeUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.utils; 2 | 3 | 4 | /** 5 | * 邮件通知工具类 6 | */ 7 | public class EmailNoticeUtil { 8 | } 9 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/utils/PhoneNoticeUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.utils; 2 | 3 | 4 | /** 5 | * 电话通知工具类 6 | */ 7 | public class PhoneNoticeUtil { 8 | } 9 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/utils/SMSNoticeUtil.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.utils; 2 | 3 | 4 | /** 5 | * 短信通知工具类 6 | */ 7 | public class SMSNoticeUtil { 8 | } 9 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/java/com/zhisheng/alert/watermark/OutageMetricWaterMark.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.alert.watermark; 2 | 3 | import com.zhisheng.alert.model.OutageMetricEvent; 4 | import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks; 5 | import org.apache.flink.streaming.api.watermark.Watermark; 6 | 7 | /** 8 | * Desc: 9 | * Created by zhisheng on 2019/10/15 上午12:30 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class OutageMetricWaterMark implements AssignerWithPeriodicWatermarks { 14 | private long currentTimestamp = Long.MIN_VALUE; 15 | 16 | private final long maxTimeLag = 5000; 17 | 18 | @Override 19 | public Watermark getCurrentWatermark() { 20 | return new Watermark(currentTimestamp == Long.MIN_VALUE ? 
Long.MIN_VALUE : currentTimestamp - maxTimeLag); 21 | } 22 | 23 | @Override 24 | public long extractTimestamp(OutageMetricEvent outageMetricEvent, long l) { 25 | long timestamp = outageMetricEvent.getTimestamp(); 26 | currentTimestamp = Math.max(timestamp, currentTimestamp); 27 | return timestamp; 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/resources/LogEventDataExample.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "app", 3 | "id": "121", 4 | "timestamp": 1570941591229, 5 | "level": "error", 6 | "offset": 32313131, 7 | "content": "Exception in thread \"main\" java.lang.NoClassDefFoundError: org/apache/flink/api/common/ExecutionConfig$GlobalJobParameters", 8 | "tags": { 9 | "cluster_name": "zhisheng", 10 | "app_name": "zhisheng", 11 | "host_ip": "127.0.0.1", 12 | "app_id": "21" 13 | } 14 | } -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | metrics.topic=zhisheng_metrics 5 | log.topic=zhisheng_log 6 | stream.parallelism=4 7 | stream.sink.parallelism=4 8 | stream.default.parallelism=4 9 | stream.checkpoint.interval=1000 10 | stream.checkpoint.enable=false -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-alert/src/test/java/LogEventDataExample.java: -------------------------------------------------------------------------------- 1 | import com.zhisheng.common.model.LogEvent; 2 | import com.zhisheng.common.utils.GsonUtil; 3 | 4 | import java.util.HashMap; 5 | import java.util.Map; 6 | 7 | /** 8 | * Desc: log event data example 9 | * Created by zhisheng on 2019/10/13 下午12:29 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class LogEventDataExample { 14 | public static void main(String[] args) { 15 | 16 | LogEvent logEvent = new LogEvent(); 17 | logEvent.setType("app"); 18 | Map tags = new HashMap<>(); 19 | tags.put("cluster_name", "zhisheng"); 20 | tags.put("host_ip", "127.0.0.1"); 21 | tags.put("app_id", "21"); 22 | tags.put("app_name", "zhisheng"); 23 | 24 | 25 | String message = "Exception in thread \"main\" java.lang.NoClassDefFoundError: org/apache/flink/api/common/ExecutionConfig$GlobalJobParameters"; 26 | 27 | 28 | LogEvent event = new LogEvent().builder() 29 | .type("app") 30 | .timestamp(System.currentTimeMillis()) 31 | .level("error") 32 | .message(message) 33 | .tags(tags).build(); 34 | 35 | System.out.println(GsonUtil.toJson(event)); 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-collector/README.md: -------------------------------------------------------------------------------- 1 | ## Flink 监控数据采集 2 | 3 | 
因为数据采集是非常重要的,如果在底层数据采集的过程中就断了,那么后面的存储和告警链路将全部失效,所以为了保证后面链路的可用性, 4 | 数据采集的话就不能也是一个 Flink Job,否则因为 Flink 挂了的话,那么就会导致数据采集的 Job 失效,导致整个监控告警链路失效。 5 | 6 | 另外,也不符合 Flink 的特点,Flink 其实更在于计算,有数据源(source)和下发处(sink),这里我们采集数据的话就自己写一个项目利用 7 | Flink 自己暴露的 Rest API 去采集相关数据(JobManager、TaskManager、Job 等),将采集好的数据组织好成一个个 Metrics,然后发送到 Kafka。 -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-collector/flink_log_event.json: -------------------------------------------------------------------------------- 1 | { 2 | "source": "flink-1.16.1-sql", 3 | "id": "d47d6b2d-c727-448d-b58b-4d2607df771f", 4 | "timestamp": 1741677244482, 5 | "content": "Request commitFiles return SUCCESS for 1741677153039-6421af218b4c8ce71454da9428a22759-0", 6 | "tags": { 7 | "task_name": "613750_1729863898342", 8 | "host_ip": "10.xxx.xxx.140", 9 | "node_ip": "10.xxx.xxx.22", 10 | "level": "INFO", 11 | "file_name": "Logging.scala", 12 | "task_id": "359158", 13 | "method_name": "logInfo", 14 | "line_number": "51", 15 | "thread_name": "celeborn-dispatcher-3", 16 | "container_type": "jobmanager", 17 | "logger_name": "org.apache.celeborn.client.commit.MapPartitionCommitHandler", 18 | "class_name": "org.apache.celeborn.common.internal.Logging", 19 | "app_id": "batch-flink-359158-1741677126342", 20 | "host_name": "batch-flink-359158-1741677126342-7d549799d4-q78jt", 21 | "container_id": "batch-flink-359158-1741677126342-7d549799d4-q78jt" 22 | } 23 | } -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-collector/flink_metrics_event.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "taskmanager_job_task_isBackPressured", 3 | "timestamp": 1741677078908, 4 | "fields": { 5 | "value": "false" 6 | }, 7 | "tags": { 8 | "task_name": "Filter -> Map -> Filter -> Map -> Filter -> Map -> Filter -> Timestamps/Watermarks -> Sink: Unnamed", 9 | "task_attempt_id": "77ce8a827e33ef8582b0788c40d7193f", 10 | "node_ip": "10.xxx.xxx.13", 11 | "task_id": "20ba6b65f97481d5570070de90e4e791", 12 | "platform_task_name": "base_xxx-monitor", 13 | "task_attempt_num": "0", 14 | "job_name": "base_xxx_monitor_insert_to_es", 15 | "job_id": "a65ef348e256a85ff386cdbd3e8ea09a", 16 | "host": "10.xxx.xxx.251", 17 | "flink_version": "1.12.0", 18 | "container_type": "taskmanager", 19 | "app_id": "flink-2180-1732606614446", 20 | "platform_task_id": "2180", 21 | "container_id": "flink-2180-1732606614446-taskmanager-1-8", 22 | "subtask_index": "197" 23 | } 24 | } -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-collector/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-monitor 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-monitor-collector 13 | 14 | 15 | 16 | com.zhisheng.flink 17 | flink-learning-monitor-common 18 | ${project.version} 19 | 20 | 21 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-collector/src/main/java/com/zhisheng/collector/FlinkJobMetricCollect.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.collector; 2 | 3 | 4 | import com.zhisheng.common.utils.HttpUtil; 5 | import com.zhisheng.common.utils.PropertiesUtil; 6 | 7 | public class 
FlinkJobMetricCollect { 8 | 9 | public static void main(String[] args) { 10 | String jobManagerHost = PropertiesUtil.defaultProp.get("flink.jobmanager.host").toString(); 11 | String jobOverviewResult = HttpUtil.doGet("http://" + jobManagerHost + "/jobs/overview"); 12 | 13 | 14 | 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-common/README.md: -------------------------------------------------------------------------------- 1 | ## Flink 监控 common -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-common/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-monitor 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-monitor-common 13 | 14 | 15 | 16 | 17 | com.zhisheng.flink 18 | flink-learning-common 19 | ${project.version} 20 | 21 | 22 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-common/src/main/java/com/zhisheng/common/model/Job.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.model; 2 | 3 | import lombok.Data; 4 | import lombok.NoArgsConstructor; 5 | 6 | @Data 7 | @NoArgsConstructor 8 | public class Job { 9 | 10 | /** 11 | * job id 12 | */ 13 | private String jid; 14 | 15 | /** 16 | * job name 17 | */ 18 | private String name; 19 | 20 | /** 21 | * job status 22 | */ 23 | private JobStatus state; 24 | 25 | /** 26 | * job start time 27 | */ 28 | private Long startTime; 29 | 30 | /** 31 | * job end time 32 | */ 33 | private Long endTime; 34 | 35 | /** 36 | * job duration time 37 | */ 38 | private Long duration; 39 | 40 | /** 41 | * job last modify time 42 | */ 43 | private Long lastModification; 44 | 45 | /** 46 | * job tasks 47 | */ 48 | private Task tasks; 49 | } 50 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-common/src/main/java/com/zhisheng/common/model/Task.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.common.model; 2 | 3 | import lombok.Data; 4 | import lombok.NoArgsConstructor; 5 | 6 | @Data 7 | @NoArgsConstructor 8 | public class Task { 9 | /** 10 | * task 总个数 11 | */ 12 | private int total; 13 | 14 | /** 15 | * 处于 created 状态的 task 个数 16 | */ 17 | private int created; 18 | 19 | /** 20 | * 处于 scheduled 状态的 task 个数 21 | */ 22 | private int scheduled; 23 | 24 | /** 25 | * 处于 deploying 状态的 task 个数 26 | */ 27 | private int deploying; 28 | 29 | /** 30 | * 处于 running 状态的 task 个数 31 | */ 32 | private int running; 33 | 34 | /** 35 | * 处于 finished 状态的 task 个数 36 | */ 37 | private int finished; 38 | 39 | /** 40 | * 处于 canceling 状态的 task 个数 41 | */ 42 | private int canceling; 43 | 44 | /** 45 | * 处于 canceled 状态的 task 个数 46 | */ 47 | private int canceled; 48 | 49 | /** 50 | * 处于 failed 状态的 task 个数 51 | */ 52 | private int failed; 53 | 54 | /** 55 | * 处于 reconciling 状态的 task 个数 56 | */ 57 | private int reconciling; 58 | } 59 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-dashboard/README.md: -------------------------------------------------------------------------------- 1 | ## Flink Monitor Dashboard 
-------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-dashboard/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-monitor 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-monitor-dashboard 13 | 14 | 15 | 16 | com.zhisheng.flink 17 | flink-learning-monitor-common 18 | ${project.version} 19 | 20 | 21 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-log/src/main/java/com/zhisheng/log/LogAlert.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.log; 2 | 3 | import com.zhisheng.common.model.LogEvent; 4 | import org.apache.flink.api.java.utils.ParameterTool; 5 | import org.apache.flink.streaming.api.datastream.DataStream; 6 | 7 | /** 8 | * Desc: log alert 9 | * Created by zhisheng on 2019/10/26 下午7:23 10 | * blog:http://www.54tianzhisheng.cn/ 11 | * 微信公众号:zhisheng 12 | */ 13 | public class LogAlert { 14 | public static void alert(DataStream logDataStream, ParameterTool parameterTool) { 15 | //异常日志事件 16 | logDataStream.filter(logEvent -> "ERROR".equals(logEvent.getLevel().toUpperCase())) 17 | .print(); 18 | 19 | //告警事件与应用通知方式和收敛方式的策略数据关联 20 | 21 | 22 | //sink 调用发送告警消息的接口 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-log/src/main/java/com/zhisheng/log/model/OriginalLogEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.log.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty; 8 | 9 | import java.util.Map; 10 | 11 | /* 12 | Created by zhisheng on 2019/10/26 下午7:15 13 | blog:http://www.54tianzhisheng.cn/ 14 | 微信公众号:zhisheng 15 | desc: Filebeat 发送到 Kafka 的日志如下 16 | 17 | { 18 | "@timestamp": "2019-10-26T09:23:16.848Z", 19 | "@metadata": { 20 | "beat": "filebeat", 21 | "type": "doc", 22 | "version": "6.8.4", 23 | "topic": "zhisheng_log" 24 | }, 25 | "host": { 26 | "name": "VM_0_2_centos" 27 | }, 28 | "source": "/var/logs/controller.log", 29 | "message": "[2019-10-26 17:23:11,769] TRACE [Controller id=0] Leader imbalance ratio for broker 0 is 0.0 (kafka.controller.KafkaController)" 30 | } 31 | */ 32 | @Data 33 | @NoArgsConstructor 34 | @AllArgsConstructor 35 | @Builder 36 | public class OriginalLogEvent { 37 | 38 | @JsonProperty("@timestamp") 39 | private String timestamp; //use Jackson JsonProperty 40 | 41 | @JsonProperty("@metadata") 42 | private Map metadata; 43 | 44 | private Map host; 45 | 46 | private String source; 47 | 48 | private String message; 49 | } -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-log/src/main/java/com/zhisheng/log/schema/OriginalLogEventSchema.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.log.schema; 2 | 3 | import com.zhisheng.log.model.OriginalLogEvent; 4 | import org.apache.flink.api.common.serialization.DeserializationSchema; 5 | import org.apache.flink.api.common.typeinfo.TypeInformation; 6 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; 7 | 
8 | import java.io.IOException; 9 | 10 | /** 11 | * Desc: OriginalLogEvent Deserialization Schema 12 | * Created by zhisheng on 2019/10/26 下午7:15 13 | * blog:http://www.54tianzhisheng.cn/ 14 | * 微信公众号:zhisheng 15 | */ 16 | public class OriginalLogEventSchema implements DeserializationSchema { 17 | private static final ObjectMapper mapper = new ObjectMapper(); 18 | 19 | @Override 20 | public OriginalLogEvent deserialize(byte[] bytes) throws IOException { 21 | return mapper.readValue(new String(bytes), OriginalLogEvent.class); 22 | } 23 | 24 | @Override 25 | public boolean isEndOfStream(OriginalLogEvent originalLogEvent) { 26 | return false; 27 | } 28 | 29 | 30 | @Override 31 | public TypeInformation getProducedType() { 32 | return TypeInformation.of(OriginalLogEvent.class); 33 | } 34 | } -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-log/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=xxx:9092 2 | kafka.group.id=zhisheng-log-group 3 | kafka.zookeeper.connect=xxx:2181 4 | metrics.topic=zhisheng 5 | logs.topic=zhisheng_log 6 | kafka.sink.brokers=localhost:9092 7 | kafka.sink.topic=metric-test 8 | stream.parallelism=5 9 | stream.checkpoint.interval=1000 10 | stream.checkpoint.enable=false 11 | stream.sink.parallelism=5 12 | 13 | elasticsearch.hosts=localhost:9200 14 | elasticsearch.bulk.flush.max.actions=40 15 | 16 | # \u6743\u9650 17 | es.security.enable=false 18 | es.security.username=zhisheng 19 | es.security.password=zhisheng -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-pvuv/README.md: -------------------------------------------------------------------------------- 1 | ## Flink 监控 pv uv -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-pvuv/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-monitor 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-monitor-pvuv 13 | 14 | 15 | 16 | 17 | com.zhisheng.flink 18 | flink-learning-monitor-common 19 | ${project.version} 20 | 21 | 22 | org.apache.flink 23 | flink-connector-redis_2.10 24 | 1.1.5 25 | 26 | 27 | 28 | redis.clients 29 | jedis 30 | 2.9.0 31 | 32 | 33 | -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-pvuv/src/main/java/com/zhisheng/monitor/pvuv/model/UserVisitWebEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.monitor.pvuv.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * @author fanrui 10 | * @date 2019-10-25 12:50:23 11 | * @desc 用户访问网页的日志 12 | */ 13 | @Data 14 | @NoArgsConstructor 15 | @AllArgsConstructor 16 | @Builder 17 | public class UserVisitWebEvent { 18 | 19 | /** 20 | * 日志的唯一 id 21 | */ 22 | private String id; 23 | 24 | /** 25 | * 日期,如:20191025 26 | */ 27 | private String date; 28 | 29 | /** 30 | * 页面 id 31 | */ 32 | private Integer pageId; 33 | 34 | /** 35 | * 用户的唯一标识,用户 id 36 | */ 37 | private String userId; 38 | 39 | /** 40 | * 页面的 url 41 | */ 42 | private String url; 43 | 44 | } 
-------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-storage/README.md: -------------------------------------------------------------------------------- 1 | ## Flink 监控数据存储 -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-storage/flink_metrics_2es.sql: -------------------------------------------------------------------------------- 1 | -- 打印 flink 1.16 任务的消费延迟 metrics 监控数据 2 | 3 | 4 | CREATE TABLE metrics_yarn_flink_jobs ( 5 | name STRING, 6 | fields Map, 7 | tags Row 8 | ) WITH ( 9 | 'connector' = 'kafka', 10 | 'topic' = 'metrics-flink-jobs', 11 | 'properties.bootstrap.servers' = 'logs-kafka1.xxx:9092,logs-kafka2.xxx:9092,logs-kafka3.xxx:9092', 12 | 'properties.group.id' = 'test', 13 | 'format' = 'json' 14 | ); 15 | 16 | 17 | CREATE TABLE flink_jobs_metrics ( 18 | name STRING, 19 | fields Map, 20 | tags Row 21 | ) WITH ( 22 | 'connector' = 'print' 23 | ); 24 | 25 | insert into 26 | flink_jobs_metrics 27 | select 28 | name, 29 | fields, 30 | tags 31 | from 32 | metrics_yarn_flink_jobs ; -------------------------------------------------------------------------------- /flink-learning-monitor/flink-learning-monitor-storage/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-monitor 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-monitor-storage 13 | 14 | 15 | 16 | com.zhisheng.flink 17 | flink-learning-monitor-common 18 | ${project.version} 19 | 20 | 21 | -------------------------------------------------------------------------------- /flink-learning-monitor/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-monitor 13 | pom 14 | 15 | flink-learning-monitor-collector 16 | flink-learning-monitor-alert 17 | flink-learning-monitor-storage 18 | flink-learning-monitor-common 19 | flink-learning-monitor-pvuv 20 | flink-learning-monitor-dashboard 21 | flink-learning-monitor-log 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /flink-learning-project/README.md: -------------------------------------------------------------------------------- 1 | ## Flink 项目 2 | 3 | + [基于 Apache Flink 的日志实时处理系统](./flink-learning-project-log) 4 | 5 | + [基于 Apache Flink 的百亿数据去重实践](./flink-learning-project-deduplication) 6 | 7 | + [基于 Apache Flink 的监控告警系统](./flink-learning-project-monitor-alert) 8 | 9 | + [基于 Apache Flink 的实时风控系统](./flink-learning-project-risk-management) 10 | 11 | + [基于 Apache Flink 的实时大屏系统](./flink-learning-project-monitor-dashboard) 12 | 13 | + [Apache Flink 实时作业脚手架](./flink-learning-project-flink-job-scaffold) 14 | 15 | + [基于 Apache Flink 的实时数仓建设](./flink-learning-project-real-time-data-warehouse) 16 | 17 | + [基于 Apache Flink 的实时计算平台建设](./flink-learning-project-real-time-computing-platform) 18 | -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-common/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-common 13 | 14 | 15 | 16 | com.zhisheng.flink 17 | flink-learning-common 18 | ${project.version} 19 | 20 | 21 | 
-------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-deduplication/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-project-deduplication 2 | 3 | 基于 Flink 的百亿数据去重实践 -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-deduplication/src/main/java/com/zhisheng/project/deduplication/model/UserVisitWebEvent.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.project.deduplication.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | * @author fanrui 10 | * @date 2019-11-01 01:24:50 11 | * @desc 用户访问网页的日志 12 | */ 13 | @Data 14 | @NoArgsConstructor 15 | @AllArgsConstructor 16 | @Builder 17 | public class UserVisitWebEvent { 18 | 19 | /** 20 | * 日志的唯一 id 21 | */ 22 | private String id; 23 | 24 | /** 25 | * 日期,如:20191025 26 | */ 27 | private String date; 28 | 29 | /** 30 | * 页面 id 31 | */ 32 | private Integer pageId; 33 | 34 | /** 35 | * 用户的唯一标识,用户 id 36 | */ 37 | private String userId; 38 | 39 | /** 40 | * 页面的 url 41 | */ 42 | private String url; 43 | 44 | } -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-flink-job-scaffold/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-flink-job-scaffold 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-log/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-log 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-monitor-alert/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-monitor-alert 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-monitor-dashboard/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-monitor-dashboard 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-real-time-computing-platform/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-real-time-computing-platform 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-real-time-data-warehouse/pom.xml: -------------------------------------------------------------------------------- 1 
| 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-real-time-data-warehouse 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-project/flink-learning-project-risk-management/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning-project 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project-risk-management 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-project/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-project 13 | pom 14 | 15 | flink-learning-project-deduplication 16 | flink-learning-project-common 17 | flink-learning-project-monitor-alert 18 | flink-learning-project-real-time-data-warehouse 19 | flink-learning-project-log 20 | flink-learning-project-risk-management 21 | flink-learning-project-monitor-dashboard 22 | flink-learning-project-flink-job-scaffold 23 | flink-learning-project-real-time-computing-platform 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /flink-learning-sql/README.md: -------------------------------------------------------------------------------- 1 | ### Flink-learning-sql 2 | 3 | Flink Table API & SQL 4 | 5 | + [flink sql ago](./flink-learning-sql-ago) 6 | + [flink sql blink](./flink-learning-sql-blink) 7 | 8 | https://github.com/ververica/sql-training -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-blink/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-sql-blink 2 | 3 | 添加 Blink planner 的依赖: 4 | 5 | ```xml 6 | 7 | org.apache.flink 8 | flink-table-planner_${scala.binary.version} 9 | ${flink.version} 10 | 11 | ``` 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-blink/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | kafka.brokers=localhost:9092 2 | kafka.group.id=zhisheng 3 | kafka.zookeeper.connect=localhost:2181 4 | kafka.topic=zhisheng -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-blink/src/main/resources/words.txt: -------------------------------------------------------------------------------- 1 | Hello 2 | World 3 | Hello 4 | Hello 5 | World 6 | Hello 7 | Flink -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-blink/src/test/java/test/TableEnvironmentExample1.java: -------------------------------------------------------------------------------- 1 | package test; 2 | 3 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 4 | import org.apache.flink.table.api.EnvironmentSettings; 5 | import org.apache.flink.table.api.TableConfig; 6 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 7 | 8 | /** 9 | * Desc: blink planner TableEnvironment 10 | * Created by zhisheng on 2019/11/3 下午2:23 11 | * blog:http://www.54tianzhisheng.cn/ 12 | * 微信公众号:zhisheng 13 | */ 14 | public class 
TableEnvironmentExample1 { 15 | public static void main(String[] args) { 16 | //流作业 17 | StreamTableEnvironment.create(StreamExecutionEnvironment.getExecutionEnvironment()); 18 | //use EnvironmentSettings 19 | StreamTableEnvironment.create(StreamExecutionEnvironment.getExecutionEnvironment(), EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()); 20 | StreamTableEnvironment.create(StreamExecutionEnvironment.getExecutionEnvironment(), EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()); 21 | //use table config 22 | StreamTableEnvironment.create(StreamExecutionEnvironment.getExecutionEnvironment(), TableConfig.getDefault()); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/java/com/zhisheng/sql/SqlSubmit.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.sql; 2 | 3 | 4 | import com.zhisheng.sql.cli.CliOptions; 5 | import com.zhisheng.sql.cli.CliOptionsParser; 6 | import com.zhisheng.sql.planner.BatchPlanner; 7 | import com.zhisheng.sql.planner.Planner; 8 | import com.zhisheng.sql.planner.StreamingPlanner; 9 | 10 | public class SqlSubmit { 11 | 12 | public static void main(String[] args) throws Exception { 13 | 14 | final CliOptions options = CliOptionsParser.parseClient(args); 15 | Planner planner; 16 | if (!Boolean.parseBoolean(options.getIsBatch())) { 17 | planner = StreamingPlanner.build(options); 18 | } else { 19 | planner = BatchPlanner.build(options); 20 | } 21 | 22 | planner.run(); 23 | } 24 | } -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/java/com/zhisheng/sql/cli/CliOptions.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.sql.cli; 2 | 3 | 4 | public class CliOptions { 5 | 6 | private final String sqlFilePath; 7 | private final String workingSpace; 8 | private final String isTest; 9 | private final String isBatch; 10 | private final String k8sClusterId; 11 | 12 | public CliOptions(String sqlFilePath, String workingSpace, String isTest, String isBatch, String k8sClusterId) { 13 | this.sqlFilePath = sqlFilePath; 14 | this.workingSpace = workingSpace; 15 | this.isTest = isTest; 16 | this.isBatch = isBatch; 17 | this.k8sClusterId = k8sClusterId; 18 | } 19 | 20 | public String getSqlFilePath() { 21 | return sqlFilePath; 22 | } 23 | 24 | public String getWorkingSpace() { 25 | return workingSpace; 26 | } 27 | 28 | public String getIsTest() { 29 | return isTest; 30 | } 31 | 32 | public String getIsBatch() { 33 | return isBatch; 34 | } 35 | 36 | public String getK8sClusterId() { 37 | return k8sClusterId; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/java/com/zhisheng/sql/constant/Constant.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.sql.constant; 2 | 3 | public class Constant { 4 | 5 | public static final String IDLE_STATERETENTIOO_TIME = "idle.state.retention.time"; 6 | 7 | } 8 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/java/com/zhisheng/sql/constant/UnitEnum.java: -------------------------------------------------------------------------------- 1 | package 
com.zhisheng.sql.constant; 2 | 3 | public enum UnitEnum { 4 | m, h, d 5 | } 6 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/java/com/zhisheng/sql/exception/SqlParserException.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.sql.exception; 2 | 3 | @SuppressWarnings("serial") 4 | public class SqlParserException extends RuntimeException { 5 | 6 | public SqlParserException(String msg) { 7 | super(msg); 8 | } 9 | 10 | } 11 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/java/com/zhisheng/sql/utils/CloseableRowIteratorWrapper.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.sql.utils; 2 | 3 | import org.apache.flink.types.Row; 4 | import org.apache.flink.util.CloseableIterator; 5 | 6 | public class CloseableRowIteratorWrapper implements CloseableIterator { 7 | private final CloseableIterator iterator; 8 | private boolean isFirstRowReady = false; 9 | 10 | public CloseableRowIteratorWrapper(CloseableIterator iterator) { 11 | this.iterator = iterator; 12 | } 13 | 14 | @Override 15 | public void close() throws Exception { 16 | iterator.close(); 17 | } 18 | 19 | @Override 20 | public boolean hasNext() { 21 | boolean hasNext = iterator.hasNext(); 22 | isFirstRowReady = isFirstRowReady || hasNext; 23 | return hasNext; 24 | } 25 | 26 | @Override 27 | public Row next() { 28 | Row next = iterator.next(); 29 | isFirstRowReady = true; 30 | return next; 31 | } 32 | 33 | public boolean isFirstRowReady() { 34 | return isFirstRowReady || hasNext(); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/java/com/zhisheng/sql/utils/Config.java: -------------------------------------------------------------------------------- 1 | package com.zhisheng.sql.utils; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import java.io.IOException; 7 | import java.util.Properties; 8 | 9 | public class Config { 10 | 11 | protected static final Logger LOG = LoggerFactory.getLogger(Config.class); 12 | 13 | private static Properties p = new Properties(); 14 | private static String paramsFile = "/conf.properties"; 15 | 16 | static { 17 | try { 18 | p.load(Config.class.getResourceAsStream(paramsFile)); 19 | } catch (IOException e) { 20 | LOG.error("Config读取配置出错:" + e.getMessage()); 21 | } 22 | } 23 | 24 | public static String getString(String key) { 25 | return p.getProperty(key); 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/resources/dev/conf.properties: -------------------------------------------------------------------------------- 1 | httpUrl=https://fat-zhisheng-sql.cn/index/k8sFlinkStatus -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/resources/pre/conf.properties: -------------------------------------------------------------------------------- 1 | httpUrl=https://pre-zhisheng-sql.cn/index/k8sFlinkStatus -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/resources/prod/conf.properties: 
-------------------------------------------------------------------------------- 1 | httpUrl=https://pro-zhisheng-sql.cn/index/k8sFlinkStatus -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/main/resources/sql/124563.sql: -------------------------------------------------------------------------------- 1 | CREATE TEMPORARY FUNCTION json_value_udf AS 'com.zhisheng.udf.JsonValueUdf'; 2 | 3 | CREATE TABLE metrics_flink_jobs ( 4 | name STRING, 5 | fields Map, 6 | tags Row 7 | ) WITH ( 8 | 'connector' = 'kafka', 9 | 'topic' = 'metrics-flink-jobs', 10 | 'properties.bootstrap.servers' = 'logs-kafka1.xxx:9092,logs-kafka2.xxx:9092,logs-kafka3.xxx:9092', 11 | 'properties.group.id' = 'test', 12 | 'format' = 'json' 13 | ); 14 | 15 | 16 | CREATE TABLE flink_jobs_metrics ( 17 | name STRING, 18 | fields Map, 19 | tags Row 20 | ) WITH ( 21 | 'connector' = 'print' 22 | ); 23 | 24 | insert into 25 | flink_jobs_metrics 26 | select 27 | name, 28 | fields, 29 | tags 30 | from 31 | metrics_flink_jobs ; -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/test/java/SqlSubmitTest.java: -------------------------------------------------------------------------------- 1 | import com.zhisheng.sql.SqlSubmit; 2 | import com.zhisheng.sql.cli.CliOptions; 3 | import org.junit.Test; 4 | 5 | import static org.junit.Assert.assertEquals; 6 | 7 | public class SqlSubmitTest { 8 | 9 | 10 | @Test 11 | public void testMain() throws Exception { 12 | String[] args = new String[]{"-w", "src/test/resources/sql", "-f", "test.sql", "-t", "false", "-b", "false", "-k8d", "flink-373362-1741687273399"}; 13 | SqlSubmit.main(args); 14 | } 15 | 16 | @Test 17 | public void testCliOptions() { 18 | CliOptions cliOptions = new CliOptions("sqlFilePath", "workingSpace", "isTest", "isBatch", "k8sClusterId"); 19 | assertEquals("sqlFilePath", cliOptions.getSqlFilePath()); 20 | assertEquals("workingSpace", cliOptions.getWorkingSpace()); 21 | assertEquals("isTest", cliOptions.getIsTest()); 22 | assertEquals("isBatch", cliOptions.getIsBatch()); 23 | assertEquals("k8sClusterId", cliOptions.getK8sClusterId()); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/test/resources/dev/conf.properties: -------------------------------------------------------------------------------- 1 | httpUrl=https://fat-zhisheng-sql.cn/index/k8sFlinkStatus -------------------------------------------------------------------------------- /flink-learning-sql/flink-learning-sql-client/src/test/resources/sql/test.sql: -------------------------------------------------------------------------------- 1 | -- 打印 flink 1.16 任务的消费延迟 metrics 监控数据 2 | 3 | CREATE TABLE metrics_flink_jobs ( 4 | name STRING, 5 | fields Map, 6 | tags Row 7 | ) WITH ( 8 | 'connector' = 'kafka', 9 | 'topic' = 'metrics-flink-jobs', 10 | 'properties.bootstrap.servers' = 'logs-kafka1.xxx:9092,logs-kafka2.xxx:9092,logs-kafka3.xxx:9092', 11 | 'properties.group.id' = 'test', 12 | 'format' = 'json' 13 | ); 14 | 15 | 16 | CREATE TABLE flink_jobs_metrics ( 17 | name STRING, 18 | fields Map, 19 | tags Row 20 | ) WITH ( 21 | 'connector' = 'print' 22 | ); 23 | 24 | insert into 25 | flink_jobs_metrics 26 | select 27 | name, 28 | fields, 29 | tags 30 | from 31 | metrics_flink_jobs ; -------------------------------------------------------------------------------- 
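
As a usage note for the SQL client above, the following hypothetical launcher mirrors the arguments used in SqlSubmitTest. The flag-to-option mapping is inferred from the CliOptions field names and the test values; the exact semantics of the isTest flag and the Kubernetes cluster id are handled inside the planners, which are not shown here.

```java
import com.zhisheng.sql.SqlSubmit;

// Hypothetical local launcher; values mirror SqlSubmitTest.
public class SubmitLocalSqlJob {
    public static void main(String[] args) throws Exception {
        SqlSubmit.main(new String[]{
                "-w", "src/test/resources/sql",        // working space directory holding the SQL files
                "-f", "test.sql",                      // SQL file to parse and run
                "-t", "false",                         // isTest flag
                "-b", "false",                         // isBatch: false -> StreamingPlanner, true -> BatchPlanner
                "-k8d", "flink-373362-1741687273399"   // Kubernetes cluster id passed through to the planner
        });
    }
}
```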
/flink-learning-sql/flink-learning-sql-common/README.md: -------------------------------------------------------------------------------- 1 | ### flink-learning-sql-common 2 | 3 | 添加 Table API & SQL 的公共依赖: 4 | 5 | ```xml 6 | 7 | org.apache.flink 8 | flink-table-api-java-bridge_${scala.binary.version} 9 | ${flink.version} 10 | 11 | 12 | org.apache.flink 13 | flink-streaming-scala_${scala.binary.version} 14 | ${flink.version} 15 | 16 | 17 | org.apache.flink 18 | flink-table-common 19 | ${flink.version} 20 | 21 | ``` 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /flink-learning-sql/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | flink-learning 7 | com.zhisheng.flink 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | flink-learning-sql 13 | pom 14 | 15 | flink-learning-sql-blink 16 | flink-learning-sql-common 17 | flink-learning-sql-client 18 | 19 | 20 | -------------------------------------------------------------------------------- /pics/Flink-code.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zhisheng17/flink-learning/f56062702aeb408e3168da4ae4deb16d97d8b5b7/pics/Flink-code.png --------------------------------------------------------------------------------