├── .gitignore
├── CNAME
├── LICENSE
├── README.md
├── dev
│   └── create-release
│       └── release.sh
├── docs
│   ├── .nojekyll
│   ├── CNAME
│   ├── README.md
│   ├── _sidebar.md
│   ├── accumulator.md
│   ├── anno.md
│   ├── connector
│   │   ├── adb.md
│   │   ├── clickhouse.md
│   │   ├── hbase.md
│   │   ├── hive.md
│   │   ├── jdbc.md
│   │   ├── kafka.md
│   │   ├── oracle.md
│   │   └── rocketmq.md
│   ├── datasource.md
│   ├── dev
│   │   ├── config.md
│   │   ├── deploy-script.md
│   │   ├── engine-env.md
│   │   └── integration.md
│   ├── feature.md
│   ├── highlight
│   │   ├── checkpoint.md
│   │   └── spark-duration.md
│   ├── img
│   │   ├── Fire.png
│   │   ├── anno-sql.png
│   │   ├── anno_log.png
│   │   ├── arthas-shell.png
│   │   ├── checkpoint-duration.png
│   │   ├── configuration.png
│   │   ├── dingding.jpeg
│   │   ├── fire-framework-logo.jpeg
│   │   ├── fire-restful.png
│   │   ├── streaming-duration.png
│   │   ├── web-config.png
│   │   └── weixin.png
│   ├── index.html
│   ├── platform.md
│   ├── pom
│   │   ├── flink-pom.xml
│   │   └── spark-pom.xml
│   ├── properties.md
│   ├── restful.md
│   ├── schedule.md
│   └── threadpool.md
├── fire-bundle
│   ├── fire-bundle-flink
│   │   ├── pom.xml
│   │   └── src/main/java/com/zto/fire/bundle/FlinkBundle.java
│   ├── fire-bundle-spark
│   │   ├── pom.xml
│   │   └── src/main/java/com/zto/fire/bundle/SparkBundle.java
│   └── pom.xml
├── fire-common
│   ├── pom.xml
│   └── src
│       ├── main/java/com/zto/fire/common
│       │   ├── anno
│       │   │   ├── Config.java
│       │   │   ├── FieldName.java
│       │   │   ├── FireConf.java
│       │   │   ├── Internal.java
│       │   │   ├── Rest.java
│       │   │   ├── Scheduled.java
│       │   │   ├── SqlConfig.java
│       │   │   └── TestStep.java
│       │   ├── bean
│       │   │   ├── ConsumerOffsetInfo.java
│       │   │   ├── FireTask.java
│       │   │   ├── Generator.java
│       │   │   ├── JobConsumerInfo.java
│       │   │   ├── analysis
│       │   │   │   └── ExceptionMsg.java
│       │   │   ├── config
│       │   │   │   └── ConfigurationParam.java
│       │   │   ├── lineage
│       │   │   │   ├── Lineage.java
│       │   │   │   ├── LineageResult.java
│       │   │   │   ├── SQLLineage.java
│       │   │   │   ├── SQLTable.java
│       │   │   │   ├── SQLTableColumns.java
│       │   │   │   ├── SQLTableColumnsRelations.java
│       │   │   │   ├── SQLTablePartitions.java
│       │   │   │   └── SQLTableRelations.java
│       │   │   ├── rest
│       │   │   │   ├── ResultMsg.java
│       │   │   │   └── yarn
│       │   │   │       └── App.java
│       │   │   └── runtime
│       │   │       ├── ClassLoaderInfo.java
│       │   │       ├── CpuInfo.java
│       │   │       ├── DiskInfo.java
│       │   │       ├── DisplayInfo.java
│       │   │       ├── HardwareInfo.java
│       │   │       ├── JvmInfo.java
│       │   │       ├── MemoryInfo.java
│       │   │       ├── NetworkInfo.java
│       │   │       ├── OSInfo.java
│       │   │       ├── RuntimeInfo.java
│       │   │       ├── ThreadInfo.java
│       │   │       └── UsbInfo.java
│       │   ├── enu
│       │   │   ├── ConfigureLevel.java
│       │   │   ├── Datasource.java
│       │   │   ├── ErrorCode.java
│       │   │   ├── ErrorTolerance.java
│       │   │   ├── HiveTableStoredType.java
│       │   │   ├── JdbcDriver.java
│       │   │   ├── JobType.java
│       │   │   ├── Operation.java
│       │   │   ├── RequestMethod.scala
│       │   │   ├── RunMode.java
│       │   │   ├── SqlSemantic.java
│       │   │   ├── ThreadPoolType.java
│       │   │   ├── TimeCharacteristic.java
│       │   │   └── YarnState.java
│       │   ├── exception
│       │   │   ├── FireException.java
│       │   │   ├── FireFlinkException.java
│       │   │   └── FireSparkException.java
│       │   ├── pool
│       │   │   ├── ClassEntity.java
│       │   │   └── ClassPool.java
│       │   └── util
│       │       ├── Constant.java
│       │       ├── ConsumerOffsetUtils.java
│       │       ├── EncryptUtils.java
│       │       ├── FileUtils.java
│       │       ├── FindClassUtils.java
│       │       ├── FireEngineUtils.java
│       │       ├── HttpClientUtils.java
│       │       ├── IOUtils.java
│       │       ├── MathUtils.java
│       │       ├── OSUtils.java
│       │       ├── ParameterTool.java
│       │       ├── ProcessUtil.java
│       │       ├── ReflectionUtils.java
│       │       ├── StringsUtils.java
│       │       ├── TimeExpression.java
│       │       ├── UnitFormatUtils.java
│       │       └── YarnUtils.java
│       ├── main/resources
│       │   └── log4j.properties
│       ├── main/scala/com/zto/fire/common
│       │   ├── bean
│       │   │   ├── MQRecord.scala
│       │   │   └── TableIdentifier.scala
│       │   ├── conf
│       │   │   ├── FireConf.scala
│       │   │   ├── FireFrameworkConf.scala
│       │   │   ├── FireHDFSConf.scala
│       │   │   ├── FireHiveConf.scala
│       │   │   ├── FireKafkaConf.scala
│       │   │   ├── FirePS1Conf.scala
│       │   │   ├── FireRocketMQConf.scala
│       │   │   └── KeyNum.scala
│       │   ├── ext
│       │   │   ├── JavaExt.scala
│       │   │   └── ScalaExt.scala
│       │   ├── lineage
│       │   │   ├── DatasourceDesc.scala
│       │   │   ├── LineageManager.scala
│       │   │   ├── SQLLineageManager.scala
│       │   │   ├── SqlToDatasource.scala
│       │   │   └── parser
│       │   │       ├── ConnectorParser.scala
│       │   │       ├── ConnectorParserManager.scala
│       │   │       └── connector
│       │   │           ├── BlackholeConnectorParser.scala
│       │   │           ├── CdcConnectorParser.scala
│       │   │           ├── ClickhouseConnectorParser.scala
│       │   │           ├── CustomizeConnectorParser.scala
│       │   │           ├── DatagenConnectorParser.scala
│       │   │           ├── DorisConnectorParser.scala
│       │   │           ├── DynamodbConnectorParser.scala
│       │   │           ├── ElasticsearchConnectorParser.scala
│       │   │           ├── FilesystemConnectorParser.scala
│       │   │           ├── FirehoseConnectorParser.scala
│       │   │           ├── HbaseConnectorParser.scala
│       │   │           ├── HiveConnectorParser.scala
│       │   │           ├── HudiConnectorParser.scala
│       │   │           ├── IFileConnectorParser.scala
│       │   │           ├── IJDBCConnectorParser.scala
│       │   │           ├── IMQConnectorParser.scala
│       │   │           ├── IVirtualConnectorParser.scala
│       │   │           ├── IcebergConnectorParser.scala
│       │   │           ├── JdbcConnectorParser.scala
│       │   │           ├── KafkaConnectorParser.scala
│       │   │           ├── KinesisConnectorParser.scala
│       │   │           ├── MongodbConnectorParser.scala
│       │   │           ├── PaimonConnectorParser.scala
│       │   │           ├── PrintConnectorParser.scala
│       │   │           ├── RocketmqConnectorParser.scala
│       │   │           ├── StarrocksConnectorParser.scala
│       │   │           ├── UnknownConnectorParser.scala
│       │   │           └── UrlConnectorParser.scala
│       │   ├── package.scala
│       │   └── util
│       │       ├── ConfigurationCenterManager.scala
│       │       ├── DatasourceAlias.scala
│       │       ├── DateFormatUtils.scala
│       │       ├── ExceptionBus.scala
│       │       ├── FireFunctions.scala
│       │       ├── FireUtils.scala
│       │       ├── HDFSUtils.scala
│       │       ├── JSONUtils.scala
│       │       ├── JavaTypeMap.scala
│       │       ├── KafkaUtils.scala
│       │       ├── LogUtils.scala
│       │       ├── Logging.scala
│       │       ├── MQProducer.scala
│       │       ├── NumberFormatUtils.scala
│       │       ├── PortPoolManager.scala
│       │       ├── PropUtils.scala
│       │       ├── RegularUtils.scala
│       │       ├── RocketMQUtils.scala
│       │       ├── SQLUtils.scala
│       │       ├── ScalaTypeMap.scala
│       │       ├── ScalaUtils.scala
│       │       ├── ShutdownHookManager.scala
│       │       ├── ThreadUtils.scala
│       │       ├── Tools.scala
│       │       └── ValueUtils.scala
│       └── test/scala/com/zto/fire/common/util
│           ├── RegularUtilsUnitTest.scala
│           ├── SQLUtilsTest.scala
│           ├── ShutdownHookManagerTest.scala
│           └── ValueUtilsTest.scala
├── fire-connectors
│   ├── .gitignore
│   ├── base-connectors
│   │   ├── fire-hbase
│   │   │   ├── pom.xml
│   │   │   └── src/main
│   │   │       ├── java/com/zto/fire/hbase/anno/HConfig.java
│   │   │       └── scala/com/zto/fire/hbase
│   │   │           ├── HBaseConnector.scala
│   │   │           ├── HBaseFunctions.scala
│   │   │           ├── bean
│   │   │           │   ├── HBaseBaseBean.java
│   │   │           │   └── MultiVersionsBean.java
│   │   │           ├── conf
│   │   │           │   └── FireHBaseConf.scala
│   │   │           └── utils
│   │   │               └── HBaseUtils.scala
│   │   ├── fire-jdbc
│   │   │   ├── pom.xml
│   │   │   └── src/main
│   │   │       ├── java/com/zto/fire/jdbc/JavaJdbcConnectorBridge.java
│   │   │       ├── resources
│   │   │       │   └── driver.properties
│   │   │       └── scala/com/zto/fire/jdbc
│   │   │           ├── JdbcConnector.scala
│   │   │           ├── JdbcConnectorBridge.scala
│   │   │           ├── JdbcFunctions.scala
│   │   │           ├── conf
│   │   │           │   └── FireJdbcConf.scala
│   │   │           └── util
│   │   │               └── DBUtils.scala
│   │   └── pom.xml
│   ├── flink-connectors
│   │   ├── flink-clickhouse
│   │   │   ├── pom.xml
│   │   │   └── src/main
│   │   │       ├── java-flink-1.12/org/apache/flink/connector/clickhouse
│   │   │       ├── java-flink-1.13/org/apache/flink/connector/clickhouse
│   │   │       ├── java-flink-1.14/org/apache/flink/connector/clickhouse
│   │   │       ├── java-flink-1.15/org/apache/flink/connector/clickhouse
│   │   │       ├── java-flink-1.16/org/apache/flink/connector/clickhouse
│   │   │       ├── java-flink-1.17/org/apache/flink/connector/clickhouse
│   │   │       ├── java-flink-1.18/org/apache/flink/connector/clickhouse
│   │   │       ├── java-flink-1.19/org/apache/flink/connector/clickhouse
│   │   │       └── resources/META-INF/services
│   │   │           ├── org.apache.flink.table.factories.Factory
│   │   │           └── org.apache.flink.table.factories.TableFactory
│   │   ├── flink-format
│   │   │   ├── pom.xml
│   │   │   └── src/main
│   │   │       ├── java-flink-1.14/com/zto/fire/flink/formats/json
│   │   │       │   ├── ZDTPJsonDecodingFormat.java
│   │   │       │   ├── ZDTPJsonDeserializationSchema.java
│   │   │       │   ├── ZDTPJsonFormatFactory.java
│   │   │       │   └── ZDTPJsonOptions.java
│   │   │       ├── java/com/zto/fire/flink/formats/FlinkFormat.java
│   │   │       └── resources/META-INF/services/org.apache.flink.table.factories.Factory
│   │   ├── flink-rocketmq
│   │   │   ├── pom.xml
│   │   │   └── src/main
│   │   │       ├── java-flink-1.15/org/apache/flink
│   │   │       ├── java-flink-1.16/org/apache/flink
│   │   │       ├── java-flink-1.17/org/apache/flink
│   │   │       ├── java-flink-1.18/org/apache/flink
│   │   │       ├── java-flink-1.19/org/apache/flink
│   │   │       ├── java
│   │   │       │   ├── com/zto/fire/flink/sql/connector/rocketmq
│   │   │       │   │   ├── DynamicRocketMQDeserializationSchema.java
│   │   │       │   │   ├── FireRocketMQDynamicSource.java
│   │   │       │   │   ├── FireRocketMQDynamicTableFactory.java
│   │   │       │   │   ├── FireRocketMQOptions.java
│   │   │       │   │   ├── FireRocketMQSource.java
│   │   │       │   │   └── RocketMQDeserializationSchema.java
│   │   │       │   └── org/apache/rocketmq/flink
│   │   │       │       ├── RocketMQConfig.java
│   │   │       │       ├── RocketMQSink.java
│   │   │       │       ├── RocketMQSinkWithTag.java
│   │   │       │       ├── RocketMQSource.java
│   │   │       │       ├── RocketMQSourceWithTag.java
│   │   │       │       ├── RocketMQUtils.java
│   │   │       │       ├── RunningChecker.java
│   │   │       │       ├── common
│   │   │       │       ├── util
│   │   │       │       └── watermark
│   │   │       ├── resources/META-INF/services/org.apache.flink.table.factories.Factory
│   │   │       └── scala/com/zto/fire/flink/sql/connector/rocketmq
│   │   │           ├── RocketMQDynamicTableFactory.scala
│   │   │           ├── RocketMQDynamicTableSink.scala
│   │   │           ├── RocketMQDynamicTableSource.scala
│   │   │           └── RocketMQOptions.scala
│   │   └── pom.xml
│   ├── pom.xml
│   └── spark-connectors
│       ├── pom.xml
│       ├── spark-hbase
│       │   ├── pom.xml
│       │   └── src/main
│       │       ├── java/org/apache/hadoop/hbase
│       │       │   ├── client
│       │       │   │   ├── ConnFactoryEnhance.java
│       │       │   │   └── ConnectionFactoryEnhance.java
│       │       │   └── spark
│       │       │       ├── SparkSQLPushDownFilter.java
│       │       │       ├── example/hbasecontext
│       │       │       └── protobuf/generated/FilterProtos.java
│       │       ├── protobuf/Filter.proto
│       │       ├── scala-spark-2.3
│       │       ├── scala-spark-2.4
│       │       ├── scala-spark-3.0
│       │       ├── scala-spark-3.1
│       │       ├── scala-spark-3.2
│       │       ├── scala-spark-3.3
│       │       ├── scala-spark-3.4
│       │       ├── scala-spark-3.5
│       │       └── scala/org/apache/hadoop/hbase/spark
│       │           ├── datasources
│       │           └── example
│       ├── spark-hudi
│       │   ├── .gitignore
│       │   ├── pom.xml
│       │   └── src/main
│       │       ├── java-hudi-0.10/org/apache/hudi/common/fs/FSUtils.java
│       │       ├── java-hudi-0.11/org/apache/hudi/common/fs/FSUtils.java
│       │       ├── java-hudi-0.12/org/apache/hudi/common/fs/FSUtils.java
│       │       ├── java-hudi-0.13/org/apache/hudi/common/fs/FSUtils.java
│       │       ├── java-hudi-0.8/org/apache/hudi/common
│       │       │   ├── fs/FSUtils.java
│       │       │   └── util/queue/BoundedInMemoryExecutor.java
│       │       ├── java-hudi-0.9/org/apache/hudi/common/fs/FSUtils.java
│       │       ├── java-hudi-1.0/org/apache/hudi/common/fs/FSUtils.java
│       │       ├── java/com/zto/fire/hudi/enu
│       │       │   ├── HoodieOperationType.java
│       │       │   └── HoodieTableType.java
│       │       └── scala/com/zto/fire
│       │           ├── hudi.scala
│       │           └── hudi
│       │               ├── conf
│       │               │   └── FireHudiConf.scala
│       │               └── util
│       │                   └── HudiUtils.scala
│       └── spark-rocketmq
│           ├── pom.xml
│           └── src/main
│               ├── java/org/apache/rocketmq/spark
│               │   ├── OffsetCommitCallback.java
│               │   ├── RocketMQConfig.java
│               │   ├── TopicQueueId.java
│               │   └── streaming
│               ├── scala-spark-2.3
│               ├── scala-spark-2.4
│               ├── scala-spark-3.0
│               ├── scala-spark-3.1
│               ├── scala-spark-3.2
│               ├── scala-spark-3.3
│               ├── scala-spark-3.4
│               ├── scala-spark-3.5
│               └── scala/org/apache
│                   ├── rocketmq/spark
│                   └── spark
│                       ├── sql/rocketmq
│                       └── streaming/MQPullInputDStream.scala
├── fire-core
│   ├── pom.xml
│   └── src/main
│       ├── java/com/zto/fire/core
│       │   ├── TimeCost.java
│       │   ├── anno
│       │   │   ├── connector
│       │   │   └── lifecycle
│       │   ├── bean
│       │   │   └── ArthasParam.java
│       │   └── task
│       │       ├── SchedulerManager.java
│       │       ├── TaskRunner.java
│       │       └── TaskRunnerQueue.java
│       ├── resources
│       │   └── fire.properties
│       └── scala/com/zto/fire/core
│           ├── Api.scala
│           ├── BaseFire.scala
│           ├── conf
│           │   └── AnnoManager.scala
│           ├── connector
│           │   ├── Connector.scala
│           │   └── StreamingConnectors.scala
│           ├── ext
│           │   ├── BaseFireExt.scala
│           │   └── Provider.scala
│           ├── plugin
│           │   ├── ArthasDynamicLauncher.scala
│           │   ├── ArthasLauncher.scala
│           │   └── ArthasManager.scala
│           ├── rest
│           │   ├── RestCase.scala
│           │   ├── RestServerManager.scala
│           │   └── SystemRestful.scala
│           ├── sql
│           │   ├── SqlExtensionsParser.scala
│           │   └── SqlParser.scala
│           ├── sync
│           │   ├── DistributeExecuteManager.scala
│           │   ├── LineageAccumulatorManager.scala
│           │   ├── SyncEngineConf.scala
│           │   └── SyncManager.scala
│           ├── task
│           │   └── FireInternalTask.scala
│           └── util
│               ├── ConsumerOffsetManager.scala
│               ├── ErrorToleranceAcc.scala
│               └── SingletonFactory.scala
├── fire-engines
│   ├── .gitignore
│   ├── fire-flink
│   │   ├── .gitignore
│   │   ├── pom.xml
│   │   └── src/main
│   │       ├── java/com/zto/fire/flink
│   │       │   ├── anno
│   │       │   │   ├── Checkpoint.java
│   │       │   │   ├── FlinkConf.java
│   │       │   │   └── Streaming.java
│   │       │   ├── bean
│   │       │   │   ├── CheckpointParams.java
│   │       │   │   ├── DistributeBean.java
│   │       │   │   └── FlinkTableSchema.java
│   │       │   ├── enu
│   │       │   │   └── DistributeModule.java
│   │       │   ├── ext/watermark
│   │       │   │   └── FirePeriodicWatermarks.java
│   │       │   ├── sink
│   │       │   │   ├── BaseSink.scala
│   │       │   │   ├── HBaseSink.scala
│   │       │   │   ├── JdbcSink.scala
│   │       │   │   ├── KafkaSink.scala
│   │       │   │   └── RocketMQSink.scala
│   │       │   └── task
│   │       │       └── FlinkSchedulerManager.java
│   │       ├── resources
│   │       │   ├── META-INF/services/org.apache.flink.table.factories.Factory
│   │       │   ├── flink-batch.properties
│   │       │   ├── flink-streaming.properties
│   │       │   └── flink.properties
│   │       ├── scala-flink-1.12
│   │       ├── scala-flink-1.13
│   │       ├── scala-flink-1.14
│   │       ├── scala-flink-1.15
│   │       ├── scala-flink-1.16
│   │       ├── scala-flink-1.17
│   │       ├── scala-flink-1.18
│   │       ├── scala-flink-1.19
│   │       ├── scala-pub/com/zto/fire/flink/ext/stream
│   │       │   ├── DataStreamHelperImpl.scala
│   │       │   └── StreamExecutionEnvHelperImpl.scala
│   │       ├── scala-zto/com/zto/fire/flink/ext/stream
│   │       │   ├── DataStreamHelperImpl.scala
│   │       │   └── StreamExecutionEnvHelperImpl.scala
│   │       └── scala/com/zto
│   │           ├── fire.scala
│   │           └── fire/flink
│   │               ├── AbstractFlinkBatch.scala
│   │               ├── AbstractFlinkStreaming.scala
│   │               ├── BaseFlink.scala
│   │               ├── BaseFlinkBatch.scala
│   │               ├── BaseFlinkCore.scala
│   │               ├── BaseFlinkStreaming.scala
│   │               ├── FlinkBatch.scala
│   │               ├── FlinkCore.scala
│   │               ├── FlinkStreaming.scala
│   │               ├── acc
│   │               ├── conf
│   │               ├── connector
│   │               ├── ext
│   │               ├── plugin
│   │               ├── rest
│   │               ├── sql
│   │               ├── sync
│   │               ├── task
│   │               └── util
│   ├── fire-spark
│   │   ├── pom.xml
│   │   └── src/main
│   │       ├── java/com/zto/fire/spark
│   │       │   ├── anno
│   │       │   │   ├── SparkConf.java
│   │       │   │   ├── Streaming.java
│   │       │   │   └── StreamingDuration.java
│   │       │   ├── bean
│   │       │   │   ├── ColumnMeta.java
│   │       │   │   ├── FunctionMeta.java
│   │       │   │   ├── RestartParams.java
│   │       │   │   ├── SparkInfo.java
│   │       │   │   └── TableMeta.java
│   │       │   └── task
│   │       │       └── SparkSchedulerManager.java
│   │       ├── resources
│   │       │   ├── spark-core.properties
│   │       │   ├── spark-streaming.properties
│   │       │   ├── spark.properties
│   │       │   └── structured-streaming.properties
│   │       ├── scala-spark-2.3
│   │       ├── scala-spark-2.4
│   │       ├── scala-spark-3.0
│   │       ├── scala-spark-3.1
│   │       ├── scala-spark-3.2
│   │       ├── scala-spark-3.3
│   │       ├── scala-spark-3.4
│   │       ├── scala-spark-3.5
│   │       └── scala/com/zto
│   │           ├── fire.scala
│   │           └── fire/spark
│   │               ├── AbstractSparkCore.scala
│   │               ├── AbstractSparkStreaming.scala
│   │               ├── AbstractStructuredStreaming.scala
│   │               ├── BaseHudiStreaming.scala
│   │               ├── BaseSpark.scala
│   │               ├── BaseSparkBatch.scala
│   │               ├── BaseSparkCore.scala
│   │               ├── BaseSparkStreaming.scala
│   │               ├── BaseStructuredStreaming.scala
│   │               ├── HudiStreaming.scala
│   │               ├── SparkBatch.scala
│   │               ├── SparkCore.scala
│   │               ├── SparkStreaming.scala
│   │               ├── StructuredStreaming.scala
│   │               ├── acc
│   │               ├── conf
│   │               ├── connector
│   │               ├── ext
│   │               ├── listener
│   │               ├── plugin
│   │               ├── rest
│   │               ├── sink
│   │               ├── sql
│   │               ├── sync
│   │               ├── task
│   │               ├── udf
│   │               └── util
│   └── pom.xml
├── fire-enhance
│   ├── apache-arthas
│   │   ├── pom.xml
│   │   └── src/main/java/com/taobao/arthas/agent/attach/ArthasAgent.java
│   ├── apache-flink
│   │   ├── pom.xml
│   │   └── src/main
│   │       ├── java-flink-1.12
│   │       ├── java-flink-1.13
│   │       ├── java-flink-1.14
│   │       ├── java-flink-1.15
│   │       ├── java-flink-1.16
│   │       ├── java-flink-1.17
│   │       ├── java-flink-1.18
│   │       └── java-flink-1.19
│   ├── apache-spark
│   │   ├── pom.xml
│   │   └── src/main
│   │       ├── java/com/zto/fire/spark/Enhance.java
│   │       └── scala-spark-3.0/org/apache/spark
│   │           ├── internal/config/Streaming.scala
│   │           ├── sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala
│   │           ├── streaming/kafka010/DirectKafkaInputDStream.scala
│   │           └── streaming/scheduler/ExecutorAllocationManager.scala
│   └── pom.xml
├── fire-examples
│   ├── flink-examples
│   │   ├── pom.xml
│   │   └── src
│   │       ├── main/java/com/zto/fire
│   │       │   ├── examples/bean
│   │       │   │   ├── People.java
│   │       │   │   └── Student.java
│   │       │   └── sql/SqlCommandParser.java
│   │       ├── main/resources
│   │       │   ├── META-INF/services/org.apache.flink.table.factories.Factory
│   │       │   ├── common.properties
│   │       │   ├── connector/hive/HiveSinkTest.properties
│   │       │   ├── log4j.properties
│   │       │   └── stream/ConfigCenterTest.properties
│   │       ├── main/scala-flink-1.14/com.zto.fire.examples.flink.util/StateCleaner.scala
│   │       ├── main/scala/com/zto/fire/examples/flink
│   │       │   ├── FlinkDemo.scala
│   │       │   ├── FlinkSQLDemo.scala
│   │       │   ├── KafkaDeserializationTest.scala
│   │       │   ├── Test.scala
│   │       │   ├── ZTORouteBillCDC.scala
│   │       │   ├── acc
│   │       │   ├── batch
│   │       │   ├── connector
│   │       │   ├── lineage
│   │       │   ├── module
│   │       │   ├── sql
│   │       │   └── stream
│   │       └── test/scala/com/zto/fire/examples/flink
│   │           ├── anno/AnnoConfTest.scala
│   │           ├── core/BaseFlinkTester.scala
│   │           └── jdbc/JdbcUnitTest.scala
│   ├── pom.xml
│   └── spark-examples
│       ├── pom.xml
│       └── src
│           ├── main/java/com/zto/fire/examples/bean
│           │   ├── Hudi.java
│           │   ├── Student.java
│           │   └── StudentMulti.java
│           ├── main/resources
│           │   ├── common.properties
│           │   ├── jdbc/JdbcTest.properties
│           │   └── streaming/ConfigCenterTest.properties
│           ├── main/scala/com/zto/fire/examples/spark
│           │   ├── SparkDemo.scala
│           │   ├── SparkSQLDemo.scala
│           │   ├── Test.scala
│           │   ├── acc
│           │   ├── hbase
│           │   ├── hive
│           │   ├── hudi
│           │   ├── jdbc
│           │   ├── lineage
│           │   ├── module
│           │   ├── schedule
│           │   ├── sql
│           │   ├── streaming
│           │   ├── structured
│           │   └── thread
│           └── test
│               ├── resources
│               │   ├── ConfigCenterUnitTest.properties
│               │   ├── SparkSQLParserTest.properties
│               │   └── common.properties
│               ├── scala-spark-3.0/com/zto/fire/examples/spark/sql/SparkSqlParseTest.scala
│               └── scala/com/zto/fire/examples/spark
│                   ├── anno
│                   ├── conf
│                   ├── core
│                   ├── hbase
│                   ├── hive
│                   ├── jdbc
│                   └── parser
├── fire-shell
│   ├── flink-shell
│   │   ├── pom.xml
│   │   └── src/main
│   │       ├── java-flink-1.12/org.apache.flink.streaming.api.environment/StreamExecutionEnvironment.java
│   │       ├── java-flink-1.13/org.apache.flink.streaming.api.environment/StreamExecutionEnvironment.java
│   │       └── java/org/apache/flink/api/java
│   │           ├── JarHelper.java
│   │           ├── ScalaShellEnvironment.java
│   │           └──
ScalaShellStreamEnvironment.java │ │ └── scala │ │ ├── com │ │ └── zto │ │ │ └── fire │ │ │ └── shell │ │ │ └── flink │ │ │ ├── FireILoop.scala │ │ │ └── Test.scala │ │ └── org │ │ └── apache │ │ └── flink │ │ └── api │ │ └── scala │ │ └── FlinkShell.scala ├── pom.xml └── spark-shell │ ├── pom.xml │ └── src │ └── main │ └── scala-spark-3.0 │ ├── com │ └── zto │ │ └── fire │ │ └── shell │ │ └── spark │ │ ├── FireILoop.scala │ │ ├── Main.scala │ │ └── Test.scala │ └── org │ └── apache │ └── spark │ └── repl │ ├── ExecutorClassLoader.scala │ └── Signaling.scala └── pom.xml /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/* 2 | fire-parent.iml 3 | *.iml 4 | target/ 5 | *.log 6 | conf-test.properties 7 | conf-dev.properties 8 | conf-prod.properties 9 | cluster.properties 10 | dev/repo 11 | pom.xml.versionsBackup -------------------------------------------------------------------------------- /CNAME: -------------------------------------------------------------------------------- 1 | www.fireframework.cn -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/.nojekyll -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | www.fireframework.cn -------------------------------------------------------------------------------- /docs/connector/adb.md: -------------------------------------------------------------------------------- 1 | 19 | 20 | ## Flink adb connector 21 | 22 | *Flink adb connector基于jdbc sql connector改造,使用方法同flink标准的jdbc sql connector,fire框架能根据jdbc url自动识别是mysql还是adb。* 23 | 24 | -------------------------------------------------------------------------------- /docs/connector/oracle.md: -------------------------------------------------------------------------------- 1 | 19 | 20 | ## Flink oracle connector 21 | 22 | *Flink oracle connector基于jdbc sql connector改造,使用方法同flink标准的jdbc sql connector,fire框架能根据jdbc url自动识别是mysql还是oracle。* 23 | 24 | -------------------------------------------------------------------------------- /docs/feature.md: -------------------------------------------------------------------------------- 1 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/img/Fire.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/Fire.png -------------------------------------------------------------------------------- /docs/img/anno-sql.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/anno-sql.png -------------------------------------------------------------------------------- /docs/img/anno_log.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/anno_log.png -------------------------------------------------------------------------------- /docs/img/arthas-shell.png: 
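Since both the adb and oracle connectors reuse the standard Flink jdbc sql connector options, a job can declare the table with plain jdbc DDL and let fire pick the dialect from the url. A minimal Scala sketch of that usage (host, database, table name and credentials are placeholders, and source_view is assumed to be an already-registered view):

import org.apache.flink.table.api.{EnvironmentSettings, TableEnvironment}

val tableEnv = TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build())

// Standard Flink jdbc sql connector options; the target database (mysql / adb / oracle)
// is inferred from the jdbc url, so no dedicated connector name is required.
tableEnv.executeSql(
  """
    |CREATE TABLE t_order_sink (
    |  id BIGINT,
    |  order_no STRING,
    |  amount DECIMAL(10, 2)
    |) WITH (
    |  'connector' = 'jdbc',
    |  'url' = 'jdbc:mysql://adb-demo-host:3306/demo_db',
    |  'table-name' = 't_order',
    |  'username' = 'demo_user',
    |  'password' = 'demo_pass'
    |)
    |""".stripMargin)

// Write to it like any other jdbc table (source_view is assumed to exist).
tableEnv.executeSql("INSERT INTO t_order_sink SELECT id, order_no, amount FROM source_view")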
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/arthas-shell.png -------------------------------------------------------------------------------- /docs/img/checkpoint-duration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/checkpoint-duration.png -------------------------------------------------------------------------------- /docs/img/configuration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/configuration.png -------------------------------------------------------------------------------- /docs/img/dingding.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/dingding.jpeg -------------------------------------------------------------------------------- /docs/img/fire-framework-logo.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/fire-framework-logo.jpeg -------------------------------------------------------------------------------- /docs/img/fire-restful.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/fire-restful.png -------------------------------------------------------------------------------- /docs/img/streaming-duration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/streaming-duration.png -------------------------------------------------------------------------------- /docs/img/web-config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/web-config.png -------------------------------------------------------------------------------- /docs/img/weixin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FireFramework/fire/39a3ca76f12f6ec79a77350f5e09a5ccbd397d89/docs/img/weixin.png -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 | 5 |http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | *
Unless required by applicable law or agreed to in writing, software distributed under the 11 | * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | * express or implied. See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | package org.apache.rocketmq.flink.util; 16 | 17 | import java.lang.reflect.Field; 18 | 19 | public class TestUtils { 20 | public static void setFieldValue(Object obj, String fieldName, Object value) { 21 | try { 22 | Field field = obj.getClass().getDeclaredField(fieldName); 23 | field.setAccessible(true); 24 | field.set(obj, value); 25 | } catch (Exception e) { 26 | e.printStackTrace(); 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /fire-connectors/flink-connectors/flink-rocketmq/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory: -------------------------------------------------------------------------------- 1 | com.zto.fire.flink.sql.connector.rocketmq.RocketMQDynamicTableFactory 2 | 3 | com.zto.fire.flink.sql.connector.rocketmq.FireRocketMQDynamicTableFactory -------------------------------------------------------------------------------- /fire-connectors/spark-connectors/spark-hbase/src/main/java/org/apache/hadoop/hbase/client/ConnFactoryEnhance.java: -------------------------------------------------------------------------------- 1 | /** 2 | * 3 | * Licensed to the Apache Software Foundation (ASF) under one 4 | * or more contributor license agreements. See the NOTICE file 5 | * distributed with this work for additional information 6 | * regarding copyright ownership. The ASF licenses this file 7 | * to you under the Apache License, Version 2.0 (the 8 | * "License"); you may not use this file except in compliance 9 | * with the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | 20 | package org.apache.hadoop.hbase.client; 21 | 22 | import java.io.Serializable; 23 | 24 | public class ConnFactoryEnhance extends ConnectionFactoryEnhance implements Serializable { 25 | public ConnFactoryEnhance() { 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /fire-connectors/spark-connectors/spark-hbase/src/main/scala/org/apache/hadoop/hbase/spark/datasources/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package org.apache.hadoop.hbase.spark 19 | 20 | import org.apache.hadoop.hbase.util.Bytes 21 | 22 | import scala.math.Ordering 23 | 24 | package object hbase { 25 | type HBaseType = Array[Byte] 26 | val ByteMax = -1.asInstanceOf[Byte] 27 | val ByteMin = 0.asInstanceOf[Byte] 28 | val ord: Ordering[HBaseType] = new Ordering[HBaseType] { 29 | def compare(x: Array[Byte], y: Array[Byte]): Int = { 30 | return Bytes.compareTo(x, y) 31 | } 32 | } 33 | //Do not use BinaryType.ordering 34 | implicit val order: Ordering[HBaseType] = ord 35 | 36 | } 37 | -------------------------------------------------------------------------------- /fire-connectors/spark-connectors/spark-hudi/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | !.mvn/wrapper/maven-wrapper.jar 3 | !**/src/main/**/target/ 4 | !**/src/test/**/target/ 5 | 6 | ### IntelliJ IDEA ### 7 | .idea/modules.xml 8 | .idea/jarRepositories.xml 9 | .idea/compiler.xml 10 | .idea/libraries/ 11 | *.iws 12 | *.iml 13 | *.ipr 14 | 15 | ### Eclipse ### 16 | .apt_generated 17 | .classpath 18 | .factorypath 19 | .project 20 | .settings 21 | .springBeans 22 | .sts4-cache 23 | 24 | ### NetBeans ### 25 | /nbproject/private/ 26 | /nbbuild/ 27 | /dist/ 28 | /nbdist/ 29 | /.nb-gradle/ 30 | build/ 31 | !**/src/main/**/build/ 32 | !**/src/test/**/build/ 33 | 34 | ### VS Code ### 35 | .vscode/ 36 | 37 | ### Mac OS ### 38 | .DS_Store -------------------------------------------------------------------------------- /fire-connectors/spark-connectors/spark-hudi/src/main/java/com/zto/fire/hudi/enu/HoodieOperationType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
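The implicit `order` in the hbase package object above gives Array[Byte] row keys HBase's unsigned lexicographic ordering via Bytes.compareTo, which is why BinaryType.ordering is deliberately avoided. A small sketch of the effect, with made-up row keys:

import org.apache.hadoop.hbase.spark.hbase._
import org.apache.hadoop.hbase.util.Bytes

// 0xFF sorts last under unsigned comparison; a signed byte ordering would put it first.
val rowKeys: Seq[HBaseType] = Seq(Bytes.toBytes("row-10"), Bytes.toBytes("row-2"), Array(0xFF.toByte))
val sorted = rowKeys.sorted // picks up the implicit `order` from the package object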
17 | */ 18 | 19 | package com.zto.fire.hudi.enu; 20 | 21 | /** 22 | * hudi表操作类型 23 | * 24 | * @author ChengLong 2023-05-12 14:40:17 25 | * @since 2.3.5 26 | */ 27 | public enum HoodieOperationType { 28 | INSERT("insert"), INSERT_OVERWRITE("insert_overwrite"), UPSERT("upsert"), BULK_INSERT("bulk_insert"), DELETE("delete"); 29 | 30 | private String name; 31 | HoodieOperationType(String name) { 32 | this.name = name; 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /fire-connectors/spark-connectors/spark-hudi/src/main/java/com/zto/fire/hudi/enu/HoodieTableType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.zto.fire.hudi.enu; 20 | 21 | /** 22 | * hudi表类型 23 | * 24 | * @author ChengLong 2023-04-10 10:40:17 25 | * @since 2.3.5 26 | */ 27 | public enum HoodieTableType { 28 | COPY_ON_WRITE, MERGE_ON_READ 29 | } -------------------------------------------------------------------------------- /fire-connectors/spark-connectors/spark-hudi/src/main/scala/com/zto/fire/hudi.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire 19 | 20 | /** 21 | * hudi connector隐式转换 22 | * 23 | * @author ChengLong 2023-03-15 17:03:20 24 | * @since 2.3.5 25 | */ 26 | package object hudi { 27 | 28 | 29 | } 30 | -------------------------------------------------------------------------------- /fire-connectors/spark-connectors/spark-hudi/src/main/scala/com/zto/fire/hudi/conf/FireHudiConf.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. 
See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.hudi.conf 19 | 20 | import com.zto.fire.common.conf.KeyNum 21 | import com.zto.fire.common.util.PropUtils 22 | 23 | /** 24 | * Hudi相关配置 25 | * 26 | * @author ChengLong 2023-03-15 17:06:40 27 | * @since 2.3.5 28 | */ 29 | private[fire] object FireHudiConf { 30 | lazy val HUDI_FORMAT = "org.apache.hudi" 31 | lazy val HUDI_OPTIONS_START = "hudi.options." 32 | 33 | // Spark write hudi的options选项 34 | def hudiOptions(keyNum: Int = KeyNum._1): Map[String, String] = PropUtils.sliceKeysByNum(this.HUDI_OPTIONS_START, keyNum) 35 | } 36 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/After.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注生命周期方法,在用户代码执行完成后调用,可用于资源释放 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface After { 34 | } 35 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Before.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
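FireHudiConf.hudiOptions above collects every configuration entry that starts with the hudi.options. prefix, so write options can live in the job's properties file instead of code. A hedged sketch of that flow (the property keys are ordinary hudi options used as placeholders, df is assumed to be an existing Spark DataFrame, and sliceKeysByNum is assumed to strip the hudi.options. prefix from the returned keys):

// Illustrative entries in the task's .properties file:
//   hudi.options.hoodie.datasource.write.recordkey.field=id
//   hudi.options.hoodie.datasource.write.precombine.field=update_time

import com.zto.fire.hudi.conf.FireHudiConf

val options: Map[String, String] = FireHudiConf.hudiOptions() // keyNum defaults to KeyNum._1
df.write
  .format(FireHudiConf.HUDI_FORMAT) // "org.apache.hudi"
  .options(options)
  .mode("append")
  .save("/warehouse/demo/hudi_table") // placeholder path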
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注生命周期方法,在引擎初始化前被调用执行,可用于资源初始化 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Before { 34 | } 35 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step1.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step1 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step10.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
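The lifecycle annotations shown above are plain runtime markers: @Before runs before the engine is initialized, @After runs after the business code finishes (for resource release), and @Step1…@Step18 label ordered business steps with a description and an optional skipError. A hedged skeleton of how a job might use them (the surrounding base class is omitted, and Step2, not shown in this dump, is assumed to mirror Step1):

import com.zto.fire.core.anno.lifecycle.{After, Before, Step1, Step2}

class OrderSyncJob /* extends the appropriate fire base class */ {

  @Before
  def init(): Unit = {
    // open connection pools or other resources before the engine starts
  }

  @Step1("load the source records")
  def loadSource(): Unit = {
    // read from kafka / hive ...
  }

  @Step2(value = "write to the target store", skipError = true)
  def sink(): Unit = {
    // jdbc / hudi sink; skipError lets later steps run even if this one fails
  }

  @After
  def close(): Unit = {
    // release resources once all steps have finished
  }
}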
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step10 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step11.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step11 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step12.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step12 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step13.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step13 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step14.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step14 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step15.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step15 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step16.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step16 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step17.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step17 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/anno/lifecycle/Step18.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.anno.lifecycle; 19 | 20 | import java.lang.annotation.ElementType; 21 | import java.lang.annotation.Retention; 22 | import java.lang.annotation.RetentionPolicy; 23 | import java.lang.annotation.Target; 24 | 25 | /** 26 | * 标记注解:用于标注业务逻辑代码执行步骤 27 | * 28 | * @author ChengLong 2022-08-09 09:49:12 29 | * @since 2.3.2 30 | */ 31 | @Target(ElementType.METHOD) 32 | @Retention(RetentionPolicy.RUNTIME) 33 | public @interface Step18 { 34 | 35 | /** 36 | * 业务代码逻辑描述 37 | */ 38 | String value() default ""; 39 | 40 | /** 41 | * 当发生异常时,是否跳过异常执行下一步 42 | */ 43 | boolean skipError() default false; 44 | } 45 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/task/TaskRunner.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.task; 19 | 20 | import org.quartz.Job; 21 | import org.quartz.JobExecutionContext; 22 | import org.quartz.JobExecutionException; 23 | 24 | import java.io.Serializable; 25 | 26 | /** 27 | * Scheduler TaskRunner 28 | * @author ChengLong 2019年11月5日 09:59:33 29 | * @since 0.3.5 30 | */ 31 | public class TaskRunner implements Job, Serializable { 32 | @Override 33 | public void execute(JobExecutionContext context) throws JobExecutionException { 34 | SchedulerManager.execute(context); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /fire-core/src/main/java/com/zto/fire/core/task/TaskRunnerQueue.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.task; 19 | 20 | import org.quartz.DisallowConcurrentExecution; 21 | 22 | /** 23 | * 线程安全的方式执行定时任务,同一实例同一时刻只能有一个任务 24 | * @author ChengLong 2019年11月5日 09:59:33 25 | * @since 0.3.5 26 | */ 27 | @DisallowConcurrentExecution 28 | public class TaskRunnerQueue extends TaskRunner { 29 | } 30 | -------------------------------------------------------------------------------- /fire-core/src/main/scala/com/zto/fire/core/Api.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core 19 | 20 | /** 21 | * Fire变量API 22 | * 23 | * @author ChengLong 24 | * @since 1.0.0 25 | * @create 2021-01-12 17:16 26 | */ 27 | private[fire] trait Api { 28 | 29 | /** 30 | * 流的启动 31 | */ 32 | def start: Any 33 | } 34 | -------------------------------------------------------------------------------- /fire-core/src/main/scala/com/zto/fire/core/ext/BaseFireExt.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
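TaskRunner above simply hands each Quartz firing to SchedulerManager, and TaskRunnerQueue adds @DisallowConcurrentExecution so two firings of the same job never overlap. A plain-Quartz sketch of scheduling such a job, shown only to illustrate that behaviour (inside fire this wiring is handled by SchedulerManager, not by user code):

import org.quartz.{JobBuilder, SimpleScheduleBuilder, TriggerBuilder}
import org.quartz.impl.StdSchedulerFactory
import com.zto.fire.core.task.TaskRunnerQueue

val scheduler = StdSchedulerFactory.getDefaultScheduler
val job = JobBuilder.newJob(classOf[TaskRunnerQueue]).withIdentity("demo-task").build()
val trigger = TriggerBuilder.newTrigger()
  .withIdentity("demo-trigger")
  .withSchedule(SimpleScheduleBuilder.repeatSecondlyForever(30)) // fire every 30s, never concurrently
  .build()

scheduler.scheduleJob(job, trigger)
scheduler.start()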
16 | */ 17 | 18 | package com.zto.fire.core.ext 19 | 20 | import com.zto.fire.common.util.Tools 21 | 22 | /** 23 | * 隐式转换基类 24 | * 25 | * @author ChengLong 26 | * @since 2.0.0 27 | * @create 2020-12-16 15:55 28 | */ 29 | trait BaseFireExt extends Tools 30 | -------------------------------------------------------------------------------- /fire-core/src/main/scala/com/zto/fire/core/ext/Provider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.ext 19 | 20 | import com.zto.fire.common.util.Logging 21 | 22 | /** 23 | * 为上层扩展层提供api集合 24 | * 25 | * @author ChengLong 26 | * @since 2.0.0 27 | * @create 2020-12-23 17:52 28 | */ 29 | trait Provider extends Logging { 30 | } 31 | -------------------------------------------------------------------------------- /fire-core/src/main/scala/com/zto/fire/core/rest/RestCase.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.rest 19 | 20 | import spark.{Request, Response} 21 | 22 | /** 23 | * 用于封装rest的相关信息 24 | * 25 | * @param method 26 | * rest的提交方式:GET/POST/PUT/DELETE等 27 | * @param path 28 | * rest服务地址 29 | * @author ChengLong 2019-3-16 09:58:06 30 | */ 31 | private[fire] case class RestCase(method: String, path: String, fun: (Request, Response) => AnyRef) 32 | -------------------------------------------------------------------------------- /fire-core/src/main/scala/com/zto/fire/core/sql/SqlExtensionsParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 
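RestCase above pairs an HTTP method and path with a handler over the embedded spark-java Request/Response, so each endpoint is just a value. A hedged sketch of one descriptor (the path and payload are placeholders, the case class is private[fire] so this only applies to framework-internal code, and the call that registers RestCase instances with the rest server is not shown):

import com.zto.fire.core.rest.RestCase
import spark.{Request, Response}

val healthCheck = RestCase("GET", "/system/health", (req: Request, rep: Response) => {
  rep.`type`("application/json")
  """{"status":"UP"}"""
})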
5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.sql 19 | 20 | /** 21 | * fire 框架sql扩展与解析器 22 | * 23 | * @author ChengLong 24 | * @date 2022-05-10 10:01:52 25 | * @since 2.2.2 26 | */ 27 | private[fire] trait SqlExtensionsParser { 28 | 29 | } 30 | -------------------------------------------------------------------------------- /fire-core/src/main/scala/com/zto/fire/core/sync/SyncManager.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.core.sync 19 | 20 | import com.zto.fire.common.util.Logging 21 | 22 | /** 23 | * 同步管理器: 24 | * 1. 用于Diver或JobManager端向Executor或TaskManager端同步数据 25 | * 2. 用于将Executor或TaskManager端数据收集到driver或JobManager端 26 | * 27 | * @author ChengLong 2021-11-2 15:41:30 28 | * @since 2.2.0 29 | */ 30 | trait SyncManager extends Logging { 31 | 32 | } 33 | -------------------------------------------------------------------------------- /fire-core/src/main/scala/com/zto/fire/core/util/SingletonFactory.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.core.util 19 | 20 | import com.zto.fire.common.util.ValueUtils 21 | 22 | /** 23 | * 单例工厂 24 | * 25 | * @author ChengLong 26 | * @since 2.0.0 27 | * @create 2020-12-18 14:02 28 | */ 29 | private[fire] trait SingletonFactory { 30 | @transient protected[this] var appName: String = _ 31 | 32 | /** 33 | * 设置TableEnv实例 34 | */ 35 | protected[fire] def setAppName(appName: String): this.type = { 36 | if (ValueUtils.noEmpty(appName) && ValueUtils.isEmpty(this.appName)) this.appName = appName 37 | this 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /fire-engines/.gitignore: -------------------------------------------------------------------------------- 1 | # use glob syntax. 2 | syntax: glob 3 | *.ser 4 | *.class 5 | *~ 6 | *.bak 7 | #*.off 8 | *.old 9 | 10 | # eclipse conf file 11 | .settings 12 | .classpath 13 | .project 14 | .manager 15 | .scala_dependencies 16 | 17 | # idea 18 | .idea 19 | *.iml 20 | 21 | # building 22 | target 23 | build 24 | null 25 | tmp* 26 | temp* 27 | dist 28 | test-output 29 | build.log 30 | 31 | # other scm 32 | .svn 33 | .CVS 34 | .hg* 35 | 36 | # switch to regexp syntax. 37 | # syntax: regexp 38 | # ^\.pc/ 39 | 40 | #SHITTY output not in target directory 41 | build.log 42 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/.gitignore: -------------------------------------------------------------------------------- 1 | # use glob syntax. 2 | syntax: glob 3 | *.ser 4 | *.class 5 | *~ 6 | *.bak 7 | #*.off 8 | *.old 9 | 10 | # eclipse conf file 11 | .settings 12 | .classpath 13 | .project 14 | .manager 15 | .scala_dependencies 16 | 17 | # idea 18 | .idea 19 | *.iml 20 | 21 | # building 22 | target 23 | build 24 | null 25 | tmp* 26 | temp* 27 | dist 28 | test-output 29 | build.log 30 | 31 | # other scm 32 | .svn 33 | .CVS 34 | .hg* 35 | 36 | # switch to regexp syntax. 37 | # syntax: regexp 38 | # ^\.pc/ 39 | 40 | #SHITTY output not in target directory 41 | build.log 42 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/java/com/zto/fire/flink/anno/FlinkConf.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink.anno; 19 | 20 | import java.lang.annotation.*; 21 | 22 | /** 23 | * 基于注解进行任务的配置,支持纯注解方式进行Flink相关参数配置 24 | * 25 | * @author ChengLong 2022-08-18 08:57:23 26 | * @since 2.3.2 27 | */ 28 | @Inherited 29 | @Target(ElementType.TYPE) 30 | @Retention(RetentionPolicy.RUNTIME) 31 | public @interface FlinkConf { 32 | 33 | /** 34 | * 配置项列表,key=value的字符串形式 35 | */ 36 | String[] props() default ""; 37 | 38 | /** 39 | * 配置的字符串 40 | */ 41 | String value() default ""; 42 | } -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/java/com/zto/fire/flink/bean/CheckpointParams.java: -------------------------------------------------------------------------------- 1 | package com.zto.fire.flink.bean; 2 | 3 | /** 4 | * checkpoint热修改参数 5 | * {"interval":10000, "timeout":20000, "minPauseBetween": 10000} 6 | * @author ChengLong 2019-5-5 16:57:49 7 | */ 8 | public class CheckpointParams { 9 | 10 | /** 11 | * checkpoint的频率 12 | */ 13 | private Long interval; 14 | 15 | /** 16 | * checkpoint的超时时间 17 | */ 18 | private Long timeout; 19 | 20 | /** 21 | * 两次checkpoint的最短时间间隔 22 | */ 23 | private Long minPauseBetween; 24 | 25 | public Long getInterval() { 26 | return interval; 27 | } 28 | 29 | public void setInterval(Long interval) { 30 | this.interval = interval; 31 | } 32 | 33 | public Long getTimeout() { 34 | return timeout; 35 | } 36 | 37 | public void setTimeout(Long timeout) { 38 | this.timeout = timeout; 39 | } 40 | 41 | public Long getMinPauseBetween() { 42 | return minPauseBetween; 43 | } 44 | 45 | public void setMinPauseBetween(Long minPauseBetween) { 46 | this.minPauseBetween = minPauseBetween; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/java/com/zto/fire/flink/enu/DistributeModule.java: -------------------------------------------------------------------------------- 1 | package com.zto.fire.flink.enu; 2 | 3 | import org.apache.commons.lang3.StringUtils; 4 | 5 | /** 6 | * 模块类型,用于标识不同的模块 7 | * 8 | * @author ChengLong 2021-11-11 09:34:48 9 | * @since 2.2.0 10 | */ 11 | public enum DistributeModule { 12 | CONF("conf"), ARTHAS("arthas"); 13 | 14 | DistributeModule(String type) { 15 | } 16 | 17 | /** 18 | * 将字符串解析成指定的枚举类型 19 | */ 20 | public static DistributeModule parse(String type) { 21 | if (StringUtils.isBlank(type)) { 22 | return CONF; 23 | } 24 | 25 | try { 26 | return Enum.valueOf(DistributeModule.class, type.trim().toUpperCase()); 27 | } catch (Exception e) { 28 | return CONF; 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory: -------------------------------------------------------------------------------- 1 | com.zto.fire.flink.sql.connector.rocketmq.RocketMQDynamicTableFactory -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/resources/flink-batch.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 
5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | flink.fire.config_center.enable = false -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.12/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.13/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.14/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.15/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.16/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.17/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.18/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-flink-1.19/com/zto/fire/flink/sql/FlinkSqlParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import com.zto.fire.common.anno.Internal 21 | 22 | /** 23 | * Flink SQL解析器,用于解析Flink SQL语句中的库、表、分区、操作类型等信息 24 | * 25 | * @author ChengLong 2021-6-18 16:41:04 26 | * @since 2.0.0 27 | */ 28 | @Internal 29 | private[fire] object FlinkSqlParser extends FlinkSqlParserBase { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-pub/com/zto/fire/flink/ext/stream/DataStreamHelperImpl.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink.ext.stream 19 | 20 | import org.apache.flink.streaming.api.scala.DataStream 21 | 22 | /** 23 | * 社区版本:用于包装flink addSink api,实现血缘采集 24 | * 25 | * @author ChengLong 26 | * @Date 2024/3/8 16:51 27 | * @version 2.4.3 28 | */ 29 | abstract class DataStreamHelperImpl[T](stream: DataStream[T]) extends DataStreamHelper[T](stream) { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala-pub/com/zto/fire/flink/ext/stream/StreamExecutionEnvHelperImpl.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink.ext.stream 19 | 20 | import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment 21 | 22 | /** 23 | * 社区版本:用于包装flink addSource与addSink api,实现血缘采集 24 | * 25 | * @author ChengLong 26 | * @Date 2024/3/8 16:51 27 | * @version 2.4.3 28 | */ 29 | class StreamExecutionEnvHelperImpl(env: StreamExecutionEnvironment) extends StreamExecutionEnvHelper(env) { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/BaseFlinkBatch.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink 19 | 20 | /** 21 | * flink batch通用父接口 22 | * @author ChengLong 2020年1月7日 15:15:56 23 | */ 24 | trait BaseFlinkBatch extends AbstractFlinkBatch { 25 | 26 | /** 27 | * 初始化引擎上下文,如SparkSession、StreamExecutionEnvironment等 28 | * 可根据实际情况,将配置参数放到同名的配置文件中进行差异化的初始化 29 | */ 30 | def main(args: Array[String]): Unit = { 31 | this.init(null, args) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/BaseFlinkCore.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink 19 | 20 | /** 21 | * flink batch通用父接口 22 | * 23 | * @author ChengLong 2020年1月7日 15:15:56 24 | */ 25 | trait BaseFlinkCore extends BaseFlinkBatch { 26 | 27 | } 28 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/BaseFlinkStreaming.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink 19 | 20 | /** 21 | * flink streaming通用父接口 22 | * 23 | * @author ChengLong 2020年1月7日 10:50:19 24 | */ 25 | trait BaseFlinkStreaming extends AbstractFlinkStreaming { 26 | 27 | /** 28 | * 初始化引擎上下文,如SparkSession、StreamExecutionEnvironment等 29 | * 可根据实际情况,将配置参数放到同名的配置文件中进行差异化的初始化 30 | */ 31 | def main(args: Array[String]): Unit = { 32 | this.init(null, args) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/FlinkBatch.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink 19 | 20 | /** 21 | * flink batch通用父接口 22 | * @author ChengLong 2020年1月7日 15:15:56 23 | */ 24 | trait FlinkBatch extends BaseFlinkBatch { 25 | 26 | } 27 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/FlinkCore.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 
5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink 19 | 20 | /** 21 | * flink batch通用父接口 22 | * @author ChengLong 2020年1月7日 15:15:56 23 | */ 24 | trait FlinkCore extends BaseFlinkBatch { 25 | 26 | } 27 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/FlinkStreaming.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink 19 | 20 | /** 21 | * flink streaming通用父接口 22 | * 23 | * @author ChengLong 2020年1月7日 10:52:19 24 | */ 25 | trait FlinkStreaming extends BaseFlinkStreaming { 26 | 27 | } 28 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/ext/batch/BatchTableEnvExt.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.flink.ext.batch 19 | 20 | import com.zto.fire.jdbc.JdbcConnectorBridge 21 | import org.apache.flink.table.api.{Table, TableEnvironment} 22 | 23 | /** 24 | * 用于flink BatchTableEnvironment API库扩展 25 | * 26 | * @author ChengLong 2020年1月9日 13:52:16 27 | * @since 0.4.1 28 | */ 29 | class BatchTableEnvExt(env: TableEnvironment) extends JdbcConnectorBridge { 30 | 31 | /** 32 | * 执行sql query操作 33 | * 34 | * @param sql 35 | * sql语句 36 | * @return 37 | * table对象 38 | */ 39 | def sql(sql: String): Table = { 40 | this.env.sqlQuery(sql) 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/ext/function/RuntimeContextExt.scala: -------------------------------------------------------------------------------- 1 | package com.zto.fire.flink.ext.function 2 | 3 | import com.zto.fire.common.util.Logging 4 | import org.apache.flink.api.common.functions.RuntimeContext 5 | 6 | /** 7 | * RuntimeContext扩展 8 | * 9 | * @author ChengLong 2021-9-13 14:26:28 10 | * @since 2.2.0 11 | */ 12 | class RuntimeContextExt(runtimeContext: RuntimeContext) extends Logging { 13 | 14 | } 15 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/ext/stream/TableResultImplExt.scala: -------------------------------------------------------------------------------- 1 | package com.zto.fire.flink.ext.stream 2 | 3 | import org.apache.flink.table.api.TableResult 4 | 5 | /** 6 | * 用于对Flink TableResult的API库扩展 7 | * 8 | * @author ChengLong 2020年1月7日 09:18:21 9 | * @since 2.1.0 10 | */ 11 | class TableResultImplExt(tableResult: TableResult) { 12 | 13 | /** 14 | * 打印执行结果 15 | */ 16 | def show(): Unit = this.tableResult.print() 17 | } 18 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/sql/FlinkSqlParserConf.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink.sql 19 | 20 | import org.apache.calcite.sql.parser.{SqlParser => CalciteParser} 21 | import org.apache.flink.table.api.{SqlDialect => FlinkSqlDialect} 22 | 23 | /** 24 | * Flink SQL解析器配置 25 | * 26 | * @author ChengLong 27 | * @Date 2024/7/11 16:25 28 | * @version 2.5.0 29 | */ 30 | trait FlinkSqlParserConfBase { 31 | 32 | /** 33 | * 构建flink default的SqlParser config 34 | */ 35 | def createParserConfig(dialect: FlinkSqlDialect = FlinkSqlDialect.DEFAULT): CalciteParser.Config = ??? 
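  // createParserConfig 用于构建与指定 Flink SQL 方言(DEFAULT / HIVE)相匹配的 Calcite SqlParser 配置,
  // 控制标识符引号、大小写处理、语法一致性等解析行为;此处的 ??? 为占位实现,推测由各 Flink 版本对应的具体实现覆写提供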
36 | } 37 | -------------------------------------------------------------------------------- /fire-engines/fire-flink/src/main/scala/com/zto/fire/flink/sync/FlinkDistributeExecuteManager.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.flink.sync 19 | 20 | import com.zto.fire.common.util.FireUtils 21 | import com.zto.fire.core.sync.DistributeExecuteManager 22 | 23 | /** 24 | * Flink分布式执行器,分布式执行指定的代码逻辑 25 | * 26 | * @author ChengLong 27 | * @Date 2024/4/24 17:00 28 | * @version 2.4.6 29 | */ 30 | private object FlinkDistributeExecuteManager extends DistributeExecuteManager { 31 | 32 | /** 33 | * 分布式执行相应的逻辑 34 | */ 35 | override def distributeExecute: Unit = { 36 | // 分布式打印指定类的路径信息 37 | FireUtils.printCodeResource() 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/java/com/zto/fire/spark/anno/SparkConf.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.spark.anno; 19 | 20 | import java.lang.annotation.*; 21 | 22 | /** 23 | * 基于注解进行任务的配置,支持纯注解方式进行Spark相关参数配置 24 | * 25 | * @author ChengLong 2022-08-18 08:57:23 26 | * @since 2.3.2 27 | */ 28 | @Inherited 29 | @Target(ElementType.TYPE) 30 | @Retention(RetentionPolicy.RUNTIME) 31 | public @interface SparkConf { 32 | 33 | /** 34 | * 配置项列表,key=value的字符串形式 35 | */ 36 | String[] props() default ""; 37 | 38 | /** 39 | * 配置的字符串 40 | */ 41 | String value() default ""; 42 | } -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/java/com/zto/fire/spark/anno/Streaming.java: -------------------------------------------------------------------------------- 1 | package com.zto.fire.spark.anno; 2 | 3 | import java.lang.annotation.*; 4 | 5 | /** 6 | * 基于注解的方式配置Spark Streaming任务 7 | * 8 | * @author ChengLong 9 | * @date 2022-04-30 21:44:19 10 | * @since 2.2.1 11 | */ 12 | @Inherited 13 | @Target(ElementType.TYPE) 14 | @Retention(RetentionPolicy.RUNTIME) 15 | public @interface Streaming { 16 | 17 | /** 18 | * 批次时间(s) 19 | */ 20 | int value() default 10; 21 | 22 | /** 23 | * 批次时间(s),同value字段 24 | */ 25 | int interval() default -1; 26 | 27 | /** 28 | * 是否开启spark streaming的checkpoint 29 | */ 30 | boolean checkpoint() default false; 31 | 32 | /** 33 | * 是否自动提交job:call startAwaitTermination() 34 | */ 35 | boolean autoStart() default true; 36 | 37 | /** 38 | * 并行执行的streaming批次数 39 | */ 40 | int concurrent() default -1; 41 | 42 | /** 43 | * 指定消费kafka或rocketmq每秒从每个分区获取的最大记录数 44 | */ 45 | long maxRatePerPartition() default -1; 46 | 47 | /** 48 | * 是否启用反压机制 49 | */ 50 | boolean backpressure() default true; 51 | 52 | /** 53 | * 启用反压机制时每个接收器接收第一批数据的初始最大速率 54 | */ 55 | long backpressureInitialRate() default -1; 56 | 57 | /** 58 | * 是否优雅的停止streaming 59 | */ 60 | boolean stopGracefullyOnShutdown() default true; 61 | 62 | /** 63 | * 任务的并行度 64 | */ 65 | int parallelism() default -1; 66 | } 67 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/java/com/zto/fire/spark/anno/StreamingDuration.java: -------------------------------------------------------------------------------- 1 | package com.zto.fire.spark.anno; 2 | 3 | import java.lang.annotation.*; 4 | 5 | /** 6 | * Spark Streaming任务的批次时间 7 | * 8 | * @author ChengLong 2021年8月3日19:39:28 9 | * @since 2.1.1 10 | */ 11 | @Inherited 12 | @Deprecated 13 | @Target(ElementType.TYPE) 14 | @Retention(RetentionPolicy.RUNTIME) 15 | public @interface StreamingDuration { 16 | 17 | /** 18 | * 批次时间(s) 19 | */ 20 | int value() default 10; 21 | 22 | /** 23 | * 批次时间(s),同value字段 24 | */ 25 | int interval() default -1; 26 | 27 | /** 28 | * 是否开启spark streaming的checkpoint 29 | */ 30 | boolean checkpoint() default false; 31 | 32 | } 33 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/BaseSparkBatch.scala: -------------------------------------------------------------------------------- 1 | package com.zto.fire.spark 2 | 3 | /** 4 | * Spark core通用父接口 5 | * Created by ChengLong on 2018-03-28. 
6 | */ 7 | trait BaseSparkBatch extends BaseSparkCore { 8 | 9 | } 10 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/BaseSparkCore.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark 19 | 20 | /** 21 | * Spark core通用父接口 22 | * Created by ChengLong on 2018-03-28. 23 | */ 24 | class BaseSparkCore extends AbstractSparkCore { 25 | 26 | /** 27 | * 初始化SparkSession对象 28 | */ 29 | def main(args: Array[String]): Unit = { 30 | this.init(args = args) 31 | this.stop 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/BaseSparkStreaming.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark 19 | 20 | 21 | /** 22 | * Spark Streaming通用父接口 23 | * Created by ChengLong on 2018-03-28. 
24 | */ 25 | trait BaseSparkStreaming extends AbstractSparkStreaming { 26 | 27 | /** 28 | * 初始化SparkSession与StreamingContext,默认批次时间为10s 29 | * 批次时间可通过子类复写main方法实现或通过在配置文件中指定:spark.streaming.batch.duration=30 30 | */ 31 | def main(args: Array[String]): Unit = { 32 | val batchDuration = this.conf.getLong("spark.streaming.batch.duration", 10) 33 | val ck = this.conf.getBoolean("spark.streaming.receiver.writeAheadLog.enable", false) 34 | this.init(batchDuration, ck, args) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/BaseStructuredStreaming.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark 19 | 20 | /** 21 | * Structured Streaming通用父类 22 | * Created by ChengLong on 2019-03-11. 23 | */ 24 | trait BaseStructuredStreaming extends AbstractStructuredStreaming { 25 | 26 | /** 27 | * 初始化引擎上下文,如SparkSession、StreamExecutionEnvironment等 28 | * 可根据实际情况,将配置参数放到同名的配置文件中进行差异化的初始化 29 | */ 30 | def main(args: Array[String]): Unit = { 31 | this.init(null, args) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/SparkBatch.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark 19 | 20 | /** 21 | * Spark core通用父接口 22 | * Created by ChengLong on 2018-03-28.
23 | */ 24 | trait SparkBatch extends BaseSparkCore { 25 | 26 | } 27 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/SparkCore.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark 19 | 20 | /** 21 | * Spark core通用父接口 22 | * Created by ChengLong on 2018-03-28. 23 | */ 24 | trait SparkCore extends BaseSparkCore { 25 | 26 | } 27 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/SparkStreaming.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark 19 | 20 | /** 21 | * Spark Streaming通用父接口 22 | * Created by ChengLong on 2018-03-28. 23 | */ 24 | trait SparkStreaming extends BaseSparkStreaming { 25 | 26 | } 27 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/StructuredStreaming.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark 19 | 20 | /** 21 | * Structured Streaming通用父类 22 | * Created by ChengLong on 2019-03-11. 23 | */ 24 | trait StructuredStreaming extends BaseStructuredStreaming { 25 | 26 | } 27 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/acc/SyncAccumulator.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark.acc 19 | 20 | import org.apache.spark.util.AccumulatorV2 21 | 22 | import java.util.concurrent.ConcurrentLinkedQueue 23 | 24 | /** 25 | * fire框架内部信息同步累加器 26 | * 27 | * @author 2022-08-24 14:49:55 28 | * @since 2.3.2 29 | */ 30 | private[fire] class SyncAccumulator extends StringAccumulator { 31 | 32 | /** 33 | * 用于复制累加器 34 | */ 35 | override def copy(): AccumulatorV2[String, ConcurrentLinkedQueue[String]] = new SyncAccumulator 36 | } 37 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/ext/core/SparkConfExt.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.spark.ext.core 19 | 20 | import org.apache.spark.SparkConf 21 | 22 | /** 23 | * SparkConf扩展 24 | * 25 | * @param sparkConf 26 | * sparkConf对象 27 | * @author ChengLong 2019-5-18 10:50:35 28 | */ 29 | class SparkConfExt(sparkConf: SparkConf) { 30 | 31 | } 32 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/ext/core/SparkContextExt.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark.ext.core 19 | 20 | import org.apache.spark.SparkContext 21 | 22 | /** 23 | * SparkContext扩展 24 | * 25 | * @param sc 26 | * SparkContext对象 27 | * @author ChengLong 2019-5-18 10:53:56 28 | */ 29 | class SparkContextExt(sc: SparkContext) { 30 | 31 | /** 32 | * 判断SparkContext是否已启动 33 | * 34 | * @return 35 | * true:Spark上下文初始化完成 false:已销毁 36 | */ 37 | def isStarted: Boolean = { 38 | if (sc == null) return false 39 | !sc.isStopped 40 | } 41 | } -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/ext/provider/SparkProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.spark.ext.provider 19 | 20 | import com.zto.fire.core.ext.Provider 21 | import com.zto.fire.spark.util.SparkSingletonFactory 22 | 23 | /** 24 | * spark provider父接口 25 | * 26 | * @author ChengLong 27 | * @since 2.0.0 28 | * @create 2020-12-23 17:49 29 | */ 30 | trait SparkProvider extends Provider { 31 | protected lazy val spark = SparkSingletonFactory.getSparkSession 32 | protected lazy val sc = spark.sparkContext 33 | } 34 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/sql/SparkSqlExtensionsParserBase.scala: -------------------------------------------------------------------------------- 1 | package com.zto.fire.spark.sql 2 | 3 | import com.zto.fire.common.lineage.LineageManager 4 | import com.zto.fire.common.util.{ExceptionBus, Logging} 5 | import org.apache.spark.sql.SparkSession 6 | import org.apache.spark.sql.catalyst.expressions.Expression 7 | import org.apache.spark.sql.catalyst.parser.ParserInterface 8 | import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan 9 | 10 | /** 11 | * 提供通用的sql解析与校验类 12 | * @param sparkSession 13 | * @param parser 14 | */ 15 | private[fire] class SparkSqlExtensionsParserBase(sparkSession: SparkSession, parser: ParserInterface) extends Logging { 16 | 17 | /** 18 | * Parse a string to a [[LogicalPlan]]. 19 | */ 20 | def parsePlan(sqlText: String): LogicalPlan = { 21 | try { 22 | SparkSqlParser.sqlParse(sqlText) 23 | LineageManager.printLog(s"采集SQL文本:${sqlText}") 24 | parser.parsePlan(sqlText) 25 | } catch { 26 | case e: Throwable => 27 | ExceptionBus.post(e, sqlText) 28 | throw e 29 | } 30 | } 31 | 32 | /** 33 | * Parse a string to an [[Expression]]. 34 | */ 35 | def parseExpression(sqlText: String): Expression = parser.parseExpression(sqlText) 36 | } 37 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/sql/SqlExtensions.scala: -------------------------------------------------------------------------------- 1 | package com.zto.fire.spark.sql 2 | 3 | import com.zto.fire.core.sql.SqlExtensionsParser 4 | import com.zto.fire.spark.conf.FireSparkConf 5 | import org.apache.spark.sql.{SparkSession, SparkSessionExtensions} 6 | import org.apache.spark.sql.catalyst.parser.ParserInterface 7 | 8 | /** 9 | * spark sql语法扩展 10 | * @author ChengLong 11 | * @date 2022-05-09 14:45:15 12 | * @since 2.2.2 13 | */ 14 | private[fire] object SqlExtensions extends SqlExtensionsParser { 15 | 16 | /** 17 | * 启用自定义Sql解析器扩展 18 | */ 19 | def sqlExtension(sessionBuilder: SparkSession.Builder): Unit = { 20 | if (FireSparkConf.sqlExtensionsEnable) { 21 | type ParserBuilder = (SparkSession, ParserInterface) => ParserInterface 22 | type ExtensionsBuilder = SparkSessionExtensions => Unit 23 | val parserBuilder: ParserBuilder = (sparkSession, parser) => new SparkSqlExtensionsParser(sparkSession, parser) 24 | val extBuilder: ExtensionsBuilder = { e => e.injectParser(parserBuilder) } 25 | sessionBuilder.withExtensions(extBuilder) 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /fire-engines/fire-spark/src/main/scala/com/zto/fire/spark/sync/SparkDistributeExecuteManager.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. 
See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.spark.sync 19 | 20 | import com.zto.fire.common.util.FireUtils 21 | import com.zto.fire.core.sync.DistributeExecuteManager 22 | 23 | /** 24 | * Spark分布式执行器,分布式执行指定的代码逻辑 25 | * 26 | * @author ChengLong 27 | * @Date 2024/4/24 17:00 28 | * @version 2.4.6 29 | */ 30 | private object SparkDistributeExecuteManager extends DistributeExecuteManager { 31 | 32 | /** 33 | * 分布式执行相应的逻辑 34 | */ 35 | override def distributeExecute: Unit = { 36 | // 分布式打印指定类的路径信息 37 | FireUtils.printCodeResource() 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /fire-enhance/apache-flink/src/main/java-flink-1.14/org/apache/flink/connector/jdbc/internal/converter/OracleSQLRowConverter.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.connector.jdbc.internal.converter; 2 | 3 | import org.apache.flink.table.types.logical.RowType; 4 | 5 | /** 6 | * Runtime converter that responsible to convert between JDBC object and Flink internal object for 7 | * Oracle. 8 | */ 9 | public class OracleSQLRowConverter extends AbstractJdbcRowConverter { 10 | 11 | private static final long serialVersionUID = 1L; 12 | 13 | @Override 14 | public String converterName() { 15 | return "Oracle"; 16 | } 17 | 18 | public OracleSQLRowConverter(RowType rowType) { 19 | super(rowType); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /fire-enhance/apache-spark/src/main/java/com/zto/fire/spark/Enhance.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package com.zto.fire.spark; 19 | 20 | public class Enhance { 21 | } 22 | -------------------------------------------------------------------------------- /fire-examples/flink-examples/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory: -------------------------------------------------------------------------------- 1 | com.zto.fire.flink.sql.connector.rocketmq.RocketMQDynamicTableFactory -------------------------------------------------------------------------------- /fire-examples/flink-examples/src/test/scala/com/zto/fire/examples/flink/core/BaseFlinkTester.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.zto.fire.examples.flink.core 19 | 20 | import com.zto.fire.flink.FlinkStreaming 21 | import org.junit.{After, Before} 22 | 23 | /** 24 | * Parent trait for Flink unit tests, used to initialize the fire and Flink contexts 25 | * 26 | * @author ChengLong 27 | * @date 2022-05-17 09:55:30 28 | * @since 2.2.2 29 | */ 30 | trait BaseFlinkTester extends FlinkStreaming { 31 | 32 | /** 33 | * Initializes the fire framework and the Flink runtime context 34 | */ 35 | @Before 36 | def before: Unit = { 37 | this.init() 38 | } 39 | 40 | @After 41 | override def after: Unit = { 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /fire-examples/spark-examples/src/main/resources/streaming/ConfigCenterTest.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License.
16 | # 17 | 18 | #fire.thread.pool.size=10 19 | fire.thread.pool.size=6 20 | fire.restful.max.thread=9 21 | fire.jdbc.query.partitions=11 22 | fire.hbase.scan.repartitions=110 23 | fire.acc.log.max.size=22 24 | fire.conf.test=scala -------------------------------------------------------------------------------- /fire-examples/spark-examples/src/test/resources/ConfigCenterUnitTest.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | #fire.thread.pool.size=10 19 | fire.thread.pool.size=6 20 | fire.restful.max.thread=9 21 | fire.thread.pool.schedule.size=5 22 | fire.jdbc.query.partitions=11 23 | fire.hbase.scan.repartitions=110 24 | fire.acc.log.max.size=22 -------------------------------------------------------------------------------- /fire-examples/spark-examples/src/test/resources/SparkSQLParserTest.properties: -------------------------------------------------------------------------------- 1 | spark.fire.task.schedule.enable=false 2 | spark.fire.acc.enable=false -------------------------------------------------------------------------------- /fire-examples/spark-examples/src/test/resources/common.properties: -------------------------------------------------------------------------------- 1 | spark.fire.task.schedule.enable=false 2 | spark.fire.acc.enable=false -------------------------------------------------------------------------------- /fire-shell/spark-shell/src/main/scala-spark-3.0/com/zto/fire/shell/spark/Test.scala: -------------------------------------------------------------------------------- 1 | package com.zto.fire.shell.spark 2 | 3 | import com.zto.fire.common.anno.Config 4 | import com.zto.fire.spark.SparkStreaming 5 | import org.apache.spark.SparkContext 6 | import org.apache.spark.sql.SparkSession 7 | 8 | @Config( 9 | """ 10 | |hive.cluster=test 11 | |kafka.brokers.name = bigdata_test 12 | |kafka.topics = fire 13 | |kafka.group.id=fire 14 | |spark.streaming.stopGracefullyOnShutdown=false 15 | |""") 16 | object Test extends SparkStreaming { 17 | 18 | def getFire: SparkSession = this.fire 19 | 20 | def getSparkSession: SparkSession = this.fire 21 | 22 | def getSc: SparkContext = this.sc 23 | } --------------------------------------------------------------------------------
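
The Test entry point above only wires up access to the underlying SparkSession and SparkContext. For context, below is a minimal, hypothetical sketch of how a full fire Spark job built from the same pieces could look. It is not code shipped with the project: the WordCountSketch name, the process entry method, and the config values are assumptions inferred from conventions visible in this dump (the @Config annotation, the SparkStreaming trait, and this.fire), so treat it as an illustration rather than the framework's canonical API.

package com.zto.fire.examples.spark

import com.zto.fire.common.anno.Config
import com.zto.fire.spark.SparkStreaming

// Hypothetical job used purely for illustration; `process` is assumed to be the
// framework hook invoked once the fire/Spark runtime context has been initialized.
@Config(
  """
    |kafka.brokers.name=bigdata_test
    |kafka.topics=fire
    |kafka.group.id=fire
    |""")
object WordCountSketch extends SparkStreaming {

  override def process: Unit = {
    // `this.fire` is the shared SparkSession managed by SparkSingletonFactory (see SparkProvider above).
    val words = this.fire.createDataFrame(Seq(("fire", 1), ("spark", 2), ("fire", 3))).toDF("word", "cnt")
    words.createOrReplaceTempView("t_words")
    // SQL routed through this session is also visited by the lineage parser injected
    // in SqlExtensions whenever FireSparkConf.sqlExtensionsEnable is turned on.
    this.fire.sql("select word, sum(cnt) as total from t_words group by word").show()
  }
}

If `process` is not the actual hook in a given fire version, the same body can sit behind whatever entry method the SparkStreaming trait defines; the point of the sketch is only that configuration arrives through @Config while the session arrives through this.fire.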