├── .gitignore
├── .gitlab-ci.yml
├── LICENSE.txt
├── README.md
├── README.md.back
├── aws
├── aws-sink
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── aws
│ │ ├── AwsConstantKey.java
│ │ ├── AwsOutputFormat.java
│ │ ├── AwsSink.java
│ │ ├── table
│ │ ├── AwsSinkParser.java
│ │ └── AwsTableInfo.java
│ │ └── util
│ │ └── AwsManager.java
└── pom.xml
├── bin
└── submit.sh
├── cassandra
├── cassandra-side
│ ├── cassandra-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── cassandra
│ │ │ │ ├── CassandraAllReqRow.java
│ │ │ │ └── CassandraAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── cassandra
│ │ │ ├── CassandraAllReqRowTest.java
│ │ │ └── CassandraAllSideInfoTest.java
│ ├── cassandra-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── cassandra
│ │ │ │ ├── CassandraAsyncReqRow.java
│ │ │ │ └── CassandraAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── cassandra
│ │ │ ├── CassandraAsyncReqRowTest.java
│ │ │ └── CassandraAsyncSideInfoTest.java
│ ├── cassandra-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── cassandra
│ │ │ │ └── table
│ │ │ │ ├── CassandraSideParser.java
│ │ │ │ └── CassandraSideTableInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── cassandra
│ │ │ └── table
│ │ │ └── CassandraSideParserTest.java
│ └── pom.xml
├── cassandra-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── cassandra
│ │ │ ├── CassandraOutputFormat.java
│ │ │ ├── CassandraSink.java
│ │ │ └── table
│ │ │ ├── CassandraSinkParser.java
│ │ │ └── CassandraTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── cassandra
│ │ ├── CassandraOutputFormatTest.java
│ │ ├── CassandraSinkTest.java
│ │ └── table
│ │ └── CassandraSinkParserTest.java
└── pom.xml
├── ci
├── sonar_analyze.sh
└── sonar_notify.sh
├── clickhouse
├── clickhouse-side
│ ├── clickhouse-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── clickhouse
│ │ │ │ ├── ClickhouseAllReqRow.java
│ │ │ │ └── ClickhouseAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── clickhouse
│ │ │ └── ClickhouseAllReqRowTest.java
│ ├── clickhouse-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── clickhouse
│ │ │ │ ├── ClickhouseAsyncReqRow.java
│ │ │ │ └── ClickhouseAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── clickhouse
│ │ │ └── ClickhouseAsyncReqRowTest.java
│ ├── clickhouse-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── clickhouse
│ │ │ │ └── table
│ │ │ │ └── ClickhouseSideParser.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── clickhouse
│ │ │ └── table
│ │ │ └── ClickhouseSideParserTest.java
│ └── pom.xml
├── clickhouse-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── clickhouse
│ │ │ ├── ClickhouseDialect.java
│ │ │ ├── ClickhouseSink.java
│ │ │ └── table
│ │ │ └── ClickhouseSinkParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── clickhouse
│ │ ├── ClickhouseDialectTest.java
│ │ ├── ClickhouseSinkTest.java
│ │ └── table
│ │ └── ClickhouseSinkParserTest.java
└── pom.xml
├── console
├── console-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── console
│ │ │ ├── ConsoleOutputFormat.java
│ │ │ ├── ConsoleSink.java
│ │ │ └── table
│ │ │ ├── ConsoleSinkParser.java
│ │ │ ├── ConsoleTableInfo.java
│ │ │ └── TablePrintUtil.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── console
│ │ ├── ConsoleOutputFormatTest.java
│ │ ├── ConsoleSinkTest.java
│ │ └── table
│ │ ├── ConsoleSinkParserTest.java
│ │ ├── ConsoleTableInfoTest.java
│ │ └── TablePrintUtilTest.java
└── pom.xml
├── core
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ ├── com
│ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ ├── GetPlan.java
│ │ │ │ ├── Main.java
│ │ │ │ ├── classloader
│ │ │ │ ├── ClassLoaderManager.java
│ │ │ │ ├── ClassLoaderSupplier.java
│ │ │ │ ├── ClassLoaderSupplierCallBack.java
│ │ │ │ └── DtClassLoader.java
│ │ │ │ ├── constant
│ │ │ │ └── PluginParamConsts.java
│ │ │ │ ├── constrant
│ │ │ │ └── ConfigConstrant.java
│ │ │ │ ├── dirtyManager
│ │ │ │ ├── consumer
│ │ │ │ │ ├── AbstractDirtyDataConsumer.java
│ │ │ │ │ └── DirtyConsumerFactory.java
│ │ │ │ ├── entity
│ │ │ │ │ └── DirtyDataEntity.java
│ │ │ │ └── manager
│ │ │ │ │ ├── DirtyDataManager.java
│ │ │ │ │ └── DirtyKeys.java
│ │ │ │ ├── enums
│ │ │ │ ├── ClusterMode.java
│ │ │ │ ├── ColumnType.java
│ │ │ │ ├── ECacheContentType.java
│ │ │ │ ├── ECacheType.java
│ │ │ │ ├── EConnectionErrorCode.java
│ │ │ │ ├── EDatabaseType.java
│ │ │ │ ├── EPluginLoadMode.java
│ │ │ │ ├── EStateBackend.java
│ │ │ │ ├── ETableType.java
│ │ │ │ └── EUpdateMode.java
│ │ │ │ ├── environment
│ │ │ │ ├── MyLocalStreamEnvironment.java
│ │ │ │ └── StreamEnvConfigManager.java
│ │ │ │ ├── exception
│ │ │ │ ├── BaseCodeEnum.java
│ │ │ │ ├── BaseException.java
│ │ │ │ ├── ErrorCode.java
│ │ │ │ ├── ExceptionTrace.java
│ │ │ │ └── sqlparse
│ │ │ │ │ ├── SqlParseCodeEnum.java
│ │ │ │ │ └── WithoutTableNameException.java
│ │ │ │ ├── exec
│ │ │ │ ├── ApiResult.java
│ │ │ │ ├── ExecuteProcessHelper.java
│ │ │ │ ├── FlinkSQLExec.java
│ │ │ │ └── ParamsInfo.java
│ │ │ │ ├── factory
│ │ │ │ └── DTThreadFactory.java
│ │ │ │ ├── format
│ │ │ │ ├── DeserializationMetricWrapper.java
│ │ │ │ ├── FormatType.java
│ │ │ │ ├── SerializationMetricWrapper.java
│ │ │ │ └── dtnest
│ │ │ │ │ └── DtNestRowDeserializationSchema.java
│ │ │ │ ├── function
│ │ │ │ └── FunctionManager.java
│ │ │ │ ├── krb
│ │ │ │ └── KerberosTable.java
│ │ │ │ ├── metric
│ │ │ │ ├── EventDelayGauge.java
│ │ │ │ └── MetricConstant.java
│ │ │ │ ├── option
│ │ │ │ ├── OptionParser.java
│ │ │ │ ├── OptionRequired.java
│ │ │ │ └── Options.java
│ │ │ │ ├── outputformat
│ │ │ │ └── AbstractDtRichOutputFormat.java
│ │ │ │ ├── parser
│ │ │ │ ├── CreateFuncParser.java
│ │ │ │ ├── CreateTableParser.java
│ │ │ │ ├── CreateTmpTableParser.java
│ │ │ │ ├── FlinkPlanner.java
│ │ │ │ ├── IParser.java
│ │ │ │ ├── InsertSqlParser.java
│ │ │ │ ├── SqlParser.java
│ │ │ │ └── SqlTree.java
│ │ │ │ ├── resource
│ │ │ │ └── ResourceCheck.java
│ │ │ │ ├── side
│ │ │ │ ├── AbstractSideTableInfo.java
│ │ │ │ ├── AliasInfo.java
│ │ │ │ ├── BaseAllReqRow.java
│ │ │ │ ├── BaseAsyncReqRow.java
│ │ │ │ ├── BaseSideInfo.java
│ │ │ │ ├── CacheMissVal.java
│ │ │ │ ├── FieldInfo.java
│ │ │ │ ├── FieldReplaceInfo.java
│ │ │ │ ├── ISideReqRow.java
│ │ │ │ ├── JoinInfo.java
│ │ │ │ ├── JoinNodeDealer.java
│ │ │ │ ├── JoinScope.java
│ │ │ │ ├── ParserJoinField.java
│ │ │ │ ├── PredicateInfo.java
│ │ │ │ ├── SidePredicatesParser.java
│ │ │ │ ├── SideSQLParser.java
│ │ │ │ ├── SideSqlExec.java
│ │ │ │ ├── StreamSideFactory.java
│ │ │ │ ├── TupleKeySelector.java
│ │ │ │ ├── cache
│ │ │ │ │ ├── AbstractSideCache.java
│ │ │ │ │ ├── CacheObj.java
│ │ │ │ │ └── LRUSideCache.java
│ │ │ │ └── operator
│ │ │ │ │ ├── SideAsyncOperator.java
│ │ │ │ │ └── SideWithAllCacheOperator.java
│ │ │ │ ├── sink
│ │ │ │ ├── IStreamSinkGener.java
│ │ │ │ └── StreamSinkFactory.java
│ │ │ │ ├── source
│ │ │ │ ├── IStreamSourceGener.java
│ │ │ │ └── StreamSourceFactory.java
│ │ │ │ ├── table
│ │ │ │ ├── AbstractSideTableParser.java
│ │ │ │ ├── AbstractSourceParser.java
│ │ │ │ ├── AbstractSourceTableInfo.java
│ │ │ │ ├── AbstractTableInfo.java
│ │ │ │ ├── AbstractTableInfoParser.java
│ │ │ │ ├── AbstractTableParser.java
│ │ │ │ ├── AbstractTargetTableInfo.java
│ │ │ │ └── ITableFieldDealHandler.java
│ │ │ │ ├── util
│ │ │ │ ├── AuthUtil.java
│ │ │ │ ├── ByteUtils.java
│ │ │ │ ├── ClassUtil.java
│ │ │ │ ├── DataTypeUtils.java
│ │ │ │ ├── DateUtil.java
│ │ │ │ ├── DtFileUtils.java
│ │ │ │ ├── DtStringUtil.java
│ │ │ │ ├── FieldReplaceUtil.java
│ │ │ │ ├── KrbUtils.java
│ │ │ │ ├── MD5Utils.java
│ │ │ │ ├── MathUtil.java
│ │ │ │ ├── ParseUtils.java
│ │ │ │ ├── PluginUtil.java
│ │ │ │ ├── PropertiesUtils.java
│ │ │ │ ├── ReflectionUtils.java
│ │ │ │ ├── RowDataComplete.java
│ │ │ │ ├── RowDataConvert.java
│ │ │ │ ├── TableUtils.java
│ │ │ │ ├── ThreadUtil.java
│ │ │ │ └── TypeInfoDataTypeConverter.java
│ │ │ │ └── watermarker
│ │ │ │ ├── AbstractCustomerWaterMarker.java
│ │ │ │ ├── CustomerWaterMarkerForLong.java
│ │ │ │ ├── CustomerWaterMarkerForTimeStamp.java
│ │ │ │ └── WaterMarkerAssigner.java
│ │ └── org
│ │ │ └── apache
│ │ │ └── flink
│ │ │ └── table
│ │ │ └── planner
│ │ │ └── plan
│ │ │ └── QueryOperationConverter.java
│ └── scala
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── App.scala
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ ├── TestGetPlan.java
│ ├── classloader
│ ├── ClassLoaderManagerTest.java
│ ├── ClassLoaderSupplierCallBackTest.java
│ └── DtClassLoaderTest.java
│ ├── dirtyManager
│ └── TestDirtyDataManager.java
│ ├── environment
│ └── MyLocalStreamEnvironmentTest.java
│ ├── exec
│ ├── ApiResultTest.java
│ ├── ExecuteProcessHelperTest.java
│ ├── FlinkSQLExecTest.java
│ └── PraramsInfoTest.java
│ ├── factory
│ └── DTFactoryTest.java
│ ├── function
│ └── FunctionManagerTest.java
│ ├── option
│ └── OptionParseTest.java
│ ├── outputformat
│ └── AbstractDtRichOutputFormatTest.java
│ ├── parse
│ ├── CreateFuncParserTest.java
│ ├── CreateTableParserTest.java
│ ├── CreateTmpTableParserTest.java
│ └── InsertSqlParserTest.java
│ ├── side
│ ├── AbstractSideTableInfoTest.java
│ ├── BaseAllReqRowTest.java
│ ├── BaseAsyncReqRowTest.java
│ ├── BaseSideInfoTest.java
│ ├── JoinNodeDealerTest.java
│ ├── SidePredicatesParserTest.java
│ ├── SideSQLParserTest.java
│ ├── SideSqlExecTest.java
│ ├── StreamSideFactoryTest.java
│ ├── cache
│ │ └── LruSideCacheTest.java
│ └── operator
│ │ ├── SideAsyncOperatorTest.java
│ │ └── SideWithAllCacheOperatorTest.java
│ ├── sink
│ └── StreamSinkFactoryTest.java
│ ├── source
│ └── StreamSourceFactoryTest.java
│ ├── table
│ ├── AbstractSideTableParserTest.java
│ ├── AbstractSourceParserTest.java
│ └── RdbParserTestBase.java
│ ├── util
│ ├── ByteUtilsTest.java
│ ├── ClassUtilTest.java
│ ├── DataTypeUtilsTest.java
│ ├── DateUtilTest.java
│ ├── DtStringUtilTest.java
│ ├── FieldReplaceUtilTest.java
│ ├── MD5UtilTest.java
│ ├── MathUtilTest.java
│ ├── ParseUtilTest.java
│ ├── PluginUtilTest.java
│ ├── PropertiesUtilTest.java
│ ├── ReflectionUtilTest.java
│ ├── TableUtilTest.java
│ └── TestDtStringUtil.java
│ └── watermarker
│ ├── CustomerWaterMarkerForLongTest.java
│ ├── CustomerWaterMarkerForTimeStampTest.java
│ └── WaterMarkerAssignerTest.java
├── db2
├── db2-side
│ ├── db2-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── db2
│ │ │ │ ├── Db2AllReqRow.java
│ │ │ │ └── Db2AllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── db2
│ │ │ └── Db2AllReqRowTest.java
│ ├── db2-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── db2
│ │ │ │ ├── Db2AsyncReqRow.java
│ │ │ │ └── Db2AsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── db2
│ │ │ └── Db2AsyncReqRowTest.java
│ ├── db2-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── db2
│ │ │ │ └── table
│ │ │ │ └── Db2SideParser.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── db2
│ │ │ └── table
│ │ │ └── Db2SideParserTest.java
│ └── pom.xml
├── db2-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── db
│ │ │ ├── DbDialect.java
│ │ │ ├── DbSink.java
│ │ │ └── table
│ │ │ └── DbSinkParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── db
│ │ ├── DbDialectTest.java
│ │ └── table
│ │ └── DbSinkParserTest.java
└── pom.xml
├── dirtyData
├── console
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── dirty
│ │ │ └── console
│ │ │ └── ConsoleDirtyDataConsumer.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── dirty
│ │ └── console
│ │ └── TestPrintDirtyDataConsumer.java
├── mysql
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── dirty
│ │ └── mysql
│ │ └── MysqlDirtyDataConsumer.java
└── pom.xml
├── docs
├── colType.md
├── config.md
├── createView.md
├── demo.md
├── function.md
├── images
│ └── streamsql_dd.jpg
├── newMetric.md
├── plugin
│ ├── cassandraSide.md
│ ├── cassandraSink.md
│ ├── clickhouseSide.md
│ ├── clickhouseSink.md
│ ├── consoleSink.md
│ ├── db2Side.md
│ ├── db2Sink.md
│ ├── elasticsearch5Sink.md
│ ├── elasticsearch6Side.md
│ ├── elasticsearch6Sink.md
│ ├── elasticsearch7Side.md
│ ├── elasticsearch7Sink.md
│ ├── filesource.md
│ ├── hbaseSide.md
│ ├── hbaseSink.md
│ ├── httpSink.md
│ ├── impalaColType.md
│ ├── impalaSide.md
│ ├── impalaSink.md
│ ├── kafkaSink.md
│ ├── kafkaSource.md
│ ├── kuduSide.md
│ ├── kuduSink.md
│ ├── mongoSide.md
│ ├── mongoSink.md
│ ├── mysqlSide.md
│ ├── mysqlSink.md
│ ├── oracleSide.md
│ ├── oracleSink.md
│ ├── polardbSide.md
│ ├── polardbSink.md
│ ├── postgresqlSide.md
│ ├── postgresqlSink.md
│ ├── redisSide.md
│ ├── redisSink.md
│ ├── serverSocketSource.md
│ ├── sideParams.md
│ ├── sqlserverSide.md
│ └── sqlserverSink.md
├── pluginsInfo.md
├── pr.md
├── prometheus.md
├── quickStart.md
└── timeZone.md
├── elasticsearch5-xh
├── elasticsearch5-xh-sink
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ ├── com
│ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── elasticsearch
│ │ │ ├── CustomerSinkFunc.java
│ │ │ ├── ElasticsearchSink.java
│ │ │ ├── EsUtil.java
│ │ │ ├── ExtendES5ApiCallBridge.java
│ │ │ ├── MetricElasticsearchSink.java
│ │ │ └── table
│ │ │ ├── ElasticsearchSinkParser.java
│ │ │ └── ElasticsearchTableInfo.java
│ │ └── org
│ │ └── apache
│ │ └── flink
│ │ ├── streaming
│ │ └── connectors
│ │ │ └── elasticsearch
│ │ │ ├── ActionRequestFailureHandler.java
│ │ │ ├── BufferingNoOpRequestIndexer.java
│ │ │ ├── ElasticsearchApiCallBridge.java
│ │ │ ├── ElasticsearchSinkBase.java
│ │ │ ├── ElasticsearchSinkFunction.java
│ │ │ ├── ElasticsearchUpsertTableSinkBase.java
│ │ │ ├── ElasticsearchUpsertTableSinkFactoryBase.java
│ │ │ ├── PreElasticsearch6BulkProcessorIndexer.java
│ │ │ ├── RequestIndexer.java
│ │ │ └── util
│ │ │ ├── ElasticsearchUtils.java
│ │ │ ├── IgnoringFailureHandler.java
│ │ │ ├── NoOpFailureHandler.java
│ │ │ └── RetryRejectedExecutionFailureHandler.java
│ │ └── table
│ │ └── descriptors
│ │ ├── Elasticsearch.java
│ │ └── ElasticsearchValidator.java
└── pom.xml
├── elasticsearch5
├── elasticsearch5-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── elasticsearch
│ │ │ ├── CustomerSinkFunc.java
│ │ │ ├── ElasticsearchSink.java
│ │ │ ├── EsUtil.java
│ │ │ ├── ExtendES5ApiCallBridge.java
│ │ │ ├── MetricElasticsearchSink.java
│ │ │ └── table
│ │ │ ├── ElasticsearchSinkParser.java
│ │ │ └── ElasticsearchTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── elasticsearch
│ │ ├── CustomerSinkFuncTest.java
│ │ ├── ElasticsearchSinkTest.java
│ │ ├── EsUtilTest.java
│ │ ├── ExtendES5ApiCallBridgeTest.java
│ │ ├── MetricElasticsearchSinkTest.java
│ │ └── table
│ │ ├── ElasticsearchSinkParserTest.java
│ │ └── ElasticsearchTableInfoTest.java
└── pom.xml
├── elasticsearch6
├── elasticsearch6-side
│ ├── elasticsearch6-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── elasticsearch6
│ │ │ │ ├── Elasticsearch6AllReqRow.java
│ │ │ │ └── Elasticsearch6AllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── elasticsearch6
│ │ │ ├── Elasticsearch6AllReqRowTest.java
│ │ │ └── Elasticsearch6AllSideInfoTest.java
│ ├── elasticsearch6-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── elasticsearch6
│ │ │ │ ├── Elasticsearch6AsyncReqRow.java
│ │ │ │ └── Elasticsearch6AsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── elasticsearch6
│ │ │ ├── Elasticsearch6AsyncReqRowTest.java
│ │ │ └── Elasticsearch6AsyncSideInfoTest.java
│ ├── elasticsearch6-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── elasticsearch6
│ │ │ │ ├── table
│ │ │ │ ├── Elasticsearch6SideParser.java
│ │ │ │ └── Elasticsearch6SideTableInfo.java
│ │ │ │ └── util
│ │ │ │ └── Es6Util.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── elasticsearch6
│ │ │ ├── table
│ │ │ ├── Elasticsearch6SideParserTest.java
│ │ │ └── Elasticsearch6SideTableInfoTest.java
│ │ │ └── util
│ │ │ └── Es6UtilTest.java
│ └── pom.xml
├── elasticsearch6-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── elasticsearch
│ │ │ ├── CustomerSinkFunc.java
│ │ │ ├── ElasticsearchSink.java
│ │ │ ├── Es6Util.java
│ │ │ ├── ExtendEs6ApiCallBridge.java
│ │ │ ├── MetricElasticsearch6Sink.java
│ │ │ └── table
│ │ │ ├── ElasticsearchSinkParser.java
│ │ │ └── ElasticsearchTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── elasticsearch
│ │ ├── CustomerSinkFuncTest.java
│ │ ├── ElasticsearchSinkTest.java
│ │ ├── Es6UtilTest.java
│ │ ├── ExtendES6ApiCallBridgeTest.java
│ │ ├── MetricElasticsearch6SinkTest.java
│ │ └── table
│ │ ├── ElasticsearchSinkParserTest.java
│ │ └── ElasticsearchTableInfoTest.java
└── pom.xml
├── elasticsearch7
├── elasticsearch7-side
│ ├── elasticsearch7-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ └── main
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── elasticsearch7
│ │ │ ├── Elasticsearch7AllReqRow.java
│ │ │ └── Elasticsearch7AllSideInfo.java
│ ├── elasticsearch7-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ └── main
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── elasticsearch7
│ │ │ ├── Elasticsearch7AsyncReqRow.java
│ │ │ └── Elasticsearch7AsyncSideInfo.java
│ ├── elasticsearch7-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ └── main
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── elasticsearch7
│ │ │ ├── table
│ │ │ ├── Elasticsearch7SideParser.java
│ │ │ └── Elasticsearch7SideTableInfo.java
│ │ │ └── util
│ │ │ └── Es7Util.java
│ └── pom.xml
├── elasticsearch7-sink
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ ├── com
│ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── elasticsearch
│ │ │ ├── CustomerSinkFunc.java
│ │ │ ├── ElasticsearchSink.java
│ │ │ ├── Es7Util.java
│ │ │ ├── ExtendEs7ApiCallBridge.java
│ │ │ ├── MetricElasticsearch7Sink.java
│ │ │ └── table
│ │ │ ├── ElasticsearchSinkParser.java
│ │ │ └── ElasticsearchTableInfo.java
│ │ └── org
│ │ └── apache
│ │ └── flink
│ │ └── streaming
│ │ └── connectors
│ │ └── elasticsearch
│ │ └── index
│ │ ├── AbstractTimeIndexGenerator.java
│ │ ├── IndexGenerator.java
│ │ ├── IndexGeneratorBase.java
│ │ ├── IndexGeneratorFactory.java
│ │ └── StaticIndexGenerator.java
└── pom.xml
├── file
├── file-source
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── source
│ │ │ └── file
│ │ │ ├── DTCsvRowDeserializationSchema.java
│ │ │ ├── FileSource.java
│ │ │ ├── FileSourceConstant.java
│ │ │ ├── table
│ │ │ ├── ArvoSourceTableInfo.java
│ │ │ ├── CsvSourceTableInfo.java
│ │ │ ├── FileSourceParser.java
│ │ │ ├── FileSourceTableInfo.java
│ │ │ └── JsonSourceTableInfo.java
│ │ │ └── throwable
│ │ │ └── LengthMismatchException.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── file
│ │ ├── FileSourceTest.java
│ │ └── table
│ │ └── FileSourceTableInfoTest.java
└── pom.xml
├── hbase
├── hbase-side
│ ├── hbase-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── hbase
│ │ │ │ ├── HbaseAllReqRow.java
│ │ │ │ └── HbaseAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── hbase
│ │ │ ├── HbaseAllReqRowTest.java
│ │ │ └── HbaseAllSideInfoTest.java
│ ├── hbase-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── hbase
│ │ │ │ ├── HbaseAsyncReqRow.java
│ │ │ │ ├── HbaseAsyncSideInfo.java
│ │ │ │ └── rowkeydealer
│ │ │ │ ├── AbstractRowKeyModeDealer.java
│ │ │ │ ├── PreRowKeyModeDealerDealer.java
│ │ │ │ └── RowKeyEqualModeDealer.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── hbase
│ │ │ └── HbaseAsyncReqRowTest.java
│ ├── hbase-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── hbase
│ │ │ │ ├── AbstractReplaceOperator.java
│ │ │ │ ├── Md5ReplaceOperator.java
│ │ │ │ ├── ReplaceInfo.java
│ │ │ │ ├── RowKeyBuilder.java
│ │ │ │ ├── enums
│ │ │ │ ├── EReplaceOpType.java
│ │ │ │ └── EReplaceType.java
│ │ │ │ ├── table
│ │ │ │ ├── HbaseSideParser.java
│ │ │ │ └── HbaseSideTableInfo.java
│ │ │ │ └── utils
│ │ │ │ ├── HbaseConfigUtils.java
│ │ │ │ └── HbaseUtils.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── hbase
│ │ │ ├── HbaseSideParserTest.java
│ │ │ ├── Md5ReplaceOperatorTest.java
│ │ │ ├── RowKeyBuilderTest.java
│ │ │ └── utils
│ │ │ ├── HbaseConfigUtilsTest.java
│ │ │ └── HbaseUtilTest.java
│ └── pom.xml
├── hbase-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── hbase
│ │ │ ├── AbstractReplaceOperator.java
│ │ │ ├── HbaseConfigUtils.java
│ │ │ ├── HbaseOutputFormat.java
│ │ │ ├── HbaseSink.java
│ │ │ ├── HbaseUtil.java
│ │ │ ├── Md5ReplaceOperator.java
│ │ │ ├── ReplaceInfo.java
│ │ │ ├── RowKeyBuilder.java
│ │ │ ├── enums
│ │ │ ├── EReplaceOpType.java
│ │ │ └── EReplaceType.java
│ │ │ └── table
│ │ │ ├── HbaseSinkParser.java
│ │ │ └── HbaseTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── hbase
│ │ ├── HbaseConfigUtilsTest.java
│ │ ├── HbaseOutputFormatTest.java
│ │ ├── HbaseSinkTest.java
│ │ ├── HbaseUtilTest.java
│ │ ├── Md5ReplaceOperatorTest.java
│ │ └── table
│ │ └── HbaseSinkParserTest.java
└── pom.xml
├── http
├── http-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── http
│ │ │ ├── DtHttpClient.java
│ │ │ ├── HttpOutputFormat.java
│ │ │ ├── HttpSink.java
│ │ │ ├── SqlHttpRequestRetryHandler.java
│ │ │ └── table
│ │ │ ├── HttpSinkParser.java
│ │ │ └── HttpTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── flink
│ │ └── http
│ │ ├── HttpSinkTest.java
│ │ └── table
│ │ └── HttpTableInfoTest.java
└── pom.xml
├── impala
├── impala-side
│ ├── impala-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── impala
│ │ │ │ ├── ImpalaAllReqRow.java
│ │ │ │ └── ImpalaAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── impala
│ │ │ ├── ImpalaAllReqRowTest.java
│ │ │ └── ImpalaAllSideInfoTest.java
│ ├── impala-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── impala
│ │ │ │ ├── ImpalaAsyncReqRow.java
│ │ │ │ └── ImpalaAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── impala
│ │ │ ├── ImpalaAsyncReqRowTest.java
│ │ │ └── ImpalaAsyncSideInfoTest.java
│ ├── impala-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── impala
│ │ │ │ └── table
│ │ │ │ ├── ImpalaSideParser.java
│ │ │ │ └── ImpalaSideTableInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── impala
│ │ │ └── table
│ │ │ └── ImpalaSideParserTest.java
│ └── pom.xml
├── impala-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── impala
│ │ │ ├── EAuthMech.java
│ │ │ ├── ImpalaOutputFormat.java
│ │ │ ├── ImpalaSink.java
│ │ │ └── table
│ │ │ ├── ImpalaSinkParser.java
│ │ │ └── ImpalaTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── impala
│ │ ├── ImpalaSinkTest.java
│ │ └── table
│ │ └── ImpalaSinkParserTest.java
└── pom.xml
├── kafka-base
├── kafka-base-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── kafka
│ │ │ ├── AbstractKafkaProducerFactory.java
│ │ │ ├── AbstractKafkaSink.java
│ │ │ ├── CustomerFlinkPartition.java
│ │ │ ├── serialization
│ │ │ ├── AvroTuple2SerializationSchema.java
│ │ │ ├── CsvTupleSerializationSchema.java
│ │ │ ├── CustomerKeyedSerializationSchema.java
│ │ │ ├── DTJsonRowSerializationSchema.java
│ │ │ └── JsonTupleSerializationSchema.java
│ │ │ └── table
│ │ │ ├── KafkaSinkParser.java
│ │ │ └── KafkaSinkTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── kafka
│ │ ├── AbstractKafkaProducerFactoryTest.java
│ │ ├── AbstractKafkaSinkTest.java
│ │ ├── CustomerFlinkPartitionTest.java
│ │ ├── CustomerKeyedSerializationSchemaTest.java
│ │ ├── serialization
│ │ ├── AvroTuple2SerializationSchemaTest.java
│ │ ├── CsvTuple2SerializationSchemaTest.java
│ │ ├── DTJsonRowSerializationSchemaTest.java
│ │ └── JsonTuple2SerializationSchemaTest.java
│ │ └── table
│ │ ├── KafkaSinkParserTest.java
│ │ └── KafkaSinkTableInfoTest.java
├── kafka-base-source
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ ├── com
│ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── source
│ │ │ │ └── kafka
│ │ │ │ ├── AbstractKafkaConsumerFactory.java
│ │ │ │ ├── AbstractKafkaSource.java
│ │ │ │ ├── Calculate.java
│ │ │ │ ├── deserialization
│ │ │ │ ├── DeserializationSchemaFactory.java
│ │ │ │ ├── DtKafkaDeserializationSchemaWrapper.java
│ │ │ │ └── KafkaDeserializationMetricWrapper.java
│ │ │ │ ├── enums
│ │ │ │ └── EKafkaOffset.java
│ │ │ │ ├── sample
│ │ │ │ ├── OffsetFetcher.java
│ │ │ │ ├── OffsetMap.java
│ │ │ │ └── SampleCalculateHelper.java
│ │ │ │ ├── table
│ │ │ │ ├── KafkaSourceParser.java
│ │ │ │ └── KafkaSourceTableInfo.java
│ │ │ │ └── throwable
│ │ │ │ └── KafkaSamplingUnavailableException.java
│ │ │ └── org
│ │ │ └── apache
│ │ │ └── flink
│ │ │ └── formats
│ │ │ └── json
│ │ │ └── DTJsonRowDeserializationSchema.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── kafka
│ │ ├── AbstractKafkaConsumerFactoryTest.java
│ │ ├── AbstractKafkaSourceTest.java
│ │ ├── DTJsonRowDeserializationSchemaTest.java
│ │ ├── DtKafkaDeserializationSchemaWrapperTest.java
│ │ ├── KafkaDeserializationMetricWrapperTest.java
│ │ └── KafkaSourceParserTest.java
└── pom.xml
├── kafka
├── kafka-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── kafka
│ │ │ ├── KafkaProducer.java
│ │ │ ├── KafkaProducerFactory.java
│ │ │ └── KafkaSink.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── kafka
│ │ ├── KafkaProducerFactoryTest.java
│ │ ├── KafkaProduerTest.java
│ │ └── KafkaSinkTest.java
├── kafka-source
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ ├── com
│ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── source
│ │ │ │ └── kafka
│ │ │ │ ├── KafkaConsumer.java
│ │ │ │ ├── KafkaConsumerFactory.java
│ │ │ │ ├── KafkaSource.java
│ │ │ │ └── deserializer
│ │ │ │ └── DtKafkaDeserializer.java
│ │ │ └── org
│ │ │ └── apache
│ │ │ └── flink
│ │ │ └── streaming
│ │ │ └── connectors
│ │ │ └── kafka
│ │ │ └── FlinkKafkaConsumer.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── kafka
│ │ ├── KafkaSourceTest.java
│ │ └── deserializer
│ │ └── DtKafkaDeserializerTest.java
└── pom.xml
├── kafka09
├── kafka09-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── kafka
│ │ │ ├── KafkaProducer09.java
│ │ │ ├── KafkaProducer09Factory.java
│ │ │ └── KafkaSink.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── kafka
│ │ └── KafkaSinkTest.java
├── kafka09-source
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── source
│ │ │ └── kafka
│ │ │ ├── KafkaConsumer09.java
│ │ │ ├── KafkaConsumer09Factory.java
│ │ │ └── KafkaSource.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── kafka
│ │ └── KafkaSourceTest.java
└── pom.xml
├── kafka10
├── kafka10-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── kafka
│ │ │ ├── KafkaProducer010.java
│ │ │ ├── KafkaProducer010Factory.java
│ │ │ └── KafkaSink.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── kafka
│ │ └── KafkaSinkTest.java
├── kafka10-source
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── source
│ │ │ └── kafka
│ │ │ ├── KafkaConsumer010.java
│ │ │ ├── KafkaConsumer010Factory.java
│ │ │ └── KafkaSource.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── kafka
│ │ └── KafkaSourceTest.java
└── pom.xml
├── kafka11
├── kafka11-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── kafka
│ │ │ ├── KafkaProducer011.java
│ │ │ ├── KafkaProducer011Factory.java
│ │ │ └── KafkaSink.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── kafka
│ │ └── KafkaSinkTest.java
├── kafka11-source
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── source
│ │ │ └── kafka
│ │ │ ├── KafkaConsumer011.java
│ │ │ ├── KafkaConsumer011Factory.java
│ │ │ └── KafkaSource.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── kafka
│ │ └── KafkaSourceTest.java
└── pom.xml
├── kingbase
├── kingbase-side
│ ├── kingbase-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── kingbase
│ │ │ │ ├── KingbaseAllReqRow.java
│ │ │ │ └── KingbaseAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── kingbase
│ │ │ └── KingbaseAllReqRowTest.java
│ ├── kingbase-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── kingbase
│ │ │ │ ├── KingbaseAsyncReqRow.java
│ │ │ │ └── KingbaseAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── kingbase
│ │ │ └── KingbaseAsyncReqRowTest.java
│ ├── kingbase-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ └── main
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── kingbase
│ │ │ └── table
│ │ │ └── KingbaseSideParser.java
│ └── pom.xml
├── kingbase-sink
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── kingbase
│ │ ├── KingbaseDialect.java
│ │ ├── KingbaseSink.java
│ │ └── table
│ │ └── KingbaseSinkParser.java
└── pom.xml
├── kudu
├── kudu-side
│ ├── kudu-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── kudu
│ │ │ │ ├── KuduAllReqRow.java
│ │ │ │ └── KuduAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── kudu
│ │ │ ├── KuduAllReqRowTest.java
│ │ │ └── KuduAllSideInfoTest.java
│ ├── kudu-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── kudu
│ │ │ │ ├── KuduAsyncReqRow.java
│ │ │ │ └── KuduAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── kudu
│ │ │ ├── KuduAsyncReqRowTest.java
│ │ │ └── KuduAsyncSideInfoTest.java
│ ├── kudu-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── kudu
│ │ │ │ ├── table
│ │ │ │ ├── KuduSideParser.java
│ │ │ │ └── KuduSideTableInfo.java
│ │ │ │ └── utils
│ │ │ │ └── KuduUtil.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── kudu
│ │ │ ├── table
│ │ │ └── KuduSideParserTest.java
│ │ │ └── utils
│ │ │ └── KuduUtilTest.java
│ └── pom.xml
├── kudu-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── kudu
│ │ │ ├── KuduOutputFormat.java
│ │ │ ├── KuduSink.java
│ │ │ └── table
│ │ │ ├── KuduSinkParser.java
│ │ │ └── KuduTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── kudu
│ │ ├── KuduOutputFormatTest.java
│ │ ├── KuduSinkTest.java
│ │ └── table
│ │ └── KuduSinkParserTest.java
└── pom.xml
├── launcher
├── .gitignore
├── job
│ ├── kafkaNotNull.txt
│ ├── mysqlsideSql.txt
│ └── sideSql.txt
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── launcher
│ │ ├── LauncherMain.java
│ │ ├── entity
│ │ └── JobParamsInfo.java
│ │ ├── executor
│ │ ├── StandaloneExecutor.java
│ │ ├── YarnJobClusterExecutor.java
│ │ └── YarnSessionClusterExecutor.java
│ │ ├── factory
│ │ ├── AbstractClusterClientFactory.java
│ │ ├── StandaloneClientFactory.java
│ │ └── YarnClusterClientFactory.java
│ │ └── utils
│ │ └── JobGraphBuildUtil.java
│ ├── resources
│ └── log4j.properties
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── launcher
│ ├── LauncherMainTest.java
│ ├── PluginLoadModeTest.java
│ └── YarnConfLoaderTest.java
├── localTest
├── pom.xml
└── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── localTest
│ │ └── LocalTest.java
│ └── test
│ ├── java
│ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── localTest
│ │ └── TestLocalTest.java
│ └── resources
│ └── test.txt
├── mongo
├── mongo-side
│ ├── mongo-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── mongo
│ │ │ │ ├── MongoAllReqRow.java
│ │ │ │ └── MongoAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── mongo
│ │ │ ├── MongoAllReqRowTest.java
│ │ │ └── MongoAllSideInfoTest.java
│ ├── mongo-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── mongo
│ │ │ │ ├── MongoAsyncReqRow.java
│ │ │ │ └── MongoAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── mongo
│ │ │ ├── MongoAsyncReqRowTest.java
│ │ │ └── MongoAsyncSideInfoTest.java
│ ├── mongo-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── mongo
│ │ │ │ ├── table
│ │ │ │ ├── MongoSideParser.java
│ │ │ │ └── MongoSideTableInfo.java
│ │ │ │ └── utils
│ │ │ │ └── MongoUtil.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── mongo
│ │ │ ├── table
│ │ │ └── MongoSideParserTest.java
│ │ │ └── utils
│ │ │ └── MongoUtilTest.java
│ └── pom.xml
├── mongo-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── mongo
│ │ │ ├── MongoOutputFormat.java
│ │ │ ├── MongoSink.java
│ │ │ └── table
│ │ │ ├── MongoSinkParser.java
│ │ │ └── MongoTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── mongo
│ │ ├── MongoOutputFormatTest.java
│ │ ├── MongoSinkTest.java
│ │ └── table
│ │ └── MongoSinkParserTest.java
└── pom.xml
├── mysql
├── mysql-side
│ ├── mysql-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── mysql
│ │ │ │ ├── MysqlAllReqRow.java
│ │ │ │ └── MysqlAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── mysql
│ │ │ └── MysqlAllReqRowTest.java
│ ├── mysql-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── mysql
│ │ │ │ ├── MysqlAsyncReqRow.java
│ │ │ │ └── MysqlAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── mysql
│ │ │ └── MysqlAsyncReqRowTest.java
│ ├── mysql-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── mysql
│ │ │ │ └── table
│ │ │ │ └── MysqlSideParser.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── mysql
│ │ │ └── table
│ │ │ └── MysqlSideParserTest.java
│ └── pom.xml
├── mysql-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── mysql
│ │ │ ├── MySQLDialect.java
│ │ │ ├── MysqlSink.java
│ │ │ └── table
│ │ │ └── MysqlSinkParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── mysql
│ │ ├── MySQLDialectTest.java
│ │ ├── MysqlSinkTest.java
│ │ └── table
│ │ └── MysqlSinkParserTest.java
└── pom.xml
├── oceanbase
├── oceanbase-side
│ ├── oceanbase-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── oceanbase
│ │ │ │ ├── OceanbaseAllReqRow.java
│ │ │ │ └── OceanbaseAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstatck
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── oceanbase
│ │ │ └── OceanbaseAllReqRowTest.java
│ ├── oceanbase-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── oceanbase
│ │ │ │ ├── OceanbaseAsyncReqRow.java
│ │ │ │ └── OceanbaseAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── oceanbase
│ │ │ └── OceanbaseAsyncReqRowTest.java
│ ├── oceanbase-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── oceanbase
│ │ │ │ └── table
│ │ │ │ └── OceanbaseSideParser.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── oceanbase
│ │ │ └── table
│ │ │ └── OceanbaseSideParserTest.java
│ └── pom.xml
├── oceanbase-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── oceanbase
│ │ │ ├── OceanbaseDialect.java
│ │ │ ├── OceanbaseSink.java
│ │ │ └── table
│ │ │ └── OceanbaseSinkParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── ocean
│ │ ├── OceanbaseDialectTest.java
│ │ ├── OceanbaseSinkTest.java
│ │ └── table
│ │ └── OceanbaseSinkParserTest.java
└── pom.xml
├── oracle
├── oracle-side
│ ├── oracle-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── oracle
│ │ │ │ ├── OracleAllReqRow.java
│ │ │ │ └── OracleAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── oracle
│ │ │ └── OracleAllReqRowTest.java
│ ├── oracle-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── oracle
│ │ │ │ ├── OracleAsyncReqRow.java
│ │ │ │ └── OracleAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── oracle
│ │ │ ├── OracleAsyncReqRowTest.java
│ │ │ └── OracleAsyncSideInfoTest.java
│ ├── oracle-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── oracle
│ │ │ │ └── table
│ │ │ │ └── OracleSideParser.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── oracle
│ │ │ └── table
│ │ │ └── OracleSideParserTest.java
│ └── pom.xml
├── oracle-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── oracle
│ │ │ ├── OracleDialect.java
│ │ │ ├── OracleSink.java
│ │ │ └── table
│ │ │ └── OracleSinkParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── oracle
│ │ ├── OracleDialectTest.java
│ │ ├── OracleSinkTest.java
│ │ └── table
│ │ └── OracleSinkParserTest.java
└── pom.xml
├── polardb
├── polardb-side
│ ├── polardb-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── polardb
│ │ │ │ ├── PolardbAllReqRow.java
│ │ │ │ └── PolardbAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── polardb
│ │ │ └── PolardbAllReqRowTest.java
│ ├── polardb-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── polardb
│ │ │ │ ├── PolardbAsyncReqRow.java
│ │ │ │ └── PolardbAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── polardb
│ │ │ └── PolardbAsyncReqRowTest.java
│ ├── polardb-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── polardb
│ │ │ │ └── table
│ │ │ │ └── PolardbSideParser.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── polardb
│ │ │ └── table
│ │ │ └── PolardbSideParserTest.java
│ └── pom.xml
├── polardb-sink
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── sink
│ │ │ └── polardb
│ │ │ ├── PolardbDialect.java
│ │ │ ├── PolardbSink.java
│ │ │ └── table
│ │ │ └── PolardbSinkParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── polardb
│ │ ├── PolardbDialectTest.java
│ │ ├── PolardbSinkTest.java
│ │ └── table
│ │ └── PolardbSinkParserTest.java
└── pom.xml
├── pom.xml
├── postgresql
├── pom.xml
├── postgresql-side
│ ├── pom.xml
│ ├── postgresql-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── postgresql
│ │ │ │ ├── PostgresqlAllReqRow.java
│ │ │ │ └── PostgresqlAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── postgresql
│ │ │ └── PostgresqlAllReqRowTest.java
│ ├── postgresql-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── postgresql
│ │ │ │ ├── PostgresqlAsyncReqRow.java
│ │ │ │ └── PostgresqlAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── postgresql
│ │ │ └── PostgresqlAsyncReqRowTest.java
│ └── postgresql-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── postgresql
│ │ │ └── table
│ │ │ └── PostgresqlSideParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── side
│ │ └── postgresql
│ │ └── table
│ │ └── PostgresqlSideParserTest.java
└── postgresql-sink
│ ├── pom.xml
│ └── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── postgresql
│ │ ├── PostgresqlDialect.java
│ │ ├── PostgresqlSink.java
│ │ ├── table
│ │ └── PostgresqlSinkParser.java
│ │ └── writer
│ │ └── CopyWriter.java
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── sink
│ └── postgresql
│ ├── PostgresqlDialectTest.java
│ ├── PostgresqlSinkTest.java
│ └── table
│ └── PostgresqlSinkParserTest.java
├── rdb
├── pom.xml
├── rdb-core
│ ├── pom.xml
│ └── src
│ │ └── main
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── core
│ │ └── rdb
│ │ ├── JdbcCheckKeys.java
│ │ ├── JdbcResourceCheck.java
│ │ └── util
│ │ └── JdbcConnectionUtil.java
├── rdb-side
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── rdb
│ │ │ ├── all
│ │ │ ├── AbstractRdbAllReqRow.java
│ │ │ └── RdbAllSideInfo.java
│ │ │ ├── async
│ │ │ ├── RdbAsyncReqRow.java
│ │ │ └── RdbAsyncSideInfo.java
│ │ │ ├── provider
│ │ │ └── DTC3P0DataSourceProvider.java
│ │ │ ├── table
│ │ │ ├── RdbSideParser.java
│ │ │ └── RdbSideTableInfo.java
│ │ │ └── util
│ │ │ └── SwitchUtil.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── side
│ │ └── rdb
│ │ ├── all
│ │ ├── AbstractRdbAllReqRowTest.java
│ │ ├── ConcreteRdbAllReqRow.java
│ │ ├── RdbAllReqRowTestBase.java
│ │ └── RdbAllSideInfoTest.java
│ │ ├── async
│ │ ├── RdbAsyncReqRowTest.java
│ │ ├── RdbAsyncReqRowTestBase.java
│ │ └── RdbAsyncSideInfoTest.java
│ │ ├── provider
│ │ └── DTC3P0DataSourceProviderTest.java
│ │ ├── table
│ │ ├── RdbSideParserTest.java
│ │ └── RdbSideTableInfoTest.java
│ │ ├── testutil
│ │ └── ArgFactory.java
│ │ └── util
│ │ └── SwitchUtilTest.java
└── rdb-sink
│ ├── pom.xml
│ └── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── rdb
│ │ ├── AbstractRdbSink.java
│ │ ├── JDBCOptions.java
│ │ ├── JDBCTypeConvertUtils.java
│ │ ├── dialect
│ │ └── JDBCDialect.java
│ │ ├── format
│ │ ├── AbstractJDBCOutputFormat.java
│ │ └── JDBCUpsertOutputFormat.java
│ │ ├── table
│ │ ├── RdbSinkParser.java
│ │ └── RdbTableInfo.java
│ │ └── writer
│ │ ├── AbstractUpsertWriter.java
│ │ ├── AppendOnlyWriter.java
│ │ └── JDBCWriter.java
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── sink
│ └── rdb
│ ├── AbstractRdbSinkTest.java
│ ├── ConcreteRdbSink.java
│ ├── JDBCOptionsTest.java
│ ├── JDBCTypeConvertUtilsTest.java
│ ├── dialect
│ ├── ConcreteJDBCDialect.java
│ └── JDBCDialectTest.java
│ ├── format
│ └── JDBCUpsertOutputFormatTest.java
│ └── table
│ ├── RdbSinkParserTest.java
│ └── RdbTableInfoTest.java
├── redis5
├── pom.xml
├── redis5-side
│ ├── pom.xml
│ ├── redis-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── redis
│ │ │ │ ├── RedisAllReqRow.java
│ │ │ │ └── RedisAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── redis
│ │ │ ├── RedisAllReqRowTest.java
│ │ │ └── RedisAllSideInfoTest.java
│ ├── redis-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── redis
│ │ │ │ ├── RedisAsyncReqRow.java
│ │ │ │ └── RedisAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── redis
│ │ │ ├── RedisAsyncReqRowTest.java
│ │ │ └── RedisAsyncSideInfoTest.java
│ └── redis-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── redis
│ │ │ ├── enums
│ │ │ └── RedisType.java
│ │ │ └── table
│ │ │ ├── RedisSideParser.java
│ │ │ ├── RedisSideReqRow.java
│ │ │ └── RedisSideTableInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── side
│ │ └── redis
│ │ ├── enums
│ │ └── RedisTypeTest.java
│ │ └── table
│ │ ├── RedisSideParserTest.java
│ │ └── RedisSideReqRowTest.java
└── redis5-sink
│ ├── pom.xml
│ └── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── redis
│ │ ├── RedisOutputFormat.java
│ │ ├── RedisSink.java
│ │ ├── enums
│ │ └── RedisType.java
│ │ └── table
│ │ ├── RedisSinkParser.java
│ │ └── RedisTableInfo.java
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── sink
│ └── redis
│ ├── RedisOutputFormatTest.java
│ ├── RedisSinkTest.java
│ └── table
│ └── RedisSinkParserTest.java
├── serversocket
├── pom.xml
└── serversocket-source
│ ├── pom.xml
│ └── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── source
│ │ └── serversocket
│ │ ├── CustomerSocketTextStreamFunction.java
│ │ ├── ServersocketSource.java
│ │ └── table
│ │ ├── ServersocketSourceParser.java
│ │ └── ServersocketSourceTableInfo.java
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── source
│ └── serversocket
│ ├── CustomerSocketTextStreamFunctionTest.java
│ ├── ServersocketSourceTest.java
│ └── table
│ └── ServersocketSourceParserTest.java
├── solr
├── pom.xml
└── solr-sink
│ ├── pom.xml
│ └── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── solr
│ │ ├── SolrOutputFormat.java
│ │ ├── SolrSink.java
│ │ ├── client
│ │ ├── CloudSolrClientProvider.java
│ │ └── FlinkxKrb5HttpClientBuilder.java
│ │ ├── options
│ │ ├── KerberosOptions.java
│ │ ├── SolrClientOptions.java
│ │ └── SolrWriteOptions.java
│ │ └── table
│ │ ├── SolrSinkParser.java
│ │ └── SolrTableInfo.java
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── sink
│ └── solr
│ └── table
│ └── SolrTableInfoTest.java
├── sqlserver
├── pom.xml
├── sqlserver-side
│ ├── pom.xml
│ ├── sqlserver-all-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── sqlserver
│ │ │ │ ├── SqlserverAllReqRow.java
│ │ │ │ └── SqlserverAllSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── sqlserver
│ │ │ └── SqlserverAllReqRowTest.java
│ ├── sqlserver-async-side
│ │ ├── pom.xml
│ │ └── src
│ │ │ ├── main
│ │ │ └── java
│ │ │ │ └── com
│ │ │ │ └── dtstack
│ │ │ │ └── flink
│ │ │ │ └── sql
│ │ │ │ └── side
│ │ │ │ └── sqlserver
│ │ │ │ ├── SqlserverAsyncReqRow.java
│ │ │ │ └── SqlserverAsyncSideInfo.java
│ │ │ └── test
│ │ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── sqlserver
│ │ │ └── SqlserverAsyncReqRowTest.java
│ └── sqlserver-side-core
│ │ ├── pom.xml
│ │ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── sqlserver
│ │ │ └── table
│ │ │ └── SqlserverSideParser.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── side
│ │ └── sqlserver
│ │ └── table
│ │ └── SqlserverSideParserTest.java
└── sqlserver-sink
│ ├── pom.xml
│ └── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── sink
│ │ └── sqlserver
│ │ ├── SqlserverDialect.java
│ │ ├── SqlserverSink.java
│ │ └── table
│ │ └── SqlserverSinkParser.java
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── sink
│ └── sqlserver
│ ├── SqlserverDialectTest.java
│ ├── SqlserverSinkTest.java
│ └── table
│ └── SqlserverSinkParserTest.java
└── tidb
├── pom.xml
├── tidb-side
├── pom.xml
├── tidb-all-side
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── tidb
│ │ │ ├── TidbAllReqRow.java
│ │ │ └── TidbAllSideInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── side
│ │ └── tidb
│ │ └── TidbAllReqRowTest.java
├── tidb-async-side
│ ├── pom.xml
│ └── src
│ │ ├── main
│ │ └── java
│ │ │ └── com
│ │ │ └── dtstack
│ │ │ └── flink
│ │ │ └── sql
│ │ │ └── side
│ │ │ └── tidb
│ │ │ ├── TidbAsyncReqRow.java
│ │ │ └── TidbAsyncSideInfo.java
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── side
│ │ └── tidb
│ │ └── TidbAsyncReqRowTest.java
└── tidb-side-core
│ ├── pom.xml
│ └── src
│ ├── main
│ └── java
│ │ └── com
│ │ └── dtstack
│ │ └── flink
│ │ └── sql
│ │ └── side
│ │ └── tidb
│ │ └── table
│ │ └── TidbSideParser.java
│ └── test
│ └── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── side
│ └── tidb
│ └── table
│ └── TidbSideParserTest.java
└── tidb-sink
├── pom.xml
└── src
├── main
└── java
│ └── com
│ └── dtstack
│ └── flink
│ └── sql
│ └── sink
│ └── tidb
│ ├── TidbDialect.java
│ ├── TidbSink.java
│ └── table
│ └── TidbSinkParser.java
└── test
└── java
└── com
└── dtstack
└── flink
└── sql
└── sink
└── tidb
├── TidbDialectTest.java
├── TidbSinkTest.java
└── table
└── TidbSinkParserTest.java
/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | .idea/
3 | /.idea/*
4 | target/
5 | .class
6 | .project
7 | .classpath
8 | *.eclipse.*
9 | *.iml
10 | plugins/
11 | sqlplugins/
12 | lib/
13 | .vertx/
14 | .DS_Store
15 | bin/nohup.out
16 | .DS_Store
17 | bin/sideSql.txt
18 | *.keytab
19 | krb5.conf
20 | .gradle
21 | gradle
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - validate
3 | - test
4 |
5 | test-job:
6 | stage: test
7 | script:
8 | - mvn clean org.jacoco:jacoco-maven-plugin:0.7.8:prepare-agent package -Dmaven.test.failure.ignore=true -q
9 | only:
10 | - v1.10.0_dev
11 | tags:
12 | - dt-insight-engine
13 |
14 | validate-job:
15 | stage: validate
16 | script:
17 | - mvn sonar:sonar -Dsonar.projectKey="dt-insight-engine/flinkStreamSQL" -Dsonar.login=11974c5e9a29625efa09fdc3c3fdc031efb1aab1 -Dsonar.host.url=http://172.16.100.198:9000 -Dsonar.jdbc.url=jdbc:postgresql://172.16.100.198:5432/sonar -Dsonar.java.binaries=target/sonar
18 | - sh ci/sonar_notify.sh
19 | only:
20 | - v1.10.0_dev
21 | tags:
22 | - dt-insight-engine
23 |
--------------------------------------------------------------------------------
/aws/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.aws
14 | pom
15 |
16 | aws-sink
17 |
18 |
19 |
20 | 1.0-SNAPSHOT
21 |
22 |
23 |
24 |
25 | com.dtstack.flink
26 | sql.core
27 | ${sql.core.version}
28 | provided
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/bin/submit.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | #
4 | # Licensed to the Apache Software Foundation (ASF) under one or more
5 | # contributor license agreements. See the NOTICE file distributed with
6 | # this work for additional information regarding copyright ownership.
7 | # The ASF licenses this file to You under the Apache License, Version 2.0
8 | # (the "License"); you may not use this file except in compliance with
9 | # the License. You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | #
19 |
20 | set -e
21 |
22 | export SQL_HOME="$(cd "`dirname "$0"`"/..; pwd)"
23 |
24 | # Find the java binary
25 | if [ -n "${JAVA_HOME}" ]; then
26 | JAVA_RUN="${JAVA_HOME}/bin/java"
27 | else
28 | if [ `command -v java` ]; then
29 | JAVA_RUN="java"
30 | else
31 | echo "JAVA_HOME is not set" >&2
32 | exit 1
33 | fi
34 | fi
35 |
36 | JAR_DIR=$SQL_HOME/lib/*
37 | CLASS_NAME=com.dtstack.flink.sql.launcher.LauncherMain
38 |
39 | echo "sql submit ..."
40 | nohup $JAVA_RUN -cp $JAR_DIR $CLASS_NAME $@ &
--------------------------------------------------------------------------------
/cassandra/cassandra-side/cassandra-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.cassandra
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.cassandra.core
14 |
15 |
16 | com.dtstack.flink
17 | sql.core
18 | 1.0-SNAPSHOT
19 | provided
20 |
21 |
22 | jar
23 |
24 |
--------------------------------------------------------------------------------
/cassandra/cassandra-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.cassandra
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.cassandra
14 | cassandra-side
15 |
16 | cassandra-side-core
17 | cassandra-async-side
18 | cassandra-all-side
19 |
20 |
21 | pom
22 |
23 |
--------------------------------------------------------------------------------
/cassandra/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 | sql.cassandra
12 | pom
13 |
14 |
15 | cassandra-sink
16 | cassandra-side
17 |
18 |
19 |
20 |
21 | com.dtstack.flink
22 | sql.core
23 | 1.0-SNAPSHOT
24 | provided
25 |
26 |
27 | com.datastax.cassandra
28 | cassandra-driver-core
29 | 3.6.0
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/ci/sonar_analyze.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | mvn clean test -Dmaven.test.failure.ignore=true -q
4 |
5 | mvn sonar:sonar \
6 | -Dsonar.projectKey="dt-insight-engine/flinkStreamSQL" \
7 | -Dsonar.login=11974c5e9a29625efa09fdc3c3fdc031efb1aab1 \
8 | -Dsonar.host.url=http://172.16.100.198:9000 \
9 | -Dsonar.jdbc.url=jdbc:postgresql://172.16.100.198:5432/sonar \
10 | -Dsonar.java.binaries=target/classes \
11 | -Dsonar.inclusions="src/main/java/com/dtstack/flink/**/*" \
12 | -Dsonar.exclusions="src/main/java/org/**/*"
13 |
--------------------------------------------------------------------------------
/ci/sonar_notify.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #参考钉钉文档 https://open-doc.dingtalk.com/microapp/serverapi2/qf2nxq
3 | sonarreport=$(curl -s http://172.16.100.198:8082/?projectname=dt-insight-engine/flinkStreamSQL)
4 | curl -s "https://oapi.dingtalk.com/robot/send?access_token=58fd731d8bed3b17708d3aa27e49a7e2c41c7e6545f6c4be3170963a7bba7e2a" \
5 | -H "Content-Type: application/json" \
6 | -d "{
7 | \"msgtype\": \"markdown\",
8 | \"markdown\": {
9 | \"title\":\"sonar代码质量\",
10 | \"text\": \"## sonar代码质量报告: \n
11 | > [sonar地址](http://172.16.100.198:9000/dashboard?id=dt-insight-engine/flinkStreamSQL) \n
12 | > ${sonarreport} \n\"
13 | }
14 | }"
--------------------------------------------------------------------------------
/clickhouse/clickhouse-side/clickhouse-all-side/src/test/java/com/dtstack/flink/sql/side/clickhouse/ClickhouseAllReqRowTest.java:
--------------------------------------------------------------------------------
package com.dtstack.flink.sql.side.clickhouse;

import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;

/**
 * Runs the shared RDB ALL-side test scenarios from {@link RdbAllReqRowTestBase}
 * against {@link ClickhouseAllReqRow}.
 */
public class ClickhouseAllReqRowTest extends RdbAllReqRowTestBase {

    @Override
    protected void init() {
        // select which side-table implementation the base scenarios exercise
        clazz = ClickhouseAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/clickhouse/clickhouse-side/clickhouse-async-side/src/test/java/com/dtstack/flink/sql/side/clickhouse/ClickhouseAsyncReqRowTest.java:
--------------------------------------------------------------------------------
package com.dtstack.flink.sql.side.clickhouse;

import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;

/**
 * Runs the shared RDB async-side test scenarios from {@link RdbAsyncReqRowTestBase}
 * against {@link ClickhouseAsyncReqRow}.
 */
public class ClickhouseAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    @Override
    protected void init() {
        // select which side-table implementation the base scenarios exercise
        clazz = ClickhouseAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/clickhouse/clickhouse-side/clickhouse-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.clickhouse
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.side.clickhouse.core
13 | 1.0-SNAPSHOT
14 | jar
15 | clickhouse-side-core
16 |
17 |
--------------------------------------------------------------------------------
/clickhouse/clickhouse-side/clickhouse-side-core/src/test/java/com/dtstack/flink/sql/side/clickhouse/table/ClickhouseSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.clickhouse.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class ClickhouseSideParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | ClickhouseSideParser sideParser = new ClickhouseSideParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR , PRIMARY KEY (id) , PERIOD FOR SYSTEM_TIME";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "clickhouse";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/clickhouse/clickhouse-sink/src/test/java/com/dtstack/flink/sql/sink/clickhouse/ClickhouseDialectTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.clickhouse;
2 |
3 | import org.junit.Assert;
4 | import org.junit.Before;
5 | import org.junit.Test;
6 |
7 | public class ClickhouseDialectTest {
8 |
9 | ClickhouseDialect dialect;
10 |
11 | @Before
12 | public void setUp() throws Exception {
13 | dialect = new ClickhouseDialect();
14 | }
15 |
16 | @Test
17 | public void testEasyUtils() {
18 | final String s = "jdbc:clickhouse://localhost:3306/foo_db";
19 | boolean r = dialect.canHandle(s);
20 | Assert.assertTrue(r);
21 |
22 | String driver = dialect.defaultDriverName().get();
23 | Assert.assertTrue(driver.equals("ru.yandex.clickhouse.ClickHouseDriver"));
24 | }
25 |
26 | }
--------------------------------------------------------------------------------
/clickhouse/clickhouse-sink/src/test/java/com/dtstack/flink/sql/sink/clickhouse/table/ClickhouseSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.clickhouse.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class ClickhouseSinkParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | ClickhouseSinkParser mysqlSinkParser = new ClickhouseSinkParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= mysqlSinkParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "clickhouse";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/clickhouse/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.clickhouse
13 | pom
14 |
15 |
16 | clickhouse-side
17 | clickhouse-sink
18 |
19 |
20 |
21 |
22 | 1.0-SNAPSHOT
23 | 0.1.55
24 |
25 |
26 |
27 |
28 | com.dtstack.flink
29 | sql.core
30 | ${sql.core.version}
31 | provided
32 |
33 |
34 |
35 | ru.yandex.clickhouse
36 | clickhouse-jdbc
37 | ${clickhouse.jdbc.version}
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/console/console-sink/src/test/java/com/dtstack/flink/sql/sink/console/ConsoleSinkTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.console;
2 |
/**
 * @program: flink.sql
 * @description: placeholder — no tests implemented yet
 * @author: wuren
 * @create: 2020-06-16 20:18
 **/
public class ConsoleSinkTest {
    // NOTE(review): empty test class; ConsoleSink currently has no coverage
}
11 |
--------------------------------------------------------------------------------
/console/console-sink/src/test/java/com/dtstack/flink/sql/sink/console/table/ConsoleSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.console.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
10 | public class ConsoleSinkParserTest {
11 |
12 | @Test
13 | public void getTableInfo() {
14 | ConsoleSinkParser sinkParser = new ConsoleSinkParser();
15 |
16 | final String tableName = "table_foo";
17 | final String fieldsInfo = "id INT, name VARCHAR";
18 |
19 | Map props = new HashMap();
20 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
21 | props.put("tablename", "table_foo");
22 | props.put("username", "foo");
23 | props.put("password", "foo");
24 |
25 | AbstractTableInfo tableInfo= sinkParser.getTableInfo(tableName, fieldsInfo, props);
26 |
27 | final String table_type = tableInfo.getName();
28 | Assert.assertTrue(tableName.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/console/console-sink/src/test/java/com/dtstack/flink/sql/sink/console/table/ConsoleTableInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.console.table;
2 |
3 | import org.junit.Assert;
4 | import org.junit.Before;
5 | import org.junit.Test;
6 |
7 | /**
8 | * @program: flink.sql
9 | * @description:
10 | * @author: wuren
11 | * @create: 2020-06-16 20:18
12 | **/
13 | public class ConsoleTableInfoTest {
14 | private ConsoleTableInfo consoleTableInfo;
15 | @Before
16 | public void setUp() {
17 | consoleTableInfo = new ConsoleTableInfo();
18 | }
19 |
20 | @Test
21 | public void testCheck() {
22 | Boolean b = consoleTableInfo.check();
23 | Assert.assertTrue(b == true);
24 | }
25 |
26 | @Test
27 | public void testGetType() {
28 | String r = consoleTableInfo.getType();
29 | Assert.assertTrue("console".equals(r));
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/console/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 | sql.console
12 | pom
13 |
14 |
15 | console-sink
16 |
17 |
18 |
19 |
20 | com.dtstack.flink
21 | sql.core
22 | 1.0-SNAPSHOT
23 | provided
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/constant/PluginParamConsts.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.constant;
20 |
/**
 * Common plugin parameter keys shared across source/side/sink plugins.
 *
 * @program: flinkStreamSQL
 * @author: wuren
 * @create: 2020/09/15
 */
public class PluginParamConsts {

    /** Kerberos principal parameter key. */
    public static final String PRINCIPAL = "principal";
    /** Kerberos keytab file parameter key. */
    public static final String KEYTAB = "keytab";
    /** krb5 configuration file parameter key. */
    public static final String KRB5_CONF = "krb5conf";

    /** Max buffered rows before a sink flush. */
    public static final String SINK_BUFFER_FLUSH_MAX_ROWS = "sink.buffer-flush.max-rows";
    /** Interval between sink buffer flushes. */
    public static final String SINK_BUFFER_FLUSH_INTERVAL = "sink.buffer-flush.interval";
    /** Max retry attempts on sink failure. */
    public static final String SINK_MAX_RETRIES = "sink.max-retries";

    private PluginParamConsts() {
        // constants holder — not instantiable
    }
}
37 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/enums/ClusterMode.java:
--------------------------------------------------------------------------------
1 |
2 | /**
3 | * Licensed to the Apache Software Foundation (ASF) under one
4 | * or more contributor license agreements. See the NOTICE file
5 | * distributed with this work for additional information
6 | * regarding copyright ownership. The ASF licenses this file
7 | * to you under the Apache License, Version 2.0 (the
8 | * "License"); you may not use this file except in compliance
9 | * with the License. You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 |
20 | package com.dtstack.flink.sql.enums;
21 |
/**
 * Submission modes for a job.
 * Created by sishu.yss on 2018/10/10.
 */
public enum ClusterMode {

    // run in a local in-process cluster
    local(0),
    // submit the job to a standalone cluster
    standalone(1),
    // submit the job to a flink session already running on yarn
    yarn(2),
    // submit the job to yarn as a per-job application
    yarnPer(3);

    /** numeric code of this mode; final — assigned once in the constructor */
    private final int type;

    ClusterMode(int type) {
        this.type = type;
    }

    public int getType() {
        return this.type;
    }
}
46 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/enums/ECacheContentType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.enums;
22 |
/**
 * Content types of a side-table cache entry.
 * Date: 2018/9/10
 * Company: www.dtstack.com
 *
 * @author xuchao
 */

public enum ECacheContentType {

    /**
     * no value (lookup miss)
     */
    MissVal(0),
    /**
     * a single row
     */
    SingleLine(1),
    /**
     * multiple rows
     */
    MultiLine(2);

    /** numeric code; final — assigned once in the constructor */
    final int type;

    ECacheContentType(int type) {
        this.type = type;
    }

    public int getType() {
        return this.type;
    }
}
56 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/enums/ECacheType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.enums;
22 |
/**
 * Side-table cache strategies.
 * Date: 2018/8/2
 * Company: www.dtstack.com
 * @author xuchao
 */
public enum ECacheType {
    /**
     * no caching
     */
    NONE,
    /**
     * least-recently-used cache
     */
    LRU,
    /**
     * full (pre-loaded) cache
     */
    ALL;

    /**
     * Checks whether the given name matches one of the cache types, ignoring case.
     *
     * @param type candidate cache type name; may be null
     * @return true if a matching constant exists
     */
    public static boolean isValid(String type) {
        boolean matched = false;
        for (ECacheType candidate : ECacheType.values()) {
            matched = matched || candidate.name().equalsIgnoreCase(type);
        }
        return matched;
    }
}
53 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/enums/EDatabaseType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.enums;
20 |
/**
 * Supported relational database types.
 *
 * Company: www.dtstack.com
 * @author jiangbo
 */
public enum EDatabaseType {

    /**
     * mysql
     */
    MYSQL,
    /**
     * sqlserver
     */
    SQLSERVER,
    /**
     * oracle
     */
    ORACLE,

}
43 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/enums/EStateBackend.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.enums;
20 |
/**
 * Flink state backend types.
 * Date: 2019/11/15
 * Company: www.dtstack.com
 * @author maqi
 */
public enum EStateBackend {
    /**
     * memory
     */
    MEMORY,
    /**
     * rocksdb
     */
    ROCKSDB,
    /**
     * filesystem
     */
    FILESYSTEM;

    /**
     * Parses a case-insensitive state backend name into a constant.
     *
     * @param type backend name, e.g. "rocksdb"
     * @return the matching constant
     * @throws RuntimeException if {@code type} is null
     * @throws IllegalArgumentException if the name matches no constant
     */
    public static EStateBackend convertFromString(String type) {
        if (type == null) {
            throw new RuntimeException("null StateBackend!");
        }
        // Locale.ROOT avoids locale-sensitive case mapping (e.g. the Turkish
        // dotted/dotless i), which would break "filesystem" under tr locales.
        return valueOf(type.toUpperCase(java.util.Locale.ROOT));
    }
}
48 |
49 |
50 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/enums/ETableType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.enums;
22 |
/**
 * Kinds of tables in a job definition.
 * Date: 2018/7/5
 * Company: www.dtstack.com
 * @author xuchao
 */
public enum ETableType {
    // source table
    SOURCE(1),
    // sink (destination) table
    SINK(2);

    /** numeric code; final — assigned once in the constructor */
    final int type;

    ETableType(int type) {
        this.type = type;
    }

    public int getType() {
        return type;
    }
}
45 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/enums/EUpdateMode.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.enums;
20 |
/**
 * Handling modes for retract-stream data.
 *
 * Reason:
 * Date: 2019/1/2
 * Company: www.dtstack.com
 * @author maqi
 */
public enum EUpdateMode {
    // emit increments only; never retract previously emitted data
    APPEND(0),
    // delete the retracted record first, then apply the update
    UPSERT(1);

    /** numeric code of this mode; final — assigned once in the constructor */
    private final int type;

    EUpdateMode(int type) {
        this.type = type;
    }

    public int getType() {
        return this.type;
    }
}
45 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/exception/ErrorCode.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.exception;
2 |
/**
 * An error code paired with a human-readable description.
 */
public interface ErrorCode {

    /**
     * Gets the error code.
     *
     * @return the code identifying this error
     */
    String getCode();

    /**
     * Gets the error description.
     *
     * @return a human-readable description of this error
     */
    String getDescription();
}
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/exception/ExceptionTrace.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.exception;
2 |
3 | import org.apache.flink.runtime.execution.SuppressRestartsException;
4 |
5 | import java.util.Objects;
6 |
/**
 * Helper for tracing a throwable to its original (root) cause.
 *
 * @author tiezhu
 * @date 2021/2/2 Tuesday
 * Company dtstack
 */
public class ExceptionTrace {

    /** Upper bound on cause-chain traversal, guarding against cyclic chains. */
    private static final int MAX_CAUSE_DEPTH = 100;

    private ExceptionTrace() {
        // static utility — no instances
    }

    /**
     * Returns the message of the deepest (original) cause of the given throwable.
     *
     * @param e the throwable to inspect; must not be null
     * @return the root cause's message (may itself be null)
     */
    public static String traceOriginalCause(Throwable e) {
        Throwable current = e;
        int depth = 0;
        // Iterative walk instead of recursion; the depth cap prevents an
        // infinite loop on pathological cyclic cause chains.
        while (current.getCause() != null && depth++ < MAX_CAUSE_DEPTH) {
            current = current.getCause();
        }
        return current.getMessage();
    }
}
24 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/exception/sqlparse/WithoutTableNameException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.exception.sqlparse;
20 |
21 | import com.dtstack.flink.sql.exception.BaseException;
22 | import com.dtstack.flink.sql.exception.ErrorCode;
23 |
/**
 * @author: chuixue
 * @create: 2020-11-30 14:49
 * @description: thrown when a stream joins a side table and fields in
 * select/join/group by are not qualified with a table alias (t.field)
 **/
public class WithoutTableNameException extends BaseException {
    public WithoutTableNameException(String msg){
        super(msg);
    }

    public WithoutTableNameException(ErrorCode errorCode){
        super(errorCode);
    }
}
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/format/FormatType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.format;
20 |
/**
 * Indicates the input data format of a source table.
 * company: www.dtstack.com
 * author: toutian
 * create: 2019/12/24
 */
public enum FormatType {
    //Indicates that the data is in nested json format handled by the dt parser (default)
    DT_NEST,
    //Indicates that the data is in json format
    JSON,
    //Indicates that the data is in avro format
    AVRO,
    //Indicates that the data is in csv format
    CSV
}
37 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/metric/EventDelayGauge.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.metric;
20 |
21 | import org.apache.flink.metrics.Gauge;
22 |
23 | /**
24 | * event data delay time ,unit seconds
25 | * Date: 2018/10/18
26 | * Company: www.dtstack.com
27 | * @author xuchao
28 | */
29 |
30 | public class EventDelayGauge implements Gauge {
31 |
32 | private volatile int delayTime = 0;
33 |
34 | public void setDelayTime(int delayTime) {
35 | this.delayTime = delayTime;
36 | }
37 |
38 | @Override
39 | public Integer getValue() {
40 | return delayTime;
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/parser/IParser.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.parser;
22 |
23 | /**
24 | * Reason:
25 | * Date: 2018/6/26
26 | * Company: www.dtstack.com
27 | * @author xuchao
28 | */
29 |
30 | public interface IParser {
31 |
32 | /**
33 | * 是否满足该解析类型
34 | * @param sql
35 | * @return
36 | */
37 | boolean verify(String sql);
38 |
39 | /***
40 | * 解析sql
41 | * @param sql
42 | * @param sqlTree
43 | */
44 | void parseSql(String sql, SqlTree sqlTree) throws Exception;
45 | }
46 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/side/AliasInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.side;
22 |
/**
 * Pairs a table name with the alias it is referenced by in the SQL text.
 * Date: 2018/7/24
 * Company: www.dtstack.com
 * @author xuchao
 */

public class AliasInfo {

    // the real table name
    private String name;

    // the alias used in the SQL statement
    private String alias;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAlias() {
        return alias;
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }
}
52 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/side/CacheMissVal.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.side;
22 |
23 | import com.dtstack.flink.sql.enums.ECacheContentType;
24 | import com.dtstack.flink.sql.side.cache.CacheObj;
25 |
26 | /**
27 | * Only the data marked to dimension table miss
28 | * Date: 2018/8/28
29 | * Company: www.dtstack.com
30 | * @author xuchao
31 | */
32 |
33 | public class CacheMissVal {
34 |
35 | private static CacheObj missObj = CacheObj.buildCacheObj(ECacheContentType.MissVal, null);
36 |
37 | public static CacheObj getMissKeyObj(){
38 | return missObj;
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/side/ISideReqRow.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.side;
20 |
21 | import org.apache.flink.table.dataformat.BaseRow;
22 |
23 | /**
24 | *
25 | * Date: 2018/12/4
26 | * Company: www.dtstack.com
27 | * @author xuchao
28 | */
public interface ISideReqRow {

    /**
     * Merges a looked-up side (dimension) table record into the stream row.
     *
     * @param input     the incoming stream row
     * @param sideInput the matched side-table data; concrete shape depends on
     *                  the implementation (presumably null on a lookup miss —
     *                  TODO confirm against implementations)
     * @return the joined output row
     */
    BaseRow fillData(BaseRow input, Object sideInput);

}
34 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/sink/IStreamSinkGener.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.sink;
22 |
23 | import com.dtstack.flink.sql.table.AbstractTargetTableInfo;
24 |
25 | /**
26 | * Reason:
27 | * Date: 2017/7/31
28 | * Company: www.dtstack.com
29 | * @author xuchao
30 | */
31 | public interface IStreamSinkGener {
32 |
33 | T genStreamSink(AbstractTargetTableInfo targetTableInfo);
34 | }
35 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/table/ITableFieldDealHandler.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.table;
22 |
23 | import java.util.regex.Matcher;
24 |
/**
 * Callback invoked while parsing CREATE TABLE field definitions to handle a
 * matched clause (e.g. PRIMARY KEY) and record it on the table descriptor.
 * Reason:
 * Date: 2018/7/4
 * Company: www.dtstack.com
 * @author xuchao
 */
public interface ITableFieldDealHandler {

    /**
     * Records the primary-key information captured by {@code matcher} into
     * {@code tableInfo}.
     *
     * @param matcher   regex matcher positioned on the matched clause
     * @param tableInfo table descriptor to populate
     */
    void dealPrimaryKey(Matcher matcher, AbstractTableInfo tableInfo);
}
35 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/util/DtFileUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.util;
20 |
21 | import org.apache.flink.util.Preconditions;
22 |
23 | import java.io.File;
24 |
25 | /**
26 | * @program: flinkStreamSQL
27 | * @author: wuren
28 | * @create: 2020/09/21
29 | **/
30 | public class DtFileUtils {
31 | public static void checkExists(String path) {
32 | File file = new File(path);
33 | String errorMsg = "%s file is not exist!";
34 | Preconditions.checkState(file.exists(), errorMsg, path);
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/core/src/main/java/com/dtstack/flink/sql/util/PropertiesUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 | package com.dtstack.flink.sql.util;
21 |
22 | import java.util.Properties;
23 |
/**
 * Helpers for normalizing {@link Properties} contents.
 */
public class PropertiesUtils {

    private PropertiesUtils() {
        // utility class — no instances
    }

    /**
     * Returns a copy of {@code confProperties} with every key and value
     * trimmed of leading/trailing whitespace. The input is not modified.
     *
     * @param confProperties properties to normalize; entries are read via {@code toString()}
     * @return a new {@link Properties} holding the trimmed entries
     */
    public static Properties propertiesTrim(Properties confProperties) {
        Properties properties = new Properties();
        confProperties.forEach(
                (k, v) -> properties.put(k.toString().trim(), v.toString().trim())
        );
        return properties;
    }
}
35 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/dtstack/flink/App.scala:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink
2 |
/**
 * Placeholder entry point for the core module's Scala sources.
 * Prints a greeting only; not used by the SQL engine itself.
 */
object App {
  def main(args: Array[String]): Unit = {
    println( "Hello World!" )
  }
}
12 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/classloader/ClassLoaderSupplierCallBackTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.classloader;
2 |
3 | import org.junit.Test;
4 | import org.junit.runner.RunWith;
5 | import org.powermock.modules.junit4.PowerMockRunner;
6 |
7 | import java.net.URL;
8 |
9 | import static org.mockito.Mockito.mock;
10 |
/**
 * Smoke test: callbackAndReset must run against a mocked supplier and a
 * fresh DtClassLoader without throwing. The result is not asserted.
 */
@RunWith(PowerMockRunner.class)
public class ClassLoaderSupplierCallBackTest {

    @Test
    public void testCallBackAndRest() throws Exception {
        // supplier is a mock — only the callback/reset flow itself is exercised
        ClassLoaderSupplier classLoaderSupplier = mock(ClassLoaderSupplier.class);
        URL[] urls = new URL[1];
        ClassLoaderSupplierCallBack.callbackAndReset(classLoaderSupplier, new DtClassLoader(urls));
    }

}
22 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/dirtyManager/TestDirtyDataManager.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.dirtyManager;
2 |
3 | import com.dtstack.flink.sql.dirtyManager.manager.DirtyDataManager;
4 |
5 | import java.util.HashMap;
6 | import java.util.Map;
7 |
8 | /**
9 | * @author tiezhu
10 | * Company dtstack
11 | * Date 2020/8/28 星期五
12 | */
public class TestDirtyDataManager {
    // TODO(review): empty placeholder — no test cases implemented yet for DirtyDataManager

}
16 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/exec/ApiResultTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.exec;
2 |
3 | import org.junit.Test;
4 |
public class ApiResultTest {

    // NOTE(review): both tests only verify the calls complete without throwing;
    // the produced JSON strings are never asserted.
    @Test
    public void createSuccessResultJsonStr(){
        ApiResult.createSuccessResultJsonStr("ss", 12L);
    }
    @Test
    public void createErrorResultJsonStr(){
        ApiResult.createErrorResultJsonStr("ss");
    }


}
18 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/exec/FlinkSQLExecTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.exec;
2 |
3 | import org.apache.flink.table.api.Table;
4 | import org.apache.flink.table.api.java.StreamTableEnvironment;
5 | import org.junit.Test;
6 | import org.junit.runner.RunWith;
7 | import org.powermock.core.classloader.annotations.PrepareForTest;
8 | import org.powermock.modules.junit4.PowerMockRunner;
9 |
10 | import static org.mockito.Mockito.mock;
11 |
12 |
@RunWith(PowerMockRunner.class)
@PrepareForTest({FlinkSQLExec.class, Table.class})
public class FlinkSQLExecTest {


    // NOTE(review): only sets up a statement string and a mocked environment;
    // FlinkSQLExec is never invoked, so nothing is actually verified yet.
    @Test
    public void sqlUpdate() throws Exception {
        String stmt = "insert into a select fieldA from b";
        StreamTableEnvironment tableEnv = mock(StreamTableEnvironment.class);


    }



    // NOTE(review): body is entirely commented out — dead test; either restore
    // the reflective ignoreCase invocation or delete the method.
    @Test
    public void ignoreCase() throws NoSuchMethodException {
        // Method method = FlinkSQLExec.class.getDeclaredMethod("ignoreCase",String.class, String.class);
        // String[] queryFieldNames = new String[1];
        // queryFieldNames[0] = "a";
        // String[] sinkFieldNames = new String[1];
        // sinkFieldNames[0] = "a";
        // FlinkSQLExec.ignoreCase(queryFieldNames, sinkFieldNames);
    }

}
39 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/exec/PraramsInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.exec;
2 |
3 | import com.google.common.collect.Lists;
4 | import org.junit.Test;
5 |
6 | import java.util.Properties;
7 |
// NOTE(review): class name is misspelled — should be "ParamsInfoTest"
public class PraramsInfoTest {

    /**
     * Builds a ParamsInfo through its builder with every setter exercised,
     * then prints it. No assertions are made on the built object.
     */
    @Test
    public void paramInfo(){
        ParamsInfo paramsInfo = ParamsInfo.builder()
                .setConfProp(new Properties())
                .setDeployMode("local")
                .setName("test")
                .setJarUrlList(Lists.newArrayList())
                .setLocalSqlPluginPath(".")
                .setRemoteSqlPluginPath(".")
                .setPluginLoadMode("classpath")
                .setSql("select a from b")
                .build();
        System.out.println(paramsInfo.toString());

    }
}
26 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/factory/DTFactoryTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.factory;
2 |
3 | import org.junit.Test;
4 |
5 | public class DTFactoryTest {
6 |
7 | @Test
8 | public void testFactory(){
9 | DTThreadFactory dtThreadFactory = new DTThreadFactory("test");
10 | dtThreadFactory.newThread(new Runnable() {
11 | @Override
12 | public void run() {
13 | System.out.println("run..");
14 | }
15 | });
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/function/FunctionManagerTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.function;
2 |
3 | import org.apache.flink.table.api.java.StreamTableEnvironment;
4 | import org.apache.flink.table.functions.ScalarFunction;
5 | import org.junit.Test;
6 |
7 | import static org.mockito.Mockito.mock;
8 |
public class FunctionManagerTest extends ScalarFunction {

    /**
     * Registers SCALA, AGGREGATE and TABLE UDFs through FunctionManager
     * against a mocked table environment. This test class itself doubles as
     * the scalar function being registered.
     */
    @Test
    public void registerUDF(){
        StreamTableEnvironment tableEnvironment = mock(StreamTableEnvironment.class);

        FunctionManager.registerUDF("SCALA", "com.dtstack.flink.sql.function.FunctionManagerTest", "getResultType", tableEnvironment, Thread.currentThread().getContextClassLoader());
        FunctionManager.registerUDF("AGGREGATE", "org.apache.flink.table.planner.functions.aggfunctions.LastValueAggFunction", "resultTypeConvert", tableEnvironment, Thread.currentThread().getContextClassLoader());
        try{
            FunctionManager.registerUDF("TABLE", " org.apache.flink.table.plan.util.ObjectExplodeTableFunc", "collectArray", tableEnvironment, Thread.currentThread().getContextClassLoader());
        }catch (Exception e){
            // NOTE(review): TABLE registration failure is deliberately swallowed
            // (class name even carries a leading space); consider asserting the
            // expected exception instead of ignoring it silently.
        }
    }

    /**
     * transformTypes should accept an array of field classes (a single
     * String.class here) without throwing. The result is not asserted.
     */
    @Test
    public void transformTypes(){
        Class[] fieldTypes = new Class[1];
        fieldTypes[0] = String.class;
        FunctionManager.transformTypes(fieldTypes);
    }

}
32 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/option/OptionParseTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.option;
2 |
3 | import org.junit.Test;
4 |
public class OptionParseTest {

    /**
     * Feeds a full yarnPer-mode command line through OptionParser; succeeds
     * if every flag is accepted without throwing.
     * NOTE(review): paths are developer-machine specific and the parsed
     * options are never asserted.
     */
    @Test
    public void testOption() throws Exception {
        String[] sql = new String[]{"-mode", "yarnPer", "-sql", "/Users/maqi/tmp/json/group_tmp4.txt", "-name", "PluginLoadModeTest",
                "-localSqlPluginPath", "/Users/maqi/code/dtstack/dt-center-flinkStreamSQL/plugins",
                "-remoteSqlPluginPath", "/Users/maqi/code/dtstack/dt-center-flinkStreamSQL/plugins",
                "-flinkconf", "/Users/maqi/tmp/flink-1.8.1/conf",
                "-confProp", "{\"sql.checkpoint.cleanup.mode\":\"false\",\"sql.checkpoint.interval\":10000,\"time.characteristic\":\"EventTime\"}",
                "-yarnconf", "/Users/maqi/tmp/hadoop", "-flinkJarPath", "/Users/maqi/tmp/flink-1.8.1/lib", "-queue", "c", "-pluginLoadMode", "shipfile"};
        OptionParser optionParser = new OptionParser(sql);
    }
}
18 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/outputformat/AbstractDtRichOutputFormatTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.outputformat;
2 |
3 | import org.apache.flink.api.common.functions.RuntimeContext;
4 | import org.apache.flink.configuration.Configuration;
5 | import org.apache.flink.metrics.MetricGroup;
6 | import org.junit.Test;
7 |
8 | import java.io.IOException;
9 |
10 | import static org.mockito.Mockito.mock;
11 | import static org.mockito.Mockito.when;
12 |
public class AbstractDtRichOutputFormatTest {

    /**
     * initMetric must register its counters against the runtime context's
     * metric group; exercised through a no-op anonymous subclass with a
     * mocked RuntimeContext/MetricGroup.
     */
    @Test
    public void initMetric(){
        // minimal concrete subclass — all lifecycle methods are no-ops
        AbstractDtRichOutputFormat richOutputFormat = new AbstractDtRichOutputFormat() {
            @Override
            public void configure(Configuration parameters) {

            }

            @Override
            public void open(int taskNumber, int numTasks) throws IOException {

            }

            @Override
            public void writeRecord(Object record) throws IOException {

            }

            @Override
            public void close() throws IOException {

            }
        };
        RuntimeContext runtimeContext = mock(RuntimeContext.class);
        richOutputFormat.setRuntimeContext(runtimeContext);
        MetricGroup metricGroup = mock(MetricGroup.class);
        when(runtimeContext.getMetricGroup()).thenReturn(metricGroup);
        richOutputFormat.initMetric();
    }
}
45 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/parse/CreateFuncParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.parse;
2 |
3 | import com.dtstack.flink.sql.parser.CreateFuncParser;
4 | import com.dtstack.flink.sql.parser.SqlTree;
5 | import org.junit.Test;
6 |
public class CreateFuncParserTest {

    /**
     * verify() should accept a create-function style statement and
     * parseSql() should register it into a fresh SqlTree without throwing.
     */
    @Test
    public void parseSql(){
        String sql = "create table function with xxxx";

        CreateFuncParser parser = CreateFuncParser.newInstance();
        parser.verify(sql);

        parser.parseSql(sql, new SqlTree());
    }

}
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/parse/CreateTableParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.parse;
2 |
3 | import com.dtstack.flink.sql.parser.CreateTableParser;
4 | import com.dtstack.flink.sql.parser.SqlTree;
5 | import org.junit.Test;
6 |
public class CreateTableParserTest {

    /**
     * A plain CREATE TABLE ... WITH(...) statement must pass verify() and be
     * parsed into a fresh SqlTree without throwing. The tree contents are not
     * asserted.
     */
    @Test
    public void parseSql(){
        String sql = "CREATE TABLE MyResult(\n" +
                "    id varchar,\n" +
                "    name varchar,\n" +
                "    address varchar,\n" +
                "    message varchar,\n" +
                "    info varchar\n" +
                ")WITH(\n" +
                "    type = 'console'\n" +
                ");";
        CreateTableParser parser = CreateTableParser.newInstance();
        parser.verify(sql);
        parser.parseSql(sql, new SqlTree());
    }

}
26 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/side/AbstractSideTableInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side;
2 |
3 | import org.junit.Test;
4 |
5 | public class AbstractSideTableInfoTest {
6 |
7 | @Test
8 | public void getRowTypeInfo(){
9 | AbstractSideTableInfo sideTableInfo = new AbstractSideTableInfo() {
10 | @Override
11 | public boolean check() {
12 | return false;
13 | }
14 | };
15 | Class[] fieldClasses = new Class[1];
16 | fieldClasses[0] = String.class;
17 | sideTableInfo.setFieldClasses(fieldClasses);
18 |
19 | String[] fields = new String[1];
20 | fields[0] = "a";
21 |
22 | sideTableInfo.setFields(fields);
23 | sideTableInfo.getRowTypeInfo();
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/side/SideSQLParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side;
2 |
public class SideSQLParserTest {
    // TODO(review): empty placeholder — SideSQLParser has no test coverage yet
}
5 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/side/cache/LruSideCacheTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.cache;
2 |
3 | import com.dtstack.flink.sql.side.AbstractSideTableInfo;
4 | import org.junit.Before;
5 | import org.junit.Test;
6 |
7 | import static org.mockito.Mockito.mock;
8 | import static org.mockito.Mockito.when;
9 |
public class LruSideCacheTest {
    // cache under test, rebuilt before each case
    private LRUSideCache lruSideCache ;

    /** Builds an LRU cache (size 10, long timeout) from a mocked table info. */
    @Before
    public void before(){
        AbstractSideTableInfo sideTableInfo = mock(AbstractSideTableInfo.class);
        when(sideTableInfo.getCacheSize()).thenReturn(10);
        when(sideTableInfo.getCacheTimeout()).thenReturn(1000000L);

        lruSideCache = new LRUSideCache(sideTableInfo);
        lruSideCache.initCache();
    }


    /** Lookup of an absent key must not throw. The (null) result is not asserted. */
    @Test
    public void getFromCache(){
        lruSideCache.getFromCache("test");

    }

    /** Insert of a null-typed cache object must not throw. */
    @Test
    public void putCache(){
        lruSideCache.putCache("test", CacheObj.buildCacheObj(null, null));
    }
}
35 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/table/AbstractSourceParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.table;
2 |
3 | import org.junit.Test;
4 |
public class AbstractSourceParserTest {

    // TODO(review): empty test — dealVirtualField is never actually exercised
    @Test
    public void dealVirtualField(){

    }
}
12 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/table/RdbParserTestBase.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.table;
2 |
3 | import org.junit.Before;
4 |
5 | /**
6 | * @program: flinkStreamSQL
7 | * @author: wuren
8 | * @create: 2020/11/09
9 | **/
10 | abstract public class RdbParserTestBase {
11 |
12 | protected AbstractTableParser parser;
13 | protected String type;
14 |
15 | @Before
16 | abstract public void setUp();
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/util/MD5UtilTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.util;
2 |
3 | import org.junit.Assert;
4 | import org.junit.Test;
5 |
public class MD5UtilTest {

    /**
     * The digest of "aaaaa" must differ from the well-known MD5 of the empty
     * string (d41d8cd98f00b204e9800998ecf8427e) — a sanity check that the
     * input is actually hashed.
     */
    @Test
    public void testGetMD5String(){
        Assert.assertNotEquals(MD5Utils.getMD5String("aaaaa"), "d41d8cd98f00b204e9800998ecf8427e");
    }

}
14 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/util/PropertiesUtilTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.util;
2 |
3 | import org.junit.Test;
4 |
5 | import java.util.Properties;
6 |
7 | public class PropertiesUtilTest {
8 |
9 | @Test
10 | public void propertiesTrim(){
11 | Properties properties = new Properties();
12 | properties.put("k", "v");
13 | PropertiesUtils.propertiesTrim(properties);
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/util/ReflectionUtilTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.util;
2 |
3 | import org.junit.Test;
4 |
5 | public class ReflectionUtilTest extends Object{
6 |
7 | @Test
8 | public void getDeclaredMethod(){
9 | ReflectionUtilTest reflectionUtilTest = new ReflectionUtilTest();
10 | ReflectionUtils.getDeclaredMethod(reflectionUtilTest, "getDeclaredField", String.class);
11 | }
12 |
13 | @Test
14 | public void getDeclaredField(){
15 | ReflectionUtilTest reflectionUtilTest = new ReflectionUtilTest();
16 | ReflectionUtils.getDeclaredField(reflectionUtilTest, "id");
17 | }
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/watermarker/CustomerWaterMarkerForLongTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.watermarker;
2 |
3 | import org.apache.flink.streaming.api.windowing.time.Time;
4 | import org.apache.flink.types.Row;
5 | import org.junit.Test;
6 | import org.junit.runner.RunWith;
7 | import org.powermock.core.classloader.annotations.PrepareForTest;
8 | import org.powermock.modules.junit4.PowerMockRunner;
9 |
10 | import static org.mockito.Mockito.mock;
11 | import static org.mockito.Mockito.when;
12 |
13 | @RunWith(PowerMockRunner.class)
14 | @PrepareForTest({Time.class})
15 | public class CustomerWaterMarkerForLongTest {
16 |
17 | @Test
18 | public void extractTimestamp(){
19 | Time time = mock(Time.class);
20 | when(time.toMilliseconds()).thenReturn(121200000l);
21 | CustomerWaterMarkerForLong customerWaterMarkerForLong = new CustomerWaterMarkerForLong(time, 1,"");
22 |
23 | Row row = mock(Row.class);
24 | when(row.getField(1)).thenReturn(System.currentTimeMillis());
25 | customerWaterMarkerForLong.extractTimestamp(row);
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/core/src/test/java/com/dtstack/flink/sql/watermarker/CustomerWaterMarkerForTimeStampTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.watermarker;
2 |
3 | import org.apache.flink.streaming.api.windowing.time.Time;
4 | import org.apache.flink.types.Row;
5 | import org.junit.Test;
6 | import org.junit.runner.RunWith;
7 | import org.powermock.core.classloader.annotations.PrepareForTest;
8 | import org.powermock.modules.junit4.PowerMockRunner;
9 |
10 | import static org.mockito.Mockito.mock;
11 | import static org.mockito.Mockito.when;
12 |
13 | @RunWith(PowerMockRunner.class)
14 | @PrepareForTest({Time.class})
15 | public class CustomerWaterMarkerForTimeStampTest {
16 |
17 | @Test
18 | public void extractTimestamp(){
19 | Time time = mock(Time.class);
20 | when(time.toMilliseconds()).thenReturn(121200000l);
21 | CustomerWaterMarkerForTimeStamp customerWaterMarkerForTimeStamp = new CustomerWaterMarkerForTimeStamp(time, 1,"");
22 |
23 | Row row = mock(Row.class);
24 | when(row.getField(1)).thenReturn(System.currentTimeMillis());
25 | customerWaterMarkerForTimeStamp.extractTimestamp(row);
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/db2/db2-side/db2-all-side/src/test/java/com/dtstack/flink/sql/side/db2/Db2AllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.db2;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 |
public class Db2AllReqRowTest extends RdbAllReqRowTestBase {

    /** Supplies the DB2 ALL-cache implementation class to the shared RDB test base. */
    @Override
    protected void init() {
        clazz = Db2AllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/db2/db2-side/db2-async-side/src/test/java/com/dtstack/flink/sql/side/db2/Db2AsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.db2;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
public class Db2AsyncReqRowTest extends RdbAsyncReqRowTestBase {

    /** Supplies the DB2 async lookup implementation class to the shared RDB test base. */
    @Override
    protected void init() {
        clazz = Db2AsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/db2/db2-side/db2-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.db2
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.db2.core
14 | 1.0-SNAPSHOT
15 | jar
16 | db2-side-core
17 |
18 |
19 |
--------------------------------------------------------------------------------
/db2/db2-side/db2-side-core/src/test/java/com/dtstack/flink/sql/side/db2/table/Db2SideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.db2.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
public class Db2SideParserTest {

    // NOTE(review): @Test is commented out — this test is currently disabled
    // and will never run; re-enable it or remove the method.
    // @Test
    public void getTableInfo() {
        Db2SideParser sideParser = new Db2SideParser();

        final String tableName = "table_foo";
        // includes PRIMARY KEY and PERIOD FOR SYSTEM_TIME clauses to exercise
        // the side-table parsing paths
        final String fieldsInfo = "id INT, name VARCHAR , PRIMARY KEY (id)\n" +
                "  , PERIOD FOR SYSTEM_TIME";

        Map props = new HashMap();
        props.put("url", "jdbc:mysql://foo:3306/db_foo");
        props.put("tablename", "table_foo");
        props.put("username", "foo");
        props.put("password", "foo");

        AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);

        final String NORMAL_TYPE = "db2";
        final String table_type = tableInfo.getType();
        Assert.assertTrue(NORMAL_TYPE.equals(table_type));
    }

}
--------------------------------------------------------------------------------
/db2/db2-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.db2
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.db2
14 | pom
15 | db2-side
16 |
17 |
18 | db2-async-side
19 | db2-side-core
20 | db2-all-side
21 |
22 |
23 |
24 | 1.0-SNAPSHOT
25 |
26 |
27 |
28 |
29 | com.dtstack.flink
30 | sql.side.rdb
31 | ${rdb.side.version}
32 |
33 |
34 |
35 | com.dtstack.flink
36 | sql.side.rdb
37 | ${rdb.side.version}
38 | test-jar
39 | test
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/db2/db2-sink/src/main/java/com/dtstack/flink/sql/sink/db/DbSink.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.db;
2 |
3 | import com.dtstack.flink.sql.sink.rdb.JDBCOptions;
4 | import com.dtstack.flink.sql.sink.rdb.AbstractRdbSink;
5 | import com.dtstack.flink.sql.sink.rdb.format.JDBCUpsertOutputFormat;
6 |
public class DbSink extends AbstractRdbSink {

    public DbSink() {
        // DB2-specific SQL dialect used for statement generation
        super(new DbDialect());
    }
    /**
     * Builds the JDBC upsert output format from this sink's configured
     * connection, batching and update-mode settings (all fields inherited
     * from AbstractRdbSink).
     */
    @Override
    public JDBCUpsertOutputFormat getOutputFormat() {
        JDBCOptions jdbcOptions = JDBCOptions.builder()
                .setDbUrl(dbUrl)
                .setDialect(jdbcDialect)
                .setUsername(userName)
                .setPassword(password)
                .setSchema(schema)
                .setTableName(tableName)
                .build();

        return JDBCUpsertOutputFormat.builder()
                .setOptions(jdbcOptions)
                .setFieldNames(fieldNames)
                .setFlushMaxSize(batchNum)
                .setFlushIntervalMills(batchWaitInterval)
                .setFieldTypes(sqlTypes)
                .setKeyFields(primaryKeys)
                .setAllReplace(allReplace)
                .setErrorLimit(errorLimit)
                .setUpdateMode(updateMode).build();
    }
}
35 |
--------------------------------------------------------------------------------
/db2/db2-sink/src/main/java/com/dtstack/flink/sql/sink/db/table/DbSinkParser.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.db.table;
2 |
3 | import com.dtstack.flink.sql.core.rdb.JdbcCheckKeys;
4 | import com.dtstack.flink.sql.sink.rdb.table.RdbSinkParser;
5 | import com.dtstack.flink.sql.table.AbstractTableInfo;
6 |
7 | import java.util.Map;
8 |
public class DbSinkParser extends RdbSinkParser {

    // connector type written into the resulting table info
    private static final String CURR_TYPE = "db2";

    /**
     * Delegates parsing to the generic RDB sink parser, forcing the IBM DB2
     * JDBC driver and tagging the result with the "db2" type.
     *
     * @param tableName  sink table name
     * @param fieldsInfo raw field-definition string from the DDL
     * @param props      WITH-clause properties; mutated to carry the driver name
     * @return parsed table info typed as db2
     */
    @Override
    public AbstractTableInfo getTableInfo(String tableName, String fieldsInfo, Map props) {
        props.put(JdbcCheckKeys.DRIVER_NAME, "com.ibm.db2.jcc.DB2Driver");
        AbstractTableInfo tableInfo = super.getTableInfo(tableName, fieldsInfo, props);
        tableInfo.setType(CURR_TYPE);
        return tableInfo;
    }
}
21 |
--------------------------------------------------------------------------------
/db2/db2-sink/src/test/java/com/dtstack/flink/sql/sink/db/table/DbSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.db.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
10 | public class DbSinkParserTest {
11 |
12 | // @Test
13 | public void getTableInfo() {
14 | DbSinkParser sinkParser = new DbSinkParser();
15 |
16 | final String tableName = "table_foo";
17 | final String fieldsInfo = "id INT, name VARCHAR";
18 |
19 | Map props = new HashMap();
20 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
21 | props.put("tablename", "table_foo");
22 | props.put("username", "foo");
23 | props.put("password", "foo");
24 |
25 | AbstractTableInfo tableInfo= sinkParser.getTableInfo(tableName, fieldsInfo, props);
26 |
27 | final String NORMAL_TYPE = "db2";
28 | final String table_type = tableInfo.getType();
29 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
30 | }
31 | }
--------------------------------------------------------------------------------
/dirtyData/console/src/test/java/com/dtstack/flink/sql/dirty/console/TestPrintDirtyDataConsumer.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.dirty.console;
2 |
3 | /**
4 | * @author tiezhu
5 | * Company dtstack
6 | * Date 2020/8/28 星期五
7 | */
public class TestPrintDirtyDataConsumer {
    // Placeholder entry point: no console dirty-data consumer test is implemented yet.
    public static void main(String[] args) {

    }
}
13 |
--------------------------------------------------------------------------------
/dirtyData/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.dirtyConsumer
13 | pom
14 |
15 | console
16 | mysql
17 |
18 |
19 |
20 | 3.8.1
21 | 1.0-SNAPSHOT
22 |
23 |
24 |
25 |
26 | junit
27 | junit
28 | ${junit.version}
29 | test
30 |
31 |
32 |
33 | com.dtstack.flink
34 | sql.core
35 | ${sql.core.version}
36 | provided
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/docs/colType.md:
--------------------------------------------------------------------------------
1 | | 支持的类型 | java对应类型 |
2 | | ------ | ----- |
3 | | boolean | Boolean |
4 | | int | Integer |
5 | | integer| Integer |
6 | | bigint | Long |
7 | | tinyint | Byte |
8 | | smallint | Short|
9 | | varchar | String |
10 | | real | Float |
11 | | float | Float|
12 | | double | Double|
13 | | date | Date |
14 | | timestamp | Timestamp |
15 | | decimal |BigDecimal|
--------------------------------------------------------------------------------
/docs/images/streamsql_dd.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DTStack/flinkStreamSQL/fd15f55eaebc7437fc9856d4d0855d54bdc0a1d3/docs/images/streamsql_dd.jpg
--------------------------------------------------------------------------------
/docs/newMetric.md:
--------------------------------------------------------------------------------
1 | ### 1. 自定义的性能指标(新增)
2 |
3 | ### 开启prometheus 需要设置的 confProp 参数
4 | * metrics.reporter.promgateway.class: org.apache.flink.metrics.prometheus.PrometheusPushGatewayReporter
5 | * metrics.reporter.promgateway.host: prometheus pushgateway的地址
6 | * metrics.reporter.promgateway.port:prometheus pushgateway的端口
7 | * metrics.reporter.promgateway.jobName: 实例名称
8 | * metrics.reporter.promgateway.randomJobNameSuffix: 是否在实例名称后面添加随机字符串(默认:true)
9 | * metrics.reporter.promgateway.deleteOnShutdown: 是否在停止的时候删除数据(默认false)
10 |
11 | #### kafka插件
12 | * 业务延迟: flink_taskmanager_job_task_operator_dtEventDelay(单位s)
13 | 数据本身的时间和进入flink的当前时间的差值.
14 |
15 | * 各个输入源的脏数据:flink_taskmanager_job_task_operator_dtDirtyData
16 | 从kafka获取的数据解析失败的视为脏数据
17 |
18 | * 各Source的数据输入TPS: flink_taskmanager_job_task_operator_dtNumRecordsInRate
19 | kafka接受的记录数(未解析前)/s
20 |
21 | * 各Source的数据输入RPS: flink_taskmanager_job_task_operator_dtNumRecordsInResolveRate
22 | kafka接受的记录数(解析后)/s
23 |
24 | * 各Source的数据输入BPS: flink_taskmanager_job_task_operator_dtNumBytesInRate
25 | kafka接受的字节数/s
26 |
27 | * Kafka作为输入源的各个分区的延迟数: flink_taskmanager_job_task_operator_topic_partition_dtTopicPartitionLag
28 | 当前kafka10,kafka11有采集该指标
29 |
30 | * 各个输出源RPS: flink_taskmanager_job_task_operator_dtNumRecordsOutRate
31 | 写入的外部记录数/s
32 |
--------------------------------------------------------------------------------
/docs/plugin/consoleSink.md:
--------------------------------------------------------------------------------
1 | ## 1.格式:
2 | ```
3 | CREATE TABLE tableName(
4 | colName colType,
5 | ...
6 | colNameX colType
7 | )WITH(
8 | type ='console',
9 | parallelism ='parllNum'
10 | );
11 |
12 | ```
13 |
14 | ## 2.支持版本
15 | 没有限制
16 |
17 | ## 3.表结构定义
18 |
19 | |参数名称|含义|
20 | |----|---|
21 | | tableName| 在 sql 中使用的名称;即注册到flink-table-env上的名称|
22 | | colName | 列名称|
23 | | colType | 列类型 [colType支持的类型](../colType.md)|
24 |
25 | ## 4.参数:
26 |
27 | |参数名称|含义|是否必填|默认值|
28 | |----|----|----|----|
29 | |type |表明 输出表类型[console]|是||
30 | | parallelism | 并行度设置|否|1|
31 |
32 | ## 5.样例:
33 | ```
34 | CREATE TABLE MyResult(
35 | name VARCHAR,
36 | channel VARCHAR
37 | )WITH(
38 | type ='console',
39 | parallelism ='1'
40 | )
41 | ```
42 |
43 | ## 6.输出结果:
44 | ```
45 | +------+---------+
46 | | name | channel |
47 | +------+---------+
48 | | aa | 02 |
49 | +------+---------+
50 | ```
--------------------------------------------------------------------------------
/docs/plugin/httpSink.md:
--------------------------------------------------------------------------------
1 | ## 1.格式:
2 | ```
3 | CREATE TABLE tableName(
4 | colName colType,
5 | ...
6 | colNameX colType
7 | )WITH(
8 | type ='http',
9 | url ='http://xxx:8080/test/returnAll'
10 | ,flag ='aa'
11 | ,delay = '10'
12 | );
13 |
14 | ```
15 |
16 |
17 | ## 3.表结构定义
18 |
19 | |参数名称|含义|
20 | |----|---|
21 | | tableName| http表名称|
22 | | colName | 列名称|
23 | | colType | 列类型 [colType支持的类型](../colType.md)|
24 |
25 | ## 4.参数:
26 |
27 | |参数名称|含义|是否必填|默认值|
28 | |----|----|----|----|
29 | |http |结果表插件类型,必须为http|是||
30 | |url | 地址 |是||
31 | |flag | 结果返回标识符|否||
32 | |delay |每条结果数据之间延时时间 |否|默认20毫秒|
33 |
34 |
35 | ## 5.样例:
36 |
37 | ```
38 |
39 | -- {"name":"maqi","id":1001}
40 | CREATE TABLE sourceIn (
41 | id int,
42 | name VARCHAR
43 | )WITH(
44 | type = 'kafka',
45 | bootstrapServers = 'localhost:9092',
46 | topic ='test1'
47 | );
48 |
49 | CREATE TABLE sinkOut (
50 | id int
51 | , name varchar
52 | ) WITH (
53 | type ='http',
54 | url ='http://xxx:8080/test/returnAll'
55 | ,flag ='aa'
56 | ,delay = '10'
57 | );
58 |
59 | insert into sinkOut select id,name from sourceIn;
60 |
61 | ```
62 |
63 | 发送数据:{"name":"maqi","id":1001}
64 | 结果数据:
65 | 1.flag不填或者为空串:{"name":"maqi","id":1001}
66 | 2.flag有内容:{"flag":"11111111","name":"maqi","id":1001,"tableName":"sinkOut"}
67 |
--------------------------------------------------------------------------------
/docs/plugin/impalaColType.md:
--------------------------------------------------------------------------------
1 | | 支持的类型 | java对应类型 |
2 | | ------ | ----- |
3 | | boolean | Boolean |
4 | | char | Character |
5 | | double | Double|
6 | | float | Float|
7 | | tinyint | Byte |
8 | | smallint | Short|
9 | | int | Integer |
10 | | bigint | Long |
11 | | decimal |BigDecimal|
12 | | string | String |
13 | | varchar | String |
14 | | timestamp | Timestamp |
--------------------------------------------------------------------------------
/docs/plugin/polardbSink.md:
--------------------------------------------------------------------------------
1 | ## 1.格式:
2 | ```
3 | CREATE TABLE tableName(
4 | colName colType,
5 | ...
6 | colNameX colType
7 | )WITH(
8 | type ='polardb',
9 | url ='jdbcUrl',
10 | userName ='userName',
11 | password ='pwd',
12 | tableName ='tableName',
13 | parallelism ='parllNum'
14 | );
15 |
16 | ```
17 |
18 | ## 2.支持版本
19 | mysql-8.0.16
20 |
21 | ## 3.表结构定义
22 |
23 | |参数名称|含义|
24 | |----|---|
25 | | tableName| polardb表名称|
26 | | colName | 列名称|
27 | | colType | 列类型 [colType支持的类型](../colType.md)|
28 |
29 | ## 4.参数:
30 |
31 | |参数名称|含义|是否必填|默认值|
32 | |----|----|----|----|
33 | |type |表明 输出表类型 polardb|是||
34 | |url | 连接polardb数据库 jdbcUrl |是||
35 | |userName | polardb连接用户名 |是||
36 | | password | polardb连接密码|是||
37 | | tableName | polardb表名称|是||
38 | | parallelism | 并行度设置|否|1|
39 |
40 | ## 5.样例:
41 | ```
42 | CREATE TABLE MyResult(
43 | channel VARCHAR,
44 | pv VARCHAR
45 | )WITH(
46 | type ='polardb',
47 | url ='jdbc:mysql://xxx.xxx.xxx:3306/test?charset=utf8',
48 | userName ='dtstack',
49 | password ='abc123',
50 | tableName ='pv2',
51 | parallelism ='1'
52 | );
53 | ```
--------------------------------------------------------------------------------
/docs/plugin/serverSocketSource.md:
--------------------------------------------------------------------------------
1 |
2 | ## 1.数据格式:
3 | ```
4 | 数据现在只支持json格式 {"xx":"bb","cc":"dd"}
5 |
6 | CREATE TABLE MyTable(
7 | channel varchar,
8 | pv int,
9 | xctime date,
10 | xtime date
11 |
12 | )WITH(
13 | type='serversocket',
14 | host='127.0.0.1',
15 | port='8888',
16 | delimiter=';',
17 | maxNumRetries='100'
18 | );
19 | ```
20 |
21 |
22 | ## 2.参数:
23 |
24 | |参数名称|含义|是否必填|默认值|
25 | |----|---|---|---|
26 | |type | serversocket | 是||
27 | |host | server host|是||
28 | |port | server port|是||
29 | |delimiter| 每条json数据的分割符(比如:;)|是||
30 | |maxNumRetries| 最大重连次数 (大于0)|是||
31 |
32 |
33 | ## 3.Server端样例:
34 | ```
35 | String JsonStr = "{\"CHANNEL\":\"xc3\",\"pv\":1234567,\"xdate\":\"2018-12-07\",\"xtime\":\"2018-12-15\"};";
36 |
37 | ```
38 |
--------------------------------------------------------------------------------
/docs/plugin/sideParams.md:
--------------------------------------------------------------------------------
1 | ## 维表参数
2 |
3 |
4 | ### 维表参数
5 |
6 | 维表需要的基本属性,每个插件还需要提供连接所需的基本信息。
7 |
8 | |参数名称|含义|是否必填|默认值|
9 | |----|---|---|----|
10 | | type | 维表类型, 例如:mysql |是||
11 | | tableName| 表名称|是||
12 | | cache | 维表缓存策略(NONE/LRU/ALL)|否|LRU|
13 | | partitionedJoin | 是否在維表join之前先根据设定的key 做一次keyby操作(可以減少维表的数据缓存量)|否|false|
14 | | parallelism | 处理后的数据流并行度|否||
15 |
16 | ### 缓存策略
17 |
18 | - NONE:不做内存缓存。每条流数据触发一次维表查询操作。
19 | - ALL: 任务启动时,一次性加载所有数据到内存,并进行缓存。适用于维表数据量较小的情况。
20 | - LRU: 任务执行时,根据维表关联条件使用异步算子加载维表数据,并进行缓存。
21 |
22 | #### ALL全量维表参数
23 |
24 | |参数名称|含义|默认值|
25 | |----|---|----|
26 | | cacheTTLMs | 缓存周期刷新时间 |60,单位s|
27 |
28 | #### LRU异步维表参数
29 |
30 | |参数名称|含义|默认值|
31 | |----|---|----|
32 | | cacheTTLMs | LRU缓存写入后超时时间 |60,单位s|
33 | | cacheSize | LRU缓存大小 |10000|
34 | | cacheMode | 异步请求处理有序还是无序,可选:ordered,unordered |ordered|
35 | | asyncCapacity | 异步线程容量 |100|
36 | | asyncTimeout | 异步处理超时时间 |10000,单位毫秒|
37 | | asyncPoolSize | 异步查询DB最大线程池,上限20。适用于MYSQL,ORACLE,SQLSERVER,POSTGRESQL,DB2,POLARDB,CLICKHOUSE,IMPALA维表插件|min(20,Runtime.getRuntime().availableProcessors() * 2)|
38 |
39 |
40 |
--------------------------------------------------------------------------------
/docs/pluginsInfo.md:
--------------------------------------------------------------------------------
1 | ### 1 插件列表
2 | #### 1.1 源表插件
3 | * [kafka 源表插件](plugin/kafkaSource.md)
4 |
5 | #### 1.2 结果表插件
6 | * [kafka 结果表插件](plugin/kafkaSink.md)
7 | * [elasticsearch5 结果表插件](plugin/elasticsearch5Sink.md)
8 | * [elasticsearch6 结果表插件](plugin/elasticsearch6Sink.md)
9 | * [elasticsearch7 结果表插件](plugin/elasticsearch7Sink.md)
10 | * [hbase 结果表插件](plugin/hbaseSink.md)
11 | * [mysql 结果表插件](plugin/mysqlSink.md)
12 | * [oracle 结果表插件](plugin/oracleSink.md)
13 | * [mongo 结果表插件](plugin/mongoSink.md)
14 | * [redis 结果表插件](plugin/redisSink.md)
15 | * [cassandra 结果表插件](plugin/cassandraSink.md)
16 | * [kudu 结果表插件](plugin/kuduSink.md)
17 | * [postgresql 结果表插件](plugin/postgresqlSink.md)
18 | * [clickhouse 结果表插件](plugin/clickhouseSink.md)
19 | * [impala 结果表插件](plugin/impalaSink.md)
20 | * [db2 结果表插件](plugin/db2Sink.md)
21 | * [sqlserver 结果表插件](plugin/sqlserverSink.md)
22 | * [http 结果表插件](plugin/httpSink.md)
23 |
24 | #### 1.3 维表插件
25 | * [hbase 维表插件](plugin/hbaseSide.md)
26 | * [mysql 维表插件](plugin/mysqlSide.md)
27 | * [elasticsearch6 维表插件](plugin/elasticsearch6Side.md)
28 | * [elasticsearch7 维表插件](plugin/elasticsearch7Side.md)
29 | * [oracle 维表插件](plugin/oracleSide.md)
30 | * [mongo 维表插件](plugin/mongoSide.md)
31 | * [redis 维表插件](plugin/redisSide.md)
32 | * [cassandra 维表插件](plugin/cassandraSide.md)
33 | * [kudu 维表插件](plugin/kuduSide.md)
34 | * [postgresql 维表插件](plugin/postgresqlSide.md)
35 | * [clickhouse 维表插件](plugin/clickhouseSide.md)
36 | * [impala 维表插件](plugin/impalaSide.md)
37 | * [db2 维表插件](plugin/db2Side.md)
38 | * [sqlserver 维表插件](plugin/sqlserverSide.md)
39 |
--------------------------------------------------------------------------------
/docs/pr.md:
--------------------------------------------------------------------------------
1 | ## PR规范
2 |
3 | 1. 建立issue,描述相关问题信息
4 | 2. 基于对应的release分支拉取开发分支
5 | 3. commit 信息:[type-issueid] [module] msg
6 | 1. type 类别
7 | 2. feat:表示是一个新功能(feature)
8 | 3. hotfix:修补bug
9 | 4. docs:改动、增加文档
10 | 5. opt:修改代码风格及opt imports这些,不改动原有执行的代码
11 | 6. test:增加测试
12 |
13 |
14 |
15 | eg:
16 | [hotfix-31280][core] 修复bigdecimal转decimal运行失败问题
17 | [feat-31372][rdb] RDB结果表Upsert模式支持选择更新策略
18 |
19 | 1. 多次提交使用rebase 合并成一个。
20 | 2. pr 名称:[flinkx-issueid][module名称] 标题
--------------------------------------------------------------------------------
/docs/prometheus.md:
--------------------------------------------------------------------------------
1 | ## 使用 prometheus pushgateway 需要设置的 confProp 参数
2 | * metrics.reporter.promgateway.class: org.apache.flink.metrics.prometheus.PrometheusPushGatewayReporter
3 | * metrics.reporter.promgateway.host: prometheus pushgateway的地址
4 | * metrics.reporter.promgateway.port:prometheus pushgateway的端口
5 | * metrics.reporter.promgateway.jobName: 实例名称
6 | * metrics.reporter.promgateway.randomJobNameSuffix: 是否在实例名称后面添加随机字符串(默认:true)
7 | * metrics.reporter.promgateway.deleteOnShutdown: 是否在停止的时候删除数据(默认false)
--------------------------------------------------------------------------------
/elasticsearch5-xh/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.elasticsearch5-xh
13 | elasticsearch5-xh
14 | pom
15 |
16 | elasticsearch5-xh-sink
17 |
18 |
19 |
20 |
21 |
22 | com.dtstack.flink
23 | sql.core
24 | 1.0-SNAPSHOT
25 | provided
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/elasticsearch5/elasticsearch5-sink/src/test/java/com/dtstack/flink/sql/sink/elasticsearch/EsUtilTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.elasticsearch;
2 |
3 | import org.apache.flink.types.Row;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.Arrays;
8 | import java.util.Collections;
9 | import java.util.List;
10 |
11 |
12 | /**
13 | * Company: www.dtstack.com
14 | *
15 | * @author zhufeng
16 | * @date 2020-06-19
17 | */
18 |
19 | public class EsUtilTest {
20 | List fieldNames = Collections.singletonList("name.pv");
21 | List fieldTypes = Arrays.asList("varchar", "varchar");
22 | Row row = new Row(1);
23 |
24 | @Test
25 | public void rowToJsonMapTest() {
26 | boolean test = false;
27 | if (EsUtil.rowToJsonMap(row, fieldNames, fieldTypes).toString().equals("{name={}}")) {
28 | test = true;
29 | }
30 | Assert.assertTrue(test);
31 |
32 | }
33 |
34 | }
35 |
--------------------------------------------------------------------------------
/elasticsearch5/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.elasticsearch5
13 | pom
14 |
15 |
16 | 5.3.3
17 |
18 |
19 |
20 | elasticsearch5-sink
21 |
22 |
23 |
24 | com.dtstack.flink
25 | sql.core
26 | 1.0-SNAPSHOT
27 | provided
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/elasticsearch6/elasticsearch6-side/elasticsearch6-all-side/src/test/java/com/dtstack/flink/sql/side/elasticsearch6/Elasticsearch6AllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.elasticsearch6;
2 |
3 |
4 | import org.apache.flink.table.dataformat.BaseRow;
5 | import org.apache.flink.types.Row;
6 | import org.apache.flink.util.Collector;
7 | import org.junit.Test;
8 |
9 | import static org.mockito.Mockito.mock;
10 |
11 | /**
12 | * Company: www.dtstack.com
13 | *
14 | * @author zhufeng
15 | * @date 2020-07-03
16 | */
public class Elasticsearch6AllReqRowTest {
    // Mockito mock: every call below is a no-op stub, so this test only checks
    // that the API surface is callable — it asserts nothing about behavior.
    Elasticsearch6AllReqRow reqRow = mock(Elasticsearch6AllReqRow.class);

    @Test
    // Known issue: the real object cannot be constructed because SqlKind.inter
    // cannot be assigned inside getKind() during the parameterized-constructor
    // initialization; hence the mock above.
    public void ES6Test() throws Exception {
        BaseRow value = mock(BaseRow.class);
        Collector out = mock(Collector.class);
        BaseRow input = mock(BaseRow.class);
        Object sideInput = mock(Object.class);
        reqRow.initCache();
        reqRow.getFetchSize();
        reqRow.reloadCache();
        reqRow.flatMap(value, out);
        reqRow.fillData(input, sideInput);
    }

}
35 |
--------------------------------------------------------------------------------
/elasticsearch6/elasticsearch6-side/elasticsearch6-async-side/src/test/java/com/dtstack/flink/sql/side/elasticsearch6/Elasticsearch6AsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.elasticsearch6;
2 |
3 | /**
4 | * Company: www.dtstack.com
5 | *
6 | * @author zhufeng
7 | * @date 2020-07-03
8 | */
public class Elasticsearch6AsyncReqRowTest {
    // Known issue: the parameterized constructor fails because SqlKind.inter
    // cannot be assigned inside getKind() during initialization, so the
    // remaining methods cannot be tested for now.
}
12 |
--------------------------------------------------------------------------------
/elasticsearch6/elasticsearch6-side/elasticsearch6-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.elasticsearch6
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.elasticsearch6.core
14 | elasticsearch6-side-core
15 | 1.0-SNAPSHOT
16 | jar
17 |
18 |
19 |
20 | org.elasticsearch.client
21 | elasticsearch-rest-high-level-client
22 | ${elasticsearch.version}
23 |
24 |
25 | junit
26 | junit
27 | 4.12
28 | test
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/elasticsearch6/elasticsearch6-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.elasticsearch6
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.side.elasticsearch6
13 | pom
14 | elasticsearch6-side
15 |
16 | elasticsearch6-all-side
17 | elasticsearch6-async-side
18 | elasticsearch6-side-core
19 |
20 |
21 |
--------------------------------------------------------------------------------
/elasticsearch6/elasticsearch6-sink/src/test/java/com/dtstack/flink/sql/sink/elasticsearch/Es6UtilTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.elasticsearch;
2 |
3 | import junit.framework.Assert;
4 | import org.apache.flink.types.Row;
5 |
6 | import java.util.Arrays;
7 | import java.util.Collections;
8 | import java.util.List;
9 |
10 | import org.junit.Test;
11 |
12 |
13 | /**
14 | * Company: www.dtstack.com
15 | *
16 | * @author zhufeng
17 | * @date 2020-06-23
18 | */
19 | public class Es6UtilTest {
20 | List fieldNames = Collections.singletonList("name.pv");
21 | List fieldTypes = Arrays.asList("varchar", "varchar");
22 | Row row = new Row(1);
23 |
24 | @Test
25 | public void rowToJsonMapTest() {
26 | boolean test = false;
27 | if (Es6Util.rowToJsonMap(row, fieldNames, fieldTypes).toString().equals("{name={}}")) {
28 | test = true;
29 | }
30 | Assert.assertTrue(test);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/elasticsearch6/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.elasticsearch6
13 | pom
14 |
15 |
16 |
17 | 6.8.6
18 |
19 |
20 |
21 | elasticsearch6-side
22 | elasticsearch6-sink
23 |
24 |
25 |
26 |
27 |
28 | com.dtstack.flink
29 | sql.core
30 | 1.0-SNAPSHOT
31 | provided
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/elasticsearch7/elasticsearch7-side/elasticsearch7-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.elasticsearch7
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.side.elasticsearch7.core
13 | elasticsearch7-side-core
14 | 1.0-SNAPSHOT
15 | jar
16 |
17 |
18 |
19 | org.elasticsearch.client
20 | elasticsearch-rest-high-level-client
21 | ${elasticsearch.version}
22 |
23 |
24 | junit
25 | junit
26 | 4.12
27 | test
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/elasticsearch7/elasticsearch7-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.elasticsearch7
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | pom
13 | sql.side.elasticsearch7
14 | elasticsearch7-side
15 |
16 |
17 | elasticsearch7-all-side
18 | elasticsearch7-async-side
19 | elasticsearch7-side-core
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/elasticsearch7/elasticsearch7-sink/src/main/java/org/apache/flink/streaming/connectors/elasticsearch/index/StaticIndexGenerator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package org.apache.flink.streaming.connectors.elasticsearch.index;
20 |
21 | import org.apache.flink.types.Row;
22 |
23 | /**
24 | * A static {@link IndexGenerator} which generate fixed index name.
25 | */
26 | public class StaticIndexGenerator extends IndexGeneratorBase {
27 |
28 | public StaticIndexGenerator(String index) {
29 | super(index);
30 | }
31 |
32 | public String generate(Row row) {
33 | return index;
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/elasticsearch7/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 |
13 |
14 | 7.9.2
15 |
16 |
17 | sql.elasticsearch7
18 | pom
19 |
20 | elasticsearch7-side
21 | elasticsearch7-sink
22 |
23 |
24 |
25 |
26 |
27 | junit
28 | junit
29 | 3.8.1
30 | test
31 |
32 |
33 |
34 | com.dtstack.flink
35 | sql.core
36 | 1.0-SNAPSHOT
37 | provided
38 |
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/file/file-source/src/main/java/com/dtstack/flink/sql/source/file/throwable/LengthMismatchException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.source.file.throwable;
20 |
21 | import org.apache.flink.util.FlinkRuntimeException;
22 |
23 | /**
24 | * @author tiezhu
25 | * @since 2021/5/6 3:39 下午
26 | */
27 |
28 | public class LengthMismatchException extends FlinkRuntimeException {
29 | public LengthMismatchException(String message) {
30 | super(message);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/file/file-source/src/test/java/com/dtstack/flink/sql/source/file/FileSourceTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.source.file;
20 |
21 | /**
22 | * @author tiezhu
23 | * @date 2021/3/10 星期三
24 | * Company dtstack
25 | */
// Placeholder: no file-source test cases are implemented yet.
public class FileSourceTest {
}
28 |
--------------------------------------------------------------------------------
/hbase/hbase-side/hbase-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.hbase
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.hbase.core
14 | jar
15 |
16 |
--------------------------------------------------------------------------------
/hbase/hbase-side/hbase-side-core/src/main/java/com/dtstack/flink/sql/side/hbase/Md5ReplaceOperator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.side.hbase;
22 |
23 | import com.dtstack.flink.sql.side.hbase.enums.EReplaceOpType;
24 | import com.dtstack.flink.sql.util.MD5Utils;
25 |
26 | /**
27 | * Reason:
28 | * Date: 2018/8/23
29 | * Company: www.dtstack.com
30 | * @author xuchao
31 | */
32 |
33 | public class Md5ReplaceOperator extends AbstractReplaceOperator {
34 |
35 | public Md5ReplaceOperator(EReplaceOpType opType) {
36 | super(opType);
37 | }
38 |
39 | @Override
40 | String doFunc(String replaceStr) {
41 | return MD5Utils.getMD5String(replaceStr);
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/hbase/hbase-side/hbase-side-core/src/main/java/com/dtstack/flink/sql/side/hbase/enums/EReplaceOpType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.side.hbase.enums;
22 |
23 | /**
24 | * Reason:
25 | * Date: 2018/8/23
26 | * Company: www.dtstack.com
27 | * @author xuchao
28 | */
29 |
public enum EReplaceOpType {
    /**
     * No replace function is applied.
     */
    NO_FUNC,
    /**
     * Replace via the MD5 function.
     */
    MD5_FUNC;
}
40 |
--------------------------------------------------------------------------------
/hbase/hbase-side/hbase-side-core/src/main/java/com/dtstack/flink/sql/side/hbase/enums/EReplaceType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.side.hbase.enums;
22 |
23 | /**
24 | * Reason:
25 | * Date: 2018/8/23
26 | * Company: www.dtstack.com
27 | *
28 | * @author xuchao
29 | */
public enum EReplaceType {
    /**
     * Parameter placeholder.
     */
    PARAM,
    /**
     * Function placeholder.
     */
    FUNC,
    /**
     * Constant value.
     */
    CONSTANT;
}
44 |
--------------------------------------------------------------------------------
/hbase/hbase-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.hbase
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.side.hbase
13 | hbase-side
14 |
15 | hbase-all-side
16 | hbase-async-side
17 | hbase-side-core
18 |
19 |
20 | pom
21 |
22 |
23 |
--------------------------------------------------------------------------------
/hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/Md5ReplaceOperator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.sink.hbase;
22 |
23 | import com.dtstack.flink.sql.sink.hbase.enums.EReplaceOpType;
24 | import com.dtstack.flink.sql.util.MD5Utils;
25 |
26 | /**
27 | * Reason:
28 | * Date: 2018/8/23
29 | * Company: www.dtstack.com
30 | * @author xuchao
31 | */
32 |
33 | public class Md5ReplaceOperator extends AbstractReplaceOperator {
34 |
35 | public Md5ReplaceOperator(EReplaceOpType opType) {
36 | super(opType);
37 | }
38 |
39 | @Override
40 | String doFunc(String replaceStr) {
41 | return MD5Utils.getMD5String(replaceStr);
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/enums/EReplaceOpType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.sink.hbase.enums;
22 |
23 | /**
24 | * Reason:
25 | * Date: 2018/8/23
26 | * Company: www.dtstack.com
27 | * @author xuchao
28 | */
29 |
public enum EReplaceOpType {
    /**
     * no replace function is applied
     */
    NO_FUNC,
    /**
     * value is replaced by its MD5 hash
     */
    MD5_FUNC;
}
40 |
--------------------------------------------------------------------------------
/hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/enums/EReplaceType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 |
20 |
21 | package com.dtstack.flink.sql.sink.hbase.enums;
22 |
23 | /**
24 | * Reason:
25 | * Date: 2018/8/23
26 | * Company: www.dtstack.com
27 | *
28 | * @author xuchao
29 | */
public enum EReplaceType {
    /**
     * parameter placeholder
     */
    PARAM,
    /**
     * function call
     */
    FUNC,
    /**
     * constant value
     */
    CONSTANT;
}
44 |
--------------------------------------------------------------------------------
/http/http-sink/src/test/java/com/dtstack/flink/sql/flink/http/HttpSinkTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.flink.http;
2 |
3 | /**
4 | * @author tiezhu
5 | * @date 2021/3/18 星期四
6 | * Company dtstack
7 | */
public class HttpSinkTest {
    // NOTE(review): empty placeholder — no tests implemented for the HTTP sink yet.
}
10 |
--------------------------------------------------------------------------------
/http/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.http
13 | pom
14 |
15 | http-sink
16 |
17 |
18 |
19 |
20 | com.dtstack.flink
21 | sql.core
22 | 1.0-SNAPSHOT
23 | provided
24 |
25 |
26 |
27 | org.apache.httpcomponents
28 | httpclient
29 | 4.5.13
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/impala/impala-side/impala-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.impala
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.impala.core
14 | 1.0-SNAPSHOT
15 | jar
16 | impala-side-core
17 |
18 |
19 |
--------------------------------------------------------------------------------
/impala/impala-sink/src/main/java/com/dtstack/flink/sql/sink/impala/EAuthMech.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.sink.impala;
20 |
21 | /**
22 | * impala kdnc AuthMech params
23 | * Date: 2020/2/18
24 | * Company: www.dtstack.com
25 | * @author maqi
26 | */
public enum EAuthMech {
    /** 0 - no authentication. */
    NoAuthentication(0),
    /** 1 - Kerberos. */
    Kerberos(1),
    /** 2 - user name only. */
    UserName(2),
    /** 3 - user name and password. */
    NameANDPassword(3);

    // Numeric AuthMech value passed to the Impala JDBC driver.
    // Declared final: enum constants are shared singletons and must be immutable.
    private final int type;

    EAuthMech(int type) {
        this.type = type;
    }

    /**
     * @return the numeric AuthMech value for this authentication mode
     */
    public int getType() {
        return this.type;
    }
}
47 |
--------------------------------------------------------------------------------
/impala/impala-sink/src/test/java/com/dtstack/flink/sql/sink/impala/ImpalaSinkTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.impala;
2 |
3 | import com.dtstack.flink.sql.sink.impala.table.ImpalaTableInfo;
4 | import com.dtstack.flink.sql.sink.rdb.dialect.JDBCDialect;
5 | import com.dtstack.flink.sql.sink.rdb.format.JDBCUpsertOutputFormat;
6 | import org.junit.Before;
7 | import org.junit.Test;
8 | import org.mockito.InjectMocks;
9 | import org.mockito.Mock;
10 | import org.mockito.MockitoAnnotations;
11 | import org.powermock.api.support.membermodification.MemberModifier;
12 | import org.powermock.reflect.Whitebox;
13 |
14 | import java.util.Optional;
15 |
16 | import static org.mockito.Mockito.when;
17 |
18 | public class ImpalaSinkTest {
19 |
20 | @Mock
21 | JDBCDialect jdbcDialect;
22 |
23 | @InjectMocks
24 | ImpalaSink sink = new ImpalaSink();;
25 |
26 | ImpalaTableInfo tableInfo = new ImpalaTableInfo();
27 | @Before
28 | public void setUp () {
29 | MockitoAnnotations.initMocks(this);
30 | tableInfo.setAuthMech(EAuthMech.NoAuthentication.ordinal());
31 | Whitebox.setInternalState(sink, "impalaTableInfo", tableInfo);
32 | }
33 |
34 | }
--------------------------------------------------------------------------------
/kafka-base/kafka-base-sink/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
20 |
23 | 4.0.0
24 |
25 | sql.kafka-base
26 | com.dtstack.flink
27 | 1.0-SNAPSHOT
28 | ../pom.xml
29 |
30 |
31 | sql.sink.kafka-base
32 | kafka-base-sink
33 |
34 | jar
35 |
36 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-sink/src/test/java/com/dtstack/flink/sql/sink/kafka/CustomerFlinkPartitionTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.kafka;
2 |
3 | import com.google.common.collect.Lists;
4 | import org.junit.Test;
5 |
6 | public class CustomerFlinkPartitionTest {
7 |
8 | @Test
9 | public void testCustomerFlinkPartition(){
10 | CustomerFlinkPartition customerFlinkPartition = new CustomerFlinkPartition();
11 | customerFlinkPartition.open(1, 1);
12 | int[] partition = new int[1];
13 | partition[0] = 1;
14 | customerFlinkPartition.partition(null, "key".getBytes(), "value".getBytes(), "topic", partition);
15 | customerFlinkPartition.partition(null, "key".getBytes(), "value".getBytes(), "topic", partition);
16 | customerFlinkPartition.hashCode();
17 | customerFlinkPartition.equals(customerFlinkPartition);
18 |
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-sink/src/test/java/com/dtstack/flink/sql/sink/kafka/table/KafkaSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.kafka.table;
2 |
3 | import com.google.common.collect.Maps;
4 | import org.junit.Test;
5 |
6 | import java.util.Map;
7 |
8 | public class KafkaSinkParserTest {
9 |
10 | @Test
11 | public void getTableInfo(){
12 | KafkaSinkParser kafkaSinkParser = new KafkaSinkParser();
13 | Map prop = Maps.newHashMap();
14 | prop.put(KafkaSinkTableInfo.TYPE_KEY.toLowerCase(), "kafka");
15 | prop.put(KafkaSinkTableInfo.SINK_DATA_TYPE.toLowerCase(), "json");
16 | prop.put(KafkaSinkTableInfo.SCHEMA_STRING_KEY.toLowerCase(), "kafka");
17 | prop.put(KafkaSinkTableInfo.CSV_FIELD_DELIMITER_KEY.toLowerCase(), "kafka");
18 | prop.put(KafkaSinkTableInfo.BOOTSTRAPSERVERS_KEY.toLowerCase(), "kafka");
19 | prop.put(KafkaSinkTableInfo.TOPIC_KEY.toLowerCase(), "kafka");
20 | prop.put(KafkaSinkTableInfo.ENABLE_KEY_PARTITION_KEY.toLowerCase(), "kafka");
21 | prop.put(KafkaSinkTableInfo.PARALLELISM_KEY.toLowerCase(), "1");
22 | prop.put("kafka.test", "1");
23 | kafkaSinkParser.getTableInfo("tablea", "a varchar as a", prop);
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-sink/src/test/java/com/dtstack/flink/sql/sink/kafka/table/KafkaSinkTableInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.kafka.table;
2 |
3 | import org.junit.Test;
4 |
5 | public class KafkaSinkTableInfoTest {
6 |
7 | @Test
8 | public void testKafkaSinkTableInfo(){
9 | KafkaSinkTableInfo kafkaSinkTableInfo = new KafkaSinkTableInfo();
10 | kafkaSinkTableInfo.setType("kafka");
11 | kafkaSinkTableInfo.setTopic("a");
12 | kafkaSinkTableInfo.setBootstrapServers("localhost:9092");
13 | kafkaSinkTableInfo.setSinkDataType("avro");
14 | kafkaSinkTableInfo.setUpdateMode("append");
15 | kafkaSinkTableInfo.setSchemaString("{\"name\":\"channel\",\"type\":\"string\"}");
16 | kafkaSinkTableInfo.check();
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-source/pom.xml:
--------------------------------------------------------------------------------
1 |
19 |
21 | 4.0.0
22 |
23 | sql.kafka-base
24 | com.dtstack.flink
25 | 1.0-SNAPSHOT
26 | ../pom.xml
27 |
28 |
29 | sql.source.kafka-base
30 | kafka-base-source
31 |
32 | jar
33 | http://maven.apache.org
34 |
35 |
36 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-source/src/main/java/com/dtstack/flink/sql/source/kafka/Calculate.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.source.kafka;
20 |
21 | import org.apache.kafka.clients.consumer.internals.SubscriptionState;
22 | import org.apache.kafka.common.TopicPartition;
23 |
24 | /**
25 | * company: www.dtstack.com
26 | * @author: toutian
27 | * create: 2019/12/24
28 | */
@FunctionalInterface
public interface Calculate {

    /**
     * Computes a Long value for the given topic partition from the consumer's
     * subscription state.
     * NOTE(review): the exact semantics (presumably an offset or lag figure) are
     * defined by each implementation — confirm against the call sites.
     *
     * @param subscriptionState the Kafka consumer's internal subscription state
     * @param topicPartition    the partition to compute the value for
     * @return the computed value; nullability depends on the implementation
     */
    Long calc(SubscriptionState subscriptionState, TopicPartition topicPartition);
}
--------------------------------------------------------------------------------
/kafka-base/kafka-base-source/src/main/java/com/dtstack/flink/sql/source/kafka/enums/EKafkaOffset.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.source.kafka.enums;
20 |
21 | /**
22 | * Date: 2020/3/20
23 | * Company: www.dtstack.com
24 | * @author maqi
25 | */
public enum EKafkaOffset {

    // start from the latest offset
    LATEST,
    // start from the earliest offset
    EARLIEST,
    // start from offsets resolved by a timestamp
    TIMESTAMP,
    // no offset-reset strategy specified
    NONE
}
33 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-source/src/main/java/com/dtstack/flink/sql/source/kafka/throwable/KafkaSamplingUnavailableException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.source.kafka.throwable;
20 |
21 | import org.apache.flink.runtime.throwable.ThrowableAnnotation;
22 | import org.apache.flink.runtime.throwable.ThrowableType;
23 | import org.apache.flink.util.FlinkRuntimeException;
24 |
25 | /**
26 | * @author tiezhu
27 | * @since 2021/6/17 星期四
28 | */
29 | @ThrowableAnnotation(ThrowableType.NonRecoverableError)
30 | public class KafkaSamplingUnavailableException extends FlinkRuntimeException {
31 |
32 | public KafkaSamplingUnavailableException(String message) {
33 | super(message);
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-source/src/test/java/com/dtstack/flink/sql/source/kafka/DTJsonRowDeserializationSchemaTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.source.kafka;
2 |
3 | import org.apache.flink.api.common.typeinfo.TypeInformation;
4 | import org.apache.flink.api.common.typeinfo.Types;
5 | import org.apache.flink.api.java.typeutils.RowTypeInfo;
6 | import org.apache.flink.formats.json.DTJsonRowDeserializationSchema;
7 | import org.junit.Test;
8 |
9 | import java.io.IOException;
10 |
11 | import static org.mockito.Mockito.*;
12 |
13 | public class DTJsonRowDeserializationSchemaTest {
14 |
15 | @Test
16 | public void mockDeserialize() throws IOException {
17 | RowTypeInfo rowTypeInfo = mock(RowTypeInfo.class);
18 | when(rowTypeInfo.getFieldNames()).thenReturn(new String[]{"name"});
19 | when(rowTypeInfo.getFieldTypes()).thenReturn(new TypeInformation[]{Types.STRING});
20 | DTJsonRowDeserializationSchema dtJsonRowDeserializationSchema = new DTJsonRowDeserializationSchema(rowTypeInfo);
21 |
22 | DTJsonRowDeserializationSchema dtJsonRowDeserializationSchemaSpy = spy(dtJsonRowDeserializationSchema);
23 | String message = "{\"name\":\"roc\"}";
24 | dtJsonRowDeserializationSchemaSpy.deserialize(message.getBytes());
25 | verify(dtJsonRowDeserializationSchemaSpy).deserialize(message.getBytes());
26 |
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/kafka-base/kafka-base-source/src/test/java/com/dtstack/flink/sql/source/kafka/KafkaSourceParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.source.kafka;
2 |
3 | import com.dtstack.flink.sql.source.kafka.table.KafkaSourceParser;
4 | import com.google.common.collect.Maps;
5 | import org.junit.Test;
6 |
7 | import java.util.Map;
8 |
9 | import static org.mockito.Mockito.*;
10 |
11 | public class KafkaSourceParserTest {
12 |
13 | @Test
14 | public void mockGetTableInfo() throws Exception {
15 | String tableName = "table";
16 | String fieldsInfo = "aa varchar";
17 | Map props = Maps.newHashMap();
18 | props.put("type", "kafka10");
19 | props.put("parallelism", "1");
20 | props.put("bootstrapservers", "localhost");
21 | props.put("groupid", "groupId");
22 | props.put("topic", "topic");
23 | props.put("offsetreset", "1atest");
24 | props.put("topicsspattern", "false");
25 | props.put("sourcedataType", "json");
26 | KafkaSourceParser kafkaSourceParser = new KafkaSourceParser();
27 | KafkaSourceParser kafkaSourceParserSpy = spy(kafkaSourceParser);
28 | kafkaSourceParserSpy.getTableInfo(tableName, fieldsInfo, props);
29 | verify(kafkaSourceParserSpy).getTableInfo(tableName, fieldsInfo, props);
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/kafka/kafka-sink/src/test/java/com/dtstack/flink/sql/sink/kafka/KafkaProduerTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.kafka;
2 |
3 | import org.junit.Test;
4 |
// NOTE(review): class name looks like a typo for "KafkaProducerTest"; renaming
// requires renaming the source file as well.
public class KafkaProduerTest {

    // Empty placeholder — no producer open() behavior is exercised yet.
    @Test
    public void mockOpen(){

    }

}
13 |
--------------------------------------------------------------------------------
/kafka/kafka-sink/src/test/java/com/dtstack/flink/sql/sink/kafka/KafkaSinkTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.kafka;
2 |
3 | import com.dtstack.flink.sql.sink.kafka.table.KafkaSinkTableInfo;
4 | import com.google.common.collect.Sets;
5 | import org.junit.Test;
6 | import static org.mockito.Mockito.*;
7 |
8 | public class KafkaSinkTest {
9 |
10 | @Test
11 | public void mockGenStreamSink(){
12 | KafkaSinkTableInfo kafkaSinkTableInfo = mock(KafkaSinkTableInfo.class);
13 | when(kafkaSinkTableInfo.getName()).thenReturn("roc");
14 | when(kafkaSinkTableInfo.getTopic()).thenReturn("topic");
15 | when(kafkaSinkTableInfo.getBootstrapServers()).thenReturn("localhost");
16 | when(kafkaSinkTableInfo.getKafkaParamKeys()).thenReturn(Sets.newHashSet("aa"));
17 | when(kafkaSinkTableInfo.getKafkaParam("aa")).thenReturn("xx");
18 | when(kafkaSinkTableInfo.getPartitionKeys()).thenReturn(null);
19 | when(kafkaSinkTableInfo.getFields()).thenReturn(new String[]{"aa"});
20 | when(kafkaSinkTableInfo.getFieldClasses()).thenReturn(new Class[]{String.class});
21 | when(kafkaSinkTableInfo.getParallelism()).thenReturn(1);
22 | when(kafkaSinkTableInfo.getSinkDataType()).thenReturn("json");
23 | when(kafkaSinkTableInfo.getUpdateMode()).thenReturn("append");
24 |
25 | KafkaSink kafkaSink = new KafkaSink();
26 | KafkaSink kafkaSinkSpy = spy(kafkaSink);
27 | kafkaSinkSpy.genStreamSink(kafkaSinkTableInfo);
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/kafka/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.kafka
13 | pom
14 |
15 |
16 | kafka-source
17 | kafka-sink
18 |
19 |
20 |
21 |
22 | org.apache.flink
23 | flink-connector-kafka_2.11
24 | ${flink.version}
25 |
26 |
27 |
28 | com.dtstack.flink
29 | sql.core
30 | 1.0-SNAPSHOT
31 | provided
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/kafka09/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.kafka09
14 | pom
15 |
16 |
17 | kafka09-source
18 | kafka09-sink
19 |
20 |
21 |
22 |
23 |
24 | org.apache.flink
25 | flink-connector-kafka-0.9_2.11
26 | ${flink.version}
27 |
28 |
29 |
30 | com.dtstack.flink
31 | sql.core
32 | 1.0-SNAPSHOT
33 | provided
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/kafka10/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.kafka10
13 | pom
14 |
15 |
16 | kafka10-source
17 | kafka10-sink
18 |
19 |
20 |
21 |
22 | org.apache.flink
23 | flink-connector-kafka-0.10_2.11
24 | ${flink.version}
25 |
26 |
27 |
28 | com.dtstack.flink
29 | sql.core
30 | 1.0-SNAPSHOT
31 | provided
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/kafka11/kafka11-sink/src/test/java/com/dtstack/flink/sql/sink/kafka/KafkaSinkTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.kafka;
2 |
3 | import com.dtstack.flink.sql.sink.kafka.table.KafkaSinkTableInfo;
4 | import com.google.common.collect.Sets;
5 | import org.junit.Test;
6 |
7 | import static org.mockito.Mockito.*;
8 |
9 | public class KafkaSinkTest {
10 |
11 | @Test
12 | public void mockGenStreamSink(){
13 | KafkaSinkTableInfo kafkaSinkTableInfo = mock(KafkaSinkTableInfo.class);
14 | when(kafkaSinkTableInfo.getName()).thenReturn("roc");
15 | when(kafkaSinkTableInfo.getTopic()).thenReturn("topic");
16 | when(kafkaSinkTableInfo.getBootstrapServers()).thenReturn("localhost");
17 | when(kafkaSinkTableInfo.getKafkaParamKeys()).thenReturn(Sets.newHashSet("aa"));
18 | when(kafkaSinkTableInfo.getKafkaParam("aa")).thenReturn("xx");
19 | when(kafkaSinkTableInfo.getPartitionKeys()).thenReturn(null);
20 | when(kafkaSinkTableInfo.getFields()).thenReturn(new String[]{"aa"});
21 | when(kafkaSinkTableInfo.getFieldClasses()).thenReturn(new Class[]{String.class});
22 | when(kafkaSinkTableInfo.getParallelism()).thenReturn(1);
23 | when(kafkaSinkTableInfo.getSinkDataType()).thenReturn("json");
24 | when(kafkaSinkTableInfo.getUpdateMode()).thenReturn("append");
25 |
26 | KafkaSink kafkaSink = new KafkaSink();
27 | KafkaSink kafkaSinkSpy = spy(kafkaSink);
28 | kafkaSinkSpy.genStreamSink(kafkaSinkTableInfo);
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/kafka11/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.kafka11
13 | pom
14 |
15 |
16 | kafka11-source
17 | kafka11-sink
18 |
19 |
20 |
21 |
22 | org.apache.flink
23 | flink-connector-kafka-0.11_2.11
24 | ${flink.version}
25 |
26 |
27 |
28 | com.dtstack.flink
29 | sql.core
30 | 1.0-SNAPSHOT
31 | provided
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/kingbase/kingbase-side/kingbase-all-side/src/test/java/com/dtstack/flink/sql/side/kingbase/KingbaseAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.kingbase;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 |
public class KingbaseAllReqRowTest extends RdbAllReqRowTestBase {

    /**
     * Points the shared RDB all-cache test base at the Kingbase implementation.
     */
    @Override
    protected void init() {
        clazz = KingbaseAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/kingbase/kingbase-side/kingbase-async-side/src/test/java/com/dtstack/flink/sql/side/kingbase/KingbaseAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.kingbase;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
public class KingbaseAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    /**
     * Points the shared RDB async test base at the Kingbase implementation.
     */
    @Override
    protected void init() {
        clazz = KingbaseAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/kingbase/kingbase-side/kingbase-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.kingbase
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 | 1.0-SNAPSHOT
13 | jar
14 | sql.side.kingbase.core
15 | kingbase-side-core
16 |
17 |
--------------------------------------------------------------------------------
/kingbase/kingbase-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.kingbase
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.side.kingbase
13 | pom
14 |
15 | kingbase-all-side
16 | kingbase-async-side
17 | kingbase-side-core
18 |
19 |
20 | 1.0-SNAPSHOT
21 |
22 |
23 |
24 |
25 | com.dtstack.flink
26 | sql.side.rdb
27 | ${rdb.side.version}
28 |
29 |
30 |
31 |
32 |
33 | com.dtstack.flink
34 | sql.side.rdb
35 | ${rdb.side.version}
36 | test-jar
37 | test
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/kingbase/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.kingbase
13 | pom
14 |
15 | kingbase-sink
16 | kingbase-side
17 |
18 |
19 |
20 | 8.2.0
21 | 1.0-SNAPSHOT
22 |
23 |
24 |
25 |
26 | com.kingbase8
27 | kingbase8
28 | ${kingbase.connector.version}
29 |
30 |
31 |
32 | com.dtstack.flink
33 | sql.core
34 | ${sql.core.version}
35 | provided
36 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/kudu/kudu-side/kudu-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.kudu
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.kudu.core
14 |
15 |
16 |
17 | com.dtstack.flink
18 | sql.core
19 | 1.0-SNAPSHOT
20 | provided
21 |
22 |
23 | jar
24 |
--------------------------------------------------------------------------------
/kudu/kudu-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.kudu
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.kudu
14 | kudu-side
15 |
16 | kudu-side-core
17 | kudu-all-side
18 | kudu-async-side
19 |
20 |
21 |
22 | pom
23 |
--------------------------------------------------------------------------------
/kudu/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.kudu
14 | pom
15 |
16 | kudu-sink
17 | kudu-side
18 |
19 |
20 |
21 |
22 |
23 |
24 | com.dtstack.flink
25 | sql.core
26 | 1.0-SNAPSHOT
27 | provided
28 |
29 |
30 |
31 | org.apache.kudu
32 | kudu-client
33 | 1.10.1
34 |
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/launcher/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | .idea/
3 | /.idea/*
4 | target/
5 | .class
6 | .project
7 | .classpath
8 | *.eclipse.*
9 | *.iml
10 | plugins/
11 | lib/
12 | dependency-reduced-pom.xml
13 | .vertx/
14 |
--------------------------------------------------------------------------------
/launcher/job/kafkaNotNull.txt:
--------------------------------------------------------------------------------
1 | CREATE TABLE MyTable(
2 | channel varchar as sss not null,
3 | pv INT not null,
4 | xctime bigint,
5 | CHARACTER_LENGTH(channel) as timeLeng,
6 | WATERMARK FOR xctime AS withOffset(xctime,1000)
7 | )WITH(
8 | type='kafka11',
9 | bootstrapServers='172.16.8.107:9092',
10 | offsetReset='latest',
11 | topic='toutou'
12 | );
13 |
14 | CREATE TABLE MyResult(
15 | channel varchar,
16 | pv INT
17 | )WITH(
18 | type='mysql',
19 | url='jdbc:mysql://172.16.8.109:3306/test?charset=utf8',
20 | userName='dtstack',
21 | password='abc123',
22 | tableName='pv'
23 | );
24 |
25 |
26 | insert
27 | into
28 | MyResult
29 | select
30 | a.channel,
31 | a.pv
32 | from
33 | MyTable a
34 |
35 |
--------------------------------------------------------------------------------
/launcher/job/mysqlsideSql.txt:
--------------------------------------------------------------------------------
1 | CREATE TABLE MyTable(
2 | channel STRING,
3 | pv INT,
4 | xctime bigint,
5 | CHARACTER_LENGTH(channel) as timeLeng,
6 | WATERMARK FOR xctime AS withOffset(xctime,1000)
7 | )WITH(
8 | type='kafka09',
9 | bootstrapServers='172.16.8.198:9092',
10 | offsetReset='latest',
11 | topic='nbTest1'
12 | );
13 | CREATE TABLE MyResult(
14 | channel STRING,
15 | pv INT
16 | )WITH(
17 | type='mysql',
18 | url='jdbc:mysql://172.16.8.104:3306/test?charset=utf8',
19 | userName='dtstack',
20 | password='abc123',
21 | tableName='pv'
22 | );
23 |
24 | create table sideTable(
25 | channel String,
26 | xccount int,
27 | PRIMARY KEY(channel),
28 | PERIOD FOR SYSTEM_TIME
29 | )WITH(
30 | type='mysql',
31 | url='jdbc:mysql://172.16.8.104:3306/test?charset=utf8',
32 | userName='dtstack',
33 | password='abc123',
34 | tableName='sidetest',
35 | cache = 'LRU',
36 | cacheTTLMs='10000'
37 | );
38 |
39 | insert
40 | into
41 | MyResult
42 | select
43 | a.channel,
44 | b.xccount
45 | from
46 | MyTable a
47 | join
48 | sideTable b
49 | on a.channel=b.channel
50 | where
51 | b.channel = 'xc'
52 | and a.pv=10;
--------------------------------------------------------------------------------
/launcher/job/sideSql.txt:
--------------------------------------------------------------------------------
1 | CREATE TABLE STREAM_FIELD20(
2 | after.id int AS id,
3 | after.field02 varchar AS field02,
4 | after.field03 varchar AS field03,
5 | after.field04 varchar AS field04,
6 | after.field05 varchar AS field05,
7 | after.field06 varchar AS field06
8 | )WITH(
9 | type ='kafka10',
10 | bootstrapServers ='172.16.101.247:9092',
11 | zookeeperQuorum ='172.16.101.247:2181,172.16.101.141:2181,172.16.100.214:2181/kafka',
12 | offsetReset ='latest',
13 | topic ='toutiantest',
14 | timezone='Asia/Shanghai',
15 | topicIsPattern ='false',
16 | parallelism ='1'
17 | );
18 |
19 | CREATE TABLE RESULT_FIELD20(
20 | id INT,
21 | field02 VARCHAR,
22 | PRIMARY KEY(id)
23 | )WITH(
24 | type='console'
25 | );
26 |
27 |
28 | insert
29 | into
30 | RESULT_FIELD20
31 | SELECT
32 | a.id AS id,
33 | a.field02 as field02
34 | from
35 | STREAM_FIELD20 a
--------------------------------------------------------------------------------
/launcher/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | #控制台输出:
2 | log4j.rootLogger = INFO,consoleAppender
3 | log4j.appender.consoleAppender = org.apache.log4j.ConsoleAppender
4 | log4j.appender.consoleAppender.Target = System.out
5 | log4j.appender.consoleAppender.layout = org.apache.log4j.PatternLayout
6 | log4j.appender.consoleAppender.layout.ConversionPattern =%d %-5p %m %n
7 | log4j.appender.consoleAppender.ImmediateFlush = true
--------------------------------------------------------------------------------
/launcher/src/main/test/java/com/dtstack/flink/sql/launcher/YarnConfLoaderTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.launcher;
2 |
3 | import org.junit.Test;
4 |
public class YarnConfLoaderTest {

    @Test
    public void testGetYarnConf(){
        // Placeholder: no assertions yet. Presumably YarnConfLoader.getYarnConf
        // reads Hadoop conf files from a directory -- TODO(review): add real
        // coverage using a temp directory with a minimal yarn-site.xml.

    }
}
13 |
--------------------------------------------------------------------------------
/localTest/src/test/resources/test.txt:
--------------------------------------------------------------------------------
1 | this is a test
--------------------------------------------------------------------------------
/mongo/mongo-side/mongo-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.mongo
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.mongo.core
14 |
15 |
16 | com.dtstack.flink
17 | sql.core
18 | 1.0-SNAPSHOT
19 | provided
20 |
21 |
22 | jar
23 |
24 |
--------------------------------------------------------------------------------
/mongo/mongo-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.mongo
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.mongo
14 | mongo-side
15 |
16 | mongo-side-core
17 | mongo-async-side
18 | mongo-all-side
19 |
20 |
21 | pom
22 |
23 |
--------------------------------------------------------------------------------
/mongo/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 | sql.mongo
12 | pom
13 |
14 |
15 | mongo-sink
16 | mongo-side
17 |
18 |
19 |
20 |
21 | com.dtstack.flink
22 | sql.core
23 | 1.0-SNAPSHOT
24 | provided
25 |
26 |
27 | org.mongodb
28 | mongo-java-driver
29 | 3.8.2
30 |
31 |
32 | org.mongodb
33 | mongodb-driver-async
34 | 3.8.2
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/mysql/mysql-side/mysql-all-side/src/test/java/com/dtstack/flink/sql/side/mysql/MysqlAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.mysql;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | public class MysqlAllReqRowTest extends RdbAllReqRowTestBase {
8 |
9 | @Override
10 | protected void init() {
11 | clazz = MysqlAllReqRow.class;
12 | }
13 |
14 | @Test
15 | public void testFetch() {
16 | Assert.assertTrue(Integer.MIN_VALUE == reqRow.getFetchSize());
17 | }
18 |
19 | }
--------------------------------------------------------------------------------
/mysql/mysql-side/mysql-async-side/src/test/java/com/dtstack/flink/sql/side/mysql/MysqlAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.mysql;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
/**
 * Smoke test for MysqlAsyncReqRow; all shared checks live in RdbAsyncReqRowTestBase.
 */
public class MysqlAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    @Override
    protected void init() {
        // Concrete async side-table implementation the base fixture instantiates.
        clazz = MysqlAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/mysql/mysql-side/mysql-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.mysql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.mysql.core
14 | 1.0-SNAPSHOT
15 | jar
16 | mysql-side-core
17 |
18 |
--------------------------------------------------------------------------------
/mysql/mysql-side/mysql-side-core/src/test/java/com/dtstack/flink/sql/side/mysql/table/MysqlSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.mysql.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | public class MysqlSideParserTest {
13 |
14 | // @Test
15 | public void getTableInfo() {
16 | MysqlSideParser mysqlSideParser = new MysqlSideParser();
17 |
18 | final String tableName = "table_foo";
19 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
20 |
21 | Map props = new HashMap();
22 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
23 | props.put("tablename", "table_foo");
24 | props.put("username", "foo");
25 | props.put("password", "foo");
26 |
27 | AbstractTableInfo tableInfo= mysqlSideParser.getTableInfo(tableName, fieldsInfo, props);
28 |
29 | final String NORMAL_TYPE = "mysql";
30 | final String table_type = tableInfo.getType();
31 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
32 | }
33 | }
--------------------------------------------------------------------------------
/mysql/mysql-sink/src/test/java/com/dtstack/flink/sql/sink/mysql/table/MysqlSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.mysql.table;
2 |
3 | import org.junit.Before;
4 | import org.junit.Test;
5 |
6 | import static org.junit.Assert.*;
7 |
8 | /**
9 | * @program: flink.sql
10 | * @author: wuren
11 | * @create: 2020/06/17
12 | **/
public class MysqlSinkParserTest {

    @Before
    public void setUp() throws Exception {
        // No fixture state yet; kept so future tests have a setup hook.
    }

    @Test
    public void getTableInfo() {
        // Placeholder: no assertions yet. TODO(review): exercise
        // MysqlSinkParser.getTableInfo like the side-parser tests do.
    }
}
--------------------------------------------------------------------------------
/mysql/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.mysql
14 | pom
15 |
16 |
17 | mysql-sink
18 | mysql-side
19 |
20 |
21 |
22 | 5.1.46
23 | 1.0-SNAPSHOT
24 |
25 |
26 |
27 |
28 |
29 | com.dtstack.flink
30 | sql.core
31 | ${sql.core.version}
32 | provided
33 |
34 |
35 |
36 | mysql
37 | mysql-connector-java
38 | ${mysql.connector.version}
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/oceanbase/oceanbase-side/oceanbase-all-side/src/test/java/com/dtstatck/flink/sql/side/oceanbase/OceanbaseAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstatck.flink.sql.side.oceanbase;
2 |
3 | import com.dtstack.flink.sql.side.oceanbase.OceanbaseAllReqRow;
4 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
5 |
// NOTE(review): enclosing package is "com.dtstatck..." -- likely a typo of
// "com.dtstack"; fixing it requires moving the source directory too.
public class OceanbaseAllReqRowTest extends RdbAllReqRowTestBase {

    @Override
    protected void init() {
        // Concrete ALL-cache side implementation exercised by the shared base tests.
        clazz = OceanbaseAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/oceanbase/oceanbase-side/oceanbase-async-side/src/test/java/com/dtstack/flink/sql/side/oceanbase/OceanbaseAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.oceanbase;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
/**
 * Smoke test for OceanbaseAsyncReqRow; shared checks live in RdbAsyncReqRowTestBase.
 */
public class OceanbaseAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    @Override
    protected void init() {
        // Concrete async side-table implementation the base fixture instantiates.
        clazz = OceanbaseAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/oceanbase/oceanbase-side/oceanbase-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.oceanbase
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.oceanbase.core
14 | oceanbase-side-core
15 | 1.0-SNAPSHOT
16 | jar
17 |
18 |
--------------------------------------------------------------------------------
/oceanbase/oceanbase-side/oceanbase-side-core/src/test/java/com/dtstack/flink/sql/side/oceanbase/table/OceanbaseSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.oceanbase.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
10 | public class OceanbaseSideParserTest {
11 |
12 | // @Test
13 | public void getTableInfo() {
14 | OceanbaseSideParser sideParser = new OceanbaseSideParser();
15 |
16 | final String tableName = "table_foo";
17 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
18 |
19 | Map props = new HashMap();
20 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
21 | props.put("tablename", "table_foo");
22 | props.put("username", "foo");
23 | props.put("password", "foo");
24 |
25 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
26 |
27 | final String NORMAL_TYPE = "oceanbase";
28 | final String table_type = tableInfo.getType();
29 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
30 | }
31 |
32 | }
--------------------------------------------------------------------------------
/oceanbase/oceanbase-sink/src/test/java/com/dtstack/flink/sql/sink/ocean/table/OceanbaseSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.ocean.table;
2 |
3 | import com.dtstack.flink.sql.sink.oceanbase.table.OceanbaseSinkParser;
4 | import com.dtstack.flink.sql.table.AbstractTableInfo;
5 | import org.junit.Assert;
6 | import org.junit.Test;
7 |
8 | import java.util.HashMap;
9 | import java.util.Map;
10 |
11 | public class OceanbaseSinkParserTest {
12 |
13 | // @Test
14 | public void getTableInfo() {
15 | OceanbaseSinkParser sideParser = new OceanbaseSinkParser();
16 |
17 | final String tableName = "table_foo";
18 | final String fieldsInfo = "id INT, name VARCHAR";
19 |
20 | Map props = new HashMap();
21 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
22 | props.put("tablename", "table_foo");
23 | props.put("username", "foo");
24 | props.put("password", "foo");
25 |
26 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
27 |
28 | final String NORMAL_TYPE = "oceanbase";
29 | final String table_type = tableInfo.getType();
30 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
31 | }
32 |
33 | }
--------------------------------------------------------------------------------
/oceanbase/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.oceanbase
13 | pom
14 |
15 | oceanbase-sink
16 | oceanbase-side
17 |
18 |
19 |
20 | 5.1.40
21 | 1.0-SNAPSHOT
22 |
23 |
24 |
25 |
26 | com.dtstack.flink
27 | sql.core
28 | ${sql.core.version}
29 | provided
30 |
31 |
32 |
33 | mysql
34 | mysql-connector-java
35 | ${mysql.connector.version}
36 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/oracle/oracle-side/oracle-all-side/src/test/java/com/dtstack/flink/sql/side/oracle/OracleAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.oracle;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 | import org.junit.Before;
5 | import org.junit.Test;
6 | import org.powermock.reflect.Whitebox;
7 |
8 | import static org.junit.Assert.*;
9 |
/**
 * Smoke test for OracleAllReqRow; shared checks live in RdbAllReqRowTestBase.
 */
public class OracleAllReqRowTest extends RdbAllReqRowTestBase {

    @Override
    protected void init() {
        // Concrete ALL-cache side implementation the base fixture instantiates.
        clazz = OracleAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/oracle/oracle-side/oracle-async-side/src/test/java/com/dtstack/flink/sql/side/oracle/OracleAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.oracle;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
/**
 * Smoke test for OracleAsyncReqRow; shared checks live in RdbAsyncReqRowTestBase.
 */
public class OracleAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    @Override
    protected void init() {
        // Concrete async side-table implementation the base fixture instantiates.
        clazz = OracleAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/oracle/oracle-side/oracle-async-side/src/test/java/com/dtstack/flink/sql/side/oracle/OracleAsyncSideInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.oracle;
2 |
3 | import com.dtstack.flink.sql.side.rdb.table.RdbSideTableInfo;
4 | import com.dtstack.flink.sql.table.AbstractTableInfo;
5 | import org.junit.Assert;
6 | import org.junit.Test;
7 | import org.powermock.reflect.Whitebox;
8 |
9 | public class OracleAsyncSideInfoTest {
10 |
11 | @Test
12 | public void testWrapperPlaceholder() {
13 | RdbSideTableInfo tableInfo = new RdbSideTableInfo();
14 | String fieldName = "TEST_name";
15 | String type = "char";
16 | tableInfo.addField(fieldName);
17 | tableInfo.addFieldType(type);
18 |
19 | AbstractTableInfo.FieldExtraInfo extraInfo = new AbstractTableInfo.FieldExtraInfo();
20 | extraInfo.setLength(4);
21 | tableInfo.addFieldExtraInfo(extraInfo);
22 |
23 | OracleAsyncSideInfo sideInfo = Whitebox.newInstance(OracleAsyncSideInfo.class);
24 | Whitebox.setInternalState(sideInfo, "sideTableInfo", tableInfo);
25 |
26 | String placeholder = sideInfo.wrapperPlaceholder(fieldName);
27 | Assert.assertEquals("rpad(?, 4, ' ')", placeholder);
28 | }
29 |
30 | }
--------------------------------------------------------------------------------
/oracle/oracle-side/oracle-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.oracle
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.oracle.core
14 | 1.0-SNAPSHOT
15 | jar
16 | oracle-side-core
17 |
18 |
--------------------------------------------------------------------------------
/oracle/oracle-side/oracle-side-core/src/test/java/com/dtstack/flink/sql/side/oracle/table/OracleSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.oracle.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | public class OracleSideParserTest {
13 |
14 | // @Test
15 | public void getTableInfo() {
16 | OracleSideParser sideParser = new OracleSideParser();
17 |
18 | final String tableName = "table_foo";
19 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
20 |
21 | Map props = new HashMap();
22 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
23 | props.put("tablename", "table_foo");
24 | props.put("username", "foo");
25 | props.put("password", "foo");
26 |
27 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
28 |
29 | final String NORMAL_TYPE = "oracle";
30 | final String table_type = tableInfo.getType();
31 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
32 | }
33 |
34 | }
--------------------------------------------------------------------------------
/oracle/oracle-sink/src/test/java/com/dtstack/flink/sql/sink/oracle/OracleSinkTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.oracle;
2 |
3 | import com.dtstack.flink.sql.sink.rdb.format.JDBCUpsertOutputFormat;
4 | import org.junit.Before;
5 | import org.junit.Test;
6 | import org.mockito.InjectMocks;
7 | import org.mockito.Mock;
8 | import org.mockito.MockitoAnnotations;
9 | import org.powermock.api.support.membermodification.MemberModifier;
10 | import java.util.Optional;
11 | import static org.mockito.Mockito.when;
12 |
13 | public class OracleSinkTest {
14 |
15 | @Mock
16 | OracleDialect jdbcDialect;
17 |
18 | @InjectMocks
19 | OracleSink sink = new OracleSink();;
20 | @Before
21 | public void setUp () {
22 | MockitoAnnotations.initMocks(this);
23 | }
24 |
25 | @Test
26 | public void testGetOutputFormat() throws IllegalAccessException {
27 | when(jdbcDialect.defaultDriverName()).thenReturn(Optional.of("dd"));
28 |
29 | MemberModifier.field(OracleSink.class, "dbUrl").set(sink, "foo");
30 | MemberModifier.field(OracleSink.class, "jdbcDialect").set(sink, jdbcDialect);
31 | MemberModifier.field(OracleSink.class, "userName").set(sink, "foo");
32 | MemberModifier.field(OracleSink.class, "password").set(sink, "foo");
33 | MemberModifier.field(OracleSink.class, "tableName").set(sink, "foo");
34 | MemberModifier.field(OracleSink.class, "fieldNames").set(sink, new String[]{"foo", "bar"});
35 |
36 | JDBCUpsertOutputFormat format = sink.getOutputFormat();
37 | }
38 |
39 | }
--------------------------------------------------------------------------------
/oracle/oracle-sink/src/test/java/com/dtstack/flink/sql/sink/oracle/table/OracleSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.oracle.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class OracleSinkParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | OracleSinkParser sinkParser = new OracleSinkParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= sinkParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "oracle";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/oracle/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.oracle
13 | 1.0-SNAPSHOT
14 |
15 | oracle-side
16 | oracle-sink
17 |
18 | pom
19 |
20 |
21 | 12.2.0.1
22 | 1.0-SNAPSHOT
23 |
24 |
25 |
26 |
27 | com.dtstack.flink
28 | sql.core
29 | ${sql.core.version}
30 | provided
31 |
32 |
33 |
34 | com.github.noraui
35 | ojdbc8
36 | ${ojdbc.version}
37 |
38 |
39 |
--------------------------------------------------------------------------------
/polardb/polardb-side/polardb-all-side/src/test/java/com/dtstack/flink/sql/side/polardb/PolardbAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.polardb;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 |
/**
 * Smoke test for PolardbAllReqRow; shared checks live in RdbAllReqRowTestBase.
 */
public class PolardbAllReqRowTest extends RdbAllReqRowTestBase {

    @Override
    protected void init() {
        // Concrete ALL-cache side implementation the base fixture instantiates.
        clazz = PolardbAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/polardb/polardb-side/polardb-async-side/src/test/java/com/dtstack/flink/sql/side/polardb/PolardbAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.polardb;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 | import org.junit.Before;
5 | import org.powermock.reflect.Whitebox;
6 |
/**
 * Smoke test for PolardbAsyncReqRow; shared checks live in RdbAsyncReqRowTestBase.
 */
public class PolardbAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    @Override
    protected void init() {
        // Concrete async side-table implementation the base fixture instantiates.
        clazz = PolardbAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/polardb/polardb-side/polardb-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.polardb
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.side.polardb.core
13 | 1.0-SNAPSHOT
14 | polardb-side-core
15 |
16 |
--------------------------------------------------------------------------------
/polardb/polardb-side/polardb-side-core/src/test/java/com/dtstack/flink/sql/side/polardb/table/PolardbSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.polardb.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class PolardbSideParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | PolardbSideParser sideParser = new PolardbSideParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "polardb";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/polardb/polardb-sink/src/test/java/com/dtstack/flink/sql/sink/polardb/table/PolardbSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.polardb.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class PolardbSinkParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | PolardbSinkParser sinkParser = new PolardbSinkParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= sinkParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "polardb";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/polardb/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 |
12 | 4.0.0
13 | sql.polardb
14 | pom
15 |
16 |
17 | polardb-sink
18 | polardb-side
19 |
20 |
21 |
22 | 8.0.16
23 | 1.0-SNAPSHOT
24 |
25 |
26 |
27 |
28 |
29 | com.dtstack.flink
30 | sql.core
31 | ${sql.core.version}
32 | provided
33 |
34 |
35 |
36 | mysql
37 | mysql-connector-java
38 | ${mysql.connector.version}
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/postgresql/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 | pom
13 | sql.postgresql
14 |
15 |
16 | postgresql-side
17 | postgresql-sink
18 |
19 |
20 | 1.0-SNAPSHOT
21 | 42.4.1
22 |
23 |
24 |
25 |
26 |
27 | com.dtstack.flink
28 | sql.core
29 | ${sql.core.version}
30 | provided
31 |
32 |
33 | org.postgresql
34 | postgresql
35 | ${postgresql.version}
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/postgresql/postgresql-side/postgresql-all-side/src/test/java/com/dtstack/flink/sql/side/postgresql/PostgresqlAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.postgresql;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 |
/**
 * Smoke test for PostgresqlAllReqRow; shared checks live in RdbAllReqRowTestBase.
 */
public class PostgresqlAllReqRowTest extends RdbAllReqRowTestBase {

    @Override
    protected void init() {
        // Concrete ALL-cache side implementation the base fixture instantiates.
        clazz = PostgresqlAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/postgresql/postgresql-side/postgresql-async-side/src/test/java/com/dtstack/flink/sql/side/postgresql/PostgresqlAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.postgresql;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
/**
 * Smoke test for PostgresqlAsyncReqRow; shared checks live in RdbAsyncReqRowTestBase.
 */
public class PostgresqlAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    @Override
    protected void init() {
        // Concrete async side-table implementation the base fixture instantiates.
        clazz = PostgresqlAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/postgresql/postgresql-side/postgresql-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.postgresql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.postgresql.core
14 | 1.0-SNAPSHOT
15 | jar
16 | postgresql-side-core
17 |
18 |
19 |
--------------------------------------------------------------------------------
/postgresql/postgresql-side/postgresql-side-core/src/test/java/com/dtstack/flink/sql/side/postgresql/table/PostgresqlSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.postgresql.table;
2 |
3 |
4 | import com.dtstack.flink.sql.table.AbstractTableInfo;
5 | import org.junit.Assert;
6 | import org.junit.Test;
7 |
8 | import java.util.HashMap;
9 | import java.util.Map;
10 |
11 | public class PostgresqlSideParserTest {
12 |
13 | // @Test
14 | public void getTableInfo() {
15 | PostgresqlSideParser sideParser = new PostgresqlSideParser();
16 |
17 | final String tableName = "table_foo";
18 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
19 |
20 | Map props = new HashMap();
21 | props.put("url", "jdbc:postgresql://foo:3306/db_foo");
22 | props.put("tablename", "table_foo");
23 | props.put("username", "foo");
24 | props.put("password", "foo");
25 |
26 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
27 |
28 | final String NORMAL_TYPE = "postgresql";
29 | final String table_type = tableInfo.getType();
30 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
31 | }
32 |
33 | }
--------------------------------------------------------------------------------
/postgresql/postgresql-sink/src/test/java/com/dtstack/flink/sql/sink/postgresql/table/PostgresqlSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.postgresql.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class PostgresqlSinkParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | PostgresqlSinkParser sinkParser = new PostgresqlSinkParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= sinkParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "postgresql";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/rdb/rdb-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.rdb
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | rdb-core
13 | sql.core.rdb
14 |
15 |
16 | 8
17 | 8
18 |
19 |
20 |
--------------------------------------------------------------------------------
/rdb/rdb-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.rdb
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.rdb
14 | 1.0-SNAPSHOT
15 | rdb-side
16 | jar
17 |
18 |
19 | 3.9.4
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 | io.vertx
28 | vertx-jdbc-client
29 | ${vertx.version}
30 |
31 |
32 |
33 | io.vertx
34 | vertx-core
35 | ${vertx.version}
36 |
37 |
38 |
39 | com.dtstack.flink
40 | sql.core.rdb
41 | 1.0-SNAPSHOT
42 |
43 |
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/rdb/rdb-side/src/test/java/com/dtstack/flink/sql/side/rdb/all/RdbAllReqRowTestBase.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.rdb.all;
2 |
3 | import org.junit.Before;
4 | import org.junit.Test;
5 | import org.powermock.reflect.Whitebox;
6 |
7 | /**
8 | * @program: flinkStreamSQL
9 | * @author: wuren
10 | * @create: 2020/11/10
11 | **/
12 | public abstract class RdbAllReqRowTestBase {
13 |
14 | protected AbstractRdbAllReqRow reqRow;
15 | protected Class extends AbstractRdbAllReqRow> clazz;
16 |
17 | @Before
18 | public void setUp() {
19 | init();
20 | this.reqRow = Whitebox.newInstance(clazz);
21 | }
22 |
23 | protected abstract void init();
24 |
25 | @Test
26 | public void testGetConn() {
27 | try {
28 | reqRow.getConn("", "", "");
29 | } catch (RuntimeException e) {}
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/rdb/rdb-side/src/test/java/com/dtstack/flink/sql/side/rdb/table/RdbSideTableInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.rdb.table;
2 |
3 | import org.junit.Assert;
4 | import org.junit.Before;
5 | import org.junit.Test;
6 |
7 | public class RdbSideTableInfoTest {
8 |
9 | private RdbSideTableInfo tableInfo;
10 | private static String SCHEMA = "TEST_schema";
11 |
12 | @Before
13 | public void setUp() {
14 | tableInfo = new RdbSideTableInfo();
15 | tableInfo.setUrl("jdbc://mysql");
16 | tableInfo.setUserName("TEST_root");
17 | tableInfo.setPassword("TEST_pass");
18 | tableInfo.setTableName("foo_tablename");
19 | tableInfo.setSchema(SCHEMA);
20 | }
21 |
22 | // @Test
23 | public void testCheck() {
24 | Boolean success = tableInfo.check();
25 | Assert.assertTrue(success);
26 | }
27 |
28 | // @Test
29 | public void testToString() {
30 | tableInfo.toString();
31 | }
32 |
33 | // @Test
34 | public void testGetSchema() {
35 | Assert.assertEquals(SCHEMA, tableInfo.getSchema());
36 | }
37 |
38 | }
--------------------------------------------------------------------------------
/rdb/rdb-sink/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.rdb
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.sink.rdb
14 | 1.0-SNAPSHOT
15 | rdb-sink
16 | jar
17 |
18 |
19 |
20 | com.dtstack.flink
21 | sql.core.rdb
22 | 1.0-SNAPSHOT
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/rdb/rdb-sink/src/test/java/com/dtstack/flink/sql/sink/rdb/ConcreteRdbSink.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.rdb;
2 |
3 | import com.dtstack.flink.sql.sink.rdb.dialect.JDBCDialect;
4 | import com.dtstack.flink.sql.sink.rdb.format.JDBCUpsertOutputFormat;
5 |
6 | /**
7 | * @program: flink.sql
8 | * @author: wuren
9 | * @create: 2020/07/31
10 | **/
// Minimal concrete subclass used only to exercise AbstractRdbSink in tests.
public class ConcreteRdbSink extends AbstractRdbSink {

    public ConcreteRdbSink(JDBCDialect jdbcDialect) {
        super(jdbcDialect);
    }

    // Returns null: no real output format is needed for these tests.
    @Override
    public JDBCUpsertOutputFormat getOutputFormat() {
        return null;
    }
}
22 |
--------------------------------------------------------------------------------
/rdb/rdb-sink/src/test/java/com/dtstack/flink/sql/sink/rdb/dialect/ConcreteJDBCDialect.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.rdb.dialect;
2 |
3 | /**
4 | * @program: flink.sql
5 | * @author: wuren
6 | * @create: 2020/07/31
7 | **/
// Minimal JDBCDialect stub for tests; relies on the interface's default methods
// for everything except canHandle.
public class ConcreteJDBCDialect implements JDBCDialect {

    // Always false: this stub never claims to handle any JDBC URL.
    @Override
    public boolean canHandle(String url) {
        return false;
    }

}
16 |
--------------------------------------------------------------------------------
/rdb/rdb-sink/src/test/java/com/dtstack/flink/sql/sink/rdb/table/RdbTableInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.rdb.table;
2 |
3 | import org.junit.Assert;
4 | import org.junit.Test;
5 |
6 | public class RdbTableInfoTest {
7 |
8 | @Test
9 | public void test() {
10 | RdbTableInfo tablaInfo = new RdbTableInfo();
11 | final String type = "mysql";
12 | tablaInfo.setType(type);
13 | Assert.assertEquals(type, tablaInfo.getType());
14 | }
15 |
16 | }
--------------------------------------------------------------------------------
/redis5/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 | sql.redis
12 | pom
13 |
14 |
15 | redis5-sink
16 | redis5-side
17 |
18 |
19 |
20 | com.dtstack.flink
21 | sql.core
22 | 1.0-SNAPSHOT
23 | provided
24 |
25 |
26 | redis.clients
27 | jedis
28 | 2.9.0
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/redis5/redis5-side/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.redis
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 | sql.side.redis
13 | redis-side
14 |
15 |
16 | redis-side-core
17 | redis-async-side
18 | redis-all-side
19 |
20 |
21 | pom
22 |
23 |
--------------------------------------------------------------------------------
/redis5/redis5-side/redis-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 |
8 | sql.side.redis
9 | com.dtstack.flink
10 | 1.0-SNAPSHOT
11 | ../pom.xml
12 |
13 |
14 | sql.side.redis.core
15 |
16 | jar
17 |
18 |
19 |
--------------------------------------------------------------------------------
/redis5/redis5-side/redis-side-core/src/main/java/com/dtstack/flink/sql/side/redis/enums/RedisType.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.redis.enums;
2 |
/**
 * Redis deployment modes supported by the side table, keyed by the numeric
 * code used in table configuration.
 */
public enum RedisType {
    /**
     * Standalone deployment.
     */
    STANDALONE(1),
    /**
     * Sentinel deployment.
     */
    SENTINEL(2),
    /**
     * Cluster deployment.
     */
    CLUSTER(3);

    /** Numeric configuration code; immutable. */
    final int type;

    RedisType(int type){
        this.type = type;
    }

    public int getType(){
        return type;
    }

    /**
     * Resolves a numeric code to its enum constant.
     *
     * @throws RuntimeException if the code matches no constant
     */
    public static RedisType parse(int redisType){
        for(RedisType type : RedisType.values()){
            if(type.getType() == redisType){
                return type;
            }
        }
        // "unsupported" — fixed typo ("unsupport"), now consistent with the
        // sink-side RedisType's message.
        throw new RuntimeException("unsupported redis type["+ redisType + "]");
    }
}
34 |
--------------------------------------------------------------------------------
/redis5/redis5-side/redis-side-core/src/test/java/com/dtstack/flink/sql/side/redis/enums/RedisTypeTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.dtstack.flink.sql.side.redis.enums;
20 |
21 | import org.junit.Test;
22 |
23 | /**
24 | * @author: chuixue
25 | * @create: 2020-07-20 14:27
26 | * @description:
27 | **/
28 | public class RedisTypeTest {
29 |
30 | @Test
31 | public void testParse() {
32 | for (RedisType value : RedisType.values()) {
33 | RedisType.parse(value.getType());
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/redis5/redis5-sink/src/main/java/com/dtstack/flink/sql/sink/redis/enums/RedisType.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.redis.enums;
2 |
/**
 * Redis deployment modes supported by the sink, keyed by the numeric code
 * used in table configuration.
 */
public enum RedisType {
    /**
     * Standalone deployment.
     */
    STANDALONE(1),
    /**
     * Sentinel deployment.
     */
    SENTINEL(2),
    /**
     * Cluster deployment.
     */
    CLUSTER(3);

    /** Numeric configuration code. */
    int type;

    RedisType(int type) {
        this.type = type;
    }

    public int getType() {
        return type;
    }

    /**
     * Resolves a numeric code to its enum constant.
     *
     * @throws RuntimeException if the code matches no constant
     */
    public static RedisType parse(int redisType) {
        for (RedisType candidate : values()) {
            if (candidate.getType() == redisType) {
                return candidate;
            }
        }
        throw new RuntimeException("unsupported redis type["+ redisType + "]");
    }
}
34 |
--------------------------------------------------------------------------------
/serversocket/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.serversocket
14 | 1.0-SNAPSHOT
15 |
16 | serversocket-source
17 |
18 | pom
19 |
20 |
21 | 1.0-SNAPSHOT
22 |
23 |
24 |
25 |
26 |
27 | com.dtstack.flink
28 | sql.core
29 | ${sql.core.version}
30 | provided
31 |
32 |
33 |
--------------------------------------------------------------------------------
/solr/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.solr
13 | pom
14 |
15 | solr-sink
16 |
17 |
18 |
19 | 1.0-SNAPSHOT
20 |
21 |
22 |
23 |
24 | com.dtstack.flink
25 | sql.core
26 | ${sql.core.version}
27 | provided
28 |
29 |
30 | org.apache.solr
31 | solr-solrj
32 | 7.4.0
33 |
34 |
35 | org.slf4j
36 | slf4j-api
37 |
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/solr/solr-sink/src/test/java/com/dtstack/flink/sql/sink/solr/table/SolrTableInfoTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.solr.table;
2 |
3 | import com.dtstack.flink.sql.sink.solr.options.SolrClientOptions;
4 | import org.junit.Test;
5 |
6 | import java.util.ArrayList;
7 | import java.util.List;
8 |
9 | public class SolrTableInfoTest {
10 |
11 | @Test
12 | public void check() {
13 | SolrTableInfo solrTableInfo = new SolrTableInfo();
14 | List zkHosts = new ArrayList<>();
15 | SolrClientOptions solrClientOptions = new SolrClientOptions(zkHosts, null, "");
16 | solrTableInfo.setSolrClientOptions(solrClientOptions);
17 | try {
18 | solrTableInfo.check();
19 | } catch (NullPointerException | IllegalStateException e) {
20 | }
21 | zkHosts.add("host:2181");
22 | solrClientOptions.setZkHosts(zkHosts);
23 | try {
24 | solrTableInfo.check();
25 | } catch (NullPointerException | IllegalStateException e) {
26 | }
27 | solrClientOptions.setCollection("abc");
28 | solrTableInfo.check();
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/sqlserver/sqlserver-side/sqlserver-all-side/src/test/java/com/dtstack/flink/sql/side/sqlserver/SqlserverAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.sqlserver;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 |
public class SqlserverAllReqRowTest extends RdbAllReqRowTestBase {

    // Supplies the SQL Server implementation to the shared RDB all-cache base test.
    @Override
    protected void init() {
        clazz = SqlserverAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/sqlserver/sqlserver-side/sqlserver-async-side/src/test/java/com/dtstack/flink/sql/side/sqlserver/SqlserverAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.sqlserver;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
public class SqlserverAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    // Supplies the SQL Server implementation to the shared RDB async base test.
    @Override
    protected void init() {
        clazz = SqlserverAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/sqlserver/sqlserver-side/sqlserver-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.sqlserver
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.side.sqlserver.core
14 | 1.0-SNAPSHOT
15 | jar
16 | sqlserver-side-core
17 |
18 |
19 |
--------------------------------------------------------------------------------
/sqlserver/sqlserver-side/sqlserver-side-core/src/test/java/com/dtstack/flink/sql/side/sqlserver/table/SqlserverSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.sqlserver.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class SqlserverSideParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | SqlserverSideParser sideParser = new SqlserverSideParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "sqlserver";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/sqlserver/sqlserver-sink/src/test/java/com/dtstack/flink/sql/sink/sqlserver/table/SqlserverSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.sqlserver.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import java.util.HashMap;
7 | import java.util.Map;
8 |
9 | public class SqlserverSinkParserTest {
10 |
11 | // @Test
12 | public void getTableInfo() {
13 | SqlserverSinkParser sinkParser = new SqlserverSinkParser();
14 |
15 | final String tableName = "table_foo";
16 | final String fieldsInfo = "id INT, name VARCHAR";
17 |
18 | Map props = new HashMap();
19 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
20 | props.put("tablename", "table_foo");
21 | props.put("username", "foo");
22 | props.put("password", "foo");
23 |
24 | AbstractTableInfo tableInfo= sinkParser.getTableInfo(tableName, fieldsInfo, props);
25 |
26 | final String NORMAL_TYPE = "sqlserver";
27 | final String table_type = tableInfo.getType();
28 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
29 | }
30 |
31 | }
--------------------------------------------------------------------------------
/tidb/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | flink.sql
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 | ../pom.xml
10 |
11 | 4.0.0
12 |
13 | sql.tidb
14 | pom
15 |
16 | tidb-side
17 | tidb-sink
18 |
19 |
20 |
21 | 5.1.46
22 | 1.0-SNAPSHOT
23 |
24 |
25 |
26 |
27 |
28 | com.dtstack.flink
29 | sql.core
30 | ${sql.core.version}
31 | provided
32 |
33 |
34 |
35 | mysql
36 | mysql-connector-java
37 | ${tidb.connector.version}
38 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/tidb/tidb-side/tidb-all-side/src/test/java/com/dtstack/flink/sql/side/tidb/TidbAllReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.tidb;
2 |
3 | import com.dtstack.flink.sql.side.rdb.all.RdbAllReqRowTestBase;
4 |
public class TidbAllReqRowTest extends RdbAllReqRowTestBase {

    // Supplies the TiDB implementation to the shared RDB all-cache base test.
    @Override
    protected void init() {
        clazz = TidbAllReqRow.class;
    }

}
--------------------------------------------------------------------------------
/tidb/tidb-side/tidb-async-side/src/test/java/com/dtstack/flink/sql/side/tidb/TidbAsyncReqRowTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.tidb;
2 |
3 | import com.dtstack.flink.sql.side.rdb.async.RdbAsyncReqRowTestBase;
4 |
public class TidbAsyncReqRowTest extends RdbAsyncReqRowTestBase {

    // Supplies the TiDB implementation to the shared RDB async base test.
    @Override
    protected void init() {
        clazz = TidbAsyncReqRow.class;
    }

}
--------------------------------------------------------------------------------
/tidb/tidb-side/tidb-side-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | sql.side.tidb
7 | com.dtstack.flink
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | sql.side.tidb.core
13 | jar
14 | tidb-side-core
15 | 1.0-SNAPSHOT
16 |
--------------------------------------------------------------------------------
/tidb/tidb-side/tidb-side-core/src/test/java/com/dtstack/flink/sql/side/tidb/table/TidbSideParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.side.tidb.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
10 | public class TidbSideParserTest {
11 |
12 | // @Test
13 | public void getTableInfo() {
14 | TidbSideParser sideParser = new TidbSideParser();
15 |
16 | final String tableName = "table_foo";
17 | final String fieldsInfo = "id INT, name VARCHAR, PRIMARY KEY (id)";
18 |
19 | Map props = new HashMap();
20 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
21 | props.put("tablename", "table_foo");
22 | props.put("username", "foo");
23 | props.put("password", "foo");
24 |
25 | AbstractTableInfo tableInfo= sideParser.getTableInfo(tableName, fieldsInfo, props);
26 |
27 | final String NORMAL_TYPE = "tidb";
28 | final String table_type = tableInfo.getType();
29 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
30 | }
31 |
32 | }
--------------------------------------------------------------------------------
/tidb/tidb-sink/src/test/java/com/dtstack/flink/sql/sink/tidb/table/TidbSinkParserTest.java:
--------------------------------------------------------------------------------
1 | package com.dtstack.flink.sql.sink.tidb.table;
2 |
3 | import com.dtstack.flink.sql.table.AbstractTableInfo;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 |
7 | import java.util.HashMap;
8 | import java.util.Map;
9 |
10 | public class TidbSinkParserTest {
11 |
12 | // @Test
13 | public void getTableInfo() {
14 | TidbSinkParser parser = new TidbSinkParser();
15 |
16 | final String tableName = "table_foo";
17 | final String fieldsInfo = "id INT, name VARCHAR";
18 |
19 | Map props = new HashMap();
20 | props.put("url", "jdbc:mysql://foo:3306/db_foo");
21 | props.put("tablename", "table_foo");
22 | props.put("username", "foo");
23 | props.put("password", "foo");
24 |
25 | AbstractTableInfo tableInfo= parser.getTableInfo(tableName, fieldsInfo, props);
26 |
27 | final String NORMAL_TYPE = "tidb";
28 | final String table_type = tableInfo.getType();
29 | Assert.assertTrue(NORMAL_TYPE.equals(table_type));
30 | }
31 |
32 | }
--------------------------------------------------------------------------------