├── .editorconfig
├── .github
│   └── workflows
│       ├── push_pr.yml
│       └── test.yml
├── .gitignore
├── .idea
│   ├── icon.png
│   └── vcs.xml
├── .scanignore
├── LICENSE
├── README.md
├── README_CN.md
├── docs
│   ├── cli
│   │   ├── README.md
│   │   ├── README_CN.md
│   │   └── flink-cdc
│   │       ├── flink-cdc-source.md
│   │       └── flink-cdc-source_cn.md
│   └── sink
│       ├── flink-connector-obkv-hbase.md
│       ├── flink-connector-obkv-hbase_cn.md
│       ├── flink-connector-oceanbase-directload.md
│       ├── flink-connector-oceanbase-directload_cn.md
│       ├── flink-connector-oceanbase.md
│       └── flink-connector-oceanbase_cn.md
├── flink-connector-obkv-hbase
│   ├── pom.xml
│   └── src
│       ├── main
│       │   ├── java/com/oceanbase/connector/flink
│       │   │   ├── OBKVHBaseConnectorOptions.java
│       │   │   ├── OBKVHBaseDynamicTableSinkFactory.java
│       │   │   ├── connection
│       │   │   │   └── OBKVHBaseConnectionProvider.java
│       │   │   ├── sink
│       │   │   │   ├── OBKVHBaseDynamicTableSink.java
│       │   │   │   └── OBKVHBaseRecordFlusher.java
│       │   │   └── table
│       │   │       └── OBKVHBaseRowDataSerializationSchema.java
│       │   └── resources/META-INF/services
│       │       └── org.apache.flink.table.factories.Factory
│       └── test
│           ├── java/com/oceanbase/connector/flink
│           │   └── OBKVHBaseConnectorITCase.java
│           └── resources
│               ├── log4j2-test.properties
│               └── sql
│                   └── htable.sql
├── flink-connector-oceanbase-base
│   ├── pom.xml
│   └── src
│       ├── main
│       │   └── java/com/oceanbase/connector/flink
│       │       ├── ConnectorOptions.java
│       │       ├── connection
│       │       │   ├── ConnectionProvider.java
│       │       │   ├── OceanBaseUserInfo.java
│       │       │   └── OceanBaseVersion.java
│       │       ├── sink
│       │       │   ├── AbstractDynamicTableSink.java
│       │       │   ├── OceanBaseSink.java
│       │       │   ├── OceanBaseWriter.java
│       │       │   ├── OceanBaseWriterEvent.java
│       │       │   └── RecordFlusher.java
│       │       ├── table
│       │       │   ├── AbstractRecordSerializationSchema.java
│       │       │   ├── DataChangeRecord.java
│       │       │   ├── DataChangeRecordData.java
│       │       │   ├── HTableInfo.java
│       │       │   ├── Record.java
│       │       │   ├── RecordSerializationSchema.java
│       │       │   ├── SchemaChangeRecord.java
│       │       │   ├── SerializationRuntimeConverter.java
│       │       │   ├── Table.java
│       │       │   ├── TableId.java
│       │       │   └── TableInfo.java
│       │       └── utils
│       │           ├── OptionUtils.java
│       │           └── TableCache.java
│       └── test
│           └── java/com/oceanbase/connector/flink
│               ├── OceanBaseMetadata.java
│               ├── OceanBaseMySQLTestBase.java
│               ├── OceanBaseOracleTestBase.java
│               ├── OceanBaseProxyContainer.java
│               └── OceanBaseTestBase.java
├── flink-connector-oceanbase-cli
│   ├── pom.xml
│   └── src
│       ├── main
│       │   └── java/com/oceanbase/connector/flink
│       │       ├── Cli.java
│       │       ├── config
│       │       │   └── CliConfig.java
│       │       ├── process
│       │       │   ├── ParsingProcessFunction.java
│       │       │   ├── Sync.java
│       │       │   └── TableNameConverter.java
│       │       ├── source
│       │       │   ├── FieldSchema.java
│       │       │   ├── FieldType.java
│       │       │   ├── JdbcMetadataAccessor.java
│       │       │   ├── MetadataAccessor.java
│       │       │   ├── TableSchema.java
│       │       │   └── cdc
│       │       │       ├── OceanBaseJsonDeserializationSchema.java
│       │       │       └── mysql
│       │       │           ├── MysqlCdcSync.java
│       │       │           ├── MysqlDateConverter.java
│       │       │           ├── MysqlMetadataAccessor.java
│       │       │           └── MysqlTypeConverter.java
│       │       ├── table
│       │       │   ├── OceanBaseJsonSerializationSchema.java
│       │       │   ├── OceanBaseTableSchema.java
│       │       │   ├── OceanBaseType.java
│       │       │   └── OceanBaseTypeMapper.java
│       │       └── utils
│       │           └── OceanBaseCatalogUtils.java
│       └── test
│           ├── java/com/oceanbase/connector/flink
│           │   ├── MysqlCdcSyncITCase.java
│           │   └── table
│           │       └── OceanBaseJsonSerializationSchemaTest.java
│           └── resources
│               ├── docker/mysql
│               │   └── my.cnf
│               ├── log4j2-test.properties
│               └── sql
│                   └── mysql-cdc.sql
├── flink-connector-oceanbase-directload
│   ├── pom.xml
│   └── src
│       ├── main
│       │   ├── java/com/oceanbase/connector/flink
│       │   │   ├── OBDirectLoadConnectorOptions.java
│       │   │   ├── OBDirectLoadDynamicTableSinkFactory.java
│       │   │   ├── directload
│       │   │   │   ├── DirectLoadUtils.java
│       │   │   │   ├── DirectLoader.java
│       │   │   │   └── DirectLoaderBuilder.java
│       │   │   ├── sink
│       │   │   │   ├── DirectLoadStreamSinkProvider.java
│       │   │   │   ├── OBDirectLoadDynamicTableSink.java
│       │   │   │   └── batch
│       │   │   │       ├── DirectLoadSink.java
│       │   │   │       └── DirectLoadWriter.java
│       │   │   └── table
│       │   │       └── OceanBaseRowDataSerializationSchema.java
│       │   └── resources/META-INF/services
│       │       └── org.apache.flink.table.factories.Factory
│       └── test
│           ├── java/com/oceanbase/connector/flink
│           │   └── OBDirectLoadITCase.java
│           └── resources
│               ├── log4j2-test.properties
│               └── sql
│                   └── products.sql
├── flink-connector-oceanbase-e2e-tests
│   ├── pom.xml
│   └── src
│       └── test
│           ├── java/com/oceanbase/connector/flink
│           │   ├── MysqlCdcSyncE2eITCase.java
│           │   ├── OBDirectLoadE2eITCase.java
│           │   ├── OBKVHBaseE2eITCase.java
│           │   ├── OceanBaseE2eITCase.java
│           │   └── utils
│           │       └── FlinkContainerTestEnvironment.java
│           └── resources
│               ├── docker/mysql
│               │   └── my.cnf
│               ├── log4j2-test.properties
│               └── sql
│                   ├── htable.sql
│                   ├── mysql-cdc.sql
│                   └── products.sql
├── flink-connector-oceanbase
│   ├── pom.xml
│   └── src
│       ├── main
│       │   ├── java/com/oceanbase/connector/flink
│       │   │   ├── OceanBaseConnectorOptions.java
│       │   │   ├── OceanBaseDynamicTableSinkFactory.java
│       │   │   ├── connection
│       │   │   │   ├── OceanBaseConnectionProvider.java
│       │   │   │   └── OceanBaseTablePartInfo.java
│       │   │   ├── dialect
│       │   │   │   ├── OceanBaseDialect.java
│       │   │   │   ├── OceanBaseMySQLDialect.java
│       │   │   │   └── OceanBaseOracleDialect.java
│       │   │   ├── sink
│       │   │   │   ├── OceanBaseDynamicTableSink.java
│       │   │   │   └── OceanBaseRecordFlusher.java
│       │   │   ├── table
│       │   │   │   └── OceanBaseRowDataSerializationSchema.java
│       │   │   └── utils
│       │   │       └── OceanBaseJdbcUtils.java
│       │   └── resources/META-INF/services
│       │       └── org.apache.flink.table.factories.Factory
│       └── test
│           ├── java/com/oceanbase/connector/flink
│           │   ├── OceanBaseMySQLConnectorITCase.java
│           │   ├── OceanBaseOracleConnectorITCase.java
│           │   ├── dialect
│           │   │   ├── OceanBaseMySQLDialectTest.java
│           │   │   └── OceanBaseOracleDialectTest.java
│           │   └── table
│           │       ├── OceanBaseTestData.java
│           │       └── OceanBaseTestDataSerializationSchema.java
│           └── resources
│               ├── log4j2-test.properties
│               └── sql
│                   ├── mysql
│                   │   ├── array_type.sql
│                   │   ├── gis_types.sql
│                   │   └── products.sql
│                   └── oracle
│                       └── products.sql
├── flink-sql-connector-obkv-hbase
│   └── pom.xml
├── flink-sql-connector-oceanbase-directload
│   └── pom.xml
├── flink-sql-connector-oceanbase
│   └── pom.xml
└── pom.xml
/.editorconfig:
--------------------------------------------------------------------------------
1 | # This file is for unifying the coding style for different editors and IDEs
2 | # See editorconfig.org
3 |
4 | root = true
5 |
6 | [*]
7 | indent_style = space
8 | indent_size = 2
9 | end_of_line = lf
10 | charset = utf-8
11 | trim_trailing_whitespace = true
12 | insert_final_newline = true
13 |
14 | [{*.java,*.xml}]
15 | indent_size = 4
16 |
--------------------------------------------------------------------------------
/.github/workflows/push_pr.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | pull_request:
5 | paths-ignore:
6 | - "docs/**"
7 | - "**.md"
8 | - ".*"
9 | push:
10 | branches:
11 | - main
12 |
13 | concurrency:
14 | group: ci-${{ github.event.pull_request.number || github.ref }}
15 | cancel-in-progress: true
16 |
17 | jobs:
18 | flink-connector-oceanbase:
19 | uses: ./.github/workflows/test.yml
20 | with:
21 | module: flink-connector-oceanbase
22 |
23 | flink-connector-obkv-hbase:
24 | uses: ./.github/workflows/test.yml
25 | with:
26 | module: flink-connector-obkv-hbase
27 |
28 | flink-connector-oceanbase-directload:
29 | uses: ./.github/workflows/test.yml
30 | with:
31 | module: flink-connector-oceanbase-directload
32 |
33 | flink-connector-oceanbase-cli:
34 | uses: ./.github/workflows/test.yml
35 | with:
36 | module: flink-connector-oceanbase-cli
37 |
38 | flink-connector-oceanbase-e2e-tests:
39 | strategy:
40 | matrix:
41 | flink_version: ["1.15.4", "1.16.3", "1.17.2", "1.18.1", "1.19.1", "1.20.0"]
42 | uses: ./.github/workflows/test.yml
43 | with:
44 | module: flink-connector-oceanbase-e2e-tests
45 | maven_opts: "-Dflink_version=${{ matrix.flink_version }}"
46 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test Module
2 |
3 | on:
4 | workflow_call:
5 | inputs:
6 | module:
7 | required: true
8 | type: string
9 | maven_opts:
10 | required: false
11 | type: string
12 |
13 | jobs:
14 | test:
15 | name: Test
16 | runs-on: ubuntu-latest
17 | steps:
18 | - name: Free disk space on Ubuntu runner
19 | uses: kfir4444/free-disk-space@main
20 | with:
21 | tool-cache: false
22 | android: true
23 | dotnet: true
24 | haskell: true
25 | large-packages: true
26 | swap-storage: true
27 |
28 | - name: Check out repository code
29 | uses: actions/checkout@v4
30 |
31 | - name: Set up Java
32 | uses: actions/setup-java@v4
33 | with:
34 | java-version: '8'
35 | distribution: 'zulu'
36 | cache: 'maven'
37 |
38 | - name: Maven build
39 | run: mvn install -DskipTests=true
40 |
41 | - name: Maven test
42 | run: |
43 | cd ${{ inputs.module }}
44 | mvn verify ${{ inputs.maven_opts }}
45 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | target/
2 | !.mvn/wrapper/maven-wrapper.jar
3 | !**/src/main/**/target/
4 | !**/src/test/**/target/
5 | pom.xml.tag
6 | pom.xml.releaseBackup
7 | pom.xml.versionsBackup
8 | pom.xml.next
9 | release.properties
10 | .flattened-pom.xml
11 |
12 | ### IntelliJ IDEA ###
13 | .idea/*
14 | !.idea/vcs.xml
15 | !.idea/icon.png
16 | *.iml
17 | *.ipr
18 | *.iws
19 |
20 | ### Eclipse ###
21 | .apt_generated
22 | .classpath
23 | .factorypath
24 | .project
25 | .settings
26 | .springBeans
27 | .sts4-cache
28 |
29 | ### NetBeans ###
30 | /nbproject/private/
31 | /nbbuild/
32 | /dist/
33 | /nbdist/
34 | /.nb-gradle/
35 | build/
36 | !**/src/main/**/build/
37 | !**/src/test/**/build/
38 |
39 | ### VS Code ###
40 | .vscode/
41 |
42 | ### Mac OS ###
43 | .DS_Store
44 |
45 | ### Log ###
46 | *.log
47 | /logs
48 |
--------------------------------------------------------------------------------
/.idea/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oceanbase/flink-connector-oceanbase/61b108cd8f5783109be1b5c6aad49e89da177f96/.idea/icon.png
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
16 |
17 |
18 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/.scanignore:
--------------------------------------------------------------------------------
1 | # git pre hook scan ignore configuration
2 |
3 | *.md
4 | pom.xml
5 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OceanBase Connectors for Apache Flink
2 |
3 | English | [简体中文](README_CN.md)
4 |
5 | [](https://github.com/oceanbase/flink-connector-oceanbase/actions/workflows/push_pr.yml?query=branch%3Amain)
6 | [](https://github.com/oceanbase/flink-connector-oceanbase/releases)
7 | [](LICENSE)
8 |
9 | This repository contains the OceanBase connectors for Apache Flink.
10 |
11 | ## Features
12 |
13 | Prerequisites
14 |
15 | - JDK 8
16 | - Flink 1.15 or later version
17 |
18 | This repository contains the following connectors:
19 |
20 | | Connector | Description | Document |
21 | |----------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------|
22 | | Flink Connector: OceanBase | This Connector uses the JDBC driver supported by OceanBase to write data to OceanBase, and supports MySQL and Oracle compatibility modes. | [Sink](docs/sink/flink-connector-oceanbase.md) |
23 | | Flink Connector: OceanBase Direct Load | This Connector uses the [direct load](https://en.oceanbase.com/docs/common-oceanbase-database-10000000001375568) API to write data to OceanBase. | [Sink](docs/sink/flink-connector-oceanbase-directload.md) |
24 | | Flink Connector: OBKV HBase | This Connector uses the [OBKV HBase API](https://github.com/oceanbase/obkv-hbase-client-java) to write data to OceanBase. | [Sink](docs/sink/flink-connector-obkv-hbase.md) |
25 |
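The sink connectors above are used from Flink SQL through standard `CREATE TABLE ... WITH (...)` statements. The sketch below is illustrative only: it assumes the JDBC-based connector registers the factory identifier `oceanbase` and that its option keys match the connector option names used in this repository (`url`, `username`, `password`, `schema-name`, `table-name`); refer to the linked sink documents for the authoritative option list.

```sql
-- Minimal sketch of a sink table for the JDBC-based connector (identifier and option
-- keys are assumptions; see docs/sink/flink-connector-oceanbase.md for the real reference).
CREATE TABLE t_sink (
    id       INT,
    username VARCHAR,
    score    INT,
    PRIMARY KEY (id) NOT ENFORCED
) WITH (
    'connector'   = 'oceanbase',
    'url'         = 'jdbc:mysql://127.0.0.1:2881/test',
    'schema-name' = 'test',
    'table-name'  = 't_sink',
    'username'    = 'root@test',
    'password'    = '******'
);
```
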
26 | We also provide a command line tool for submitting Flink end-to-end tasks, see the [CLI docs](docs/cli/README.md) for details.
27 |
28 | ### Other External Projects
29 |
30 | There are some community projects which can be used to work with Apache Flink and OceanBase.
31 |
32 | | Project | OceanBase Compatible Mode | Supported Features |
33 | |------------------------------------------------------------------------|---------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------|
34 | | [Flink Connector JDBC](https://github.com/apache/flink-connector-jdbc) | MySQL, Oracle | [Source + Sink](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/connectors/table/jdbc/) |
35 | | [Flink CDC](https://github.com/ververica/flink-cdc-connectors) | MySQL, Oracle | [Source + CDC](https://nightlies.apache.org/flink/flink-cdc-docs-master/docs/connectors/flink-sources/oceanbase-cdc/) |
36 | | [Apache SeaTunnel](https://github.com/apache/seatunnel)                 | MySQL, Oracle             | [Source](https://seatunnel.apache.org/docs/connector-v2/source/OceanBase), [Sink](https://seatunnel.apache.org/docs/connector-v2/sink/OceanBase) |
37 |
38 | ## Community
39 |
40 | Don’t hesitate to ask!
41 |
42 | Contact the developers and community at [https://ask.oceanbase.com](https://ask.oceanbase.com) if you need any help.
43 |
44 | [Open an issue](https://github.com/oceanbase/flink-connector-oceanbase/issues) if you found a bug.
45 |
46 | ## Licensing
47 |
48 | See [LICENSE](LICENSE) for more information.
49 |
--------------------------------------------------------------------------------
/README_CN.md:
--------------------------------------------------------------------------------
1 | # OceanBase Connectors for Apache Flink
2 |
3 | [English](README.md) | 简体中文
4 |
5 | [](https://github.com/oceanbase/flink-connector-oceanbase/actions/workflows/push_pr.yml?query=branch%3Amain)
6 | [](https://github.com/oceanbase/flink-connector-oceanbase/releases)
7 | [](https://opensource.org/licenses/Apache-2.0)
8 |
9 | 本仓库包含 OceanBase 的 Flink Connector。
10 |
11 | ## 功能
12 |
13 | 运行环境需要准备
14 |
15 | - JDK 8
16 | - Flink 1.15 或后续版本
17 |
18 | 本仓库提供了如下 Connector:
19 |
20 | | Connector | 描述 | 使用文档 |
21 | |----------------------------------------|--------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------|
22 | | Flink Connector: OceanBase | 该Connector通过OceanBase支持的JDBC驱动将数据写入OceanBase,支持MySQL 和 Oracle 兼容模式。 | [Sink](docs/sink/flink-connector-oceanbase_cn.md) |
23 | | Flink Connector: OceanBase Direct Load | 该Connector通过[旁路导入](https://www.oceanbase.com/docs/common-oceanbase-database-cn-1000000001428636)API将数据写入OceanBase。 | [Sink](docs/sink/flink-connector-oceanbase-directload_cn.md) |
24 | | Flink Connector: OBKV HBase | 该Connector通过[OBKV HBase API](https://github.com/oceanbase/obkv-hbase-client-java)将数据写入OceanBase。 | [Sink](docs/sink/flink-connector-obkv-hbase_cn.md) |
25 |
26 | 我们还提供了一个用于提交 Flink 端到端任务的命令行工具,详细信息请参阅 [CLI 文档](docs/cli/README_CN.md)。
27 |
28 | ### 其他外部项目
29 |
30 | 在其他的社区和组织中,也有一些项目可以用于通过 Flink 处理 OceanBase 中的数据。
31 |
32 | | Project | OceanBase 兼容模式 | 支持的功能 |
33 | |------------------------------------------------------------------------|----------------|--------------------------------------------------------------------------------------------------------------------------------------------------|
34 | | [Flink Connector JDBC](https://github.com/apache/flink-connector-jdbc) | MySQL, Oracle | [Source + Sink](https://nightlies.apache.org/flink/flink-docs-release-1.19/zh/docs/connectors/table/jdbc/) |
35 | | [Flink CDC](https://github.com/ververica/flink-cdc-connectors) | MySQL, Oracle | [Source + CDC](https://nightlies.apache.org/flink/flink-cdc-docs-master/zh/docs/connectors/flink-sources/oceanbase-cdc/) |
36 | | [Apache SeaTunnel](https://github.com/apache/seatunnel) | MySQL, Oracle | [Source](https://seatunnel.apache.org/docs/connector-v2/source/OceanBase), [Sink](https://seatunnel.apache.org/docs/connector-v2/sink/OceanBase) |
37 |
38 | ## 社区
39 |
40 | 当你需要帮助时,你可以在 [https://ask.oceanbase.com](https://ask.oceanbase.com) 上找到开发者和其他的社区伙伴。
41 |
42 | 当你发现项目缺陷时,请在 [issues](https://github.com/oceanbase/flink-connector-oceanbase/issues) 页面创建一个新的 issue。
43 |
44 | ## 许可证
45 |
46 | 更多信息见 [LICENSE](LICENSE)。
47 |
--------------------------------------------------------------------------------
/docs/cli/README.md:
--------------------------------------------------------------------------------
1 | # Flink Connector OceanBase CLI
2 |
3 | English | [简体中文](README_CN.md)
4 |
5 | The project is a set of CLI (command line interface) tools that supports submitting Flink jobs to migrate data from other data sources to OceanBase.
6 |
7 | ## Getting Started
8 |
9 | You can get the release packages at [Releases Page](https://github.com/oceanbase/flink-connector-oceanbase/releases) or [Maven Central](https://central.sonatype.com/artifact/com.oceanbase/flink-connector-oceanbase-cli), or get the latest snapshot packages at [Sonatype Snapshot](https://s01.oss.sonatype.org/content/repositories/snapshots/com/oceanbase/flink-connector-oceanbase-cli).
10 |
11 | You can also manually build it from the source code.
12 |
13 | ```shell
14 | git clone https://github.com/oceanbase/flink-connector-oceanbase.git
15 | cd flink-connector-oceanbase
16 | mvn clean package -DskipTests
17 | ```
18 |
19 | ## Features
20 |
21 | This command line tool supports the following connectors as sources:
22 |
23 | | Source Connectors | Supported Data Sources | Documentation |
24 | |-------------------|------------------------|---------------------------------------------------|
25 | | Flink CDC | MySQL | [Flink CDC Source](flink-cdc/flink-cdc-source.md) |
26 |
27 |
--------------------------------------------------------------------------------
/docs/cli/README_CN.md:
--------------------------------------------------------------------------------
1 | # Flink Connector OceanBase CLI
2 |
3 | [English](README.md) | 简体中文
4 |
5 | 本项目是一套 CLI(命令行界面)工具,支持提交 Flink 作业将数据从其他数据源迁移到 OceanBase。
6 |
7 | ## 开始上手
8 |
9 | 您可以在 [Releases 页面](https://github.com/oceanbase/flink-connector-oceanbase/releases) 或者 [Maven 中央仓库](https://central.sonatype.com/artifact/com.oceanbase/flink-connector-oceanbase-cli) 找到正式的发布版本,或者从 [Sonatype Snapshot](https://s01.oss.sonatype.org/content/repositories/snapshots/com/oceanbase/flink-connector-oceanbase-cli) 获取最新的快照版本。
10 |
11 | 您也可以通过源码构建的方式获得程序包。
12 |
13 | ```shell
14 | git clone https://github.com/oceanbase/flink-connector-oceanbase.git
15 | cd flink-connector-oceanbase
16 | mvn clean package -DskipTests
17 | ```
18 |
19 | ## 功能
20 |
21 | 此命令行工具支持以下连接器作为源端:
22 |
23 | | 源端连接器 | 支持的数据源 | 文档 |
24 | |-----------|--------|--------------------------------------------------|
25 | | Flink CDC | MySQL | [Flink CDC 源端](flink-cdc/flink-cdc-source_cn.md) |
26 |
27 |
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
16 |
17 | 4.0.0
18 |
19 | com.oceanbase
20 | flink-connector-oceanbase-parent
21 | ${revision}
22 |
23 |
24 | flink-connector-obkv-hbase
25 | jar
26 |
27 |
28 |
29 | com.oceanbase
30 | flink-connector-oceanbase-base
31 | ${project.version}
32 |
33 |
34 |
35 | com.oceanbase
36 | obkv-hbase-client
37 |
38 |
39 |
40 | com.oceanbase
41 | flink-connector-oceanbase-base
42 | ${project.version}
43 | test-jar
44 | test
45 |
46 |
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/src/main/java/com/oceanbase/connector/flink/OBKVHBaseConnectorOptions.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import com.oceanbase.connector.flink.utils.OptionUtils;
20 |
21 | import org.apache.flink.configuration.ConfigOption;
22 | import org.apache.flink.configuration.ConfigOptions;
23 |
24 | import java.util.Map;
25 | import java.util.Properties;
26 |
27 | public class OBKVHBaseConnectorOptions extends ConnectorOptions {
28 |
29 | private static final long serialVersionUID = 1L;
30 |
31 | public static final ConfigOption<String> SYS_USERNAME =
32 | ConfigOptions.key("sys.username")
33 | .stringType()
34 | .noDefaultValue()
35 | .withDescription("The username of system tenant.");
36 |
37 | public static final ConfigOption<String> SYS_PASSWORD =
38 | ConfigOptions.key("sys.password")
39 | .stringType()
40 | .noDefaultValue()
41 | .withDescription("The password of system tenant");
42 |
43 | public static final ConfigOption<String> HBASE_PROPERTIES =
44 | ConfigOptions.key("hbase.properties")
45 | .stringType()
46 | .noDefaultValue()
47 | .withDescription("Properties to configure 'obkv-hbase-client-java'.");
48 |
49 | public static final ConfigOption<Boolean> ODP_MODE =
50 | ConfigOptions.key("odp-mode")
51 | .booleanType()
52 | .defaultValue(false)
53 | .withDescription("Whether to use ODP to connect to OBKV.");
54 |
55 | public static final ConfigOption<String> ODP_IP =
56 | ConfigOptions.key("odp-ip")
57 | .stringType()
58 | .noDefaultValue()
59 | .withDescription("ODP IP address.");
60 |
61 | public static final ConfigOption<Integer> ODP_PORT =
62 | ConfigOptions.key("odp-port")
63 | .intType()
64 | .defaultValue(2885)
65 | .withDescription("ODP rpc port.");
66 |
67 | public OBKVHBaseConnectorOptions(Map<String, String> config) {
68 | super(config);
69 | }
70 |
71 | public String getSysUsername() {
72 | return allConfig.get(SYS_USERNAME);
73 | }
74 |
75 | public String getSysPassword() {
76 | return allConfig.get(SYS_PASSWORD);
77 | }
78 |
79 | public Properties getHBaseProperties() {
80 | return OptionUtils.parseProperties(allConfig.get(HBASE_PROPERTIES));
81 | }
82 |
83 | public Boolean getOdpMode() {
84 | return allConfig.get(ODP_MODE);
85 | }
86 |
87 | public String getOdpIP() {
88 | return allConfig.get(ODP_IP);
89 | }
90 |
91 | public Integer getOdpPort() {
92 | return allConfig.get(ODP_PORT);
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/src/main/java/com/oceanbase/connector/flink/OBKVHBaseDynamicTableSinkFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import com.oceanbase.connector.flink.sink.OBKVHBaseDynamicTableSink;
20 | import com.oceanbase.connector.flink.utils.OptionUtils;
21 |
22 | import org.apache.flink.configuration.ConfigOption;
23 | import org.apache.flink.table.catalog.Column;
24 | import org.apache.flink.table.catalog.ResolvedSchema;
25 | import org.apache.flink.table.connector.sink.DynamicTableSink;
26 | import org.apache.flink.table.factories.DynamicTableSinkFactory;
27 | import org.apache.flink.table.factories.FactoryUtil;
28 |
29 | import java.util.HashSet;
30 | import java.util.Map;
31 | import java.util.Objects;
32 | import java.util.Set;
33 | import java.util.stream.Collectors;
34 |
35 | public class OBKVHBaseDynamicTableSinkFactory implements DynamicTableSinkFactory {
36 |
37 | public static final String IDENTIFIER = "obkv-hbase";
38 |
39 | @Override
40 | public DynamicTableSink createDynamicTableSink(Context context) {
41 | FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
42 | helper.validate();
43 |
44 | ResolvedSchema resolvedSchema = context.getCatalogTable().getResolvedSchema();
45 | ResolvedSchema physicalSchema =
46 | new ResolvedSchema(
47 | resolvedSchema.getColumns().stream()
48 | .filter(Column::isPhysical)
49 | .collect(Collectors.toList()),
50 | resolvedSchema.getWatermarkSpecs(),
51 | resolvedSchema.getPrimaryKey().orElse(null));
52 | Map<String, String> options = context.getCatalogTable().getOptions();
53 | OptionUtils.printOptions(IDENTIFIER, options);
54 | OBKVHBaseConnectorOptions connectorOptions = new OBKVHBaseConnectorOptions(options);
55 | validateConnectorSinkOptions(connectorOptions);
56 | return new OBKVHBaseDynamicTableSink(physicalSchema, connectorOptions);
57 | }
58 |
59 | @Override
60 | public String factoryIdentifier() {
61 | return IDENTIFIER;
62 | }
63 |
64 | @Override
65 | public Set<ConfigOption<?>> requiredOptions() {
66 | Set<ConfigOption<?>> options = new HashSet<>();
67 | options.add(OBKVHBaseConnectorOptions.USERNAME);
68 | options.add(OBKVHBaseConnectorOptions.PASSWORD);
69 | options.add(OBKVHBaseConnectorOptions.SCHEMA_NAME);
70 | options.add(OBKVHBaseConnectorOptions.TABLE_NAME);
71 | return options;
72 | }
73 |
74 | @Override
75 | public Set<ConfigOption<?>> optionalOptions() {
76 | Set<ConfigOption<?>> options = new HashSet<>();
77 | options.add(OBKVHBaseConnectorOptions.URL);
78 | options.add(OBKVHBaseConnectorOptions.ODP_MODE);
79 | options.add(OBKVHBaseConnectorOptions.ODP_IP);
80 | options.add(OBKVHBaseConnectorOptions.ODP_PORT);
81 | options.add(OBKVHBaseConnectorOptions.SYS_USERNAME);
82 | options.add(OBKVHBaseConnectorOptions.SYS_PASSWORD);
83 | options.add(OBKVHBaseConnectorOptions.SYNC_WRITE);
84 | options.add(OBKVHBaseConnectorOptions.BUFFER_FLUSH_INTERVAL);
85 | options.add(OBKVHBaseConnectorOptions.BUFFER_SIZE);
86 | options.add(OBKVHBaseConnectorOptions.MAX_RETRIES);
87 | options.add(OBKVHBaseConnectorOptions.HBASE_PROPERTIES);
88 | return options;
89 | }
90 |
91 | private void validateConnectorSinkOptions(OBKVHBaseConnectorOptions connectorOptions) {
92 | if (connectorOptions.getOdpMode()) {
93 | Objects.requireNonNull(
94 | connectorOptions.getOdpIP(), "'odp-ip' is required if 'odp-mode' is 'true'");
95 | } else {
96 | Objects.requireNonNull(
97 | connectorOptions.getUrl(), "'url' is required if 'odp-mode' is 'false'");
98 | Objects.requireNonNull(
99 | connectorOptions.getSysUsername(),
100 | "'sys.username' is required if 'odp-mode' is 'false'");
101 | Objects.requireNonNull(
102 | connectorOptions.getSysPassword(),
103 | "'sys.password' is required if 'odp-mode' is 'false'");
104 | }
105 | }
106 | }
107 |
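For orientation, a Flink SQL table wired to this factory might look like the sketch below. It assumes the option keys inherited from `ConnectorOptions` are `url`, `username`, `password`, `schema-name` and `table-name`, and that each HBase column family is declared as a ROW-typed column (as suggested by `HTableInfo` and the `htable.sql` test fixtures); `docs/sink/flink-connector-obkv-hbase.md` is the authoritative reference.

```sql
-- Hypothetical sink table using the 'obkv-hbase' factory identifier defined above.
CREATE TABLE htable (
    rowkey  STRING,
    family1 ROW<q1 INT>,
    family2 ROW<q2 STRING, q3 INT>,
    PRIMARY KEY (rowkey) NOT ENFORCED
) WITH (
    'connector'    = 'obkv-hbase',
    'url'          = 'http://127.0.0.1:8080/services?...',
    'schema-name'  = 'test',
    'table-name'   = 'htable',
    'username'     = 'root@test',
    'password'     = '******',
    'sys.username' = 'root',
    'sys.password' = '******'
);
```

When 'odp-mode' is set to 'true', validateConnectorSinkOptions instead requires 'odp-ip' (with an optional 'odp-port') in place of 'url' and the sys tenant credentials.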
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/src/main/java/com/oceanbase/connector/flink/connection/OBKVHBaseConnectionProvider.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.connection;
18 |
19 | import com.oceanbase.connector.flink.OBKVHBaseConnectorOptions;
20 | import com.oceanbase.connector.flink.table.TableId;
21 | import com.oceanbase.connector.flink.utils.TableCache;
22 |
23 | import com.alipay.oceanbase.hbase.OHTableClient;
24 | import com.alipay.oceanbase.hbase.constants.OHConstants;
25 | import org.apache.hadoop.conf.Configuration;
26 | import org.apache.hadoop.hbase.client.Table;
27 | import org.slf4j.Logger;
28 | import org.slf4j.LoggerFactory;
29 |
30 | import java.util.Properties;
31 |
32 | public class OBKVHBaseConnectionProvider implements ConnectionProvider {
33 |
34 | private static final Logger LOG = LoggerFactory.getLogger(OBKVHBaseConnectionProvider.class);
35 |
36 | private static final long serialVersionUID = 1L;
37 |
38 | private final OBKVHBaseConnectorOptions options;
39 |
40 | private final TableCache<Table> tableCache;
41 |
42 | public OBKVHBaseConnectionProvider(OBKVHBaseConnectorOptions options) {
43 | this.options = options;
44 | this.tableCache = new TableCache<>();
45 | }
46 |
47 | public Table getHTableClient(TableId tableId) {
48 | return tableCache.get(
49 | tableId.identifier(),
50 | () -> {
51 | try {
52 | OHTableClient tableClient =
53 | new OHTableClient(
54 | tableId.getTableName(), getConfig(tableId.getSchemaName()));
55 | tableClient.init();
56 | return tableClient;
57 | } catch (Exception e) {
58 | throw new RuntimeException("Failed to initialize OHTableClient", e);
59 | }
60 | });
61 | }
62 |
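// Builds the obkv-hbase client configuration: in ODP mode the client connects through the
// proxy address and port, otherwise it connects directly using the config url plus the sys
// tenant credentials; any extra 'hbase.properties' entries are applied on top.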
63 | private Configuration getConfig(String databaseName) {
64 |
65 | Configuration conf = new Configuration();
66 | if (options.getOdpMode()) {
67 | conf.setBoolean(OHConstants.HBASE_OCEANBASE_ODP_MODE, options.getOdpMode());
68 | conf.set(OHConstants.HBASE_OCEANBASE_ODP_ADDR, options.getOdpIP());
69 | conf.setInt(OHConstants.HBASE_OCEANBASE_ODP_PORT, options.getOdpPort());
70 | conf.set(OHConstants.HBASE_OCEANBASE_DATABASE, databaseName);
71 | } else {
72 | String paramUrl = String.format("%s&database=%s", options.getUrl(), databaseName);
73 | LOG.debug("Set paramURL for database {} to {}", databaseName, paramUrl);
74 | conf.set(OHConstants.HBASE_OCEANBASE_PARAM_URL, paramUrl);
75 | conf.set(OHConstants.HBASE_OCEANBASE_SYS_USER_NAME, options.getSysUsername());
76 | conf.set(OHConstants.HBASE_OCEANBASE_SYS_PASSWORD, options.getSysPassword());
77 | }
78 | conf.set(OHConstants.HBASE_OCEANBASE_FULL_USER_NAME, options.getUsername());
79 | conf.set(OHConstants.HBASE_OCEANBASE_PASSWORD, options.getPassword());
80 | Properties hbaseProperties = options.getHBaseProperties();
81 | if (hbaseProperties != null) {
82 | for (String name : hbaseProperties.stringPropertyNames()) {
83 | conf.set(name, hbaseProperties.getProperty(name));
84 | }
85 | }
86 | return conf;
87 | }
88 |
89 | @Override
90 | public void close() throws Exception {
91 | for (Table table : tableCache.getAll()) {
92 | table.close();
93 | }
94 | tableCache.clear();
95 | }
96 | }
97 |
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/src/main/java/com/oceanbase/connector/flink/sink/OBKVHBaseDynamicTableSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink;
18 |
19 | import com.oceanbase.connector.flink.OBKVHBaseConnectorOptions;
20 | import com.oceanbase.connector.flink.table.DataChangeRecord;
21 | import com.oceanbase.connector.flink.table.HTableInfo;
22 | import com.oceanbase.connector.flink.table.OBKVHBaseRowDataSerializationSchema;
23 | import com.oceanbase.connector.flink.table.TableId;
24 |
25 | import org.apache.flink.table.catalog.ResolvedSchema;
26 | import org.apache.flink.table.connector.sink.DynamicTableSink;
27 |
28 | public class OBKVHBaseDynamicTableSink extends AbstractDynamicTableSink {
29 |
30 | private final OBKVHBaseConnectorOptions connectorOptions;
31 |
32 | public OBKVHBaseDynamicTableSink(
33 | ResolvedSchema resolvedSchema, OBKVHBaseConnectorOptions connectorOptions) {
34 | super(resolvedSchema);
35 | this.connectorOptions = connectorOptions;
36 | }
37 |
38 | @Override
39 | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
40 | return new SinkProvider(
41 | typeSerializer ->
42 | new OceanBaseSink<>(
43 | connectorOptions,
44 | typeSerializer,
45 | new OBKVHBaseRowDataSerializationSchema(
46 | new HTableInfo(
47 | new TableId(
48 | connectorOptions.getSchemaName(),
49 | connectorOptions.getTableName()),
50 | physicalSchema)),
51 | DataChangeRecord.KeyExtractor.simple(),
52 | new OBKVHBaseRecordFlusher(connectorOptions)));
53 | }
54 |
55 | @Override
56 | public DynamicTableSink copy() {
57 | return new OBKVHBaseDynamicTableSink(physicalSchema, connectorOptions);
58 | }
59 |
60 | @Override
61 | public String asSummaryString() {
62 | return "OBKV-HBASE";
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | com.oceanbase.connector.flink.OBKVHBaseDynamicTableSinkFactory
15 |
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | rootLogger.level = INFO
15 | rootLogger.appenderRef.test.ref = TestLogger
16 |
17 | appender.testlogger.name = TestLogger
18 | appender.testlogger.type = CONSOLE
19 | appender.testlogger.target = SYSTEM_ERR
20 | appender.testlogger.layout.type = PatternLayout
21 | appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n
22 |
--------------------------------------------------------------------------------
/flink-connector-obkv-hbase/src/test/resources/sql/htable.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
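-- Each HBase column family is backed by a relational table named '<htable>$<family>', where
-- K, Q, T and V hold the row key, column qualifier, timestamp and cell value respectively
-- (the OBKV-HBase storage layout these tests rely on).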
14 | CREATE TABLE `htable$family1`
15 | (
16 | `K` varbinary(1024) NOT NULL,
17 | `Q` varbinary(256) NOT NULL,
18 | `T` bigint(20) NOT NULL,
19 | `V` varbinary(1024) DEFAULT NULL,
20 | PRIMARY KEY (`K`, `Q`, `T`)
21 | );
22 |
23 | CREATE TABLE `htable$family2`
24 | (
25 | `K` varbinary(1024) NOT NULL,
26 | `Q` varbinary(256) NOT NULL,
27 | `T` bigint(20) NOT NULL,
28 | `V` varbinary(1024) DEFAULT NULL,
29 | PRIMARY KEY (`K`, `Q`, `T`)
30 | );
31 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
16 |
17 | 4.0.0
18 |
19 | com.oceanbase
20 | flink-connector-oceanbase-parent
21 | ${revision}
22 |
23 |
24 | flink-connector-oceanbase-base
25 |
26 |
27 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/connection/ConnectionProvider.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.connection;
18 |
19 | import java.io.Serializable;
20 |
21 | public interface ConnectionProvider extends AutoCloseable, Serializable {}
22 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/connection/OceanBaseUserInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.connection;
18 |
19 | import org.apache.commons.lang3.StringUtils;
20 |
21 | import java.io.Serializable;
22 |
23 | public class OceanBaseUserInfo implements Serializable {
24 |
25 | private static final long serialVersionUID = 1L;
26 |
27 | private String cluster;
28 | private String tenant;
29 | private final String user;
30 |
31 | public OceanBaseUserInfo(String cluster, String tenant, String user) {
32 | this.cluster = cluster;
33 | this.tenant = tenant;
34 | this.user = user;
35 | }
36 |
37 | public String getCluster() {
38 | return cluster;
39 | }
40 |
41 | public String getTenant() {
42 | return tenant;
43 | }
44 |
45 | public String getUser() {
46 | return user;
47 | }
48 |
49 | public void setCluster(String cluster) {
50 | this.cluster = cluster;
51 | }
52 |
53 | public void setTenant(String tenant) {
54 | this.tenant = tenant;
55 | }
56 |
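/**
 * Parses a connector username into its cluster, tenant and user parts. Supported formats are
 * "user@tenant#cluster", "cluster:tenant:user", "user@tenant" and a plain "user"; parts that
 * are not present are left null. For example, "root@sys#obcluster" yields cluster "obcluster",
 * tenant "sys" and user "root".
 */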
57 | public static OceanBaseUserInfo parse(String username) {
58 | final String sepUserAtTenant = "@";
59 | final String sepTenantAtCluster = "#";
60 | final String sep = ":";
61 | final int expectedSepCount = 2;
62 | if (username.contains(sepTenantAtCluster) && username.contains(sepUserAtTenant)) {
63 | // user@tenant#cluster
64 | String[] parts = username.split(sepTenantAtCluster);
65 | String[] userAndTenant = parts[0].split(sepUserAtTenant);
66 | return new OceanBaseUserInfo(parts[1], userAndTenant[1], userAndTenant[0]);
67 | } else if (StringUtils.countMatches(username, sep) == expectedSepCount) {
68 | // cluster:tenant:user
69 | String[] parts = username.split(sep);
70 | return new OceanBaseUserInfo(parts[0], parts[1], parts[2]);
71 | } else if (username.contains(sepUserAtTenant) && !username.contains(sepTenantAtCluster)) {
72 | // user@tenant
73 | String[] parts = username.split(sepUserAtTenant);
74 | return new OceanBaseUserInfo(null, parts[1], parts[0]);
75 | } else {
76 | // only user
77 | return new OceanBaseUserInfo(null, null, username);
78 | }
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/connection/OceanBaseVersion.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.connection;
18 |
19 | import org.apache.commons.lang3.StringUtils;
20 |
21 | import javax.annotation.Nonnull;
22 |
23 | import java.io.Serializable;
24 |
25 | public class OceanBaseVersion implements Serializable {
26 |
27 | private static final long serialVersionUID = 1L;
28 |
29 | private final String version;
30 |
31 | OceanBaseVersion(@Nonnull String version) {
32 | this.version = version;
33 | }
34 |
35 | public String getVersion() {
36 | return version;
37 | }
38 |
39 | public boolean isV4() {
40 | return version.startsWith("4.");
41 | }
42 |
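/**
 * Extracts the version number from the server's "version_comment" value, which is expected to
 * look like "OceanBase 4.x.x.x ..."; the second whitespace-separated token is taken as the
 * version string.
 */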
43 | public static OceanBaseVersion fromVersionComment(String versionComment) {
44 | if (StringUtils.isBlank(versionComment)) {
45 | throw new RuntimeException("Version comment must not be empty");
46 | }
47 | String[] parts = versionComment.split(" ");
48 | if (parts.length <= 1) {
49 | throw new RuntimeException("Invalid version comment: " + versionComment);
50 | }
51 | return new OceanBaseVersion(parts[1]);
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/sink/AbstractDynamicTableSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink;
18 |
19 | import org.apache.flink.api.common.typeutils.TypeSerializer;
20 | import org.apache.flink.api.connector.sink2.Sink;
21 | import org.apache.flink.streaming.api.datastream.DataStream;
22 | import org.apache.flink.streaming.api.datastream.DataStreamSink;
23 | import org.apache.flink.table.catalog.ResolvedSchema;
24 | import org.apache.flink.table.connector.ChangelogMode;
25 | import org.apache.flink.table.connector.ProviderContext;
26 | import org.apache.flink.table.connector.sink.DataStreamSinkProvider;
27 | import org.apache.flink.table.connector.sink.DynamicTableSink;
28 | import org.apache.flink.table.data.RowData;
29 | import org.apache.flink.types.RowKind;
30 | import org.apache.flink.util.function.SerializableFunction;
31 |
32 | import java.io.Serializable;
33 |
34 | import static org.apache.flink.util.Preconditions.checkState;
35 |
36 | public abstract class AbstractDynamicTableSink implements DynamicTableSink {
37 |
38 | protected final ResolvedSchema physicalSchema;
39 |
40 | public AbstractDynamicTableSink(ResolvedSchema physicalSchema) {
41 | this.physicalSchema = physicalSchema;
42 | }
43 |
44 | @Override
45 | public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
46 | validatePrimaryKey(requestedMode);
47 | return ChangelogMode.newBuilder()
48 | .addContainedKind(RowKind.INSERT)
49 | .addContainedKind(RowKind.DELETE)
50 | .addContainedKind(RowKind.UPDATE_AFTER)
51 | .build();
52 | }
53 |
54 | protected void validatePrimaryKey(ChangelogMode requestedMode) {
55 | checkState(
56 | ChangelogMode.insertOnly().equals(requestedMode)
57 | || physicalSchema.getPrimaryKey().isPresent(),
58 | "please declare primary key for sink table when query contains update/delete record.");
59 | }
60 |
61 | static class SinkProvider implements DataStreamSinkProvider, Serializable {
62 |
63 | private static final long serialVersionUID = 1L;
64 |
65 | private final SerializableFunction<TypeSerializer<RowData>, Sink<RowData>> sinkSupplier;
66 |
67 | public SinkProvider(
68 | SerializableFunction<TypeSerializer<RowData>, Sink<RowData>> sinkSupplier) {
69 | this.sinkSupplier = sinkSupplier;
70 | }
71 |
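// A type serializer for the input rows is only created when object reuse is enabled, so that
// records can be defensively copied before being buffered; otherwise null is passed and
// records are used as-is.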
72 | @Override
73 | public DataStreamSink<?> consumeDataStream(
74 | ProviderContext providerContext, DataStream<RowData> dataStream) {
75 | final boolean objectReuse =
76 | dataStream.getExecutionEnvironment().getConfig().isObjectReuseEnabled();
77 | TypeSerializer<RowData> typeSerializer =
78 | objectReuse
79 | ? dataStream.getType().createSerializer(dataStream.getExecutionConfig())
80 | : null;
81 | return dataStream.sinkTo(sinkSupplier.apply(typeSerializer));
82 | }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/sink/OceanBaseSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink;
18 |
19 | import com.oceanbase.connector.flink.ConnectorOptions;
20 | import com.oceanbase.connector.flink.table.DataChangeRecord;
21 | import com.oceanbase.connector.flink.table.RecordSerializationSchema;
22 |
23 | import org.apache.flink.api.common.typeutils.TypeSerializer;
24 | import org.apache.flink.api.connector.sink2.Sink;
25 | import org.apache.flink.api.connector.sink2.SinkWriter;
26 |
27 | public class OceanBaseSink<T> implements Sink<T> {
28 |
29 | private static final long serialVersionUID = 1L;
30 |
31 | private final ConnectorOptions options;
32 | private final TypeSerializer<T> typeSerializer;
33 | private final RecordSerializationSchema<T> recordSerializer;
34 | private final DataChangeRecord.KeyExtractor keyExtractor;
35 | private final RecordFlusher recordFlusher;
36 |
37 | public OceanBaseSink(
38 | ConnectorOptions options,
39 | TypeSerializer<T> typeSerializer,
40 | RecordSerializationSchema<T> recordSerializer,
41 | DataChangeRecord.KeyExtractor keyExtractor,
42 | RecordFlusher recordFlusher) {
43 | this.options = options;
44 | this.typeSerializer = typeSerializer;
45 | this.recordSerializer = recordSerializer;
46 | this.keyExtractor = keyExtractor;
47 | this.recordFlusher = recordFlusher;
48 | }
49 |
50 | @Override
51 | public SinkWriter<T> createWriter(InitContext context) {
52 | return new OceanBaseWriter<>(
53 | options, context, typeSerializer, recordSerializer, keyExtractor, recordFlusher);
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/sink/OceanBaseWriterEvent.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink;
18 |
19 | import java.io.Serializable;
20 |
21 | public enum OceanBaseWriterEvent {
22 | INITIALIZED,
23 | CLOSING;
24 |
25 | public interface Listener extends Serializable {
26 | void apply(OceanBaseWriterEvent event);
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/sink/RecordFlusher.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink;
18 |
19 | import com.oceanbase.connector.flink.table.DataChangeRecord;
20 | import com.oceanbase.connector.flink.table.SchemaChangeRecord;
21 |
22 | import javax.annotation.Nonnull;
23 |
24 | import java.io.Serializable;
25 | import java.util.List;
26 |
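/**
 * Flushes buffered records to OceanBase: schema change records are flushed one at a time (the
 * default implementation is a no-op), while data change records are flushed in batches.
 */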
27 | public interface RecordFlusher extends AutoCloseable, Serializable {
28 |
29 | default void flush(@Nonnull SchemaChangeRecord record) throws Exception {}
30 |
31 | void flush(List<DataChangeRecord> records) throws Exception;
32 | }
33 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/AbstractRecordSerializationSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import org.apache.flink.table.types.logical.LogicalType;
20 |
21 | import java.util.Map;
22 | import java.util.concurrent.ConcurrentHashMap;
23 |
24 | public abstract class AbstractRecordSerializationSchema<T> implements RecordSerializationSchema<T> {
25 |
26 | private final Map<LogicalType, SerializationRuntimeConverter> converters =
27 | new ConcurrentHashMap<>();
28 |
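// Converters are created lazily and cached per logical type; each one is wrapped so that null
// input values short-circuit to null without touching the underlying converter.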
29 | protected SerializationRuntimeConverter getOrCreateConverter(LogicalType type) {
30 | return converters.computeIfAbsent(type, this::createConverter);
31 | }
32 |
33 | protected abstract SerializationRuntimeConverter createNotNullConverter(LogicalType type);
34 |
35 | private SerializationRuntimeConverter createConverter(LogicalType type) {
36 | return wrapIntoNullableConverter(createNotNullConverter(type));
37 | }
38 |
39 | private static SerializationRuntimeConverter wrapIntoNullableConverter(
40 | SerializationRuntimeConverter converter) {
41 | return new SerializationRuntimeConverter() {
42 |
43 | private static final long serialVersionUID = 1L;
44 |
45 | @Override
46 | public Object convert(Object data) {
47 | if (data == null) {
48 | return null;
49 | }
50 | return converter.convert(data);
51 | }
52 | };
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/DataChangeRecord.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import java.io.Serializable;
20 | import java.util.Objects;
21 | import java.util.Optional;
22 |
23 | public class DataChangeRecord implements Record {
24 |
25 | private static final long serialVersionUID = 1L;
26 |
27 | public enum Type {
28 | UPSERT,
29 | DELETE,
30 | }
31 |
32 | public interface KeyExtractor extends Serializable {
33 |
34 | Object extract(DataChangeRecord record);
35 |
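/**
 * Returns an extractor that builds the record key from the values of the table's key
 * fields, or null when the table defines no key fields.
 */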
36 | static KeyExtractor simple() {
37 | return record ->
38 | Optional.ofNullable(record.getTable().getKey())
39 | .map(
40 | keys -> {
41 | Object[] array =
42 | keys.stream().map(record::getFieldValue).toArray();
43 | if (array.length == 0) {
44 | return null;
45 | } else {
46 | return new DataChangeRecordData(array);
47 | }
48 | })
49 | .orElse(null);
50 | }
51 | }
52 |
53 | private final Table table;
54 | private final Type type;
55 | private final DataChangeRecordData data;
56 |
57 | public DataChangeRecord(Table table, Type type, Object[] values) {
58 | this.table = table;
59 | this.type = type;
60 | this.data = new DataChangeRecordData(values);
61 | }
62 |
63 | @Override
64 | public TableId getTableId() {
65 | return table.getTableId();
66 | }
67 |
68 | public Table getTable() {
69 | return table;
70 | }
71 |
72 | public Type getType() {
73 | return type;
74 | }
75 |
76 | public boolean isUpsert() {
77 | return Type.UPSERT == getType();
78 | }
79 |
80 | public Object getFieldValue(String fieldName) {
81 | return data.getValue(table.getFieldIndex(fieldName));
82 | }
83 |
84 | @Override
85 | public boolean equals(Object object) {
86 | if (this == object) return true;
87 | if (object == null || getClass() != object.getClass()) return false;
88 | DataChangeRecord record = (DataChangeRecord) object;
89 | return Objects.equals(table, record.table)
90 | && type == record.type
91 | && Objects.equals(data, record.data);
92 | }
93 |
94 | @Override
95 | public int hashCode() {
96 | return Objects.hash(table, type, data);
97 | }
98 |
99 | @Override
100 | public String toString() {
101 | return "DataChangeRecord{" + "table=" + table + ", type=" + type + ", data=" + data + '}';
102 | }
103 | }
104 |
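105 | // Usage sketch (illustrative; "tableInfo" and "rowValues" are placeholders, not part of this class):
106 | //   DataChangeRecord record =
107 | //           new DataChangeRecord(tableInfo, DataChangeRecord.Type.UPSERT, rowValues);
108 | //   Object key = DataChangeRecord.KeyExtractor.simple().extract(record);
109 | // KeyExtractor.simple() builds a DataChangeRecordData from the table's key columns and returns
110 | // null when the table defines no key columns.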
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/DataChangeRecordData.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import java.io.Serializable;
20 | import java.util.Arrays;
21 |
22 | public class DataChangeRecordData implements Serializable {
23 |
24 | private static final long serialVersionUID = 1L;
25 |
26 | private final Object[] values;
27 |
28 | public DataChangeRecordData(Object[] values) {
29 | this.values = values;
30 | }
31 |
32 | public Object getValue(int index) {
33 | return values[index];
34 | }
35 |
36 | public boolean equals(Object o) {
37 | if (this == o) {
38 | return true;
39 | } else if (!(o instanceof DataChangeRecordData)) {
40 | return false;
41 | } else {
42 | DataChangeRecordData that = (DataChangeRecordData) o;
43 | return Arrays.deepEquals(this.values, that.values);
44 | }
45 | }
46 |
47 | public int hashCode() {
48 | return Arrays.deepHashCode(this.values);
49 | }
50 |
51 | @Override
52 | public String toString() {
53 | return Arrays.toString(values);
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/Record.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import java.io.Serializable;
20 |
21 | public interface Record extends Serializable {
22 | TableId getTableId();
23 | }
24 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/RecordSerializationSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import java.io.Serializable;
20 |
21 | public interface RecordSerializationSchema<T> extends Serializable {
22 | Record serialize(T data);
23 | }
24 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/SchemaChangeRecord.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | public class SchemaChangeRecord implements Record {
20 |
21 | private static final long serialVersionUID = 1L;
22 |
23 | public enum Type {
24 | ALTER,
25 | CREATE,
26 | DROP,
27 | TRUNCATE
28 | }
29 |
30 | private final TableId tableId;
31 | private final String sql;
32 | private final Type type;
33 |
34 | public SchemaChangeRecord(TableId tableId, String sql, Type type) {
35 | this.tableId = tableId;
36 | this.sql = sql;
37 | this.type = type;
38 | }
39 |
40 | @Override
41 | public TableId getTableId() {
42 | return tableId;
43 | }
44 |
45 | public Type getType() {
46 | return type;
47 | }
48 |
49 | public boolean shouldRefreshSchema() {
50 | return getType() != Type.TRUNCATE;
51 | }
52 |
53 | public String getSql() {
54 | return sql;
55 | }
56 |
57 | @Override
58 | public String toString() {
59 | return "SchemaChangeRecord{"
60 | + "tableId='"
61 | + tableId
62 | + '\''
63 | + ", sql='"
64 | + sql
65 | + '\''
66 | + ", type="
67 | + type
68 | + '}';
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/SerializationRuntimeConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import java.io.Serializable;
20 |
21 | public interface SerializationRuntimeConverter extends Serializable {
22 | Object convert(Object data);
23 | }
24 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/Table.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import java.io.Serializable;
20 | import java.util.List;
21 |
22 | public interface Table extends Serializable {
23 |
24 | TableId getTableId();
25 |
26 | List<String> getKey();
27 |
28 | int getFieldIndex(String fieldName);
29 | }
30 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/TableId.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import javax.annotation.Nonnull;
20 |
21 | import java.io.Serializable;
22 |
23 | public class TableId implements Serializable {
24 | private static final long serialVersionUID = 1L;
25 |
26 | @FunctionalInterface
27 | public interface Identifier extends Serializable {
28 | String identifier(String schemaName, String tableName);
29 | }
30 |
31 | private final Identifier identifier;
32 | private final String schemaName;
33 | private final String tableName;
34 |
35 | public TableId(@Nonnull String schemaName, @Nonnull String tableName) {
36 | this(
37 | (schema, table) -> String.format("\"%s\".\"%s\"", schema, table),
38 | schemaName,
39 | tableName);
40 | }
41 |
42 | public TableId(
43 | @Nonnull Identifier identifier, @Nonnull String schemaName, @Nonnull String tableName) {
44 | this.identifier = identifier;
45 | this.schemaName = schemaName;
46 | this.tableName = tableName;
47 | }
48 |
49 | public String identifier() {
50 | return identifier.identifier(schemaName, tableName);
51 | }
52 |
53 | public String getSchemaName() {
54 | return schemaName;
55 | }
56 |
57 | public String getTableName() {
58 | return tableName;
59 | }
60 | }
61 |
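62 | // The default constructor quotes both parts, e.g. new TableId("db", "t").identifier() yields
63 | // "db"."t"; pass a custom Identifier to produce a dialect-specific table name instead.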
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/table/TableInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import org.apache.flink.table.catalog.ResolvedSchema;
20 | import org.apache.flink.table.catalog.UniqueConstraint;
21 | import org.apache.flink.table.types.DataType;
22 | import org.apache.flink.table.types.logical.LogicalType;
23 | import org.apache.flink.util.function.SerializableFunction;
24 |
25 | import javax.annotation.Nonnull;
26 | import javax.annotation.Nullable;
27 |
28 | import java.util.List;
29 | import java.util.Map;
30 | import java.util.Objects;
31 | import java.util.stream.Collectors;
32 | import java.util.stream.IntStream;
33 |
34 | import static org.apache.flink.util.Preconditions.checkNotNull;
35 |
36 | public class TableInfo implements Table {
37 |
38 | private static final long serialVersionUID = 1L;
39 |
40 | private final TableId tableId;
41 | private final List<String> primaryKey;
42 | private final List<String> fieldNames;
43 | private final Map<String, Integer> fieldIndexMap;
44 | private final List<LogicalType> dataTypes;
45 | private final SerializableFunction<LogicalType, String> placeholderFunc;
46 |
47 | public TableInfo(TableId tableId, ResolvedSchema resolvedSchema) {
48 | this(
49 | tableId,
50 | resolvedSchema.getPrimaryKey().map(UniqueConstraint::getColumns).orElse(null),
51 | resolvedSchema.getColumnNames(),
52 | resolvedSchema.getColumnDataTypes().stream()
53 | .map(DataType::getLogicalType)
54 | .collect(Collectors.toList()),
55 | null);
56 | }
57 |
58 | public TableInfo(
59 | @Nonnull TableId tableId,
60 | @Nullable List<String> primaryKey,
61 | @Nonnull List<String> fieldNames,
62 | @Nonnull List<LogicalType> dataTypes,
63 | @Nullable SerializableFunction<LogicalType, String> placeholderFunc) {
64 | this.tableId = tableId;
65 | this.primaryKey = primaryKey;
66 | this.fieldNames = fieldNames;
67 | this.dataTypes = dataTypes;
68 | this.fieldIndexMap =
69 | IntStream.range(0, fieldNames.size())
70 | .boxed()
71 | .collect(Collectors.toMap(fieldNames::get, i -> i));
72 | this.placeholderFunc = placeholderFunc;
73 | }
74 |
75 | @Override
76 | public TableId getTableId() {
77 | return tableId;
78 | }
79 |
80 | @Override
81 | public List<String> getKey() {
82 | return primaryKey;
83 | }
84 |
85 | @Override
86 | public int getFieldIndex(String fieldName) {
87 | return checkNotNull(
88 | fieldIndexMap.get(fieldName), String.format("Field '%s' not found", fieldName));
89 | }
90 |
91 | public List<String> getFieldNames() {
92 | return fieldNames;
93 | }
94 |
95 | public List<LogicalType> getDataTypes() {
96 | return dataTypes;
97 | }
98 |
99 | public SerializableFunction<LogicalType, String> getPlaceholderFunc() {
100 | return placeholderFunc;
101 | }
102 |
103 | @Override
104 | public boolean equals(Object o) {
105 | if (this == o) {
106 | return true;
107 | }
108 | if (o == null || getClass() != o.getClass()) {
109 | return false;
110 | }
111 | TableInfo that = (TableInfo) o;
112 | return Objects.equals(this.tableId, that.tableId)
113 | && Objects.equals(this.primaryKey, that.primaryKey)
114 | && Objects.equals(this.fieldNames, that.fieldNames)
115 | && Objects.equals(this.fieldIndexMap, that.fieldIndexMap)
116 | && Objects.equals(this.dataTypes, that.dataTypes);
117 | }
118 | }
119 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/utils/OptionUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.utils;
18 |
19 | import org.apache.flink.configuration.ConfigurationUtils;
20 |
21 | import org.apache.commons.lang3.StringUtils;
22 | import org.slf4j.Logger;
23 | import org.slf4j.LoggerFactory;
24 |
25 | import java.util.Map;
26 | import java.util.Properties;
27 |
28 | public class OptionUtils {
29 |
30 | private static final Logger LOG = LoggerFactory.getLogger(OptionUtils.class);
31 |
32 | /** Utility class can not be instantiated. */
33 | private OptionUtils() {}
34 |
35 | public static void printOptions(String identifier, Map<String, String> config) {
36 | Map<String, String> hideMap = ConfigurationUtils.hideSensitiveValues(config);
37 | LOG.info("Print {} connector configuration:", identifier);
38 | for (String key : hideMap.keySet()) {
39 | LOG.info("{} = {}", key, hideMap.get(key));
40 | }
41 | }
42 |
43 | public static Properties parseProperties(String propsStr) {
44 | if (StringUtils.isBlank(propsStr)) {
45 | return null;
46 | }
47 | Properties props = new Properties();
48 | for (String propStr : propsStr.split(";")) {
49 | if (StringUtils.isBlank(propStr)) {
50 | continue;
51 | }
52 | String[] pair = propStr.trim().split("=");
53 | if (pair.length != 2) {
54 | throw new IllegalArgumentException("Invalid property '" + propStr + "': expected a single key=value pair");
55 | }
56 | props.put(pair[0].trim(), pair[1].trim());
57 | }
58 | return props;
59 | }
60 | }
61 |
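62 | // parseProperties expects semicolon-separated key=value pairs, e.g. parseProperties("k1=v1;k2=v2")
63 | // yields two entries; blank input returns null rather than an empty Properties instance.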
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/main/java/com/oceanbase/connector/flink/utils/TableCache.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.utils;
18 |
19 | import java.io.Serializable;
20 | import java.util.Collection;
21 | import java.util.Map;
22 | import java.util.concurrent.ConcurrentHashMap;
23 | import java.util.function.Supplier;
24 |
25 | public class TableCache<T> implements Serializable {
26 |
27 | private static final long serialVersionUID = 1L;
28 |
29 | private transient Map<String, T> cache;
30 |
31 | private Map<String, T> getCache() {
32 | if (cache == null) {
33 | cache = new ConcurrentHashMap<>();
34 | }
35 | return cache;
36 | }
37 |
38 | public Collection<T> getAll() {
39 | return getCache().values();
40 | }
41 |
42 | public T get(String tableId, Supplier<T> supplier) {
43 | if (getCache().containsKey(tableId)) {
44 | return getCache().get(tableId);
45 | }
46 | T t = supplier.get();
47 | getCache().put(tableId, t);
48 | return t;
49 | }
50 |
51 | public void remove(String tableId) {
52 | getCache().remove(tableId);
53 | }
54 |
55 | public void clear() {
56 | if (cache != null) {
57 | cache.clear();
58 | }
59 | }
60 | }
61 |
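62 | // The underlying map is transient and re-created lazily, so a TableCache field can live inside a
63 | // serializable writer; get(tableId, supplier) builds the value on first access and returns the
64 | // cached instance afterwards.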
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/test/java/com/oceanbase/connector/flink/OceanBaseMetadata.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | public interface OceanBaseMetadata {
20 |
21 | String getHost();
22 |
23 | int getPort();
24 |
25 | int getRpcPort();
26 |
27 | String getJdbcUrl();
28 |
29 | String getClusterName();
30 |
31 | String getSchemaName();
32 |
33 | String getSysUsername();
34 |
35 | String getSysPassword();
36 |
37 | String getUsername();
38 |
39 | String getPassword();
40 | }
41 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/test/java/com/oceanbase/connector/flink/OceanBaseOracleTestBase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import java.util.Map;
20 |
21 | public abstract class OceanBaseOracleTestBase extends OceanBaseTestBase {
22 |
23 | @Override
24 | public Map<String, String> getOptions() {
25 | Map<String, String> options = super.getOptions();
26 | options.put("driver-class-name", "com.oceanbase.jdbc.Driver");
27 | return options;
28 | }
29 |
30 | @Override
31 | public String getHost() {
32 | return System.getenv("HOST");
33 | }
34 |
35 | @Override
36 | public int getPort() {
37 | return Integer.parseInt(System.getenv("PORT"));
38 | }
39 |
40 | @Override
41 | public int getRpcPort() {
42 | return Integer.parseInt(System.getenv("RPC_PORT"));
43 | }
44 |
45 | @Override
46 | public String getJdbcUrl() {
47 | return String.format("jdbc:oceanbase://%s:%d/%s", getHost(), getPort(), getSchemaName());
48 | }
49 |
50 | @Override
51 | public String getClusterName() {
52 | return System.getenv("CLUSTER_NAME");
53 | }
54 |
55 | @Override
56 | public String getSchemaName() {
57 | return System.getenv("SCHEMA_NAME");
58 | }
59 |
60 | @Override
61 | public String getSysUsername() {
62 | return System.getenv("SYS_USERNAME");
63 | }
64 |
65 | @Override
66 | public String getSysPassword() {
67 | return System.getenv("SYS_PASSWORD");
68 | }
69 |
70 | @Override
71 | public String getUsername() {
72 | return System.getenv("USERNAME");
73 | }
74 |
75 | @Override
76 | public String getPassword() {
77 | return System.getenv("PASSWORD");
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-base/src/test/java/com/oceanbase/connector/flink/OceanBaseProxyContainer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import org.testcontainers.containers.GenericContainer;
20 | import org.testcontainers.utility.DockerImageName;
21 |
22 | import java.util.Objects;
23 |
24 | public class OceanBaseProxyContainer extends GenericContainer<OceanBaseProxyContainer> {
25 |
26 | private static final String IMAGE = "oceanbase/obproxy-ce";
27 |
28 | private static final int SQL_PORT = 2883;
29 | private static final int RPC_PORT = 2885;
30 | private static final String APP_NAME = "flink_oceanbase_test";
31 |
32 | private String configUrl;
33 | private String password;
34 |
35 | public OceanBaseProxyContainer(String version) {
36 | super(DockerImageName.parse(IMAGE + ":" + version));
37 | addExposedPorts(SQL_PORT, RPC_PORT);
38 | }
39 |
40 | @Override
41 | protected void configure() {
42 | addEnv("APP_NAME", APP_NAME);
43 | addEnv("CONFIG_URL", Objects.requireNonNull(configUrl));
44 | addEnv("PROXYRO_PASSWORD", Objects.requireNonNull(password));
45 | }
46 |
47 | public OceanBaseProxyContainer withConfigUrl(String configUrl) {
48 | this.configUrl = configUrl;
49 | return this;
50 | }
51 |
52 | public OceanBaseProxyContainer withPassword(String password) {
53 | this.password = password;
54 | return this;
55 | }
56 |
57 | public int getSqlPort() {
58 | return getMappedPort(SQL_PORT);
59 | }
60 |
61 | public int getRpcPort() {
62 | return getMappedPort(RPC_PORT);
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
16 |
17 | 4.0.0
18 |
19 |
20 | com.oceanbase
21 | flink-connector-oceanbase-parent
22 | ${revision}
23 |
24 |
25 | flink-connector-oceanbase-cli
26 | jar
27 |
28 |
29 |
30 | com.oceanbase
31 | flink-sql-connector-oceanbase
32 | ${project.version}
33 |
34 |
35 |
36 | org.apache.flink
37 | flink-sql-connector-mysql-cdc
38 | ${flink.cdc.version}
39 | provided
40 |
41 |
42 | org.apache.flink
43 | flink-shaded-guava
44 |
45 |
46 |
47 |
48 |
49 | com.oceanbase
50 | flink-connector-oceanbase-base
51 | ${project.version}
52 | test-jar
53 | test
54 |
55 |
56 |
57 | org.testcontainers
58 | mysql
59 | test
60 |
61 |
62 |
63 |
64 |
65 |
66 | org.apache.maven.plugins
67 | maven-shade-plugin
68 |
69 |
70 | shade-flink
71 |
72 | shade
73 |
74 | package
75 |
76 | false
77 | false
78 | false
79 |
80 |
81 | *:*
82 |
83 |
84 |
85 |
86 | com.oceanbase.connector.flink.Cli
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/config/CliConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.config;
18 |
19 | public class CliConfig {
20 |
21 | /** Option key for source type. */
22 | public static final String SOURCE_TYPE = "source-type";
23 |
24 | /** Option key for the CDC source configuration. */
25 | public static final String SOURCE_CONF = "source-conf";
26 |
27 | /** Option key for the OceanBase sink configuration. */
28 | public static final String SINK_CONF = "sink-conf";
29 |
30 | // ------------------------------------------------------------
31 | // Source types
32 | // ------------------------------------------------------------
33 | public static final String MYSQL_CDC = "mysql-cdc";
34 |
35 | // ------------------------------------------------------------
36 | // Sync configurations
37 | // ------------------------------------------------------------
38 | public static final String JOB_NAME = "job-name";
39 | public static final String DATABASE = "database";
40 | public static final String TABLE_PREFIX = "table-prefix";
41 | public static final String TABLE_SUFFIX = "table-suffix";
42 | public static final String INCLUDING_TABLES = "including-tables";
43 | public static final String EXCLUDING_TABLES = "excluding-tables";
44 | public static final String MULTI_TO_ONE_ORIGIN = "multi-to-one-origin";
45 | public static final String MULTI_TO_ONE_TARGET = "multi-to-one-target";
46 | public static final String CREATE_TABLE_ONLY = "create-table-only";
47 | public static final String IGNORE_DEFAULT_VALUE = "ignore-default-value";
48 | public static final String IGNORE_INCOMPATIBLE = "ignore-incompatible";
49 |
50 | // ------------------------------------------------------------
51 | // Temporal configurations
52 | // ------------------------------------------------------------
53 | public static final String CONVERTERS = "converters";
54 | public static final String DATE = "date";
55 | public static final String DATE_TYPE = "date.type";
56 | public static final String DATE_FORMAT_DATE = "date.format.date";
57 | public static final String DATE_FORMAT_DATETIME = "date.format.datetime";
58 | public static final String DATE_FORMAT_TIMESTAMP = "date.format.timestamp";
59 | public static final String DATE_FORMAT_TIMESTAMP_ZONE = "date.format.timestamp.zone";
60 | public static final String YEAR_MONTH_DAY_FORMAT = "yyyy-MM-dd";
61 | public static final String DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss";
62 | public static final String DATETIME_MICRO_FORMAT = "yyyy-MM-dd HH:mm:ss.SSSSSS";
63 | public static final String TIME_ZONE_UTC_8 = "UTC+8";
64 | public static final String FORMAT_DATE = "format.date";
65 | public static final String FORMAT_TIME = "format.time";
66 | public static final String FORMAT_DATETIME = "format.datetime";
67 | public static final String FORMAT_TIMESTAMP = "format.timestamp";
68 | public static final String FORMAT_TIMESTAMP_ZONE = "format.timestamp.zone";
69 | public static final String UPPERCASE_DATE = "DATE";
70 | public static final String TIME = "TIME";
71 | public static final String DATETIME = "DATETIME";
72 | public static final String TIMESTAMP = "TIMESTAMP";
73 | }
74 |
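75 | // These keys are read by the Cli entry point: "source-type" selects the source (currently only
76 | // mysql-cdc), while "source-conf" and "sink-conf" carry key=value options for the source and sink.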
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/process/ParsingProcessFunction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.oceanbase.connector.flink.process;
17 |
18 | import org.apache.flink.configuration.Configuration;
19 | import org.apache.flink.streaming.api.functions.ProcessFunction;
20 | import org.apache.flink.util.Collector;
21 | import org.apache.flink.util.OutputTag;
22 |
23 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
24 | import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
25 |
26 | import java.util.HashMap;
27 | import java.util.Map;
28 |
29 | public class ParsingProcessFunction extends ProcessFunction<String, Void> {
30 |
31 | private final TableNameConverter converter;
32 | private final ObjectMapper objectMapper;
33 |
34 | private transient Map<String, OutputTag<String>> recordOutputTags;
35 |
36 | public ParsingProcessFunction(TableNameConverter converter) {
37 | this.converter = converter;
38 | this.objectMapper = new ObjectMapper();
39 | }
40 |
41 | @Override
42 | public void open(Configuration parameters) throws Exception {
43 | recordOutputTags = new HashMap<>();
44 | }
45 |
46 | @Override
47 | public void processElement(
48 | String record, ProcessFunction<String, Void>.Context context, Collector<Void> collector)
49 | throws Exception {
50 | String tableName = getRecordTableName(record);
51 | String oceanbaseName = converter.convert(tableName);
52 | context.output(getRecordOutputTag(oceanbaseName), record);
53 | }
54 |
55 | protected String getRecordTableName(String record) throws Exception {
56 | JsonNode recordRoot = objectMapper.readValue(record, JsonNode.class);
57 | return extractJsonNode(recordRoot.get("source"), "table");
58 | }
59 |
60 | protected String extractJsonNode(JsonNode record, String key) {
61 | return record != null && record.get(key) != null ? record.get(key).asText() : null;
62 | }
63 |
64 | private OutputTag<String> getRecordOutputTag(String tableName) {
65 | return recordOutputTags.computeIfAbsent(
66 | tableName, ParsingProcessFunction::createRecordOutputTag);
67 | }
68 |
69 | public static OutputTag<String> createRecordOutputTag(String tableName) {
70 | return new OutputTag<String>("record-" + tableName) {};
71 | }
72 | }
73 |
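74 | // Each incoming CDC JSON record is routed to a per-table side output: the table name is read from
75 | // the record's "source"."table" field, converted by TableNameConverter, and matched to the
76 | // OutputTag created by createRecordOutputTag(convertedName); the main output itself stays empty.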
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/process/TableNameConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.oceanbase.connector.flink.process;
17 |
18 | import org.apache.flink.util.StringUtils;
19 |
20 | import java.io.Serializable;
21 | import java.util.HashMap;
22 | import java.util.Map;
23 | import java.util.regex.Pattern;
24 |
25 | public class TableNameConverter implements Serializable {
26 | private static final long serialVersionUID = 1L;
27 |
28 | private final String prefix;
29 | private final String suffix;
30 | private final Map<Pattern, String> multiToOneRulesPattern;
31 |
32 | public TableNameConverter(
33 | String prefix, String suffix, String multiToOneOrigin, String multiToOneTarget) {
34 | this.prefix = prefix == null ? "" : prefix;
35 | this.suffix = suffix == null ? "" : suffix;
36 | this.multiToOneRulesPattern = multiToOneRulesParser(multiToOneOrigin, multiToOneTarget);
37 | }
38 |
39 | public static HashMap<Pattern, String> multiToOneRulesParser(
40 | String multiToOneOrigin, String multiToOneTarget) {
41 | if (StringUtils.isNullOrWhitespaceOnly(multiToOneOrigin)
42 | || StringUtils.isNullOrWhitespaceOnly(multiToOneTarget)) {
43 | return null;
44 | }
45 | HashMap<Pattern, String> multiToOneRulesPattern = new HashMap<>();
46 | String[] origins = multiToOneOrigin.split("\\|");
47 | String[] targets = multiToOneTarget.split("\\|");
48 | if (origins.length != targets.length) {
49 | throw new IllegalArgumentException(
50 | "The lengths of multiToOneOrigin and multiToOneTarget are not equal");
51 | }
52 | try {
53 | for (int i = 0; i < origins.length; i++) {
54 | multiToOneRulesPattern.put(Pattern.compile(origins[i]), targets[i]);
55 | }
56 | } catch (Exception e) {
57 | throw new RuntimeException("Failed to parse multiToOnePattern", e);
58 | }
59 | return multiToOneRulesPattern;
60 | }
61 |
62 | public String convert(String tableName) {
63 | if (multiToOneRulesPattern == null) {
64 | return prefix + tableName + suffix;
65 | }
66 |
67 | String target = null;
68 |
69 | for (Map.Entry<Pattern, String> entry : multiToOneRulesPattern.entrySet()) {
70 | if (entry.getKey().matcher(tableName).matches()) {
71 | target = entry.getValue();
72 | }
73 | }
74 |
75 | if (target == null) {
76 | return prefix + tableName + suffix;
77 | }
78 | return target;
79 | }
80 | }
81 |
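82 | // Example (illustrative values): with prefix "ods_", no suffix and rule "t_order_\d+" -> "t_order",
83 | // convert("t_order_01") returns "t_order", while convert("t_user") falls back to "ods_t_user".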
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/source/FieldSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.source;
18 |
19 | /** The schema information of table fields. */
20 | public class FieldSchema {
21 | private final String name;
22 | private final FieldType type;
23 | private final String defaultValue;
24 | private final String comment;
25 | private final boolean nullable;
26 |
27 | public FieldSchema(
28 | String name, FieldType type, String defaultValue, String comment, boolean nullable) {
29 | this.name = name;
30 | this.type = type;
31 | this.defaultValue = defaultValue;
32 | this.comment = comment;
33 | this.nullable = nullable;
34 | }
35 |
36 | public String getName() {
37 | return name;
38 | }
39 |
40 | public FieldType getType() {
41 | return type;
42 | }
43 |
44 | public String getDefaultValue() {
45 | return defaultValue;
46 | }
47 |
48 | public String getComment() {
49 | return comment;
50 | }
51 |
52 | public Boolean getNullable() {
53 | return nullable;
54 | }
55 |
56 | @Override
57 | public String toString() {
58 | return "FieldSchema{"
59 | + "name='"
60 | + name
61 | + '\''
62 | + ", type="
63 | + type
64 | + ", defaultValue='"
65 | + defaultValue
66 | + '\''
67 | + ", comment='"
68 | + comment
69 | + '\''
70 | + ", nullable="
71 | + nullable
72 | + '}';
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/source/FieldType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.source;
18 |
19 | import org.apache.flink.table.types.logical.LogicalType;
20 |
21 | /** The field type. */
22 | public class FieldType {
23 |
24 | private final LogicalType flinkType;
25 | private final String obType;
26 |
27 | public FieldType(LogicalType flinkType, String obType) {
28 | this.flinkType = flinkType;
29 | this.obType = obType;
30 | }
31 |
32 | public LogicalType getFlinkType() {
33 | return flinkType;
34 | }
35 |
36 | public String getObType() {
37 | return obType;
38 | }
39 |
40 | @Override
41 | public String toString() {
42 | return "FieldType{" + "flinkType=" + flinkType + ", obType='" + obType + '\'' + '}';
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/source/JdbcMetadataAccessor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.source;
18 |
19 | import org.apache.flink.util.function.SupplierWithException;
20 |
21 | import java.sql.DatabaseMetaData;
22 | import java.sql.ResultSet;
23 | import java.sql.SQLException;
24 | import java.util.ArrayList;
25 | import java.util.List;
26 |
27 | /** The generic Jdbc MetadataAccessor. */
28 | public abstract class JdbcMetadataAccessor implements MetadataAccessor {
29 |
30 | public abstract SupplierWithException<DatabaseMetaData, SQLException> getMetadataSupplier();
31 |
32 | public abstract FieldType getFieldType(
33 | int jdbcType, String jdbcTypeName, int precision, int scale);
34 |
35 | public List<FieldSchema> getColumnInfo(
36 | String databaseName, String schemaName, String tableName) {
37 |
38 | List<FieldSchema> fieldSchemas = new ArrayList<>();
39 |
40 | try (ResultSet rs =
41 | getMetadataSupplier().get().getColumns(databaseName, schemaName, tableName, null)) {
42 | while (rs.next()) {
43 | String fieldName = rs.getString("COLUMN_NAME");
44 | String comment = rs.getString("REMARKS");
45 | int dataType = rs.getInt("DATA_TYPE");
46 | String typeName = rs.getString("TYPE_NAME");
47 | int precision = rs.getInt("COLUMN_SIZE");
48 | int scale = rs.getInt("DECIMAL_DIGITS");
49 | String columnDefault = rs.getString("COLUMN_DEF");
50 | boolean isNullable = rs.getBoolean("IS_NULLABLE");
51 |
52 | fieldSchemas.add(
53 | new FieldSchema(
54 | fieldName,
55 | getFieldType(dataType, typeName, precision, scale),
56 | columnDefault,
57 | comment,
58 | isNullable));
59 | }
60 | } catch (SQLException e) {
61 | throw new RuntimeException(e);
62 | }
63 | return fieldSchemas;
64 | }
65 |
66 | public List<String> getPrimaryKeys(String databaseName, String schemaName, String tableName) {
67 | List<String> primaryKeys = new ArrayList<>();
68 | try (ResultSet rs =
69 | getMetadataSupplier().get().getPrimaryKeys(databaseName, schemaName, tableName)) {
70 | while (rs.next()) {
71 | String fieldName = rs.getString("COLUMN_NAME");
72 | primaryKeys.add(fieldName);
73 | }
74 | } catch (SQLException e) {
75 | throw new RuntimeException(e);
76 | }
77 | return primaryKeys;
78 | }
79 | }
80 |
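81 | // Column and primary-key metadata are read via java.sql.DatabaseMetaData, so a concrete accessor
82 | // only has to supply the connection metadata and map JDBC type codes to FieldType.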
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/source/MetadataAccessor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.source;
18 |
19 | import java.util.List;
20 |
21 | /** {@code MetadataAccessor} is used to access the metadata of data sources. */
22 | public interface MetadataAccessor {
23 |
24 | List<FieldSchema> getColumnInfo(String databaseName, String schemaName, String tableName);
25 |
26 | List<String> getPrimaryKeys(String databaseName, String schemaName, String tableName);
27 | }
28 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/source/TableSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.source;
18 |
19 | import com.oceanbase.connector.flink.table.TableId;
20 | import com.oceanbase.connector.flink.table.TableInfo;
21 |
22 | import org.apache.flink.table.types.logical.LogicalType;
23 | import org.apache.flink.util.StringUtils;
24 |
25 | import java.util.ArrayList;
26 | import java.util.List;
27 | import java.util.StringJoiner;
28 |
29 | /** Schema information of generic data sources. */
30 | public class TableSchema {
31 |
32 | public final String databaseName;
33 | public final String schemaName;
34 | public final String tableName;
35 | public final String tableComment;
36 | public final List<FieldSchema> fields = new ArrayList<>();
37 | public final List<String> primaryKeys = new ArrayList<>();
38 |
39 | public TableSchema(
40 | String databaseName, String schemaName, String tableName, String tableComment) {
41 | this.databaseName = databaseName;
42 | this.schemaName = schemaName;
43 | this.tableName = tableName;
44 | this.tableComment = tableComment;
45 | }
46 |
47 | public String getTableIdentifier() {
48 | StringJoiner identifier = new StringJoiner(".");
49 | if (!StringUtils.isNullOrWhitespaceOnly(databaseName)) {
50 | identifier.add(databaseName);
51 | }
52 | if (!StringUtils.isNullOrWhitespaceOnly(schemaName)) {
53 | identifier.add(schemaName);
54 | }
55 |
56 | if (!StringUtils.isNullOrWhitespaceOnly(tableName)) {
57 | identifier.add(tableName);
58 | }
59 | return identifier.toString();
60 | }
61 |
62 | public String getDatabaseName() {
63 | return databaseName;
64 | }
65 |
66 | public String getTableName() {
67 | return tableName;
68 | }
69 |
70 | public List<FieldSchema> getFields() {
71 | return fields;
72 | }
73 |
74 | public List<String> getPrimaryKeys() {
75 | return primaryKeys;
76 | }
77 |
78 | public String getTableComment() {
79 | return tableComment;
80 | }
81 |
82 | public void addField(FieldSchema fieldSchema) {
83 | this.fields.add(fieldSchema);
84 | }
85 |
86 | public void addPrimaryKey(String primaryKey) {
87 | this.primaryKeys.add(primaryKey);
88 | }
89 |
90 | public TableInfo toTableInfo() {
91 | List<String> fieldNames = new ArrayList<>();
92 | List<LogicalType> fieldTypes = new ArrayList<>();
93 | for (FieldSchema fieldSchema : getFields()) {
94 | fieldNames.add(fieldSchema.getName());
95 | fieldTypes.add(fieldSchema.getType().getFlinkType());
96 | }
97 | return new TableInfo(
98 | new TableId(getDatabaseName(), getTableName()),
99 | getPrimaryKeys(),
100 | fieldNames,
101 | fieldTypes,
102 | null);
103 | }
104 | }
105 |
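106 | // Note: toTableInfo() builds the TableId from databaseName and tableName only; schemaName is used
107 | // solely by getTableIdentifier() when composing the dotted identifier.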
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/source/cdc/mysql/MysqlMetadataAccessor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.oceanbase.connector.flink.source.cdc.mysql;
17 |
18 | import com.oceanbase.connector.flink.source.FieldType;
19 | import com.oceanbase.connector.flink.source.JdbcMetadataAccessor;
20 |
21 | import org.apache.flink.util.function.SupplierWithException;
22 |
23 | import java.sql.DatabaseMetaData;
24 | import java.sql.SQLException;
25 |
26 | import static com.oceanbase.connector.flink.source.cdc.mysql.MysqlTypeConverter.toOceanBaseType;
27 | import static com.oceanbase.connector.flink.table.OceanBaseTypeMapper.convertToLogicalType;
28 |
29 | public class MysqlMetadataAccessor extends JdbcMetadataAccessor {
30 |
31 | private final SupplierWithException<DatabaseMetaData, SQLException> metadataSupplier;
32 |
33 | public MysqlMetadataAccessor(
34 | SupplierWithException<DatabaseMetaData, SQLException> metadataSupplier) {
35 | this.metadataSupplier = metadataSupplier;
36 | }
37 |
38 | @Override
39 | public SupplierWithException<DatabaseMetaData, SQLException> getMetadataSupplier() {
40 | return metadataSupplier;
41 | }
42 |
43 | @Override
44 | public FieldType getFieldType(int jdbcType, String jdbcTypeName, int precision, int scale) {
45 | return new FieldType(
46 | convertToLogicalType(jdbcType, precision, scale),
47 | toOceanBaseType(jdbcTypeName, precision, scale));
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/table/OceanBaseType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | public class OceanBaseType {
20 | public static final String BIT = "BIT";
21 | public static final String BOOLEAN = "BOOLEAN";
22 | public static final String TINYINT = "TINYINT";
23 | public static final String MEDIUMINT = "MEDIUMINT";
24 | public static final String SMALLINT = "SMALLINT";
25 | public static final String INT = "INT";
26 | public static final String BIGINT = "BIGINT";
27 | public static final String FLOAT = "FLOAT";
28 | public static final String DOUBLE = "DOUBLE";
29 | public static final String TIMESTAMP = "TIMESTAMP";
30 | public static final String DECIMAL = "DECIMAL";
31 | public static final String DATE = "DATE";
32 | public static final String TIME = "TIME";
33 | public static final String DATETIME = "DATETIME";
34 | public static final String YEAR = "YEAR";
35 | public static final String CHAR = "CHAR";
36 | public static final String VARCHAR = "VARCHAR";
37 | public static final String TEXT = "TEXT";
38 | public static final String BINARY = "BINARY";
39 | public static final String VARBINARY = "VARBINARY";
40 | public static final String BLOB = "BLOB";
41 | public static final String ENUM = "ENUM";
42 | public static final String SET = "SET";
43 | public static final String JSON = "JSON";
44 | }
45 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/table/OceanBaseTypeMapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import org.apache.flink.table.api.DataTypes;
20 | import org.apache.flink.table.types.logical.LogicalType;
21 |
22 | import java.sql.Types;
23 |
24 | public class OceanBaseTypeMapper {
25 |
26 | public static LogicalType convertToLogicalType(int jdbcType, int precision, int scale) {
27 | switch (jdbcType) {
28 | case Types.INTEGER:
29 | return DataTypes.INT().getLogicalType();
30 | case Types.BIGINT:
31 | return DataTypes.BIGINT().getLogicalType();
32 | case Types.DOUBLE:
33 | return DataTypes.DOUBLE().getLogicalType();
34 | case Types.FLOAT:
35 | case Types.REAL:
36 | return DataTypes.FLOAT().getLogicalType();
37 | case Types.LONGVARCHAR:
38 | case Types.VARCHAR:
39 | if (precision > 0) {
40 | return DataTypes.VARCHAR(precision).getLogicalType();
41 | } else {
42 | return DataTypes.STRING().getLogicalType();
43 | }
44 | case Types.CHAR:
45 | if (precision > 0) {
46 | return DataTypes.CHAR(precision).getLogicalType();
47 | } else {
48 | return DataTypes.STRING().getLogicalType();
49 | }
50 | case Types.TIMESTAMP:
51 | if (precision > 0 && precision <= 3) {
52 | return DataTypes.TIMESTAMP(precision).getLogicalType();
53 | } else {
54 | return DataTypes.TIMESTAMP(3).getLogicalType();
55 | }
56 | case Types.DATE:
57 | return DataTypes.DATE().getLogicalType();
58 | case Types.TIME:
59 | if (precision > 0 && precision <= 3) {
60 | return DataTypes.TIME(precision).getLogicalType();
61 | } else {
62 | return DataTypes.TIME(0).getLogicalType();
63 | }
64 | case Types.DECIMAL:
65 | case Types.NUMERIC:
66 | if (precision > 0 && precision <= 38 && scale >= 0) {
67 | return DataTypes.DECIMAL(precision, scale).getLogicalType();
68 | } else {
69 | return DataTypes.DECIMAL(10, 0).getLogicalType();
70 | }
71 | case Types.BOOLEAN:
72 | case Types.BIT:
73 | return DataTypes.BOOLEAN().getLogicalType();
74 | case Types.TINYINT:
75 | return DataTypes.TINYINT().getLogicalType();
76 | case Types.SMALLINT:
77 | return DataTypes.SMALLINT().getLogicalType();
78 | case Types.BLOB:
79 | return DataTypes.BYTES().getLogicalType();
80 | case Types.CLOB:
81 | return DataTypes.STRING().getLogicalType();
82 | case Types.BINARY:
83 | if (precision > 0) {
84 | return DataTypes.BINARY(precision).getLogicalType();
85 | } else {
86 | return DataTypes.BYTES().getLogicalType();
87 | }
88 | case Types.LONGVARBINARY:
89 | case Types.VARBINARY:
90 | if (precision > 0) {
91 | return DataTypes.VARBINARY(precision).getLogicalType();
92 | } else {
93 | return DataTypes.BYTES().getLogicalType();
94 | }
95 | default:
96 | throw new IllegalArgumentException("Unsupported JDBC type: " + jdbcType);
97 | }
98 | }
99 | }
100 |
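101 | // Example: convertToLogicalType(java.sql.Types.DECIMAL, 20, 2) maps to DECIMAL(20, 2), while an
102 | // out-of-range precision such as 65 falls back to the default DECIMAL(10, 0).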
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/main/java/com/oceanbase/connector/flink/utils/OceanBaseCatalogUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.oceanbase.connector.flink.utils;
17 |
18 | import com.oceanbase.connector.flink.connection.OceanBaseConnectionProvider;
19 | import com.oceanbase.connector.flink.dialect.OceanBaseOracleDialect;
20 | import com.oceanbase.connector.flink.table.OceanBaseTableSchema;
21 |
22 | import org.apache.commons.compress.utils.Lists;
23 |
24 | import java.util.HashSet;
25 | import java.util.List;
26 | import java.util.Set;
27 |
28 | public class OceanBaseCatalogUtils extends OceanBaseJdbcUtils {
29 |
30 | private static final Set<String> builtinDatabases =
31 | new HashSet<String>() {
32 | {
33 | add("__public");
34 | add("information_schema");
35 | add("mysql");
36 | add("oceanbase");
37 | add("LBACSYS");
38 | add("ORAAUDITOR");
39 | }
40 | };
41 |
42 | @SuppressWarnings("unchecked")
43 | public static List<String> listDatabases(OceanBaseConnectionProvider connectionProvider) {
44 | List<String> databases = Lists.newArrayList();
45 | return (List<String>)
46 | query(
47 | connectionProvider::getConnection,
48 | connectionProvider.getDialect().getListSchemaStatement(),
49 | rs -> {
50 | while (rs.next()) {
51 | String database = rs.getString(1);
52 | if (!builtinDatabases.contains(database)) {
53 | databases.add(database);
54 | }
55 | }
56 | return databases;
57 | });
58 | }
59 |
60 | public static boolean databaseExists(
61 | OceanBaseConnectionProvider connectionProvider, String database) {
62 | return listDatabases(connectionProvider).contains(database);
63 | }
64 |
65 | public static void createDatabase(
66 | OceanBaseConnectionProvider connectionProvider, String database) {
67 | if (connectionProvider.getDialect() instanceof OceanBaseOracleDialect) {
68 | throw new UnsupportedOperationException();
69 | }
70 | execute(
71 | connectionProvider::getConnection,
72 | String.format("CREATE DATABASE IF NOT EXISTS %s", database));
73 | }
74 |
75 | @SuppressWarnings("unchecked")
76 | public static List<String> listTables(
77 | OceanBaseConnectionProvider connectionProvider, String databaseName) {
78 | if (!databaseExists(connectionProvider, databaseName)) {
79 | throw new RuntimeException("Database " + databaseName + " does not exist");
80 | }
81 | List<String> tables = Lists.newArrayList();
82 | return (List<String>)
83 | query(
84 | connectionProvider::getConnection,
85 | connectionProvider.getDialect().getListTableStatement(databaseName),
86 | rs -> {
87 | while (rs.next()) {
88 | tables.add(rs.getString(1));
89 | }
90 | return tables;
91 | });
92 | }
93 |
94 | public static boolean tableExists(
95 | OceanBaseConnectionProvider connectionProvider, String database, String table) {
96 | return databaseExists(connectionProvider, database)
97 | && listTables(connectionProvider, database).contains(table);
98 | }
99 |
100 | public static void createTable(
101 | OceanBaseConnectionProvider connectionProvider, OceanBaseTableSchema schema) {
102 | execute(connectionProvider::getConnection, schema.generateCreateTableDDL());
103 | }
104 | }
105 |
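A hedged usage sketch for the catalog helpers above; "provider" stands for an already configured OceanBaseConnectionProvider (its construction is not shown here), and the database name is a placeholder.

import com.oceanbase.connector.flink.connection.OceanBaseConnectionProvider;
import com.oceanbase.connector.flink.utils.OceanBaseCatalogUtils;

import java.util.List;

public class CatalogUtilsExample {

    // Ensures the target database exists, then lists its tables.
    static List<String> prepareDatabase(OceanBaseConnectionProvider provider, String database) {
        if (!OceanBaseCatalogUtils.databaseExists(provider, database)) {
            // Note: createDatabase throws UnsupportedOperationException for Oracle-mode tenants.
            OceanBaseCatalogUtils.createDatabase(provider, database);
        }
        return OceanBaseCatalogUtils.listTables(provider, database);
    }
}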
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/test/java/com/oceanbase/connector/flink/MysqlCdcSyncITCase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
20 |
21 | import org.junit.jupiter.api.AfterAll;
22 | import org.junit.jupiter.api.BeforeAll;
23 | import org.junit.jupiter.api.Test;
24 | import org.slf4j.Logger;
25 | import org.slf4j.LoggerFactory;
26 | import org.testcontainers.containers.GenericContainer;
27 | import org.testcontainers.containers.MySQLContainer;
28 | import org.testcontainers.containers.output.Slf4jLogConsumer;
29 |
30 | import java.util.stream.Stream;
31 |
32 | public class MysqlCdcSyncITCase extends OceanBaseMySQLTestBase {
33 | private static final Logger LOG = LoggerFactory.getLogger(MysqlCdcSyncITCase.class);
34 |
35 | private static final MySQLContainer<?> MYSQL_CONTAINER =
36 | new MySQLContainer<>("mysql:8.0.20")
37 | .withConfigurationOverride("docker/mysql")
38 | .withInitScript("sql/mysql-cdc.sql")
39 | .withNetwork(NETWORK)
40 | .withExposedPorts(3306)
41 | .withDatabaseName("test")
42 | .withUsername("root")
43 | .withPassword("mysqlpw")
44 | .withLogConsumer(new Slf4jLogConsumer(LOG));
45 |
46 | @BeforeAll
47 | public static void setup() {
48 | CONTAINER.withLogConsumer(new Slf4jLogConsumer(LOG)).start();
49 | MYSQL_CONTAINER.start();
50 | }
51 |
52 | @AfterAll
53 | public static void tearDown() {
54 | Stream.of(CONTAINER, MYSQL_CONTAINER).forEach(GenericContainer::stop);
55 | }
56 |
57 | @Test
58 | public void testMysqlCdcSync() throws Exception {
59 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
60 | Cli.setStreamExecutionEnvironmentForTesting(env);
61 |
62 | Cli.main(
63 | new String[] {
64 | "--source-type",
65 | "mysql-cdc",
66 | "--source-conf",
67 | "hostname=" + getContainerIP(MYSQL_CONTAINER),
68 | "--source-conf",
69 | "port=" + MySQLContainer.MYSQL_PORT,
70 | "--source-conf",
71 | "username=" + MYSQL_CONTAINER.getUsername(),
72 | "--source-conf",
73 | "password=" + MYSQL_CONTAINER.getPassword(),
74 | "--source-conf",
75 | "database-name=" + MYSQL_CONTAINER.getDatabaseName(),
76 | "--source-conf",
77 | "table-name=.*",
78 | "--sink-conf",
79 | "url=" + CONTAINER.getJdbcUrl(),
80 | "--sink-conf",
81 | "username=" + CONTAINER.getUsername(),
82 | "--sink-conf",
83 | "password=" + CONTAINER.getPassword(),
84 | "--job-name",
85 | "test-mysql-cdc-sync",
86 | "--database",
87 | CONTAINER.getDatabaseName(),
88 | "--including-tables",
89 | ".*"
90 | });
91 |
92 | waitingAndAssertTableCount("products", 9);
93 | waitingAndAssertTableCount("customers", 4);
94 |
95 | Cli.getJobClientForTesting().cancel();
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/test/resources/docker/mysql/my.cnf:
--------------------------------------------------------------------------------
1 | #
2 | # Licensed to the Apache Software Foundation (ASF) under one or more
3 | # contributor license agreements. See the NOTICE file distributed with
4 | # this work for additional information regarding copyright ownership.
5 | # The ASF licenses this file to You under the Apache License, Version 2.0
6 | # (the "License"); you may not use this file except in compliance with
7 | # the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | #
17 |
18 | # For advice on how to change settings please see
19 | # http://dev.mysql.com/doc/refman/5.7/en/server-configuration-defaults.html
20 |
21 | [mysqld]
22 | #
23 | # Remove leading # and set to the amount of RAM for the most important data
24 | # cache in MySQL. Start at 70% of total RAM for dedicated server, else 10%.
25 | # innodb_buffer_pool_size = 128M
26 | #
27 | # Remove leading # to turn on a very important data integrity option: logging
28 | # changes to the binary log between backups.
29 | # log_bin
30 | #
31 | # Remove leading # to set options mainly useful for reporting servers.
32 | # The server defaults are faster for transactions and fast SELECTs.
33 | # Adjust sizes as needed, experiment to find the optimal values.
34 | # join_buffer_size = 128M
35 | # sort_buffer_size = 2M
36 | # read_rnd_buffer_size = 2M
37 | skip-host-cache
38 | skip-name-resolve
39 | #datadir=/var/lib/mysql
40 | #socket=/var/lib/mysql/mysql.sock
41 | secure-file-priv=/var/lib/mysql
42 | user=mysql
43 |
44 | # Disabling symbolic-links is recommended to prevent assorted security risks
45 | symbolic-links=0
46 |
47 | #log-error=/var/log/mysqld.log
48 | #pid-file=/var/run/mysqld/mysqld.pid
49 |
50 | # ----------------------------------------------
51 | # Enable the binlog for replication & CDC
52 | # ----------------------------------------------
53 |
54 | # Enable binary replication log and set the prefix, expiration, and log format.
55 | # The prefix is arbitrary, expiration can be short for integration tests but would
56 | # be longer on a production system. Row-level info is required for ingest to work.
57 | # Server ID is required, but this will vary on production systems
58 | server-id = 223344
59 | log_bin = mysql-bin
60 | expire_logs_days = 1
61 | binlog_format = row
62 |
63 | # enable gtid mode
64 | gtid_mode = on
65 | enforce_gtid_consistency = on
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | rootLogger.level = INFO
15 | rootLogger.appenderRef.test.ref = TestLogger
16 |
17 | appender.testlogger.name = TestLogger
18 | appender.testlogger.type = CONSOLE
19 | appender.testlogger.target = SYSTEM_ERR
20 | appender.testlogger.layout.type = PatternLayout
21 | appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n
22 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-cli/src/test/resources/sql/mysql-cdc.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | -- Create and populate our products using a single insert with many rows
15 | CREATE TABLE products
16 | (
17 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
18 | name VARCHAR(255) NOT NULL DEFAULT 'flink',
19 | description VARCHAR(512),
20 | weight FLOAT
21 | );
22 | ALTER TABLE products AUTO_INCREMENT = 101;
23 |
24 | INSERT INTO products
25 | VALUES (default, "scooter", "Small 2-wheel scooter", 3.14),
26 | (default, "car battery", "12V car battery", 8.1),
27 | (default, "12-pack drill bits", "12-pack of drill bits with sizes ranging from #40 to #3", 0.8),
28 | (default, "hammer", "12oz carpenter's hammer", 0.75),
29 | (default, "hammer", "14oz carpenter's hammer", 0.875),
30 | (default, "hammer", "16oz carpenter's hammer", 1.0),
31 | (default, "rocks", "box of assorted rocks", 5.3),
32 | (default, "jacket", "water resistent black wind breaker", 0.1),
33 | (default, "spare tire", "24 inch spare tire", 22.2);
34 |
35 | -- Create some customers ...
36 | CREATE TABLE customers
37 | (
38 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
39 | first_name VARCHAR(255) NOT NULL,
40 | last_name VARCHAR(255) NOT NULL,
41 | email VARCHAR(255) NOT NULL UNIQUE KEY
42 | ) AUTO_INCREMENT = 1001;
43 |
44 |
45 | INSERT INTO customers
46 | VALUES (default, "Sally", "Thomas", "sally.thomas@acme.com"),
47 | (default, "George", "Bailey", "gbailey@foobar.com"),
48 | (default, "Edward", "Walker", "ed@walker.com"),
49 | (default, "Anne", "Kretchmar", "annek@noanswer.org");
50 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!-- Copyright 2024 OceanBase. Licensed under the Apache License, Version 2.0; see http://www.apache.org/licenses/LICENSE-2.0 -->
16 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
17 |     <modelVersion>4.0.0</modelVersion>
18 |     <parent>
19 |         <groupId>com.oceanbase</groupId>
20 |         <artifactId>flink-connector-oceanbase-parent</artifactId>
21 |         <version>${revision}</version>
22 |     </parent>
23 |
24 |     <artifactId>flink-connector-oceanbase-directload</artifactId>
25 |     <packaging>jar</packaging>
26 |
27 |     <dependencies>
28 |         <dependency>
29 |             <groupId>com.oceanbase</groupId>
30 |             <artifactId>obkv-table-client</artifactId>
31 |         </dependency>
32 |
33 |         <dependency>
34 |             <groupId>com.oceanbase</groupId>
35 |             <artifactId>flink-connector-oceanbase-base</artifactId>
36 |             <version>${project.version}</version>
37 |         </dependency>
38 |
39 |         <dependency>
40 |             <groupId>com.oceanbase</groupId>
41 |             <artifactId>flink-connector-oceanbase-base</artifactId>
42 |             <version>${project.version}</version>
43 |             <type>test-jar</type>
44 |             <scope>test</scope>
45 |         </dependency>
46 |     </dependencies>
47 |
48 | </project>
49 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/main/java/com/oceanbase/connector/flink/OBDirectLoadDynamicTableSinkFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import com.oceanbase.connector.flink.sink.OBDirectLoadDynamicTableSink;
20 | import com.oceanbase.connector.flink.utils.OptionUtils;
21 |
22 | import org.apache.flink.api.common.RuntimeExecutionMode;
23 | import org.apache.flink.configuration.ConfigOption;
24 | import org.apache.flink.configuration.ExecutionOptions;
25 | import org.apache.flink.configuration.TaskManagerOptions;
26 | import org.apache.flink.table.catalog.Column;
27 | import org.apache.flink.table.catalog.ResolvedSchema;
28 | import org.apache.flink.table.connector.sink.DynamicTableSink;
29 | import org.apache.flink.table.factories.DynamicTableSinkFactory;
30 | import org.apache.flink.table.factories.FactoryUtil;
31 |
32 | import java.util.HashSet;
33 | import java.util.Map;
34 | import java.util.Set;
35 | import java.util.stream.Collectors;
36 |
37 | /** The factory of direct-load dynamic table sink. see {@link DynamicTableSinkFactory}. */
38 | public class OBDirectLoadDynamicTableSinkFactory implements DynamicTableSinkFactory {
39 |
40 | public static final String IDENTIFIER = "oceanbase-directload";
41 |
42 | @Override
43 | public DynamicTableSink createDynamicTableSink(Context context) {
44 | FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
45 | helper.validate();
46 |
47 | ResolvedSchema resolvedSchema = context.getCatalogTable().getResolvedSchema();
48 | ResolvedSchema physicalSchema =
49 | new ResolvedSchema(
50 | resolvedSchema.getColumns().stream()
51 | .filter(Column::isPhysical)
52 | .collect(Collectors.toList()),
53 | resolvedSchema.getWatermarkSpecs(),
54 | resolvedSchema.getPrimaryKey().orElse(null));
55 | Map<String, String> options = context.getCatalogTable().getOptions();
56 | OptionUtils.printOptions(IDENTIFIER, options);
57 | RuntimeExecutionMode runtimeExecutionMode =
58 | context.getConfiguration().get(ExecutionOptions.RUNTIME_MODE);
59 | int numberOfTaskSlots = context.getConfiguration().get(TaskManagerOptions.NUM_TASK_SLOTS);
60 | return new OBDirectLoadDynamicTableSink(
61 | physicalSchema,
62 | new OBDirectLoadConnectorOptions(options),
63 | runtimeExecutionMode,
64 | numberOfTaskSlots);
65 | }
66 |
67 | @Override
68 | public String factoryIdentifier() {
69 | return IDENTIFIER;
70 | }
71 |
72 | @Override
73 | public Set<ConfigOption<?>> requiredOptions() {
74 | Set<ConfigOption<?>> options = new HashSet<>();
75 | options.add(OBDirectLoadConnectorOptions.HOST);
76 | options.add(OBDirectLoadConnectorOptions.PORT);
77 | options.add(OBDirectLoadConnectorOptions.USERNAME);
78 | options.add(OBDirectLoadConnectorOptions.TENANT_NAME);
79 | options.add(OBDirectLoadConnectorOptions.PASSWORD);
80 | options.add(OBDirectLoadConnectorOptions.SCHEMA_NAME);
81 | options.add(OBDirectLoadConnectorOptions.TABLE_NAME);
82 | return options;
83 | }
84 |
85 | @Override
86 | public Set<ConfigOption<?>> optionalOptions() {
87 | Set<ConfigOption<?>> options = new HashSet<>();
88 | options.add(OBDirectLoadConnectorOptions.EXECUTION_ID);
89 | options.add(OBDirectLoadConnectorOptions.PARALLEL);
90 | options.add(OBDirectLoadConnectorOptions.MAX_ERROR_ROWS);
91 | options.add(OBDirectLoadConnectorOptions.DUP_ACTION);
92 | options.add(OBDirectLoadConnectorOptions.TIMEOUT);
93 | options.add(OBDirectLoadConnectorOptions.HEARTBEAT_TIMEOUT);
94 | options.add(OBDirectLoadConnectorOptions.LOAD_METHOD);
95 | options.add(OBDirectLoadConnectorOptions.ENABLE_MULTI_NODE_WRITE);
96 | options.add(OBDirectLoadConnectorOptions.BUFFER_SIZE);
97 | return options;
98 | }
99 | }
100 |
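A sketch of how this factory is typically reached from the Table API; the connector identifier and the required option set come from the factory above, while the literal option keys ('host', 'port', 'username', 'tenant-name', 'password', 'schema-name', 'table-name') and all values are assumptions for illustration. Direct load requires Flink batch execution mode (see OBDirectLoadDynamicTableSink below).

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class DirectLoadSinkSqlExample {
    public static void main(String[] args) throws Exception {
        // Direct load only works in batch execution mode.
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        tEnv.executeSql(
                "CREATE TABLE products_sink ("
                        + " id INT PRIMARY KEY NOT ENFORCED,"
                        + " name STRING"
                        + ") WITH ("
                        + " 'connector' = 'oceanbase-directload',"
                        + " 'host' = '127.0.0.1',"
                        + " 'port' = '2882',"
                        + " 'username' = 'root',"
                        + " 'tenant-name' = 'test',"
                        + " 'password' = '',"
                        + " 'schema-name' = 'test',"
                        + " 'table-name' = 'products_sink'"
                        + ")");
        tEnv.executeSql("INSERT INTO products_sink VALUES (1, 'scooter')").await();
    }
}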
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/main/java/com/oceanbase/connector/flink/directload/DirectLoadUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.directload;
18 |
19 | import com.oceanbase.connector.flink.OBDirectLoadConnectorOptions;
20 |
21 | /** Utility methods for building a {@link DirectLoader}. */
22 | public class DirectLoadUtils {
23 |
24 | public static DirectLoader buildDirectLoaderFromConnOption(
25 | OBDirectLoadConnectorOptions connectorOptions) {
26 | try {
27 | return new DirectLoaderBuilder()
28 | .host(connectorOptions.getDirectLoadHost())
29 | .port(connectorOptions.getDirectLoadPort())
30 | .user(connectorOptions.getUsername())
31 | .password(connectorOptions.getPassword())
32 | .tenant(connectorOptions.getTenantName())
33 | .schema(connectorOptions.getSchemaName())
34 | .table(connectorOptions.getTableName())
35 | .enableMultiNodeWrite(connectorOptions.getEnableMultiNodeWrite())
36 | .duplicateKeyAction(connectorOptions.getDirectLoadDupAction())
37 | .maxErrorCount(connectorOptions.getDirectLoadMaxErrorRows())
38 | .timeout(connectorOptions.getDirectLoadTimeout())
39 | .heartBeatTimeout(connectorOptions.getDirectLoadHeartbeatTimeout())
40 | .heartBeatInterval(connectorOptions.getDirectLoadHeartbeatInterval())
41 | .directLoadMethod(connectorOptions.getDirectLoadLoadMethod())
42 | .parallel(connectorOptions.getDirectLoadParallel())
43 | .executionId(connectorOptions.getExecutionId())
44 | .build();
45 | } catch (Exception e) {
46 | throw new RuntimeException("Failed to build DirectLoader.", e);
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/main/java/com/oceanbase/connector/flink/sink/DirectLoadStreamSinkProvider.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | package com.oceanbase.connector.flink.sink;
20 |
21 | import org.apache.flink.streaming.api.datastream.DataStream;
22 | import org.apache.flink.streaming.api.datastream.DataStreamSink;
23 | import org.apache.flink.table.connector.ProviderContext;
24 | import org.apache.flink.table.connector.sink.DataStreamSinkProvider;
25 | import org.apache.flink.table.data.RowData;
26 |
27 | import java.util.function.Function;
28 |
29 | /** The direct-load sink provider. see {@link DataStreamSinkProvider}. */
30 | public class DirectLoadStreamSinkProvider implements DataStreamSinkProvider {
31 |
32 | private final Function<DataStream<RowData>, DataStreamSink<?>> producer;
33 |
34 | public DirectLoadStreamSinkProvider(Function<DataStream<RowData>, DataStreamSink<?>> producer) {
35 | this.producer = producer;
36 | }
37 |
38 | @Override
39 | public DataStreamSink<?> consumeDataStream(
40 | ProviderContext providerContext, DataStream<RowData> dataStream) {
41 | return producer.apply(dataStream);
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/main/java/com/oceanbase/connector/flink/sink/OBDirectLoadDynamicTableSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink;
18 |
19 | import com.oceanbase.connector.flink.OBDirectLoadConnectorOptions;
20 | import com.oceanbase.connector.flink.OBDirectLoadDynamicTableSinkFactory;
21 | import com.oceanbase.connector.flink.sink.batch.DirectLoadSink;
22 | import com.oceanbase.connector.flink.table.OceanBaseRowDataSerializationSchema;
23 | import com.oceanbase.connector.flink.table.TableId;
24 | import com.oceanbase.connector.flink.table.TableInfo;
25 |
26 | import org.apache.flink.api.common.RuntimeExecutionMode;
27 | import org.apache.flink.table.catalog.ResolvedSchema;
28 | import org.apache.flink.table.connector.ChangelogMode;
29 | import org.apache.flink.table.connector.sink.DynamicTableSink;
30 | import org.apache.flink.types.RowKind;
31 |
32 | import org.apache.commons.lang3.NotImplementedException;
33 | import org.apache.commons.lang3.StringUtils;
34 |
35 | /** The direct-load dynamic table sink. see {@link DynamicTableSink}. */
36 | public class OBDirectLoadDynamicTableSink implements DynamicTableSink {
37 |
38 | private final ResolvedSchema physicalSchema;
39 | private final OBDirectLoadConnectorOptions connectorOptions;
40 | private final RuntimeExecutionMode runtimeExecutionMode;
41 | private final int numberOfTaskSlots;
42 |
43 | public OBDirectLoadDynamicTableSink(
44 | ResolvedSchema physicalSchema,
45 | OBDirectLoadConnectorOptions connectorOptions,
46 | RuntimeExecutionMode runtimeExecutionMode,
47 | int numberOfTaskSlots) {
48 | this.physicalSchema = physicalSchema;
49 | this.connectorOptions = connectorOptions;
50 | this.runtimeExecutionMode = runtimeExecutionMode;
51 | this.numberOfTaskSlots = numberOfTaskSlots;
52 | }
53 |
54 | @Override
55 | public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
56 | return ChangelogMode.newBuilder()
57 | .addContainedKind(RowKind.INSERT)
58 | .addContainedKind(RowKind.DELETE)
59 | .addContainedKind(RowKind.UPDATE_AFTER)
60 | .build();
61 | }
62 |
63 | @Override
64 | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
65 | TableId tableId =
66 | new TableId(connectorOptions.getSchemaName(), connectorOptions.getTableName());
67 | DirectLoadSink directLoadSink =
68 | new DirectLoadSink(
69 | connectorOptions,
70 | new OceanBaseRowDataSerializationSchema(
71 | new TableInfo(tableId, physicalSchema)),
72 | numberOfTaskSlots);
73 |
74 | if (context.isBounded()
75 | && runtimeExecutionMode == RuntimeExecutionMode.BATCH
76 | && connectorOptions.getEnableMultiNodeWrite()) {
77 | if (StringUtils.isBlank(connectorOptions.getExecutionId())) {
78 | throw new RuntimeException(
79 | "Execution id can't be blank when multi-node write is enabled.");
80 | }
81 | return new DirectLoadStreamSinkProvider(
82 | (dataStream) -> dataStream.sinkTo(directLoadSink));
83 | } else if (context.isBounded()
84 | && runtimeExecutionMode == RuntimeExecutionMode.BATCH
85 | && !connectorOptions.getEnableMultiNodeWrite()) {
86 | return new DirectLoadStreamSinkProvider(
87 | (dataStream) -> dataStream.sinkTo(directLoadSink).setParallelism(1));
88 | } else {
89 | throw new NotImplementedException(
90 | "Direct load currently only supports running in Flink batch execution mode.");
91 | }
92 | }
93 |
94 | @Override
95 | public DynamicTableSink copy() {
96 | return new OBDirectLoadDynamicTableSink(
97 | physicalSchema, connectorOptions, runtimeExecutionMode, numberOfTaskSlots);
98 | }
99 |
100 | @Override
101 | public String asSummaryString() {
102 | return OBDirectLoadDynamicTableSinkFactory.IDENTIFIER;
103 | }
104 | }
105 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/main/java/com/oceanbase/connector/flink/sink/batch/DirectLoadSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink.batch;
18 |
19 | import com.oceanbase.connector.flink.OBDirectLoadConnectorOptions;
20 | import com.oceanbase.connector.flink.table.RecordSerializationSchema;
21 |
22 | import org.apache.flink.api.connector.sink2.Sink;
23 | import org.apache.flink.api.connector.sink2.SinkWriter;
24 | import org.apache.flink.table.data.RowData;
25 |
26 | import java.io.IOException;
27 |
28 | /** The direct-load sink. see {@link Sink}. */
29 | public class DirectLoadSink implements Sink<RowData> {
30 | private final OBDirectLoadConnectorOptions connectorOptions;
31 | private final RecordSerializationSchema<RowData> recordSerializer;
32 | private final int numberOfTaskSlots;
33 |
34 | public DirectLoadSink(
35 | OBDirectLoadConnectorOptions connectorOptions,
36 | RecordSerializationSchema<RowData> recordSerializer,
37 | int numberOfTaskSlots) {
38 | this.connectorOptions = connectorOptions;
39 | this.recordSerializer = recordSerializer;
40 | this.numberOfTaskSlots = numberOfTaskSlots;
41 | }
42 |
43 | @Override
44 | public SinkWriter<RowData> createWriter(InitContext context) throws IOException {
45 | return new DirectLoadWriter(connectorOptions, recordSerializer, numberOfTaskSlots);
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/main/java/com/oceanbase/connector/flink/table/OceanBaseRowDataSerializationSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import org.apache.flink.table.data.ArrayData;
20 | import org.apache.flink.table.data.DecimalData;
21 | import org.apache.flink.table.data.RowData;
22 | import org.apache.flink.table.data.TimestampData;
23 | import org.apache.flink.table.types.logical.LogicalType;
24 | import org.apache.flink.types.RowKind;
25 |
26 | import java.sql.Date;
27 | import java.sql.Time;
28 | import java.time.LocalDate;
29 | import java.time.LocalTime;
30 | import java.util.stream.Collectors;
31 | import java.util.stream.IntStream;
32 |
33 | public class OceanBaseRowDataSerializationSchema
34 | extends AbstractRecordSerializationSchema<RowData> {
35 |
36 | private static final long serialVersionUID = 1L;
37 |
38 | private final TableInfo tableInfo;
39 | private final RowData.FieldGetter[] fieldGetters;
40 | private final SerializationRuntimeConverter[] fieldConverters;
41 |
42 | public OceanBaseRowDataSerializationSchema(TableInfo tableInfo) {
43 | this.tableInfo = tableInfo;
44 | this.fieldGetters =
45 | IntStream.range(0, tableInfo.getDataTypes().size())
46 | .boxed()
47 | .map(i -> RowData.createFieldGetter(tableInfo.getDataTypes().get(i), i))
48 | .toArray(RowData.FieldGetter[]::new);
49 | this.fieldConverters =
50 | tableInfo.getDataTypes().stream()
51 | .map(this::getOrCreateConverter)
52 | .toArray(SerializationRuntimeConverter[]::new);
53 | }
54 |
55 | @Override
56 | public Record serialize(RowData rowData) {
57 | Object[] values = new Object[fieldGetters.length];
58 | for (int i = 0; i < fieldGetters.length; i++) {
59 | values[i] = fieldConverters[i].convert(fieldGetters[i].getFieldOrNull(rowData));
60 | }
61 | return new DataChangeRecord(
62 | tableInfo,
63 | (rowData.getRowKind() == RowKind.INSERT
64 | || rowData.getRowKind() == RowKind.UPDATE_AFTER)
65 | ? DataChangeRecord.Type.UPSERT
66 | : DataChangeRecord.Type.DELETE,
67 | values);
68 | }
69 |
70 | @Override
71 | protected SerializationRuntimeConverter createNotNullConverter(LogicalType type) {
72 | switch (type.getTypeRoot()) {
73 | case BOOLEAN:
74 | case TINYINT:
75 | case SMALLINT:
76 | case INTEGER:
77 | case INTERVAL_YEAR_MONTH:
78 | case BIGINT:
79 | case INTERVAL_DAY_TIME:
80 | case FLOAT:
81 | case DOUBLE:
82 | case BINARY:
83 | case VARBINARY:
84 | return data -> data;
85 | case CHAR:
86 | case VARCHAR:
87 | return Object::toString;
88 | case DATE:
89 | return data -> Date.valueOf(LocalDate.ofEpochDay((int) data));
90 | case TIME_WITHOUT_TIME_ZONE:
91 | return data -> Time.valueOf(LocalTime.ofNanoOfDay((int) data * 1_000_000L));
92 | case TIMESTAMP_WITHOUT_TIME_ZONE:
93 | return data -> ((TimestampData) data).toTimestamp();
94 | case DECIMAL:
95 | return data -> ((DecimalData) data).toBigDecimal();
96 | case ARRAY:
97 | return data -> {
98 | ArrayData arrayData = (ArrayData) data;
99 | return IntStream.range(0, arrayData.size())
100 | .mapToObj(i -> arrayData.getString(i).toString())
101 | .collect(Collectors.joining(","));
102 | };
103 | default:
104 | throw new UnsupportedOperationException("Unsupported type: " + type);
105 | }
106 | }
107 | }
108 |
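An illustrative snippet (JDK only) of the same DATE/TIME reconstruction used by createNotNullConverter above: Flink passes DATE as days since the epoch and TIME as milliseconds of the day, and the converters rebuild java.sql values from those integers.

import java.sql.Date;
import java.sql.Time;
import java.time.LocalDate;
import java.time.LocalTime;

public class TemporalConversionExample {
    public static void main(String[] args) {
        // Internal Flink values for DATE '2024-01-01' and TIME '12:30:00'.
        int daysSinceEpoch = (int) LocalDate.of(2024, 1, 1).toEpochDay();
        int millisOfDay = LocalTime.of(12, 30).toSecondOfDay() * 1000;

        // Same expressions as the DATE and TIME_WITHOUT_TIME_ZONE branches above.
        Date date = Date.valueOf(LocalDate.ofEpochDay(daysSinceEpoch));
        Time time = Time.valueOf(LocalTime.ofNanoOfDay(millisOfDay * 1_000_000L));
        System.out.println(date + " " + time); // prints 2024-01-01 12:30:00
    }
}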
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | com.oceanbase.connector.flink.OBDirectLoadDynamicTableSinkFactory
15 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | rootLogger.level = INFO
15 | rootLogger.appenderRef.test.ref = TestLogger
16 |
17 | appender.testlogger.name = TestLogger
18 | appender.testlogger.type = CONSOLE
19 | appender.testlogger.target = SYSTEM_ERR
20 | appender.testlogger.layout.type = PatternLayout
21 | appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n
22 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-directload/src/test/resources/sql/products.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | CREATE TABLE products
15 | (
16 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
17 | name VARCHAR(255) NOT NULL DEFAULT 'flink',
18 | description VARCHAR(512),
19 | weight DECIMAL(20, 10)
20 | );
21 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-e2e-tests/src/test/java/com/oceanbase/connector/flink/MysqlCdcSyncE2eITCase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import com.oceanbase.connector.flink.utils.FlinkContainerTestEnvironment;
20 |
21 | import org.junit.jupiter.api.AfterAll;
22 | import org.junit.jupiter.api.BeforeAll;
23 | import org.junit.jupiter.api.Test;
24 | import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
25 | import org.slf4j.Logger;
26 | import org.slf4j.LoggerFactory;
27 | import org.testcontainers.containers.GenericContainer;
28 | import org.testcontainers.containers.MySQLContainer;
29 | import org.testcontainers.containers.output.Slf4jLogConsumer;
30 |
31 | import java.util.Collections;
32 | import java.util.stream.Stream;
33 |
34 | @DisabledIfSystemProperty(
35 | named = "flink_version",
36 | matches = "1.15.4",
37 | disabledReason = "Flink 1.15.4 does not contain 'SideOutputDataStream'")
38 | public class MysqlCdcSyncE2eITCase extends FlinkContainerTestEnvironment {
39 |
40 | private static final Logger LOG = LoggerFactory.getLogger(MysqlCdcSyncE2eITCase.class);
41 |
42 | private static final MySQLContainer<?> MYSQL_CONTAINER =
43 | new MySQLContainer<>("mysql:8.0.20")
44 | .withConfigurationOverride("docker/mysql")
45 | .withInitScript("sql/mysql-cdc.sql")
46 | .withNetwork(NETWORK)
47 | .withExposedPorts(3306)
48 | .withDatabaseName("test")
49 | .withUsername("root")
50 | .withPassword("mysqlpw")
51 | .withLogConsumer(new Slf4jLogConsumer(LOG));
52 |
53 | @BeforeAll
54 | public static void setup() {
55 | CONTAINER.withLogConsumer(new Slf4jLogConsumer(LOG)).start();
56 | MYSQL_CONTAINER.start();
57 | }
58 |
59 | @AfterAll
60 | public static void tearDown() {
61 | Stream.of(CONTAINER, MYSQL_CONTAINER).forEach(GenericContainer::stop);
62 | }
63 |
64 | @Test
65 | public void testMysqlCdcSync() throws Exception {
66 | submitJob(
67 | Collections.singletonList(getResource("flink-sql-connector-mysql-cdc.jar")),
68 | getResource("flink-connector-oceanbase-cli.jar"),
69 | new String[] {
70 | multipleParameterArg("source-type", "mysql-cdc"),
71 | multipleParameterArg(
72 | "source-conf", "hostname=" + getContainerIP(MYSQL_CONTAINER)),
73 | multipleParameterArg("source-conf", "port=" + MySQLContainer.MYSQL_PORT),
74 | multipleParameterArg(
75 | "source-conf", "username=" + MYSQL_CONTAINER.getUsername()),
76 | multipleParameterArg(
77 | "source-conf", "password=" + MYSQL_CONTAINER.getPassword()),
78 | multipleParameterArg(
79 | "source-conf", "database-name=" + MYSQL_CONTAINER.getDatabaseName()),
80 | multipleParameterArg("source-conf", "table-name=.*"),
81 | multipleParameterArg("sink-conf", "url=" + getJdbcUrl()),
82 | multipleParameterArg("sink-conf", "username=" + CONTAINER.getUsername()),
83 | multipleParameterArg("sink-conf", "password=" + CONTAINER.getPassword()),
84 | multipleParameterArg("job-name", "test-mysql-cdc-sync"),
85 | multipleParameterArg("database", CONTAINER.getDatabaseName()),
86 | multipleParameterArg("including-tables", ".*")
87 | });
88 |
89 | waitingAndAssertTableCount("products", 9);
90 | waitingAndAssertTableCount("customers", 4);
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-e2e-tests/src/test/resources/docker/mysql/my.cnf:
--------------------------------------------------------------------------------
1 | #
2 | # Licensed to the Apache Software Foundation (ASF) under one or more
3 | # contributor license agreements. See the NOTICE file distributed with
4 | # this work for additional information regarding copyright ownership.
5 | # The ASF licenses this file to You under the Apache License, Version 2.0
6 | # (the "License"); you may not use this file except in compliance with
7 | # the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | #
17 |
18 | # For advice on how to change settings please see
19 | # http://dev.mysql.com/doc/refman/5.7/en/server-configuration-defaults.html
20 |
21 | [mysqld]
22 | #
23 | # Remove leading # and set to the amount of RAM for the most important data
24 | # cache in MySQL. Start at 70% of total RAM for dedicated server, else 10%.
25 | # innodb_buffer_pool_size = 128M
26 | #
27 | # Remove leading # to turn on a very important data integrity option: logging
28 | # changes to the binary log between backups.
29 | # log_bin
30 | #
31 | # Remove leading # to set options mainly useful for reporting servers.
32 | # The server defaults are faster for transactions and fast SELECTs.
33 | # Adjust sizes as needed, experiment to find the optimal values.
34 | # join_buffer_size = 128M
35 | # sort_buffer_size = 2M
36 | # read_rnd_buffer_size = 2M
37 | skip-host-cache
38 | skip-name-resolve
39 | #datadir=/var/lib/mysql
40 | #socket=/var/lib/mysql/mysql.sock
41 | secure-file-priv=/var/lib/mysql
42 | user=mysql
43 |
44 | # Disabling symbolic-links is recommended to prevent assorted security risks
45 | symbolic-links=0
46 |
47 | #log-error=/var/log/mysqld.log
48 | #pid-file=/var/run/mysqld/mysqld.pid
49 |
50 | # ----------------------------------------------
51 | # Enable the binlog for replication & CDC
52 | # ----------------------------------------------
53 |
54 | # Enable binary replication log and set the prefix, expiration, and log format.
55 | # The prefix is arbitrary, expiration can be short for integration tests but would
56 | # be longer on a production system. Row-level info is required for ingest to work.
57 | # Server ID is required, but this will vary on production systems
58 | server-id = 223344
59 | log_bin = mysql-bin
60 | expire_logs_days = 1
61 | binlog_format = row
62 |
63 | # enable gtid mode
64 | gtid_mode = on
65 | enforce_gtid_consistency = on
--------------------------------------------------------------------------------
/flink-connector-oceanbase-e2e-tests/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | rootLogger.level = INFO
15 | rootLogger.appenderRef.test.ref = TestLogger
16 |
17 | appender.testlogger.name = TestLogger
18 | appender.testlogger.type = CONSOLE
19 | appender.testlogger.target = SYSTEM_ERR
20 | appender.testlogger.layout.type = PatternLayout
21 | appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n
22 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-e2e-tests/src/test/resources/sql/htable.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | CREATE TABLE `htable$family1`
15 | (
16 | `K` varbinary(1024) NOT NULL,
17 | `Q` varbinary(256) NOT NULL,
18 | `T` bigint(20) NOT NULL,
19 | `V` varbinary(1024) DEFAULT NULL,
20 | PRIMARY KEY (`K`, `Q`, `T`)
21 | );
22 |
23 | CREATE TABLE `htable$family2`
24 | (
25 | `K` varbinary(1024) NOT NULL,
26 | `Q` varbinary(256) NOT NULL,
27 | `T` bigint(20) NOT NULL,
28 | `V` varbinary(1024) DEFAULT NULL,
29 | PRIMARY KEY (`K`, `Q`, `T`)
30 | );
31 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-e2e-tests/src/test/resources/sql/mysql-cdc.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | -- Create and populate our products using a single insert with many rows
15 | CREATE TABLE products
16 | (
17 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
18 | name VARCHAR(255) NOT NULL DEFAULT 'flink',
19 | description VARCHAR(512),
20 | weight FLOAT
21 | );
22 | ALTER TABLE products AUTO_INCREMENT = 101;
23 |
24 | INSERT INTO products
25 | VALUES (default, "scooter", "Small 2-wheel scooter", 3.14),
26 | (default, "car battery", "12V car battery", 8.1),
27 | (default, "12-pack drill bits", "12-pack of drill bits with sizes ranging from #40 to #3", 0.8),
28 | (default, "hammer", "12oz carpenter's hammer", 0.75),
29 | (default, "hammer", "14oz carpenter's hammer", 0.875),
30 | (default, "hammer", "16oz carpenter's hammer", 1.0),
31 | (default, "rocks", "box of assorted rocks", 5.3),
32 | (default, "jacket", "water resistent black wind breaker", 0.1),
33 | (default, "spare tire", "24 inch spare tire", 22.2);
34 |
35 | -- Create some customers ...
36 | CREATE TABLE customers
37 | (
38 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
39 | first_name VARCHAR(255) NOT NULL,
40 | last_name VARCHAR(255) NOT NULL,
41 | email VARCHAR(255) NOT NULL UNIQUE KEY
42 | ) AUTO_INCREMENT = 1001;
43 |
44 |
45 | INSERT INTO customers
46 | VALUES (default, "Sally", "Thomas", "sally.thomas@acme.com"),
47 | (default, "George", "Bailey", "gbailey@foobar.com"),
48 | (default, "Edward", "Walker", "ed@walker.com"),
49 | (default, "Anne", "Kretchmar", "annek@noanswer.org");
50 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase-e2e-tests/src/test/resources/sql/products.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | CREATE TABLE products
15 | (
16 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
17 | name VARCHAR(255) NOT NULL DEFAULT 'flink',
18 | description VARCHAR(512),
19 | weight DECIMAL(20, 10)
20 | );
21 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!-- Copyright 2024 OceanBase. Licensed under the Apache License, Version 2.0; see http://www.apache.org/licenses/LICENSE-2.0 -->
16 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
17 |     <modelVersion>4.0.0</modelVersion>
18 |
19 |     <parent>
20 |         <groupId>com.oceanbase</groupId>
21 |         <artifactId>flink-connector-oceanbase-parent</artifactId>
22 |         <version>${revision}</version>
23 |     </parent>
24 |
25 |     <artifactId>flink-connector-oceanbase</artifactId>
26 |     <packaging>jar</packaging>
27 |
28 |     <dependencies>
29 |         <dependency>
30 |             <groupId>com.oceanbase</groupId>
31 |             <artifactId>flink-connector-oceanbase-base</artifactId>
32 |             <version>${project.version}</version>
33 |         </dependency>
34 |
35 |         <dependency>
36 |             <groupId>com.alibaba</groupId>
37 |             <artifactId>druid</artifactId>
38 |         </dependency>
39 |
40 |         <dependency>
41 |             <groupId>mysql</groupId>
42 |             <artifactId>mysql-connector-java</artifactId>
43 |         </dependency>
44 |
45 |         <dependency>
46 |             <groupId>com.oceanbase</groupId>
47 |             <artifactId>ob-partition-calculator</artifactId>
48 |         </dependency>
49 |
50 |         <dependency>
51 |             <groupId>com.oceanbase</groupId>
52 |             <artifactId>oceanbase-client</artifactId>
53 |             <scope>test</scope>
54 |         </dependency>
55 |
56 |         <dependency>
57 |             <groupId>com.oceanbase</groupId>
58 |             <artifactId>flink-connector-oceanbase-base</artifactId>
59 |             <version>${project.version}</version>
60 |             <type>test-jar</type>
61 |             <scope>test</scope>
62 |         </dependency>
63 |     </dependencies>
64 |
65 | </project>
66 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/java/com/oceanbase/connector/flink/OceanBaseConnectorOptions.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import com.oceanbase.connector.flink.utils.OptionUtils;
20 |
21 | import org.apache.flink.configuration.ConfigOption;
22 | import org.apache.flink.configuration.ConfigOptions;
23 |
24 | import java.time.Duration;
25 | import java.util.Map;
26 | import java.util.Properties;
27 |
28 | public class OceanBaseConnectorOptions extends ConnectorOptions {
29 | private static final long serialVersionUID = 1L;
30 |
31 | public static final ConfigOption<String> DRIVER_CLASS_NAME =
32 | ConfigOptions.key("driver-class-name")
33 | .stringType()
34 | .defaultValue("com.mysql.cj.jdbc.Driver")
35 | .withDescription(
36 | "JDBC driver class name, use 'com.mysql.cj.jdbc.Driver' by default.");
37 |
38 | public static final ConfigOption<String> DRUID_PROPERTIES =
39 | ConfigOptions.key("druid-properties")
40 | .stringType()
41 | .noDefaultValue()
42 | .withDescription("Properties for specific connection pool.");
43 |
44 | public static final ConfigOption<Boolean> MEMSTORE_CHECK_ENABLED =
45 | ConfigOptions.key("memstore-check.enabled")
46 | .booleanType()
47 | .defaultValue(true)
48 | .withDescription("Whether to enable memstore check. Default value is 'true'.");
49 |
50 | public static final ConfigOption<Double> MEMSTORE_THRESHOLD =
51 | ConfigOptions.key("memstore-check.threshold")
52 | .doubleType()
53 | .defaultValue(0.9)
54 | .withDescription(
55 | "Memory usage threshold ratio relative to the limit value. Default value is '0.9'.");
56 |
57 | public static final ConfigOption<Duration> MEMSTORE_CHECK_INTERVAL =
58 | ConfigOptions.key("memstore-check.interval")
59 | .durationType()
60 | .defaultValue(Duration.ofSeconds(30))
61 | .withDescription(
62 | "The interval at which the writer checks whether the memstore has reached the threshold. Default value is '30s'.");
63 |
64 | public static final ConfigOption<Boolean> PARTITION_ENABLED =
65 | ConfigOptions.key("partition.enabled")
66 | .booleanType()
67 | .defaultValue(false)
68 | .withDescription(
69 | "Whether to enable partition calculation and flush records by partitions. Default value is 'false'.");
70 |
71 | public static final ConfigOption<Boolean> TABLE_ORACLE_TENANT_CASE_INSENSITIVE =
72 | ConfigOptions.key("table.oracle-tenant-case-insensitive")
73 | .booleanType()
74 | .defaultValue(true)
75 | .withDescription(
76 | "By default, under the Oracle tenant, schema names and column names are case-insensitive.");
77 |
78 | public OceanBaseConnectorOptions(Map<String, String> config) {
79 | super(config);
80 | }
81 |
82 | public String getDriverClassName() {
83 | return allConfig.get(DRIVER_CLASS_NAME);
84 | }
85 |
86 | public Properties getDruidProperties() {
87 | return OptionUtils.parseProperties(allConfig.get(DRUID_PROPERTIES));
88 | }
89 |
90 | public boolean getMemStoreCheckEnabled() {
91 | return allConfig.get(MEMSTORE_CHECK_ENABLED);
92 | }
93 |
94 | public double getMemStoreThreshold() {
95 | return allConfig.get(MEMSTORE_THRESHOLD);
96 | }
97 |
98 | public long getMemStoreCheckInterval() {
99 | return allConfig.get(MEMSTORE_CHECK_INTERVAL).toMillis();
100 | }
101 |
102 | public boolean getPartitionEnabled() {
103 | return allConfig.get(PARTITION_ENABLED);
104 | }
105 |
106 | public boolean getTableOracleTenantCaseInsensitive() {
107 | return allConfig.get(TABLE_ORACLE_TENANT_CASE_INSENSITIVE);
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/java/com/oceanbase/connector/flink/OceanBaseDynamicTableSinkFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink;
18 |
19 | import com.oceanbase.connector.flink.sink.OceanBaseDynamicTableSink;
20 | import com.oceanbase.connector.flink.utils.OptionUtils;
21 |
22 | import org.apache.flink.configuration.ConfigOption;
23 | import org.apache.flink.table.catalog.Column;
24 | import org.apache.flink.table.catalog.ResolvedSchema;
25 | import org.apache.flink.table.connector.sink.DynamicTableSink;
26 | import org.apache.flink.table.factories.DynamicTableSinkFactory;
27 | import org.apache.flink.table.factories.FactoryUtil;
28 |
29 | import java.util.HashSet;
30 | import java.util.Map;
31 | import java.util.Set;
32 | import java.util.stream.Collectors;
33 |
34 | public class OceanBaseDynamicTableSinkFactory implements DynamicTableSinkFactory {
35 |
36 | public static final String IDENTIFIER = "oceanbase";
37 |
38 | @Override
39 | public DynamicTableSink createDynamicTableSink(Context context) {
40 | FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
41 | helper.validate();
42 |
43 | ResolvedSchema resolvedSchema = context.getCatalogTable().getResolvedSchema();
44 | ResolvedSchema physicalSchema =
45 | new ResolvedSchema(
46 | resolvedSchema.getColumns().stream()
47 | .filter(Column::isPhysical)
48 | .collect(Collectors.toList()),
49 | resolvedSchema.getWatermarkSpecs(),
50 | resolvedSchema.getPrimaryKey().orElse(null));
51 | Map<String, String> options = context.getCatalogTable().getOptions();
52 | OptionUtils.printOptions(IDENTIFIER, options);
53 | return new OceanBaseDynamicTableSink(
54 | physicalSchema, new OceanBaseConnectorOptions(options));
55 | }
56 |
57 | @Override
58 | public String factoryIdentifier() {
59 | return IDENTIFIER;
60 | }
61 |
62 | @Override
63 | public Set<ConfigOption<?>> requiredOptions() {
64 | Set<ConfigOption<?>> options = new HashSet<>();
65 | options.add(OceanBaseConnectorOptions.URL);
66 | options.add(OceanBaseConnectorOptions.USERNAME);
67 | options.add(OceanBaseConnectorOptions.PASSWORD);
68 | options.add(OceanBaseConnectorOptions.SCHEMA_NAME);
69 | options.add(OceanBaseConnectorOptions.TABLE_NAME);
70 | return options;
71 | }
72 |
73 | @Override
74 | public Set<ConfigOption<?>> optionalOptions() {
75 | Set<ConfigOption<?>> options = new HashSet<>();
76 | options.add(OceanBaseConnectorOptions.SYNC_WRITE);
77 | options.add(OceanBaseConnectorOptions.BUFFER_FLUSH_INTERVAL);
78 | options.add(OceanBaseConnectorOptions.BUFFER_SIZE);
79 | options.add(OceanBaseConnectorOptions.MAX_RETRIES);
80 | options.add(OceanBaseConnectorOptions.DRIVER_CLASS_NAME);
81 | options.add(OceanBaseConnectorOptions.DRUID_PROPERTIES);
82 | options.add(OceanBaseConnectorOptions.MEMSTORE_CHECK_ENABLED);
83 | options.add(OceanBaseConnectorOptions.MEMSTORE_THRESHOLD);
84 | options.add(OceanBaseConnectorOptions.MEMSTORE_CHECK_INTERVAL);
85 | options.add(OceanBaseConnectorOptions.PARTITION_ENABLED);
86 | options.add(OceanBaseConnectorOptions.TABLE_ORACLE_TENANT_CASE_INSENSITIVE);
87 | return options;
88 | }
89 | }
90 |
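
Note: the factory registers the sink under the identifier 'oceanbase' and validates the option sets above. The Table API snippet below is an illustrative sketch, not taken from the repository: the option keys are assumed spellings of the ConnectorOptions constants (URL, USERNAME, PASSWORD, SCHEMA_NAME, TABLE_NAME), the connection values are placeholders, and the column list mirrors the products test table further down.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class OceanBaseSinkSqlSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // 'connector' matches IDENTIFIER above; the remaining keys are assumed and the
        // connection values are placeholders.
        tEnv.executeSql(
                "CREATE TABLE products_sink ("
                        + " id INT PRIMARY KEY NOT ENFORCED,"
                        + " name STRING,"
                        + " description STRING,"
                        + " weight DECIMAL(20, 10)"
                        + ") WITH ("
                        + " 'connector' = 'oceanbase',"
                        + " 'url' = 'jdbc:mysql://localhost:2881/test',"
                        + " 'username' = 'root@test',"
                        + " 'password' = '',"
                        + " 'schema-name' = 'test',"
                        + " 'table-name' = 'products'"
                        + ")");
    }
}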
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/java/com/oceanbase/connector/flink/connection/OceanBaseTablePartInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.connection;
18 |
19 | import com.oceanbase.partition.calculator.ObPartIdCalculator;
20 | import com.oceanbase.partition.calculator.model.TableEntry;
21 | import com.oceanbase.partition.metadata.desc.ObTablePart;
22 |
23 | import java.util.Map;
24 | import java.util.TreeMap;
25 | import java.util.stream.Stream;
26 |
27 | public class OceanBaseTablePartInfo {
28 |
29 | private final ObPartIdCalculator partIdCalculator;
30 | private final Map<String, Integer> partColumnIndexMap;
31 |
32 | public OceanBaseTablePartInfo(TableEntry tableEntry, boolean isV4) {
33 | this.partIdCalculator = new ObPartIdCalculator(false, tableEntry, isV4);
34 | this.partColumnIndexMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
35 | ObTablePart tablePart = tableEntry.getTablePart();
36 | if (tablePart != null) {
37 | Stream.concat(
38 | tablePart.getPartColumns().stream(),
39 | tablePart.getSubPartColumns().stream())
40 | .forEach(
41 | obPartColumn ->
42 | this.partColumnIndexMap.put(
43 | obPartColumn.getColumnName(),
44 | obPartColumn.getColumnIndex()));
45 | }
46 | }
47 |
48 | public ObPartIdCalculator getPartIdCalculator() {
49 | return partIdCalculator;
50 | }
51 |
52 | public Map<String, Integer> getPartColumnIndexMap() {
53 | return partColumnIndexMap;
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/java/com/oceanbase/connector/flink/dialect/OceanBaseMySQLDialect.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.dialect;
18 |
19 | import org.apache.flink.util.function.SerializableFunction;
20 |
21 | import org.apache.commons.lang3.StringUtils;
22 |
23 | import javax.annotation.Nonnull;
24 | import javax.annotation.Nullable;
25 |
26 | import java.util.List;
27 | import java.util.stream.Collectors;
28 |
29 | public class OceanBaseMySQLDialect implements OceanBaseDialect {
30 |
31 | private static final long serialVersionUID = 1L;
32 |
33 | @Override
34 | public String quoteIdentifier(@Nonnull String identifier) {
35 | return "`" + identifier.replaceAll("`", "``") + "`";
36 | }
37 |
38 | @Override
39 | public String getUpsertStatement(
40 | @Nonnull String schemaName,
41 | @Nonnull String tableName,
42 | @Nonnull List<String> fieldNames,
43 | @Nonnull List<String> uniqueKeyFields,
44 | @Nullable SerializableFunction<String, String> placeholderFunc) {
45 | String updateClause =
46 | fieldNames.stream()
47 | .filter(f -> !uniqueKeyFields.contains(f))
48 | .map(f -> quoteIdentifier(f) + "=VALUES(" + quoteIdentifier(f) + ")")
49 | .collect(Collectors.joining(", "));
50 | String insertIntoStatement =
51 | getInsertIntoStatement(schemaName, tableName, fieldNames, placeholderFunc);
52 | if (StringUtils.isNotEmpty(updateClause)) {
53 | // ON DUPLICATE KEY UPDATE
54 | return insertIntoStatement + " ON DUPLICATE KEY UPDATE " + updateClause;
55 | } else {
56 | // INSERT IGNORE
57 | return StringUtils.replace(insertIntoStatement, "INSERT", "INSERT IGNORE", 1);
58 | }
59 | }
60 |
61 | @Override
62 | public String getSysDatabase() {
63 | return "oceanbase";
64 | }
65 |
66 | @Override
67 | public String getQueryTenantNameStatement() {
68 | return "SHOW TENANT";
69 | }
70 |
71 | @Override
72 | public String getListSchemaStatement() {
73 | return "SELECT `SCHEMA_NAME` FROM `INFORMATION_SCHEMA`.`SCHEMATA`";
74 | }
75 |
76 | @Override
77 | public String getListTableStatement(String schemaName) {
78 | return "SELECT TABLE_NAME FROM information_schema.`TABLES` WHERE TABLE_SCHEMA = '"
79 | + schemaName
80 | + "'";
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/java/com/oceanbase/connector/flink/dialect/OceanBaseOracleDialect.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.dialect;
18 |
19 | import com.oceanbase.connector.flink.OceanBaseConnectorOptions;
20 |
21 | import org.apache.flink.util.function.SerializableFunction;
22 |
23 | import javax.annotation.Nonnull;
24 | import javax.annotation.Nullable;
25 |
26 | import java.util.List;
27 | import java.util.Objects;
28 | import java.util.stream.Collectors;
29 |
30 | public class OceanBaseOracleDialect implements OceanBaseDialect {
31 |
32 | private static final long serialVersionUID = 1L;
33 |
34 | private final OceanBaseConnectorOptions options;
35 |
36 | public OceanBaseOracleDialect(OceanBaseConnectorOptions options) {
37 | this.options = options;
38 | }
39 |
40 | @Override
41 | public String quoteIdentifier(@Nonnull String identifier) {
42 | if (Objects.isNull(options) || options.getTableOracleTenantCaseInsensitive()) {
43 | return identifier;
44 | }
45 |
46 | return "\"" + identifier + "\"";
47 | }
48 |
49 | @Override
50 | public String getUpsertStatement(
51 | @Nonnull String schemaName,
52 | @Nonnull String tableName,
53 | @Nonnull List<String> fieldNames,
54 | @Nonnull List<String> uniqueKeyFields,
55 | @Nullable SerializableFunction<String, String> placeholderFunc) {
56 | String sourceFields =
57 | fieldNames.stream()
58 | .map(f -> getPlaceholder(f, placeholderFunc) + " AS " + quoteIdentifier(f))
59 | .collect(Collectors.joining(", "));
60 |
61 | String onClause =
62 | uniqueKeyFields.stream()
63 | .map(f -> "t." + quoteIdentifier(f) + "=s." + quoteIdentifier(f))
64 | .collect(Collectors.joining(" and "));
65 |
66 | String updateClause =
67 | fieldNames.stream()
68 | .filter(f -> !uniqueKeyFields.contains(f))
69 | .map(f -> "t." + quoteIdentifier(f) + "=s." + quoteIdentifier(f))
70 | .collect(Collectors.joining(", "));
71 |
72 | String insertFields =
73 | fieldNames.stream().map(this::quoteIdentifier).collect(Collectors.joining(", "));
74 |
75 | String valuesClause =
76 | fieldNames.stream()
77 | .map(f -> "s." + quoteIdentifier(f))
78 | .collect(Collectors.joining(", "));
79 |
80 | return "MERGE INTO "
81 | + getFullTableName(schemaName, tableName)
82 | + " t "
83 | + " USING (SELECT "
84 | + sourceFields
85 | + " FROM DUAL) s "
86 | + " ON ("
87 | + onClause
88 | + ") "
89 | + " WHEN MATCHED THEN UPDATE SET "
90 | + updateClause
91 | + " WHEN NOT MATCHED THEN INSERT ("
92 | + insertFields
93 | + ")"
94 | + " VALUES ("
95 | + valuesClause
96 | + ")";
97 | }
98 |
99 | @Override
100 | public String getSysDatabase() {
101 | return "SYS";
102 | }
103 |
104 | @Override
105 | public String getQueryTenantNameStatement() {
106 | return "SELECT SYS_CONTEXT('USERENV', 'CON_NAME') FROM DUAL";
107 | }
108 |
109 | @Override
110 | public String getListSchemaStatement() {
111 | return "SELECT USERNAME FROM ALL_USERS";
112 | }
113 |
114 | @Override
115 | public String getListTableStatement(String schemaName) {
116 | return "SELECT TABLE_NAME FROM ALL_TABLES WHERE OWNER = '" + schemaName + "'";
117 | }
118 | }
119 |
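
Note: with the default options the Oracle dialect leaves identifiers unquoted and produces a MERGE ... USING DUAL upsert. The sketch below is illustrative only; it assumes default connector options and the '?' placeholder function used in the dialect tests, and the schema, table and column names are made up. The statement in the comment is approximate, since getFullTableName is defined in the base dialect interface outside this listing.

import com.oceanbase.connector.flink.OceanBaseConnectorOptions;
import com.oceanbase.connector.flink.dialect.OceanBaseOracleDialect;

import java.util.Arrays;
import java.util.HashMap;

public class OracleUpsertSketch {
    public static void main(String[] args) {
        OceanBaseOracleDialect dialect =
                new OceanBaseOracleDialect(new OceanBaseConnectorOptions(new HashMap<>()));
        String merge =
                dialect.getUpsertStatement(
                        "TEST",
                        "PRODUCTS",
                        Arrays.asList("ID", "NAME"),
                        Arrays.asList("ID"),
                        field -> "?");
        // Roughly (exact whitespace aside):
        // MERGE INTO TEST.PRODUCTS t USING (SELECT ? AS ID, ? AS NAME FROM DUAL) s
        //   ON (t.ID=s.ID) WHEN MATCHED THEN UPDATE SET t.NAME=s.NAME
        //   WHEN NOT MATCHED THEN INSERT (ID, NAME) VALUES (s.ID, s.NAME)
        System.out.println(merge);
    }
}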
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/java/com/oceanbase/connector/flink/sink/OceanBaseDynamicTableSink.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.sink;
18 |
19 | import com.oceanbase.connector.flink.OceanBaseConnectorOptions;
20 | import com.oceanbase.connector.flink.connection.OceanBaseConnectionProvider;
21 | import com.oceanbase.connector.flink.table.DataChangeRecord;
22 | import com.oceanbase.connector.flink.table.OceanBaseRowDataSerializationSchema;
23 | import com.oceanbase.connector.flink.table.TableId;
24 | import com.oceanbase.connector.flink.table.TableInfo;
25 |
26 | import org.apache.flink.table.catalog.ResolvedSchema;
27 | import org.apache.flink.table.connector.sink.DynamicTableSink;
28 |
29 | public class OceanBaseDynamicTableSink extends AbstractDynamicTableSink {
30 |
31 | private final OceanBaseConnectorOptions connectorOptions;
32 |
33 | public OceanBaseDynamicTableSink(
34 | ResolvedSchema physicalSchema, OceanBaseConnectorOptions connectorOptions) {
35 | super(physicalSchema);
36 | this.connectorOptions = connectorOptions;
37 | }
38 |
39 | @Override
40 | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
41 | OceanBaseConnectionProvider connectionProvider =
42 | new OceanBaseConnectionProvider(connectorOptions);
43 | TableId tableId =
44 | new TableId(
45 | connectionProvider.getDialect()::getFullTableName,
46 | connectorOptions.getSchemaName(),
47 | connectorOptions.getTableName());
48 | OceanBaseRecordFlusher recordFlusher =
49 | new OceanBaseRecordFlusher(connectorOptions, connectionProvider);
50 | return new SinkProvider(
51 | typeSerializer ->
52 | new OceanBaseSink<>(
53 | connectorOptions,
54 | typeSerializer,
55 | new OceanBaseRowDataSerializationSchema(
56 | new TableInfo(tableId, physicalSchema)),
57 | DataChangeRecord.KeyExtractor.simple(),
58 | recordFlusher));
59 | }
60 |
61 | @Override
62 | public DynamicTableSink copy() {
63 | return new OceanBaseDynamicTableSink(physicalSchema, connectorOptions);
64 | }
65 |
66 | @Override
67 | public String asSummaryString() {
68 | return "OceanBase";
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/java/com/oceanbase/connector/flink/utils/OceanBaseJdbcUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.utils;
18 |
19 | import com.oceanbase.connector.flink.dialect.OceanBaseDialect;
20 |
21 | import org.apache.flink.util.function.FunctionWithException;
22 | import org.apache.flink.util.function.SupplierWithException;
23 |
24 | import java.sql.Connection;
25 | import java.sql.ResultSet;
26 | import java.sql.SQLException;
27 | import java.sql.Statement;
28 |
29 | public class OceanBaseJdbcUtils {
30 |
31 | public static int getTableRowsCount(
32 | SupplierWithException<Connection, SQLException> connectionSupplier, String tableName) {
33 | return (int)
34 | query(
35 | connectionSupplier,
36 | "SELECT COUNT(1) FROM " + tableName,
37 | rs -> rs.next() ? rs.getInt(1) : 0);
38 | }
39 |
40 | public static String getVersionComment(
41 | SupplierWithException<Connection, SQLException> connectionSupplier) {
42 | return (String)
43 | query(
44 | connectionSupplier,
45 | "SHOW VARIABLES LIKE 'version_comment'",
46 | rs -> rs.next() ? rs.getString("VALUE") : null);
47 | }
48 |
49 | public static String getCompatibleMode(
50 | SupplierWithException<Connection, SQLException> connectionSupplier) {
51 | return (String)
52 | query(
53 | connectionSupplier,
54 | "SHOW VARIABLES LIKE 'ob_compatibility_mode'",
55 | rs -> rs.next() ? rs.getString("VALUE") : null);
56 | }
57 |
58 | public static String getClusterName(
59 | SupplierWithException<Connection, SQLException> connectionSupplier) {
60 | return (String)
61 | query(
62 | connectionSupplier,
63 | "SHOW PARAMETERS LIKE 'cluster'",
64 | rs -> rs.next() ? rs.getString("VALUE") : null);
65 | }
66 |
67 | public static String getTenantName(
68 | SupplierWithException<Connection, SQLException> connectionSupplier,
69 | OceanBaseDialect dialect) {
70 | return (String)
71 | query(
72 | connectionSupplier,
73 | dialect.getQueryTenantNameStatement(),
74 | rs -> rs.next() ? rs.getString(1) : null);
75 | }
76 |
77 | public static Object query(
78 | SupplierWithException<Connection, SQLException> connectionSupplier,
79 | String sql,
80 | FunctionWithException<ResultSet, Object, SQLException> resultSetConsumer) {
81 | try (Connection connection = connectionSupplier.get();
82 | Statement statement = connection.createStatement()) {
83 | ResultSet rs = statement.executeQuery(sql);
84 | return resultSetConsumer.apply(rs);
85 | } catch (SQLException e) {
86 | throw new RuntimeException("Failed to execute sql: " + sql, e);
87 | }
88 | }
89 |
90 | public static void execute(
91 | SupplierWithException<Connection, SQLException> connectionSupplier, String sql) {
92 | try (Connection connection = connectionSupplier.get();
93 | Statement statement = connection.createStatement()) {
94 | statement.execute(sql);
95 | } catch (SQLException e) {
96 | throw new RuntimeException("Failed to execute sql: " + sql, e);
97 | }
98 | }
99 | }
100 |
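
Note: every helper above opens a short-lived connection from the supplied factory, runs a single statement and maps the first row of the result. The usage sketch below is not from the repository; the JDBC URL, credentials and table name are placeholders, and the supplier simply wraps DriverManager.

import com.oceanbase.connector.flink.dialect.OceanBaseMySQLDialect;
import com.oceanbase.connector.flink.utils.OceanBaseJdbcUtils;

import org.apache.flink.util.function.SupplierWithException;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class JdbcUtilsSketch {
    public static void main(String[] args) {
        SupplierWithException<Connection, SQLException> supplier =
                () -> DriverManager.getConnection("jdbc:mysql://localhost:2881/test", "root@test", "");

        // "mysql" or "oracle", depending on the tenant's compatibility mode.
        String mode = OceanBaseJdbcUtils.getCompatibleMode(supplier);
        String tenant = OceanBaseJdbcUtils.getTenantName(supplier, new OceanBaseMySQLDialect());
        int rows = OceanBaseJdbcUtils.getTableRowsCount(supplier, "test.products");

        System.out.printf("mode=%s, tenant=%s, rows=%d%n", mode, tenant, rows);
    }
}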
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | com.oceanbase.connector.flink.OceanBaseDynamicTableSinkFactory
15 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/java/com/oceanbase/connector/flink/dialect/OceanBaseMySQLDialectTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.oceanbase.connector.flink.dialect;
17 |
18 | import org.apache.flink.util.function.SerializableFunction;
19 |
20 | import org.junit.jupiter.api.Assertions;
21 | import org.junit.jupiter.api.Test;
22 |
23 | import java.util.stream.Collectors;
24 | import java.util.stream.Stream;
25 |
26 | class OceanBaseMySQLDialectTest {
27 |
28 | @Test
29 | void getUpsertStatementUpdate() {
30 | OceanBaseMySQLDialect dialect = new OceanBaseMySQLDialect();
31 | String upsertStatement =
32 | dialect.getUpsertStatement(
33 | "sche1",
34 | "tb1",
35 | Stream.of("id", "name").collect(Collectors.toList()),
36 | Stream.of("id").collect(Collectors.toList()),
37 | (SerializableFunction<String, String>) s -> "?");
38 | Assertions.assertEquals(
39 | "INSERT INTO `sche1`.`tb1`(`id`, `name`) VALUES (?, ?) ON DUPLICATE KEY UPDATE `name`=VALUES(`name`)",
40 | upsertStatement);
41 | }
42 |
43 | @Test
44 | void getUpsertStatementIgnore() {
45 | OceanBaseMySQLDialect dialect = new OceanBaseMySQLDialect();
46 | String upsertStatement =
47 | dialect.getUpsertStatement(
48 | "sche1",
49 | "tb1",
50 | Stream.of("id").collect(Collectors.toList()),
51 | Stream.of("id").collect(Collectors.toList()),
52 | (SerializableFunction<String, String>) s -> "?");
53 | Assertions.assertEquals(
54 | "INSERT IGNORE INTO `sche1`.`tb1`(`id`) VALUES (?)", upsertStatement);
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/java/com/oceanbase/connector/flink/dialect/OceanBaseOracleDialectTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.oceanbase.connector.flink.dialect;
17 |
18 | import com.oceanbase.connector.flink.OceanBaseConnectorOptions;
19 |
20 | import org.junit.jupiter.api.Assertions;
21 | import org.junit.jupiter.api.Test;
22 | import org.testcontainers.shaded.com.google.common.collect.Maps;
23 |
24 | public class OceanBaseOracleDialectTest {
25 |
26 | @Test
27 | public void testQuoteIdentifier() {
28 | OceanBaseConnectorOptions options = new OceanBaseConnectorOptions(Maps.newHashMap());
29 | Assertions.assertTrue(options.getTableOracleTenantCaseInsensitive());
30 | OceanBaseOracleDialect oracleDialect = new OceanBaseOracleDialect(options);
31 |
32 | String identifier = "name";
33 | Assertions.assertEquals(identifier, oracleDialect.quoteIdentifier(identifier));
34 | }
35 | }
36 |
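
Note: the test above only covers the default, case-insensitive path. A complementary sketch (not in the repository) that exercises the quoting branch by disabling 'table.oracle-tenant-case-insensitive' could look like this:

import com.oceanbase.connector.flink.OceanBaseConnectorOptions;
import com.oceanbase.connector.flink.dialect.OceanBaseOracleDialect;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.util.Collections;

class OceanBaseOracleDialectCaseSensitiveTest {

    @Test
    void testQuoteIdentifierCaseSensitive() {
        // Option key taken from TABLE_ORACLE_TENANT_CASE_INSENSITIVE in OceanBaseConnectorOptions.
        OceanBaseConnectorOptions options =
                new OceanBaseConnectorOptions(
                        Collections.singletonMap("table.oracle-tenant-case-insensitive", "false"));
        Assertions.assertFalse(options.getTableOracleTenantCaseInsensitive());

        OceanBaseOracleDialect dialect = new OceanBaseOracleDialect(options);
        Assertions.assertEquals("\"name\"", dialect.quoteIdentifier("name"));
    }
}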
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/java/com/oceanbase/connector/flink/table/OceanBaseTestData.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import org.apache.flink.table.catalog.ResolvedSchema;
20 | import org.apache.flink.table.data.RowData;
21 |
22 | import java.io.Serializable;
23 |
24 | public class OceanBaseTestData implements Serializable {
25 |
26 | private static final long serialVersionUID = 1L;
27 |
28 | private final String schemaName;
29 | private final String tableName;
30 | private final ResolvedSchema resolvedSchema;
31 | private final RowData rowData;
32 | private final SchemaChangeRecord.Type sqlType;
33 | private final String sql;
34 |
35 | public OceanBaseTestData(
36 | String schemaName, String tableName, ResolvedSchema resolvedSchema, RowData rowData) {
37 | this.schemaName = schemaName;
38 | this.tableName = tableName;
39 | this.resolvedSchema = resolvedSchema;
40 | this.rowData = rowData;
41 | this.sqlType = null;
42 | this.sql = null;
43 | }
44 |
45 | public OceanBaseTestData(
46 | String schemaName, String tableName, SchemaChangeRecord.Type sqlType, String sql) {
47 | this.schemaName = schemaName;
48 | this.tableName = tableName;
49 | this.resolvedSchema = null;
50 | this.rowData = null;
51 | this.sqlType = sqlType;
52 | this.sql = sql;
53 | }
54 |
55 | public String getSchemaName() {
56 | return schemaName;
57 | }
58 |
59 | public String getTableName() {
60 | return tableName;
61 | }
62 |
63 | public ResolvedSchema getResolvedSchema() {
64 | return resolvedSchema;
65 | }
66 |
67 | public RowData getRowData() {
68 | return rowData;
69 | }
70 |
71 | public SchemaChangeRecord.Type getSqlType() {
72 | return sqlType;
73 | }
74 |
75 | public String getSql() {
76 | return sql;
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/java/com/oceanbase/connector/flink/table/OceanBaseTestDataSerializationSchema.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2024 OceanBase.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.oceanbase.connector.flink.table;
18 |
19 | import com.oceanbase.connector.flink.dialect.OceanBaseDialect;
20 | import com.oceanbase.connector.flink.dialect.OceanBaseMySQLDialect;
21 |
22 | import org.apache.flink.table.data.ArrayData;
23 | import org.apache.flink.table.data.DecimalData;
24 | import org.apache.flink.table.data.TimestampData;
25 | import org.apache.flink.table.types.logical.LogicalType;
26 |
27 | import java.sql.Date;
28 | import java.sql.Time;
29 | import java.time.LocalDate;
30 | import java.time.LocalTime;
31 | import java.util.stream.Collectors;
32 | import java.util.stream.IntStream;
33 |
34 | public class OceanBaseTestDataSerializationSchema
35 | extends AbstractRecordSerializationSchema<OceanBaseTestData> {
36 |
37 | private final OceanBaseDialect dialect = new OceanBaseMySQLDialect();
38 |
39 | @Override
40 | public Record serialize(OceanBaseTestData data) {
41 | TableId tableId =
42 | new TableId(dialect::getFullTableName, data.getSchemaName(), data.getTableName());
43 | if (data.getSql() != null) {
44 | return new SchemaChangeRecord(tableId, data.getSql(), data.getSqlType());
45 | }
46 | TableInfo tableInfo = new TableInfo(tableId, data.getResolvedSchema());
47 | OceanBaseRowDataSerializationSchema serializationSchema =
48 | new OceanBaseRowDataSerializationSchema(tableInfo);
49 | return serializationSchema.serialize(data.getRowData());
50 | }
51 |
52 | @Override
53 | protected SerializationRuntimeConverter createNotNullConverter(LogicalType type) {
54 | switch (type.getTypeRoot()) {
55 | case BOOLEAN:
56 | case TINYINT:
57 | case SMALLINT:
58 | case INTEGER:
59 | case INTERVAL_YEAR_MONTH:
60 | case BIGINT:
61 | case INTERVAL_DAY_TIME:
62 | case FLOAT:
63 | case DOUBLE:
64 | case BINARY:
65 | case VARBINARY:
66 | return data -> data;
67 | case CHAR:
68 | case VARCHAR:
69 | return Object::toString;
70 | case DATE:
71 | return data -> Date.valueOf(LocalDate.ofEpochDay((int) data));
72 | case TIME_WITHOUT_TIME_ZONE:
73 | return data -> Time.valueOf(LocalTime.ofNanoOfDay((int) data * 1_000_000L));
74 | case TIMESTAMP_WITHOUT_TIME_ZONE:
75 | return data -> ((TimestampData) data).toTimestamp();
76 | case DECIMAL:
77 | return data -> ((DecimalData) data).toBigDecimal();
78 | case ARRAY:
79 | return data -> {
80 | ArrayData arrayData = (ArrayData) data;
81 | return IntStream.range(0, arrayData.size())
82 | .mapToObj(i -> arrayData.getString(i).toString())
83 | .collect(Collectors.joining(","));
84 | };
85 | default:
86 | throw new UnsupportedOperationException("Unsupported type:" + type);
87 | }
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/resources/log4j2-test.properties:
--------------------------------------------------------------------------------
1 | # Copyright 2024 OceanBase.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | #
8 | # Unless required by applicable law or agreed to in writing, software
9 | # distributed under the License is distributed on an "AS IS" BASIS,
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | # See the License for the specific language governing permissions and
12 | # limitations under the License.
13 |
14 | rootLogger.level = INFO
15 | rootLogger.appenderRef.test.ref = TestLogger
16 |
17 | appender.testlogger.name = TestLogger
18 | appender.testlogger.type = CONSOLE
19 | appender.testlogger.target = SYSTEM_ERR
20 | appender.testlogger.layout.type = PatternLayout
21 | appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n
22 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/resources/sql/mysql/array_type.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | CREATE TABLE test_array
15 | (
16 | id int,
17 | arr1 int[],
18 | arr2 bool[][],
19 | arr3 float[][][],
20 | arr4 double[][][][],
21 | arr5 bigint[][][][][],
22 | arr6 varchar(225)[][][]
23 | );
24 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/resources/sql/mysql/gis_types.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | CREATE TABLE gis_types
15 | (
16 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
17 | point_c POINT,
18 | geometry_c GEOMETRY,
19 | linestring_c LINESTRING,
20 | polygon_c POLYGON,
21 | multipoint_c MULTIPOINT,
22 | multiline_c MULTILINESTRING,
23 | multipolygon_c MULTIPOLYGON,
24 | geometrycollection_c GEOMETRYCOLLECTION
25 | )
26 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/resources/sql/mysql/products.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | CREATE TABLE products
15 | (
16 | id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
17 | name VARCHAR(255) NOT NULL DEFAULT 'flink',
18 | description VARCHAR(512),
19 | weight DECIMAL(20, 10)
20 | );
21 |
--------------------------------------------------------------------------------
/flink-connector-oceanbase/src/test/resources/sql/oracle/products.sql:
--------------------------------------------------------------------------------
1 | -- Copyright 2024 OceanBase.
2 | --
3 | -- Licensed under the Apache License, Version 2.0 (the "License");
4 | -- you may not use this file except in compliance with the License.
5 | -- You may obtain a copy of the License at
6 | -- http://www.apache.org/licenses/LICENSE-2.0
7 | -- Unless required by applicable law or agreed to in writing,
8 | -- software distributed under the License is distributed on an
9 | -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
10 | -- KIND, either express or implied. See the License for the
11 | -- specific language governing permissions and limitations
12 | -- under the License.
13 |
14 | CREATE TABLE products
15 | (
16 | id NUMBER PRIMARY KEY,
17 | name VARCHAR2(225),
18 | description VARCHAR2(225),
19 | weight NUMBER
20 | );
21 |
--------------------------------------------------------------------------------
/flink-sql-connector-oceanbase-directload/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!--
3 | Copyright 2024 OceanBase.
4 |
5 | Licensed under the Apache License, Version 2.0 (the "License");
6 | you may not use this file except in compliance with the License.
7 | You may obtain a copy of the License at
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | -->
16 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
17 | <modelVersion>4.0.0</modelVersion>
18 | <parent>
19 | <groupId>com.oceanbase</groupId>
20 | <artifactId>flink-connector-oceanbase-parent</artifactId>
21 | <version>${revision}</version>
22 | </parent>
23 |
24 | <artifactId>flink-sql-connector-oceanbase-directload</artifactId>
25 |
26 | <dependencies>
27 | <dependency>
28 | <groupId>com.oceanbase</groupId>
29 | <artifactId>flink-connector-oceanbase-directload</artifactId>
30 | <version>${project.version}</version>
31 | </dependency>
32 | </dependencies>
33 |
34 | <build>
35 | <plugins>
36 | <plugin>
37 | <groupId>org.apache.maven.plugins</groupId>
38 | <artifactId>maven-jar-plugin</artifactId>
39 | <executions>
40 | <execution>
41 | <id>empty-javadoc-jar</id>
42 | <goals>
43 | <goal>jar</goal>
44 | </goals>
45 | <phase>package</phase>
46 | <configuration>
47 | <classifier>javadoc</classifier>
48 | <classesDirectory>${basedir}/src/main/java</classesDirectory>
49 | </configuration>
50 | </execution>
51 | </executions>
52 | </plugin>
53 | <plugin>
54 | <groupId>org.apache.maven.plugins</groupId>
55 | <artifactId>maven-shade-plugin</artifactId>
56 | <executions>
57 | <execution>
58 | <id>shade-flink</id>
59 | <goals>
60 | <goal>shade</goal>
61 | </goals>
62 | <phase>package</phase>
63 | <configuration>
64 | <shadeTestJar>false</shadeTestJar>
65 | <shadedArtifactAttached>false</shadedArtifactAttached>
66 | <createDependencyReducedPom>false</createDependencyReducedPom>
67 | <artifactSet>
68 | <includes>
69 | <include>com.oceanbase:*</include>
70 | <include>com.alibaba:*</include>
71 | <include>com.alipay.*:*</include>
72 | <include>com.google.*:*</include>
73 | <include>commons-*:*</include>
74 | <include>io.netty:*</include>
75 | </includes>
76 | </artifactSet>
77 | <relocations>
78 | <relocation>
79 | <pattern>com.google</pattern>
80 | <shadedPattern>${shaded.prefix}.com.google</shadedPattern>
81 | </relocation>
82 | <relocation>
83 | <pattern>io.netty</pattern>
84 | <shadedPattern>${shaded.prefix}.io.netty</shadedPattern>
85 | </relocation>
86 | </relocations>
87 | </configuration>
88 | </execution>
89 | </executions>
90 | </plugin>
91 | </plugins>
92 | </build>
93 | </project>
94 |
--------------------------------------------------------------------------------
/flink-sql-connector-oceanbase/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!--
3 | Copyright 2024 OceanBase.
4 |
5 | Licensed under the Apache License, Version 2.0 (the "License");
6 | you may not use this file except in compliance with the License.
7 | You may obtain a copy of the License at
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | -->
16 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
17 | <modelVersion>4.0.0</modelVersion>
18 | <parent>
19 | <groupId>com.oceanbase</groupId>
20 | <artifactId>flink-connector-oceanbase-parent</artifactId>
21 | <version>${revision}</version>
22 | </parent>
23 |
24 | <artifactId>flink-sql-connector-oceanbase</artifactId>
25 |
26 | <dependencies>
27 | <dependency>
28 | <groupId>com.oceanbase</groupId>
29 | <artifactId>flink-connector-oceanbase</artifactId>
30 | <version>${project.version}</version>
31 | </dependency>
32 | </dependencies>
33 |
34 | <build>
35 | <plugins>
36 | <plugin>
37 | <groupId>org.apache.maven.plugins</groupId>
38 | <artifactId>maven-jar-plugin</artifactId>
39 | <executions>
40 | <execution>
41 | <id>empty-javadoc-jar</id>
42 | <goals>
43 | <goal>jar</goal>
44 | </goals>
45 | <phase>package</phase>
46 | <configuration>
47 | <classifier>javadoc</classifier>
48 | <classesDirectory>${basedir}/src/main/java</classesDirectory>
49 | </configuration>
50 | </execution>
51 | </executions>
52 | </plugin>
53 | <plugin>
54 | <groupId>org.apache.maven.plugins</groupId>
55 | <artifactId>maven-shade-plugin</artifactId>
56 | <executions>
57 | <execution>
58 | <id>shade-flink</id>
59 | <goals>
60 | <goal>shade</goal>
61 | </goals>
62 | <phase>package</phase>
63 | <configuration>
64 | <shadeTestJar>false</shadeTestJar>
65 | <shadedArtifactAttached>false</shadedArtifactAttached>
66 | <createDependencyReducedPom>false</createDependencyReducedPom>
67 | <artifactSet>
68 | <includes>
69 | <include>com.oceanbase:*</include>
70 | <include>com.alibaba:*</include>
71 | <include>mysql:mysql-connector-java</include>
72 | </includes>
73 | </artifactSet>
74 | </configuration>
75 | </execution>
76 | </executions>
77 | </plugin>
78 | </plugins>
79 | </build>
80 | </project>
81 |
82 |
--------------------------------------------------------------------------------