├── connector ├── src │ ├── test │ │ ├── resources │ │ │ ├── ssl │ │ │ │ ├── selfsigned.password │ │ │ │ ├── test.ca.password │ │ │ │ ├── test.ca.srl │ │ │ │ ├── casigned.crt │ │ │ │ ├── test.derive.csr │ │ │ │ ├── test.derive.crt │ │ │ │ ├── casigned.pem │ │ │ │ ├── selfsigned.pem │ │ │ │ ├── test.ca.pem │ │ │ │ ├── casigned.key │ │ │ │ ├── test.derive.key │ │ │ │ ├── test.ca.key │ │ │ │ └── selfsigned.key │ │ │ └── log4j.properties │ │ └── java │ │ │ └── org │ │ │ └── apache │ │ │ └── flink │ │ │ └── connector │ │ │ └── nebula │ │ │ ├── NebulaValueUtils.java │ │ │ ├── catalog │ │ │ └── NebulaCatalogCreateSpaceTest.java │ │ │ ├── MockData.java │ │ │ ├── utils │ │ │ ├── NebulaUtilsTest.java │ │ │ └── NebulaVerticesTest.java │ │ │ ├── connection │ │ │ ├── NebulaGraphConnectionProviderTest.java │ │ │ └── NebulaClientOptionsTest.java │ │ │ ├── NebulaITTestBase.java │ │ │ └── sink │ │ │ ├── NebulaRowVertexOutputFormatConverterTest.java │ │ │ ├── NebulaRowEdgeOutputFormatConverterTest.java │ │ │ └── NebulaOutputFormatConverterTest.java │ └── main │ │ ├── resources │ │ ├── META-INF │ │ │ └── services │ │ │ │ └── org.apache.flink.table.factories.Factory │ │ └── log4j.properties │ │ └── java │ │ └── org.apache.flink │ │ ├── connector │ │ └── nebula │ │ │ ├── utils │ │ │ ├── FailureHandlerEnum.java │ │ │ ├── SSLSignType.java │ │ │ ├── PolicyEnum.java │ │ │ ├── VidTypeEnum.java │ │ │ ├── WriteModeEnum.java │ │ │ ├── PartitionUtils.java │ │ │ ├── DataTypeEnum.java │ │ │ ├── NebulaCatalogUtils.java │ │ │ ├── NebulaVertex.java │ │ │ ├── NebulaEdge.java │ │ │ ├── NebulaSpace.java │ │ │ ├── NebulaSpaces.java │ │ │ ├── NebulaConstant.java │ │ │ ├── NebulaUtils.java │ │ │ ├── NebulaVertices.java │ │ │ └── NebulaEdges.java │ │ │ ├── source │ │ │ ├── NebulaBaseTableRowConverter.java │ │ │ ├── NebulaConverter.java │ │ │ ├── NebulaSource.java │ │ │ ├── NebulaInputRowFormat.java │ │ │ ├── NebulaInputTableRowFormat.java │ │ │ ├── NebulaRowConverter.java │ │ │ ├── NebulaEdgeSource.java │ │ │ ├── NebulaVertexSource.java │ │ │ ├── NebulaSourceFunction.java │ │ │ └── NebulaInputFormat.java │ │ │ ├── sink │ │ │ ├── NebulaEdgeBatchOutputFormat.java │ │ │ ├── NebulaVertexBatchOutputFormat.java │ │ │ ├── NebulaBatchExecutor.java │ │ │ ├── NebulaSinkFunction.java │ │ │ ├── NebulaVertexBatchTableOutputFormat.java │ │ │ ├── NebulaEdgeBatchTableOutputFormat.java │ │ │ ├── NebulaTableBufferReducedExecutor.java │ │ │ ├── NebulaEdgeBatchExecutor.java │ │ │ ├── NebulaVertexBatchExecutor.java │ │ │ ├── NebulaRowVertexOutputFormatConverter.java │ │ │ └── NebulaRowEdgeOutputFormatConverter.java │ │ │ ├── connection │ │ │ ├── SelfSignParams.java │ │ │ ├── CASignParams.java │ │ │ ├── NebulaStorageConnectionProvider.java │ │ │ ├── NebulaGraphConnectionProvider.java │ │ │ └── NebulaMetaConnectionProvider.java │ │ │ ├── table │ │ │ ├── NebulaRowDataInputFormat.java │ │ │ ├── NebulaDynamicTableSource.java │ │ │ └── NebulaDynamicTableSink.java │ │ │ └── catalog │ │ │ └── factory │ │ │ └── NebulaCatalogFactory.java │ │ └── graph │ │ └── descriptors │ │ └── NebulaValidator.java └── .gitignore ├── example ├── src │ └── main │ │ └── resources │ │ ├── ssl │ │ ├── selfsigned.password │ │ ├── casigned.crt │ │ ├── casigned.pem │ │ ├── selfsigned.pem │ │ ├── casigned.key │ │ └── selfsigned.key │ │ └── log4j.properties ├── .gitignore └── pom.xml ├── .github └── workflows │ ├── ISSUE_TEMPLATE.md │ ├── PULL_REQUEST_TEMPLATE.md │ ├── check_label.yml │ ├── pull_request.yml │ ├── release.yml │ └── snapshot.yml ├── checkstyle-suppressions.xml ├── 
.gitignore └── pom.xml
/connector/src/test/resources/ssl/selfsigned.password: -------------------------------------------------------------------------------- 1 | vesoft 2 | --------------------------------------------------------------------------------
/connector/src/test/resources/ssl/test.ca.password: -------------------------------------------------------------------------------- 1 | vesoft 2 | --------------------------------------------------------------------------------
/example/src/main/resources/ssl/selfsigned.password: -------------------------------------------------------------------------------- 1 | vesoft 2 | --------------------------------------------------------------------------------
/connector/src/test/resources/ssl/test.ca.srl: -------------------------------------------------------------------------------- 1 | 4AF2EBB941EA7EE8358ECC7E51C2F1A38EE18873 2 | --------------------------------------------------------------------------------
/.github/workflows/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | #### Expected behavior 2 | 3 | #### Actual behavior 4 | 5 | #### Steps to reproduce 6 | 7 | #### JVM version (e.g. `java -version`) 8 | 9 | #### Nebula Graph version 10 | 11 | #### OS version (e.g. `uname -a`) 12 | --------------------------------------------------------------------------------
/.github/workflows/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Motivation: 2 | 3 | Explain why you're making this change and what problem you're trying to solve. 4 | 5 | Modification: 6 | 7 | Describe the modifications you've made. 8 | 9 | Result: 10 | 11 | Fixes #. 12 | --------------------------------------------------------------------------------
/connector/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2022 vesoft inc. All rights reserved. 2 | # 3 | # This source code is licensed under Apache 2.0 License. 4 | 5 | org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory 6 | --------------------------------------------------------------------------------
/connector/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Global logging configuration 2 | log4j.rootLogger=INFO, stdout 3 | # Console output... 4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 5 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.stdout.layout.ConversionPattern=%5p [%t] - %m%n 7 | --------------------------------------------------------------------------------
/connector/src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Global logging configuration 2 | log4j.rootLogger=INFO, stdout 3 | # Console output... 4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 5 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.stdout.layout.ConversionPattern=%5p [%t] - %m%n 7 | --------------------------------------------------------------------------------
/example/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Global logging configuration 2 | log4j.rootLogger=INFO, stdout 3 | # Console output...
4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 5 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.stdout.layout.ConversionPattern=%5p [%t] - %m%n 7 | -------------------------------------------------------------------------------- /checkstyle-suppressions.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /.github/workflows/check_label.yml: -------------------------------------------------------------------------------- 1 | name: Auto label 2 | 3 | on: 4 | issues: 5 | types: 6 | - reopened 7 | - opened 8 | - labeled 9 | - unlabeled 10 | - closed 11 | 12 | env: 13 | GH_PAT: ${{ secrets.GITHUB_TOKEN }} 14 | EVENT: ${{ toJSON(github.event)}} 15 | EVENT_NAME: ${{ github.event_name}} 16 | 17 | jobs: 18 | sync: 19 | name: auto label 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: HarrisChu/auto_label@v1 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.nar 17 | *.ear 18 | *.zip 19 | *.tar.gz 20 | *.rar 21 | 22 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 23 | hs_err_pid* 24 | 25 | # build target 26 | target/ 27 | 28 | # IDE 29 | .idea/ 30 | .eclipse/ 31 | *.iml 32 | 33 | .DS_Store 34 | -------------------------------------------------------------------------------- /example/.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.nar 17 | *.ear 18 | *.zip 19 | *.tar.gz 20 | *.rar 21 | 22 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 23 | hs_err_pid* 24 | 25 | # build target 26 | target/ 27 | 28 | # IDE 29 | .idea/ 30 | .eclipse/ 31 | *.iml 32 | 33 | .DS_Store 34 | -------------------------------------------------------------------------------- /connector/.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.nar 17 | *.ear 18 | *.zip 19 | *.tar.gz 20 | *.rar 21 | 22 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 23 | hs_err_pid* 24 | 25 | # build target 26 | target/ 27 | 28 | # IDE 29 | .idea/ 30 | .eclipse/ 31 | *.iml 32 | 33 | .DS_Store 34 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/utils/FailureHandlerEnum.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2023 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | public enum FailureHandlerEnum { 9 | FAIL("FAIL"), 10 | 11 | IGNORE("IGNORE"); 12 | 13 | private final String handler; 14 | 15 | FailureHandlerEnum(String handler) { 16 | this.handler = handler; 17 | } 18 | } 19 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/SSLSignType.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | public enum SSLSignType { 9 | /** 10 | * CA sign 11 | */ 12 | CA("ca"), 13 | 14 | /** 15 | * SELF sign 16 | */ 17 | SELF("self"); 18 | 19 | private String type; 20 | 21 | SSLSignType(String type) { 22 | this.type = type; 23 | } 24 | } 25 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaBaseTableRowConverter.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 9 | 10 | public class NebulaBaseTableRowConverter implements NebulaConverter<BaseTableRow> { 11 | 12 | @Override 13 | public BaseTableRow convert(BaseTableRow row) { 14 | return row; 15 | } 16 | 17 | } 18 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaConverter.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | 9 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 10 | import java.io.UnsupportedEncodingException; 11 | 12 | /** 13 | * converter to convert Nebula data to type T 14 | */ 15 | public interface NebulaConverter<T> { 16 | 17 | public T convert(BaseTableRow record) throws UnsupportedEncodingException; 18 | } 19 | --------------------------------------------------------------------------------
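NebulaConverter is the connector's extension point for mapping a scanned BaseTableRow into an arbitrary record type; NebulaBaseTableRowConverter above is simply the identity case. A minimal sketch of a custom implementation (NebulaStringConverter is a hypothetical name, not a class in this repository):

package org.apache.flink.connector.nebula.source;

import com.vesoft.nebula.client.storage.data.BaseTableRow;

// Hypothetical converter that renders every scanned row as a plain string.
public class NebulaStringConverter implements NebulaConverter<String> {

    @Override
    public String convert(BaseTableRow row) {
        // Any row-to-T mapping can go here; toString() just joins the row's values.
        return row.toString();
    }
}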
/connector/src/main/java/org.apache.flink/connector/nebula/utils/PolicyEnum.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | public enum PolicyEnum { 9 | /** 10 | * HASH policy 11 | */ 12 | HASH("HASH"), 13 | 14 | /** 15 | * UUID policy 16 | */ 17 | UUID("UUID"); 18 | 19 | private String type; 20 | 21 | PolicyEnum(String type) { 22 | this.type = type; 23 | } 24 | 25 | public String policy() { 26 | return type; 27 | } 28 | } 29 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/VidTypeEnum.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | public enum VidTypeEnum { 9 | /** 10 | * INT vertex id 11 | */ 12 | INT("INT"), 13 | 14 | /** 15 | * STRING vertex id 16 | */ 17 | STRING("STRING"); 18 | 19 | private String type; 20 | 21 | VidTypeEnum(String type) { 22 | this.type = type; 23 | } 24 | 25 | public String vidType() { 26 | return type; 27 | } 28 | } 29 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/WriteModeEnum.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | public enum WriteModeEnum { 9 | /** 10 | * INSERT write mode 11 | */ 12 | INSERT("insert"), 13 | 14 | /** 15 | * UPDATE write mode 16 | */ 17 | UPDATE("update"), 18 | 19 | /** 20 | * DELETE write mode 21 | */ 22 | DELETE("delete"); 23 | 24 | private String mode; 25 | 26 | WriteModeEnum(String mode) { 27 | this.mode = mode; 28 | } 29 | } 30 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/PartitionUtils.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import com.google.common.collect.Lists; 9 | import java.util.List; 10 | 11 | /** 12 | * @author Pan BinBin 13 | */ 14 | public class PartitionUtils { 15 | public static List<Integer> getScanParts(Integer index, 16 | Integer nebulaTotalPart, Integer numSplit) { 17 | List<Integer> scanParts = Lists.newArrayList(); 18 | Integer currentPart = index; 19 | while (currentPart <= nebulaTotalPart) { 20 | scanParts.add(currentPart); 21 | currentPart += numSplit; 22 | } 23 | return scanParts; 24 | } 25 | } 26 | --------------------------------------------------------------------------------
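getScanParts deals NebulaGraph's 1-based storage partitions out to the parallel input splits round-robin: split i of numSplit scans parts i, i + numSplit, i + 2 * numSplit, and so on, so every part is read by exactly one split. A worked illustration with 10 partitions and 3 splits:

// Each split receives a disjoint share that together covers all 10 parts:
PartitionUtils.getScanParts(1, 10, 3); // [1, 4, 7, 10]
PartitionUtils.getScanParts(2, 10, 3); // [2, 5, 8]
PartitionUtils.getScanParts(3, 10, 3); // [3, 6, 9]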
/connector/src/main/java/org.apache.flink/connector/nebula/utils/DataTypeEnum.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | public enum DataTypeEnum { 9 | VERTEX("VERTEX"), 10 | 11 | EDGE("EDGE"); 12 | 13 | private String type; 14 | 15 | DataTypeEnum(String type) { 16 | this.type = type; 17 | } 18 | 19 | public boolean isVertex() { 20 | return VERTEX.type.equalsIgnoreCase(this.type); 21 | } 22 | 23 | public static boolean checkValidDataType(String type) { 24 | return VERTEX.name().equalsIgnoreCase(type) || EDGE.name().equalsIgnoreCase(type); 25 | } 26 | } 27 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaCatalogUtils.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import org.apache.flink.connector.nebula.catalog.NebulaCatalog; 9 | 10 | /** 11 | * util for {@link NebulaCatalog} 12 | */ 13 | public class NebulaCatalogUtils { 14 | 15 | /** 16 | * Create a catalog instance from the given information 17 | */ 18 | public static NebulaCatalog createNebulaCatalog( 19 | String catalogName, 20 | String defaultSpace, 21 | String username, 22 | String password, 23 | String metaAddress, 24 | String graphAddress) { 25 | return new NebulaCatalog(catalogName, defaultSpace, username, password, 26 | metaAddress, graphAddress); 27 | } 28 | } 29 | --------------------------------------------------------------------------------
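createNebulaCatalog only constructs the NebulaCatalog object; to use it from Flink SQL it still has to be registered with a TableEnvironment. A sketch of that wiring, assuming a tableEnv already exists and using placeholder addresses and credentials:

NebulaCatalog catalog = NebulaCatalogUtils.createNebulaCatalog(
        "nebula_catalog",   // catalog name
        "flinkSink",        // default space (placeholder)
        "root",             // username (placeholder)
        "nebula",           // password (placeholder)
        "127.0.0.1:9559",   // meta address
        "127.0.0.1:9669");  // graph address
tableEnv.registerCatalog("nebula_catalog", catalog);
tableEnv.useCatalog("nebula_catalog");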
/connector/src/test/resources/ssl/casigned.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICljCCAX4CCQC9uuUY+ah8qzANBgkqhkiG9w0BAQsFADANMQswCQYDVQQGEwJD 3 | TjAeFw0yMTA5MjkwNzM4MDRaFw0yNDAxMDIwNzM4MDRaMA0xCzAJBgNVBAYTAkNO 4 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuo7hKpcs+VQKbGRq0fUL 5 | +GcSfPfJ8mARtIeI8WfU0j1vI5KNujI//G2olOGEueDCw4OO0UbdjnsFpgj2awAo 6 | rj4ga2W6adQHK8qHY6q/Rdqv0oDCrcePMtQ8IwbFjNWOXC4bn7GcV7mzOkigdcj8 7 | UPkSeaqI9XxBRm3OoDX+T8h6cDLrm+ncKB8KKe/QApKH4frV3HYDqGtN49zuRs6F 8 | iurFbXDGVAZEdFEJl38IQJdmE2ASOzEHZbxWKzO/DZr/Z2+L1CuycZIwuITcnddx 9 | b2Byx/opwX4HlyODeUBbyDp+hd+GkasmIcpOlIDw9OXIvrcajKvzLEbqGt2ThsxX 10 | QwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQAxzxtbYBQ2WgBGrpzOX4TxsuSaigqo 11 | YJ5zbVEHtwbsbBTZ7UJvRc9IyhrOL5Ui4PJI85chh1GpGqOmMoYSaWdddaIroilQ 12 | 56bn5haB8ezAMnLXbPuf97UENO0RIkyzt63XPIUkDnwlzOukIq50qgsYEDuiioM/ 13 | wpCqSbMJ4iK/SlSSUWw3cKuAHvFfLv7hkC6AhvT7yfaCNDs29xEQUCD12XlIdFGH 14 | FjMgVMcvcIePQq5ZcmSfVMge9jPjPx/Nj9SVauF5z5pil9qHG4jyXPGThiiJ3CE4 15 | GU5d/Qfe7OeiYI3LaoVufZ5pZnR9nMnpzqU46w9gY7vgi6bAhNwsCDr3 16 | -----END CERTIFICATE----- 17 | --------------------------------------------------------------------------------
/example/src/main/resources/ssl/casigned.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICljCCAX4CCQC9uuUY+ah8qzANBgkqhkiG9w0BAQsFADANMQswCQYDVQQGEwJD 3 | TjAeFw0yMTA5MjkwNzM4MDRaFw0yNDAxMDIwNzM4MDRaMA0xCzAJBgNVBAYTAkNO 4 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuo7hKpcs+VQKbGRq0fUL 5 | +GcSfPfJ8mARtIeI8WfU0j1vI5KNujI//G2olOGEueDCw4OO0UbdjnsFpgj2awAo 6 | rj4ga2W6adQHK8qHY6q/Rdqv0oDCrcePMtQ8IwbFjNWOXC4bn7GcV7mzOkigdcj8 7 | UPkSeaqI9XxBRm3OoDX+T8h6cDLrm+ncKB8KKe/QApKH4frV3HYDqGtN49zuRs6F 8 | iurFbXDGVAZEdFEJl38IQJdmE2ASOzEHZbxWKzO/DZr/Z2+L1CuycZIwuITcnddx 9 | b2Byx/opwX4HlyODeUBbyDp+hd+GkasmIcpOlIDw9OXIvrcajKvzLEbqGt2ThsxX 10 | QwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQAxzxtbYBQ2WgBGrpzOX4TxsuSaigqo 11 | YJ5zbVEHtwbsbBTZ7UJvRc9IyhrOL5Ui4PJI85chh1GpGqOmMoYSaWdddaIroilQ 12 | 56bn5haB8ezAMnLXbPuf97UENO0RIkyzt63XPIUkDnwlzOukIq50qgsYEDuiioM/ 13 | wpCqSbMJ4iK/SlSSUWw3cKuAHvFfLv7hkC6AhvT7yfaCNDs29xEQUCD12XlIdFGH 14 | FjMgVMcvcIePQq5ZcmSfVMge9jPjPx/Nj9SVauF5z5pil9qHG4jyXPGThiiJ3CE4 15 | GU5d/Qfe7OeiYI3LaoVufZ5pZnR9nMnpzqU46w9gY7vgi6bAhNwsCDr3 16 | -----END CERTIFICATE----- 17 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaSource.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import com.vesoft.nebula.client.storage.StorageClient; 9 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 10 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 11 | 12 | /** 13 | * NebulaSource is the reader that reads NebulaGraph data iteratively. 14 | */ 15 | abstract class NebulaSource { 16 | 17 | StorageClient storageClient; 18 | ExecutionOptions executionOptions; 19 | 20 | public NebulaSource(StorageClient storageClient, ExecutionOptions executionOptions) { 21 | this.storageClient = storageClient; 22 | this.executionOptions = executionOptions; 23 | } 24 | 25 | /** 26 | * whether the source has more data 27 | */ 28 | abstract boolean hasNext() throws Exception; 29 | 30 | /** 31 | * get the next row of NebulaGraph data 32 | */ 33 | abstract BaseTableRow next(); 34 | } 35 | --------------------------------------------------------------------------------
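The hasNext/next pair defines the pull-style contract that the input formats drive. A sketch of the consumption loop, where source stands for a constructed concrete NebulaSource, converter for a NebulaConverter, and emit() is a placeholder for the caller's logic:

while (source.hasNext()) {
    BaseTableRow row = source.next(); // raw storage-layer row
    emit(converter.convert(row));     // map it to the caller's record type
}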
/connector/src/test/resources/ssl/test.derive.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIDEjCCAfoCAQAwgZkxCzAJBgNVBAYTAkNOMREwDwYDVQQIDAhaaGVqaWFuZzER 3 | MA8GA1UEBwwISGFuZ3pob3UxFDASBgNVBAoMC1Zlc29mdCBJbmMuMRAwDgYDVQQL 4 | DAdzZWN0aW9uMRMwEQYDVQQDDApTaHlsb2NrIEhnMScwJQYJKoZIhvcNAQkBFhhz 5 | aHlsb2NrLmh1YW5nQHZlc29mdC5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw 6 | ggEKAoIBAQDHk1PQtaCGS31nvxKuT6pzVQuOsA2hEIDzBZuoBK3blezBB16fjUWG 7 | 2wHG/r9Oss5YzOly4viL1oFLsNdYg27EFH7pcGfdSUmZa6LHILegJTmLa1aB4lRG 8 | 9EsvPIxNuo637CW2z6EWElVKXn2N1G1vW3fpKGxJ+d1ovaFfBliO0sK+myW+vYdK 9 | rNg70WqKKCoCIlIjEWw3vQdrmvhuhIBbG1bXkXbJwIepBdb4wGSx8qsgs93I6/je 10 | /K/iJaPJIqdH8loo6fSoDBUiNA87ZsQdtbBeuk7QuF71SxD5+E8wCMtFMwRGmL0v 11 | YMPwkaurKxwEs49e8eTzRvIrNtyYgVo7AgMBAAGgMzAVBgkqhkiG9w0BCQcxCAwG 12 | dmVzb2Z0MBoGCSqGSIb3DQEJAjENDAtWZXNvZnQgSW5jLjANBgkqhkiG9w0BAQsF 13 | AAOCAQEAjmyCyxziJMR8NILRAwmfYcBB90CbTFMMEyWy402KxoXcyVZBGO2eukIq 14 | gaF2ywuh6yuTPtGsdVMVTWDQ4RLYpoQoR5Blu+M8Or8rhZSfMYXi79Ne3abSF28E 15 | eWjBmh2Ys0GtaThlufJBWE+vWPH2iEGrSRTg1fvBLBzAW6nXU2svoTrKfDcEoY5z 16 | xB0CKhBoewoIZ2FPBmBAnIWHfXR/vQ76QIoNdfQ4nT8iXuLRoNjRlvVU4AUDwKtu 17 | keRDrnmJ7A5eqTlleCMzra2MAp9Na9gojXlGQP9q9V8nFtSvbjYAoH0ezWpdWj4+ 18 | Rtu9EK4JkDymmmZcneFapExZrRLt0A== 19 | -----END CERTIFICATE REQUEST----- 20 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaVertex.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import java.io.Serializable; 9 | import java.util.List; 10 | 11 | public class NebulaVertex implements Serializable { 12 | 13 | private String vid; 14 | private List<String> propValues; 15 | 16 | public NebulaVertex(String vid, List<String> propValues) { 17 | this.vid = vid; 18 | this.propValues = propValues; 19 | } 20 | 21 | public String getVid() { 22 | return vid; 23 | } 24 | 25 | public void setVid(String vid) { 26 | this.vid = vid; 27 | } 28 | 29 | public String getPropValuesString() { 30 | return String.join(",", propValues); 31 | } 32 | 33 | public List<String> getPropValues() { 34 | return propValues; 35 | } 36 | 37 | public void setPropValues(List<String> propValues) { 38 | this.propValues = propValues; 39 | } 40 | 41 | @Override 42 | public String toString() { 43 | return "NebulaVertex{" 44 | + "vid='" + vid + '\'' 45 | + ", propValues=" + propValues 46 | + '}'; 47 | } 48 | } 49 | --------------------------------------------------------------------------------
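NebulaVertex carries property values that have already been rendered as nGQL literals (string values keep their own quotes), so getPropValuesString can be dropped straight into the VALUES clause of an INSERT VERTEX statement. A small sketch with made-up data, using java.util.Arrays:

NebulaVertex vertex = new NebulaVertex(
        "\"player100\"",                 // vid, quoted for a string-typed space
        Arrays.asList("\"Tim\"", "42")); // property literals: name, age
vertex.getPropValuesString();            // "Tim",42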
/.github/workflows/pull_request.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a Java project with Maven 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven 3 | 4 | name: pull_request 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: 11 | - master 12 | - 'v[0-9]+.*' 13 | 14 | jobs: 15 | build: 16 | 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | - uses: actions/checkout@v2 21 | - name: Set up JDK 1.8 22 | uses: actions/setup-java@v1 23 | with: 24 | java-version: 1.8 25 | 26 | - name: Cache the Maven packages to speed up build 27 | uses: actions/cache@v2 28 | with: 29 | path: ~/.m2/repository 30 | key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} 31 | restore-keys: ${{ runner.os }}-maven- 32 | 33 | - name: Install nebula-graph 34 | run: | 35 | mkdir tmp 36 | pushd tmp 37 | git clone https://github.com/vesoft-inc/nebula-docker-compose.git 38 | pushd nebula-docker-compose/ 39 | cp ../../connector/src/test/resources/docker-compose.yaml . 40 | docker-compose up -d 41 | sleep 10 42 | popd 43 | popd 44 | 45 | - name: Build with Maven 46 | run: | 47 | mvn -B package 48 | bash <(curl -s https://codecov.io/bash) 49 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaEdgeBatchOutputFormat.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.connector.nebula.sink; 2 | 3 | import java.util.Map; 4 | import org.apache.flink.connector.nebula.connection.NebulaGraphConnectionProvider; 5 | import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider; 6 | import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions; 7 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 8 | import org.apache.flink.types.Row; 9 | 10 | public class NebulaEdgeBatchOutputFormat 11 | extends NebulaBatchOutputFormat<Row, EdgeExecutionOptions> { 12 | public NebulaEdgeBatchOutputFormat(NebulaGraphConnectionProvider graphProvider, 13 | NebulaMetaConnectionProvider metaProvider, 14 | EdgeExecutionOptions executionOptions) { 15 | super(graphProvider, metaProvider, executionOptions); 16 | } 17 | 18 | @Override 19 | protected NebulaBatchExecutor<Row> createNebulaBatchExecutor() { 20 | VidTypeEnum vidType = metaProvider.getVidType(metaClient, executionOptions.getGraphSpace()); 21 | Map<String, Integer> schema = metaProvider.getEdgeSchema( 22 | metaClient, 23 | executionOptions.getGraphSpace(), 24 | executionOptions.getLabel()); 25 | return new NebulaEdgeBatchExecutor(executionOptions, vidType, schema); 26 | } 27 | } 28 | --------------------------------------------------------------------------------
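To write edges with this output format, the usual wiring wraps it in a NebulaSinkFunction and attaches it to a DataStream<Row>. A sketch under the assumption that NebulaSinkFunction accepts a batch output format (its constructor is not shown in this section) and that the providers and options are built elsewhere:

NebulaEdgeBatchOutputFormat outputFormat = new NebulaEdgeBatchOutputFormat(
        graphProvider, metaProvider, edgeExecutionOptions);
NebulaSinkFunction<Row> sink = new NebulaSinkFunction<>(outputFormat);
dataStream.addSink(sink); // dataStream: DataStream<Row> of edge rows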
/connector/src/test/resources/ssl/test.derive.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDvjCCAqYCFEry67lB6n7oNY7MflHC8aOO4YhzMA0GCSqGSIb3DQEBCwUAMIGc 3 | MQswCQYDVQQGEwJDTjERMA8GA1UECAwIWmhlamlhbmcxETAPBgNVBAcMCEhhbmd6 4 | aG91MRQwEgYDVQQKDAtWZXNvZnQgSW5jLjEQMA4GA1UECwwHc2VjdGlvbjEWMBQG 5 | A1UEAwwNc2h5bG9jayBodWFuZzEnMCUGCSqGSIb3DQEJARYYc2h5bG9jay5odWFu 6 | Z0B2ZXNvZnQuY29tMB4XDTIxMDgyNDEwNTExMloXDTIzMTEyNzEwNTExMlowgZkx 7 | CzAJBgNVBAYTAkNOMREwDwYDVQQIDAhaaGVqaWFuZzERMA8GA1UEBwwISGFuZ3po 8 | b3UxFDASBgNVBAoMC1Zlc29mdCBJbmMuMRAwDgYDVQQLDAdzZWN0aW9uMRMwEQYD 9 | VQQDDApTaHlsb2NrIEhnMScwJQYJKoZIhvcNAQkBFhhzaHlsb2NrLmh1YW5nQHZl 10 | c29mdC5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDHk1PQtaCG 11 | S31nvxKuT6pzVQuOsA2hEIDzBZuoBK3blezBB16fjUWG2wHG/r9Oss5YzOly4viL 12 | 1oFLsNdYg27EFH7pcGfdSUmZa6LHILegJTmLa1aB4lRG9EsvPIxNuo637CW2z6EW 13 | ElVKXn2N1G1vW3fpKGxJ+d1ovaFfBliO0sK+myW+vYdKrNg70WqKKCoCIlIjEWw3 14 | vQdrmvhuhIBbG1bXkXbJwIepBdb4wGSx8qsgs93I6/je/K/iJaPJIqdH8loo6fSo 15 | DBUiNA87ZsQdtbBeuk7QuF71SxD5+E8wCMtFMwRGmL0vYMPwkaurKxwEs49e8eTz 16 | RvIrNtyYgVo7AgMBAAEwDQYJKoZIhvcNAQELBQADggEBAGBpm5OLXn02kWr1ENU5 17 | FOOVryD41SCmPy8hLwQ2MCXd446UfTXc5TTlllksaePn373ZANLUe78vUCoVPjOh 18 | dU5GxyOKtubXovI+yuvMS11u00KtgiAd5qa+IhX3c/P60bh4+fdKZ9ViyLsG+IpQ 19 | +XDYT2uekLyjXXJU6h1raW7M1VY9FcDC63moXz0WgWJ/9tJgB0ZQkVcL+2UpveoZ 20 | Whf9P0xAzCmNSrR7CMhdeRN2vBQQaHXk/64wkHncdkz/NglVl00rh4MtBKZ6Cqze 21 | uZvgrxOJNzB4aXBMHO7sWzw1VSfS79CZm4H39hBWGiVEkr3yZYQbboDRY6F5dQyc 22 | BZc= 23 | -----END CERTIFICATE----- 24 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaInputRowFormat.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import java.io.IOException; 9 | import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider; 10 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 11 | import org.apache.flink.core.io.InputSplit; 12 | import org.apache.flink.types.Row; 13 | 14 | /** 15 | * implementation of NebulaInputFormat. 16 | * Read NebulaGraph data in flink's {@link Row} format. 17 | * how to use: 18 | * NebulaInputRowFormat inputFormat = new NebulaInputRowFormat 19 | * (storageConnectionProvider, vertexExecutionOptions); 20 | * DataSource<Row> dataSource = env.createInput(inputFormat); 21 | * 22 | */ 23 | public class NebulaInputRowFormat extends NebulaInputFormat<Row> { 24 | 25 | public NebulaInputRowFormat(NebulaStorageConnectionProvider storageConnectionProvider, 26 | ExecutionOptions executionOptions) { 27 | super(storageConnectionProvider, executionOptions); 28 | } 29 | 30 | @Override 31 | public void open(InputSplit inputSplit) throws IOException { 32 | super.open(inputSplit); 33 | super.nebulaConverter = new NebulaRowConverter(); 34 | } 35 | } 36 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaVertexBatchOutputFormat.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.connector.nebula.sink; 2 | 3 | import java.util.Map; 4 | import org.apache.flink.connector.nebula.connection.NebulaGraphConnectionProvider; 5 | import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider; 6 | import org.apache.flink.connector.nebula.statement.VertexExecutionOptions; 7 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 8 | import org.apache.flink.types.Row; 9 | 10 | public class NebulaVertexBatchOutputFormat 11 | extends NebulaBatchOutputFormat<Row, VertexExecutionOptions> { 12 | 13 | public NebulaVertexBatchOutputFormat(NebulaGraphConnectionProvider graphProvider, 14 | NebulaMetaConnectionProvider metaProvider, 15 | VertexExecutionOptions executionOptions) { 16 | super(graphProvider, metaProvider, executionOptions); 17 | } 18 | 19 | @Override 20 | protected NebulaBatchExecutor<Row> createNebulaBatchExecutor() { 21 | VidTypeEnum vidType = metaProvider.getVidType(metaClient, executionOptions.getGraphSpace()); 22 | Map<String, Integer> schema = metaProvider.getTagSchema( 23 | metaClient, 24 | executionOptions.getGraphSpace(), 25 | executionOptions.getLabel()); 26 | return new NebulaVertexBatchExecutor(executionOptions, vidType, schema); 27 | } 28 | } 29 | --------------------------------------------------------------------------------
/.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a Java project with Maven 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven 3 | 4 | name: release 5 | 6 | on: 7 | release: 8 | types: published 9 | 10 | jobs: 11 | build: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up JDK 1.8 18 | uses: actions/setup-java@v1 19 | with: 20 | java-version: 1.8 21 | 22 | - name: Cache the Maven packages to speed up build 23 | uses: actions/cache@v2 24 | with: 25 | path: ~/.m2/repository 26 | key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} 27 | restore-keys: ${{ runner.os }}-maven- 28 | 29 | - name: Install nebula-graph 30 | run: | 31 | mkdir tmp 32 | pushd tmp 33 | git clone https://github.com/vesoft-inc/nebula-docker-compose.git 34 | pushd nebula-docker-compose/ 35 | cp ../../connector/src/test/resources/docker-compose.yaml . 
36 | docker-compose up -d 37 | sleep 10 38 | popd 39 | popd 40 | 41 | - name: Deploy release to Maven 42 | uses: samuelmeuli/action-maven-publish@v1 43 | with: 44 | gpg_private_key: ${{ secrets.JAVA_GPG_PRIVATE_KEY }} 45 | gpg_passphrase: "" 46 | nexus_username: ${{ secrets.OSSRH_USERNAME }} 47 | nexus_password: ${{ secrets.OSSRH_TOKEN }} 48 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaEdge.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import java.io.Serializable; 9 | import java.util.List; 10 | 11 | public class NebulaEdge implements Serializable { 12 | private String source; 13 | private String target; 14 | private Long rank; 15 | private List<String> propValues; 16 | 17 | public NebulaEdge(String source, String target, Long rank, List<String> propValues) { 18 | this.source = source; 19 | this.target = target; 20 | this.rank = rank; 21 | this.propValues = propValues; 22 | } 23 | 24 | public String getSource() { 25 | return source; 26 | } 27 | 28 | public String getTarget() { 29 | return target; 30 | } 31 | 32 | public Long getRank() { 33 | return rank; 34 | } 35 | 36 | public List<String> getPropValues() { 37 | return propValues; 38 | } 39 | 40 | public String getPropValuesString() { 41 | return String.join(",", propValues); 42 | } 43 | 44 | @Override 45 | public String toString() { 46 | return "NebulaEdge{" 47 | + "source='" + source + '\'' 48 | + ", target='" + target + '\'' 49 | + ", rank=" + rank 50 | + ", propValues=" + propValues 51 | + '}'; 52 | } 53 | } 54 | --------------------------------------------------------------------------------
/.github/workflows/snapshot.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a Java project with Maven 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven 3 | 4 | name: snapshot 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | schedule: 10 | - cron: '0 6 * * *' 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Set up JDK 1.8 20 | uses: actions/setup-java@v1 21 | with: 22 | java-version: 1.8 23 | 24 | - name: Cache the Maven packages to speed up build 25 | uses: actions/cache@v2 26 | with: 27 | path: ~/.m2/repository 28 | key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} 29 | restore-keys: ${{ runner.os }}-maven- 30 | 31 | - name: Install nebula-graph 32 | run: | 33 | mkdir tmp 34 | pushd tmp 35 | git clone https://github.com/vesoft-inc/nebula-docker-compose.git 36 | pushd nebula-docker-compose/ 37 | cp ../../connector/src/test/resources/docker-compose.yaml . 
38 | docker-compose up -d 39 | sleep 10 40 | popd 41 | popd 42 | 43 | - name: Deploy SNAPSHOT to Sonatype 44 | uses: samuelmeuli/action-maven-publish@v1 45 | with: 46 | gpg_private_key: ${{ secrets.JAVA_GPG_PRIVATE_KEY }} 47 | gpg_passphrase: "" 48 | nexus_username: ${{ secrets.OSSRH_USERNAME }} 49 | nexus_password: ${{ secrets.OSSRH_TOKEN }} 50 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaInputTableRowFormat.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 9 | import java.io.IOException; 10 | import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider; 11 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 12 | import org.apache.flink.core.io.InputSplit; 13 | 14 | /** 15 | * implementation of NebulaInputFormat. 16 | * Read NebulaGraph data in nebula's {@link BaseTableRow} format. 17 | * how to use: 18 | * NebulaInputTableRowFormat inputFormat = new NebulaInputTableRowFormat 19 | * (storageConnectionProvider, vertexExecutionOptions); 20 | * DataSource<BaseTableRow> dataSource = env.createInput(inputFormat); 21 | * 22 | */ 23 | public class NebulaInputTableRowFormat extends NebulaInputFormat<BaseTableRow> { 24 | 25 | public NebulaInputTableRowFormat(NebulaStorageConnectionProvider storageConnectionProvider, 26 | ExecutionOptions executionOptions) { 27 | super(storageConnectionProvider, executionOptions); 28 | } 29 | 30 | @Override 31 | public void open(InputSplit inputSplit) throws IOException { 32 | super.open(inputSplit); 33 | super.nebulaConverter = new NebulaBaseTableRowConverter(); 34 | } 35 | } 36 | --------------------------------------------------------------------------------
/example/src/main/resources/ssl/casigned.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEGzCCAwOgAwIBAgIUDcmZFpL4PcdCXfLRBK8bR2vb39cwDQYJKoZIhvcNAQEL 3 | BQAwgZwxCzAJBgNVBAYTAkNOMREwDwYDVQQIDAhaaGVqaWFuZzERMA8GA1UEBwwI 4 | SGFuZ3pob3UxFDASBgNVBAoMC1Zlc29mdCBJbmMuMRAwDgYDVQQLDAdzZWN0aW9u 5 | MRYwFAYDVQQDDA1zaHlsb2NrIGh1YW5nMScwJQYJKoZIhvcNAQkBFhhzaHlsb2Nr 6 | Lmh1YW5nQHZlc29mdC5jb20wHhcNMjEwODE5MDkyNDQ3WhcNMjUwODE4MDkyNDQ3 7 | WjCBnDELMAkGA1UEBhMCQ04xETAPBgNVBAgMCFpoZWppYW5nMREwDwYDVQQHDAhI 8 | YW5nemhvdTEUMBIGA1UECgwLVmVzb2Z0IEluYy4xEDAOBgNVBAsMB3NlY3Rpb24x 9 | FjAUBgNVBAMMDXNoeWxvY2sgaHVhbmcxJzAlBgkqhkiG9w0BCQEWGHNoeWxvY2su 10 | aHVhbmdAdmVzb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB 11 | AMEAgpamCQHl+8JnUHI6/VmJHjDLYJLTliN/CwpFrhMqIVjJ8wG57WYLpXpn91Lz 12 | eHu52LkVzcikybIJ2a+LOTvnhNFdbmTbqDtrb+s6wM/sO+nF6tU2Av4e5zhyKoeR 13 | LL+rHMk3nymohbdN4djySFmOOU5A1O/4b0bZz4Ylu995kUawdiaEo13BzxxOC7Ik 14 | Gge5RyDcm0uLXZqTAPy5Sjv/zpOyj0AqL1CJUH7XBN9OMRhVU0ZX9nHWl1vgLRld 15 | J6XT17Y9QbbHhCNEdAmFE5kEFgCvZc+MungUYABlkvoj86TLmC/FMV6fWdxQssyd 16 | hS+ssfJFLaTDaEFz5a/Tr48CAwEAAaNTMFEwHQYDVR0OBBYEFK0GVrQx+wX1GCHy 17 | e+6fl4X+prmYMB8GA1UdIwQYMBaAFK0GVrQx+wX1GCHye+6fl4X+prmYMA8GA1Ud 18 | EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAHqP8P+ZUHmngviHLSSN1ln5 19 | Mx4BCkVeFRUaFx0yFXytV/iLXcG2HpFg3A9rAFoYgCDwi1xpsERnBZ/ShTv/eFOc 20 | IxBY5yggx3/lGi8tAgvUdarhd7mQO67UJ0V4YU3hAkbnZ8grHHXj+4hfgUpY4ok6 21 | 
yaed6HXwknBb9W8N1jZI8ginhkhjaeRCHdMiF+fBvNCtmeR1bCml1Uz7ailrpcaT 22 | Mf84+5VYuFEnaRZYWFNsWNCOBlJ/6/b3V10vMXzMmYHqz3xgAq0M3fVTFTzopnAX 23 | DLSzorL/dYVdqEDCQi5XI9YAlgWN4VeGzJI+glkLOCNzHxRNP6Qev+YI+7Uxz6I= 24 | -----END CERTIFICATE----- 25 | -------------------------------------------------------------------------------- /connector/src/test/resources/ssl/casigned.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEGzCCAwOgAwIBAgIUDcmZFpL4PcdCXfLRBK8bR2vb39cwDQYJKoZIhvcNAQEL 3 | BQAwgZwxCzAJBgNVBAYTAkNOMREwDwYDVQQIDAhaaGVqaWFuZzERMA8GA1UEBwwI 4 | SGFuZ3pob3UxFDASBgNVBAoMC1Zlc29mdCBJbmMuMRAwDgYDVQQLDAdzZWN0aW9u 5 | MRYwFAYDVQQDDA1zaHlsb2NrIGh1YW5nMScwJQYJKoZIhvcNAQkBFhhzaHlsb2Nr 6 | Lmh1YW5nQHZlc29mdC5jb20wHhcNMjEwODE5MDkyNDQ3WhcNMjUwODE4MDkyNDQ3 7 | WjCBnDELMAkGA1UEBhMCQ04xETAPBgNVBAgMCFpoZWppYW5nMREwDwYDVQQHDAhI 8 | YW5nemhvdTEUMBIGA1UECgwLVmVzb2Z0IEluYy4xEDAOBgNVBAsMB3NlY3Rpb24x 9 | FjAUBgNVBAMMDXNoeWxvY2sgaHVhbmcxJzAlBgkqhkiG9w0BCQEWGHNoeWxvY2su 10 | aHVhbmdAdmVzb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB 11 | AMEAgpamCQHl+8JnUHI6/VmJHjDLYJLTliN/CwpFrhMqIVjJ8wG57WYLpXpn91Lz 12 | eHu52LkVzcikybIJ2a+LOTvnhNFdbmTbqDtrb+s6wM/sO+nF6tU2Av4e5zhyKoeR 13 | LL+rHMk3nymohbdN4djySFmOOU5A1O/4b0bZz4Ylu995kUawdiaEo13BzxxOC7Ik 14 | Gge5RyDcm0uLXZqTAPy5Sjv/zpOyj0AqL1CJUH7XBN9OMRhVU0ZX9nHWl1vgLRld 15 | J6XT17Y9QbbHhCNEdAmFE5kEFgCvZc+MungUYABlkvoj86TLmC/FMV6fWdxQssyd 16 | hS+ssfJFLaTDaEFz5a/Tr48CAwEAAaNTMFEwHQYDVR0OBBYEFK0GVrQx+wX1GCHy 17 | e+6fl4X+prmYMB8GA1UdIwQYMBaAFK0GVrQx+wX1GCHye+6fl4X+prmYMA8GA1Ud 18 | EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAHqP8P+ZUHmngviHLSSN1ln5 19 | Mx4BCkVeFRUaFx0yFXytV/iLXcG2HpFg3A9rAFoYgCDwi1xpsERnBZ/ShTv/eFOc 20 | IxBY5yggx3/lGi8tAgvUdarhd7mQO67UJ0V4YU3hAkbnZ8grHHXj+4hfgUpY4ok6 21 | yaed6HXwknBb9W8N1jZI8ginhkhjaeRCHdMiF+fBvNCtmeR1bCml1Uz7ailrpcaT 22 | Mf84+5VYuFEnaRZYWFNsWNCOBlJ/6/b3V10vMXzMmYHqz3xgAq0M3fVTFTzopnAX 23 | DLSzorL/dYVdqEDCQi5XI9YAlgWN4VeGzJI+glkLOCNzHxRNP6Qev+YI+7Uxz6I= 24 | -----END CERTIFICATE----- 25 | -------------------------------------------------------------------------------- /connector/src/test/resources/ssl/selfsigned.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEGzCCAwOgAwIBAgIUDcmZFpL4PcdCXfLRBK8bR2vb39cwDQYJKoZIhvcNAQEL 3 | BQAwgZwxCzAJBgNVBAYTAkNOMREwDwYDVQQIDAhaaGVqaWFuZzERMA8GA1UEBwwI 4 | SGFuZ3pob3UxFDASBgNVBAoMC1Zlc29mdCBJbmMuMRAwDgYDVQQLDAdzZWN0aW9u 5 | MRYwFAYDVQQDDA1zaHlsb2NrIGh1YW5nMScwJQYJKoZIhvcNAQkBFhhzaHlsb2Nr 6 | Lmh1YW5nQHZlc29mdC5jb20wHhcNMjEwODE5MDkyNDQ3WhcNMjUwODE4MDkyNDQ3 7 | WjCBnDELMAkGA1UEBhMCQ04xETAPBgNVBAgMCFpoZWppYW5nMREwDwYDVQQHDAhI 8 | YW5nemhvdTEUMBIGA1UECgwLVmVzb2Z0IEluYy4xEDAOBgNVBAsMB3NlY3Rpb24x 9 | FjAUBgNVBAMMDXNoeWxvY2sgaHVhbmcxJzAlBgkqhkiG9w0BCQEWGHNoeWxvY2su 10 | aHVhbmdAdmVzb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB 11 | AMEAgpamCQHl+8JnUHI6/VmJHjDLYJLTliN/CwpFrhMqIVjJ8wG57WYLpXpn91Lz 12 | eHu52LkVzcikybIJ2a+LOTvnhNFdbmTbqDtrb+s6wM/sO+nF6tU2Av4e5zhyKoeR 13 | LL+rHMk3nymohbdN4djySFmOOU5A1O/4b0bZz4Ylu995kUawdiaEo13BzxxOC7Ik 14 | Gge5RyDcm0uLXZqTAPy5Sjv/zpOyj0AqL1CJUH7XBN9OMRhVU0ZX9nHWl1vgLRld 15 | J6XT17Y9QbbHhCNEdAmFE5kEFgCvZc+MungUYABlkvoj86TLmC/FMV6fWdxQssyd 16 | hS+ssfJFLaTDaEFz5a/Tr48CAwEAAaNTMFEwHQYDVR0OBBYEFK0GVrQx+wX1GCHy 17 | e+6fl4X+prmYMB8GA1UdIwQYMBaAFK0GVrQx+wX1GCHye+6fl4X+prmYMA8GA1Ud 18 | EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAHqP8P+ZUHmngviHLSSN1ln5 19 | Mx4BCkVeFRUaFx0yFXytV/iLXcG2HpFg3A9rAFoYgCDwi1xpsERnBZ/ShTv/eFOc 20 | 
IxBY5yggx3/lGi8tAgvUdarhd7mQO67UJ0V4YU3hAkbnZ8grHHXj+4hfgUpY4ok6 21 | yaed6HXwknBb9W8N1jZI8ginhkhjaeRCHdMiF+fBvNCtmeR1bCml1Uz7ailrpcaT 22 | Mf84+5VYuFEnaRZYWFNsWNCOBlJ/6/b3V10vMXzMmYHqz3xgAq0M3fVTFTzopnAX 23 | DLSzorL/dYVdqEDCQi5XI9YAlgWN4VeGzJI+glkLOCNzHxRNP6Qev+YI+7Uxz6I= 24 | -----END CERTIFICATE----- 25 | -------------------------------------------------------------------------------- /connector/src/test/resources/ssl/test.ca.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEGzCCAwOgAwIBAgIUDcmZFpL4PcdCXfLRBK8bR2vb39cwDQYJKoZIhvcNAQEL 3 | BQAwgZwxCzAJBgNVBAYTAkNOMREwDwYDVQQIDAhaaGVqaWFuZzERMA8GA1UEBwwI 4 | SGFuZ3pob3UxFDASBgNVBAoMC1Zlc29mdCBJbmMuMRAwDgYDVQQLDAdzZWN0aW9u 5 | MRYwFAYDVQQDDA1zaHlsb2NrIGh1YW5nMScwJQYJKoZIhvcNAQkBFhhzaHlsb2Nr 6 | Lmh1YW5nQHZlc29mdC5jb20wHhcNMjEwODE5MDkyNDQ3WhcNMjUwODE4MDkyNDQ3 7 | WjCBnDELMAkGA1UEBhMCQ04xETAPBgNVBAgMCFpoZWppYW5nMREwDwYDVQQHDAhI 8 | YW5nemhvdTEUMBIGA1UECgwLVmVzb2Z0IEluYy4xEDAOBgNVBAsMB3NlY3Rpb24x 9 | FjAUBgNVBAMMDXNoeWxvY2sgaHVhbmcxJzAlBgkqhkiG9w0BCQEWGHNoeWxvY2su 10 | aHVhbmdAdmVzb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB 11 | AMEAgpamCQHl+8JnUHI6/VmJHjDLYJLTliN/CwpFrhMqIVjJ8wG57WYLpXpn91Lz 12 | eHu52LkVzcikybIJ2a+LOTvnhNFdbmTbqDtrb+s6wM/sO+nF6tU2Av4e5zhyKoeR 13 | LL+rHMk3nymohbdN4djySFmOOU5A1O/4b0bZz4Ylu995kUawdiaEo13BzxxOC7Ik 14 | Gge5RyDcm0uLXZqTAPy5Sjv/zpOyj0AqL1CJUH7XBN9OMRhVU0ZX9nHWl1vgLRld 15 | J6XT17Y9QbbHhCNEdAmFE5kEFgCvZc+MungUYABlkvoj86TLmC/FMV6fWdxQssyd 16 | hS+ssfJFLaTDaEFz5a/Tr48CAwEAAaNTMFEwHQYDVR0OBBYEFK0GVrQx+wX1GCHy 17 | e+6fl4X+prmYMB8GA1UdIwQYMBaAFK0GVrQx+wX1GCHye+6fl4X+prmYMA8GA1Ud 18 | EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAHqP8P+ZUHmngviHLSSN1ln5 19 | Mx4BCkVeFRUaFx0yFXytV/iLXcG2HpFg3A9rAFoYgCDwi1xpsERnBZ/ShTv/eFOc 20 | IxBY5yggx3/lGi8tAgvUdarhd7mQO67UJ0V4YU3hAkbnZ8grHHXj+4hfgUpY4ok6 21 | yaed6HXwknBb9W8N1jZI8ginhkhjaeRCHdMiF+fBvNCtmeR1bCml1Uz7ailrpcaT 22 | Mf84+5VYuFEnaRZYWFNsWNCOBlJ/6/b3V10vMXzMmYHqz3xgAq0M3fVTFTzopnAX 23 | DLSzorL/dYVdqEDCQi5XI9YAlgWN4VeGzJI+glkLOCNzHxRNP6Qev+YI+7Uxz6I= 24 | -----END CERTIFICATE----- 25 | -------------------------------------------------------------------------------- /example/src/main/resources/ssl/selfsigned.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEGzCCAwOgAwIBAgIUDcmZFpL4PcdCXfLRBK8bR2vb39cwDQYJKoZIhvcNAQEL 3 | BQAwgZwxCzAJBgNVBAYTAkNOMREwDwYDVQQIDAhaaGVqaWFuZzERMA8GA1UEBwwI 4 | SGFuZ3pob3UxFDASBgNVBAoMC1Zlc29mdCBJbmMuMRAwDgYDVQQLDAdzZWN0aW9u 5 | MRYwFAYDVQQDDA1zaHlsb2NrIGh1YW5nMScwJQYJKoZIhvcNAQkBFhhzaHlsb2Nr 6 | Lmh1YW5nQHZlc29mdC5jb20wHhcNMjEwODE5MDkyNDQ3WhcNMjUwODE4MDkyNDQ3 7 | WjCBnDELMAkGA1UEBhMCQ04xETAPBgNVBAgMCFpoZWppYW5nMREwDwYDVQQHDAhI 8 | YW5nemhvdTEUMBIGA1UECgwLVmVzb2Z0IEluYy4xEDAOBgNVBAsMB3NlY3Rpb24x 9 | FjAUBgNVBAMMDXNoeWxvY2sgaHVhbmcxJzAlBgkqhkiG9w0BCQEWGHNoeWxvY2su 10 | aHVhbmdAdmVzb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB 11 | AMEAgpamCQHl+8JnUHI6/VmJHjDLYJLTliN/CwpFrhMqIVjJ8wG57WYLpXpn91Lz 12 | eHu52LkVzcikybIJ2a+LOTvnhNFdbmTbqDtrb+s6wM/sO+nF6tU2Av4e5zhyKoeR 13 | LL+rHMk3nymohbdN4djySFmOOU5A1O/4b0bZz4Ylu995kUawdiaEo13BzxxOC7Ik 14 | Gge5RyDcm0uLXZqTAPy5Sjv/zpOyj0AqL1CJUH7XBN9OMRhVU0ZX9nHWl1vgLRld 15 | J6XT17Y9QbbHhCNEdAmFE5kEFgCvZc+MungUYABlkvoj86TLmC/FMV6fWdxQssyd 16 | hS+ssfJFLaTDaEFz5a/Tr48CAwEAAaNTMFEwHQYDVR0OBBYEFK0GVrQx+wX1GCHy 17 | e+6fl4X+prmYMB8GA1UdIwQYMBaAFK0GVrQx+wX1GCHye+6fl4X+prmYMA8GA1Ud 18 | EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAHqP8P+ZUHmngviHLSSN1ln5 19 | 
Mx4BCkVeFRUaFx0yFXytV/iLXcG2HpFg3A9rAFoYgCDwi1xpsERnBZ/ShTv/eFOc 20 | IxBY5yggx3/lGi8tAgvUdarhd7mQO67UJ0V4YU3hAkbnZ8grHHXj+4hfgUpY4ok6 21 | yaed6HXwknBb9W8N1jZI8ginhkhjaeRCHdMiF+fBvNCtmeR1bCml1Uz7ailrpcaT 22 | Mf84+5VYuFEnaRZYWFNsWNCOBlJ/6/b3V10vMXzMmYHqz3xgAq0M3fVTFTzopnAX 23 | DLSzorL/dYVdqEDCQi5XI9YAlgWN4VeGzJI+glkLOCNzHxRNP6Qev+YI+7Uxz6I= 24 | -----END CERTIFICATE----- 25 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaSpace.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2022 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import java.io.Serializable; 9 | import java.util.Map; 10 | 11 | public class NebulaSpace implements Serializable { 12 | 13 | private String spaceName; 14 | private String comment; 15 | private Map<String, String> props; 16 | 17 | public NebulaSpace() { 18 | } 19 | 20 | public NebulaSpace(String spaceName, String comment, Map<String, String> props) { 21 | this.spaceName = spaceName; 22 | this.comment = comment; 23 | this.props = props; 24 | } 25 | 26 | public String getSpaceName() { 27 | return spaceName; 28 | } 29 | 30 | public void setSpaceName(String spaceName) { 31 | this.spaceName = spaceName; 32 | } 33 | 34 | public String getComment() { 35 | return comment; 36 | } 37 | 38 | public void setComment(String comment) { 39 | this.comment = comment; 40 | } 41 | 42 | public Map<String, String> getProps() { 43 | return props; 44 | } 45 | 46 | public void setProps(Map<String, String> props) { 47 | this.props = props; 48 | } 49 | 50 | @Override 51 | public String toString() { 52 | return "NebulaSpace{" 53 | + "spaceName='" + spaceName + '\'' 54 | + ", comment='" + comment + '\'' 55 | + ", props=" + props 56 | + '}'; 57 | } 58 | } 59 | --------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/connection/SelfSignParams.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.connection; 7 | 8 | import java.io.Serializable; 9 | 10 | public class SelfSignParams implements Serializable { 11 | 12 | private String crtFilePath; 13 | private String keyFilePath; 14 | private String password; 15 | 16 | public SelfSignParams(String crtFilePath, String keyFilePath, String password) { 17 | this.crtFilePath = crtFilePath; 18 | this.keyFilePath = keyFilePath; 19 | this.password = password; 20 | } 21 | 22 | public String getCrtFilePath() { 23 | return crtFilePath; 24 | } 25 | 26 | public void setCrtFilePath(String crtFilePath) { 27 | this.crtFilePath = crtFilePath; 28 | } 29 | 30 | public String getKeyFilePath() { 31 | return keyFilePath; 32 | } 33 | 34 | public void setKeyFilePath(String keyFilePath) { 35 | this.keyFilePath = keyFilePath; 36 | } 37 | 38 | public String getPassword() { 39 | return password; 40 | } 41 | 42 | public void setPassword(String password) { 43 | this.password = password; 44 | } 45 | 46 | @Override 47 | public String toString() { 48 | return "SelfSSLSignParams{" 49 | + "crtFilePath='" + crtFilePath + '\'' 50 | + ", keyFilePath='" + keyFilePath + '\'' 51 | + ", password='" + password + '\'' 52 | + '}'; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/connection/CASignParams.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.connection; 7 | 8 | import java.io.Serializable; 9 | 10 | public class CASignParams implements Serializable { 11 | 12 | private String caCrtFilePath; 13 | private String crtFilePath; 14 | private String keyFilePath; 15 | 16 | public CASignParams(String caCrtFilePath, String crtFilePath, String keyFilePath) { 17 | this.caCrtFilePath = caCrtFilePath; 18 | this.crtFilePath = crtFilePath; 19 | this.keyFilePath = keyFilePath; 20 | } 21 | 22 | public String getCaCrtFilePath() { 23 | return caCrtFilePath; 24 | } 25 | 26 | public void setCaCrtFilePath(String caCrtFilePath) { 27 | this.caCrtFilePath = caCrtFilePath; 28 | } 29 | 30 | public String getCrtFilePath() { 31 | return crtFilePath; 32 | } 33 | 34 | public void setCrtFilePath(String crtFilePath) { 35 | this.crtFilePath = crtFilePath; 36 | } 37 | 38 | public String getKeyFilePath() { 39 | return keyFilePath; 40 | } 41 | 42 | public void setKeyFilePath(String keyFilePath) { 43 | this.keyFilePath = keyFilePath; 44 | } 45 | 46 | @Override 47 | public String toString() { 48 | return "CASSLSignParams{" 49 | + "caCrtFilePath='" + caCrtFilePath + '\'' 50 | + ", crtFilePath='" + crtFilePath + '\'' 51 | + ", keyFilePath='" + keyFilePath + '\'' 52 | + '}'; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/table/NebulaRowDataInputFormat.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.table; 7 | 8 | import java.io.IOException; 9 | import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider; 10 | import org.apache.flink.connector.nebula.source.NebulaInputFormat; 11 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 12 | import org.apache.flink.core.io.InputSplit; 13 | import org.apache.flink.table.data.RowData; 14 | import org.apache.flink.table.types.logical.LogicalType; 15 | import org.apache.flink.table.types.logical.RowType; 16 | 17 | /** 18 | * implementation of NebulaInputFormat. 19 | * Read NebulaGraph data in Flink's {@link RowData} format. 20 | */ 21 | public class NebulaRowDataInputFormat extends NebulaInputFormat<RowData> { 22 | 23 | private final LogicalType[] logicalTypes; 24 | 25 | public NebulaRowDataInputFormat(NebulaStorageConnectionProvider storageConnectionProvider, 26 | ExecutionOptions executionOptions, 27 | LogicalType[] logicalTypes) { 28 | super(storageConnectionProvider, executionOptions); 29 | this.logicalTypes = logicalTypes; 30 | } 31 | 32 | @Override 33 | public void open(InputSplit inputSplit) throws IOException { 34 | super.open(inputSplit); 35 | RowType rowType = RowType.of(logicalTypes); 36 | super.nebulaConverter = new NebulaRowDataConverter(rowType); 37 | } 38 | } 39 | --------------------------------------------------------------------------------
/connector/src/test/java/org/apache/flink/connector/nebula/NebulaValueUtils.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2022 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula; 7 | 8 | import static com.vesoft.nebula.client.graph.net.Session.value2Nvalue; 9 | 10 | import com.vesoft.nebula.Coordinate; 11 | import com.vesoft.nebula.Date; 12 | import com.vesoft.nebula.DateTime; 13 | import com.vesoft.nebula.Geography; 14 | import com.vesoft.nebula.Point; 15 | import com.vesoft.nebula.Row; 16 | import com.vesoft.nebula.Time; 17 | import com.vesoft.nebula.Value; 18 | import java.util.Arrays; 19 | 20 | public class NebulaValueUtils { 21 | public static Row rowOf(Value... 
values) { 22 | return new Row(Arrays.asList(values)); 23 | } 24 | 25 | public static Value valueOf(Object obj) { 26 | return value2Nvalue(obj); 27 | } 28 | 29 | public static Value dateOf(int year, int month, int day) { 30 | return value2Nvalue(new Date((short) year, (byte) month, (byte) day)); 31 | } 32 | 33 | public static Value dateTimeOf(int year, int month, int day, 34 | int hour, int minute, int sec, int microsec) { 35 | return value2Nvalue(new DateTime( 36 | (short) year, (byte) month, (byte) day, 37 | (byte) hour, (byte) minute, (byte) sec, microsec)); 38 | } 39 | 40 | public static Value timeOf(int hour, int minute, int sec, int microsec) { 41 | return value2Nvalue(new Time((byte) hour, (byte) minute, (byte) sec, microsec)); 42 | } 43 | 44 | public static Value pointOf(double x, double y) { 45 | return value2Nvalue(new Geography(Geography.PTVAL, new Point(new Coordinate(x, y)))); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /example/src/main/resources/ssl/casigned.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAuo7hKpcs+VQKbGRq0fUL+GcSfPfJ8mARtIeI8WfU0j1vI5KN 3 | ujI//G2olOGEueDCw4OO0UbdjnsFpgj2awAorj4ga2W6adQHK8qHY6q/Rdqv0oDC 4 | rcePMtQ8IwbFjNWOXC4bn7GcV7mzOkigdcj8UPkSeaqI9XxBRm3OoDX+T8h6cDLr 5 | m+ncKB8KKe/QApKH4frV3HYDqGtN49zuRs6FiurFbXDGVAZEdFEJl38IQJdmE2AS 6 | OzEHZbxWKzO/DZr/Z2+L1CuycZIwuITcnddxb2Byx/opwX4HlyODeUBbyDp+hd+G 7 | kasmIcpOlIDw9OXIvrcajKvzLEbqGt2ThsxXQwIDAQABAoIBAH4SEBe4EaxsHp8h 8 | PQ6linFTNis9SDuCsHRPIzv/7tIksfZYE27Ahn0Pndz+ibMTMIrvXJQQT6j5ede6 9 | NswYT2Vwlnf9Rvw9TJtLQjMYMCoEnsyiNu047oxq4DjLWrTRnGKuxfwlCoI9++Bn 10 | NAhkyh3uM44EsIk0bugpTHj4A+PlbUPe7xdEI/6XpaZrRN9oiejJ4VxZAPgFGiTm 11 | uNF5qg16+0900Pfj5Y/M4vXmn+gq39PO/y0FlTpaoEuYZiZZS3xHGmSVhlt8LIgI 12 | 8MdMRaKTfNeNITaqgOWh9pAW4xmK48/KfLgNPQgtDHjMJpgM0BbcBOayOY8Eio0x 13 | Z66G2AECgYEA9vj/8Fm3CKn/ogNOO81y9kIs0iPcbjasMnQ3UXeOdD0z0+7TM86F 14 | Xj3GK/z2ecvY7skWtO5ZUbbxp4aB7omW8Ke9+q8XPzMEmUuAOTzxQkAOxdr++HXP 15 | TILy0hNX2cmiLQT1U60KoZHzPZ5o5hNIQPMt7hN12ERWcIfR/MUZa5UCgYEAwWCP 16 | 6Y7Zso1QxQR/qfjuILET3/xU+ZmqSRDvzJPEiGI3oeWNG4L6cKR+XTe0FWZBAmVk 17 | Qq/1qXmdBnf5S7azffoJe2+H/m3kHJSprIiAAWlBN2e+kFlNfBhtkgia5NvsrjRw 18 | al6mf/+weRD1FiPoZY3e1wBKoqro7aI8fE5gwXcCgYEAnEI05OROeyvb8qy2vf2i 19 | JA8AfsBzwkPTNWT0bxX+yqrCdO/hLyEWnubk0IYPiEYibgpK1JUNbDcctErVQJBL 20 | MN5gxBAt3C2yVi8/5HcbijgvYJ3LvnYDf7xGWAYnCkOZ2XQOqC+Oz2UhijYE1rUS 21 | fQ2fXMdxQzERo8c7Y/tstvUCgYBuixy5jwezokUB20h/ieXWmmOaL00EQmutyRjM 22 | AczfigXzbp3zlDRGIEJ8V1OCyClxjTR7SstMTlENWZgRSCfjZAP3pBJBx+AW1oUI 23 | NB+4rsqxOYUeT26T+gLo8DJbkb0C+Mcqh2D22tuu2ZrBRVWceDVjAq+nvbvZ3Fxn 24 | UwbMkQKBgQCxL3aA6ART6laIxT/ZqMhV0ZcaoDJogjF+4I4bhlO4ivWGWJ4RpEDn 25 | ziFb6+M/4pe4vCou9yuAof6WTKM8JG4rok0yxhN3V6QGP49TjtrfkkrEPCtB2LSI 26 | N1+YRSTrS5VDcl8h8JH7fpghRnXHONEyIqasYVqsbxKzNyLV/z2rkw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /connector/src/test/resources/ssl/casigned.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAuo7hKpcs+VQKbGRq0fUL+GcSfPfJ8mARtIeI8WfU0j1vI5KN 3 | ujI//G2olOGEueDCw4OO0UbdjnsFpgj2awAorj4ga2W6adQHK8qHY6q/Rdqv0oDC 4 | rcePMtQ8IwbFjNWOXC4bn7GcV7mzOkigdcj8UPkSeaqI9XxBRm3OoDX+T8h6cDLr 5 | m+ncKB8KKe/QApKH4frV3HYDqGtN49zuRs6FiurFbXDGVAZEdFEJl38IQJdmE2AS 6 | OzEHZbxWKzO/DZr/Z2+L1CuycZIwuITcnddxb2Byx/opwX4HlyODeUBbyDp+hd+G 7 | 
kasmIcpOlIDw9OXIvrcajKvzLEbqGt2ThsxXQwIDAQABAoIBAH4SEBe4EaxsHp8h 8 | PQ6linFTNis9SDuCsHRPIzv/7tIksfZYE27Ahn0Pndz+ibMTMIrvXJQQT6j5ede6 9 | NswYT2Vwlnf9Rvw9TJtLQjMYMCoEnsyiNu047oxq4DjLWrTRnGKuxfwlCoI9++Bn 10 | NAhkyh3uM44EsIk0bugpTHj4A+PlbUPe7xdEI/6XpaZrRN9oiejJ4VxZAPgFGiTm 11 | uNF5qg16+0900Pfj5Y/M4vXmn+gq39PO/y0FlTpaoEuYZiZZS3xHGmSVhlt8LIgI 12 | 8MdMRaKTfNeNITaqgOWh9pAW4xmK48/KfLgNPQgtDHjMJpgM0BbcBOayOY8Eio0x 13 | Z66G2AECgYEA9vj/8Fm3CKn/ogNOO81y9kIs0iPcbjasMnQ3UXeOdD0z0+7TM86F 14 | Xj3GK/z2ecvY7skWtO5ZUbbxp4aB7omW8Ke9+q8XPzMEmUuAOTzxQkAOxdr++HXP 15 | TILy0hNX2cmiLQT1U60KoZHzPZ5o5hNIQPMt7hN12ERWcIfR/MUZa5UCgYEAwWCP 16 | 6Y7Zso1QxQR/qfjuILET3/xU+ZmqSRDvzJPEiGI3oeWNG4L6cKR+XTe0FWZBAmVk 17 | Qq/1qXmdBnf5S7azffoJe2+H/m3kHJSprIiAAWlBN2e+kFlNfBhtkgia5NvsrjRw 18 | al6mf/+weRD1FiPoZY3e1wBKoqro7aI8fE5gwXcCgYEAnEI05OROeyvb8qy2vf2i 19 | JA8AfsBzwkPTNWT0bxX+yqrCdO/hLyEWnubk0IYPiEYibgpK1JUNbDcctErVQJBL 20 | MN5gxBAt3C2yVi8/5HcbijgvYJ3LvnYDf7xGWAYnCkOZ2XQOqC+Oz2UhijYE1rUS 21 | fQ2fXMdxQzERo8c7Y/tstvUCgYBuixy5jwezokUB20h/ieXWmmOaL00EQmutyRjM 22 | AczfigXzbp3zlDRGIEJ8V1OCyClxjTR7SstMTlENWZgRSCfjZAP3pBJBx+AW1oUI 23 | NB+4rsqxOYUeT26T+gLo8DJbkb0C+Mcqh2D22tuu2ZrBRVWceDVjAq+nvbvZ3Fxn 24 | UwbMkQKBgQCxL3aA6ART6laIxT/ZqMhV0ZcaoDJogjF+4I4bhlO4ivWGWJ4RpEDn 25 | ziFb6+M/4pe4vCou9yuAof6WTKM8JG4rok0yxhN3V6QGP49TjtrfkkrEPCtB2LSI 26 | N1+YRSTrS5VDcl8h8JH7fpghRnXHONEyIqasYVqsbxKzNyLV/z2rkw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /connector/src/test/resources/ssl/test.derive.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEowIBAAKCAQEAx5NT0LWghkt9Z78Srk+qc1ULjrANoRCA8wWbqASt25XswQde 3 | n41FhtsBxv6/TrLOWMzpcuL4i9aBS7DXWINuxBR+6XBn3UlJmWuixyC3oCU5i2tW 4 | geJURvRLLzyMTbqOt+wlts+hFhJVSl59jdRtb1t36ShsSfndaL2hXwZYjtLCvpsl 5 | vr2HSqzYO9FqiigqAiJSIxFsN70Ha5r4boSAWxtW15F2ycCHqQXW+MBksfKrILPd 6 | yOv43vyv4iWjySKnR/JaKOn0qAwVIjQPO2bEHbWwXrpO0Lhe9UsQ+fhPMAjLRTME 7 | Rpi9L2DD8JGrqyscBLOPXvHk80byKzbcmIFaOwIDAQABAoIBAEZ50URHjzs9VziW 8 | sdsaSN/XbXBi3T0+Xbr0BQatOFPtuqBjoNeJBL9dgWArP5Vj8RhMrDekzQ5cnmYD 9 | OdiI+UmGz1ZSGmt7YOErsFzPQejsnEiOjArryMURqacxo34jXhi27I6E/aaUrMfJ 10 | XF8EX+zOCSct3ie1c6l0JZMv43/zbzP2vMFEdfnVfZA2Kxo5l3I4rjuxHUEWHzrb 11 | EgM4a2+y7LQrut75zP9zWEZAqim/VEIEj24Gqj+Vocb6cHlc31KzKaEz7Ra5ha2J 12 | kN2CQRKCzoMupVL5E6dWMiDVjUyUXdUgjSCIW2H+E1ONgvxA78jJx7+Dzj+/bWxH 13 | h/vr3dkCgYEA9Aev7PGoGF0eapZY3crehvtCn1v4YLheh0dk4EpbpbEx0rQaG3h7 14 | YYCf7euxMvoTsKPETHAUG/s/RZV1DNOjxs8GKgEIVaRYEf1VZeDXudtnyKBwCMAL 15 | 5CKHRBvfmNG9n+PpQQlrIAZGej7HU+/IzEVsrD2A5DeH9IVpMNvrX10CgYEA0V1r 16 | aydbBP+Ma/fiG5UDa8l4GdLzvAoW2cY6ZhQX4NiLTK91MwA/QOQcVMvJAN2KpPHC 17 | kGDRT7IhMs66cMxl0ImIJ2QSnv8HRNmBBSdUtJx1S6nV2u0VfgP61oNT/YbLR/Jk 18 | CAIl1qe7Q8IsrMbPxCbt8g+D8Wr9C3pdYYqFvncCgYEAicGdKmDwx3Apr3nYCLxx 19 | CjnkzhkZCWCK3EsNQyA2xD5XJd7NrhxBajU2ExUuHtzVKK4KLixG7dTTTvCj9u2y 20 | UpSjoiqbDd2MaftcrfpTTXPyDmujUw02qT5kpaomexpLtWrvTeuHMbjZKEEwPM3r 21 | yISYaFL/49UFRp/ZVd+P63ECgYAX1B0ctf77A6bUxwK6Buy7wNNlhQful+tf39rX 22 | sWPCWIMKOFILevS4Cv5afFMlQRG9kjKFwi8wdeKnaLX5jpnr8StI6G/iHr6SDHtN 23 | vds7Ly9+bBcF8sPmcseC0LGngkbyqljOPIhX9QEwRhJVm88b0R511WQ7/uRMASJN 24 | rrloIwKBgCxYlu1xvvEuQNoIux/yKAEJ1h4Ta2zc5upjw0uDKMi0UNIbNhgdFOvj 25 | LuVbxTRU8WktrLNk3T0rsopKsTbEZVg6Yuv8ZLkEiNYTzhUbn2Y5yM3bnoVwyOns 26 | pTtqmBtvDZxaRCYdIQG3b09IvrewDk26AOtNHdeKw883G2muP/vA 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- 
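These PEM fixtures are what the connector's SSL options point at. A minimal sketch of a CA-signed client configuration (illustrative: the addresses and relative paths are placeholders, and the builder setters are assumed from the NebulaClientOptions API used elsewhere in this repository):

    NebulaClientOptions options = new NebulaClientOptions.NebulaClientOptionsBuilder()
            .setGraphAddress("127.0.0.1:9669")
            .setMetaAddress("127.0.0.1:9559")
            // enable SSL on both the graph and meta connections
            .setEnableGraphSSL(true)
            .setEnableMetaSSL(true)
            // CA mode takes the CA certificate, client certificate and client key paths
            .setSSLSignType(SSLSignType.CA)
            .setCaSignParam("ssl/casigned.pem", "ssl/casigned.crt", "ssl/casigned.key")
            .build();

For the self-signed fixtures, the analogous call is setSelfSignParam(crtFilePath, keyFilePath, password), where the password is the value stored in selfsigned.password ("vesoft").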
/connector/src/test/resources/ssl/test.ca.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: DES-EDE3-CBC,6D12ED8559E80FA3 4 | 5 | tv9epnwlt4dP6Q5ee0dACOyFA5BTwYTdoMykQRJrKGwfaNeXUXn+sQ/U/oFHp1Wx 6 | O8VZE+z2aHpiFSTw+Eh6MPt86X5yVG3tpeVO6dErvr8Kd+NpuI8zn7rNoOFRh8wD 7 | 33EFcQMLQPneDl10O18hooIoi0qwp1pd63hYZPwEhB3eOrM5Mnv9OVJs65bzYfyf 8 | Wku33YWYxeqlDvMCsou8PZnv/M2wYsr7+QoTcNmGKP45igMthMDBzwgF+q0p9ZZU 9 | N11c6ojAs01kfuqFf3vKfHNYe6zsBiNhnUuEy8enXSxD5E7tR/OI8aEzPLdk7fmN 10 | /UsMK2LE0Yd5iS3O1x/1ZjSBxJ+M/UzzCO692GTAiD6Hc13iJOavq/vt1mEPjfCD 11 | neF38Bhb5DfFi+UAHrz6EHMreamGCzP82us2maIs7mSTq7nXDZfbBc7mBDLAUUnT 12 | J6tlrTyc+DQXzkJa6jmbxJhcsWm6XvjIBEzSXVHxEDPLnZICQk3VXODjCXTD75Rg 13 | 0WaS78Ven7DW8wn07q3VzWAFDKaet3VI+TVTv7EfIavlfiA6LSshaENdFLeHahNE 14 | s/V/j5K3Pg6+WQcZRgOsfqIwUCSQxY13R6TTdaaCkLay5BggF5iiAO3pkqsJiadf 15 | w843Ak4USBptymJxoZgJyFtQHpQyNiFfsAbs9BaYbg2evvE7/VQhLk0gQ7HgQMeJ 16 | wgxEQqZQKDCCSugSzY1YEGXKnrZYCKyipzyyH936mE15zNwhYp/Pi2020+gmtP3h 17 | CDfcPs1yeLI2/1JuimafbuKsv9xchWa6ASU8p8Q7wTLtUj9ylLKyA4A/75pK0DXG 18 | Hv/q0O+UfhAMD438SoPBle7RSvIsDU1VjUqstlNybBglBZxGIME7/18+Ms7U32wh 19 | 4xFkZwxT2nqFgyk37tXMdMz9UBh12/AXR9NU4XY37C3Ao2TDT7/0DvU6KdJhsDpv 20 | rGcaC2zzhko+0CPrLlk52KbqP003JXiWvOSI+FylyPPDB/YGitmndJUuQblf3u/E 21 | l+tGi9MeSBQeWKV6D3AVnO05AZjfTUzSK0vw4DgNh5YPNJvLy31B7kDAS88vyGI1 22 | t6MBwjW4/tz/nS/p1Go3mSzBhPkIsCrZE+ar7lH8p8JqkLl4fXIMaVKIfyfJdzyS 23 | lkh3K7bOGDPegxxxaWdb+EnC7k+1R3EOU7uJFW61HyrGI3q6Y7kOl5aYSJ5Ge1Uv 24 | PycFWHWVTHq/R7HRE6HIJzGe/PnLIbStXLDFeivjfcYq1YaSaF8Vl+xg+0u3ULOl 25 | P6IuPTph6dlcgttRZVl3ETcF0T+2wfbUwgjf0ZiguCJfR2jLGhPl1KBg0Kd9cTSY 26 | zI3YMMd2G8hApt/QFlm4Ry8CqaJUmDcjDNIJT3M+RldUgfz37NsX05cA5e9+I1AL 27 | 2406F/v5U9gWsYx7HuwJtQrDzYYDbl1GD4H+qHFJE5JYhPP4AyWYxJ1NR5dqyvrt 28 | +3r5+xlwZrS76c10RsBWL7th8ZEzRxOZxbtLwbf4bG/tIGfQP2sTnWwA+qym6b2S 29 | sRduqOTP+xwnhOq/ZKn8lfsDfhT8CPnKHBsd09kM9y/UWuxFe0upLydRLE/Wsb9s 30 | -----END RSA PRIVATE KEY----- 31 | -------------------------------------------------------------------------------- /example/src/main/resources/ssl/selfsigned.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: DES-EDE3-CBC,6D12ED8559E80FA3 4 | 5 | tv9epnwlt4dP6Q5ee0dACOyFA5BTwYTdoMykQRJrKGwfaNeXUXn+sQ/U/oFHp1Wx 6 | O8VZE+z2aHpiFSTw+Eh6MPt86X5yVG3tpeVO6dErvr8Kd+NpuI8zn7rNoOFRh8wD 7 | 33EFcQMLQPneDl10O18hooIoi0qwp1pd63hYZPwEhB3eOrM5Mnv9OVJs65bzYfyf 8 | Wku33YWYxeqlDvMCsou8PZnv/M2wYsr7+QoTcNmGKP45igMthMDBzwgF+q0p9ZZU 9 | N11c6ojAs01kfuqFf3vKfHNYe6zsBiNhnUuEy8enXSxD5E7tR/OI8aEzPLdk7fmN 10 | /UsMK2LE0Yd5iS3O1x/1ZjSBxJ+M/UzzCO692GTAiD6Hc13iJOavq/vt1mEPjfCD 11 | neF38Bhb5DfFi+UAHrz6EHMreamGCzP82us2maIs7mSTq7nXDZfbBc7mBDLAUUnT 12 | J6tlrTyc+DQXzkJa6jmbxJhcsWm6XvjIBEzSXVHxEDPLnZICQk3VXODjCXTD75Rg 13 | 0WaS78Ven7DW8wn07q3VzWAFDKaet3VI+TVTv7EfIavlfiA6LSshaENdFLeHahNE 14 | s/V/j5K3Pg6+WQcZRgOsfqIwUCSQxY13R6TTdaaCkLay5BggF5iiAO3pkqsJiadf 15 | w843Ak4USBptymJxoZgJyFtQHpQyNiFfsAbs9BaYbg2evvE7/VQhLk0gQ7HgQMeJ 16 | wgxEQqZQKDCCSugSzY1YEGXKnrZYCKyipzyyH936mE15zNwhYp/Pi2020+gmtP3h 17 | CDfcPs1yeLI2/1JuimafbuKsv9xchWa6ASU8p8Q7wTLtUj9ylLKyA4A/75pK0DXG 18 | Hv/q0O+UfhAMD438SoPBle7RSvIsDU1VjUqstlNybBglBZxGIME7/18+Ms7U32wh 19 | 4xFkZwxT2nqFgyk37tXMdMz9UBh12/AXR9NU4XY37C3Ao2TDT7/0DvU6KdJhsDpv 20 | rGcaC2zzhko+0CPrLlk52KbqP003JXiWvOSI+FylyPPDB/YGitmndJUuQblf3u/E 21 | l+tGi9MeSBQeWKV6D3AVnO05AZjfTUzSK0vw4DgNh5YPNJvLy31B7kDAS88vyGI1 22 | 
t6MBwjW4/tz/nS/p1Go3mSzBhPkIsCrZE+ar7lH8p8JqkLl4fXIMaVKIfyfJdzyS 23 | lkh3K7bOGDPegxxxaWdb+EnC7k+1R3EOU7uJFW61HyrGI3q6Y7kOl5aYSJ5Ge1Uv 24 | PycFWHWVTHq/R7HRE6HIJzGe/PnLIbStXLDFeivjfcYq1YaSaF8Vl+xg+0u3ULOl 25 | P6IuPTph6dlcgttRZVl3ETcF0T+2wfbUwgjf0ZiguCJfR2jLGhPl1KBg0Kd9cTSY 26 | zI3YMMd2G8hApt/QFlm4Ry8CqaJUmDcjDNIJT3M+RldUgfz37NsX05cA5e9+I1AL 27 | 2406F/v5U9gWsYx7HuwJtQrDzYYDbl1GD4H+qHFJE5JYhPP4AyWYxJ1NR5dqyvrt 28 | +3r5+xlwZrS76c10RsBWL7th8ZEzRxOZxbtLwbf4bG/tIGfQP2sTnWwA+qym6b2S 29 | sRduqOTP+xwnhOq/ZKn8lfsDfhT8CPnKHBsd09kM9y/UWuxFe0upLydRLE/Wsb9s 30 | -----END RSA PRIVATE KEY----- 31 | -------------------------------------------------------------------------------- /connector/src/test/resources/ssl/selfsigned.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: DES-EDE3-CBC,6D12ED8559E80FA3 4 | 5 | tv9epnwlt4dP6Q5ee0dACOyFA5BTwYTdoMykQRJrKGwfaNeXUXn+sQ/U/oFHp1Wx 6 | O8VZE+z2aHpiFSTw+Eh6MPt86X5yVG3tpeVO6dErvr8Kd+NpuI8zn7rNoOFRh8wD 7 | 33EFcQMLQPneDl10O18hooIoi0qwp1pd63hYZPwEhB3eOrM5Mnv9OVJs65bzYfyf 8 | Wku33YWYxeqlDvMCsou8PZnv/M2wYsr7+QoTcNmGKP45igMthMDBzwgF+q0p9ZZU 9 | N11c6ojAs01kfuqFf3vKfHNYe6zsBiNhnUuEy8enXSxD5E7tR/OI8aEzPLdk7fmN 10 | /UsMK2LE0Yd5iS3O1x/1ZjSBxJ+M/UzzCO692GTAiD6Hc13iJOavq/vt1mEPjfCD 11 | neF38Bhb5DfFi+UAHrz6EHMreamGCzP82us2maIs7mSTq7nXDZfbBc7mBDLAUUnT 12 | J6tlrTyc+DQXzkJa6jmbxJhcsWm6XvjIBEzSXVHxEDPLnZICQk3VXODjCXTD75Rg 13 | 0WaS78Ven7DW8wn07q3VzWAFDKaet3VI+TVTv7EfIavlfiA6LSshaENdFLeHahNE 14 | s/V/j5K3Pg6+WQcZRgOsfqIwUCSQxY13R6TTdaaCkLay5BggF5iiAO3pkqsJiadf 15 | w843Ak4USBptymJxoZgJyFtQHpQyNiFfsAbs9BaYbg2evvE7/VQhLk0gQ7HgQMeJ 16 | wgxEQqZQKDCCSugSzY1YEGXKnrZYCKyipzyyH936mE15zNwhYp/Pi2020+gmtP3h 17 | CDfcPs1yeLI2/1JuimafbuKsv9xchWa6ASU8p8Q7wTLtUj9ylLKyA4A/75pK0DXG 18 | Hv/q0O+UfhAMD438SoPBle7RSvIsDU1VjUqstlNybBglBZxGIME7/18+Ms7U32wh 19 | 4xFkZwxT2nqFgyk37tXMdMz9UBh12/AXR9NU4XY37C3Ao2TDT7/0DvU6KdJhsDpv 20 | rGcaC2zzhko+0CPrLlk52KbqP003JXiWvOSI+FylyPPDB/YGitmndJUuQblf3u/E 21 | l+tGi9MeSBQeWKV6D3AVnO05AZjfTUzSK0vw4DgNh5YPNJvLy31B7kDAS88vyGI1 22 | t6MBwjW4/tz/nS/p1Go3mSzBhPkIsCrZE+ar7lH8p8JqkLl4fXIMaVKIfyfJdzyS 23 | lkh3K7bOGDPegxxxaWdb+EnC7k+1R3EOU7uJFW61HyrGI3q6Y7kOl5aYSJ5Ge1Uv 24 | PycFWHWVTHq/R7HRE6HIJzGe/PnLIbStXLDFeivjfcYq1YaSaF8Vl+xg+0u3ULOl 25 | P6IuPTph6dlcgttRZVl3ETcF0T+2wfbUwgjf0ZiguCJfR2jLGhPl1KBg0Kd9cTSY 26 | zI3YMMd2G8hApt/QFlm4Ry8CqaJUmDcjDNIJT3M+RldUgfz37NsX05cA5e9+I1AL 27 | 2406F/v5U9gWsYx7HuwJtQrDzYYDbl1GD4H+qHFJE5JYhPP4AyWYxJ1NR5dqyvrt 28 | +3r5+xlwZrS76c10RsBWL7th8ZEzRxOZxbtLwbf4bG/tIGfQP2sTnWwA+qym6b2S 29 | sRduqOTP+xwnhOq/ZKn8lfsDfhT8CPnKHBsd09kM9y/UWuxFe0upLydRLE/Wsb9s 30 | -----END RSA PRIVATE KEY----- 31 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaSpaces.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2022 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import java.io.Serializable; 9 | import java.util.Map; 10 | 11 | public class NebulaSpaces implements Serializable { 12 | 13 | private final NebulaSpace nebulaSpace; 14 | private final Map<String, String> props; 15 | 16 | public NebulaSpaces(NebulaSpace nebulaSpace) { 17 | this.nebulaSpace = nebulaSpace; 18 | this.props = nebulaSpace.getProps(); 19 | } 20 | 21 | /** 22 | * construct Nebula create space ngql.
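 * For example (illustrative), a hypothetical space named {@code test} whose props set only
 * vid_type = FIXED_STRING(10) falls back to the defaults from NebulaConstant and yields:
 * CREATE SPACE `test` (partition_num = 100, replica_factor = 1, vid_type = FIXED_STRING(10))
 * A non-null comment additionally appends COMMENT = '...' to the statement.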
23 | * 24 | * @return ngql 25 | */ 26 | public String getCreateStatement() { 27 | Map<String, String> props = nebulaSpace.getProps(); 28 | StringBuilder sb = new StringBuilder(); 29 | addParams(sb, NebulaConstant.CREATE_PARTITION_NUM, NebulaConstant.DEFAULT_PARTITION_NUM); 30 | addParams(sb, NebulaConstant.CREATE_REPLICA_FACTOR, NebulaConstant.DEFAULT_REPLICA_FACTOR); 31 | sb.append(NebulaConstant.CREATE_VID_TYPE) 32 | .append(" = ") 33 | .append(props.get(NebulaConstant.CREATE_VID_TYPE)); 34 | 35 | String stat = String.format( 36 | NebulaConstant.CREATE_SPACE_TEMPLATE, 37 | nebulaSpace.getSpaceName(), 38 | sb 39 | ); 40 | 41 | String comment = nebulaSpace.getComment(); 42 | if (comment != null) { 43 | stat += String.format(NebulaConstant.CREATE_SPACE_COMMENT, comment); 44 | } 45 | 46 | return stat; 47 | } 48 | 49 | private void addParams(StringBuilder sb, String para, int defaultValue) { 50 | sb.append(para) 51 | .append(" = ") 52 | .append(props.containsKey(para) 53 | ? Integer.parseInt(props.get(para)) 54 | : defaultValue) 55 | .append(", "); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/catalog/factory/NebulaCatalogFactory.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.catalog.factory; 7 | 8 | import static org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory.GRAPHADDRESS; 9 | import static org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory.GRAPH_SPACE; 10 | import static org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory.LABEL_NAME; 11 | import static org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory.METAADDRESS; 12 | import static org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory.PASSWORD; 13 | import static org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory.TIMEOUT; 14 | import static org.apache.flink.connector.nebula.table.NebulaDynamicTableFactory.USERNAME; 15 | 16 | import java.util.HashSet; 17 | import java.util.Set; 18 | import org.apache.flink.configuration.ConfigOption; 19 | import org.apache.flink.table.catalog.Catalog; 20 | import org.apache.flink.table.factories.CatalogFactory; 21 | import org.slf4j.Logger; 22 | import org.slf4j.LoggerFactory; 23 | 24 | public class NebulaCatalogFactory implements CatalogFactory { 25 | private static final Logger LOG = LoggerFactory.getLogger(NebulaCatalogFactory.class); 26 | 27 | @Override 28 | public Catalog createCatalog(Context context) { 29 | return CatalogFactory.super.createCatalog(context); 30 | } 31 | 32 | @Override 33 | public Set<ConfigOption<?>> requiredOptions() { 34 | final Set<ConfigOption<?>> options = new HashSet<>(); 35 | options.add(GRAPHADDRESS); 36 | options.add(USERNAME); 37 | options.add(PASSWORD); 38 | options.add(METAADDRESS); 39 | options.add(GRAPH_SPACE); 40 | return options; 41 | } 42 | 43 | @Override 44 | public Set<ConfigOption<?>> optionalOptions() { 45 | final Set<ConfigOption<?>> options = new HashSet<>(); 46 | options.add(LABEL_NAME); 47 | options.add(TIMEOUT); 48 | return options; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaRowConverter.java: -------------------------------------------------------------------------------- 1 | /* Copyright
(c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import com.vesoft.nebula.client.graph.data.ValueWrapper; 9 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 10 | import java.io.UnsupportedEncodingException; 11 | import java.util.List; 12 | import org.apache.flink.types.Row; 13 | 14 | /** 15 | * convert nebula {@link BaseTableRow} to flink {@link Row} 16 | */ 17 | public class NebulaRowConverter implements NebulaConverter<Row> { 18 | 19 | @Override 20 | public Row convert(BaseTableRow row) throws UnsupportedEncodingException { 21 | List<ValueWrapper> values = row.getValues(); 22 | Row record = new Row(values.size()); 23 | for (int pos = 0; pos < values.size(); pos++) { 24 | ValueWrapper valueWrapper = values.get(pos); 25 | 26 | if (valueWrapper.isNull()) { 27 | record.setField(pos, null); 28 | continue; 29 | } 30 | if (valueWrapper.isString()) { 31 | record.setField(pos, valueWrapper.asString()); 32 | continue; 33 | } 34 | if (valueWrapper.isBoolean()) { 35 | record.setField(pos, valueWrapper.asBoolean()); 36 | continue; 37 | } 38 | if (valueWrapper.isLong()) { 39 | record.setField(pos, valueWrapper.asLong()); 40 | continue; 41 | } 42 | if (valueWrapper.isDouble()) { 43 | record.setField(pos, valueWrapper.asDouble()); 44 | continue; 45 | } 46 | if (valueWrapper.isDate()) { 47 | record.setField(pos, valueWrapper.asDate()); 48 | continue; 49 | } 50 | if (valueWrapper.isTime()) { 51 | record.setField(pos, valueWrapper.asTime()); 52 | continue; 53 | } 54 | if (valueWrapper.isDateTime()) { 55 | record.setField(pos, valueWrapper.asDateTime()); 56 | continue; 57 | } 58 | if (valueWrapper.isGeography()) { 59 | record.setField(pos, valueWrapper.asGeography()); 60 | continue; 61 | } 62 | if (valueWrapper.isDuration()) { 63 | record.setField(pos, valueWrapper.asDuration()); 64 | } 65 | } 66 | return record; 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaBatchExecutor.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License.
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.sink; 7 | 8 | import com.vesoft.nebula.ErrorCode; 9 | import com.vesoft.nebula.client.graph.data.ResultSet; 10 | import com.vesoft.nebula.client.graph.exception.IOErrorException; 11 | import com.vesoft.nebula.client.graph.net.Session; 12 | import java.io.IOException; 13 | import org.slf4j.Logger; 14 | import org.slf4j.LoggerFactory; 15 | 16 | public abstract class NebulaBatchExecutor<T> { 17 | 18 | public static class ExecutionException extends IOException { 19 | private final String statement; 20 | private final String errorMessage; 21 | private final int errorCode; 22 | 23 | public ExecutionException(String statement, String errorMessage, int errorCode) { 24 | this.statement = statement; 25 | this.errorMessage = errorMessage; 26 | this.errorCode = errorCode; 27 | } 28 | 29 | @Override 30 | public String getMessage() { 31 | return String.format("failed to execute statement %s: %s [%s]", 32 | statement, errorMessage, errorCode); 33 | } 34 | 35 | public boolean isNonRecoverableError() { 36 | return this.errorCode == ErrorCode.E_SEMANTIC_ERROR.getValue() 37 | || this.errorCode == ErrorCode.E_SYNTAX_ERROR.getValue(); 38 | } 39 | } 40 | 41 | private static final Logger LOG = LoggerFactory.getLogger(NebulaBatchExecutor.class); 42 | 43 | /** 44 | * put record into buffer 45 | * 46 | * @param record represent vertex or edge 47 | */ 48 | public abstract void addToBatch(T record); 49 | 50 | /** 51 | * execute the statement 52 | * 53 | * @param session graph session 54 | */ 55 | public abstract void executeBatch(Session session) throws IOException; 56 | 57 | public abstract void clearBatch(); 58 | 59 | public abstract boolean isBatchEmpty(); 60 | 61 | protected static void executeStatement(Session session, String statement) throws IOException { 62 | LOG.debug("write statement: {}", statement); 63 | ResultSet execResult; 64 | try { 65 | execResult = session.execute(statement); 66 | } catch (IOErrorException e) { 67 | throw new IOException(e); 68 | } 69 | if (execResult.isSucceeded()) { 70 | LOG.debug("write success"); 71 | } else { 72 | throw new ExecutionException( 73 | statement, execResult.getErrorMessage(), execResult.getErrorCode()); 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaSinkFunction.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License.
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.sink; 7 | 8 | import java.io.IOException; 9 | import java.util.concurrent.atomic.AtomicReference; 10 | import org.apache.flink.api.common.functions.RuntimeContext; 11 | import org.apache.flink.configuration.Configuration; 12 | import org.apache.flink.runtime.state.FunctionInitializationContext; 13 | import org.apache.flink.runtime.state.FunctionSnapshotContext; 14 | import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; 15 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 16 | import org.apache.flink.types.Row; 17 | 18 | public class NebulaSinkFunction<T> extends RichSinkFunction<T> implements CheckpointedFunction { 19 | 20 | private static final long serialVersionUID = 8100784397926666769L; 21 | 22 | private final NebulaBatchOutputFormat<T, ?> outputFormat; 23 | 24 | private final AtomicReference<Throwable> failureThrowable = new AtomicReference<>(); 25 | 26 | public NebulaSinkFunction(NebulaBatchOutputFormat<T, ?> outputFormat) { 27 | super(); 28 | this.outputFormat = outputFormat; 29 | } 30 | 31 | @Override 32 | public void open(Configuration parameters) throws Exception { 33 | super.open(parameters); 34 | RuntimeContext ctx = getRuntimeContext(); 35 | outputFormat.setRuntimeContext(ctx); 36 | try { 37 | outputFormat.open(ctx.getIndexOfThisSubtask(), ctx.getNumberOfParallelSubtasks()); 38 | } catch (IOException e) { 39 | failureThrowable.compareAndSet(null, e); 40 | } 41 | } 42 | 43 | @Override 44 | public void close() throws Exception { 45 | outputFormat.close(); 46 | } 47 | 48 | @Override 49 | public void invoke(T value, Context context) throws Exception { 50 | checkErrorAndRethrow(); 51 | outputFormat.writeRecord(value); 52 | } 53 | 54 | @Override 55 | public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception { 56 | flush(); 57 | checkErrorAndRethrow(); 58 | } 59 | 60 | @Override 61 | public void initializeState(FunctionInitializationContext functionInitializationContext) { 62 | // nothing to do 63 | } 64 | 65 | private void checkErrorAndRethrow() { 66 | Throwable cause = failureThrowable.get(); 67 | if (cause != null) { 68 | throw new RuntimeException("An error occurred in NebulaSink.", cause); 69 | } 70 | } 71 | 72 | private void flush() throws IOException { 73 | outputFormat.flush(); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaVertexBatchTableOutputFormat.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.connector.nebula.sink; 2 | 3 | import java.util.Map; 4 | import java.util.function.Function; 5 | import org.apache.flink.connector.nebula.connection.NebulaGraphConnectionProvider; 6 | import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider; 7 | import org.apache.flink.connector.nebula.statement.VertexExecutionOptions; 8 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 9 | import org.apache.flink.connector.nebula.utils.WriteModeEnum; 10 | import org.apache.flink.table.connector.sink.DynamicTableSink.DataStructureConverter; 11 | import org.apache.flink.table.data.RowData; 12 | import org.apache.flink.types.Row; 13 | 14 | public class NebulaVertexBatchTableOutputFormat 15 | extends NebulaBatchOutputFormat<RowData, VertexExecutionOptions> { 16 | private final DataStructureConverter dataStructureConverter; 17 | 18 | public NebulaVertexBatchTableOutputFormat(NebulaGraphConnectionProvider
graphProvider, 19 | NebulaMetaConnectionProvider metaProvider, 20 | VertexExecutionOptions executionOptions, 21 | DataStructureConverter dataStructureConverter) { 22 | super(graphProvider, metaProvider, executionOptions); 23 | this.dataStructureConverter = dataStructureConverter; 24 | } 25 | 26 | @Override 27 | protected NebulaBatchExecutor<RowData> createNebulaBatchExecutor() { 28 | VidTypeEnum vidType = metaProvider.getVidType(metaClient, executionOptions.getGraphSpace()); 29 | Map<String, Integer> schema = metaProvider.getTagSchema( 30 | metaClient, 31 | executionOptions.getGraphSpace(), 32 | executionOptions.getLabel()); 33 | VertexExecutionOptions insertOptions = executionOptions.toBuilder() 34 | .setWriteMode(WriteModeEnum.INSERT) 35 | .build(); 36 | VertexExecutionOptions deleteOptions = executionOptions.toBuilder() 37 | .setWriteMode(WriteModeEnum.DELETE) 38 | .build(); 39 | return new NebulaTableBufferReducedExecutor(dataStructureConverter, 40 | createKeyExtractor(executionOptions.getIdIndex()), 41 | new NebulaVertexBatchExecutor(insertOptions, vidType, schema), 42 | new NebulaVertexBatchExecutor(deleteOptions, vidType, schema)); 43 | } 44 | 45 | private static Function<Row, Row> createKeyExtractor(int idIndex) { 46 | return row -> { 47 | Row key = new Row(1); 48 | key.setField(0, row.getField(idIndex)); 49 | return key; 50 | }; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /connector/src/test/java/org/apache/flink/connector/nebula/catalog/NebulaCatalogCreateSpaceTest.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2022 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.catalog; 7 | 8 | import org.apache.flink.connector.nebula.utils.NebulaCatalogUtils; 9 | import org.apache.flink.table.api.EnvironmentSettings; 10 | import org.apache.flink.table.api.TableEnvironment; 11 | import org.junit.Test; 12 | 13 | public class NebulaCatalogCreateSpaceTest { 14 | 15 | private static final String CATALOG_NAME = "NebulaCatalog"; 16 | private static final String GRAPH_SPACE = "default"; 17 | private static final String USERNAME = "root"; 18 | private static final String PASSWORD = "nebula"; 19 | private static final String META_ADDRESS = "127.0.0.1:9559"; 20 | private static final String GRAPH_ADDRESS = "127.0.0.1:9669"; 21 | 22 | @Test 23 | public void testCreateGraphSpace() { 24 | NebulaCatalog nebulaCatalog = NebulaCatalogUtils.createNebulaCatalog( 25 | CATALOG_NAME, 26 | GRAPH_SPACE, 27 | USERNAME, 28 | PASSWORD, 29 | META_ADDRESS, 30 | GRAPH_ADDRESS 31 | ); 32 | 33 | EnvironmentSettings settings = EnvironmentSettings.newInstance() 34 | .inStreamingMode() 35 | .build(); 36 | TableEnvironment tableEnv = TableEnvironment.create(settings); 37 | 38 | tableEnv.registerCatalog(CATALOG_NAME, nebulaCatalog); 39 | tableEnv.useCatalog(CATALOG_NAME); 40 | 41 | String createDataBase1 = "CREATE DATABASE IF NOT EXISTS `db1`" 42 | + " COMMENT 'space 1'" 43 | + " WITH (" 44 | + " 'partition_num' = '100'," 45 | + " 'replica_factor' = '3'," 46 | + " 'vid_type' = 'FIXED_STRING(10)'" 47 | + ")"; 48 | 49 | String createDataBase2 = "CREATE DATABASE IF NOT EXISTS `db2`" 50 | + " COMMENT 'space 2'" 51 | + " WITH (" 52 | + " 'partition_num' = '10'," 53 | + " 'replica_factor' = '2'," 54 | + " 'vid_type' = 'INT'" 55 | + ")"; 56 | 57 | String createSameDataBase = "CREATE DATABASE IF NOT EXISTS `db1`" 58 | + " COMMENT 'same name as space
1'" 59 | + " WITH (" 60 | + " 'partition_num' = '10'," 61 | + " 'replica_factor' = '2'," 62 | + " 'vid_type' = 'INT64'" 63 | + ")"; 64 | 65 | tableEnv.executeSql(createDataBase1); 66 | tableEnv.executeSql(createDataBase2); 67 | tableEnv.executeSql(createSameDataBase); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/table/NebulaDynamicTableSource.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.table; 7 | 8 | 9 | import java.util.Arrays; 10 | import org.apache.flink.api.common.io.InputFormat; 11 | import org.apache.flink.connector.nebula.connection.NebulaClientOptions; 12 | import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider; 13 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 14 | import org.apache.flink.core.io.InputSplit; 15 | import org.apache.flink.table.api.TableSchema; 16 | import org.apache.flink.table.connector.ChangelogMode; 17 | import org.apache.flink.table.connector.source.DynamicTableSource; 18 | import org.apache.flink.table.connector.source.InputFormatProvider; 19 | import org.apache.flink.table.connector.source.ScanTableSource; 20 | import org.apache.flink.table.data.RowData; 21 | import org.apache.flink.table.types.DataType; 22 | import org.apache.flink.table.types.logical.LogicalType; 23 | 24 | public class NebulaDynamicTableSource implements ScanTableSource { 25 | 26 | private final NebulaClientOptions nebulaClientOptions; 27 | private final ExecutionOptions executionOptions; 28 | private final TableSchema tableSchema; 29 | 30 | public NebulaDynamicTableSource(NebulaClientOptions nebulaClientOptions, 31 | ExecutionOptions executionOptions, 32 | TableSchema tableSchema) { 33 | this.nebulaClientOptions = nebulaClientOptions; 34 | this.executionOptions = executionOptions; 35 | this.tableSchema = tableSchema; 36 | } 37 | 38 | @Override 39 | public ChangelogMode getChangelogMode() { 40 | return ChangelogMode.insertOnly(); 41 | } 42 | 43 | @Override 44 | public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) { 45 | DataType[] fieldDataTypes = tableSchema.getFieldDataTypes(); 46 | LogicalType[] logicalTypes = Arrays.stream(fieldDataTypes) 47 | .map(DataType::getLogicalType) 48 | .toArray(LogicalType[]::new); 49 | 50 | InputFormat<RowData, InputSplit> inputFormat = new NebulaRowDataInputFormat( 51 | new NebulaStorageConnectionProvider(this.nebulaClientOptions), 52 | this.executionOptions, 53 | logicalTypes 54 | ); 55 | return InputFormatProvider.of(inputFormat); 56 | } 57 | 58 | @Override 59 | public DynamicTableSource copy() { 60 | return new NebulaDynamicTableSource(nebulaClientOptions, executionOptions, tableSchema); 61 | } 62 | 63 | @Override 64 | public String asSummaryString() { 65 | return "NebulaDynamicTableSource"; 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaConstant.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License.
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | public class NebulaConstant { 9 | // template for insert statement 10 | public static String BATCH_INSERT_TEMPLATE = "INSERT %s `%s`(%s) VALUES %s"; 11 | public static String VERTEX_VALUE_TEMPLATE = "%s: (%s)"; 12 | public static String VERTEX_VALUE_TEMPLATE_WITH_POLICY = "%s(\"%s\"): (%s)"; 13 | public static String ENDPOINT_TEMPLATE = "%s(\"%s\")"; 14 | public static String EDGE_VALUE_WITHOUT_RANKING_TEMPLATE = "%s->%s: (%s)"; 15 | public static String EDGE_VALUE_TEMPLATE = "%s->%s@%d: (%s)"; 16 | 17 | // template for update statement 18 | public static String UPDATE_VERTEX_TEMPLATE = "UPDATE %s ON `%s` %s SET %s"; 19 | public static String UPDATE_EDGE_TEMPLATE = "UPDATE %s ON `%s` %s->%s@%d SET %s"; 20 | public static String UPDATE_VALUE_TEMPLATE = "`%s`=%s"; 21 | 22 | // template for delete statement 23 | public static String DELETE_VERTEX_TEMPLATE = "DELETE VERTEX %s"; 24 | public static String DELETE_VERTEX_TEMPLATE_WITH_EDGE = "DELETE VERTEX %s WITH EDGE"; 25 | public static String DELETE_EDGE_TEMPLATE = "DELETE EDGE `%s` %s"; 26 | public static String EDGE_ENDPOINT_TEMPLATE = "%s->%s@%d"; 27 | 28 | // template for create space statement 29 | public static String CREATE_SPACE_TEMPLATE = "CREATE SPACE `%s` (%s)"; 30 | public static String CREATE_SPACE_COMMENT = " COMMENT = '%s'"; 31 | 32 | // Delimiter 33 | public static String COMMA = ","; 34 | public static String SUB_LINE = "_"; 35 | public static String POINT = "."; 36 | public static String SPLIT_POINT = "\\."; 37 | public static String COLON = ":"; 38 | 39 | 40 | // default value for read & write 41 | public static final int DEFAULT_SCAN_LIMIT = 2000; 42 | public static final int DEFAULT_WRITE_BATCH_SIZE = 2000; 43 | public static final int DEFAULT_BATCH_INTERVAL_MS = 0; 44 | public static final int DEFAULT_VERTEX_ID_INDEX = 0; 45 | public static final int DEFAULT_ROW_INFO_INDEX = -1; 46 | 47 | // default value for connection 48 | public static final int DEFAULT_TIMEOUT_MS = 1000; 49 | public static final int DEFAULT_CONNECT_TIMEOUT_MS = 3000; 50 | public static final int DEFAULT_CONNECT_RETRY = 3; 51 | public static final int DEFAULT_EXECUTION_RETRY = 3; 52 | public static final int DEFAULT_RETRY_DELAY_MS = 1000; 53 | 54 | // params for create space 55 | public static final String CREATE_VID_TYPE = "vid_type"; 56 | public static final String CREATE_PARTITION_NUM = "partition_num"; 57 | public static final String CREATE_REPLICA_FACTOR = "replica_factor"; 58 | 59 | // default params for create space 60 | public static final int DEFAULT_PARTITION_NUM = 100; 61 | public static final int DEFAULT_REPLICA_FACTOR = 1; 62 | } 63 | -------------------------------------------------------------------------------- /connector/src/test/java/org/apache/flink/connector/nebula/MockData.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 
4 | */ 5 | 6 | package org.apache.flink.connector.nebula; 7 | 8 | public class MockData { 9 | 10 | public static String createStringSpace() { 11 | return "CLEAR SPACE IF EXISTS `test_string`;" 12 | + "CREATE SPACE IF NOT EXISTS test_string(partition_num=10," 13 | + "vid_type=fixed_string(8));" 14 | + "USE test_string;" 15 | + "CREATE TAG IF NOT EXISTS person(col1 fixed_string(8), col2 string, col3 int32," 16 | + " col4 double, col5 date, col6 datetime, col7 time, col8 timestamp);" 17 | + "CREATE EDGE IF NOT EXISTS friend(col1 fixed_string(8), col2 string, col3 " 18 | + "int32, col4 double, col5 date, col6 datetime, col7 time, col8 timestamp);"; 19 | } 20 | 21 | public static String createIntSpace() { 22 | return "CLEAR SPACE IF EXISTS `test_int`;" 23 | + "CREATE SPACE IF NOT EXISTS test_int(partition_num=10,vid_type=int64);" 24 | + "USE test_int;" 25 | + "CREATE TAG IF NOT EXISTS person(col1 fixed_string(8), col2 string, col3 int32," 26 | + " col4 double, col5 date, col6 datetime, col7 time, col8 timestamp);" 27 | + "CREATE EDGE IF NOT EXISTS friend(col1 fixed_string(8), col2 string, col3 " 28 | + "int32, col4 double, col5 date, col6 datetime, col7 time, col8 timestamp);"; 29 | } 30 | 31 | public static String createFlinkSinkSpace() { 32 | return "CLEAR SPACE IF EXISTS `flink_sink`;" 33 | + "CREATE SPACE IF NOT EXISTS flink_sink(partition_num=10," 34 | + "vid_type=fixed_string(8));" 35 | + "USE flink_sink;" 36 | + "CREATE TAG IF NOT EXISTS player(name string, age int);" 37 | + "CREATE EDGE IF NOT EXISTS follow(degree int);"; 38 | } 39 | 40 | public static String createFlinkTestSpace() { 41 | return "CLEAR SPACE IF EXISTS `flink_test`;" 42 | + " CREATE SPACE IF NOT EXISTS `flink_test` (partition_num = 100," 43 | + " charset = utf8, replica_factor = 3, collate = utf8_bin, vid_type = INT64);" 44 | + " USE `flink_test`;" 45 | + " CREATE TAG IF NOT EXISTS person (col1 string, col2 fixed_string(8)," 46 | + " col3 int8, col4 int16, col5 int32, col6 int64," 47 | + " col7 date, col8 datetime, col9 timestamp, col10 bool," 48 | + " col11 double, col12 float, col13 time, col14 geography);" 49 | + " CREATE EDGE IF NOT EXISTS friend (col1 string, col2 fixed_string(8)," 50 | + " col3 int8, col4 int16, col5 int32, col6 int64," 51 | + " col7 date, col8 datetime, col9 timestamp, col10 bool," 52 | + " col11 double, col12 float, col13 time, col14 geography);"; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaEdgeBatchTableOutputFormat.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.connector.nebula.sink; 2 | 3 | import java.util.Map; 4 | import java.util.function.Function; 5 | import org.apache.flink.connector.nebula.connection.NebulaGraphConnectionProvider; 6 | import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider; 7 | import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions; 8 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 9 | import org.apache.flink.connector.nebula.utils.WriteModeEnum; 10 | import org.apache.flink.table.connector.sink.DynamicTableSink.DataStructureConverter; 11 | import org.apache.flink.table.data.RowData; 12 | import org.apache.flink.types.Row; 13 | 14 | public class NebulaEdgeBatchTableOutputFormat 15 | extends NebulaBatchOutputFormat<RowData, EdgeExecutionOptions> { 16 | private final DataStructureConverter dataStructureConverter; 17 | 18 | public
NebulaEdgeBatchTableOutputFormat(NebulaGraphConnectionProvider graphProvider, 19 | NebulaMetaConnectionProvider metaProvider, 20 | EdgeExecutionOptions executionOptions, 21 | DataStructureConverter dataStructureConverter) { 22 | super(graphProvider, metaProvider, executionOptions); 23 | this.dataStructureConverter = dataStructureConverter; 24 | } 25 | 26 | @Override 27 | protected NebulaBatchExecutor<RowData> createNebulaBatchExecutor() { 28 | VidTypeEnum vidType = metaProvider.getVidType(metaClient, executionOptions.getGraphSpace()); 29 | Map<String, Integer> schema = metaProvider.getEdgeSchema( 30 | metaClient, 31 | executionOptions.getGraphSpace(), 32 | executionOptions.getLabel()); 33 | EdgeExecutionOptions insertOptions = executionOptions.toBuilder() 34 | .setWriteMode(WriteModeEnum.INSERT) 35 | .build(); 36 | EdgeExecutionOptions deleteOptions = executionOptions.toBuilder() 37 | .setWriteMode(WriteModeEnum.DELETE) 38 | .build(); 39 | return new NebulaTableBufferReducedExecutor(dataStructureConverter, 40 | createKeyExtractor(executionOptions.getSrcIndex(), 41 | executionOptions.getDstIndex(), 42 | executionOptions.getRankIndex()), 43 | new NebulaEdgeBatchExecutor(insertOptions, vidType, schema), 44 | new NebulaEdgeBatchExecutor(deleteOptions, vidType, schema)); 45 | } 46 | 47 | private static Function<Row, Row> createKeyExtractor(int srcIdIndex, 48 | int dstIdIndex, int rankIdIndex) { 49 | return row -> { 50 | Row key = new Row(3); 51 | key.setField(0, row.getField(srcIdIndex)); 52 | key.setField(1, row.getField(dstIdIndex)); 53 | if (rankIdIndex >= 0) { 54 | key.setField(2, row.getField(rankIdIndex)); 55 | } 56 | return key; 57 | }; 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaTableBufferReducedExecutor.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.connector.nebula.sink; 2 | 3 | import com.vesoft.nebula.client.graph.net.Session; 4 | import java.io.IOException; 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | import java.util.function.Function; 8 | import org.apache.flink.api.java.tuple.Tuple2; 9 | import org.apache.flink.table.connector.sink.DynamicTableSink.DataStructureConverter; 10 | import org.apache.flink.table.data.RowData; 11 | import org.apache.flink.types.Row; 12 | 13 | public class NebulaTableBufferReducedExecutor extends NebulaBatchExecutor<RowData> { 14 | private final DataStructureConverter dataStructureConverter; 15 | private final Function<Row, Row> keyExtractor; 16 | private final NebulaBatchExecutor<Row> insertExecutor; 17 | private final NebulaBatchExecutor<Row> deleteExecutor; 18 | private final Map<Row, Tuple2<Boolean, Row>> reduceBuffer = new HashMap<>(); 19 | 20 | public NebulaTableBufferReducedExecutor(DataStructureConverter dataStructureConverter, 21 | Function<Row, Row> keyExtractor, 22 | NebulaBatchExecutor<Row> insertExecutor, 23 | NebulaBatchExecutor<Row> deleteExecutor) { 24 | this.dataStructureConverter = dataStructureConverter; 25 | this.keyExtractor = keyExtractor; 26 | this.insertExecutor = insertExecutor; 27 | this.deleteExecutor = deleteExecutor; 28 | } 29 | 30 | @Override 31 | public void addToBatch(RowData record) { 32 | boolean isUpsert; 33 | switch (record.getRowKind()) { 34 | case INSERT: 35 | case UPDATE_AFTER: 36 | isUpsert = true; 37 | break; 38 | case UPDATE_BEFORE: 39 | case DELETE: 40 | isUpsert = false; 41 | break; 42 | default: 43 | return; 44 | } 45 | Row row = (Row) dataStructureConverter.toExternal(record); 46 | Row key =
keyExtractor.apply(row); 47 | reduceBuffer.put(key, Tuple2.of(isUpsert, row)); 48 | } 49 | 50 | @Override 51 | public void clearBatch() { 52 | reduceBuffer.clear(); 53 | } 54 | 55 | @Override 56 | public boolean isBatchEmpty() { 57 | return reduceBuffer.isEmpty(); 58 | } 59 | 60 | @Override 61 | public void executeBatch(Session session) throws IOException { 62 | if (isBatchEmpty()) { 63 | return; 64 | } 65 | for (Tuple2<Boolean, Row> value : reduceBuffer.values()) { 66 | boolean isUpsert = value.f0; 67 | Row row = value.f1; 68 | if (isUpsert) { 69 | insertExecutor.addToBatch(row); 70 | } else { 71 | deleteExecutor.addToBatch(row); 72 | } 73 | } 74 | try { 75 | insertExecutor.executeBatch(session); 76 | deleteExecutor.executeBatch(session); 77 | } finally { 78 | insertExecutor.clearBatch(); 79 | deleteExecutor.clearBatch(); 80 | } 81 | clearBatch(); 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaEdgeBatchExecutor.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.sink; 7 | 8 | import com.vesoft.nebula.client.graph.net.Session; 9 | import java.io.IOException; 10 | import java.util.ArrayList; 11 | import java.util.List; 12 | import java.util.Map; 13 | import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions; 14 | import org.apache.flink.connector.nebula.utils.NebulaEdge; 15 | import org.apache.flink.connector.nebula.utils.NebulaEdges; 16 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 17 | import org.apache.flink.types.Row; 18 | import org.slf4j.Logger; 19 | import org.slf4j.LoggerFactory; 20 | 21 | public class NebulaEdgeBatchExecutor extends NebulaBatchExecutor<Row> { 22 | private static final Logger LOG = LoggerFactory.getLogger(NebulaEdgeBatchExecutor.class); 23 | private final EdgeExecutionOptions executionOptions; 24 | private final List<NebulaEdge> nebulaEdgeList; 25 | private final NebulaRowEdgeOutputFormatConverter converter; 26 | 27 | public NebulaEdgeBatchExecutor(EdgeExecutionOptions executionOptions, 28 | VidTypeEnum vidType, Map<String, Integer> schema) { 29 | this.executionOptions = executionOptions; 30 | this.nebulaEdgeList = new ArrayList<>(); 31 | this.converter = new NebulaRowEdgeOutputFormatConverter(executionOptions, vidType, schema); 32 | } 33 | 34 | /** 35 | * put record into buffer 36 | */ 37 | @Override 38 | public void addToBatch(Row record) { 39 | NebulaEdge edge = converter.createEdge(record, executionOptions.getPolicy()); 40 | if (edge == null) { 41 | return; 42 | } 43 | nebulaEdgeList.add(edge); 44 | } 45 | 46 | @Override 47 | public void clearBatch() { 48 | nebulaEdgeList.clear(); 49 | } 50 | 51 | @Override 52 | public boolean isBatchEmpty() { 53 | return nebulaEdgeList.isEmpty(); 54 | } 55 | 56 | @Override 57 | public void executeBatch(Session session) throws IOException { 58 | if (isBatchEmpty()) { 59 | return; 60 | } 61 | NebulaEdges nebulaEdges = new NebulaEdges(executionOptions.getLabel(), 62 | executionOptions.getFields(), nebulaEdgeList, executionOptions.getPolicy(), 63 | executionOptions.getPolicy()); 64 | // generate the write ngql statement 65 | String statement = null; 66 | switch (executionOptions.getWriteMode()) { 67 | case INSERT: 68 | statement = nebulaEdges.getInsertStatement(); 69 | break; 70 | case UPDATE: 71 | statement =
nebulaEdges.getUpdateStatement(); 72 | break; 73 | case DELETE: 74 | statement = nebulaEdges.getDeleteStatement(); 75 | break; 76 | default: 77 | throw new IllegalArgumentException("write mode is not supported"); 78 | } 79 | executeStatement(session, statement); 80 | clearBatch(); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaEdgeSource.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import com.vesoft.nebula.client.storage.StorageClient; 9 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 10 | import com.vesoft.nebula.client.storage.data.EdgeTableRow; 11 | import com.vesoft.nebula.client.storage.scan.ScanEdgeResult; 12 | import com.vesoft.nebula.client.storage.scan.ScanEdgeResultIterator; 13 | import java.util.Iterator; 14 | import java.util.List; 15 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 16 | 17 | /** 18 | * Nebula Graph Edge reader 19 | */ 20 | public class NebulaEdgeSource extends NebulaSource { 21 | ScanEdgeResultIterator iterator = null; 22 | Iterator<EdgeTableRow> dataIterator = null; 23 | Iterator<Integer> scanPartIterator; 24 | 25 | public NebulaEdgeSource(StorageClient storageClient, 26 | ExecutionOptions executionOptions, List<Integer> scanParts) { 27 | super(storageClient, executionOptions); 28 | this.scanPartIterator = scanParts.iterator(); 29 | } 30 | 31 | public void getEdgeDataRow(int part) { 32 | if (executionOptions.isNoColumn()) { 33 | iterator = storageClient.scanEdge( 34 | executionOptions.getGraphSpace(), 35 | part, 36 | executionOptions.getLabel(), 37 | executionOptions.getLimit(), 38 | executionOptions.getStartTime(), 39 | executionOptions.getEndTime(), 40 | true, 41 | true 42 | ); 43 | } else { 44 | iterator = storageClient.scanEdge( 45 | executionOptions.getGraphSpace(), 46 | part, 47 | executionOptions.getLabel(), 48 | executionOptions.getFields(), 49 | executionOptions.getLimit(), 50 | executionOptions.getStartTime(), 51 | executionOptions.getEndTime(), 52 | true, 53 | true 54 | ); 55 | } 56 | } 57 | 58 | @Override 59 | public boolean hasNext() throws Exception { 60 | if (dataIterator == null && iterator == null && !scanPartIterator.hasNext()) { 61 | return false; 62 | } 63 | 64 | while (dataIterator == null || !dataIterator.hasNext()) { 65 | if (iterator == null || !iterator.hasNext()) { 66 | if (scanPartIterator.hasNext()) { 67 | getEdgeDataRow(scanPartIterator.next()); 68 | continue; 69 | } 70 | break; 71 | } else { 72 | ScanEdgeResult next = iterator.next(); 73 | if (!next.isEmpty()) { 74 | dataIterator = next.getEdgeTableRows().iterator(); 75 | } 76 | } 77 | } 78 | 79 | if (dataIterator == null) { 80 | return false; 81 | } 82 | return dataIterator.hasNext(); 83 | } 84 | 85 | @Override 86 | public BaseTableRow next() { 87 | return dataIterator.next(); 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaVertexSource.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License.
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import com.vesoft.nebula.client.storage.StorageClient; 9 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 10 | import com.vesoft.nebula.client.storage.data.VertexTableRow; 11 | import com.vesoft.nebula.client.storage.scan.ScanVertexResult; 12 | import com.vesoft.nebula.client.storage.scan.ScanVertexResultIterator; 13 | import java.util.Iterator; 14 | import java.util.List; 15 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 16 | 17 | /** 18 | * Nebula Graph Vertex reader 19 | */ 20 | public class NebulaVertexSource extends NebulaSource { 21 | ScanVertexResultIterator iterator = null; 22 | Iterator<VertexTableRow> dataIterator = null; 23 | Iterator<Integer> scanPartIterator; 24 | 25 | public NebulaVertexSource(StorageClient storageClient, ExecutionOptions executionOptions, 26 | List<Integer> scanParts) { 27 | super(storageClient, executionOptions); 28 | this.scanPartIterator = scanParts.iterator(); 29 | } 30 | 31 | private void getVertexDataRow(int part) { 32 | if (executionOptions.isNoColumn()) { 33 | iterator = storageClient.scanVertex( 34 | executionOptions.getGraphSpace(), 35 | part, 36 | executionOptions.getLabel(), 37 | executionOptions.getLimit(), 38 | executionOptions.getStartTime(), 39 | executionOptions.getEndTime(), 40 | true, 41 | true); 42 | } else { 43 | iterator = storageClient.scanVertex( 44 | executionOptions.getGraphSpace(), 45 | part, 46 | executionOptions.getLabel(), 47 | executionOptions.getFields(), 48 | executionOptions.getLimit(), 49 | executionOptions.getStartTime(), 50 | executionOptions.getEndTime(), 51 | true, 52 | true); 53 | } 54 | } 55 | 56 | @Override 57 | public boolean hasNext() throws Exception { 58 | if (dataIterator == null && iterator == null && !scanPartIterator.hasNext()) { 59 | return false; 60 | } 61 | 62 | while (dataIterator == null || !dataIterator.hasNext()) { 63 | if (iterator == null || !iterator.hasNext()) { 64 | if (scanPartIterator.hasNext()) { 65 | getVertexDataRow(scanPartIterator.next()); 66 | continue; 67 | } 68 | break; 69 | } else { 70 | ScanVertexResult next = iterator.next(); 71 | if (!next.isEmpty()) { 72 | dataIterator = next.getVertexTableRows().iterator(); 73 | } 74 | } 75 | } 76 | 77 | if (dataIterator == null) { 78 | return false; 79 | } 80 | return dataIterator.hasNext(); 81 | } 82 | 83 | @Override 84 | public BaseTableRow next() { 85 | return dataIterator.next(); 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaVertexBatchExecutor.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License.
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.sink; 7 | 8 | import com.vesoft.nebula.client.graph.net.Session; 9 | import java.io.IOException; 10 | import java.util.ArrayList; 11 | import java.util.List; 12 | import java.util.Map; 13 | import org.apache.flink.connector.nebula.statement.VertexExecutionOptions; 14 | import org.apache.flink.connector.nebula.utils.NebulaVertex; 15 | import org.apache.flink.connector.nebula.utils.NebulaVertices; 16 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 17 | import org.apache.flink.types.Row; 18 | import org.slf4j.Logger; 19 | import org.slf4j.LoggerFactory; 20 | 21 | public class NebulaVertexBatchExecutor extends NebulaBatchExecutor<Row> { 22 | private static final Logger LOG = LoggerFactory.getLogger(NebulaVertexBatchExecutor.class); 23 | private final VertexExecutionOptions executionOptions; 24 | private final List<NebulaVertex> nebulaVertexList; 25 | private final NebulaRowVertexOutputFormatConverter converter; 26 | 27 | public NebulaVertexBatchExecutor(VertexExecutionOptions executionOptions, 28 | VidTypeEnum vidType, Map<String, Integer> schema) { 29 | this.executionOptions = executionOptions; 30 | this.nebulaVertexList = new ArrayList<>(); 31 | this.converter = new NebulaRowVertexOutputFormatConverter(executionOptions, 32 | vidType, schema); 33 | } 34 | 35 | /** 36 | * put record into buffer 37 | * 38 | * @param record represent vertex or edge 39 | */ 40 | @Override 41 | public void addToBatch(Row record) { 42 | NebulaVertex vertex = converter.createVertex(record, executionOptions.getPolicy()); 43 | if (vertex == null) { 44 | return; 45 | } 46 | nebulaVertexList.add(vertex); 47 | } 48 | 49 | @Override 50 | public void clearBatch() { 51 | nebulaVertexList.clear(); 52 | } 53 | 54 | @Override 55 | public boolean isBatchEmpty() { 56 | return nebulaVertexList.isEmpty(); 57 | } 58 | 59 | @Override 60 | public void executeBatch(Session session) throws IOException { 61 | if (isBatchEmpty()) { 62 | return; 63 | } 64 | NebulaVertices nebulaVertices = new NebulaVertices(executionOptions.getLabel(), 65 | executionOptions.getFields(), nebulaVertexList, executionOptions.getPolicy(), 66 | executionOptions.isDeleteExecutedWithEdges()); 67 | // generate the write ngql statement 68 | String statement = null; 69 | switch (executionOptions.getWriteMode()) { 70 | case INSERT: 71 | statement = nebulaVertices.getInsertStatement(); 72 | break; 73 | case UPDATE: 74 | statement = nebulaVertices.getUpdateStatement(); 75 | break; 76 | case DELETE: 77 | statement = nebulaVertices.getDeleteStatement(); 78 | break; 79 | default: 80 | throw new IllegalArgumentException("write mode is not supported"); 81 | } 82 | executeStatement(session, statement); 83 | clearBatch(); 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/connection/NebulaStorageConnectionProvider.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License.
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.connection; 7 | 8 | import com.vesoft.nebula.client.graph.data.CASignedSSLParam; 9 | import com.vesoft.nebula.client.graph.data.HostAddress; 10 | import com.vesoft.nebula.client.graph.data.SSLParam; 11 | import com.vesoft.nebula.client.graph.data.SelfSignedSSLParam; 12 | import com.vesoft.nebula.client.storage.StorageClient; 13 | import java.io.Serializable; 14 | import java.util.List; 15 | 16 | public class NebulaStorageConnectionProvider implements Serializable { 17 | 18 | private static final long serialVersionUID = -3822165815516596188L; 19 | 20 | private NebulaClientOptions nebulaClientOptions; 21 | 22 | public NebulaStorageConnectionProvider(NebulaClientOptions nebulaClientOptions) { 23 | this.nebulaClientOptions = nebulaClientOptions; 24 | } 25 | 26 | public NebulaStorageConnectionProvider() { 27 | } 28 | 29 | public StorageClient getStorageClient() throws Exception { 30 | List<HostAddress> addresses = nebulaClientOptions.getMetaAddress(); 31 | int timeout = nebulaClientOptions.getTimeout(); 32 | int retry = nebulaClientOptions.getConnectRetry(); 33 | StorageClient storageClient; 34 | if (nebulaClientOptions.isEnableStorageSSL()) { 35 | switch (nebulaClientOptions.getSSLSignType()) { 36 | case CA: { 37 | CASignParams caSignParams = nebulaClientOptions.getCaSignParam(); 38 | SSLParam sslParam = new CASignedSSLParam(caSignParams.getCaCrtFilePath(), 39 | caSignParams.getCrtFilePath(), caSignParams.getKeyFilePath()); 40 | storageClient = new StorageClient(addresses, timeout, retry, retry, true, 41 | sslParam); 42 | break; 43 | } 44 | case SELF: { 45 | SelfSignParams selfSignParams = nebulaClientOptions.getSelfSignParam(); 46 | SSLParam sslParam = new SelfSignedSSLParam(selfSignParams.getCrtFilePath(), 47 | selfSignParams.getKeyFilePath(), selfSignParams.getPassword()); 48 | storageClient = new StorageClient(addresses, timeout, retry, retry, true, 49 | sslParam); 50 | break; 51 | } 52 | default: 53 | throw new IllegalArgumentException("ssl sign type is not supported."); 54 | } 55 | } else { 56 | storageClient = new StorageClient(addresses, timeout); 57 | } 58 | 59 | storageClient.setUser(nebulaClientOptions.getUsername()); 60 | storageClient.setPassword(nebulaClientOptions.getPassword()); 61 | if (!storageClient.connect()) { 62 | throw new Exception("failed to connect storaged."); 63 | } 64 | return storageClient; 65 | } 66 | 67 | public NebulaClientOptions getNebulaClientOptions() { 68 | return nebulaClientOptions; 69 | } 70 | 71 | public void setNebulaClientOptions(NebulaClientOptions nebulaClientOptions) { 72 | this.nebulaClientOptions = nebulaClientOptions; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaRowVertexOutputFormatConverter.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License.
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.sink; 7 | 8 | import com.esotericsoftware.minlog.Log; 9 | import java.io.Serializable; 10 | import java.util.ArrayList; 11 | import java.util.HashMap; 12 | import java.util.List; 13 | import java.util.Map; 14 | import org.apache.flink.connector.nebula.statement.VertexExecutionOptions; 15 | import org.apache.flink.connector.nebula.utils.NebulaUtils; 16 | import org.apache.flink.connector.nebula.utils.NebulaVertex; 17 | import org.apache.flink.connector.nebula.utils.PolicyEnum; 18 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 19 | import org.apache.flink.types.Row; 20 | 21 | public class NebulaRowVertexOutputFormatConverter implements Serializable { 22 | 23 | private static final long serialVersionUID = -7728344698410737677L; 24 | 25 | private final int idIndex; 26 | private final VidTypeEnum vidType; 27 | private final List<Integer> positions; 28 | private final Map<Integer, String> pos2Field; 29 | private final Map<String, Integer> schema; 30 | 31 | 32 | public NebulaRowVertexOutputFormatConverter(VertexExecutionOptions executionOptions, 33 | VidTypeEnum vidType, 34 | Map<String, Integer> schema) { 35 | this.idIndex = executionOptions.getIdIndex(); 36 | this.vidType = vidType; 37 | this.positions = executionOptions.getPositions(); 38 | this.pos2Field = new HashMap<>(); 39 | List<String> fields = executionOptions.getFields(); 40 | for (int i = 0; i < positions.size(); i++) { 41 | this.pos2Field.put(positions.get(i), fields.get(i)); 42 | } 43 | this.schema = schema; 44 | } 45 | 46 | 47 | public NebulaVertex createVertex(Row row, PolicyEnum policy) { 48 | // check row data 49 | if (row == null || row.getArity() == 0) { 50 | Log.error("empty row"); 51 | return null; 52 | } 53 | Object id = row.getField(idIndex); 54 | if (id == null) { 55 | Log.error("invalid vertex id: id is null"); 56 | return null; 57 | } 58 | // extract vertex properties 59 | List<String> vertexProps = new ArrayList<>(); 60 | for (int i : positions) { 61 | String propName = pos2Field.get(i); 62 | if (propName == null || !schema.containsKey(propName)) { 63 | throw new IllegalArgumentException("position " + i + " or field " + propName 64 | + " does not exist."); 65 | } 66 | int type = schema.get(propName); 67 | vertexProps.add(NebulaUtils.extraValue(row.getField(i), type)); 68 | } 69 | 70 | // format vertex id 71 | String formatId = String.valueOf(id); 72 | if (policy == null) { 73 | if (vidType == VidTypeEnum.STRING) { 74 | formatId = NebulaUtils.mkString(NebulaUtils.escapeUtil(String.valueOf(formatId)), 75 | "\"", "", "\""); 76 | } else { 77 | assert (NebulaUtils.isNumeric(formatId)); 78 | } 79 | } else { 80 | assert (vidType == VidTypeEnum.INT); 81 | } 82 | NebulaVertex vertex = new NebulaVertex(formatId, vertexProps); 83 | return vertex; 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /connector/src/test/java/org/apache/flink/connector/nebula/utils/NebulaUtilsTest.java: -------------------------------------------------------------------------------- 1 | package org.apache.flink.connector.nebula.utils; 2 | 3 | import com.vesoft.nebula.PropertyType; 4 | import junit.framework.TestCase; 5 | 6 | public class NebulaUtilsTest extends TestCase { 7 | 8 | public void testGetHostAndPorts() { 9 | assert (NebulaUtils.getHostAndPorts("127.0.0.1:9669").size() == 1); 10 | assert (NebulaUtils.getHostAndPorts("127.0.0.1:9669,127.0.0.1:9670").size() == 2); 11 | try { 12 | NebulaUtils.getHostAndPorts(null); 13 | } catch (IllegalArgumentException e) { 14 | assert (true); 15 | } catch (Exception
--------------------------------------------------------------------------------
/connector/src/test/java/org/apache/flink/connector/nebula/utils/NebulaUtilsTest.java:
--------------------------------------------------------------------------------

package org.apache.flink.connector.nebula.utils;

import com.vesoft.nebula.PropertyType;
import junit.framework.TestCase;

public class NebulaUtilsTest extends TestCase {

    public void testGetHostAndPorts() {
        assert (NebulaUtils.getHostAndPorts("127.0.0.1:9669").size() == 1);
        assert (NebulaUtils.getHostAndPorts("127.0.0.1:9669,127.0.0.1:9670").size() == 2);
        try {
            NebulaUtils.getHostAndPorts(null);
        } catch (IllegalArgumentException e) {
            assert (true);
        } catch (Exception e) {
            assert (false);
        }

        try {
            NebulaUtils.getHostAndPorts("127.0.0.1");
        } catch (IllegalArgumentException e) {
            assert (true);
        } catch (Exception e) {
            assert (false);
        }
    }

    public void testIsNumeric() {
        assert (NebulaUtils.isNumeric("123456"));
        assert (NebulaUtils.isNumeric("0123456"));
        assert (NebulaUtils.isNumeric("-123456"));
        assert (NebulaUtils.isNumeric("000"));
        assert (!NebulaUtils.isNumeric("aaa"));
        assert (!NebulaUtils.isNumeric("0123aaa"));
        assert (!NebulaUtils.isNumeric("123a8"));
    }

    public void testExtraValue() {
        assert (null == NebulaUtils.extraValue(null, PropertyType.STRING.getValue()));
        assert ("\"\"".equals(NebulaUtils.extraValue("", PropertyType.STRING.getValue())));
        assert ("\"\"".equals(NebulaUtils.extraValue("", PropertyType.FIXED_STRING.getValue())));
        assert ("1".equals(NebulaUtils.extraValue(1, PropertyType.INT8.getValue())));
        assert ("timestamp(\"2021-01-01T12:12:12\")".equals(
                NebulaUtils.extraValue("2021-01-01T12:12:12", PropertyType.TIMESTAMP.getValue())));
        assert ("datetime(\"2021-01-01T12:12:12\")".equals(
                NebulaUtils.extraValue("2021-01-01T12:12:12", PropertyType.DATETIME.getValue())));
        assert ("date(\"2021-01-01\")".equals(NebulaUtils.extraValue("2021-01-01",
                PropertyType.DATE.getValue())));
        assert ("time(\"12:12:12\")".equals(NebulaUtils.extraValue("12:12:12",
                PropertyType.TIME.getValue())));
        assert ("ST_GeogFromText(\"POINT(1 3)\")".equals(NebulaUtils.extraValue("POINT(1 3)",
                PropertyType.GEOGRAPHY.getValue())));
        assert ("ST_GeogFromText(\"LINESTRING(1 2, 3 4)\")".equals(NebulaUtils.extraValue(
                "LINESTRING(1 2, 3 4)",
                PropertyType.GEOGRAPHY.getValue())));
        assert ("ST_GeogFromText(\"POLYGON((0 1, 1 2, 2 3, 0 1))\")"
                .equals(NebulaUtils.extraValue("POLYGON((0 1, 1 2, 2 3, 0 1))",
                        PropertyType.GEOGRAPHY.getValue())));
    }

    public void testMkString() {
        assertEquals("\"test\"", NebulaUtils.mkString("test", "\"", "", "\""));
        assertEquals("\"t,e,s,t\"", NebulaUtils.mkString("test", "\"", ",", "\""));
    }

    public void testCheckValidVidType() {
        assertTrue(NebulaUtils.checkValidVidType("INT"));
        assertTrue(NebulaUtils.checkValidVidType("INT64"));
        assertTrue(NebulaUtils.checkValidVidType("FIXED_STRING(10)"));

        assertFalse(NebulaUtils.checkValidVidType("INT32"));
        assertFalse(NebulaUtils.checkValidVidType("FIXED_STRING"));
        assertFalse(NebulaUtils.checkValidVidType("FIXED_STRING(-1)"));
        assertFalse(NebulaUtils.checkValidVidType("FIXED_STRING(aaa)"));
    }
}
--------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/table/NebulaDynamicTableSink.java:
--------------------------------------------------------------------------------

package org.apache.flink.connector.nebula.table;

import org.apache.flink.connector.nebula.connection.NebulaClientOptions;
import org.apache.flink.connector.nebula.connection.NebulaGraphConnectionProvider;
import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider;
import org.apache.flink.connector.nebula.sink.NebulaBatchOutputFormat;
import org.apache.flink.connector.nebula.sink.NebulaEdgeBatchTableOutputFormat;
import org.apache.flink.connector.nebula.sink.NebulaSinkFunction;
import org.apache.flink.connector.nebula.sink.NebulaVertexBatchTableOutputFormat;
import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions;
import org.apache.flink.connector.nebula.statement.ExecutionOptions;
import org.apache.flink.connector.nebula.statement.VertexExecutionOptions;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.sink.SinkFunctionProvider;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.types.RowKind;

public class NebulaDynamicTableSink implements DynamicTableSink {
    private final NebulaClientOptions nebulaClientOptions;
    private final ExecutionOptions executionOptions;
    final DataType producedDataType;

    public NebulaDynamicTableSink(NebulaClientOptions clientOptions,
                                  ExecutionOptions executionOptions, DataType producedDataType) {
        this.nebulaClientOptions = clientOptions;
        this.executionOptions = executionOptions;
        this.producedDataType = producedDataType;
    }

    @Override
    public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
        ChangelogMode.Builder builder = ChangelogMode.newBuilder();
        for (RowKind kind : requestedMode.getContainedKinds()) {
            if (kind != RowKind.UPDATE_BEFORE) {
                builder.addContainedKind(kind);
            }
        }
        return builder.build();
    }

    @Override
    public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {

        NebulaGraphConnectionProvider graphProvider =
                new NebulaGraphConnectionProvider(nebulaClientOptions);
        NebulaMetaConnectionProvider metaProvider =
                new NebulaMetaConnectionProvider(nebulaClientOptions);
        DataStructureConverter converter =
                context.createDataStructureConverter(producedDataType);
        NebulaBatchOutputFormat<RowData, ?> outputFormat;
        if (executionOptions instanceof VertexExecutionOptions) {
            outputFormat = new NebulaVertexBatchTableOutputFormat(graphProvider, metaProvider,
                    (VertexExecutionOptions) executionOptions, converter);
        } else if (executionOptions instanceof EdgeExecutionOptions) {
            outputFormat = new NebulaEdgeBatchTableOutputFormat(graphProvider, metaProvider,
                    (EdgeExecutionOptions) executionOptions, converter);
        } else {
            throw new IllegalArgumentException("unknown execution options type");
        }
        NebulaSinkFunction<RowData> sinkFunction = new NebulaSinkFunction<>(outputFormat);
        return SinkFunctionProvider.of(sinkFunction);
    }

    @Override
    public DynamicTableSink copy() {
        // returning null here would violate the DynamicTableSink contract;
        // hand back an equivalent instance instead
        return new NebulaDynamicTableSink(nebulaClientOptions, executionOptions,
                producedDataType);
    }

    @Override
    public String asSummaryString() {
        return "NebulaDynamicTableSink";
    }
}
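A hedged construction example (the planner normally instantiates this sink through the connector's table factory; `clientOptions`, `vertexExecutionOptions`, and `dataType` are placeholders built elsewhere):

    NebulaDynamicTableSink sink = new NebulaDynamicTableSink(
            clientOptions,            // connection settings for graphd/metad
            vertexExecutionOptions,   // a VertexExecutionOptions routes to the vertex format
            dataType);                // physical row type of the table
    // getChangelogMode drops UPDATE_BEFORE, so the sink sees INSERT/UPDATE_AFTER/DELETE only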
--------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/connection/NebulaGraphConnectionProvider.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2020 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula.connection;

import com.vesoft.nebula.client.graph.NebulaPoolConfig;
import com.vesoft.nebula.client.graph.data.CASignedSSLParam;
import com.vesoft.nebula.client.graph.data.HostAddress;
import com.vesoft.nebula.client.graph.data.SSLParam;
import com.vesoft.nebula.client.graph.data.SelfSignedSSLParam;
import com.vesoft.nebula.client.graph.net.NebulaPool;
import java.io.Serializable;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.flink.connector.nebula.utils.NebulaConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NebulaGraphConnectionProvider implements Serializable {
    private static final Logger LOG = LoggerFactory.getLogger(NebulaGraphConnectionProvider.class);

    private static final long serialVersionUID = 8392002706492085208L;

    private final NebulaClientOptions nebulaClientOptions;

    public NebulaGraphConnectionProvider(NebulaClientOptions nebulaClientOptions) {
        this.nebulaClientOptions = nebulaClientOptions;
    }

    /**
     * get NebulaPool from which sessions are created to execute query statements
     */
    public NebulaPool getNebulaPool() throws UnknownHostException {
        List<HostAddress> addresses = new ArrayList<>();
        for (String address : nebulaClientOptions.getGraphAddress().split(NebulaConstant.COMMA)) {
            String[] hostAndPort = address.split(NebulaConstant.COLON);
            addresses.add(new HostAddress(hostAndPort[0], Integer.parseInt(hostAndPort[1])));
        }

        Collections.shuffle(addresses);
        NebulaPoolConfig poolConfig = new NebulaPoolConfig();
        poolConfig.setTimeout(nebulaClientOptions.getTimeout());
        if (nebulaClientOptions.isEnableGraphSSL()) {
            poolConfig.setEnableSsl(true);
            switch (nebulaClientOptions.getSSLSignType()) {
                case CA: {
                    CASignParams caSignParams = nebulaClientOptions.getCaSignParam();
                    SSLParam sslParam = new CASignedSSLParam(caSignParams.getCaCrtFilePath(),
                            caSignParams.getCrtFilePath(), caSignParams.getKeyFilePath());
                    poolConfig.setSslParam(sslParam);
                    break;
                }
                case SELF: {
                    SelfSignParams selfSignParams = nebulaClientOptions.getSelfSignParam();
                    SSLParam sslParam = new SelfSignedSSLParam(selfSignParams.getCrtFilePath(),
                            selfSignParams.getKeyFilePath(), selfSignParams.getPassword());
                    poolConfig.setSslParam(sslParam);
                    break;
                }
                default:
                    throw new IllegalArgumentException("ssl sign type is not supported.");
            }
        }
        NebulaPool nebulaPool = new NebulaPool();
        if (nebulaPool.init(addresses, poolConfig)) {
            return nebulaPool;
        } else {
            throw new RuntimeException("NebulaPool init failed.");
        }
    }

    /**
     * get username
     */
    public String getUserName() {
        return nebulaClientOptions.getUsername();
    }

    /**
     * get password
     */
    public String getPassword() {
        return nebulaClientOptions.getPassword();
    }
}
--------------------------------------------------------------------------------
/connector/src/test/java/org/apache/flink/connector/nebula/connection/NebulaGraphConnectionProviderTest.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2021 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula.connection;

import com.vesoft.nebula.client.graph.exception.AuthFailedException;
import com.vesoft.nebula.client.graph.exception.ClientServerIncompatibleException;
import com.vesoft.nebula.client.graph.exception.IOErrorException;
import com.vesoft.nebula.client.graph.exception.NotValidConnectionException;
import com.vesoft.nebula.client.graph.net.NebulaPool;
import java.net.UnknownHostException;
import org.apache.flink.connector.nebula.utils.SSLSignType;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NebulaGraphConnectionProviderTest {
    private static final Logger LOG =
            LoggerFactory.getLogger(NebulaGraphConnectionProviderTest.class);

    @Before
    public void setUp() throws Exception {
    }

    @After
    public void tearDown() throws Exception {
    }

    @Test
    public void getNebulaPool() {
        NebulaClientOptions nebulaClientOptions =
                new NebulaClientOptions.NebulaClientOptionsBuilder()
                        .setGraphAddress("127.0.0.1:9669")
                        .setMetaAddress("127.0.0.1:9559")
                        .setUsername("root")
                        .setPassword("nebula")
                        .setConnectRetry(1)
                        .setTimeout(1000)
                        .build();
        NebulaGraphConnectionProvider graphConnectionProvider =
                new NebulaGraphConnectionProvider(nebulaClientOptions);
        try {
            NebulaPool nebulaPool = graphConnectionProvider.getNebulaPool();
            nebulaPool.getSession("root", "nebula", true);
        } catch (Exception e) {
            LOG.info("get session failed", e);
            assert (false);
        }
    }


    /**
     * The nebula server does not enable SSL, so an SSL connection cannot be established.
     */
    @Test(expected = RuntimeException.class)
    public void getSessionWithSsl() throws NotValidConnectionException {
        NebulaClientOptions nebulaClientOptions =
                new NebulaClientOptions.NebulaClientOptionsBuilder()
                        .setGraphAddress("127.0.0.1:9669")
                        .setMetaAddress("127.0.0.1:9559")
                        .setUsername("root")
                        .setPassword("nebula")
                        .setConnectRetry(1)
                        .setTimeout(1000)
                        .setEnableGraphSSL(true)
                        .setEnableMetaSSL(true)
                        .setSSLSignType(SSLSignType.CA)
                        .setCaSignParam("src/test/resources/ssl/casigned.pem",
                                "src/test/resources/ssl/casigned.crt",
                                "src/test/resources/ssl/casigned.key")
                        .build();


        NebulaGraphConnectionProvider graphConnectionProvider =
                new NebulaGraphConnectionProvider(nebulaClientOptions);

        try {
            NebulaPool pool = graphConnectionProvider.getNebulaPool();
            pool.getSession("root", "nebula", true);
        } catch (UnknownHostException | IOErrorException | AuthFailedException
                | ClientServerIncompatibleException e) {
            LOG.error("get session failed", e);
            assert (false);
        }
    }

    // todo test ssl for server
}
--------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/sink/NebulaRowEdgeOutputFormatConverter.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2020 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula.sink;

import com.esotericsoftware.minlog.Log;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions;
import org.apache.flink.connector.nebula.utils.NebulaEdge;
import org.apache.flink.connector.nebula.utils.NebulaUtils;
import org.apache.flink.connector.nebula.utils.PolicyEnum;
import org.apache.flink.connector.nebula.utils.VidTypeEnum;
import org.apache.flink.types.Row;

public class NebulaRowEdgeOutputFormatConverter implements Serializable {

    private final int srcIdIndex;
    private final int dstIdIndex;
    private final int rankIndex;
    private final VidTypeEnum vidType;
    private final List<Integer> positions;
    private final Map<Integer, String> pos2Field;
    private final Map<String, Integer> schema;

    public NebulaRowEdgeOutputFormatConverter(EdgeExecutionOptions executionOptions,
                                              VidTypeEnum vidType,
                                              Map<String, Integer> schema) {
        this.srcIdIndex = executionOptions.getSrcIndex();
        this.dstIdIndex = executionOptions.getDstIndex();
        this.rankIndex = executionOptions.getRankIndex();
        this.vidType = vidType;
        this.schema = schema;
        this.positions = executionOptions.getPositions();
        this.pos2Field = new HashMap<>();
        List<String> fields = executionOptions.getFields();
        for (int i = 0; i < positions.size(); i++) {
            this.pos2Field.put(positions.get(i), fields.get(i));
        }
    }


    public NebulaEdge createEdge(Row row, PolicyEnum policy) {
        // check row data
        if (row == null || row.getArity() == 0) {
            Log.error("empty row");
            return null;
        }
        Object srcId = row.getField(srcIdIndex);
        Object dstId = row.getField(dstIdIndex);
        if (srcId == null || dstId == null) {
            Log.error("null srcId or dstId");
            return null;
        }
        // extract edge properties
        List<String> edgeProps = new ArrayList<>();
        for (int i : positions) {
            String propName = pos2Field.get(i);
            // guard against unknown fields, mirroring the vertex converter,
            // so a missing property fails fast instead of throwing an NPE on unboxing
            if (propName == null || !schema.containsKey(propName)) {
                throw new IllegalArgumentException("position " + i + " or field " + propName
                        + " does not exist.");
            }
            int type = schema.get(propName);
            edgeProps.add(NebulaUtils.extraValue(row.getField(i), type));
        }

        // format edge source id and target id
        String srcFormatId = srcId.toString();
        String dstFormatId = dstId.toString();

        if (policy == null) {
            if (vidType == VidTypeEnum.STRING) {
                srcFormatId = NebulaUtils.mkString(srcFormatId, "\"", "", "\"");
                dstFormatId = NebulaUtils.mkString(dstFormatId, "\"", "", "\"");
            } else {
                assert (NebulaUtils.isNumeric(srcFormatId));
                assert (NebulaUtils.isNumeric(dstFormatId));
            }
        } else {
            assert (vidType == VidTypeEnum.INT);
        }

        // extract edge rank
        Long rank = null;
        if (rankIndex >= 0) {
            if (row.getField(rankIndex) == null) {
                rank = 0L;
            } else {
                rank = Long.parseLong(row.getField(rankIndex).toString());
            }
        }

        return new NebulaEdge(srcFormatId, dstFormatId, rank, edgeProps);
    }
}
--------------------------------------------------------------------------------
/connector/src/test/java/org/apache/flink/connector/nebula/NebulaITTestBase.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2022 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import com.vesoft.nebula.Row;
import com.vesoft.nebula.client.graph.NebulaPoolConfig;
import com.vesoft.nebula.client.graph.data.HostAddress;
import com.vesoft.nebula.client.graph.data.ResultSet;
import com.vesoft.nebula.client.graph.exception.AuthFailedException;
import com.vesoft.nebula.client.graph.exception.ClientServerIncompatibleException;
import com.vesoft.nebula.client.graph.exception.IOErrorException;
import com.vesoft.nebula.client.graph.exception.NotValidConnectionException;
import com.vesoft.nebula.client.graph.net.NebulaPool;
import com.vesoft.nebula.client.graph.net.Session;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.List;
import org.apache.flink.connector.nebula.utils.NebulaConstant;

public class NebulaITTestBase {

    protected static final String META_ADDRESS = "127.0.0.1:9559";
    protected static final String GRAPH_ADDRESS = "127.0.0.1:9669";
    protected static final String USERNAME = "root";
    protected static final String PASSWORD = "nebula";

    protected static Session session;
    protected static NebulaPool pool;

    protected static void initializeNebulaSession() {
        NebulaPoolConfig nebulaPoolConfig = new NebulaPoolConfig();
        String[] addressAndPort = GRAPH_ADDRESS.split(NebulaConstant.COLON);
        List<HostAddress> addresses = Collections.singletonList(
                new HostAddress(addressAndPort[0], Integer.parseInt(addressAndPort[1]))
        );
        pool = new NebulaPool();
        try {
            boolean result = pool.init(addresses, nebulaPoolConfig);
            if (!result) {
                throw new RuntimeException("failed to initialize connection pool");
            }
        } catch (UnknownHostException e) {
            throw new RuntimeException("init nebula pool error", e);
        }
        try {
            session = pool.getSession(USERNAME, PASSWORD, true);
        } catch (NotValidConnectionException
                | AuthFailedException
                | IOErrorException
                | ClientServerIncompatibleException e) {
            throw new RuntimeException("init nebula session error", e);
        }
    }

    protected static void initializeNebulaSchema(String statement) {
        executeNGql(statement);
        // wait for at least two heartbeat cycles
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    protected static void closeNebulaSession() {
        if (session != null) {
            session.release();
        }
        if (pool != null) {
            pool.close();
        }
    }

    protected static ResultSet executeNGql(String stmt) {
        ResultSet response;
        try {
            response = session.execute(stmt);
        } catch (IOErrorException e) {
            throw new RuntimeException(String.format("failed to execute statement %s", stmt), e);
        }
        if (!response.isSucceeded()) {
            throw new RuntimeException(String.format(
                    "failed to execute statement %s with error: %s",
                    stmt, response.getErrorMessage()));
        }
        return response;
    }

    protected static void check(List<Row> expected, String stmt) {
        ResultSet response = executeNGql(stmt);
        if (expected == null || expected.isEmpty()) {
            assertTrue(response.isEmpty());
        } else {
            assertEquals(expected, response.getRows());
        }
    }
}
--------------------------------------------------------------------------------
/connector/src/test/java/org/apache/flink/connector/nebula/sink/NebulaRowVertexOutputFormatConverterTest.java:
--------------------------------------------------------------------------------

package org.apache.flink.connector.nebula.sink;

import com.vesoft.nebula.PropertyType;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.connector.nebula.statement.VertexExecutionOptions;
import org.apache.flink.connector.nebula.utils.NebulaVertex;
import org.apache.flink.connector.nebula.utils.VidTypeEnum;
import org.apache.flink.types.Row;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NebulaRowVertexOutputFormatConverterTest {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(NebulaRowVertexOutputFormatConverterTest.class);

    VertexExecutionOptions.ExecutionOptionBuilder builder = null;
    Map<String, Integer> schema = new HashMap<>();
    Row row = new Row(9);

    @Before
    public void setUp() {
        builder = new VertexExecutionOptions.ExecutionOptionBuilder()
                .setGraphSpace("test")
                .setTag("person")
                .setIdIndex(0)
                .setFields(Arrays.asList("col1", "col2", "col3", "col4", "col5", "col6", "col7",
                        "col8"))
                .setPositions(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8));

        schema.put("col1", PropertyType.STRING.getValue());
        schema.put("col2", PropertyType.FIXED_STRING.getValue());
        schema.put("col3", PropertyType.INT32.getValue());
        schema.put("col4", PropertyType.DOUBLE.getValue());
        schema.put("col5", PropertyType.DATE.getValue());
        schema.put("col6", PropertyType.DATETIME.getValue());
        schema.put("col7", PropertyType.TIME.getValue());
        schema.put("col8", PropertyType.TIMESTAMP.getValue());

        row.setField(0, 1);
        row.setField(1, "Tom");
        row.setField(2, "Tom");
        row.setField(3, 10);
        row.setField(4, 1.0);
        row.setField(5, "2021-01-01");
        row.setField(6, "2021-01-01T12:00:00");
        row.setField(7, "12:00:00");
        row.setField(8, 372435234);
    }

    @After
    public void tearDown() throws Exception {
    }

    @Test
    public void testCreateVertex() {
        VertexExecutionOptions options = builder.build();
        NebulaRowVertexOutputFormatConverter converter =
                new NebulaRowVertexOutputFormatConverter(options, VidTypeEnum.INT, schema);
        NebulaVertex vertex = converter.createVertex(row, null);
        assert (vertex.getVid().equals("1"));
        assert (vertex.getPropValuesString().equals("\"Tom\",\"Tom\",10,1.0,date(\"2021-01-01\"),"
                + "datetime(\"2021-01-01T12:00:00\"),time(\"12:00:00\"),372435234"));
    }

    /**
     * test create vertex with policy for int vid type
     */
    @Test
    public void testCreateVertexPolicy() {
        VertexExecutionOptions options = builder.setPolicy("HASH").build();
        NebulaRowVertexOutputFormatConverter converter =
                new NebulaRowVertexOutputFormatConverter(options, VidTypeEnum.INT, schema);
        NebulaVertex vertex = converter.createVertex(row, null);
        assert (vertex.getVid().equals("1"));
        assert (vertex.getPropValuesString().equals("\"Tom\",\"Tom\",10,1.0,date(\"2021-01-01\"),"
                + "datetime(\"2021-01-01T12:00:00\"),time(\"12:00:00\"),372435234"));
    }

    /**
     * test create vertex for string vid type
     */
    @Test
    public void testCreateVertexStringId() {
        VertexExecutionOptions options = builder.build();
        NebulaRowVertexOutputFormatConverter converter =
                new NebulaRowVertexOutputFormatConverter(options, VidTypeEnum.STRING, schema);
        NebulaVertex vertex = converter.createVertex(row, null);
        assert (vertex.getVid().equals("\"1\""));
        assert (vertex.getPropValuesString().equals("\"Tom\",\"Tom\",10,1.0,date(\"2021-01-01\"),"
                + "datetime(\"2021-01-01T12:00:00\"),time(\"12:00:00\"),372435234"));
    }
}
"datetime(\"2021-01-01T12:00:00\"),time(\"12:00:00\"),372435234")); 83 | } 84 | 85 | /** 86 | * test create vertex for string vid type 87 | */ 88 | @Test 89 | public void testCreateVertexStringId() { 90 | VertexExecutionOptions options = builder.build(); 91 | NebulaRowVertexOutputFormatConverter converter = 92 | new NebulaRowVertexOutputFormatConverter(options, VidTypeEnum.STRING, schema); 93 | NebulaVertex vertex = converter.createVertex(row, null); 94 | assert (vertex.getVid().equals("\"1\"")); 95 | assert (vertex.getPropValuesString().equals("\"Tom\",\"Tom\",10,1.0,date(\"2021-01-01\")," 96 | + "datetime(\"2021-01-01T12:00:00\"),time(\"12:00:00\"),372435234")); 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /connector/src/test/java/org/apache/flink/connector/nebula/connection/NebulaClientOptionsTest.java: -------------------------------------------------------------------------------- 1 | 2 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 3 | * 4 | * This source code is licensed under Apache 2.0 License. 5 | */ 6 | 7 | package org.apache.flink.connector.nebula.connection; 8 | 9 | import org.apache.flink.connector.nebula.utils.SSLSignType; 10 | import org.junit.Test; 11 | 12 | public class NebulaClientOptionsTest { 13 | @Test 14 | public void testConfigAddress() { 15 | NebulaClientOptions nebulaClientOptions = 16 | new NebulaClientOptions.NebulaClientOptionsBuilder() 17 | .setGraphAddress("127.0.0.1:9669") 18 | .setMetaAddress("127.0.0.1:9559") 19 | .setUsername("root") 20 | .setPassword("nebula") 21 | .setConnectRetry(1) 22 | .setTimeout(1000) 23 | .setEnableGraphSSL(true) 24 | .setEnableMetaSSL(true) 25 | .setSSLSignType(SSLSignType.CA) 26 | .setCaSignParam("caCrtFile", "crtFile", "keyFile") 27 | .setSelfSignParam("crtFile", "keyFile", "password") 28 | .build(); 29 | } 30 | 31 | @Test(expected = IllegalArgumentException.class) 32 | public void testMetaAddressWithEmptyAddress() { 33 | new NebulaClientOptions.NebulaClientOptionsBuilder() 34 | .setGraphAddress("127.0.0.1:9669") 35 | .setMetaAddress(null) 36 | .build(); 37 | } 38 | 39 | @Test 40 | public void testGraphAddressWithEmptyAddress() { 41 | new NebulaClientOptions.NebulaClientOptionsBuilder() 42 | .setGraphAddress(null) 43 | .setMetaAddress("127.0.0.1:9559") 44 | .build(); 45 | } 46 | 47 | 48 | @Test(expected = IllegalArgumentException.class) 49 | public void testIsEnableGraphSsl() { 50 | NebulaClientOptions nebulaClientOptions = new NebulaClientOptions 51 | .NebulaClientOptionsBuilder() 52 | .setGraphAddress(null) 53 | .setMetaAddress("127.0.0.1:9559") 54 | .setEnableMetaSSL(false) 55 | .setEnableStorageSSL(true) 56 | .setSSLSignType(SSLSignType.CA) 57 | .setCaSignParam("caCrtFile", "crtFile", "keyFile") 58 | .build(); 59 | } 60 | 61 | 62 | @Test(expected = IllegalArgumentException.class) 63 | public void testNullSslSignType() { 64 | NebulaClientOptions nebulaClientOptions = new NebulaClientOptions 65 | .NebulaClientOptionsBuilder() 66 | .setGraphAddress(null) 67 | .setMetaAddress("127.0.0.1:9559") 68 | .setEnableGraphSSL(false) 69 | .setEnableMetaSSL(true) 70 | .setSSLSignType(null) 71 | .setCaSignParam("caCrtFile", "crtFile", "keyFile") 72 | .build(); 73 | } 74 | 75 | @Test(expected = IllegalArgumentException.class) 76 | public void testNoSslSignType() { 77 | NebulaClientOptions nebulaClientOptions = new NebulaClientOptions 78 | .NebulaClientOptionsBuilder() 79 | .setGraphAddress(null) 80 | .setMetaAddress("127.0.0.1:9559") 81 | .setEnableGraphSSL(false) 82 | 
--------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaSourceFunction.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2020 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula.source;

import com.vesoft.nebula.client.meta.MetaClient;
import com.vesoft.nebula.client.storage.StorageClient;
import com.vesoft.nebula.client.storage.data.BaseTableRow;
import java.util.List;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.nebula.connection.NebulaClientOptions;
import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider;
import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider;
import org.apache.flink.connector.nebula.statement.ExecutionOptions;
import org.apache.flink.connector.nebula.utils.PartitionUtils;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Implementation of {@link RichParallelSourceFunction} to get NebulaGraph vertex and edge.
 */
public class NebulaSourceFunction extends RichParallelSourceFunction<BaseTableRow> {

    private static final Logger LOG = LoggerFactory.getLogger(NebulaSourceFunction.class);

    private static final long serialVersionUID = -4864517634021753949L;

    private StorageClient storageClient;
    private MetaClient metaClient;
    private final NebulaStorageConnectionProvider storageConnectionProvider;
    private final NebulaMetaConnectionProvider metaConnectionProvider;
    private ExecutionOptions executionOptions;
    /**
     * the number of graph partitions
     */
    private int numPart;

    public NebulaSourceFunction(NebulaStorageConnectionProvider storageConnectionProvider) {
        super();
        this.storageConnectionProvider = storageConnectionProvider;
        NebulaClientOptions nebulaClientOptions =
                storageConnectionProvider.getNebulaClientOptions();
        this.metaConnectionProvider =
                new NebulaMetaConnectionProvider(nebulaClientOptions);
    }

    /**
     * open nebula client
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        storageClient = storageConnectionProvider.getStorageClient();
        metaClient = metaConnectionProvider.getMetaClient();
        numPart = metaClient.getPartsAlloc(executionOptions.getGraphSpace()).size();
    }

    /**
     * close nebula client
     */
    @Override
    public void close() throws Exception {
        try {
            if (storageClient != null) {
                storageClient.close();
            }
            if (metaClient != null) {
                metaClient.close();
            }
        } catch (Exception e) {
            LOG.error("close exception:{}", e.getMessage(), e);
        }
    }

    /**
     * execute scan nebula data
     */
    @Override
    public void run(SourceContext<BaseTableRow> sourceContext) throws Exception {
        RuntimeContext runtimeContext = getRuntimeContext();
        List<Integer> scanParts = PartitionUtils.getScanParts(
                runtimeContext.getIndexOfThisSubtask() + 1,
                numPart,
                runtimeContext.getNumberOfParallelSubtasks());

        NebulaSource nebulaSource;
        if (executionOptions.getDataType().isVertex()) {
            nebulaSource = new NebulaVertexSource(storageClient, executionOptions, scanParts);
        } else {
            nebulaSource = new NebulaEdgeSource(storageClient, executionOptions, scanParts);
        }

        while (nebulaSource.hasNext()) {
            BaseTableRow row = nebulaSource.next();
            sourceContext.collect(row);
        }
    }

    @Override
    public void cancel() {
        try {
            if (storageClient != null) {
                storageClient.close();
            }
            if (metaClient != null) {
                metaClient.close();
            }
        } catch (Exception e) {
            LOG.error("cancel exception:{}", e.getMessage(), e);
        }
    }

    public NebulaSourceFunction setExecutionOptions(ExecutionOptions executionOptions) {
        this.executionOptions = executionOptions;
        return this;
    }
}
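A minimal sketch of attaching the source to a job (hedged: `env` is a StreamExecutionEnvironment, and `clientOptions` and `vertexExecutionOptions` are placeholders configured as elsewhere in this repo):

    NebulaSourceFunction sourceFunction =
            new NebulaSourceFunction(new NebulaStorageConnectionProvider(clientOptions))
                    .setExecutionOptions(vertexExecutionOptions);
    // each parallel subtask scans its share of the space's partitions
    DataStream<BaseTableRow> rows = env.addSource(sourceFunction);
    rows.print();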
--------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaUtils.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2020 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula.utils;

import com.vesoft.nebula.PropertyType;
import com.vesoft.nebula.client.graph.data.HostAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class NebulaUtils {

    public static List<HostAddress> getHostAndPorts(String address) {
        if (address == null || "".equalsIgnoreCase(address)) {
            throw new IllegalArgumentException("empty address");
        }
        List<HostAddress> hostAndPortList = new ArrayList<>();
        for (String addr : address.split(NebulaConstant.COMMA)) {
            String[] hostPort = addr.split(NebulaConstant.COLON);
            if (hostPort.length < 2) {
                throw new IllegalArgumentException("wrong address");
            }
            hostAndPortList.add(new HostAddress(hostPort[0], Integer.parseInt(hostPort[1])));
        }
        return hostAndPortList;
    }

    public static boolean isNumeric(String str) {
        String newStr;
        if (str.startsWith("-")) {
            newStr = str.substring(1);
        } else {
            newStr = str;
        }
        for (char c : newStr.toCharArray()) {
            if (!Character.isDigit(c)) {
                return false;
            }
        }
        return true;
    }


    public static String extraValue(Object value, int type) {
        if (value == null) {
            return null;
        }
        switch (PropertyType.findByValue(type)) {
            case STRING:
            case FIXED_STRING:
                return mkString(escapeUtil(String.valueOf(value)), "\"", "", "\"");
            case DATE:
                return "date(\"" + value + "\")";
            case TIME:
                return "time(\"" + value + "\")";
            case DATETIME:
                return "datetime(\"" + value + "\")";
            case TIMESTAMP: {
                if (isNumeric(String.valueOf(value))) {
                    return String.valueOf(value);
                } else {
                    return "timestamp(\"" + value + "\")";
                }
            }
            case GEOGRAPHY:
                return "ST_GeogFromText(\"" + value + "\")";
            default: {
                return String.valueOf(value);
            }
        }
    }


    public static String escapeUtil(String value) {
        String s = value;
        if (s.contains("\\")) {
            s = s.replaceAll("\\\\", "\\\\\\\\");
        }
        if (s.contains("\t")) {
            s = s.replaceAll("\t", "\\\\t");
        }
        if (s.contains("\n")) {
            s = s.replaceAll("\n", "\\\\n");
        }
        if (s.contains("\"")) {
            s = s.replaceAll("\"", "\\\\\"");
        }
        if (s.contains("'")) {
            s = s.replaceAll("'", "\\\\'");
        }
        if (s.contains("\r")) {
            s = s.replaceAll("\r", "\\\\r");
        }
        if (s.contains("\b")) {
            s = s.replaceAll("\b", "\\\\b");
        }
        return s;
    }

    public static String mkString(String value, String start, String sep, String end) {
        StringBuilder builder = new StringBuilder();
        boolean first = true;
        builder.append(start);
        for (char c : value.toCharArray()) {
            if (first) {
                builder.append(c);
                first = false;
            } else {
                builder.append(sep);
                builder.append(c);
            }
        }
        builder.append(end);
        return builder.toString();
    }

    /**
     * Check valid VID definition
     *
     * @param vidType vid define string
     * @return true if INT | INT64 | FIXED_STRING(n)
     */
    public static boolean checkValidVidType(String vidType) {
        if ("INT".equals(vidType) || "INT64".equals(vidType)) {
            return true;
        }
        String regex = "FIXED_STRING\\(\\d+\\)";
        Pattern pattern = Pattern.compile(regex, Pattern.MULTILINE);
        Matcher matcher = pattern.matcher(vidType);

        return matcher.matches();
    }
}
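A few worked results for the helpers above, matching NebulaUtilsTest earlier in this dump:

    NebulaUtils.getHostAndPorts("127.0.0.1:9669,127.0.0.1:9670");     // two HostAddress entries
    NebulaUtils.extraValue("2021-01-01", PropertyType.DATE.getValue()); // date("2021-01-01")
    NebulaUtils.extraValue("POINT(1 3)", PropertyType.GEOGRAPHY.getValue()); // ST_GeogFromText("POINT(1 3)")
    NebulaUtils.mkString("test", "\"", "", "\""); // "test" including the surrounding quotes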
--------------------------------------------------------------------------------
/example/pom.xml:
--------------------------------------------------------------------------------

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>nebula-flink</artifactId>
        <groupId>com.vesoft</groupId>
        <version>3.8.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>example</artifactId>

    <properties>
        <flink.version>1.14.4</flink.version>
        <scala.binary.version>2.11</scala.binary.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>com.vesoft</groupId>
            <artifactId>nebula-flink-connector</artifactId>
            <version>${project.version}</version>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.25</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-deploy-plugin</artifactId>
                <version>2.8.2</version>
                <executions>
                    <execution>
                        <id>default-deploy</id>
                        <phase>deploy</phase>
                        <configuration>
                            <skip>true</skip>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.sonatype.plugins</groupId>
                <artifactId>nexus-staging-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <id>default-deploy</id>
                        <phase>deploy</phase>
                        <goals>
                            <goal>deploy</goal>
                        </goals>
                        <configuration>
                            <serverId>ossrh</serverId>
                            <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-checkstyle-plugin</artifactId>
                <version>3.1.0</version>
                <configuration>
                    <configLocation>${project.parent.basedir}/nebula_java_style_checks.xml</configLocation>
                    <sourceDirectories>
                        <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
                    </sourceDirectories>
                    <encoding>UTF-8</encoding>
                    <consoleOutput>true</consoleOutput>
                    <failsOnError>false</failsOnError>
                    <includeTestSourceDirectory>true</includeTestSourceDirectory>
                    <maxAllowedViolations>0</maxAllowedViolations>
                    <violationSeverity>warning</violationSeverity>
                    <suppressionsLocation>${project.parent.basedir}/checkstyle-suppressions.xml</suppressionsLocation>
                    <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
                    <testSourceDirectories>${project.basedir}/src/</testSourceDirectories>
                </configuration>
                <executions>
                    <execution>
                        <id>checkstyle</id>
                        <phase>validate</phase>
                        <goals>
                            <goal>check</goal>
                        </goals>
                    </execution>
                </executions>
                <dependencies>
                    <dependency>
                        <groupId>com.puppycrawl.tools</groupId>
                        <artifactId>checkstyle</artifactId>
                        <version>8.29</version>
                    </dependency>
                </dependencies>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
--------------------------------------------------------------------------------
/connector/src/test/java/org/apache/flink/connector/nebula/sink/NebulaRowEdgeOutputFormatConverterTest.java:
--------------------------------------------------------------------------------

package org.apache.flink.connector.nebula.sink;

import com.vesoft.nebula.PropertyType;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions;
import org.apache.flink.connector.nebula.utils.NebulaEdge;
import org.apache.flink.connector.nebula.utils.PolicyEnum;
import org.apache.flink.connector.nebula.utils.VidTypeEnum;
import org.apache.flink.types.Row;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NebulaRowEdgeOutputFormatConverterTest {

    private static final Logger LOGGER =
            LoggerFactory.getLogger(NebulaRowEdgeOutputFormatConverterTest.class);

    EdgeExecutionOptions.ExecutionOptionBuilder builder = null;
    Map<String, Integer> schema = new HashMap<>();
    Row row = new Row(10);

    @Before
    public void setUp() {
        builder = new EdgeExecutionOptions.ExecutionOptionBuilder()
                .setGraphSpace("test")
                .setEdge("friend")
                .setSrcIndex(0)
                .setDstIndex(1)
                .setFields(Arrays.asList("col1", "col2", "col3", "col4", "col5", "col6", "col7",
                        "col8"))
                .setPositions(Arrays.asList(2, 3, 4, 5, 6, 7, 8, 9));

        schema.put("col1", PropertyType.STRING.getValue());
        schema.put("col2", PropertyType.FIXED_STRING.getValue());
        schema.put("col3", PropertyType.INT32.getValue());
        schema.put("col4", PropertyType.DOUBLE.getValue());
        schema.put("col5", PropertyType.DATE.getValue());
        schema.put("col6", PropertyType.DATETIME.getValue());
        schema.put("col7", PropertyType.TIME.getValue());
        schema.put("col8", PropertyType.TIMESTAMP.getValue());

        row.setField(0, 1);
        row.setField(1, 2);
        row.setField(2, "Tom");
        row.setField(3, "Tom");
        row.setField(4, 10);
        row.setField(5, 1.0);
        row.setField(6, "2021-01-01");
        row.setField(7, "2021-01-01T12:00:00");
        row.setField(8, "12:00:00");
        row.setField(9, 372435234);
    }

    @After
    public void tearDown() {
    }

    /**
     * test create edge for int id
     */
    @Test
    public void testCreateEdgeIntId() {
        EdgeExecutionOptions options = builder.build();
        NebulaRowEdgeOutputFormatConverter converter =
                new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.INT, schema);
        NebulaEdge edge = converter.createEdge(row, null);
        assert (edge.getSource().equals("1"));
        assert (edge.getTarget().equals("2"));
        assert (edge.getRank() == null);
        assert (edge.getPropValuesString().equals("\"Tom\",\"Tom\",10,1.0,date(\"2021-01-01\"),"
                + "datetime(\"2021-01-01T12:00:00\"),time(\"12:00:00\"),372435234"));

    }

    /**
     * test create edge with rank for int id
     */
    @Test
    public void testCreateEdgeIntIdWithRank() {
        EdgeExecutionOptions options = builder.setRankIndex(4).build();
        NebulaRowEdgeOutputFormatConverter converter =
                new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.INT, schema);
        NebulaEdge edge = converter.createEdge(row, null);
        assert (edge.getSource().equals("1"));
        assert (edge.getTarget().equals("2"));
        assert (edge.getRank() == 10L);
    }

    /**
     * test create edge with policy for int id
     */
    @Test
    public void testCreateEdgeIntIdWithPolicy() {
        EdgeExecutionOptions options = builder.build();
        NebulaRowEdgeOutputFormatConverter converter =
                new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.INT, schema);
        NebulaEdge edge = converter.createEdge(row, PolicyEnum.HASH);
        assert (edge.getSource().equals("1"));
        assert (edge.getTarget().equals("2"));
        assert (edge.getRank() == null);
    }

    /**
     * test create edge for String id
     */
    @Test
    public void testCreateEdgeStringId() {
        EdgeExecutionOptions options = builder.build();
        NebulaRowEdgeOutputFormatConverter converter =
                new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.STRING, schema);
        NebulaEdge edge = converter.createEdge(row, null);
        assert (edge.getSource().equals("\"1\""));
        assert (edge.getTarget().equals("\"2\""));
        assert (edge.getRank() == null);
    }

    /**
     * test create edge with rank for String id
     */
    @Test
    public void testCreateEdgeStringIdWithRank() {
        EdgeExecutionOptions options = builder.setRankIndex(4).build();
        NebulaRowEdgeOutputFormatConverter converter =
                new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.STRING, schema);
        NebulaEdge edge = converter.createEdge(row, null);
        assert (edge.getSource().equals("\"1\""));
        assert (edge.getTarget().equals("\"2\""));
        assert (edge.getRank() == 10L);
    }
}
--------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaVertices.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2021 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula.utils;

import static org.apache.flink.connector.nebula.utils.NebulaConstant.BATCH_INSERT_TEMPLATE;
import static org.apache.flink.connector.nebula.utils.NebulaConstant.DELETE_VERTEX_TEMPLATE;
import static org.apache.flink.connector.nebula.utils.NebulaConstant.DELETE_VERTEX_TEMPLATE_WITH_EDGE;
import static org.apache.flink.connector.nebula.utils.NebulaConstant.ENDPOINT_TEMPLATE;
import static org.apache.flink.connector.nebula.utils.NebulaConstant.UPDATE_VALUE_TEMPLATE;
import static org.apache.flink.connector.nebula.utils.NebulaConstant.UPDATE_VERTEX_TEMPLATE;
import static org.apache.flink.connector.nebula.utils.NebulaConstant.VERTEX_VALUE_TEMPLATE;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

public class NebulaVertices implements Serializable {

    private String tagName;
    private List<String> propNames;
    private List<NebulaVertex> vertices;
    private PolicyEnum policy = null;

    private boolean isDeleteExecutedWithEdges;

    public NebulaVertices(String tagName, List<String> propNames, List<NebulaVertex> vertices,
                          PolicyEnum policy) {
        this.tagName = tagName;
        this.propNames = propNames;
        this.vertices = vertices;
        this.policy = policy;
    }

    public NebulaVertices(String tagName, List<String> propNames, List<NebulaVertex> vertices,
                          PolicyEnum policy, boolean isDeleteExecutedWithEdges) {
        this.tagName = tagName;
        this.propNames = propNames;
        this.vertices = vertices;
        this.policy = policy;
        this.isDeleteExecutedWithEdges = isDeleteExecutedWithEdges;
    }

    public String getPropNames() {
        List<String> escapePropNames = new ArrayList<>();
        for (String propName : propNames) {
            escapePropNames.add(NebulaUtils.mkString(propName, "`", "", "`"));
        }
        return String.join(",", escapePropNames);
    }

    public void setPropNames(List<String> propNames) {
        this.propNames = propNames;
    }

    public List<NebulaVertex> getVertices() {
        return vertices;
    }

    public void setVertices(List<NebulaVertex> vertices) {
        this.vertices = vertices;
    }

    public PolicyEnum getPolicy() {
        return policy;
    }

    public void setPolicy(PolicyEnum policy) {
        this.policy = policy;
    }

    /**
     * construct Nebula batch insert ngql for vertex
     *
     * @return ngql
     */
    public String getInsertStatement() {
        List<String> values = new ArrayList<>();
        for (NebulaVertex vertex : vertices) {
            String vertexId = getVertexId(vertex);
            values.add(String.format(VERTEX_VALUE_TEMPLATE, vertexId,
                    vertex.getPropValuesString()));
        }
        return String.format(BATCH_INSERT_TEMPLATE, DataTypeEnum.VERTEX.name(), tagName,
                getPropNames(), String.join(",", values));
    }
    /**
     * construct Nebula batch update ngql for vertex
     *
     * @return ngql
     */
    public String getUpdateStatement() {
        List<String> statements = new ArrayList<>();
        // for update mode, each vertex constructs one update statement.
        for (NebulaVertex vertex : vertices) {
            String vertexId = getVertexId(vertex);

            List<String> updateProps = new ArrayList<>();
            for (int i = 0; i < propNames.size(); i++) {
                updateProps.add(String.format(UPDATE_VALUE_TEMPLATE, propNames.get(i),
                        vertex.getPropValues().get(i)));
            }
            String updatePropsString = String.join(",", updateProps);
            String statement = String.format(UPDATE_VERTEX_TEMPLATE, DataTypeEnum.VERTEX.name(),
                    tagName, vertexId, updatePropsString);
            statements.add(statement);
        }
        return String.join(";", statements);
    }

    /**
     * construct Nebula batch delete ngql for vertex
     *
     * @return ngql
     */
    public String getDeleteStatement() {
        List<String> vertexIds = new ArrayList<>();
        for (NebulaVertex vertex : vertices) {
            String vertexId = getVertexId(vertex);
            vertexIds.add(vertexId);
        }
        String template = isDeleteExecutedWithEdges
                ? DELETE_VERTEX_TEMPLATE_WITH_EDGE
                : DELETE_VERTEX_TEMPLATE;
        return String.format(template, String.join(",", vertexIds));
    }

    /**
     * format vertex id with policy
     */
    private String getVertexId(NebulaVertex vertex) {
        String vertexId = null;
        if (policy == null) {
            vertexId = vertex.getVid();
        } else {
            switch (policy) {
                case HASH:
                    vertexId = String.format(ENDPOINT_TEMPLATE, PolicyEnum.HASH.name(),
                            vertex.getVid());
                    break;
                case UUID:
                    vertexId = String.format(ENDPOINT_TEMPLATE, PolicyEnum.UUID.name(),
                            vertex.getVid());
                    break;
                default:
                    throw new IllegalArgumentException("policy is not supported");
            }
        }
        return vertexId;
    }
}
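For intuition, the statements assembled above look roughly like the following nGQL (hedged: the exact templates live in NebulaConstant, which is not shown in this dump; standard NebulaGraph syntax is assumed here):

    INSERT VERTEX `person`(`col1`,`col2`) VALUES "v1":("Tom",10),"v2":("Bob",11)
    UPDATE VERTEX ON `person` "v1" SET `col1`="Tom",`col2`=10
    DELETE VERTEX "v1","v2"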
--------------------------------------------------------------------------------
/connector/src/main/java/org.apache.flink/connector/nebula/connection/NebulaMetaConnectionProvider.java:
--------------------------------------------------------------------------------

/* Copyright (c) 2020 vesoft inc. All rights reserved.
 *
 * This source code is licensed under Apache 2.0 License.
 */

package org.apache.flink.connector.nebula.connection;

import com.facebook.thrift.TException;
import com.vesoft.nebula.PropertyType;
import com.vesoft.nebula.client.graph.data.CASignedSSLParam;
import com.vesoft.nebula.client.graph.data.HostAddress;
import com.vesoft.nebula.client.graph.data.SSLParam;
import com.vesoft.nebula.client.graph.data.SelfSignedSSLParam;
import com.vesoft.nebula.client.graph.exception.ClientServerIncompatibleException;
import com.vesoft.nebula.client.meta.MetaClient;
import com.vesoft.nebula.client.meta.exception.ExecuteFailedException;
import com.vesoft.nebula.meta.ColumnDef;
import com.vesoft.nebula.meta.Schema;
import com.vesoft.nebula.meta.SpaceItem;
import java.io.Serializable;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.flink.connector.nebula.utils.VidTypeEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NebulaMetaConnectionProvider implements Serializable {
    private static final Logger LOG = LoggerFactory.getLogger(NebulaMetaConnectionProvider.class);
    private static final long serialVersionUID = -1045337416133033961L;

    private final NebulaClientOptions nebulaClientOptions;

    public NebulaMetaConnectionProvider(NebulaClientOptions nebulaClientOptions) {
        this.nebulaClientOptions = nebulaClientOptions;
    }

    public MetaClient getMetaClient() throws TException, ClientServerIncompatibleException,
            UnknownHostException {
        List<HostAddress> addresses = nebulaClientOptions.getMetaAddress();
        int timeout = nebulaClientOptions.getTimeout();
        int retry = nebulaClientOptions.getConnectRetry();
        MetaClient metaClient;
        if (nebulaClientOptions.isEnableMetaSSL()) {
            switch (nebulaClientOptions.getSSLSignType()) {
                case CA: {
                    CASignParams caSignParams = nebulaClientOptions.getCaSignParam();
                    SSLParam sslParam = new CASignedSSLParam(caSignParams.getCaCrtFilePath(),
                            caSignParams.getCrtFilePath(), caSignParams.getKeyFilePath());
                    metaClient = new MetaClient(addresses, timeout, retry, retry, true, sslParam);
                    break;
                }
                case SELF: {
                    SelfSignParams selfSignParams = nebulaClientOptions.getSelfSignParam();
                    SSLParam sslParam = new SelfSignedSSLParam(selfSignParams.getCrtFilePath(),
                            selfSignParams.getKeyFilePath(), selfSignParams.getPassword());
                    metaClient = new MetaClient(addresses, timeout, retry, retry, true, sslParam);
                    break;
                }
                default:
                    throw new IllegalArgumentException("ssl sign type is not supported.");
            }
        } else {
            metaClient = new MetaClient(addresses, timeout, retry, retry);
        }

        metaClient.connect();
        return metaClient;
    }

    /**
     * get Nebula Graph vid type
     *
     * @param space nebula graph space
     * @return {@link VidTypeEnum}
     */
    public VidTypeEnum getVidType(MetaClient metaClient, String space) {
        SpaceItem spaceItem;
        try {
            spaceItem = metaClient.getSpace(space);
        } catch (TException | ExecuteFailedException e) {
            LOG.error("get space info error, ", e);
            throw new RuntimeException(e);
        }
        PropertyType vidType = spaceItem.getProperties().getVid_type().getType();
        if (vidType == PropertyType.FIXED_STRING) {
            return VidTypeEnum.STRING;
        } else {
            return VidTypeEnum.INT;
        }
    }
    /**
     * get schema info for tag
     *
     * @param space nebula graph space
     * @param tag   nebula graph tag
     * @return map of property name -> {@link PropertyType} value code
     */
    public Map<String, Integer> getTagSchema(MetaClient metaClient, String space, String tag) {
        Map<String, Integer> schema = new HashMap<>();
        Schema tagSchema;
        try {
            tagSchema = metaClient.getTag(space, tag);
        } catch (TException | ExecuteFailedException e) {
            LOG.error("get tag schema error, ", e);
            throw new RuntimeException(e);
        }
        List<ColumnDef> columnDefs = tagSchema.getColumns();
        for (ColumnDef col : columnDefs) {
            schema.put(new String(col.getName()), col.getType().getType().getValue());
        }
        return schema;
    }

    /**
     * get schema info for edge
     *
     * @param space nebula graph space
     * @param edge  nebula graph edge
     * @return map of property name -> {@link PropertyType} value code
     */
    public Map<String, Integer> getEdgeSchema(MetaClient metaClient, String space, String edge) {
        Map<String, Integer> schema = new HashMap<>();
        Schema edgeSchema;
        try {
            edgeSchema = metaClient.getEdge(space, edge);
        } catch (TException | ExecuteFailedException e) {
            LOG.error("get edge schema error, ", e);
            throw new RuntimeException(e);
        }
        List<ColumnDef> columnDefs = edgeSchema.getColumns();
        for (ColumnDef col : columnDefs) {
            schema.put(new String(col.getName()), col.getType().getType().getValue());
        }
        return schema;
    }
}
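A short sketch of how the sink path uses this provider to discover schema before writing (method names as defined above; the space and tag names are placeholders):

    NebulaMetaConnectionProvider metaProvider = new NebulaMetaConnectionProvider(clientOptions);
    MetaClient metaClient = metaProvider.getMetaClient();
    VidTypeEnum vidType = metaProvider.getVidType(metaClient, "test");
    // property name -> PropertyType value code, e.g. PropertyType.STRING.getValue()
    Map<String, Integer> personSchema = metaProvider.getTagSchema(metaClient, "test", "person");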
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.vesoft</groupId>
    <artifactId>nebula-flink</artifactId>
    <packaging>pom</packaging>
    <version>3.8.0</version>

    <modules>
        <module>connector</module>
        <module>example</module>
    </modules>

    <name>nebula-flink-connector</name>
    <description>Nebula Flink Connector</description>
    <url>https://github.com/vesoft-inc/nebula-flink-connector</url>
    <scm>
        <connection>scm:git:https://github.com/vesoft-inc/nebula</connection>
        <url>https://github.com/vesoft-inc/nebula</url>
        <developerConnection>scm:git:https://github.com/vesoft-inc/nebula</developerConnection>
    </scm>

    <licenses>
        <license>
            <name>Apache License, Version 2.0</name>
            <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
            <distribution>repo</distribution>
            <comments>license</comments>
        </license>
    </licenses>

    <developers>
        <developer>
            <id>nebula</id>
            <name>Nebula Graph</name>
            <email>nebula-flink-connector@vesoft-inc.com</email>
            <organization>vesoft</organization>
            <roles>
                <role>architect</role>
                <role>developer</role>
            </roles>
        </developer>
    </developers>

    <distributionManagement>
        <repository>
            <id>ossrh</id>
            <name>Nexus Release Repository</name>
            <url>https://oss.sonatype.org/service/local/staging/deploy/maven2</url>
        </repository>
        <snapshotRepository>
            <id>ossrh</id>
            <name>Nexus Snapshot Repository</name>
            <url>https://oss.sonatype.org/content/repositories/snapshots</url>
        </snapshotRepository>
    </distributionManagement>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <scala.binary.version>2.11</scala.binary.version>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <junit.version>4.13.1</junit.version>
    </properties>

    <profiles>
        <profile>
            <id>deploy</id>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-source-plugin</artifactId>
                        <version>3.2.0</version>
                        <executions>
                            <execution>
                                <id>attach-sources</id>
                                <goals>
                                    <goal>jar-no-fork</goal>
                                </goals>
                            </execution>
                        </executions>
                    </plugin>
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-javadoc-plugin</artifactId>
                        <version>3.2.0</version>
                        <executions>
                            <execution>
                                <id>attach-javadocs</id>
                                <goals>
                                    <goal>jar</goal>
                                </goals>
                            </execution>
                        </executions>
                    </plugin>
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-gpg-plugin</artifactId>
                        <version>1.6</version>
                        <executions>
                            <execution>
                                <id>sign-artifacts</id>
                                <phase>verify</phase>
                                <goals>
                                    <goal>sign</goal>
                                </goals>
                                <configuration>
                                    <gpgArguments>
                                        <arg>--pinentry-mode</arg>
                                        <arg>loopback</arg>
                                    </gpgArguments>
                                </configuration>
                            </execution>
                        </executions>
                    </plugin>
                </plugins>
            </build>
        </profile>
    </profiles>

    <build>
        <plugins>
            <plugin>
                <groupId>org.sonatype.plugins</groupId>
                <artifactId>nexus-staging-maven-plugin</artifactId>
                <version>1.6.8</version>
                <extensions>true</extensions>
                <configuration>
                    <serverId>ossrh</serverId>
                    <nexusUrl>https://oss.sonatype.org/</nexusUrl>
                    <autoReleaseAfterClose>true</autoReleaseAfterClose>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
/connector/src/main/java/org.apache.flink/graph/descriptors/NebulaValidator.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.graph.descriptors; 7 | 8 | import static org.apache.flink.table.descriptors.Schema.SCHEMA; 9 | 10 | import java.util.Optional; 11 | import org.apache.flink.table.api.TableSchema; 12 | import org.apache.flink.table.descriptors.ConnectorDescriptorValidator; 13 | import org.apache.flink.table.descriptors.DescriptorProperties; 14 | import org.apache.flink.table.utils.TableSchemaUtils; 15 | import org.apache.flink.util.Preconditions; 16 | 17 | public class NebulaValidator extends ConnectorDescriptorValidator { 18 | public static final String CONNECTOR_TYPE_VALUE_NEBULA = "nebula"; 19 | 20 | public static final String CONNECTOR_ADDRESS = "connector.address"; 21 | public static final String CONNECTOR_SPACE = "connector.space"; 22 | public static final String CONNECTOR_LABEL = "connector.label"; 23 | public static final String CONNECTOR_USERNAME = "connector.username"; 24 | public static final String CONNECTOR_PASSWORD = "connector.password"; 25 | 26 | public static final String CONNECTOR_READ_QUERY = "connector.read.query"; 27 | public static final String CONNECTOR_READ_PARTITION_COLUMN = "connector.read.partition.column"; 28 | public static final String CONNECTOR_READ_PARTITION_LOWER_BOUND = "connector.read.partition" 29 | + ".lower-bound"; 30 | public static final String CONNECTOR_READ_PARTITION_UPPER_BOUND = "connector.read.partition" 31 | + ".upper-bound"; 32 | public static final String CONNECTOR_READ_PARTITION_NUM = "connector.read.partition.num"; 33 | public static final String CONNECTOR_READ_FETCH_SIZE = "connector.read.fetch-size"; 34 | 35 | public static final String CONNECTOR_LOOKUP_CACHE_MAX_ROWS = "connector.lookup.cache.max-rows"; 36 | public static final String CONNECTOR_LOOKUP_CACHE_TTL = "connector.lookup.cache.ttl"; 37 | public static final String CONNECTOR_LOOKUP_MAX_RETRIES = "connector.lookup.max-retries"; 38 | 39 | public static final String CONNECTOR_WRITE_FLUSH_MAX_ROWS = "connector.write.flush.max-rows"; 40 | public static final String CONNECTOR_WRITE_FLUSH_INTERVAL = "connector.write.flush.interval"; 41 | public static final String CONNECTOR_WRITE_MAX_RETRIES = "connector.write.max-retries"; 42 | 43 | @Override 44 | public void validate(DescriptorProperties properties) { 45 | super.validate(properties); 46 | validateCommonProperties(properties); 47 | validateReadProperties(properties); 48 | validateLookupProperties(properties); 49 | validateSinkProperties(properties); 50 | } 51 | 52 | private void validateCommonProperties(DescriptorProperties properties) { 53 | properties.validateString(CONNECTOR_ADDRESS, false, 1); 54 | properties.validateString(CONNECTOR_SPACE, false, 1); 55 | properties.validateString(CONNECTOR_LABEL, true); 56 | properties.validateString(CONNECTOR_USERNAME, true); 57 | properties.validateString(CONNECTOR_PASSWORD, true); 58 | 59 | final String address = properties.getString(CONNECTOR_ADDRESS); 60 | TableSchema schema = TableSchemaUtils.getPhysicalSchema(properties.getTableSchema(SCHEMA)); 61 | 62 | Optional password = properties.getOptionalString(CONNECTOR_PASSWORD); 63 | if (password.isPresent()) { 64 | Preconditions.checkArgument( 65 | properties.getOptionalString(CONNECTOR_USERNAME).isPresent(), 66 | "Nebula username must be 
provided when nebula password is provided"); 67 | } 68 | } 69 | 70 | private void validateReadProperties(DescriptorProperties properties) { 71 | properties.validateString(CONNECTOR_READ_QUERY, true); 72 | properties.validateString(CONNECTOR_READ_PARTITION_COLUMN, true); 73 | properties.validateLong(CONNECTOR_READ_PARTITION_LOWER_BOUND, true); 74 | properties.validateLong(CONNECTOR_READ_PARTITION_UPPER_BOUND, true); 75 | properties.validateInt(CONNECTOR_READ_PARTITION_NUM, true); 76 | properties.validateInt(CONNECTOR_READ_FETCH_SIZE, true); 77 | 78 | Optional<Long> lowerBound = 79 | properties.getOptionalLong(CONNECTOR_READ_PARTITION_LOWER_BOUND); 80 | Optional<Long> upperBound = 81 | properties.getOptionalLong(CONNECTOR_READ_PARTITION_UPPER_BOUND); 82 | if (lowerBound.isPresent() && upperBound.isPresent()) { 83 | Preconditions.checkArgument(lowerBound.get() <= upperBound.get(), 84 | CONNECTOR_READ_PARTITION_LOWER_BOUND 85 | + " must not be larger than " 86 | + CONNECTOR_READ_PARTITION_UPPER_BOUND); 87 | } 88 | 89 | checkAllOrNone(properties, new String[]{CONNECTOR_READ_PARTITION_COLUMN, 90 | CONNECTOR_READ_PARTITION_LOWER_BOUND, CONNECTOR_READ_PARTITION_UPPER_BOUND, 91 | CONNECTOR_READ_PARTITION_NUM}); 92 | } 93 | 94 | private void validateLookupProperties(DescriptorProperties properties) { 95 | properties.validateLong(CONNECTOR_LOOKUP_CACHE_MAX_ROWS, true); 96 | properties.validateDuration(CONNECTOR_LOOKUP_CACHE_TTL, true, 1); 97 | properties.validateInt(CONNECTOR_LOOKUP_MAX_RETRIES, true); 98 | 99 | checkAllOrNone(properties, new String[]{CONNECTOR_LOOKUP_CACHE_MAX_ROWS, 100 | CONNECTOR_LOOKUP_CACHE_TTL}); 101 | } 102 | 103 | private void validateSinkProperties(DescriptorProperties properties) { 104 | properties.validateInt(CONNECTOR_WRITE_FLUSH_MAX_ROWS, true); 105 | properties.validateDuration(CONNECTOR_WRITE_FLUSH_INTERVAL, true, 1); 106 | properties.validateInt(CONNECTOR_WRITE_MAX_RETRIES, true); 107 | } 108 | 109 | private void checkAllOrNone(DescriptorProperties properties, String[] propertyNames) { 110 | int presentCount = 0; 111 | for (String name : propertyNames) { 112 | if (properties.getOptionalString(name).isPresent()) { 113 | presentCount++; 114 | } 115 | } 116 | Preconditions.checkArgument(presentCount == 0 || presentCount == propertyNames.length, 117 | "Either all or none of the following properties should be provided:\n" 118 | + String.join("\n", propertyNames)); 119 | } 120 | } 121 | --------------------------------------------------------------------------------
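To make the all-or-none contract enforced by checkAllOrNone concrete, a sketch of a property map that validateReadProperties would accept; everything except the NebulaValidator constants and the Flink DescriptorProperties API is hypothetical.

import org.apache.flink.graph.descriptors.NebulaValidator;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class ValidatorSketch {
    public static void main(String[] args) {
        DescriptorProperties props = new DescriptorProperties(true);
        props.putString(NebulaValidator.CONNECTOR_ADDRESS, "127.0.0.1:9669"); // placeholder
        props.putString(NebulaValidator.CONNECTOR_SPACE, "flinkSink");        // placeholder
        // The partition options are all-or-none: setting only one of these four
        // keys would fail checkAllOrNone inside validateReadProperties.
        props.putString(NebulaValidator.CONNECTOR_READ_PARTITION_COLUMN, "id");
        props.putLong(NebulaValidator.CONNECTOR_READ_PARTITION_LOWER_BOUND, 0L);
        props.putLong(NebulaValidator.CONNECTOR_READ_PARTITION_UPPER_BOUND, 1000L);
        props.putInt(NebulaValidator.CONNECTOR_READ_PARTITION_NUM, 4);
        // new NebulaValidator().validate(props) would additionally require the table
        // schema to be registered under the 'schema' key, so it is not invoked here.
    }
}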
/connector/src/test/java/org/apache/flink/connector/nebula/utils/NebulaVerticesTest.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import java.util.ArrayList; 9 | import java.util.Arrays; 10 | import java.util.List; 11 | import junit.framework.TestCase; 12 | 13 | public class NebulaVerticesTest extends TestCase { 14 | 15 | List<NebulaVertex> vertices = new ArrayList<>(); 16 | String tagName = "person"; 17 | List<String> propNames = Arrays.asList( 18 | "col_string", 19 | "col_fixed_string", 20 | "col_bool", 21 | "col_int", 22 | "col_int64", 23 | "col_double", 24 | "col_date"); 25 | List<String> props1 = Arrays.asList("\"Tom\"", "\"Tom\"", "true", "10", "100", "1.0", 26 | "2021-11-12"); 27 | List<String> props2 = Arrays.asList("\"Bob\"", "\"Bob\"", "false", "20", "200", "2.0", 28 | "2021-05-01"); 29 | 30 | 31 | public void testGetInsertStatement() { 32 | vertices.add(new NebulaVertex("\"vid1\"", props1)); 33 | vertices.add(new NebulaVertex("\"vid2\"", props2)); 34 | 35 | NebulaVertices nebulaVertices = new NebulaVertices(tagName, propNames, vertices, null); 36 | String vertexStatement = nebulaVertices.getInsertStatement(); 37 | 38 | String expectStatement = "INSERT VERTEX `person`(`col_string`,`col_fixed_string`," 39 | + "`col_bool`," 40 | + "`col_int`,`col_int64`,`col_double`,`col_date`) VALUES \"vid1\": (" 41 | + String.join(",", props1) 42 | + "),\"vid2\": (" + String.join(",", props2) + ")"; 43 | assert (expectStatement.equals(vertexStatement)); 44 | } 45 | 46 | 47 | public void testGetInsertStatementWithPolicy() { 48 | vertices.add(new NebulaVertex("vid1", props1)); 49 | vertices.add(new NebulaVertex("vid2", props2)); 50 | 51 | NebulaVertices nebulaVerticesWithPolicy = new NebulaVertices(tagName, propNames, vertices, 52 | PolicyEnum.HASH); 53 | String vertexStatementWithPolicy = nebulaVerticesWithPolicy.getInsertStatement(); 54 | 55 | String expectStatementWithPolicy = "INSERT VERTEX `person`(`col_string`," 56 | + "`col_fixed_string`,`col_bool`," 57 | + "`col_int`,`col_int64`,`col_double`,`col_date`) VALUES HASH(\"vid1\"): (" 58 | + String.join(",", props1) 59 | + "),HASH(\"vid2\"): (" + String.join(",", props2) + ")"; 60 | assert (expectStatementWithPolicy.equals(vertexStatementWithPolicy)); 61 | } 62 | 63 | public void testGetUpdateStatement() { 64 | vertices.add(new NebulaVertex("\"vid1\"", props1)); 65 | vertices.add(new NebulaVertex("\"vid2\"", props2)); 66 | 67 | NebulaVertices nebulaVertices = new NebulaVertices(tagName, propNames, vertices, null); 68 | String vertexStatement = nebulaVertices.getUpdateStatement(); 69 | String expectStatement = "UPDATE VERTEX ON `person` \"vid1\" SET `col_string`=\"Tom\"," 70 | + "`col_fixed_string`=\"Tom\",`col_bool`=true,`col_int`=10,`col_int64`=100," 71 | + "`col_double`=1.0,`col_date`=2021-11-12;" 72 | + "UPDATE VERTEX ON `person` \"vid2\" SET `col_string`=\"Bob\"," 73 | + "`col_fixed_string`=\"Bob\",`col_bool`=false,`col_int`=20,`col_int64`=200," 74 | + "`col_double`=2.0,`col_date`=2021-05-01"; 75 | assert (vertexStatement.equals(expectStatement)); 76 | } 77 | 78 | public void testGetUpdateStatementWithPolicy() { 79 | vertices.add(new NebulaVertex("vid1", props1)); 80 | vertices.add(new NebulaVertex("vid2", props2)); 81 | 82 | NebulaVertices nebulaVertices = new NebulaVertices(tagName, propNames, vertices, 83 | PolicyEnum.HASH); 84 | String vertexStatement = nebulaVertices.getUpdateStatement(); 85 | String expectStatement = "UPDATE VERTEX ON `person` HASH(\"vid1\") SET " 86 | + "`col_string`=\"Tom\",`col_fixed_string`=\"Tom\",`col_bool`=true,`col_int`=10," 87 | + "`col_int64`=100,`col_double`=1.0,`col_date`=2021-11-12;" 88 | + "UPDATE
VERTEX ON `person` HASH(\"vid2\") SET `col_string`=\"Bob\"," 89 | + "`col_fixed_string`=\"Bob\",`col_bool`=false,`col_int`=20,`col_int64`=200," 90 | + "`col_double`=2.0,`col_date`=2021-05-01"; 91 | assert (vertexStatement.equals(expectStatement)); 92 | } 93 | 94 | public void testGetDeleteStatement() { 95 | vertices.add(new NebulaVertex("\"vid1\"", props1)); 96 | vertices.add(new NebulaVertex("\"vid2\"", props2)); 97 | 98 | NebulaVertices nebulaVertices = new NebulaVertices(tagName, propNames, vertices, null); 99 | String vertexStatement = nebulaVertices.getDeleteStatement(); 100 | String expectStatement = "DELETE VERTEX \"vid1\",\"vid2\""; 101 | assert (vertexStatement.equals(expectStatement)); 102 | } 103 | 104 | public void testGetDeleteStatementWithPolicy() { 105 | vertices.add(new NebulaVertex("vid1", props1)); 106 | vertices.add(new NebulaVertex("vid2", props2)); 107 | 108 | NebulaVertices nebulaVertices = new NebulaVertices(tagName, propNames, vertices, 109 | PolicyEnum.HASH); 110 | String vertexStatement = nebulaVertices.getDeleteStatement(); 111 | String expectStatement = "DELETE VERTEX HASH(\"vid1\"),HASH(\"vid2\")"; 112 | assert (vertexStatement.equals(expectStatement)); 113 | } 114 | 115 | public void testGetDeleteStatementWithEdges() { 116 | vertices.add(new NebulaVertex("\"vid1\"", props1)); 117 | vertices.add(new NebulaVertex("\"vid2\"", props2)); 118 | 119 | NebulaVertices nebulaVertices = new NebulaVertices( 120 | tagName, propNames, vertices, null, true 121 | ); 122 | String vertexStatement = nebulaVertices.getDeleteStatement(); 123 | String expectStatement = "DELETE VERTEX \"vid1\",\"vid2\" WITH EDGE"; 124 | assert (vertexStatement.equals(expectStatement)); 125 | } 126 | 127 | public void testGetDeleteStatementWithPolicyAndEdges() { 128 | vertices.add(new NebulaVertex("vid1", props1)); 129 | vertices.add(new NebulaVertex("vid2", props2)); 130 | 131 | NebulaVertices nebulaVertices = new NebulaVertices(tagName, propNames, vertices, 132 | PolicyEnum.HASH, true); 133 | String vertexStatement = nebulaVertices.getDeleteStatement(); 134 | String expectStatement = "DELETE VERTEX HASH(\"vid1\"),HASH(\"vid2\") WITH EDGE"; 135 | assert (vertexStatement.equals(expectStatement)); 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /connector/src/main/java/org.apache.flink/connector/nebula/source/NebulaInputFormat.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 
4 | */ 5 | 6 | package org.apache.flink.connector.nebula.source; 7 | 8 | import com.vesoft.nebula.client.meta.MetaClient; 9 | import com.vesoft.nebula.client.storage.StorageClient; 10 | import com.vesoft.nebula.client.storage.data.BaseTableRow; 11 | import java.io.IOException; 12 | import java.util.ArrayList; 13 | import java.util.List; 14 | import org.apache.flink.api.common.io.DefaultInputSplitAssigner; 15 | import org.apache.flink.api.common.io.RichInputFormat; 16 | import org.apache.flink.api.common.io.statistics.BaseStatistics; 17 | import org.apache.flink.configuration.Configuration; 18 | import org.apache.flink.connector.nebula.connection.NebulaClientOptions; 19 | import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider; 20 | import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider; 21 | import org.apache.flink.connector.nebula.statement.ExecutionOptions; 22 | import org.apache.flink.connector.nebula.utils.PartitionUtils; 23 | import org.apache.flink.core.io.GenericInputSplit; 24 | import org.apache.flink.core.io.InputSplit; 25 | import org.apache.flink.core.io.InputSplitAssigner; 26 | import org.apache.flink.types.Row; 27 | import org.slf4j.Logger; 28 | import org.slf4j.LoggerFactory; 29 | 30 | /** 31 | * InputFormat to read data from NebulaGraph and generate Rows. 32 | * The InputFormat has to be configured using the supplied 33 | * NebulaStorageConnectionProvider and ExecutionOptions. 34 | * 35 | * @see Row 36 | * @see NebulaStorageConnectionProvider 37 | * @see ExecutionOptions 38 | */ 39 | public abstract class NebulaInputFormat<T> extends RichInputFormat<T, InputSplit> { 40 | protected static final Logger LOG = LoggerFactory.getLogger(NebulaInputFormat.class); 41 | private static final long serialVersionUID = 902031944252613459L; 42 | 43 | protected ExecutionOptions executionOptions; 44 | protected NebulaStorageConnectionProvider storageConnectionProvider; 45 | protected NebulaMetaConnectionProvider metaConnectionProvider; 46 | private transient StorageClient storageClient; 47 | private transient MetaClient metaClient; 48 | 49 | protected Boolean hasNext = false; 50 | protected List<T> rows; 51 | 52 | private NebulaSource nebulaSource; 53 | protected NebulaConverter<T> nebulaConverter; 54 | 55 | private long scannedRows; 56 | /** 57 | * the number of graph partitions 58 | */ 59 | private int numPart; 60 | private int times = 0; // todo rm 61 | 62 | public NebulaInputFormat(NebulaStorageConnectionProvider storageConnectionProvider, 63 | ExecutionOptions executionOptions) { 64 | this.storageConnectionProvider = storageConnectionProvider; 65 | NebulaClientOptions nebulaClientOptions = 66 | storageConnectionProvider.getNebulaClientOptions(); 67 | this.metaConnectionProvider = 68 | new NebulaMetaConnectionProvider(nebulaClientOptions); 69 | this.executionOptions = executionOptions; 70 | } 71 | 72 | @Override 73 | public void configure(Configuration configuration) { 74 | // do nothing 75 | } 76 | 77 | @Override 78 | public void openInputFormat() throws IOException { 79 | try { 80 | storageClient = storageConnectionProvider.getStorageClient(); 81 | metaClient = metaConnectionProvider.getMetaClient(); 82 | numPart = metaClient.getPartsAlloc(executionOptions.getGraphSpace()).size(); 83 | } catch (Exception e) { 84 | LOG.error("connect storage client error", e); 85 | throw new IOException("connect storage client error", e); 86 | } 87 | rows = new ArrayList<>(); 88 | } 89 | 90 | @Override 91 | public void closeInputFormat() throws IOException { 92 | try { 93
if (storageClient != null) { 94 | storageClient.close(); 95 | } 96 | if (metaClient != null) { 97 | metaClient.close(); 98 | } 99 | } catch (Exception e) { 100 | LOG.error("close client error", e); 101 | throw new IOException("close client error", e); 102 | } 103 | } 104 | 105 | @Override 106 | public BaseStatistics getStatistics(BaseStatistics baseStatistics) throws IOException { 107 | return baseStatistics; 108 | } 109 | 110 | @Override 111 | public InputSplit[] createInputSplits(int numSplit) throws IOException { 112 | InputSplit[] inputSplits = new InputSplit[numSplit]; 113 | for (int i = 0; i < numSplit; i++) { 114 | inputSplits[i] = new GenericInputSplit(i + 1, numSplit); 115 | } 116 | return inputSplits; 117 | } 118 | 119 | @Override 120 | public InputSplitAssigner getInputSplitAssigner(InputSplit[] inputSplits) { 121 | return new DefaultInputSplitAssigner(inputSplits); 122 | } 123 | 124 | @Override 125 | public void open(InputSplit inputSplit) throws IOException { 126 | if (inputSplit != null) { 127 | GenericInputSplit split = (GenericInputSplit) inputSplit; 128 | List<Integer> scanParts = PartitionUtils.getScanParts(split.getSplitNumber(), 129 | numPart, split.getTotalNumberOfSplits()); 130 | if (executionOptions.getDataType().isVertex()) { 131 | nebulaSource = new NebulaVertexSource(storageClient, executionOptions, scanParts); 132 | } else { 133 | nebulaSource = new NebulaEdgeSource(storageClient, executionOptions, scanParts); 134 | } 135 | try { 136 | hasNext = nebulaSource.hasNext(); 137 | } catch (Exception e) { 138 | LOG.error("scan NebulaGraph error", e); 139 | throw new IOException("scan error", e); 140 | } 141 | } 142 | } 143 | 144 | @Override 145 | public boolean reachedEnd() throws IOException { 146 | return !hasNext; 147 | } 148 | 149 | @Override 150 | public T nextRecord(T reuse) throws IOException { 151 | if (!hasNext) { 152 | return null; 153 | } 154 | LOG.info("source nextRecord: {}", times++); 155 | 156 | BaseTableRow row = nebulaSource.next(); 157 | try { 158 | hasNext = nebulaSource.hasNext(); 159 | } catch (Exception e) { 160 | LOG.error("scan NebulaGraph error", e); 161 | throw new IOException("scan NebulaGraph error", e); 162 | } 163 | scannedRows++; 164 | return nebulaConverter.convert(row); 165 | } 166 | 167 | @Override 168 | public void close() { 169 | LOG.info("Closing split (scanned {} rows)", scannedRows); 170 | } 171 | 172 | public NebulaInputFormat<T> setExecutionOptions(ExecutionOptions executionOptions) { 173 | this.executionOptions = executionOptions; 174 | return this; 175 | } 176 | } 177 | --------------------------------------------------------------------------------
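For context, a sketch of how an input format like this is usually driven from the DataStream API, assuming the NebulaSourceFunction and the VertexExecutionOptions builder listed elsewhere in this tree behave as the project README describes; the address, space, tag, and fields are placeholders.

import java.util.Arrays;
import org.apache.flink.connector.nebula.connection.NebulaClientOptions;
import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider;
import org.apache.flink.connector.nebula.source.NebulaSourceFunction;
import org.apache.flink.connector.nebula.statement.VertexExecutionOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class VertexSourceSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        NebulaClientOptions clientOptions = new NebulaClientOptions.NebulaClientOptionsBuilder()
                .setMetaAddress("127.0.0.1:9559") // placeholder metad address
                .build();
        NebulaStorageConnectionProvider storageProvider =
                new NebulaStorageConnectionProvider(clientOptions);
        VertexExecutionOptions executionOptions =
                new VertexExecutionOptions.ExecutionOptionBuilder()
                        .setGraphSpace("flinkSource") // placeholder space
                        .setTag("person")             // placeholder tag
                        .setFields(Arrays.asList("name", "age"))
                        .setLimit(100)
                        .build();
        // The source function scans vertices via the storage client, partition by
        // partition, much as open()/nextRecord() do above.
        NebulaSourceFunction sourceFunction = new NebulaSourceFunction(storageProvider)
                .setExecutionOptions(executionOptions);
        env.addSource(sourceFunction).print();
        env.execute("nebula-source-sketch");
    }
}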
/connector/src/main/java/org.apache.flink/connector/nebula/utils/NebulaEdges.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2021 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.utils; 7 | 8 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.BATCH_INSERT_TEMPLATE; 9 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.DELETE_EDGE_TEMPLATE; 10 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.EDGE_ENDPOINT_TEMPLATE; 11 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.EDGE_VALUE_TEMPLATE; 12 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.EDGE_VALUE_WITHOUT_RANKING_TEMPLATE; 13 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.ENDPOINT_TEMPLATE; 14 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.UPDATE_EDGE_TEMPLATE; 15 | import static org.apache.flink.connector.nebula.utils.NebulaConstant.UPDATE_VALUE_TEMPLATE; 16 | 17 | import java.io.Serializable; 18 | import java.util.ArrayList; 19 | import java.util.List; 20 | 21 | public class NebulaEdges implements Serializable { 22 | 23 | private String edgeType; 24 | private List<String> propNames; 25 | private List<NebulaEdge> edges; 26 | private PolicyEnum sourcePolicy = null; 27 | private PolicyEnum targetPolicy = null; 28 | 29 | public NebulaEdges(String edgeType, List<String> propNames, List<NebulaEdge> edges, 30 | PolicyEnum sourcePolicy, PolicyEnum targetPolicy) { 31 | this.edgeType = edgeType; 32 | this.propNames = propNames; 33 | this.edges = edges; 34 | this.sourcePolicy = sourcePolicy; 35 | this.targetPolicy = targetPolicy; 36 | } 37 | 38 | public String getEdgeType() { 39 | return edgeType; 40 | } 41 | 42 | public String getPropNames() { 43 | List<String> escapePropNames = new ArrayList<>(); 44 | for (String propName : propNames) { 45 | escapePropNames.add(NebulaUtils.mkString(propName, "`", "", "`")); 46 | } 47 | return String.join(",", escapePropNames); 48 | } 49 | 50 | public List<NebulaEdge> getEdges() { 51 | return edges; 52 | } 53 | 54 | public PolicyEnum getSourcePolicy() { 55 | return sourcePolicy; 56 | } 57 | 58 | public PolicyEnum getTargetPolicy() { 59 | return targetPolicy; 60 | } 61 | 62 | /** 63 | * construct Nebula batch insert ngql for edges 64 | * 65 | * @return ngql 66 | */ 67 | public String getInsertStatement() { 68 | List<String> values = new ArrayList<>(); 69 | for (NebulaEdge edge : edges) { 70 | String sourceId = getSourceId(edge); 71 | String targetId = getTargetId(edge); 72 | 73 | // edge rank 74 | if (edge.getRank() == null) { 75 | values.add(String.format(EDGE_VALUE_WITHOUT_RANKING_TEMPLATE, sourceId, targetId, 76 | edge.getPropValuesString())); 77 | } else { 78 | values.add(String.format(EDGE_VALUE_TEMPLATE, sourceId, targetId, edge.getRank(), 79 | edge.getPropValuesString())); 80 | } 81 | } 82 | return String.format(BATCH_INSERT_TEMPLATE, DataTypeEnum.EDGE.name(), edgeType, 83 | getPropNames(), String.join(",", values)); 84 | } 85 | 86 | /** 87 | * construct Nebula batch update ngql for edge 88 | * 89 | * @return ngql 90 | */ 91 | public String getUpdateStatement() { 92 | List<String> statements = new ArrayList<>(); 93 | // for update mode, each edge constructs one update statement.
94 | for (NebulaEdge edge : edges) { 95 | String sourceId = getSourceId(edge); 96 | String targetId = getTargetId(edge); 97 | long rank = 0; 98 | if (edge.getRank() != null) { 99 | rank = edge.getRank(); 100 | } 101 | 102 | List<String> updateProps = new ArrayList<>(); 103 | for (int i = 0; i < propNames.size(); i++) { 104 | updateProps.add(String.format(UPDATE_VALUE_TEMPLATE, propNames.get(i), 105 | edge.getPropValues().get(i))); 106 | } 107 | String updatePropsString = String.join(",", updateProps); 108 | String statement = String.format(UPDATE_EDGE_TEMPLATE, DataTypeEnum.EDGE.name(), 109 | edgeType, sourceId, targetId, rank, updatePropsString); 110 | statements.add(statement); 111 | } 112 | return String.join(";", statements); 113 | } 114 | 115 | /** 116 | * construct Nebula batch delete ngql for edge 117 | * 118 | * @return ngql 119 | */ 120 | public String getDeleteStatement() { 121 | List<String> sourceTargetIds = new ArrayList<>(); 122 | for (NebulaEdge edge : edges) { 123 | String sourceId = getSourceId(edge); 124 | String targetId = getTargetId(edge); 125 | long rank = 0; 126 | if (edge.getRank() != null) { 127 | rank = edge.getRank(); 128 | } 129 | String statement = String.format(EDGE_ENDPOINT_TEMPLATE, sourceId, targetId, rank); 130 | sourceTargetIds.add(statement); 131 | } 132 | return String.format(DELETE_EDGE_TEMPLATE, edgeType, String.join(",", sourceTargetIds)); 133 | } 134 | 135 | /** 136 | * format edge source id with policy 137 | * 138 | * @param edge Nebula edge {@link NebulaEdge} 139 | * @return the formatted source id 140 | */ 141 | private String getSourceId(NebulaEdge edge) { 142 | String sourceId = null; 143 | if (sourcePolicy == null) { 144 | sourceId = edge.getSource(); 145 | } else { 146 | switch (sourcePolicy) { 147 | case HASH: 148 | sourceId = String.format(ENDPOINT_TEMPLATE, PolicyEnum.HASH.name(), 149 | edge.getSource()); 150 | break; 151 | case UUID: 152 | sourceId = String.format(ENDPOINT_TEMPLATE, PolicyEnum.UUID.name(), 153 | edge.getSource()); 154 | break; 155 | default: 156 | throw new IllegalArgumentException("source policy is not supported"); 157 | } 158 | } 159 | return sourceId; 160 | } 161 | 162 | /** 163 | * format edge target id with policy 164 | * 165 | * @param edge Nebula edge {@link NebulaEdge} 166 | * @return the formatted target id 167 | */ 168 | private String getTargetId(NebulaEdge edge) { 169 | String targetId = null; 170 | if (targetPolicy == null) { 171 | targetId = edge.getTarget(); 172 | } else { 173 | switch (targetPolicy) { 174 | case HASH: 175 | targetId = String.format(ENDPOINT_TEMPLATE, PolicyEnum.HASH.name(), 176 | edge.getTarget()); 177 | break; 178 | case UUID: 179 | targetId = String.format(ENDPOINT_TEMPLATE, PolicyEnum.UUID.name(), 180 | edge.getTarget()); 181 | break; 182 | default: 183 | throw new IllegalArgumentException("target policy is not supported"); 184 | } 185 | } 186 | return targetId; 187 | } 188 | 189 | } 190 | --------------------------------------------------------------------------------
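A short sketch exercising the statement builders above with two hypothetical edges, one ranked and one not, so both branches of getInsertStatement run. It assumes NebulaEdge's constructor takes (source, target, rank, propValues), which is what its getters suggest; with no id policy set, ids are passed pre-quoted, as in the tests earlier in this tree.

import java.util.Arrays;
import java.util.List;
import org.apache.flink.connector.nebula.utils.NebulaEdge;
import org.apache.flink.connector.nebula.utils.NebulaEdges;

public class EdgesSketch {
    public static void main(String[] args) {
        // Hypothetical edges; rank may be null, as getInsertStatement shows.
        NebulaEdge e1 = new NebulaEdge("\"src1\"", "\"dst1\"", 1L, Arrays.asList("1.0"));
        NebulaEdge e2 = new NebulaEdge("\"src2\"", "\"dst2\"", null, Arrays.asList("2.0"));
        List<NebulaEdge> edges = Arrays.asList(e1, e2);
        NebulaEdges friends = new NebulaEdges("friend", Arrays.asList("degree"), edges,
                null, null);
        // Roughly: INSERT EDGE `friend`(`degree`) VALUES "src1"->"dst1"@1: (1.0),
        // "src2"->"dst2": (2.0) -- exact spacing comes from NebulaConstant templates.
        System.out.println(friends.getInsertStatement());
        System.out.println(friends.getUpdateStatement());
        System.out.println(friends.getDeleteStatement());
    }
}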
/connector/src/test/java/org/apache/flink/connector/nebula/sink/NebulaOutputFormatConverterTest.java: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2020 vesoft inc. All rights reserved. 2 | * 3 | * This source code is licensed under Apache 2.0 License. 4 | */ 5 | 6 | package org.apache.flink.connector.nebula.sink; 7 | 8 | import com.vesoft.nebula.PropertyType; 9 | import java.util.Arrays; 10 | import java.util.HashMap; 11 | import java.util.Map; 12 | import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions; 13 | import org.apache.flink.connector.nebula.statement.VertexExecutionOptions; 14 | import org.apache.flink.connector.nebula.utils.NebulaEdge; 15 | import org.apache.flink.connector.nebula.utils.NebulaVertex; 16 | import org.apache.flink.connector.nebula.utils.PolicyEnum; 17 | import org.apache.flink.connector.nebula.utils.VidTypeEnum; 18 | import org.apache.flink.types.Row; 19 | import org.junit.Before; 20 | import org.junit.Test; 21 | 22 | public class NebulaOutputFormatConverterTest { 23 | Map<String, Integer> schema = new HashMap<>(); 24 | Row row = new Row(9); 25 | 26 | @Before 27 | public void before() { 28 | schema.put("src", PropertyType.STRING.getValue()); 29 | schema.put("dst", PropertyType.STRING.getValue()); 30 | schema.put("degree", PropertyType.DOUBLE.getValue()); 31 | schema.put("date", PropertyType.DATE.getValue()); 32 | schema.put("datetime", PropertyType.DATETIME.getValue()); 33 | schema.put("time", PropertyType.TIME.getValue()); 34 | schema.put("name", PropertyType.STRING.getValue()); 35 | schema.put("age", PropertyType.INT16.getValue()); 36 | schema.put("aaa", PropertyType.DOUBLE.getValue()); 37 | schema.put("bbb", PropertyType.INT16.getValue()); 38 | 39 | row.setField(0, 2); 40 | row.setField(1, "Tom"); 41 | row.setField(2, "Jena"); 42 | row.setField(3, "2020-01-01"); 43 | row.setField(4, "2020-01-01 12:12:12:0000"); 44 | row.setField(5, "12:12:12:0000"); 45 | row.setField(6, "a"); 46 | row.setField(7, 11); 47 | row.setField(8, 12.0); 48 | } 49 | 50 | @Test 51 | public void testCreateVertexValue() { 52 | VertexExecutionOptions options = new VertexExecutionOptions.ExecutionOptionBuilder() 53 | .setGraphSpace("test") 54 | .setTag("tag") 55 | .setIdIndex(0) 56 | .setFields(Arrays.asList("name", "age")) 57 | .setPositions(Arrays.asList(1, 7)) 58 | .build(); 59 | 60 | NebulaRowVertexOutputFormatConverter converter = 61 | new NebulaRowVertexOutputFormatConverter(options, VidTypeEnum.STRING, schema); 62 | 63 | NebulaVertex vertex = converter.createVertex(row, null); 64 | assert (vertex.getVid().equals("\"2\"")); 65 | assert (vertex.getPropValues().size() == 2); 66 | assert (vertex.getPropValuesString().equals("\"Tom\",11")); 67 | } 68 | 69 | @Test 70 | public void testVertexDateValue() { 71 | VertexExecutionOptions options = new VertexExecutionOptions.ExecutionOptionBuilder() 72 | .setGraphSpace("test") 73 | .setTag("tag") 74 | .setIdIndex(0) 75 | .setFields(Arrays.asList("name", "date", "datetime", "time", "age")) 76 | .setPositions(Arrays.asList(1, 3, 4, 5, 7)) 77 | .build(); 78 | NebulaRowVertexOutputFormatConverter converter = 79 | new NebulaRowVertexOutputFormatConverter(options, VidTypeEnum.STRING, schema); 80 | 81 | NebulaVertex vertex = converter.createVertex(row, null); 82 | assert (vertex.getVid().equals("\"2\"")); 83 | assert (vertex.getPropValuesString().equals("\"Tom\",date(\"2020-01-01\"),datetime" 84 | + "(\"2020-01-01 12:12:12:0000\"),time(\"12:12:12:0000\"),11")); 85 | } 86 | 87 | @Test 88 | public void testIntVidVertex() { 89 | VertexExecutionOptions options = new VertexExecutionOptions.ExecutionOptionBuilder() 90 | .setGraphSpace("test") 91 | .setTag("tag") 92 | .setIdIndex(1) 93 | .setFields(Arrays.asList("name", "age")) 94 | .setPositions(Arrays.asList(1, 7)) 95 | .build(); 96 |
NebulaRowVertexOutputFormatConverter converter = 97 | new NebulaRowVertexOutputFormatConverter(options, VidTypeEnum.INT, schema); 98 | 99 | NebulaVertex vertex = converter.createVertex(row, PolicyEnum.HASH); 100 | assert (vertex.getVid().equals("Tom")); 101 | assert (vertex.getPropValues().size() == 2); 102 | assert (vertex.getPropValuesString().equals("\"Tom\",11")); 103 | } 104 | 105 | 106 | @Test 107 | public void testCreateEdgeValue() { 108 | EdgeExecutionOptions options = new EdgeExecutionOptions.ExecutionOptionBuilder() 109 | .setGraphSpace("test") 110 | .setEdge("edge") 111 | .setSrcIndex(1) 112 | .setDstIndex(2) 113 | .setRankIndex(0) 114 | .setFields(Arrays.asList("src", "dst", "degree")) 115 | .setPositions(Arrays.asList(1, 2, 8)) 116 | .build(); 117 | 118 | NebulaRowEdgeOutputFormatConverter converter = 119 | new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.STRING, schema); 120 | NebulaEdge edge = converter.createEdge(row, null); 121 | assert (edge.getSource().equals("\"Tom\"")); 122 | assert (edge.getTarget().equals("\"Jena\"")); 123 | assert (edge.getRank() == 2); 124 | assert (edge.getPropValues().size() == 3); 125 | assert (edge.getPropValuesString().equals("\"Tom\",\"Jena\",12.0")); 126 | } 127 | 128 | 129 | @Test 130 | public void testEdgeDateValue() { 131 | EdgeExecutionOptions options = new EdgeExecutionOptions.ExecutionOptionBuilder() 132 | .setGraphSpace("test") 133 | .setEdge("edge") 134 | .setSrcIndex(1) 135 | .setDstIndex(2) 136 | .setFields(Arrays.asList("degree", "date", "datetime", "time")) 137 | .setPositions(Arrays.asList(8, 3, 4, 5)) 138 | .build(); 139 | 140 | NebulaRowEdgeOutputFormatConverter converter = 141 | new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.STRING, schema); 142 | 143 | NebulaEdge edge = converter.createEdge(row, null); 144 | assert (edge.getSource().equals("\"Tom\"")); 145 | assert (edge.getTarget().equals("\"Jena\"")); 146 | assert (edge.getRank() == null); 147 | assert (edge.getPropValues().size() == 4); 148 | assert (edge.getPropValuesString().equals("12.0,date(\"2020-01-01\"),datetime" 149 | + "(\"2020-01-01 12:12:12:0000\"),time(\"12:12:12:0000\")")); 150 | } 151 | 152 | @Test 153 | public void testIntVidEdge() { 154 | EdgeExecutionOptions options = new EdgeExecutionOptions.ExecutionOptionBuilder() 155 | .setGraphSpace("test") 156 | .setEdge("edge") 157 | .setSrcIndex(1) 158 | .setDstIndex(2) 159 | .setFields(Arrays.asList("degree")) 160 | .setPositions(Arrays.asList(8)) 161 | .build(); 162 | 163 | NebulaRowEdgeOutputFormatConverter converter = 164 | new NebulaRowEdgeOutputFormatConverter(options, VidTypeEnum.INT, schema); 165 | 166 | NebulaEdge edge = converter.createEdge(row, PolicyEnum.HASH); 167 | assert (edge.getSource().equals("Tom")); 168 | assert (edge.getTarget().equals("Jena")); 169 | assert (edge.getRank() == null); 170 | assert (edge.getPropValues().size() == 1); 171 | assert (edge.getPropValuesString().equals("12.0")); 172 | } 173 | } 174 | --------------------------------------------------------------------------------
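Finally, a write-path sketch showing where row converters like the ones tested above sit, assuming NebulaVertexBatchOutputFormat takes the graph provider, meta provider, and execution options in that order and that NebulaSinkFunction wraps it, as the project README suggests; addresses, space, and field layout are placeholders.

import java.util.Arrays;
import org.apache.flink.connector.nebula.connection.NebulaClientOptions;
import org.apache.flink.connector.nebula.connection.NebulaGraphConnectionProvider;
import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider;
import org.apache.flink.connector.nebula.sink.NebulaSinkFunction;
import org.apache.flink.connector.nebula.sink.NebulaVertexBatchOutputFormat;
import org.apache.flink.connector.nebula.statement.VertexExecutionOptions;
import org.apache.flink.types.Row;

public class VertexSinkSketch {
    public static NebulaSinkFunction<Row> buildVertexSink() {
        NebulaClientOptions clientOptions = new NebulaClientOptions.NebulaClientOptionsBuilder()
                .setGraphAddress("127.0.0.1:9669") // placeholder graphd address
                .setMetaAddress("127.0.0.1:9559")  // placeholder metad address
                .build();
        NebulaGraphConnectionProvider graphProvider =
                new NebulaGraphConnectionProvider(clientOptions);
        NebulaMetaConnectionProvider metaProvider =
                new NebulaMetaConnectionProvider(clientOptions);
        // Field 0 of each incoming Row is the vid; fields 1 and 2 feed the listed
        // properties, mirroring the setIdIndex/setFields/setPositions tests above.
        VertexExecutionOptions executionOptions =
                new VertexExecutionOptions.ExecutionOptionBuilder()
                        .setGraphSpace("flinkSink")
                        .setTag("person")
                        .setIdIndex(0)
                        .setFields(Arrays.asList("name", "age"))
                        .setPositions(Arrays.asList(1, 2))
                        .build();
        NebulaVertexBatchOutputFormat outputFormat =
                new NebulaVertexBatchOutputFormat(graphProvider, metaProvider, executionOptions);
        return new NebulaSinkFunction<>(outputFormat);
    }
}

A Row stream wired through dataStream.addSink(buildVertexSink()) would then have each Row turned into nGQL by NebulaRowVertexOutputFormatConverter and flushed in batches.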