├── img.png ├── .gitignore ├── style ├── license-header └── spotless-formatter.xml ├── src ├── main │ ├── resources │ │ └── META-INF │ │ │ └── services │ │ │ ├── org.apache.flink.table.factories.Factory │ │ │ └── org.apache.flink.streaming.connectors.redis.hanlder.RedisHandler │ └── java │ │ └── org │ │ └── apache │ │ └── flink │ │ └── streaming │ │ └── connectors │ │ └── redis │ │ ├── command │ │ ├── CommandBoundednessType.java │ │ ├── RedisJoinCommand.java │ │ ├── RedisSelectCommand.java │ │ ├── RedisDeleteCommand.java │ │ ├── RedisInsertCommand.java │ │ ├── RedisCommandBaseDescription.java │ │ ├── RedisCommandDescription.java │ │ └── RedisCommand.java │ │ ├── config │ │ ├── ZremType.java │ │ ├── RedisValueDataStructure.java │ │ ├── RedisValidator.java │ │ ├── FlinkConfigHandler.java │ │ ├── LettuceConfig.java │ │ ├── RedisJoinConfig.java │ │ ├── FlinkSingleConfigHandler.java │ │ ├── FlinkConfigBase.java │ │ ├── FlinkClusterConfigHandler.java │ │ ├── FlinkSentinelConfigHandler.java │ │ ├── FlinkClusterConfig.java │ │ ├── FlinkSingleConfig.java │ │ ├── FlinkSentinelConfig.java │ │ └── RedisOptions.java │ │ ├── mapper │ │ ├── RedisMapper.java │ │ ├── RowRedisQueryMapper.java │ │ ├── RedisSinkMapper.java │ │ └── RowRedisSinkMapper.java │ │ ├── hanlder │ │ ├── RedisHandler.java │ │ └── RedisHandlerServices.java │ │ ├── container │ │ ├── RedisCommandsContainerBuilder.java │ │ ├── RedisClientBuilder.java │ │ └── RedisCommandsContainer.java │ │ ├── table │ │ ├── RedisDynamicTableSink.java │ │ ├── RedisDynamicTableSource.java │ │ ├── RedisLimitedSinkFunction.java │ │ ├── RedisResultWrapper.java │ │ ├── RedisDynamicTableFactory.java │ │ └── RedisSourceFunction.java │ │ └── converter │ │ └── RedisRowConverter.java └── test │ ├── resources │ └── log4j2-test.properties │ └── java │ └── org │ └── apache │ └── flink │ └── streaming │ └── connectors │ └── redis │ ├── table │ ├── base │ │ └── TestRedisConfigBase.java │ ├── FlinkCDCExample.java │ ├── LimitedSinkTest.java │ ├── ClusterSQLExample.java │ ├── SQLExpireTest.java │ ├── SQLLettuceLimitTest.java │ └── SQLQueryTest.java │ └── datastream │ └── DataStreamTest.java ├── .github └── workflows │ ├── license-eyes.yml │ ├── build-redis-connector.yml │ └── checkstyle.yaml └── LICENSE /img.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jeff-zou/flink-connector-redis/HEAD/img.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .springBeans 2 | .gradle/ 3 | .idea/ 4 | .log/ 5 | target/ 6 | *.iml 7 | classes/* 8 | *.ipr 9 | *.iws 10 | publish 11 | .classpath 12 | .project 13 | .settings 14 | /application-test.properties 15 | /application.pid 16 | /bin/ 17 | out 18 | .sass-cache/ 19 | node_modules/ 20 | .DS_Store 21 | .tags 22 | .tags1 23 | .tags2 24 | logs/ 25 | keytmp/ 26 | *.js.map 27 | *.css.map 28 | run/ 29 | dist-deploy/ 30 | coverage/ 31 | run_backup/ -------------------------------------------------------------------------------- /style/license-header: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | -------------------------------------------------------------------------------- /src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | org.apache.flink.streaming.connectors.redis.table.RedisDynamicTableFactory -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/CommandBoundednessType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | public enum CommandBoundednessType { 22 | TRUE, 23 | FALSE 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/RedisJoinCommand.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | public enum RedisJoinCommand { 22 | GET, 23 | HGET, 24 | ZSCORE, 25 | NONE 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/ZremType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | /** rem type for sorted set**/ 22 | public enum ZremType { 23 | 24 | SCORE, 25 | RANK, 26 | LEX, 27 | } 28 | -------------------------------------------------------------------------------- /src/main/resources/META-INF/services/org.apache.flink.streaming.connectors.redis.hanlder.RedisHandler: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | org.apache.flink.streaming.connectors.redis.config.FlinkClusterConfigHandler 17 | org.apache.flink.streaming.connectors.redis.config.FlinkSentinelConfigHandler 18 | org.apache.flink.streaming.connectors.redis.config.FlinkSingleConfigHandler -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/RedisSelectCommand.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | public enum RedisSelectCommand { 22 | GET, 23 | HGET, 24 | LRANGE, 25 | SRANDMEMBER, 26 | ZSCORE, 27 | SUBSCRIBE, 28 | NONE 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/RedisDeleteCommand.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | public enum RedisDeleteCommand { 22 | SREM, 23 | DEL, 24 | ZREM, 25 | ZINCRBY, 26 | HDEL, 27 | HINCRBY, 28 | HINCRBYFLOAT, 29 | INCRBY, 30 | INCRBYFLOAT, 31 | NONE 32 | } 33 | -------------------------------------------------------------------------------- /.github/workflows/license-eyes.yml: -------------------------------------------------------------------------------- 1 | # 2 | #Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. 
You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, 13 | # software distributed under the License is distributed on an 14 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | # KIND, either express or implied. See the License for the 16 | # specific language governing permissions and limitations 17 | # under the License. 18 | # 19 | --- 20 | name: License Check 21 | on: 22 | pull_request: 23 | push: 24 | branches: 25 | - dev 26 | jobs: 27 | license-check: 28 | name: "License Check" 29 | runs-on: ubuntu-24.04 30 | steps: 31 | - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" 32 | uses: actions/checkout@v2 33 | - name: Check License 34 | uses: apache/skywalking-eyes@v0.2.0 35 | env: 36 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/RedisInsertCommand.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | public enum RedisInsertCommand { 22 | RPUSH, 23 | LPUSH, 24 | SADD, 25 | PFADD, 26 | SET, 27 | PUBLISH, 28 | ZADD, 29 | SREM, 30 | ZREM, 31 | ZINCRBY, 32 | HSET, 33 | HMSET, 34 | HINCRBY, 35 | HINCRBYFLOAT, 36 | INCRBY, 37 | INCRBYFLOAT, 38 | DECRBY, 39 | DEL, 40 | HDEL, 41 | NONE 42 | } 43 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/RedisValueDataStructure.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | /** redis value data structure. @Author: Jeff Zou @Date: 2022/9/28 15:53 */ 22 | public enum RedisValueDataStructure { 23 | // The value will come from a field (for example, set: key is the first field defined by DDL, 24 | // and value is the second field). 25 | column, 26 | // value is taken from the entire row, separated by '\01'. 27 | row 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/RedisValidator.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | /** redis validator for validate redis descriptor. */ 22 | public class RedisValidator { 23 | 24 | public static final String REDIS_MODE = "redis-mode"; 25 | public static final String REDIS_CLUSTER = "cluster"; 26 | public static final String REDIS_SENTINEL = "sentinel"; 27 | public static final String REDIS_SINGLE = "single"; 28 | 29 | public static final String REDIS_COMMAND = "command"; 30 | } 31 | -------------------------------------------------------------------------------- /src/test/resources/log4j2-test.properties: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | ################################################################################ 18 | # Set root logger level to OFF to not flood build logs 19 | # set manually to INFO for debugging purposes 20 | rootLogger.level=info 21 | rootLogger.appenderRef.test.ref=TestLogger 22 | appender.testlogger.name=TestLogger 23 | appender.testlogger.type=CONSOLE 24 | appender.testlogger.target=SYSTEM_ERR 25 | appender.testlogger.layout.type=PatternLayout 26 | appender.testlogger.layout.pattern=%-4r [%t] %-5p %c %x - %m%n 27 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/mapper/RedisMapper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.mapper; 20 | 21 | import org.apache.flink.api.common.functions.Function; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommandBaseDescription; 23 | 24 | import java.io.Serializable; 25 | 26 | /** 27 | * @param 28 | */ 29 | public interface RedisMapper extends Function, Serializable { 30 | 31 | /** 32 | * Returns descriptor which defines data type. 33 | * 34 | * @return data type descriptor 35 | */ 36 | RedisCommandBaseDescription getCommandDescription(); 37 | } 38 | -------------------------------------------------------------------------------- /.github/workflows/build-redis-connector.yml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | # 18 | --- 19 | name: Build Redis Connector 20 | on: 21 | pull_request: 22 | push: 23 | 24 | jobs: 25 | build-extension: 26 | name: "Build Connector" 27 | runs-on: ubuntu-latest 28 | defaults: 29 | run: 30 | shell: bash 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@master 34 | 35 | - name: Setup java 36 | uses: actions/setup-java@v2 37 | with: 38 | distribution: adopt 39 | java-version: '8' 40 | 41 | - name: Build flink connector 1.15 42 | run: | 43 | mvn clean package -DskipTests \ 44 | -Dflink.version=1.15.0 -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkConfigHandler.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import org.apache.flink.configuration.ReadableConfig; 22 | import org.apache.flink.streaming.connectors.redis.hanlder.RedisHandler; 23 | 24 | /** handler to create flink config. */ 25 | public interface FlinkConfigHandler extends RedisHandler { 26 | 27 | /** 28 | * create flink config using the specified properties. 29 | * 30 | * @param config used to create flink config 31 | * @return flink config 32 | */ 33 | FlinkConfigBase createFlinkConfig(ReadableConfig config); 34 | } 35 | -------------------------------------------------------------------------------- /.github/workflows/checkstyle.yaml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License.
17 | # 18 | --- 19 | name: Code Style Checker 20 | 21 | on: 22 | pull_request: 23 | push: 24 | branches: 25 | - dev 26 | 27 | jobs: 28 | java-checkstyle: 29 | name: "CheckStyle" 30 | runs-on: ubuntu-24.04 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@v3 34 | with: 35 | persist-credentials: false 36 | submodules: recursive 37 | 38 | - name: Setup java 39 | uses: actions/setup-java@v2 40 | with: 41 | distribution: adopt 42 | java-version: '8' 43 | 44 | - name: Run java checkstyle 45 | run: 46 | mvn clean compile checkstyle:checkstyle -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/RedisCommandBaseDescription.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | import org.apache.flink.util.Preconditions; 22 | 23 | import java.io.Serializable; 24 | 25 | /** base description. @Author: jeff.zou @Date: 2022/3/9.14:55 */ 26 | public class RedisCommandBaseDescription implements Serializable { 27 | 28 | private static final long serialVersionUID = 1L; 29 | 30 | private RedisCommand redisCommand; 31 | 32 | public RedisCommandBaseDescription(RedisCommand redisCommand) { 33 | Preconditions.checkNotNull(redisCommand, "redis command type can't be null"); 34 | this.redisCommand = redisCommand; 35 | } 36 | 37 | public RedisCommand getRedisCommand() { 38 | return redisCommand; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/mapper/RowRedisQueryMapper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License.
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.mapper; 20 | 21 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommandBaseDescription; 23 | 24 | /** row redis mapper. @Author: jeff.zou @Date: 2022/3/7.14:59 */ 25 | public class RowRedisQueryMapper implements RedisMapper { 26 | 27 | RedisCommand redisCommand; 28 | 29 | public RowRedisQueryMapper(RedisCommand redisCommand) { 30 | this.redisCommand = redisCommand; 31 | } 32 | 33 | public RedisCommand getRedisCommand() { 34 | return redisCommand; 35 | } 36 | 37 | @Override 38 | public RedisCommandBaseDescription getCommandDescription() { 39 | return new RedisCommandBaseDescription(redisCommand); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/hanlder/RedisHandler.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.hanlder; 20 | 21 | import java.io.Serializable; 22 | import java.util.Collections; 23 | import java.util.List; 24 | import java.util.Map; 25 | 26 | /** * redis handler to create redis mapper and flink config. */ 27 | public interface RedisHandler extends Serializable { 28 | 29 | /** 30 | * required context for SPI to find this redis handler. 31 | * 32 | * @return properties to find correct redis handler. 33 | */ 34 | Map requiredContext(); 35 | 36 | /** 37 | * supported properties for this redis handler. 38 | * 39 | * @return list of supported properties 40 | * @throws Exception 41 | */ 42 | default List supportProperties() throws Exception { 43 | return Collections.emptyList(); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/container/RedisCommandsContainerBuilder.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License.
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.container; 20 | 21 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase; 22 | 23 | import io.lettuce.core.AbstractRedisClient; 24 | import io.lettuce.core.RedisClient; 25 | import io.lettuce.core.cluster.RedisClusterClient; 26 | 27 | /** The builder for {@link RedisCommandsContainer}. */ 28 | public class RedisCommandsContainerBuilder { 29 | 30 | /** 31 | * @param flinkConfigBase 32 | * @return 33 | */ 34 | public static RedisCommandsContainer build(FlinkConfigBase flinkConfigBase) { 35 | AbstractRedisClient redisClient = RedisClientBuilder.build(flinkConfigBase); 36 | if (redisClient instanceof RedisClusterClient) { 37 | return new RedisClusterContainer((RedisClusterClient) redisClient); 38 | } else { 39 | return new RedisContainer((RedisClient) redisClient); 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/mapper/RedisSinkMapper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.mapper; 20 | 21 | import org.apache.flink.table.data.RowData; 22 | import org.apache.flink.table.types.logical.LogicalType; 23 | 24 | /** 25 | * @param 26 | */ 27 | public interface RedisSinkMapper extends RedisMapper { 28 | 29 | /** 30 | * Extracts key from data. 31 | * 32 | * @param rowData source data 33 | * @return key 34 | */ 35 | String getKeyFromData(RowData rowData, LogicalType logicalType, Integer keyIndex); 36 | 37 | /** 38 | * Extracts value from data. 
39 | * 40 | * @param rowData source data 41 | * @return value 42 | */ 43 | String getValueFromData(RowData rowData, LogicalType logicalType, Integer valueIndex); 44 | 45 | /** 46 | * @param rowData 47 | * @param fieldIndex 48 | * @return 49 | */ 50 | String getFieldFromData(RowData rowData, LogicalType logicalType, Integer fieldIndex); 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/LettuceConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import java.io.Serializable; 22 | 23 | public class LettuceConfig implements Serializable { 24 | 25 | private static final long serialVersionUID = 1L; 26 | private final Integer nettyIoPoolSize; 27 | 28 | private final Integer nettyEventPoolSize; 29 | 30 | public LettuceConfig(Integer nettyIoPoolSize, Integer nettyEventPoolSize) { 31 | this.nettyIoPoolSize = nettyIoPoolSize; 32 | this.nettyEventPoolSize = nettyEventPoolSize; 33 | } 34 | 35 | public Integer getNettyIoPoolSize() { 36 | return nettyIoPoolSize; 37 | } 38 | 39 | public Integer getNettyEventPoolSize() { 40 | return nettyEventPoolSize; 41 | } 42 | 43 | @Override 44 | public String toString() { 45 | return "LettuceConfig{" 46 | + "nettyIoPoolSize=" 47 | + nettyIoPoolSize 48 | + ", nettyEventPoolSize=" 49 | + nettyEventPoolSize 50 | + '}'; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/RedisCommandDescription.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | import java.io.Serializable; 22 | import java.time.LocalTime; 23 | 24 | /** */ 25 | public class RedisCommandDescription extends RedisCommandBaseDescription implements Serializable { 26 | 27 | private static final long serialVersionUID = 1L; 28 | 29 | private final Integer ttl; 30 | 31 | private final Boolean setIfAbsent; 32 | 33 | private final LocalTime expireTime; 34 | 35 | private final Boolean ttlKeyNotAbsent; 36 | 37 | private final boolean auditLog; 38 | 39 | public RedisCommandDescription( 40 | RedisCommand redisCommand, 41 | Integer ttl, 42 | LocalTime expireTime, 43 | Boolean setIfAbsent, 44 | Boolean ttlKeyNotAbsent, 45 | Boolean auditLog) { 46 | super(redisCommand); 47 | this.expireTime = expireTime; 48 | this.ttl = ttl; 49 | this.setIfAbsent = setIfAbsent; 50 | this.ttlKeyNotAbsent = ttlKeyNotAbsent; 51 | this.auditLog = auditLog; 52 | } 53 | 54 | public Integer getTTL() { 55 | return ttl; 56 | } 57 | 58 | public LocalTime getExpireTime() { 59 | return expireTime; 60 | } 61 | 62 | public Boolean getSetIfAbsent() { 63 | return setIfAbsent; 64 | } 65 | 66 | public boolean getTtlKeyNotAbsent() { 67 | return ttlKeyNotAbsent; 68 | } 69 | 70 | public boolean isAuditLog() { 71 | return auditLog; 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/RedisJoinConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | /** query options. 
@Author:jeff.zou @Date: 2022/3/9.14:37 */ 22 | public class RedisJoinConfig { 23 | 24 | private final long cacheMaxSize; 25 | private final long cacheTtl; 26 | private final boolean loadAll; 27 | 28 | public RedisJoinConfig(long cacheMaxSize, long cacheTtl, boolean loadAll) { 29 | this.cacheMaxSize = cacheMaxSize; 30 | this.cacheTtl = cacheTtl; 31 | this.loadAll = loadAll; 32 | } 33 | 34 | public long getCacheMaxSize() { 35 | return cacheMaxSize; 36 | } 37 | 38 | public long getCacheTtl() { 39 | return cacheTtl; 40 | } 41 | 42 | public boolean getLoadAll() { 43 | return loadAll; 44 | } 45 | 46 | /** */ 47 | public static class Builder { 48 | 49 | private long cacheMaxSize = -1L; 50 | private long cacheTtl = -1L; 51 | private boolean loadAll = false; 52 | 53 | public Builder setCacheMaxSize(long cacheMaxSize) { 54 | this.cacheMaxSize = cacheMaxSize; 55 | return this; 56 | } 57 | 58 | public Builder setCacheTTL(long cacheTtl) { 59 | this.cacheTtl = cacheTtl; 60 | return this; 61 | } 62 | 63 | public Builder setLoadAll(boolean loadAll) { 64 | this.loadAll = loadAll; 65 | return this; 66 | } 67 | 68 | public RedisJoinConfig build() { 69 | return new RedisJoinConfig(cacheMaxSize, cacheTtl, loadAll); 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkSingleConfigHandler.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import org.apache.flink.configuration.ReadableConfig; 22 | import org.apache.flink.util.Preconditions; 23 | 24 | import java.util.HashMap; 25 | import java.util.Map; 26 | 27 | /** */ 28 | public class FlinkSingleConfigHandler implements FlinkConfigHandler { 29 | 30 | @Override 31 | public FlinkConfigBase createFlinkConfig(ReadableConfig config) { 32 | String host = config.get(RedisOptions.HOST); 33 | Preconditions.checkNotNull(host, "host should not be null in single mode"); 34 | 35 | LettuceConfig lettuceConfig = 36 | new LettuceConfig( 37 | config.get(RedisOptions.NETTY_IO_POOL_SIZE), 38 | config.get(RedisOptions.NETTY_EVENT_POOL_SIZE)); 39 | 40 | FlinkSingleConfig.Builder builder = 41 | new FlinkSingleConfig.Builder() 42 | .setHost(host) 43 | .setPassword(config.get(RedisOptions.PASSWORD)) 44 | .setLettuceConfig(lettuceConfig); 45 | builder.setPort(config.get(RedisOptions.PORT)); 46 | builder.setTimeout(config.get(RedisOptions.TIMEOUT)) 47 | .setDatabase(config.get(RedisOptions.DATABASE)); 48 | 49 | return builder.build(); 50 | } 51 | 52 | @Override 53 | public Map requiredContext() { 54 | Map require = new HashMap<>(); 55 | require.put(RedisValidator.REDIS_MODE, RedisValidator.REDIS_SINGLE); 56 | return require; 57 | } 58 | 59 | public FlinkSingleConfigHandler() { 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkConfigBase.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import org.apache.flink.util.Preconditions; 22 | 23 | import java.io.Serializable; 24 | 25 | /** Base class for Flink Redis configuration. */ 26 | public abstract class FlinkConfigBase implements Serializable { 27 | 28 | private static final long serialVersionUID = 1L; 29 | 30 | protected final int connectionTimeout; 31 | 32 | protected final String password; 33 | 34 | protected final LettuceConfig lettuceConfig; 35 | 36 | protected FlinkConfigBase(int connectionTimeout, String password, LettuceConfig lettuceConfig) { 37 | Preconditions.checkArgument( 38 | connectionTimeout >= 0, "connection timeout can not be negative"); 39 | this.password = password; 40 | this.connectionTimeout = connectionTimeout; 41 | this.lettuceConfig = lettuceConfig; 42 | } 43 | 44 | public String getPassword() { 45 | return password; 46 | } 47 | 48 | /** 49 | * Returns timeout. 
50 | * 51 | * @return connection timeout 52 | */ 53 | public int getConnectionTimeout() { 54 | return connectionTimeout; 55 | } 56 | 57 | public LettuceConfig getLettuceConfig() { 58 | return lettuceConfig; 59 | } 60 | 61 | @Override 62 | public String toString() { 63 | return "FlinkConfigBase{" 64 | + "connectionTimeout=" 65 | + connectionTimeout 66 | + ", password='" 67 | + password 68 | + '\'' 69 | + ", lettuceConfig=" 70 | + lettuceConfig 71 | + '}'; 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkClusterConfigHandler.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import org.apache.flink.configuration.ReadableConfig; 22 | import org.apache.flink.util.Preconditions; 23 | 24 | import java.util.HashMap; 25 | import java.util.Map; 26 | 27 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_CLUSTER; 28 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_MODE; 29 | 30 | /** cluster config handler to find and create cluster config use meta. 
*/ 31 | public class FlinkClusterConfigHandler implements FlinkConfigHandler { 32 | 33 | @Override 34 | public FlinkConfigBase createFlinkConfig(ReadableConfig config) { 35 | Preconditions.checkState( 36 | config.get(RedisOptions.DATABASE) == 0, "redis cluster just support db 0"); 37 | String nodesInfo = config.get(RedisOptions.CLUSTERNODES); 38 | Preconditions.checkNotNull(nodesInfo, "nodes should not be null in cluster mode"); 39 | 40 | LettuceConfig lettuceConfig = 41 | new LettuceConfig( 42 | config.get(RedisOptions.NETTY_IO_POOL_SIZE), 43 | config.get(RedisOptions.NETTY_EVENT_POOL_SIZE)); 44 | 45 | FlinkClusterConfig.Builder builder = 46 | new FlinkClusterConfig.Builder() 47 | .setNodesInfo(nodesInfo) 48 | .setPassword(config.get(RedisOptions.PASSWORD)) 49 | .setLettuceConfig(lettuceConfig); 50 | 51 | builder.setTimeout(config.get(RedisOptions.TIMEOUT)); 52 | 53 | return builder.build(); 54 | } 55 | 56 | @Override 57 | public Map requiredContext() { 58 | Map require = new HashMap<>(); 59 | require.put(REDIS_MODE, REDIS_CLUSTER); 60 | return require; 61 | } 62 | 63 | public FlinkClusterConfigHandler() { 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/table/base/TestRedisConfigBase.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table.base; 20 | 21 | import org.junit.jupiter.api.AfterAll; 22 | import org.junit.jupiter.api.BeforeAll; 23 | import org.slf4j.Logger; 24 | import org.slf4j.LoggerFactory; 25 | 26 | import io.lettuce.core.RedisClient; 27 | import io.lettuce.core.RedisURI; 28 | import io.lettuce.core.api.StatefulRedisConnection; 29 | import io.lettuce.core.api.sync.RedisCommands; 30 | 31 | /** 32 | * @Author: Jeff Zou @Date: 2022/10/14 10:07 33 | */ 34 | public class TestRedisConfigBase { 35 | 36 | private static final Logger LOG = LoggerFactory.getLogger(TestRedisConfigBase.class); 37 | 38 | public static final String REDIS_HOST = "10.11.69.176"; 39 | public static final int REDIS_PORT = 6379; 40 | public static final String REDIS_PASSWORD = "test123"; 41 | protected static StatefulRedisConnection singleConnect; 42 | protected static RedisCommands singleRedisCommands; 43 | 44 | private static RedisClient redisClient; 45 | 46 | @BeforeAll 47 | public static void connectRedis() { 48 | RedisURI redisURI = 49 | RedisURI.builder() 50 | .withHost(REDIS_HOST) 51 | .withPort(REDIS_PORT) 52 | .withPassword(REDIS_PASSWORD.toCharArray()) 53 | .build(); 54 | redisClient = RedisClient.create(redisURI); 55 | singleConnect = redisClient.connect(); 56 | singleRedisCommands = singleConnect.sync(); 57 | LOG.info("connect to the redis: {}", REDIS_HOST); 58 | } 59 | 60 | @AfterAll 61 | public static void stopSingle() { 62 | singleConnect.close(); 63 | redisClient.shutdown(); 64 | } 65 | 66 | protected String singleWith() { 67 | return "'connector'='redis', " 68 | + "'host'='" 69 | + REDIS_HOST 70 | + "','port'='" 71 | + REDIS_PORT 72 | + "', 'redis-mode'='single','password'='" 73 | + REDIS_PASSWORD 74 | + "',"; 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkSentinelConfigHandler.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import org.apache.flink.configuration.ReadableConfig; 22 | import org.apache.flink.util.Preconditions; 23 | import org.apache.flink.util.StringUtils; 24 | 25 | import java.util.HashMap; 26 | import java.util.Map; 27 | 28 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_MODE; 29 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_SENTINEL; 30 | 31 | /** */ 32 | public class FlinkSentinelConfigHandler implements FlinkConfigHandler { 33 | 34 | @Override 35 | public FlinkConfigBase createFlinkConfig(ReadableConfig config) { 36 | String masterName = config.get(RedisOptions.REDIS_MASTER_NAME); 37 | String sentinelsInfo = config.get(RedisOptions.SENTINELS_INFO); 38 | String sentinelsPassword = 39 | StringUtils.isNullOrWhitespaceOnly(config.get(RedisOptions.SENTINELS_PASSWORD)) 40 | ? null 41 | : config.get(RedisOptions.SENTINELS_PASSWORD); 42 | Preconditions.checkNotNull(masterName, "master should not be null in sentinel mode"); 43 | Preconditions.checkNotNull(sentinelsInfo, "sentinels should not be null in sentinel mode"); 44 | 45 | LettuceConfig lettuceConfig = 46 | new LettuceConfig( 47 | config.get(RedisOptions.NETTY_IO_POOL_SIZE), 48 | config.get(RedisOptions.NETTY_EVENT_POOL_SIZE)); 49 | 50 | FlinkSentinelConfig flinkSentinelConfig = 51 | new FlinkSentinelConfig.Builder() 52 | .setSentinelsInfo(sentinelsInfo) 53 | .setMasterName(masterName) 54 | .setConnectionTimeout(config.get(RedisOptions.TIMEOUT)) 55 | .setDatabase(config.get(RedisOptions.DATABASE)) 56 | .setPassword(config.get(RedisOptions.PASSWORD)) 57 | .setSentinelsPassword(sentinelsPassword) 58 | .setLettuceConfig(lettuceConfig) 59 | .build(); 60 | return flinkSentinelConfig; 61 | } 62 | 63 | @Override 64 | public Map requiredContext() { 65 | Map require = new HashMap<>(); 66 | require.put(REDIS_MODE, REDIS_SENTINEL); 67 | return require; 68 | } 69 | 70 | public FlinkSentinelConfigHandler() { 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkClusterConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import java.util.Objects; 22 | 23 | /** Configuration for cluster. 
*/ 24 | public class FlinkClusterConfig extends FlinkConfigBase { 25 | 26 | private static final long serialVersionUID = 1L; 27 | 28 | private final String nodesInfo; 29 | 30 | public String getNodesInfo() { 31 | return nodesInfo; 32 | } 33 | 34 | /** 35 | * cluster configuration. The list of node is mandatory, and when nodes is not set, it throws 36 | * NullPointerException. 37 | * 38 | * @param nodesInfo list of node information for Cluster 39 | * @param connectionTimeout socket / connection timeout. The default is 2000 40 | * @param password limit of redirections-how much we'll follow MOVED or ASK 41 | * @throws NullPointerException if parameter {@code nodes} is {@code null} 42 | */ 43 | private FlinkClusterConfig( 44 | String nodesInfo, int connectionTimeout, String password, LettuceConfig lettuceConfig) { 45 | super(connectionTimeout, password, lettuceConfig); 46 | 47 | Objects.requireNonNull(nodesInfo, "nodesInfo information should be presented"); 48 | this.nodesInfo = nodesInfo; 49 | } 50 | 51 | /** Builder for initializing {@link FlinkClusterConfig}. */ 52 | public static class Builder { 53 | 54 | private String nodesInfo; 55 | private int timeout; 56 | private String password; 57 | 58 | private LettuceConfig lettuceConfig; 59 | 60 | public Builder setNodesInfo(String nodesInfo) { 61 | this.nodesInfo = nodesInfo; 62 | return this; 63 | } 64 | 65 | /** 66 | * Sets socket / connection timeout. 67 | * 68 | * @param timeout socket / connection timeout, default value is 2000 69 | * @return Builder itself 70 | */ 71 | public Builder setTimeout(int timeout) { 72 | this.timeout = timeout; 73 | return this; 74 | } 75 | 76 | public Builder setPassword(String password) { 77 | this.password = password; 78 | return this; 79 | } 80 | 81 | public Builder setLettuceConfig(LettuceConfig lettuceConfig) { 82 | this.lettuceConfig = lettuceConfig; 83 | return this; 84 | } 85 | 86 | /** 87 | * Builds ClusterConfig. 88 | * 89 | * @return ClusterConfig 90 | */ 91 | public FlinkClusterConfig build() { 92 | return new FlinkClusterConfig(nodesInfo, timeout, password, lettuceConfig); 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/table/FlinkCDCExample.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.table.base.TestRedisConfigBase; 24 | import org.apache.flink.table.api.TableResult; 25 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 26 | 27 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_COMMAND; 28 | 29 | public class FlinkCDCExample extends TestRedisConfigBase { 30 | 31 | public static void main(String[] args) throws Exception { 32 | cdcExample(); 33 | } 34 | 35 | public static void cdcExample() throws Exception { 36 | String ddl = 37 | "CREATE TABLE orders (\n" 38 | + " order_id INT,\n" 39 | + " customer_name STRING,\n" 40 | + " price DECIMAL(10, 5),\n" 41 | + " product_id INT,\n" 42 | + " PRIMARY KEY(order_id) NOT ENFORCED\n" 43 | + " ) WITH (\n" 44 | + " 'connector' = 'mysql-cdc',\n" 45 | + " 'hostname' = '10.11.69.176',\n" 46 | + " 'port' = '3306',\n" 47 | + " 'username' = 'test',\n" 48 | + " 'password' = '123456',\n" 49 | + " 'database-name' = 'cdc',\n" 50 | + " 'table-name' = 'orders');"; 51 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 52 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 53 | env.setParallelism(1); 54 | tEnv.executeSql(ddl); 55 | 56 | String sink = 57 | "create table sink_redis(name varchar, level varchar, age varchar) with ( " 58 | + "'connector'='redis', " 59 | + "'host'='" 60 | + REDIS_HOST 61 | + "','port'='" 62 | + REDIS_PORT 63 | + "', 'redis-mode'='single','password'='" 64 | + REDIS_PASSWORD 65 | + "'," 66 | + " '" 67 | + REDIS_COMMAND 68 | + "'='" 69 | + RedisCommand.HSET 70 | + "' )"; 71 | tEnv.executeSql(sink); 72 | TableResult tableResult = 73 | tEnv.executeSql( 74 | "insert into sink_redis select cast(order_id as string), customer_name, cast(product_id as string) from orders /*+ OPTIONS('server-id'='5401-5404') */"); 75 | tableResult.getJobClient().get().getJobExecutionResult().get(); 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/mapper/RowRedisSinkMapper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.mapper; 20 | 21 | import org.apache.flink.configuration.ReadableConfig; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.command.RedisCommandDescription; 24 | import org.apache.flink.streaming.connectors.redis.config.RedisOptions; 25 | import org.apache.flink.streaming.connectors.redis.converter.RedisRowConverter; 26 | import org.apache.flink.table.data.GenericRowData; 27 | import org.apache.flink.table.data.RowData; 28 | import org.apache.flink.table.types.logical.LogicalType; 29 | import org.apache.flink.util.StringUtils; 30 | 31 | import java.time.LocalTime; 32 | 33 | /** base row redis mapper implement. */ 34 | public class RowRedisSinkMapper implements RedisSinkMapper { 35 | 36 | private final Integer ttl; 37 | 38 | private LocalTime expireTime; 39 | 40 | private final RedisCommand redisCommand; 41 | 42 | private final Boolean setIfAbsent; 43 | 44 | private final Boolean ttlKeyNotAbsent; 45 | 46 | private final Boolean auditLog; 47 | 48 | public RowRedisSinkMapper(RedisCommand redisCommand, ReadableConfig config) { 49 | this.redisCommand = redisCommand; 50 | this.ttl = config.get(RedisOptions.TTL); 51 | this.setIfAbsent = config.get(RedisOptions.SET_IF_ABSENT); 52 | this.ttlKeyNotAbsent = config.get(RedisOptions.TTL_KEY_NOT_ABSENT); 53 | this.auditLog = config.get(RedisOptions.AUDIT_LOG); 54 | String expireOnTime = config.get(RedisOptions.EXPIRE_ON_TIME); 55 | if (!StringUtils.isNullOrWhitespaceOnly(expireOnTime)) { 56 | this.expireTime = LocalTime.parse(expireOnTime); 57 | } 58 | } 59 | 60 | @Override 61 | public RedisCommandDescription getCommandDescription() { 62 | return new RedisCommandDescription( 63 | redisCommand, ttl, expireTime, setIfAbsent, ttlKeyNotAbsent, auditLog); 64 | } 65 | 66 | @Override 67 | public String getKeyFromData(RowData rowData, LogicalType logicalType, Integer keyIndex) { 68 | return RedisRowConverter.rowDataToString(logicalType, rowData, keyIndex); 69 | } 70 | 71 | @Override 72 | public String getValueFromData(RowData rowData, LogicalType logicalType, Integer valueIndex) { 73 | return RedisRowConverter.rowDataToString(logicalType, rowData, valueIndex); 74 | } 75 | 76 | @Override 77 | public String getFieldFromData(RowData rowData, LogicalType logicalType, Integer fieldIndex) { 78 | return RedisRowConverter.rowDataToString(logicalType, rowData, fieldIndex); 79 | } 80 | 81 | public RedisCommand getRedisCommand() { 82 | return redisCommand; 83 | } 84 | 85 | @Override 86 | public boolean equals(Object obj) { 87 | RedisCommand redisCommand = ((RowRedisSinkMapper) obj).redisCommand; 88 | return this.redisCommand == redisCommand; 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/table/LimitedSinkTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.table.base.TestRedisConfigBase; 24 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 25 | import org.junit.jupiter.api.Test; 26 | import org.junit.platform.commons.util.Preconditions; 27 | 28 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_COMMAND; 29 | 30 | /** 31 | * @Author: Jeff Zou @Date: 2022/9/27 15:08 32 | */ 33 | public class LimitedSinkTest extends TestRedisConfigBase { 34 | 35 | @Test 36 | public void testLimitedSink() throws Exception { 37 | singleRedisCommands.del("sink_limit_test"); 38 | final int ttl = 60000; 39 | String sink = 40 | "create table sink_redis(key_name varchar, user_name VARCHAR, passport varchar) with ( 'connector'='redis', " 41 | + "'host'='" 42 | + REDIS_HOST 43 | + "','port'='" 44 | + REDIS_PORT 45 | + "', 'redis-mode'='single','password'='" 46 | + REDIS_PASSWORD 47 | + "','" 48 | + REDIS_COMMAND 49 | + "'='" 50 | + RedisCommand.HSET 51 | + "', 'sink.limit'='true', 'sink.limit.max-online'='" 52 | + ttl 53 | + "','sink.limit.max-num'='10')"; 54 | 55 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 56 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 57 | tEnv.executeSql(sink); 58 | 59 | String source = 60 | "create table source_table (user_name VARCHAR, passport varchar) with ('connector'= 'datagen','rows-per-second'='1'," 61 | + " 'fields.user_name.kind'='sequence', 'fields.user_name.start'='0', 'fields.user_name.end'='100'," 62 | + " 'fields.passport.kind'='sequence', 'fields.passport.start'='0', 'fields.passport.end'='100')"; 63 | tEnv.executeSql(source); 64 | 65 | try { 66 | tEnv.executeSql( 67 | "insert into sink_redis select 'sink_limit_test', user_name, passport from source_table ") 68 | .getJobClient() 69 | .get() 70 | .getJobExecutionResult() 71 | .get(); 72 | } catch (Exception e) { 73 | } 74 | 75 | Preconditions.condition(singleRedisCommands.hget("sink_limit_test", "0").equals("0"), ""); 76 | Preconditions.condition(singleRedisCommands.hget("sink_limit_test", "51") == null, ""); 77 | 78 | Thread.sleep(ttl + 10000); 79 | Preconditions.condition(singleRedisCommands.hget("sink_limit_test", "0") == null, ""); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/table/ClusterSQLExample.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.table.api.EnvironmentSettings; 24 | import org.apache.flink.table.api.TableResult; 25 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 26 | 27 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_COMMAND; 28 | 29 | /** Created by jeff.zou on 2020/9/10. */ 30 | public class ClusterSQLExample { 31 | 32 | public static void main(String[] args) throws Exception { 33 | SentinelSQLExample(); 34 | clusterSQLExample(); 35 | } 36 | 37 | public static void SentinelSQLExample() throws Exception { 38 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 39 | 40 | EnvironmentSettings environmentSettings = 41 | EnvironmentSettings.newInstance().inStreamingMode().build(); 42 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, environmentSettings); 43 | 44 | String ddl = 45 | "create table sink_redis(username VARCHAR, passport time(3)) with ( 'connector'='redis', " 46 | + "'master.name'='mymaster','sentinels.info'='10.11.0.1:26379,10.11.0.2:26379,10.11.0.3:26379', 'redis-mode'='sentinel'" 47 | + ",'password'='abc123','sentinels.password'='abc123','" 48 | + REDIS_COMMAND 49 | + "'='" 50 | + RedisCommand.SET 51 | + "')"; 52 | 53 | tEnv.executeSql(ddl); 54 | String sql = 55 | " insert into sink_redis select * from (values ('test_time', time '04:04:00'))"; 56 | TableResult tableResult = tEnv.executeSql(sql); 57 | tableResult.getJobClient().get().getJobExecutionResult().get(); 58 | } 59 | 60 | public static void clusterSQLExample() throws Exception { 61 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 62 | 63 | EnvironmentSettings environmentSettings = 64 | EnvironmentSettings.newInstance().inStreamingMode().build(); 65 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, environmentSettings); 66 | 67 | String ddl = 68 | "create table sink_redis(username VARCHAR, passport time(3)) with ( 'connector'='redis', " 69 | + "'cluster-nodes'='10.11.0.1:7000,10.11.0.1:7001,10.11.0.1:8000,10.11.0.1:8001,10.11.0.1:9000,10.11.0.1:9001', " 70 | + " 'redis-mode'='cluster' ,'password'='abc123','" 71 | + REDIS_COMMAND 72 | + "'='" 73 | + RedisCommand.SET 74 | + "')"; 75 | 76 | tEnv.executeSql(ddl); 77 | String sql = 78 | " insert into sink_redis select * from (values ('test_time', time '04:04:00'))"; 79 | TableResult tableResult = tEnv.executeSql(sql); 80 | tableResult.getJobClient().get().getJobExecutionResult().get(); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /style/spotless-formatter.xml: 
-------------------------------------------------------------------------------- -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/table/RedisDynamicTableSink.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.configuration.ReadableConfig; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase; 24 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigHandler; 25 | import org.apache.flink.streaming.connectors.redis.config.RedisOptions; 26 | import org.apache.flink.streaming.connectors.redis.hanlder.RedisHandlerServices; 27 | import org.apache.flink.streaming.connectors.redis.mapper.RedisSinkMapper; 28 | import org.apache.flink.streaming.connectors.redis.mapper.RowRedisSinkMapper; 29 | import org.apache.flink.table.catalog.ResolvedSchema; 30 | import org.apache.flink.table.connector.ChangelogMode; 31 | import org.apache.flink.table.connector.sink.DynamicTableSink; 32 | import org.apache.flink.table.connector.sink.SinkFunctionProvider; 33 | import org.apache.flink.types.RowKind; 34 | import org.apache.flink.util.Preconditions; 35 | 36 | import java.util.Map; 37 | 38 | /** Created by jeff.zou on 2020/9/10.
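 * Dynamic table sink for Redis: it resolves the mode specific {@link FlinkConfigBase} through RedisHandlerServices, maps rows with a {@link RowRedisSinkMapper}, and provides a RedisSinkFunction, or a RedisLimitedSinkFunction when 'sink.limit' is enabled.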
*/ 39 | public class RedisDynamicTableSink implements DynamicTableSink { 40 | 41 | private FlinkConfigBase flinkConfigBase; 42 | private RedisSinkMapper redisMapper; 43 | private Map properties; 44 | private ReadableConfig config; 45 | private Integer sinkParallelism; 46 | private ResolvedSchema resolvedSchema; 47 | 48 | private final RedisCommand redisCommand; 49 | 50 | public RedisDynamicTableSink( 51 | RedisCommand redisCommand, 52 | Map properties, 53 | ResolvedSchema resolvedSchema, 54 | ReadableConfig config) { 55 | this.redisCommand = redisCommand; 56 | this.properties = properties; 57 | Preconditions.checkNotNull(properties, "properties should not be null"); 58 | this.config = config; 59 | this.sinkParallelism = config.get(RedisOptions.SINK_PARALLELISM); 60 | redisMapper = new RowRedisSinkMapper(redisCommand, config); 61 | flinkConfigBase = 62 | RedisHandlerServices.findRedisHandler(FlinkConfigHandler.class, properties) 63 | .createFlinkConfig(config); 64 | this.resolvedSchema = resolvedSchema; 65 | } 66 | 67 | @Override 68 | public ChangelogMode getChangelogMode(ChangelogMode requestedMode) { 69 | return ChangelogMode.newBuilder() 70 | .addContainedKind(RowKind.INSERT) 71 | .addContainedKind(RowKind.DELETE) 72 | .addContainedKind(RowKind.UPDATE_AFTER) 73 | .build(); 74 | } 75 | 76 | @Override 77 | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) { 78 | RedisSinkFunction redisSinkFunction = 79 | config.get(RedisOptions.SINK_LIMIT) 80 | ? new RedisLimitedSinkFunction( 81 | flinkConfigBase, redisMapper, resolvedSchema, config) 82 | : new RedisSinkFunction( 83 | flinkConfigBase, redisMapper, resolvedSchema, config); 84 | 85 | return SinkFunctionProvider.of(redisSinkFunction, sinkParallelism); 86 | } 87 | 88 | @Override 89 | public DynamicTableSink copy() { 90 | return new RedisDynamicTableSink(redisCommand, properties, resolvedSchema, config); 91 | } 92 | 93 | @Override 94 | public String asSummaryString() { 95 | return "REDIS"; 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/table/SQLExpireTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.table.base.TestRedisConfigBase; 24 | import org.apache.flink.table.api.TableResult; 25 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 26 | import org.junit.jupiter.api.Test; 27 | import org.junit.platform.commons.util.Preconditions; 28 | 29 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_COMMAND; 30 | 31 | /** Created by jeff.zou on 2020/9/10. */ 32 | public class SQLExpireTest extends TestRedisConfigBase { 33 | 34 | @Test 35 | public void testSinkValueWithExpire() throws Exception { 36 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 37 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 38 | singleRedisCommands.del("1"); 39 | String ddl = 40 | "create table source_table(uid VARCHAR) with ('connector'='datagen'," 41 | + "'rows-per-second'='1', " 42 | + "'fields.uid.kind'='sequence', 'fields.uid.start'='1', 'fields.uid.end'='1')"; 43 | tEnv.executeSql(ddl); 44 | 45 | String sink = 46 | "create table sink_redis(name varchar, level varchar, age varchar) with ( " 47 | + singleWith() 48 | + "'ttl'='10', '" 49 | + REDIS_COMMAND 50 | + "'='" 51 | + RedisCommand.HSET 52 | + "' )"; 53 | tEnv.executeSql(sink); 54 | String sql = " insert into sink_redis select '1', '1', uid from source_table"; 55 | 56 | TableResult tableResult = tEnv.executeSql(sql); 57 | tableResult.getJobClient().get().getJobExecutionResult().get(); 58 | System.out.println(sql); 59 | Preconditions.condition(singleRedisCommands.exists("1") == 1, ""); 60 | Thread.sleep(10 * 1000); 61 | Preconditions.condition(singleRedisCommands.exists("1") == 0, ""); 62 | } 63 | 64 | @Test 65 | public void testSinkValueWithExpireOnKeyPresent() throws Exception { 66 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 67 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 68 | singleRedisCommands.del("test_hash"); 69 | String ddl = 70 | "create table source_table(uid VARCHAR) with ('connector'='datagen'," 71 | + "'rows-per-second'='1', " 72 | + "'fields.uid.kind'='sequence', 'fields.uid.start'='1', 'fields.uid.end'='1')"; 73 | tEnv.executeSql(ddl); 74 | 75 | String dim = 76 | "create table sink_redis(name varchar, level varchar, age varchar) with ( " 77 | + singleWith() 78 | + " 'ttl'='8', 'ttl.key.not.absent'='true', '" 79 | + REDIS_COMMAND 80 | + "'='" 81 | + RedisCommand.HSET 82 | + "' )"; 83 | 84 | tEnv.executeSql(dim); 85 | String sql = " insert into sink_redis select 'test_hash', '1', uid from source_table"; 86 | TableResult tableResult = tEnv.executeSql(sql); 87 | tableResult.getJobClient().get().getJobExecutionResult().get(); 88 | Preconditions.condition(singleRedisCommands.exists("test_hash") == 1, ""); 89 | Thread.sleep(10 * 1000); 90 | Preconditions.condition(singleRedisCommands.exists("test_hash") == 0, ""); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/datastream/DataStreamTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor 
license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.datastream; 20 | 21 | import org.apache.flink.configuration.Configuration; 22 | import org.apache.flink.streaming.api.datastream.DataStream; 23 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 24 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 25 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase; 26 | import org.apache.flink.streaming.connectors.redis.config.FlinkSingleConfig; 27 | import org.apache.flink.streaming.connectors.redis.mapper.RedisSinkMapper; 28 | import org.apache.flink.streaming.connectors.redis.mapper.RowRedisSinkMapper; 29 | import org.apache.flink.streaming.connectors.redis.table.RedisSinkFunction; 30 | import org.apache.flink.streaming.connectors.redis.table.base.TestRedisConfigBase; 31 | import org.apache.flink.table.api.DataTypes; 32 | import org.apache.flink.table.catalog.ResolvedSchema; 33 | import org.apache.flink.table.data.StringData; 34 | import org.apache.flink.table.data.binary.BinaryRowData; 35 | import org.apache.flink.table.data.writer.BinaryRowWriter; 36 | import org.apache.flink.table.types.DataType; 37 | import org.junit.jupiter.api.Test; 38 | import org.junit.platform.commons.util.Preconditions; 39 | 40 | import java.util.Arrays; 41 | import java.util.List; 42 | 43 | import static org.apache.flink.streaming.connectors.redis.config.RedisOptions.TTL; 44 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_COMMAND; 45 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_MODE; 46 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_SINGLE; 47 | 48 | /** Created by jeff.zou on 2021/2/26. 
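 * DataStream API example: a hand built BinaryRowData is written to Redis through RedisSinkFunction with the HSET command and a 10 second TTL, then the hash field is read back for verification.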
*/ 49 | public class DataStreamTest extends TestRedisConfigBase { 50 | 51 | @Test 52 | public void testDateStreamInsert() throws Exception { 53 | 54 | singleRedisCommands.del("tom"); 55 | Configuration configuration = new Configuration(); 56 | configuration.setString(REDIS_MODE, REDIS_SINGLE); 57 | configuration.setString(REDIS_COMMAND, RedisCommand.HSET.name()); 58 | configuration.setInteger(TTL, 10); 59 | 60 | RedisSinkMapper redisMapper = new RowRedisSinkMapper(RedisCommand.HSET, configuration); 61 | 62 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 63 | 64 | BinaryRowData binaryRowData = new BinaryRowData(3); 65 | BinaryRowWriter binaryRowWriter = new BinaryRowWriter(binaryRowData); 66 | binaryRowWriter.writeString(0, StringData.fromString("tom")); 67 | binaryRowWriter.writeString(1, StringData.fromString("math")); 68 | binaryRowWriter.writeString(2, StringData.fromString("152")); 69 | 70 | DataStream dataStream = env.fromElements(binaryRowData, binaryRowData); 71 | 72 | List columnNames = Arrays.asList("name", "subject", "scope"); 73 | List columnDataTypes = 74 | Arrays.asList(DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING()); 75 | ResolvedSchema resolvedSchema = ResolvedSchema.physical(columnNames, columnDataTypes); 76 | 77 | FlinkConfigBase conf = 78 | new FlinkSingleConfig.Builder() 79 | .setHost(REDIS_HOST) 80 | .setPort(REDIS_PORT) 81 | .setPassword(REDIS_PASSWORD) 82 | .build(); 83 | 84 | RedisSinkFunction redisSinkFunction = 85 | new RedisSinkFunction<>(conf, redisMapper, resolvedSchema, configuration); 86 | 87 | dataStream.addSink(redisSinkFunction).setParallelism(1); 88 | env.execute("RedisSinkTest"); 89 | 90 | Preconditions.condition(singleRedisCommands.hget("tom", "math").equals("152"), ""); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkSingleConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import java.util.Objects; 22 | 23 | /** Configuration for pool. */ 24 | public class FlinkSingleConfig extends FlinkConfigBase { 25 | 26 | private static final long serialVersionUID = 1L; 27 | 28 | private final String host; 29 | private final int port; 30 | private final int database; 31 | 32 | /** 33 | * pool configuration. The host is mandatory, and when host is not set, it throws 34 | * NullPointerException. 
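 * Instances are created through the {@link Builder}; a minimal sketch with placeholder values: {@code new FlinkSingleConfig.Builder().setHost("127.0.0.1").setPort(6379).setTimeout(2000).build()}.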
35 | * 36 | * @param host hostname or IP 37 | * @param port port, default value is 6379 38 | * @param connectionTimeout socket / connection timeout, default value is 2000 milli second 39 | * @param database database index 40 | * @throws NullPointerException if parameter {@code host} is {@code null} 41 | */ 42 | private FlinkSingleConfig( 43 | String host, 44 | int port, 45 | int connectionTimeout, 46 | String password, 47 | int database, 48 | LettuceConfig lettuceConfig) { 49 | super(connectionTimeout, password, lettuceConfig); 50 | Objects.requireNonNull(host, "Host information should be presented"); 51 | this.host = host; 52 | this.port = port; 53 | this.database = database; 54 | } 55 | 56 | /** 57 | * Returns host. 58 | * 59 | * @return hostname or IP 60 | */ 61 | public String getHost() { 62 | return host; 63 | } 64 | 65 | /** 66 | * Returns port. 67 | * 68 | * @return port 69 | */ 70 | public int getPort() { 71 | return port; 72 | } 73 | 74 | /** 75 | * Returns database index. 76 | * 77 | * @return database index 78 | */ 79 | public int getDatabase() { 80 | return database; 81 | } 82 | 83 | /** Builder for initializing {@link FlinkSingleConfig}. */ 84 | public static class Builder { 85 | 86 | private String host; 87 | private int port; 88 | private int timeout; 89 | private int database; 90 | private String password; 91 | 92 | private LettuceConfig lettuceConfig; 93 | 94 | /** 95 | * Sets host. 96 | * 97 | * @param host host 98 | * @return Builder itself 99 | */ 100 | public Builder setHost(String host) { 101 | this.host = host; 102 | return this; 103 | } 104 | 105 | /** 106 | * Sets port. 107 | * 108 | * @param port port, default value is 6379 109 | * @return Builder itself 110 | */ 111 | public Builder setPort(int port) { 112 | this.port = port; 113 | return this; 114 | } 115 | 116 | /** 117 | * Sets timeout. 118 | * 119 | * @param timeout timeout, default value is 2000 120 | * @return Builder itself 121 | */ 122 | public Builder setTimeout(int timeout) { 123 | this.timeout = timeout; 124 | return this; 125 | } 126 | 127 | /** 128 | * Sets database index. 129 | * 130 | * @param database database index, default value is 0 131 | * @return Builder itself 132 | */ 133 | public Builder setDatabase(int database) { 134 | this.database = database; 135 | return this; 136 | } 137 | 138 | /** 139 | * Sets password. 140 | * 141 | * @param password password, if any 142 | * @return Builder itself 143 | */ 144 | public Builder setPassword(String password) { 145 | this.password = password; 146 | return this; 147 | } 148 | 149 | public Builder setLettuceConfig(LettuceConfig lettuceConfig) { 150 | this.lettuceConfig = lettuceConfig; 151 | return this; 152 | } 153 | 154 | /** 155 | * Builds PoolConfig. 156 | * 157 | * @return PoolConfig 158 | */ 159 | public FlinkSingleConfig build() { 160 | return new FlinkSingleConfig(host, port, timeout, password, database, lettuceConfig); 161 | } 162 | } 163 | } 164 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/table/RedisDynamicTableSource.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.configuration.ReadableConfig; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase; 24 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigHandler; 25 | import org.apache.flink.streaming.connectors.redis.config.RedisJoinConfig; 26 | import org.apache.flink.streaming.connectors.redis.config.RedisOptions; 27 | import org.apache.flink.streaming.connectors.redis.hanlder.RedisHandlerServices; 28 | import org.apache.flink.streaming.connectors.redis.mapper.RedisMapper; 29 | import org.apache.flink.streaming.connectors.redis.mapper.RowRedisQueryMapper; 30 | import org.apache.flink.table.catalog.ResolvedSchema; 31 | import org.apache.flink.table.connector.ChangelogMode; 32 | import org.apache.flink.table.connector.source.AsyncTableFunctionProvider; 33 | import org.apache.flink.table.connector.source.DynamicTableSource; 34 | import org.apache.flink.table.connector.source.LookupTableSource; 35 | import org.apache.flink.table.connector.source.ScanTableSource; 36 | import org.apache.flink.table.connector.source.SourceFunctionProvider; 37 | import org.apache.flink.util.Preconditions; 38 | 39 | import java.util.Map; 40 | 41 | /** redis dynamic table source. 
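 * Provides a scan source via SourceFunctionProvider (bounded or unbounded depending on the command's boundedness) and an async lookup function for temporal joins via AsyncTableFunctionProvider; lookup caching is configured through RedisJoinConfig.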
@Author: jeff.zou @Date: 2022/3/7.13:41 */ 42 | public class RedisDynamicTableSource implements ScanTableSource, LookupTableSource { 43 | 44 | private FlinkConfigBase flinkConfigBase; 45 | private Map properties; 46 | private ResolvedSchema resolvedSchema; 47 | private ReadableConfig config; 48 | private RedisMapper redisMapper; 49 | private RedisJoinConfig redisJoinConfig; 50 | 51 | private RedisCommand redisCommand; 52 | 53 | public RedisDynamicTableSource( 54 | RedisCommand redisCommand, 55 | Map properties, 56 | ResolvedSchema resolvedSchema, 57 | ReadableConfig config) { 58 | this.redisCommand = redisCommand; 59 | this.properties = properties; 60 | Preconditions.checkNotNull(properties, "properties should not be null"); 61 | this.resolvedSchema = resolvedSchema; 62 | Preconditions.checkNotNull(resolvedSchema, "resolvedSchema should not be null"); 63 | this.config = config; 64 | redisMapper = new RowRedisQueryMapper(redisCommand); 65 | this.properties = properties; 66 | this.resolvedSchema = resolvedSchema; 67 | this.config = config; 68 | flinkConfigBase = 69 | RedisHandlerServices.findRedisHandler(FlinkConfigHandler.class, properties) 70 | .createFlinkConfig(config); 71 | redisJoinConfig = 72 | new RedisJoinConfig.Builder() 73 | .setCacheTTL(config.get(RedisOptions.LOOKUP_CHCHE_TTL)) 74 | .setCacheMaxSize(config.get(RedisOptions.LOOKUP_CACHE_MAX_ROWS)) 75 | .setLoadAll(config.get(RedisOptions.LOOKUP_CACHE_LOAD_ALL)) 76 | .build(); 77 | } 78 | 79 | @Override 80 | public ChangelogMode getChangelogMode() { 81 | return ChangelogMode.all(); 82 | } 83 | 84 | @Override 85 | public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) { 86 | RedisSourceFunction redisSourceFunction = 87 | new RedisSourceFunction<>(redisMapper, config, flinkConfigBase, resolvedSchema); 88 | return SourceFunctionProvider.of( 89 | redisSourceFunction, this.redisCommand.isCommandBoundedness()); 90 | } 91 | 92 | @Override 93 | public LookupRuntimeProvider getLookupRuntimeProvider(LookupContext context) { 94 | return AsyncTableFunctionProvider.of( 95 | new RedisLookupFunction( 96 | flinkConfigBase, redisMapper, redisJoinConfig, resolvedSchema, config)); 97 | } 98 | 99 | @Override 100 | public DynamicTableSource copy() { 101 | return new RedisDynamicTableSource(redisCommand, properties, resolvedSchema, config); 102 | } 103 | 104 | @Override 105 | public String asSummaryString() { 106 | return "REDIS"; 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/table/RedisLimitedSinkFunction.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.configuration.Configuration; 22 | import org.apache.flink.configuration.ReadableConfig; 23 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase; 24 | import org.apache.flink.streaming.connectors.redis.config.RedisOptions; 25 | import org.apache.flink.streaming.connectors.redis.mapper.RedisSinkMapper; 26 | import org.apache.flink.table.catalog.ResolvedSchema; 27 | import org.apache.flink.table.data.RowData; 28 | import org.apache.flink.types.RowKind; 29 | import org.apache.flink.util.Preconditions; 30 | import org.slf4j.Logger; 31 | import org.slf4j.LoggerFactory; 32 | 33 | import java.util.concurrent.TimeUnit; 34 | 35 | /** 36 | * @Author: Jeff Zou @Date: 2022/9/26 15:28 Specially used for Flink online debugging. 37 | */ 38 | public class RedisLimitedSinkFunction extends RedisSinkFunction { 39 | 40 | private static final Logger LOG = LoggerFactory.getLogger(RedisLimitedSinkFunction.class); 41 | 42 | private long maxOnline; 43 | 44 | private long startTime; 45 | 46 | private long sinkInterval; 47 | 48 | private int maxNum; 49 | 50 | private volatile int curNum; 51 | 52 | /** 53 | * Creates a new {@link RedisSinkFunction} that connects to the Redis server. 54 | * 55 | * @param flinkConfigBase The configuration of {@link FlinkConfigBase} 56 | * @param redisSinkMapper This is used to generate Redis command and key value from incoming 57 | * @param resolvedSchema 58 | */ 59 | public RedisLimitedSinkFunction( 60 | FlinkConfigBase flinkConfigBase, 61 | RedisSinkMapper redisSinkMapper, 62 | ResolvedSchema resolvedSchema, 63 | ReadableConfig config) { 64 | super(flinkConfigBase, redisSinkMapper, resolvedSchema, config); 65 | maxOnline = config.get(RedisOptions.SINK_LIMIT_MAX_ONLINE); 66 | 67 | Preconditions.checkState( 68 | maxOnline > 0 && maxOnline <= RedisOptions.SINK_LIMIT_MAX_ONLINE.defaultValue(), 69 | "the max online milliseconds must be more than 0 and less than %s seconds.", 70 | RedisOptions.SINK_LIMIT_MAX_ONLINE.defaultValue()); 71 | 72 | sinkInterval = config.get(RedisOptions.SINK_LIMIT_INTERVAL); 73 | Preconditions.checkState( 74 | sinkInterval >= RedisOptions.SINK_LIMIT_INTERVAL.defaultValue(), 75 | "the sink limit interval must be more than % millisecond", 76 | RedisOptions.SINK_LIMIT_INTERVAL.defaultValue()); 77 | 78 | maxNum = config.get(RedisOptions.SINK_LIMIT_MAX_NUM); 79 | Preconditions.checkState( 80 | maxNum > 0 && maxNum <= RedisOptions.SINK_LIMIT_MAX_NUM.defaultValue(), 81 | "the max num must be more than 0 and less than %s.", 82 | RedisOptions.SINK_LIMIT_MAX_NUM.defaultValue()); 83 | } 84 | 85 | @Override 86 | public void open(Configuration parameters) throws Exception { 87 | super.open(parameters); 88 | startTime = System.currentTimeMillis(); 89 | } 90 | 91 | @Override 92 | public void invoke(IN input, Context context) throws Exception { 93 | long remainTime = maxOnline - (System.currentTimeMillis() - startTime); 94 | if (remainTime < 0) { 95 | throw new RuntimeException( 96 | "thread id:" 97 | + Thread.currentThread().getId() 98 | + ", the debugging time has exceeded the max online time."); 99 | } 100 | 101 | RowData rowData = (RowData) input; 102 | RowKind kind = rowData.getRowKind(); 103 | if (kind == RowKind.UPDATE_BEFORE) { 104 | return; 105 | } 106 | 107 | // all keys must expire 10 seconds after online debugging end. 
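// remainTime is in milliseconds, so the TTL becomes the remaining whole seconds plus a 10 second grace period.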
108 | super.ttl = (int) remainTime / 1000 + 10; 109 | super.invoke(input, context); 110 | 111 | TimeUnit.MILLISECONDS.sleep(sinkInterval); 112 | curNum++; 113 | if (curNum > maxNum) { 114 | throw new RuntimeException( 115 | "thread id:" 116 | + Thread.currentThread().getId() 117 | + ", the number of debug results has exceeded the max num." 118 | + curNum); 119 | } 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/table/RedisResultWrapper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.streaming.connectors.redis.config.RedisValueDataStructure; 22 | import org.apache.flink.streaming.connectors.redis.converter.RedisRowConverter; 23 | import org.apache.flink.table.data.GenericRowData; 24 | import org.apache.flink.table.types.DataType; 25 | 26 | import java.util.List; 27 | 28 | import static org.apache.flink.streaming.connectors.redis.table.RedisDynamicTableFactory.CACHE_SEPERATOR; 29 | 30 | public class RedisResultWrapper { 31 | 32 | /** 33 | * create row data for string. 34 | * 35 | * @param keys 36 | * @param value 37 | */ 38 | public static GenericRowData createRowDataForString( 39 | Object[] keys, 40 | String value, 41 | RedisValueDataStructure redisValueDataStructure, 42 | List dataTypes) { 43 | if (redisValueDataStructure == RedisValueDataStructure.column) { 44 | GenericRowData genericRowData = new GenericRowData(2); 45 | genericRowData.setField( 46 | 0, 47 | RedisRowConverter.dataTypeFromString( 48 | dataTypes.get(0).getLogicalType(), String.valueOf(keys[0]))); 49 | if (value == null) { 50 | genericRowData.setField(0, null); 51 | return genericRowData; 52 | } 53 | 54 | genericRowData.setField( 55 | 1, 56 | RedisRowConverter.dataTypeFromString(dataTypes.get(1).getLogicalType(), value)); 57 | return genericRowData; 58 | } 59 | 60 | return createRowDataForRow(value, dataTypes); 61 | } 62 | 63 | /** 64 | * create row data for whole row. 
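 * The value is the whole row serialized as a single string with columns joined by CACHE_SEPERATOR; columns missing from the string are set to null.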
65 | * 66 | * @param value 67 | * @return 68 | */ 69 | public static GenericRowData createRowDataForRow(String value, List dataTypes) { 70 | GenericRowData genericRowData = new GenericRowData(dataTypes.size()); 71 | if (value == null) { 72 | return genericRowData; 73 | } 74 | 75 | String[] values = value.split(CACHE_SEPERATOR); 76 | for (int i = 0; i < dataTypes.size(); i++) { 77 | if (i < values.length) { 78 | genericRowData.setField( 79 | i, 80 | RedisRowConverter.dataTypeFromString( 81 | dataTypes.get(i).getLogicalType(), values[i])); 82 | } else { 83 | genericRowData.setField(i, null); 84 | } 85 | } 86 | return genericRowData; 87 | } 88 | 89 | /** 90 | * create row data for hash. 91 | * 92 | * @param keys 93 | * @param value 94 | */ 95 | public static GenericRowData createRowDataForHash( 96 | Object[] keys, 97 | String value, 98 | RedisValueDataStructure redisValueDataStructure, 99 | List dataTypes) { 100 | if (redisValueDataStructure == RedisValueDataStructure.column) { 101 | GenericRowData genericRowData = new GenericRowData(3); 102 | genericRowData.setField( 103 | 0, 104 | RedisRowConverter.dataTypeFromString( 105 | dataTypes.get(0).getLogicalType(), String.valueOf(keys[0]))); 106 | genericRowData.setField( 107 | 1, 108 | RedisRowConverter.dataTypeFromString( 109 | dataTypes.get(1).getLogicalType(), String.valueOf(keys[1]))); 110 | 111 | if (value == null) { 112 | return genericRowData; 113 | } 114 | genericRowData.setField( 115 | 2, 116 | RedisRowConverter.dataTypeFromString(dataTypes.get(2).getLogicalType(), value)); 117 | return genericRowData; 118 | } 119 | return createRowDataForRow(value, dataTypes); 120 | } 121 | 122 | public static GenericRowData createRowDataForSortedSet( 123 | Object[] keys, Double value, List dataTypes) { 124 | GenericRowData genericRowData = new GenericRowData(3); 125 | genericRowData.setField( 126 | 0, 127 | RedisRowConverter.dataTypeFromString( 128 | dataTypes.get(0).getLogicalType(), String.valueOf(keys[0]))); 129 | genericRowData.setField( 130 | 2, 131 | RedisRowConverter.dataTypeFromString( 132 | dataTypes.get(2).getLogicalType(), String.valueOf(keys[1]))); 133 | 134 | if (value == null) { 135 | return genericRowData; 136 | } 137 | genericRowData.setField(1, value); 138 | return genericRowData; 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/table/RedisDynamicTableFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.configuration.ConfigOption; 22 | import org.apache.flink.configuration.ReadableConfig; 23 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 24 | import org.apache.flink.streaming.connectors.redis.config.RedisOptions; 25 | import org.apache.flink.table.connector.sink.DynamicTableSink; 26 | import org.apache.flink.table.connector.source.DynamicTableSource; 27 | import org.apache.flink.table.factories.DynamicTableSinkFactory; 28 | import org.apache.flink.table.factories.DynamicTableSourceFactory; 29 | import org.apache.flink.table.factories.FactoryUtil; 30 | import org.apache.flink.util.FlinkRuntimeException; 31 | 32 | import java.util.HashSet; 33 | import java.util.Set; 34 | 35 | /** Created by jeff.zou on 2020/9/10. */ 36 | public class RedisDynamicTableFactory 37 | implements DynamicTableSinkFactory, DynamicTableSourceFactory { 38 | 39 | public static final String IDENTIFIER = "redis"; 40 | 41 | public static final String CACHE_SEPERATOR = "\01"; 42 | 43 | @Override 44 | public DynamicTableSource createDynamicTableSource(Context context) { 45 | FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context); 46 | ReadableConfig config = helper.getOptions(); 47 | helper.validate(); 48 | RedisCommand redisCommand = parseCommand(config); 49 | 50 | return new RedisDynamicTableSource( 51 | redisCommand, 52 | context.getCatalogTable().getOptions(), 53 | context.getCatalogTable().getResolvedSchema(), 54 | config); 55 | } 56 | 57 | @Override 58 | public DynamicTableSink createDynamicTableSink(Context context) { 59 | FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context); 60 | ReadableConfig config = helper.getOptions(); 61 | helper.validate(); 62 | RedisCommand redisCommand = parseCommand(config); 63 | return new RedisDynamicTableSink( 64 | redisCommand, 65 | context.getCatalogTable().getOptions(), 66 | context.getCatalogTable().getResolvedSchema(), 67 | config); 68 | } 69 | 70 | @Override 71 | public String factoryIdentifier() { 72 | return IDENTIFIER; 73 | } 74 | 75 | @Override 76 | public Set> requiredOptions() { 77 | final Set> options = new HashSet<>(); 78 | options.add(RedisOptions.COMMAND); 79 | return options; 80 | } 81 | 82 | @Override 83 | public Set> optionalOptions() { 84 | final Set> options = new HashSet<>(); 85 | options.add(RedisOptions.DATABASE); 86 | options.add(RedisOptions.HOST); 87 | options.add(RedisOptions.PORT); 88 | options.add(RedisOptions.MAXIDLE); 89 | options.add(RedisOptions.MAXTOTAL); 90 | options.add(RedisOptions.CLUSTERNODES); 91 | options.add(RedisOptions.PASSWORD); 92 | options.add(RedisOptions.TIMEOUT); 93 | options.add(RedisOptions.MINIDLE); 94 | options.add(RedisOptions.REDISMODE); 95 | options.add(RedisOptions.TTL); 96 | options.add(RedisOptions.LOOKUP_CACHE_MAX_ROWS); 97 | options.add(RedisOptions.LOOKUP_CHCHE_TTL); 98 | options.add(RedisOptions.MAX_RETRIES); 99 | options.add(RedisOptions.SINK_PARALLELISM); 100 | options.add(RedisOptions.LOOKUP_CACHE_LOAD_ALL); 101 | options.add(RedisOptions.SINK_LIMIT); 102 | options.add(RedisOptions.SINK_LIMIT_MAX_NUM); 103 | options.add(RedisOptions.SINK_LIMIT_MAX_ONLINE); 104 | options.add(RedisOptions.SINK_LIMIT_INTERVAL); 105 | options.add(RedisOptions.VALUE_DATA_STRUCTURE); 106 | options.add(RedisOptions.REDIS_MASTER_NAME); 107 | options.add(RedisOptions.SENTINELS_INFO); 108 | options.add(RedisOptions.EXPIRE_ON_TIME); 
109 | options.add(RedisOptions.SENTINELS_PASSWORD); 110 | options.add(RedisOptions.SET_IF_ABSENT); 111 | options.add(RedisOptions.TTL_KEY_NOT_ABSENT); 112 | options.add(RedisOptions.NETTY_EVENT_POOL_SIZE); 113 | options.add(RedisOptions.NETTY_IO_POOL_SIZE); 114 | options.add(RedisOptions.SCAN_KEY); 115 | options.add(RedisOptions.SCAN_ADDITION_KEY); 116 | options.add(RedisOptions.SCAN_RANGE_STOP); 117 | options.add(RedisOptions.SCAN_RANGE_START); 118 | options.add(RedisOptions.SCAN_COUNT); 119 | options.add(RedisOptions.ZREM_RANGEBY); 120 | options.add(RedisOptions.AUDIT_LOG); 121 | return options; 122 | } 123 | 124 | private RedisCommand parseCommand(ReadableConfig config) { 125 | try { 126 | return RedisCommand.valueOf(config.get(RedisOptions.COMMAND).toUpperCase()); 127 | } catch (Exception e) { 128 | throw new FlinkRuntimeException( 129 | String.format( 130 | "do not support redis command: %s", config.get(RedisOptions.COMMAND))); 131 | } 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/table/SQLLettuceLimitTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.table.base.TestRedisConfigBase; 24 | import org.apache.flink.table.api.TableResult; 25 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 26 | import org.junit.jupiter.api.Test; 27 | import org.junit.platform.commons.util.Preconditions; 28 | 29 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_COMMAND; 30 | 31 | /** Created by jeff.zou on 2020/9/10. 
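 * Exercises the Lettuce netty pool options ('io.pool.size', 'event.pool.size') on a HSET sink with 'sink.parallelism'='2', and also covers a temporal lookup join against a Redis hash dimension table.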
*/ 32 | public class SQLLettuceLimitTest extends TestRedisConfigBase { 33 | 34 | @Test 35 | public void testSinkLimitLettucePool() throws Exception { 36 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 37 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 38 | singleRedisCommands.del("1"); 39 | String ddl = 40 | "create table source_table(uid VARCHAR) with ('connector'='datagen'," 41 | + "'rows-per-second'='1', " 42 | + "'fields.uid.kind'='sequence', 'fields.uid.start'='1', 'fields.uid.end'='10')"; 43 | tEnv.executeSql(ddl); 44 | 45 | String sink = 46 | "create table sink_redis(name varchar, level varchar, age varchar) with ( " 47 | + singleWith() 48 | + "'ttl'='10', '" 49 | + REDIS_COMMAND 50 | + "'='" 51 | + RedisCommand.HSET 52 | + "','io.pool.size'='3' ,'event.pool.size'='3', 'sink.parallelism'='2')"; 53 | tEnv.executeSql(sink); 54 | String sql = " insert into sink_redis select '1', '1', uid from source_table"; 55 | 56 | TableResult tableResult = tEnv.executeSql(sql); 57 | tableResult.getJobClient().get().getJobExecutionResult().get(); 58 | Preconditions.condition(singleRedisCommands.exists("1") == 1, ""); 59 | Thread.sleep(10 * 1000); 60 | Preconditions.condition(singleRedisCommands.exists("1") == 0, ""); 61 | } 62 | 63 | @Test 64 | public void testJoinLimitLettucePool() throws Exception { 65 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 66 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 67 | singleRedisCommands.del("test_hash", "test_hash2"); 68 | singleRedisCommands.hset("test_hash", "1", "test"); 69 | singleRedisCommands.hset("test_hash", "5", "test"); 70 | String dim = 71 | "create table dim_table(name varchar, level varchar, age varchar) with ( 'connector'='redis', " 72 | + "'host'='" 73 | + REDIS_HOST 74 | + "','port'='" 75 | + REDIS_PORT 76 | + "', 'redis-mode'='single','password'='" 77 | + REDIS_PASSWORD 78 | + "','" 79 | + REDIS_COMMAND 80 | + "'='" 81 | + RedisCommand.HGET 82 | + "')"; 83 | 84 | String source = 85 | "create table source_table(username varchar, level varchar, proctime as procTime()) " 86 | + "with ('connector'='datagen', 'rows-per-second'='1', " 87 | + "'fields.username.kind'='sequence', 'fields.username.start'='1', 'fields.username.end'='9'," 88 | + "'fields.level.kind'='sequence', 'fields.level.start'='1', 'fields.level.end'='9'" 89 | + ")"; 90 | 91 | String sink = 92 | "create table sink_table(username varchar, level varchar,age varchar) with ( 'connector'='redis', " 93 | + "'host'='" 94 | + REDIS_HOST 95 | + "','port'='" 96 | + REDIS_PORT 97 | + "', 'redis-mode'='single','password'='" 98 | + REDIS_PASSWORD 99 | + "','" 100 | + REDIS_COMMAND 101 | + "'='" 102 | + RedisCommand.HSET 103 | + "' )"; 104 | tEnv.executeSql(source); 105 | tEnv.executeSql(dim); 106 | tEnv.executeSql(sink); 107 | 108 | String sql = 109 | " insert into sink_table " 110 | + " select 'test_hash2', s.level, d.age from source_table s" 111 | + " left join dim_table for system_time as of s.proctime as d " 112 | + " on d.name = 'test_hash' and d.level = s.level"; 113 | TableResult tableResult = tEnv.executeSql(sql); 114 | tableResult.getJobClient().get().getJobExecutionResult().get(); 115 | System.out.println(sql); 116 | 117 | Preconditions.condition(singleRedisCommands.hget("test_hash2", "1").equals("test"), ""); 118 | Preconditions.condition(singleRedisCommands.hget("test_hash2", "2") == "", ""); 119 | Preconditions.condition(singleRedisCommands.hget("test_hash2", 
"5").equals("test"), ""); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/FlinkSentinelConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import org.slf4j.Logger; 22 | import org.slf4j.LoggerFactory; 23 | 24 | import java.util.Objects; 25 | 26 | /** Configuration for Sentinel pool. */ 27 | public class FlinkSentinelConfig extends FlinkConfigBase { 28 | 29 | private static final long serialVersionUID = 1L; 30 | 31 | private static final Logger LOG = LoggerFactory.getLogger(FlinkSentinelConfig.class); 32 | 33 | private final String sentinelsInfo; 34 | private final int soTimeout; 35 | private final int database; 36 | private final String masterName; 37 | 38 | private final String sentinelsPassword; 39 | 40 | /** 41 | * Sentinels config. The master name and sentinels are mandatory, and when you didn't set these, 42 | * it throws NullPointerException. 43 | * 44 | * @param masterName master name of the replica set 45 | * @param sentinelsInfo set of sentinel hosts 46 | * @param connectionTimeout timeout connection timeout 47 | * @param soTimeout timeout socket timeout 48 | * @param database database database index 49 | * @throws NullPointerException if {@code masterName} or {@code sentinels} is {@code null} 50 | * @throws IllegalArgumentException if {@code sentinels} are empty 51 | */ 52 | private FlinkSentinelConfig( 53 | String masterName, 54 | String sentinelsInfo, 55 | int connectionTimeout, 56 | int soTimeout, 57 | int database, 58 | String password, 59 | String sentinelsPassword, 60 | LettuceConfig lettuceConfig) { 61 | super(connectionTimeout, password, lettuceConfig); 62 | Objects.requireNonNull(masterName, "Master name should be presented"); 63 | Objects.requireNonNull(sentinelsInfo, "Sentinels information should be presented"); 64 | this.masterName = masterName; 65 | this.sentinelsInfo = sentinelsInfo; 66 | this.soTimeout = soTimeout; 67 | this.database = database; 68 | this.sentinelsPassword = sentinelsPassword; 69 | } 70 | 71 | /** 72 | * Returns master name of the replica set. 73 | * 74 | * @return master name of the replica set. 75 | */ 76 | public String getMasterName() { 77 | return masterName; 78 | } 79 | 80 | public String getSentinelsInfo() { 81 | return sentinelsInfo; 82 | } 83 | 84 | /** 85 | * Returns socket timeout. 86 | * 87 | * @return socket timeout 88 | */ 89 | public int getSoTimeout() { 90 | return soTimeout; 91 | } 92 | 93 | /** 94 | * Returns database index. 
95 | * 96 | * @return database index 97 | */ 98 | public int getDatabase() { 99 | return database; 100 | } 101 | 102 | public String getSentinelsPassword() { 103 | return sentinelsPassword; 104 | } 105 | 106 | /** Builder for initializing {@link FlinkSentinelConfig}. */ 107 | public static class Builder { 108 | 109 | private String masterName; 110 | private String sentinelsInfo; 111 | private int connectionTimeout; 112 | private int soTimeout; 113 | private int database; 114 | private String password; 115 | private String sentinelsPassword; 116 | 117 | private LettuceConfig lettuceConfig; 118 | 119 | /** 120 | * Sets master name of the replica set. 121 | * 122 | * @param masterName master name of the replica set 123 | * @return Builder itself 124 | */ 125 | public Builder setMasterName(String masterName) { 126 | this.masterName = masterName; 127 | return this; 128 | } 129 | 130 | public Builder setSentinelsInfo(String sentinelsInfo) { 131 | this.sentinelsInfo = sentinelsInfo; 132 | return this; 133 | } 134 | 135 | /** 136 | * Sets connection timeout. 137 | * 138 | * @param connectionTimeout connection timeout, default value is 2000 139 | * @return Builder itself 140 | */ 141 | public Builder setConnectionTimeout(int connectionTimeout) { 142 | this.connectionTimeout = connectionTimeout; 143 | return this; 144 | } 145 | 146 | /** 147 | * Sets socket timeout. 148 | * 149 | * @param soTimeout socket timeout, default value is 2000 150 | * @return Builder itself 151 | */ 152 | public Builder setSoTimeout(int soTimeout) { 153 | this.soTimeout = soTimeout; 154 | return this; 155 | } 156 | 157 | /** 158 | * Sets database index. 159 | * 160 | * @param database database index, default value is 0 161 | * @return Builder itself 162 | */ 163 | public Builder setDatabase(int database) { 164 | this.database = database; 165 | return this; 166 | } 167 | 168 | public Builder setPassword(String password) { 169 | this.password = password; 170 | return this; 171 | } 172 | 173 | public Builder setSentinelsPassword(String sentinelsPassword) { 174 | this.sentinelsPassword = sentinelsPassword; 175 | return this; 176 | } 177 | 178 | public Builder setLettuceConfig(LettuceConfig lettuceConfig) { 179 | this.lettuceConfig = lettuceConfig; 180 | return this; 181 | } 182 | 183 | /** 184 | * Builds SentinelConfig. 185 | * 186 | * @return SentinelConfig 187 | */ 188 | public FlinkSentinelConfig build() { 189 | return new FlinkSentinelConfig( 190 | masterName, 191 | sentinelsInfo, 192 | connectionTimeout, 193 | soTimeout, 194 | database, 195 | password, 196 | sentinelsPassword, 197 | lettuceConfig); 198 | } 199 | } 200 | } 201 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/hanlder/RedisHandlerServices.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
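To make the builder above easier to follow, here is a minimal usage sketch; the master name, sentinel addresses and passwords are placeholder values invented for the example, not taken from the project.

import org.apache.flink.streaming.connectors.redis.config.FlinkSentinelConfig;

public class SentinelConfigSketch {
    public static void main(String[] args) {
        // Placeholder connection values; a real job supplies its own sentinels and credentials.
        FlinkSentinelConfig sentinelConfig =
                new FlinkSentinelConfig.Builder()
                        .setMasterName("mymaster")
                        .setSentinelsInfo("10.0.0.1:26379,10.0.0.2:26379")
                        .setConnectionTimeout(2000)
                        .setSoTimeout(2000)
                        .setDatabase(0)
                        .setPassword("redis-password")
                        .setSentinelsPassword("sentinel-password")
                        .build();
        System.out.println(sentinelConfig.getMasterName() + " / " + sentinelConfig.getSentinelsInfo());
    }
}

The build() call forwards these values to the private constructor, which rejects a missing master name or sentinel list with a NullPointerException.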
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.hanlder; 20 | 21 | import org.apache.flink.table.api.TableException; 22 | import org.apache.flink.util.Preconditions; 23 | import org.slf4j.Logger; 24 | import org.slf4j.LoggerFactory; 25 | 26 | import java.util.HashMap; 27 | import java.util.LinkedList; 28 | import java.util.List; 29 | import java.util.Map; 30 | import java.util.Optional; 31 | import java.util.ServiceConfigurationError; 32 | import java.util.ServiceLoader; 33 | import java.util.stream.Collectors; 34 | 35 | /** 36 | * Unified class to search for a {@link RedisHandler} of provided type and properties. for find 37 | * correct redis handler. 38 | * 39 | * @param redis handler type. 40 | */ 41 | public class RedisHandlerServices { 42 | 43 | private static final ServiceLoader defaultLoader = 44 | ServiceLoader.load(RedisHandler.class); 45 | private static final Logger LOG = LoggerFactory.getLogger(RedisHandlerServices.class); 46 | 47 | /** 48 | * use specified class and properties to find redis handler. 49 | * 50 | * @param redisHanlderClass specified redis handler class. 51 | * @param meta properties to search redis handler 52 | * @param 53 | * @return 54 | */ 55 | public static T findRedisHandler( 56 | Class redisHanlderClass, Map meta) { 57 | Preconditions.checkNotNull(meta); 58 | return findSingRedisHandler(redisHanlderClass, meta, Optional.empty()); 59 | } 60 | 61 | /** 62 | * use specified class and properties and class loader to find redis handler. 63 | * 64 | * @param redisHanlderClass specified redis handler class. 65 | * @param meta properties to search redis handler 66 | * @param classLoader class loader to load redis handler class 67 | * @param redis handler 68 | * @return matched redis handler 69 | */ 70 | private static T findSingRedisHandler( 71 | Class redisHanlderClass, 72 | Map meta, 73 | Optional classLoader) { 74 | 75 | List redisHandlers = discoverRedisHanlder(classLoader); 76 | List filtered = filter(redisHandlers, redisHanlderClass, meta); 77 | 78 | return filtered.get(0); 79 | } 80 | 81 | /** Filters found redis by factory class and with matching context. */ 82 | private static List filter( 83 | List redis, Class redisClass, Map meta) { 84 | 85 | Preconditions.checkNotNull(redisClass); 86 | Preconditions.checkNotNull(meta); 87 | 88 | List redisFactories = filterByFactoryClass(redisClass, redis); 89 | 90 | List contextFactories = filterByContext(meta, redisFactories); 91 | return contextFactories; 92 | } 93 | 94 | /** 95 | * Searches for redis using Java service providers. 
96 | * 97 | * @return all redis in the classpath 98 | */ 99 | private static List discoverRedisHanlder(Optional classLoader) { 100 | try { 101 | List result = new LinkedList<>(); 102 | if (classLoader.isPresent()) { 103 | ServiceLoader.load(RedisHandler.class, classLoader.get()) 104 | .iterator() 105 | .forEachRemaining(result::add); 106 | } else { 107 | defaultLoader.iterator().forEachRemaining(result::add); 108 | } 109 | return result; 110 | } catch (ServiceConfigurationError e) { 111 | LOG.error("Could not load service provider for redis handler.", e); 112 | throw new TableException("Could not load service provider for redis handler.", e); 113 | } 114 | } 115 | 116 | /** Filters factories with matching context by factory class. */ 117 | @SuppressWarnings("unchecked") 118 | private static List filterByFactoryClass(Class redisClass, List redis) { 119 | 120 | List redisList = 121 | redis.stream() 122 | .filter(p -> redisClass.isAssignableFrom(p.getClass())) 123 | .collect(Collectors.toList()); 124 | 125 | if (redisList.isEmpty()) { 126 | throw new RuntimeException( 127 | String.format( 128 | "No redis hanlder implements '%s'.", redisClass.getCanonicalName())); 129 | } 130 | 131 | return (List) redisList; 132 | } 133 | 134 | /** 135 | * Filters for factories with matching context. 136 | * 137 | * @return all matching factories 138 | */ 139 | private static List filterByContext( 140 | Map meta, List redisList) { 141 | 142 | List matchingredis = 143 | redisList.stream() 144 | .filter( 145 | factory -> { 146 | Map requestedContext = 147 | normalizeContext(factory); 148 | 149 | Map plainContext = 150 | new HashMap<>(requestedContext); 151 | 152 | // check if required context is met 153 | return plainContext.keySet().stream() 154 | .allMatch( 155 | e -> meta.containsKey(e) 156 | && meta.get(e) 157 | .equals( 158 | plainContext 159 | .get( 160 | e))); 161 | }) 162 | .collect(Collectors.toList()); 163 | 164 | if (matchingredis.isEmpty()) { 165 | throw new RuntimeException("no match redis"); 166 | } 167 | 168 | return matchingredis; 169 | } 170 | 171 | /** Prepares the properties of a context to be used for match operations. */ 172 | private static Map normalizeContext(RedisHandler redis) { 173 | Map requiredContext = redis.requiredContext(); 174 | if (requiredContext == null) { 175 | throw new RuntimeException( 176 | String.format( 177 | "Required context of redis '%s' must not be null.", 178 | redis.getClass().getName())); 179 | } 180 | return requiredContext.keySet().stream() 181 | .collect(Collectors.toMap(String::toLowerCase, requiredContext::get)); 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/command/RedisCommand.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
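Since discoverRedisHanlder relies on the JDK ServiceLoader, a handler only becomes visible to findRedisHandler after it has been registered as a service provider. A minimal sketch of such a registration file, assuming a hypothetical custom handler (org.example.MyRedisHandler is a placeholder, not a class in this project):

# META-INF/services/org.apache.flink.streaming.connectors.redis.hanlder.RedisHandler
org.example.MyRedisHandler

filterByContext then keeps only the registered handlers whose requiredContext() entries are all present, with equal values, in the properties map passed to findRedisHandler; if none match, the lookup fails with "no match redis".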
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.command; 20 | 21 | /** All available commands for Redis. */ 22 | public enum RedisCommand { 23 | 24 | /** 25 | * Set key to hold the string value. If key already holds a value, it is overwritten, regardless 26 | * of its type. 27 | */ 28 | SET( 29 | RedisInsertCommand.SET, 30 | RedisSelectCommand.GET, 31 | RedisJoinCommand.GET, 32 | RedisDeleteCommand.DEL, 33 | true), 34 | 35 | /** 36 | * Sets field in the hash stored at key to value. If key does not exist, a new key holding a 37 | * hash is created. If field already exists in the hash, it is overwritten. 38 | */ 39 | HSET( 40 | RedisInsertCommand.HSET, 41 | RedisSelectCommand.HGET, 42 | RedisJoinCommand.HGET, 43 | RedisDeleteCommand.HDEL, 44 | true), 45 | 46 | HMSET( 47 | RedisInsertCommand.HMSET, 48 | RedisSelectCommand.HGET, 49 | RedisJoinCommand.HGET, 50 | RedisDeleteCommand.HDEL, 51 | true), 52 | 53 | /** get val from map. */ 54 | HGET( 55 | RedisInsertCommand.HSET, 56 | RedisSelectCommand.HGET, 57 | RedisJoinCommand.HGET, 58 | RedisDeleteCommand.HDEL, 59 | true), 60 | 61 | /** get val from string. */ 62 | GET( 63 | RedisInsertCommand.SET, 64 | RedisSelectCommand.GET, 65 | RedisJoinCommand.GET, 66 | RedisDeleteCommand.DEL, 67 | true), 68 | 69 | /** 70 | * Insert the specified value at the tail of the list stored at key. If key does not exist, it 71 | * is created as empty list before performing the push operation. 72 | */ 73 | RPUSH( 74 | RedisInsertCommand.RPUSH, 75 | RedisSelectCommand.LRANGE, 76 | RedisJoinCommand.NONE, 77 | RedisDeleteCommand.NONE, 78 | true), 79 | /** 80 | * Insert the specified value at the head of the list stored at key. If key does not exist, it 81 | * is created as empty list before performing the push operations. 82 | */ 83 | LPUSH( 84 | RedisInsertCommand.LPUSH, 85 | RedisSelectCommand.LRANGE, 86 | RedisJoinCommand.NONE, 87 | RedisDeleteCommand.NONE, 88 | true), 89 | 90 | /** Delta plus for specified key. */ 91 | INCRBY( 92 | RedisInsertCommand.INCRBY, 93 | RedisSelectCommand.GET, 94 | RedisJoinCommand.GET, 95 | RedisDeleteCommand.INCRBY, 96 | true), 97 | 98 | /** Delta plus for specified key. */ 99 | INCRBYFLOAT( 100 | RedisInsertCommand.INCRBYFLOAT, 101 | RedisSelectCommand.GET, 102 | RedisJoinCommand.GET, 103 | RedisDeleteCommand.INCRBYFLOAT, 104 | true), 105 | 106 | /** Delta plus for specified key. */ 107 | HINCRBY( 108 | RedisInsertCommand.HINCRBY, 109 | RedisSelectCommand.HGET, 110 | RedisJoinCommand.HGET, 111 | RedisDeleteCommand.HINCRBY, 112 | true), 113 | 114 | /** Delta plus for specified key. */ 115 | HINCRBYFLOAT( 116 | RedisInsertCommand.HINCRBYFLOAT, 117 | RedisSelectCommand.HGET, 118 | RedisJoinCommand.HGET, 119 | RedisDeleteCommand.HINCRBYFLOAT, 120 | true), 121 | 122 | /** */ 123 | ZINCRBY( 124 | RedisInsertCommand.ZINCRBY, 125 | RedisSelectCommand.ZSCORE, 126 | RedisJoinCommand.ZSCORE, 127 | RedisDeleteCommand.ZINCRBY, 128 | true), 129 | 130 | /** 131 | * Add the specified member to the set stored at key. Specified member that is already a member 132 | * of this set is ignored. 
133 | */ 134 | SADD( 135 | RedisInsertCommand.SADD, 136 | RedisSelectCommand.SRANDMEMBER, 137 | RedisJoinCommand.NONE, 138 | RedisDeleteCommand.SREM, 139 | true), 140 | 141 | /** Adds the specified members with the specified score to the sorted set stored at key. */ 142 | ZADD( 143 | RedisInsertCommand.ZADD, 144 | RedisSelectCommand.ZSCORE, 145 | RedisJoinCommand.ZSCORE, 146 | RedisDeleteCommand.ZREM, 147 | true), 148 | 149 | /** 150 | * Adds the element to the HyperLogLog data structure stored at the variable name specified as 151 | * first argument. 152 | */ 153 | PFADD( 154 | RedisInsertCommand.PFADD, 155 | RedisSelectCommand.NONE, 156 | RedisJoinCommand.NONE, 157 | RedisDeleteCommand.NONE, 158 | true), 159 | 160 | /** Posts a message to the given channel. */ 161 | PUBLISH( 162 | RedisInsertCommand.PUBLISH, 163 | RedisSelectCommand.SUBSCRIBE, 164 | RedisJoinCommand.NONE, 165 | RedisDeleteCommand.NONE, 166 | false), 167 | 168 | /** Posts a message to the given channel. */ 169 | SUBSCRIBE( 170 | RedisInsertCommand.PUBLISH, 171 | RedisSelectCommand.SUBSCRIBE, 172 | RedisJoinCommand.NONE, 173 | RedisDeleteCommand.NONE, 174 | false), 175 | 176 | /** Removes the specified members from set at key. */ 177 | SREM( 178 | RedisInsertCommand.SREM, 179 | RedisSelectCommand.SRANDMEMBER, 180 | RedisJoinCommand.NONE, 181 | RedisDeleteCommand.NONE, 182 | true), 183 | 184 | /** Removes the specified members from the sorted set stored at key. */ 185 | ZREM( 186 | RedisInsertCommand.ZREM, 187 | RedisSelectCommand.ZSCORE, 188 | RedisJoinCommand.ZSCORE, 189 | RedisDeleteCommand.NONE, 190 | true), 191 | 192 | /** del key. */ 193 | DEL( 194 | RedisInsertCommand.DEL, 195 | RedisSelectCommand.GET, 196 | RedisJoinCommand.GET, 197 | RedisDeleteCommand.NONE, 198 | true), 199 | 200 | /** del val in map. */ 201 | HDEL( 202 | RedisInsertCommand.HDEL, 203 | RedisSelectCommand.HGET, 204 | RedisJoinCommand.HGET, 205 | RedisDeleteCommand.NONE, 206 | true), 207 | /** decrease with fixed num for specified key. 
*/ 208 | DECRBY( 209 | RedisInsertCommand.DECRBY, 210 | RedisSelectCommand.GET, 211 | RedisJoinCommand.GET, 212 | RedisDeleteCommand.NONE, 213 | true); 214 | 215 | private final RedisSelectCommand selectCommand; 216 | 217 | private final RedisInsertCommand insertCommand; 218 | 219 | private final RedisDeleteCommand deleteCommand; 220 | 221 | private final RedisJoinCommand joinCommand; 222 | 223 | private final boolean commandBoundedness; 224 | 225 | RedisCommand( 226 | RedisInsertCommand insertCommand, 227 | RedisSelectCommand selectCommand, 228 | RedisJoinCommand joinCommand, 229 | RedisDeleteCommand deleteCommand, 230 | boolean commandBoundedness) { 231 | this.selectCommand = selectCommand; 232 | this.insertCommand = insertCommand; 233 | this.deleteCommand = deleteCommand; 234 | this.joinCommand = joinCommand; 235 | this.commandBoundedness = commandBoundedness; 236 | } 237 | 238 | public RedisSelectCommand getSelectCommand() { 239 | return selectCommand; 240 | } 241 | 242 | public RedisInsertCommand getInsertCommand() { 243 | return insertCommand; 244 | } 245 | 246 | public RedisDeleteCommand getDeleteCommand() { 247 | return deleteCommand; 248 | } 249 | 250 | public RedisJoinCommand getJoinCommand() { 251 | return joinCommand; 252 | } 253 | 254 | public boolean isCommandBoundedness() { 255 | return commandBoundedness; 256 | } 257 | } 258 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/converter/RedisRowConverter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.converter; 20 | 21 | import org.apache.flink.table.data.DecimalData; 22 | import org.apache.flink.table.data.RowData; 23 | import org.apache.flink.table.data.TimestampData; 24 | import org.apache.flink.table.data.binary.BinaryStringData; 25 | import org.apache.flink.table.types.logical.DecimalType; 26 | import org.apache.flink.table.types.logical.LogicalType; 27 | 28 | import java.io.Serializable; 29 | import java.math.BigDecimal; 30 | import java.util.Base64; 31 | 32 | import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getPrecision; 33 | 34 | /** redis serialize . 
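As a quick illustration of how these mappings are consumed, the sketch below reads the sub-commands back from one enum constant; the printed values follow directly from the HSET definition above.

import org.apache.flink.streaming.connectors.redis.command.RedisCommand;

public class RedisCommandSketch {
    public static void main(String[] args) {
        RedisCommand command = RedisCommand.HSET;
        // HSET writes with HSET, reads and joins with HGET, deletes with HDEL, and is bounded.
        System.out.println(command.getInsertCommand());     // HSET
        System.out.println(command.getSelectCommand());     // HGET
        System.out.println(command.getJoinCommand());       // HGET
        System.out.println(command.getDeleteCommand());     // HDEL
        System.out.println(command.isCommandBoundedness()); // true
    }
}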
@Author: jeff.zou @Date: 2022/3/10.13:17 */ 35 | public class RedisRowConverter { 36 | 37 | private static final int TIMESTAMP_PRECISION_MIN = 0; 38 | private static final int TIMESTAMP_PRECISION_MAX = 3; 39 | 40 | public static Object dataTypeFromString(LogicalType fieldType, String result) { 41 | return createDeserializer(fieldType).deserialize(result); 42 | } 43 | 44 | public static String rowDataToString(LogicalType fieldType, RowData rowData, Integer index) { 45 | if (rowData.isNullAt(index)) { 46 | return null; 47 | } 48 | return createSerializer(fieldType).serialize(rowData, index); 49 | } 50 | 51 | public static RedisDeserializationConverter createDeserializer(LogicalType fieldType) { 52 | int precision; 53 | switch (fieldType.getTypeRoot()) { 54 | case BIGINT: 55 | case INTERVAL_DAY_TIME: 56 | return Long::valueOf; 57 | case FLOAT: 58 | return Float::valueOf; 59 | case DOUBLE: 60 | return Double::valueOf; 61 | case CHAR: 62 | case VARCHAR: 63 | return BinaryStringData::fromString; 64 | case BOOLEAN: 65 | return Boolean::valueOf; 66 | case BINARY: 67 | case VARBINARY: 68 | return result -> Base64.getDecoder().decode(result); 69 | case DECIMAL: 70 | DecimalType decimalType = (DecimalType) fieldType; 71 | precision = decimalType.getPrecision(); 72 | final int scale = decimalType.getScale(); 73 | return result -> { 74 | BigDecimal decimal = new BigDecimal(result); 75 | return DecimalData.fromBigDecimal(decimal, precision, scale); 76 | }; 77 | case TINYINT: 78 | return Byte::valueOf; 79 | case SMALLINT: 80 | return Short::valueOf; 81 | case INTEGER: 82 | case DATE: 83 | case INTERVAL_YEAR_MONTH: 84 | return Integer::valueOf; 85 | case TIME_WITHOUT_TIME_ZONE: 86 | precision = getPrecision(fieldType); 87 | if (precision < TIMESTAMP_PRECISION_MIN || precision > TIMESTAMP_PRECISION_MAX) { 88 | throw new UnsupportedOperationException( 89 | String.format( 90 | "The precision %s of Time type is out of range [%s, %s]", 91 | precision, TIMESTAMP_PRECISION_MIN, TIMESTAMP_PRECISION_MAX)); 92 | } 93 | return Integer::valueOf; 94 | case TIMESTAMP_WITHOUT_TIME_ZONE: 95 | case TIMESTAMP_WITH_LOCAL_TIME_ZONE: 96 | precision = getPrecision(fieldType); 97 | if (precision < TIMESTAMP_PRECISION_MIN || precision > TIMESTAMP_PRECISION_MAX) { 98 | throw new UnsupportedOperationException( 99 | String.format( 100 | "The precision %s of Timestamp is out of " + "range [%s, %s]", 101 | precision, TIMESTAMP_PRECISION_MIN, TIMESTAMP_PRECISION_MAX)); 102 | } 103 | return result -> { 104 | long milliseconds = Long.valueOf(result); 105 | return TimestampData.fromEpochMillis(milliseconds); 106 | }; 107 | default: 108 | throw new UnsupportedOperationException("Unsupported field type: " + fieldType); 109 | } 110 | } 111 | 112 | private static RedisSerializationConverter createSerializer(LogicalType fieldType) { 113 | int precision; 114 | switch (fieldType.getTypeRoot()) { 115 | case CHAR: 116 | case VARCHAR: 117 | return (rowData, index) -> rowData.getString(index).toString(); 118 | case BOOLEAN: 119 | return (rowData, index) -> String.valueOf(rowData.getBoolean(index)); 120 | case BINARY: 121 | case VARBINARY: 122 | return (rowData, index) -> Base64.getEncoder().encodeToString(rowData.getBinary(index)); 123 | case DECIMAL: 124 | DecimalType decimalType = (DecimalType) fieldType; 125 | precision = decimalType.getPrecision(); 126 | final int scale = decimalType.getScale(); 127 | return (rowData, index) -> { 128 | BigDecimal decimal = rowData.getDecimal(index, precision, scale).toBigDecimal(); 129 | return 
decimal.toString(); 130 | }; 131 | case TINYINT: 132 | return (rowData, index) -> String.valueOf(rowData.getByte(index)); 133 | case SMALLINT: 134 | return (rowData, index) -> String.valueOf(rowData.getShort(index)); 135 | case INTEGER: 136 | case DATE: 137 | case INTERVAL_YEAR_MONTH: 138 | return (rowData, index) -> String.valueOf(rowData.getInt(index)); 139 | case TIME_WITHOUT_TIME_ZONE: 140 | precision = getPrecision(fieldType); 141 | if (precision < TIMESTAMP_PRECISION_MIN || precision > TIMESTAMP_PRECISION_MAX) { 142 | throw new UnsupportedOperationException( 143 | String.format( 144 | "The precision %s of Time type is out of range [%s, %s]", 145 | precision, TIMESTAMP_PRECISION_MIN, TIMESTAMP_PRECISION_MAX)); 146 | } 147 | return (rowData, index) -> String.valueOf(rowData.getInt(index)); 148 | case BIGINT: 149 | case INTERVAL_DAY_TIME: 150 | return (rowData, index) -> String.valueOf(rowData.getLong(index)); 151 | case FLOAT: 152 | return (rowData, index) -> String.valueOf(rowData.getFloat(index)); 153 | case DOUBLE: 154 | return (rowData, index) -> String.valueOf(rowData.getDouble(index)); 155 | case TIMESTAMP_WITHOUT_TIME_ZONE: 156 | case TIMESTAMP_WITH_LOCAL_TIME_ZONE: 157 | precision = getPrecision(fieldType); 158 | if (precision < TIMESTAMP_PRECISION_MIN || precision > TIMESTAMP_PRECISION_MAX) { 159 | throw new UnsupportedOperationException( 160 | String.format( 161 | "The precision %s of Timestamp is out of range [%s, %s]", 162 | precision, TIMESTAMP_PRECISION_MIN, TIMESTAMP_PRECISION_MAX)); 163 | } 164 | return (rowData, index) -> String.valueOf(rowData.getTimestamp(index, precision).getMillisecond()); 165 | default: 166 | throw new UnsupportedOperationException("Unsupported field type: " + fieldType); 167 | } 168 | } 169 | 170 | @FunctionalInterface 171 | interface RedisDeserializationConverter extends Serializable { 172 | 173 | Object deserialize(String field); 174 | } 175 | 176 | @FunctionalInterface 177 | interface RedisSerializationConverter extends Serializable { 178 | 179 | String serialize(RowData rowData, Integer index); 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/container/RedisClientBuilder.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
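A short round-trip sketch for the converter above, using a BIGINT column; the row value is illustrative only.

import org.apache.flink.streaming.connectors.redis.converter.RedisRowConverter;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.LogicalType;

public class RedisRowConverterSketch {
    public static void main(String[] args) {
        LogicalType type = new BigIntType();

        // Serialize field 0 of an internal row into the string form written to Redis.
        GenericRowData row = GenericRowData.of(42L);
        String stored = RedisRowConverter.rowDataToString(type, row, 0); // "42"

        // Deserialize the stored string back into Flink's internal representation.
        Object restored = RedisRowConverter.dataTypeFromString(type, stored); // Long 42
        System.out.println(stored + " -> " + restored);
    }
}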
17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.container; 20 | 21 | import io.lettuce.core.AbstractRedisClient; 22 | import io.lettuce.core.RedisClient; 23 | import io.lettuce.core.RedisURI; 24 | import io.lettuce.core.cluster.ClusterClientOptions; 25 | import io.lettuce.core.cluster.ClusterTopologyRefreshOptions; 26 | import io.lettuce.core.cluster.RedisClusterClient; 27 | import io.lettuce.core.resource.ClientResources; 28 | import io.lettuce.core.resource.DefaultClientResources; 29 | 30 | import org.apache.flink.streaming.connectors.redis.config.FlinkClusterConfig; 31 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase; 32 | import org.apache.flink.streaming.connectors.redis.config.FlinkSentinelConfig; 33 | import org.apache.flink.streaming.connectors.redis.config.FlinkSingleConfig; 34 | import org.apache.flink.util.StringUtils; 35 | 36 | import java.time.Duration; 37 | import java.util.Arrays; 38 | import java.util.List; 39 | import java.util.Objects; 40 | import java.util.stream.Collectors; 41 | 42 | /** 43 | * @author Jeff Zou 44 | * @date 2024/3/20 17:08 45 | */ 46 | public class RedisClientBuilder { 47 | 48 | /** 49 | * Initialize the {@link RedisCommandsContainer} based on the instance type. 50 | * 51 | * @param flinkConfigBase configuration base 52 | * @return @throws IllegalArgumentException if Config, ClusterConfig and SentinelConfig are all 53 | * null 54 | */ 55 | public static AbstractRedisClient build(FlinkConfigBase flinkConfigBase) { 56 | DefaultClientResources.Builder builder = DefaultClientResources.builder(); 57 | if (flinkConfigBase.getLettuceConfig() != null) { 58 | if (flinkConfigBase.getLettuceConfig().getNettyIoPoolSize() != null) { 59 | builder.ioThreadPoolSize(flinkConfigBase.getLettuceConfig().getNettyIoPoolSize()); 60 | } 61 | if (flinkConfigBase.getLettuceConfig().getNettyEventPoolSize() != null) { 62 | builder.computationThreadPoolSize( 63 | flinkConfigBase.getLettuceConfig().getNettyEventPoolSize()); 64 | } 65 | } 66 | 67 | ClientResources clientResources = builder.build(); 68 | 69 | if (flinkConfigBase instanceof FlinkSingleConfig) { 70 | return build((FlinkSingleConfig) flinkConfigBase, clientResources); 71 | } else if (flinkConfigBase instanceof FlinkClusterConfig) { 72 | return build((FlinkClusterConfig) flinkConfigBase, clientResources); 73 | } else if (flinkConfigBase instanceof FlinkSentinelConfig) { 74 | return build((FlinkSentinelConfig) flinkConfigBase, clientResources); 75 | } else { 76 | throw new IllegalArgumentException("configuration not found!"); 77 | } 78 | } 79 | 80 | /** 81 | * Builds container for single Redis environment. 82 | * 83 | * @param singleConfig configuration for redis 84 | * @return container for single Redis environment 85 | * @throws NullPointerException if singleConfig is null 86 | */ 87 | private static RedisClient build( 88 | FlinkSingleConfig singleConfig, ClientResources clientResources) { 89 | Objects.requireNonNull(singleConfig, "Redis config should not be Null"); 90 | 91 | RedisURI.Builder builder = 92 | RedisURI.builder() 93 | .withHost(singleConfig.getHost()) 94 | .withPort(singleConfig.getPort()) 95 | .withDatabase(singleConfig.getDatabase()); 96 | if (!StringUtils.isNullOrWhitespaceOnly(singleConfig.getPassword())) { 97 | builder.withPassword(singleConfig.getPassword().toCharArray()); 98 | } 99 | 100 | return RedisClient.create(clientResources, builder.build()); 101 | } 102 | 103 | /** 104 | * Builds container for Redis Cluster environment. 
105 | * 106 | * @param clusterConfig configuration for Cluster 107 | * @return container for Redis Cluster environment 108 | * @throws NullPointerException if ClusterConfig is null 109 | */ 110 | private static RedisClusterClient build( 111 | FlinkClusterConfig clusterConfig, ClientResources clientResources) { 112 | Objects.requireNonNull(clusterConfig, "Redis cluster config should not be Null"); 113 | 114 | List redisURIS = 115 | Arrays.stream(clusterConfig.getNodesInfo().split(",")) 116 | .map( 117 | node -> { 118 | String[] redis = node.split(":"); 119 | RedisURI.Builder builder = 120 | RedisURI.builder() 121 | .withHost(redis[0]) 122 | .withPort(Integer.parseInt(redis[1])); 123 | if (!StringUtils.isNullOrWhitespaceOnly( 124 | clusterConfig.getPassword())) { 125 | builder.withPassword( 126 | clusterConfig.getPassword().toCharArray()); 127 | } 128 | return builder.build(); 129 | }) 130 | .collect(Collectors.toList()); 131 | 132 | RedisClusterClient clusterClient = RedisClusterClient.create(clientResources, redisURIS); 133 | 134 | ClusterTopologyRefreshOptions topologyRefreshOptions = 135 | ClusterTopologyRefreshOptions.builder() 136 | .enableAdaptiveRefreshTrigger( 137 | ClusterTopologyRefreshOptions.RefreshTrigger.MOVED_REDIRECT, 138 | ClusterTopologyRefreshOptions.RefreshTrigger.PERSISTENT_RECONNECTS) 139 | .adaptiveRefreshTriggersTimeout(Duration.ofSeconds(10L)) 140 | .build(); 141 | 142 | clusterClient.setOptions( 143 | ClusterClientOptions.builder() 144 | .topologyRefreshOptions(topologyRefreshOptions) 145 | .build()); 146 | 147 | return clusterClient; 148 | } 149 | 150 | /** 151 | * @param sentinelConfig 152 | * @param clientResources 153 | * @return 154 | */ 155 | private static RedisClient build( 156 | FlinkSentinelConfig sentinelConfig, ClientResources clientResources) { 157 | Objects.requireNonNull(sentinelConfig, "Redis sentinel config should not be Null"); 158 | 159 | RedisURI.Builder builder = 160 | RedisURI.builder() 161 | .withSentinelMasterId(sentinelConfig.getMasterName()) 162 | .withDatabase(sentinelConfig.getDatabase()); 163 | 164 | Arrays.stream(sentinelConfig.getSentinelsInfo().split(",")) 165 | .forEach( 166 | node -> { 167 | String[] redis = node.split(":"); 168 | if (StringUtils.isNullOrWhitespaceOnly(sentinelConfig.getPassword())) { 169 | builder.withSentinel( 170 | redis[0], 171 | Integer.parseInt(redis[1]), 172 | sentinelConfig.getSentinelsPassword()); 173 | } else { 174 | builder.withSentinel( 175 | redis[0], 176 | Integer.parseInt(redis[1]), 177 | sentinelConfig.getSentinelsPassword()) 178 | .withPassword(sentinelConfig.getPassword().toCharArray()); 179 | } 180 | }); 181 | 182 | return RedisClient.create(clientResources, builder.build()); 183 | } 184 | } 185 | -------------------------------------------------------------------------------- /src/test/java/org/apache/flink/streaming/connectors/redis/table/SQLQueryTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
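For orientation, the AbstractRedisClient built above follows the normal Lettuce lifecycle: connect, issue commands, close. The sketch below uses Lettuce directly with a placeholder address; inside the connector this is handled by the container classes rather than user code.

import io.lettuce.core.RedisClient;
import io.lettuce.core.RedisURI;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.api.sync.RedisCommands;

public class LettuceLifecycleSketch {
    public static void main(String[] args) {
        // Placeholder single-node address, mirroring the single-node branch of RedisClientBuilder.
        RedisURI uri = RedisURI.builder().withHost("127.0.0.1").withPort(6379).withDatabase(0).build();
        RedisClient client = RedisClient.create(uri);
        StatefulRedisConnection<String, String> connection = client.connect();
        RedisCommands<String, String> commands = connection.sync();
        commands.set("demo-key", "demo-value");
        connection.close();
        client.shutdown();
    }
}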
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.table; 20 | 21 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 22 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand; 23 | import org.apache.flink.streaming.connectors.redis.table.base.TestRedisConfigBase; 24 | import org.apache.flink.table.api.TableResult; 25 | import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 26 | import org.apache.flink.util.Preconditions; 27 | import org.junit.jupiter.api.Test; 28 | 29 | import static org.apache.flink.streaming.connectors.redis.config.RedisValidator.REDIS_COMMAND; 30 | 31 | public class SQLQueryTest extends TestRedisConfigBase { 32 | 33 | @Test 34 | public void testQuery() throws Exception { 35 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 36 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 37 | singleRedisCommands.del("test"); 38 | singleRedisCommands.set("test", "1"); 39 | String redis_table = 40 | "create table redis_table(username VARCHAR, age int) with ( 'connector'='redis', " 41 | + "'host'='" 42 | + REDIS_HOST 43 | + "','port'='" 44 | + REDIS_PORT 45 | + "', 'redis-mode'='single', 'password'='" 46 | + REDIS_PASSWORD 47 | + "','" 48 | + REDIS_COMMAND 49 | + "'='" 50 | + RedisCommand.GET 51 | + "')"; 52 | tEnv.executeSql(redis_table); 53 | TableResult tableResult = 54 | tEnv.executeSql( 55 | "insert into redis_table select username,age + 1 from redis_table /*+ options('scan.key'='test') */"); 56 | 57 | tableResult.getJobClient().get().getJobExecutionResult().get(); 58 | Preconditions.checkArgument(singleRedisCommands.get("test").equals("2")); 59 | } 60 | 61 | @Test 62 | public void testMapQuery() throws Exception { 63 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 64 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 65 | singleRedisCommands.del("test_hash"); 66 | singleRedisCommands.hset("test_hash", "1", "1"); 67 | String source = 68 | "create table source_redis(username VARCHAR, passport int, age int) with ( 'connector'='redis', " 69 | + "'host'='" 70 | + REDIS_HOST 71 | + "','port'='" 72 | + REDIS_PORT 73 | + "', 'redis-mode'='single','scan.key'='test_hash', 'scan.addition.key'='1', 'password'='" 74 | + REDIS_PASSWORD 75 | + "','" 76 | + REDIS_COMMAND 77 | + "'='" 78 | + RedisCommand.HGET 79 | + "')"; 80 | tEnv.executeSql(source); 81 | String sink = 82 | "create table sink_table(username varchar, passport int, age int) with ( 'connector'='redis', " 83 | + "'host'='" 84 | + REDIS_HOST 85 | + "','port'='" 86 | + REDIS_PORT 87 | + "', 'redis-mode'='single','password'='" 88 | + REDIS_PASSWORD 89 | + "','" 90 | + REDIS_COMMAND 91 | + "'='" 92 | + RedisCommand.HSET 93 | + "')"; 94 | tEnv.executeSql(sink); 95 | TableResult tableResult = 96 | tEnv.executeSql( 97 | "insert into sink_table select username,passport, age + 1 from source_redis "); 98 | 99 | tableResult.getJobClient().get().getJobExecutionResult().get(); 100 | 
Preconditions.checkArgument(singleRedisCommands.hget("test_hash", "1").equals("2")); 101 | } 102 | 103 | @Test 104 | public void testSortedSetQuery() throws Exception { 105 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 106 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 107 | singleRedisCommands.del("test_sorted_set"); 108 | singleRedisCommands.zadd("test_sorted_set", 1d, "test"); 109 | String source = 110 | "create table source_redis(username VARCHAR, age double, passport VARCHAR) with ( 'connector'='redis', " 111 | + "'host'='" 112 | + REDIS_HOST 113 | + "','port'='" 114 | + REDIS_PORT 115 | + "', 'redis-mode'='single','scan.key'='test_sorted_set', 'scan.addition.key'='test', 'password'='" 116 | + REDIS_PASSWORD 117 | + "','" 118 | + REDIS_COMMAND 119 | + "'='" 120 | + RedisCommand.ZADD 121 | + "')"; 122 | tEnv.executeSql(source); 123 | String sink = 124 | "create table sink_table(username varchar, age double, passport VARCHAR) with ( 'connector'='redis', " 125 | + "'host'='" 126 | + REDIS_HOST 127 | + "','port'='" 128 | + REDIS_PORT 129 | + "', 'redis-mode'='single','password'='" 130 | + REDIS_PASSWORD 131 | + "','" 132 | + REDIS_COMMAND 133 | + "'='" 134 | + RedisCommand.ZADD 135 | + "')"; 136 | tEnv.executeSql(sink); 137 | TableResult tableResult = 138 | tEnv.executeSql( 139 | "insert into sink_table select username, age + 1 ,passport from source_redis "); 140 | 141 | tableResult.getJobClient().get().getJobExecutionResult().get(); 142 | Preconditions.checkArgument(singleRedisCommands.zscore("test_sorted_set", "test") == 2); 143 | } 144 | 145 | @Test 146 | public void testLrangeQuery() throws Exception { 147 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 148 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 149 | singleRedisCommands.del("test_list"); 150 | singleRedisCommands.lpush("test_list", "2", "test2"); 151 | String source = 152 | "create table source_redis(username VARCHAR, passport VARCHAR) with ( 'connector'='redis', " 153 | + "'host'='" 154 | + REDIS_HOST 155 | + "','port'='" 156 | + REDIS_PORT 157 | + "', 'redis-mode'='single','scan.key'='test_list', 'scan.range.start'='0', 'scan.range.stop'='1', 'password'='" 158 | + REDIS_PASSWORD 159 | + "','" 160 | + REDIS_COMMAND 161 | + "'='" 162 | + RedisCommand.LPUSH 163 | + "')"; 164 | tEnv.executeSql(source); 165 | String sink = 166 | "create table sink_table(username varchar, passport VARCHAR) with ( 'connector'='print')"; 167 | tEnv.executeSql(sink); 168 | TableResult tableResult = 169 | tEnv.executeSql("insert into sink_table select * from source_redis "); 170 | 171 | tableResult.getJobClient().get().getJobExecutionResult().get(); 172 | } 173 | 174 | @Test 175 | public void testSrandmemberQuery() throws Exception { 176 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 177 | StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); 178 | singleRedisCommands.del("test_set"); 179 | singleRedisCommands.sadd("test_set", "2", "test2"); 180 | String source = 181 | "create table source_redis(username VARCHAR, passport VARCHAR) with ( 'connector'='redis', " 182 | + "'host'='" 183 | + REDIS_HOST 184 | + "','port'='" 185 | + REDIS_PORT 186 | + "', 'redis-mode'='single','scan.key'='test_set', 'scan.count'='2', 'password'='" 187 | + REDIS_PASSWORD 188 | + "','" 189 | + REDIS_COMMAND 190 | + "'='" 191 | + RedisCommand.SADD 192 | + "')"; 193 | tEnv.executeSql(source); 194 | 
String sink = 195 | "create table sink_table(username varchar, passport VARCHAR) with ( 'connector'='print')"; 196 | tEnv.executeSql(sink); 197 | TableResult tableResult = 198 | tEnv.executeSql("insert into sink_table select * from source_redis "); 199 | 200 | tableResult.getJobClient().get().getJobExecutionResult().get(); 201 | } 202 | } 203 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/container/RedisCommandsContainer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.container; 20 | 21 | import io.lettuce.core.Range; 22 | import io.lettuce.core.RedisFuture; 23 | import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands; 24 | 25 | import java.io.IOException; 26 | import java.io.Serializable; 27 | import java.util.List; 28 | import java.util.Map; 29 | 30 | /** The container for all available Redis commands. */ 31 | public interface RedisCommandsContainer extends Serializable { 32 | 33 | /** 34 | * Open the container. 35 | * 36 | * @throws Exception if the instance can not be opened properly 37 | */ 38 | void open() throws Exception; 39 | 40 | /** 41 | * Sets field in the hash stored at key to value, with TTL, if needed. Setting expire time to 42 | * key is optional. If key does not exist, a new key holding a hash is created. If field already 43 | * exists in the hash, it is overwritten. 44 | * 45 | * @param key Hash name 46 | * @param hashField Hash field 47 | * @param value Hash value 48 | */ 49 | RedisFuture hset(String key, String hashField, String value); 50 | 51 | /** 52 | * Sets fields in the hash stored at key to value, with TTL, if needed. Setting expire time to 53 | * key is optional. If key does not exist, a new key holding a hash is created. If key already 54 | * exists, it is overwritten. 55 | * @param key 56 | * @param hashField 57 | * @return 58 | */ 59 | RedisFuture hmset(String key, Map hashField); 60 | 61 | /** 62 | * @param key 63 | * @param hashField 64 | * @param value 65 | * @return 66 | */ 67 | RedisFuture hincrBy(String key, String hashField, long value); 68 | 69 | /** 70 | * @param key 71 | * @param hashField 72 | * @param value 73 | * @return 74 | */ 75 | RedisFuture hincrByFloat(String key, String hashField, double value); 76 | 77 | /** 78 | * Insert the specified value at the tail of the list stored at key. If key does not exist, it 79 | * is created as empty list before performing the push operation. 
80 | * 81 | * @param listName Name of the List 82 | * @param value Value to be added 83 | */ 84 | RedisFuture rpush(String listName, String value); 85 | 86 | /** 87 | * Insert the specified value at the head of the list stored at key. If key does not exist, it 88 | * is created as empty list before performing the push operation. 89 | * 90 | * @param listName Name of the List 91 | * @param value Value to be added 92 | */ 93 | RedisFuture lpush(String listName, String value); 94 | 95 | /** 96 | * Add the specified member to the set stored at key. Specified members that are already a 97 | * member of this set are ignored. If key does not exist, a new set is created before adding the 98 | * specified members. 99 | * 100 | * @param setName Name of the Set 101 | * @param value Value to be added 102 | */ 103 | RedisFuture sadd(String setName, String value); 104 | 105 | /** 106 | * Posts a message to the given channel. 107 | * 108 | * @param channelName Name of the channel to which data will be published 109 | * @param message the message 110 | */ 111 | RedisFuture publish(String channelName, String message); 112 | 113 | /** 114 | * Set key to hold the string value. If key already holds a value, it is overwritten, regardless 115 | * of its type. Any previous time to live associated with the key is discarded on successful SET 116 | * operation. 117 | * 118 | * @param key the key name in which value to be set 119 | * @param value the value 120 | */ 121 | RedisFuture set(String key, String value); 122 | 123 | /** 124 | * Adds all the element arguments to the HyperLogLog data structure stored at the variable name 125 | * specified as first argument. 126 | * 127 | * @param key The name of the key 128 | * @param element the element 129 | */ 130 | RedisFuture pfadd(String key, String element); 131 | 132 | /** 133 | * Adds the specified member with the specified scores to the sorted set stored at key. 134 | * 135 | * @param key The name of the Sorted Set 136 | * @param score Score of the element 137 | * @param element element to be added 138 | */ 139 | RedisFuture zadd(String key, double score, String element); 140 | 141 | /** 142 | * increase the specified member with the specified scores to the sorted set stored at key. 143 | * 144 | * @param key 145 | * @param score 146 | * @param element 147 | */ 148 | RedisFuture zincrBy(String key, double score, String element); 149 | 150 | /** 151 | * Removes the specified member from the sorted set stored at key. 152 | * 153 | * @param key The name of the Sorted Set 154 | * @param element element to be removed 155 | */ 156 | RedisFuture zrem(String key, String element); 157 | 158 | /** 159 | * Remove members from a specified score range 160 | * @param key 161 | * @param range 162 | * @return 163 | */ 164 | RedisFuture zremRangeByScore(String key, Range range); 165 | 166 | /** 167 | * Remove members from a specified lex range 168 | * @param key 169 | * @param range 170 | * @return 171 | */ 172 | RedisFuture zremRangeByLex(String key, Range range); 173 | 174 | /** 175 | * Remove members from a specified rank 176 | * @param key 177 | * @param start 178 | * @param stop 179 | * @return 180 | */ 181 | RedisFuture zremRangeByRank(String key, long start, long stop); 182 | 183 | /** 184 | * increase value to specified key. 185 | * 186 | * @param key 187 | * @param value 188 | */ 189 | RedisFuture incrBy(String key, long value); 190 | 191 | /** 192 | * increase value to specified key. 
193 | * 194 | * @param key 195 | * @param value 196 | * @return 197 | */ 198 | RedisFuture incrByFloat(String key, double value); 199 | 200 | /** 201 | * decrease value from specified key. 202 | * 203 | * @param key the key name in which value to be set 204 | * @param value value the value 205 | */ 206 | RedisFuture decrBy(String key, Long value); 207 | 208 | /** 209 | * get value by key and field . 210 | * 211 | * @param key 212 | * @param field 213 | * @return 214 | */ 215 | RedisFuture hget(String key, String field); 216 | 217 | /** 218 | * get all value by key. 219 | * 220 | * @param key 221 | * @return 222 | */ 223 | RedisFuture> hgetAll(String key); 224 | 225 | /** 226 | * get value by key. 227 | * 228 | * @param key 229 | * @return 230 | */ 231 | RedisFuture get(String key); 232 | 233 | /** 234 | * Close the container. 235 | * 236 | * @throws IOException 237 | */ 238 | void close() throws IOException; 239 | 240 | /** 241 | * expire key with seconds. 242 | * 243 | * @param key 244 | * @param seconds 245 | * @return 246 | */ 247 | RedisFuture expire(String key, int seconds); 248 | 249 | /** 250 | * get ttl of key. 251 | * 252 | * @param key 253 | * @return 254 | */ 255 | RedisFuture getTTL(String key); 256 | 257 | /** 258 | * delete key in map. 259 | * 260 | * @param key 261 | * @param field 262 | */ 263 | RedisFuture hdel(String key, String field); 264 | 265 | /** 266 | * delete key. 267 | * 268 | * @param key 269 | */ 270 | RedisFuture del(String key); 271 | 272 | /** 273 | * delete key value from set. 274 | * 275 | * @param setName 276 | * @param value 277 | */ 278 | RedisFuture srem(String setName, String value); 279 | 280 | /** 281 | * @param key 282 | * @param start 283 | * @param end 284 | * @return 285 | */ 286 | RedisFuture lRange(String key, long start, long end); 287 | 288 | /** 289 | * @param key 290 | * @return 291 | */ 292 | RedisFuture exists(String key); 293 | 294 | /** 295 | * @param key 296 | * @param field 297 | * @return 298 | */ 299 | RedisFuture hexists(String key, String field); 300 | 301 | /** 302 | * @param key 303 | * @return 304 | */ 305 | RedisFuture pfcount(String key); 306 | 307 | /** 308 | * @param key 309 | * @param member 310 | * @return 311 | */ 312 | RedisFuture zscore(String key, String member); 313 | 314 | /** 315 | * @param key 316 | * @param start 317 | * @param stop 318 | * @return 319 | */ 320 | RedisFuture zrange(String key, long start, long stop); 321 | 322 | /** 323 | * @param key 324 | * @param count 325 | * @return 326 | */ 327 | public RedisFuture srandmember(String key, long count); 328 | 329 | /** 330 | * get redis async commands. 331 | * 332 | * @return 333 | */ 334 | RedisClusterAsyncCommands getAsyncCommands(); 335 | } 336 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/config/RedisOptions.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
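Every method above returns a Lettuce RedisFuture, so callers either block on get() or chain a callback. The sketch below shows both styles against the raw Lettuce async API with placeholder values; it is not the container implementation itself.

import io.lettuce.core.RedisClient;
import io.lettuce.core.RedisFuture;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.api.async.RedisAsyncCommands;

public class AsyncResultSketch {
    public static void main(String[] args) throws Exception {
        RedisClient client = RedisClient.create("redis://127.0.0.1:6379");
        StatefulRedisConnection<String, String> connection = client.connect();
        RedisAsyncCommands<String, String> async = connection.async();

        // hset resolves to true when the field is newly created, false when it overwrites an existing field.
        RedisFuture<Boolean> future = async.hset("demo_hash", "field", "value");
        future.thenAccept(created -> System.out.println("newly created: " + created));
        System.out.println("completed: " + future.get());

        connection.close();
        client.shutdown();
    }
}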
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.flink.streaming.connectors.redis.config; 20 | 21 | import org.apache.flink.configuration.ConfigOption; 22 | import org.apache.flink.configuration.ConfigOptions; 23 | 24 | /** Created by jeff.zou on 2020/9/10. */ 25 | public class RedisOptions { 26 | 27 | public static final ConfigOption TIMEOUT = 28 | ConfigOptions.key("timeout") 29 | .intType() 30 | .defaultValue(2000) 31 | .withDescription("Optional timeout for connect to redis"); 32 | public static final ConfigOption MAXTOTAL = 33 | ConfigOptions.key("maxTotal") 34 | .intType() 35 | .defaultValue(2) 36 | .withDescription("Optional maxTotal for connect to redis"); 37 | public static final ConfigOption MAXIDLE = 38 | ConfigOptions.key("maxIdle") 39 | .intType() 40 | .defaultValue(2) 41 | .withDescription("Optional maxIdle for connect to redis"); 42 | public static final ConfigOption MINIDLE = 43 | ConfigOptions.key("minIdle") 44 | .intType() 45 | .defaultValue(1) 46 | .withDescription("Optional minIdle for connect to redis"); 47 | public static final ConfigOption PASSWORD = 48 | ConfigOptions.key("password") 49 | .stringType() 50 | .noDefaultValue() 51 | .withDescription("Optional password for connect to redis"); 52 | public static final ConfigOption PORT = 53 | ConfigOptions.key("port") 54 | .intType() 55 | .defaultValue(6379) 56 | .withDescription("Optional port for connect to redis"); 57 | public static final ConfigOption HOST = 58 | ConfigOptions.key("host") 59 | .stringType() 60 | .noDefaultValue() 61 | .withDescription("Optional host for connect to redis"); 62 | public static final ConfigOption CLUSTERNODES = 63 | ConfigOptions.key("cluster-nodes") 64 | .stringType() 65 | .noDefaultValue() 66 | .withDescription("Optional nodes for connect to redis cluster"); 67 | public static final ConfigOption DATABASE = 68 | ConfigOptions.key("database") 69 | .intType() 70 | .defaultValue(0) 71 | .withDescription("Optional database for connect to redis"); 72 | public static final ConfigOption COMMAND = 73 | ConfigOptions.key("command") 74 | .stringType() 75 | .noDefaultValue() 76 | .withDescription("Optional command for connect to redis"); 77 | public static final ConfigOption REDISMODE = 78 | ConfigOptions.key("redis-mode") 79 | .stringType() 80 | .noDefaultValue() 81 | .withDescription("Optional redis-mode for connect to redis"); 82 | public static final ConfigOption REDIS_MASTER_NAME = 83 | ConfigOptions.key("master.name") 84 | .stringType() 85 | .noDefaultValue() 86 | .withDescription("Optional master.name for connect to redis sentinels"); 87 | public static final ConfigOption SENTINELS_INFO = 88 | ConfigOptions.key("sentinels.info") 89 | .stringType() 90 | .noDefaultValue() 91 | .withDescription("Optional sentinels.info for connect to redis sentinels"); 92 | public static final ConfigOption SENTINELS_PASSWORD = 93 | ConfigOptions.key("sentinels.password") 94 | .stringType() 95 | .noDefaultValue() 96 | .withDescription("Optional sentinels.password for connect to redis sentinels"); 97 | public static final ConfigOption TTL = 98 | ConfigOptions.key("ttl") 99 | .intType() 
100 |                     .noDefaultValue()
101 |                     .withDescription("Optional ttl for inserts into redis");
102 |     public static final ConfigOption<Long> LOOKUP_CACHE_MAX_ROWS =
103 |             ConfigOptions.key("lookup.cache.max-rows")
104 |                     .longType()
105 |                     .defaultValue(-1L)
106 |                     .withDescription("Optional max rows of cache for redis lookup queries");
107 |     public static final ConfigOption<Long> LOOKUP_CHCHE_TTL =
108 |             ConfigOptions.key("lookup.cache.ttl")
109 |                     .longType()
110 |                     .defaultValue(-1L)
111 |                     .withDescription("Optional ttl of cache for redis lookup queries");
112 |     public static final ConfigOption<Boolean> LOOKUP_CACHE_LOAD_ALL =
113 |             ConfigOptions.key("lookup.cache.load-all")
114 |                     .booleanType()
115 |                     .defaultValue(false)
116 |                     .withDescription("Optional whether to load all elements into the cache for queries");
117 |     public static final ConfigOption<Integer> MAX_RETRIES =
118 |             ConfigOptions.key("max.retries")
119 |                     .intType()
120 |                     .defaultValue(1)
121 |                     .withDescription("Optional max retries for the sink");
122 |     public static final ConfigOption<Integer> SINK_PARALLELISM =
123 |             ConfigOptions.key("sink.parallelism")
124 |                     .intType()
125 |                     .defaultValue(1)
126 |                     .withDescription("Optional parallelism for the sink");
127 |     public static final ConfigOption<Boolean> SINK_LIMIT =
128 |             ConfigOptions.key("sink.limit")
129 |                     .booleanType()
130 |                     .defaultValue(false)
131 |                     .withDescription("Optional whether to enable the limit for the sink");
132 |     public static final ConfigOption<Integer> SINK_LIMIT_MAX_NUM =
133 |             ConfigOptions.key("sink.limit.max-num")
134 |                     .intType()
135 |                     .defaultValue(10000)
136 |                     .withDescription("Optional max number of writes for the limited sink");
137 |     public static final ConfigOption<Long> SINK_LIMIT_INTERVAL =
138 |             ConfigOptions.key("sink.limit.interval")
139 |                     .longType()
140 |                     .defaultValue(100L)
141 |                     .withDescription(
142 |                             "Optional millisecond interval between writes for the limited sink");
143 |     public static final ConfigOption<Long> SINK_LIMIT_MAX_ONLINE =
144 |             ConfigOptions.key("sink.limit.max-online")
145 |                     .longType()
146 |                     .defaultValue(30 * 60 * 1000L)
147 |                     .withDescription("Optional max online milliseconds for the limited sink");
148 |     public static final ConfigOption<RedisValueDataStructure> VALUE_DATA_STRUCTURE =
149 |             ConfigOptions.key("value.data.structure")
150 |                     .enumType(RedisValueDataStructure.class)
151 |                     .defaultValue(RedisValueDataStructure.column)
152 |                     .withDescription("Optional redis value data structure.");
153 |     public static final ConfigOption<String> EXPIRE_ON_TIME =
154 |             ConfigOptions.key("ttl.on.time")
155 |                     .stringType()
156 |                     .noDefaultValue()
157 |                     .withDescription("Optional time of day at which the redis key expires, e.g. 10:00 or 12:12:01");
158 |     public static final ConfigOption<Boolean> SET_IF_ABSENT =
159 |             ConfigOptions.key("set.if.absent")
160 |                     .booleanType()
161 |                     .defaultValue(false)
162 |                     .withDescription("Optional setIfAbsent for inserts (set/hset) into redis");
163 |     public static final ConfigOption<Boolean> TTL_KEY_NOT_ABSENT =
164 |             ConfigOptions.key("ttl.key.not.absent")
165 |                     .booleanType()
166 |                     .defaultValue(false)
167 |                     .withDescription("Optional set ttl only when the key is not absent");
168 |     public static final ConfigOption<Integer> NETTY_IO_POOL_SIZE =
169 |             ConfigOptions.key("io.pool.size")
170 |                     .intType()
171 |                     .defaultValue(null)
172 |                     .withDescription("Optional io pool size for netty of lettuce");
173 |     public static final ConfigOption<Integer> NETTY_EVENT_POOL_SIZE =
174 |             ConfigOptions.key("event.pool.size")
175 |                     .intType()
176 |                     .defaultValue(null)
177 |                     .withDescription("Optional event pool size for netty of lettuce");
178 |     public static final ConfigOption<String> SCAN_KEY =
179 |             ConfigOptions.key("scan.key")
180 |                     .stringType()
181 |                     .defaultValue(null)
182 |                     .withDescription("Optional key for source queries");
183 |     public static final ConfigOption<String> SCAN_ADDITION_KEY =
184 |             ConfigOptions.key("scan.addition.key")
185 |                     .stringType()
186 |                     .defaultValue(null)
187 |                     .withDescription("Optional addition key (hash field or sorted-set member) for source queries");
188 |     public static final ConfigOption<Integer> SCAN_RANGE_START =
189 |             ConfigOptions.key("scan.range.start")
190 |                     .intType()
191 |                     .defaultValue(null)
192 |                     .withDescription("Optional range start for lrange queries");
193 |     public static final ConfigOption<Integer> SCAN_RANGE_STOP =
194 |             ConfigOptions.key("scan.range.stop")
195 |                     .intType()
196 |                     .defaultValue(null)
197 |                     .withDescription("Optional range stop for lrange queries");
198 |     public static final ConfigOption<Integer> SCAN_COUNT =
199 |             ConfigOptions.key("scan.count")
200 |                     .intType()
201 |                     .defaultValue(null)
202 |                     .withDescription("Optional count for srandmember queries");
203 |     public static final ConfigOption<String> ZREM_RANGEBY =
204 |             ConfigOptions.key("zset.zremrangeby")
205 |                     .stringType()
206 |                     .defaultValue(null)
207 |                     .withDescription("Remove related elements. Valid values: LEX, RANK, SCORE");
208 |     public static final ConfigOption<Boolean> AUDIT_LOG =
209 |             ConfigOptions.key("audit.log")
210 |                     .booleanType()
211 |                     .defaultValue(false)
212 |                     .withDescription("Optional switch to turn on the audit log.");
213 | 
214 |     private RedisOptions() {}
215 | }
216 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below).
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/main/java/org/apache/flink/streaming/connectors/redis/table/RedisSourceFunction.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  *     http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.flink.streaming.connectors.redis.table;
20 | 
21 | import org.apache.flink.configuration.Configuration;
22 | import org.apache.flink.configuration.ReadableConfig;
23 | import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
24 | import org.apache.flink.streaming.connectors.redis.command.RedisCommand;
25 | import org.apache.flink.streaming.connectors.redis.command.RedisCommandBaseDescription;
26 | import org.apache.flink.streaming.connectors.redis.command.RedisSelectCommand;
27 | import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase;
28 | import org.apache.flink.streaming.connectors.redis.config.RedisOptions;
29 | import org.apache.flink.streaming.connectors.redis.config.RedisValueDataStructure;
30 | import org.apache.flink.streaming.connectors.redis.container.RedisCommandsContainer;
31 | import org.apache.flink.streaming.connectors.redis.container.RedisCommandsContainerBuilder;
32 | import org.apache.flink.streaming.connectors.redis.mapper.RedisMapper;
33 | import org.apache.flink.table.catalog.ResolvedSchema;
34 | import org.apache.flink.table.data.GenericRowData;
35 | import org.apache.flink.table.types.DataType;
36 | import org.apache.flink.table.types.logical.DoubleType;
37 | import org.apache.flink.util.Preconditions;
38 | import org.slf4j.Logger;
39 | import org.slf4j.LoggerFactory;
40 | 
41 | import java.util.List;
42 | 
43 | public class RedisSourceFunction extends RichSourceFunction<GenericRowData> {
44 | 
45 |     private static final long serialVersionUID = 1L;
46 | 
47 |     private static final Logger LOG = LoggerFactory.getLogger(RedisSourceFunction.class);
48 | 
49 |     ReadableConfig readableConfig;
50 | 
51 |     private FlinkConfigBase flinkConfigBase;
52 |     private transient RedisCommandsContainer redisCommandsContainer;
53 | 
54 |     private final int maxRetryTimes;
55 | 
56 |     private RedisCommand redisCommand;
57 | 
58 |     private final RedisValueDataStructure redisValueDataStructure;
59 | 
60 |     private final List<DataType> dataTypes;
61 | 
62 |     private String[] queryParameter;
63 | 
64 |     public RedisSourceFunction(
65 |             RedisMapper redisMapper,
66 |             ReadableConfig readableConfig,
67 |             FlinkConfigBase flinkConfigBase,
68 |             ResolvedSchema resolvedSchema) {
69 |         this.readableConfig = readableConfig;
70 |         this.flinkConfigBase = flinkConfigBase;
71 |         this.maxRetryTimes = readableConfig.get(RedisOptions.MAX_RETRIES);
72 |         this.redisValueDataStructure = readableConfig.get(RedisOptions.VALUE_DATA_STRUCTURE);
73 | 
74 |         RedisCommandBaseDescription redisCommandDescription = redisMapper.getCommandDescription();
75 |         Preconditions.checkNotNull(
76 |                 redisCommandDescription, "Redis Mapper data type description can not be null");
77 |         this.redisCommand = redisCommandDescription.getRedisCommand();
78 |         this.dataTypes = resolvedSchema.getColumnDataTypes();
79 |     }
80 | 
81 |     @Override
82 |     public void open(Configuration parameters) throws Exception {
83 |         validator();
84 |         this.queryParameter = new String[2];
85 |         this.queryParameter[0] = this.readableConfig.get(RedisOptions.SCAN_KEY);
86 | 
87 |         if (redisCommand.getSelectCommand() == RedisSelectCommand.HGET) {
88 |             this.queryParameter[1] = this.readableConfig.get(RedisOptions.SCAN_ADDITION_KEY);
89 |         } else if (redisCommand.getSelectCommand() == RedisSelectCommand.ZSCORE) {
90 |             this.queryParameter[1] = this.readableConfig.get(RedisOptions.SCAN_ADDITION_KEY);
91 |         }
92 | 
93 |         try {
94 |             this.redisCommandsContainer = RedisCommandsContainerBuilder.build(this.flinkConfigBase);
95 |             this.redisCommandsContainer.open();
96 |             LOG.info("successfully created redis container.");
97 |         } catch (Exception e) {
98 |             LOG.error("Redis has not been properly initialized: ", e);
99 |             throw e;
100 |         }
101 |         super.open(parameters);
102 |     }
103 | 
104 |     @Override
105 |     public void run(SourceContext<GenericRowData> ctx) throws Exception {
106 |         // Retry the query until it succeeds or {@code maxRetryTimes} is exceeded.
107 |         for (int i = 0; i <= maxRetryTimes; i++) {
108 |             try {
109 |                 query(ctx);
110 |                 break;
111 |             } catch (Exception e) {
112 |                 LOG.error("query redis error, retry times:{}", i, e);
113 |                 if (i >= maxRetryTimes) {
114 |                     throw new RuntimeException("query redis error ", e);
115 |                 }
116 |                 Thread.sleep(500 * i);
117 |             }
118 |         }
119 |     }
120 | 
121 |     private void query(SourceContext<GenericRowData> ctx) throws Exception {
122 |         switch (redisCommand.getSelectCommand()) {
123 |             case GET: {
124 |                 String result = this.redisCommandsContainer.get(queryParameter[0]).get();
125 |                 GenericRowData rowData =
126 |                         RedisResultWrapper.createRowDataForString(
127 |                                 queryParameter, result, redisValueDataStructure, dataTypes);
128 |                 ctx.collect(rowData);
129 |                 break;
130 |             }
131 |             case HGET: {
132 |                 String result =
133 |                         this.redisCommandsContainer
134 |                                 .hget(queryParameter[0], queryParameter[1])
135 |                                 .get();
136 |                 GenericRowData rowData =
137 |                         RedisResultWrapper.createRowDataForHash(
138 |                                 queryParameter, result, redisValueDataStructure, dataTypes);
139 |                 ctx.collect(rowData);
140 |                 break;
141 |             }
142 |             case ZSCORE: {
143 |                 Double result =
144 |                         this.redisCommandsContainer
145 |                                 .zscore(queryParameter[0], queryParameter[1])
146 |                                 .get();
147 |                 GenericRowData rowData =
148 |                         RedisResultWrapper.createRowDataForSortedSet(
149 |                                 queryParameter, result, dataTypes);
150 |                 ctx.collect(rowData);
151 |                 break;
152 |             }
153 |             case LRANGE: {
154 |                 List<String> list =
155 |                         this.redisCommandsContainer
156 |                                 .lRange(
157 |                                         queryParameter[0],
158 |                                         this.readableConfig.get(RedisOptions.SCAN_RANGE_START),
159 |                                         this.readableConfig.get(RedisOptions.SCAN_RANGE_STOP))
160 |                                 .get();
161 |                 list.forEach(
162 |                         result -> {
163 |                             GenericRowData rowData =
164 |                                     RedisResultWrapper.createRowDataForString(
165 |                                             queryParameter,
166 |                                             String.valueOf(result),
167 |                                             redisValueDataStructure,
168 |                                             dataTypes);
169 |                             ctx.collect(rowData);
170 |                         });
171 | 
172 |                 break;
173 |             }
174 |             case SRANDMEMBER: {
175 |                 List<String> list =
176 |                         this.redisCommandsContainer
177 |                                 .srandmember(
178 |                                         String.valueOf(queryParameter[0]),
179 |                                         readableConfig.get(RedisOptions.SCAN_COUNT))
180 |                                 .get();
181 | 
182 |                 list.forEach(
183 |                         result -> {
184 |                             GenericRowData rowData =
185 |                                     RedisResultWrapper.createRowDataForString(
186 |                                             queryParameter,
187 |                                             String.valueOf(result),
188 |                                             redisValueDataStructure,
189 |                                             dataTypes);
190 |                             ctx.collect(rowData);
191 |                         });
192 |                 break;
193 |             }
194 |             case SUBSCRIBE: {
195 |             }
196 |             default:
197 |         }
198 |     }
199 | 
200 |     @Override
201 |     public void close() throws Exception {
202 |         super.close();
203 |         if (redisCommandsContainer != null) {
204 |             redisCommandsContainer.close();
205 |         }
206 |     }
207 | 
208 |     @Override
209 |     public void cancel() {
210 |     }
211 | 
212 |     private void validator() {
213 |         Preconditions.checkNotNull(
214 |                 this.readableConfig.get(RedisOptions.SCAN_KEY),
215 |                 "the %s for the source can not be null",
216 |                 RedisOptions.SCAN_KEY.key());
217 | 
218 |         Preconditions.checkArgument(
219 |                 redisCommand.getSelectCommand() != RedisSelectCommand.NONE,
220 |                 String.format("the command %s does not support queries.", redisCommand.name()));
221 | 
222 |         if (redisCommand.getSelectCommand() == RedisSelectCommand.HGET) {
223 |             Preconditions.checkNotNull(
224 |                     this.readableConfig.get(RedisOptions.SCAN_ADDITION_KEY),
225 |                     "the field of the Map must be set via %s",
226 |                     RedisOptions.SCAN_ADDITION_KEY.key());
227 |         } else if (redisCommand.getSelectCommand() == RedisSelectCommand.ZSCORE) {
228 |             Preconditions.checkNotNull(
229 |                     this.readableConfig.get(RedisOptions.SCAN_ADDITION_KEY),
230 |                     "the member of the SortedSet must be set via %s",
231 |                     RedisOptions.SCAN_ADDITION_KEY.key());
232 |             Preconditions.checkArgument(
233 |                     dataTypes.get(1).getLogicalType() instanceof DoubleType,
234 |                     "the second column of the source table must be of type double, because the score of a SortedSet in redis is a double.");
235 |         } else if (redisCommand.getSelectCommand() == RedisSelectCommand.LRANGE) {
236 |             Preconditions.checkNotNull(
237 |                     this.readableConfig.get(RedisOptions.SCAN_RANGE_START),
238 |                     "the %s must not be null when querying a list",
239 |                     RedisOptions.SCAN_RANGE_START.key());
240 | 
241 |             Preconditions.checkNotNull(
242 |                     this.readableConfig.get(RedisOptions.SCAN_RANGE_STOP),
243 |                     "the %s must not be null when querying a list",
244 |                     RedisOptions.SCAN_RANGE_STOP.key());
245 |         } else if (redisCommand.getSelectCommand() == RedisSelectCommand.SRANDMEMBER) {
246 |             Preconditions.checkNotNull(
247 |                     this.readableConfig.get(RedisOptions.SCAN_COUNT),
248 |                     "the %s must not be null when querying a set",
249 |                     RedisOptions.SCAN_COUNT.key());
250 |         }
251 |     }
252 | }
253 | 
--------------------------------------------------------------------------------
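A minimal, illustrative sketch of how the asynchronous RedisCommandsContainer can be used on its own, built the same way RedisSourceFunction#open builds it. The helper class name, the key names and the assumption that a FlinkConfigBase instance is already available are hypothetical; the return types follow the lettuce-style signatures as reconstructed in the interface above.

import org.apache.flink.streaming.connectors.redis.config.FlinkConfigBase;
import org.apache.flink.streaming.connectors.redis.container.RedisCommandsContainer;
import org.apache.flink.streaming.connectors.redis.container.RedisCommandsContainerBuilder;

public final class ContainerUsageSketch {

    /** Reads a plain string key, a hash field and a ttl through the async container. */
    static void readExamples(FlinkConfigBase flinkConfigBase) throws Exception {
        // Built exactly as RedisSourceFunction#open does it.
        RedisCommandsContainer container = RedisCommandsContainerBuilder.build(flinkConfigBase);
        container.open();
        try {
            // Every call returns a lettuce RedisFuture; get() blocks until redis answers.
            String plainValue = container.get("my-key").get();             // GET my-key
            String hashValue = container.hget("my-hash", "field-1").get(); // HGET my-hash field-1
            Long remainingTtl = container.getTTL("my-key").get();          // TTL my-key
            System.out.printf("get=%s hget=%s ttl=%s%n", plainValue, hashValue, remainingTtl);
        } finally {
            container.close();
        }
    }
}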
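And a hedged end-to-end sketch of the source path: the option keys ('host', 'port', 'command', 'scan.key') come from RedisOptions above and the query behaviour from RedisSourceFunction, but the connector identifier 'redis', the 'redis-mode' value 'single' and the two-column schema are assumptions for illustration, not taken from this part of the dump.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public final class RedisSourceSqlSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        // Hypothetical table definition; the factory identifier and the schema layout are assumed.
        tEnv.executeSql(
                "CREATE TABLE redis_source ("
                        + "  redis_key STRING,"
                        + "  redis_value STRING"
                        + ") WITH ("
                        + "  'connector' = 'redis',"
                        + "  'redis-mode' = 'single',"
                        + "  'host' = '127.0.0.1',"
                        + "  'port' = '6379',"
                        + "  'command' = 'get',"    // resolves to RedisSelectCommand.GET in RedisSourceFunction
                        + "  'scan.key' = 'my-key'" // required, see RedisSourceFunction#validator
                        + ")");

        // Triggers RedisSourceFunction#run, which issues a single GET and emits one row.
        tEnv.executeSql("SELECT * FROM redis_source").print();
    }
}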