├── .gitignore
├── README.md
├── pom.xml
├── rediswriter.iml
└── src
    ├── main
    │   ├── assembly
    │   │   └── package.xml
    │   ├── java
    │   │   └── com
    │   │       └── alibaba
    │   │           └── datax
    │   │               └── plugin
    │   │                   └── writer
    │   │                       └── rediswriter
    │   │                           ├── Constant.java
    │   │                           ├── JedisClusterPipeline.java
    │   │                           ├── Key.java
    │   │                           ├── RedisWriteAbstract.java
    │   │                           ├── RedisWriter.java
    │   │                           ├── RedisWriterHelper.java
    │   │                           └── writer
    │   │                               ├── DeleteWriter.java
    │   │                               ├── HashTypeWriter.java
    │   │                               ├── ListTypeWriter.java
    │   │                               └── StringTypeWriter.java
    │   └── resources
    │       ├── plugin.json
    │       └── plugin_job_template.json
    └── test
        ├── demo
        │   ├── data
        │   │   ├── create_hive_table.sql
        │   │   └── hive_data.txt
        │   ├── hive_delete_to_redis_hash_filed.json
        │   ├── hive_delete_to_redis_string.json
        │   ├── hive_to_redis_hash.json
        │   ├── hive_to_redis_list.json
        │   └── hive_to_redis_string.json
        └── java
            └── JedisTest.java

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/target/
!.mvn/wrapper/maven-wrapper.jar

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
derby.log

### NetBeans ###
/nbproject/private/
/build/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/

/logs/
/spark-warehouse/
/metastore_db/
/out/
/venv/

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# DataX-redis-writer
For details, see the blog post: [https://blog.csdn.net/u013289115/article/details/106277937](https://blog.csdn.net/u013289115/article/details/106277937)

--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>datax-all</artifactId>
        <groupId>com.alibaba.datax</groupId>
        <version>0.0.1-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>rediswriter</artifactId>
    <name>rediswriter</name>
    <packaging>jar</packaging>

    <dependencies>
        <dependency>
            <groupId>com.alibaba.datax</groupId>
            <artifactId>datax-common</artifactId>
            <version>${datax-project-version}</version>
        </dependency>

        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>2.9.0</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>${jdk-version}</source>
                    <target>${jdk-version}</target>
                    <encoding>${project-sourceEncoding}</encoding>
                </configuration>
            </plugin>
            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <configuration>
                    <descriptors>
                        <descriptor>src/main/assembly/package.xml</descriptor>
                    </descriptors>
                    <finalName>datax</finalName>
                </configuration>
                <executions>
                    <execution>
                        <id>dwzip</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

--------------------------------------------------------------------------------
/src/main/assembly/package.xml:
--------------------------------------------------------------------------------
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
    <id></id>
    <formats>
        <format>dir</format>
    </formats>
    <includeBaseDirectory>false</includeBaseDirectory>
    <fileSets>
        <fileSet>
            <directory>src/main/resources</directory>
            <includes>
                <include>plugin.json</include>
                <include>plugin_job_template.json</include>
            </includes>
            <outputDirectory>plugin/writer/rediswriter</outputDirectory>
        </fileSet>
        <fileSet>
            <directory>target/</directory>
            <includes>
                <include>rediswriter-0.0.1-SNAPSHOT.jar</include>
            </includes>
            <outputDirectory>plugin/writer/rediswriter</outputDirectory>
        </fileSet>
    </fileSets>
    <dependencySets>
        <dependencySet>
            <useProjectArtifact>false</useProjectArtifact>
            <outputDirectory>plugin/writer/rediswriter/libs</outputDirectory>
            <scope>runtime</scope>
        </dependencySet>
    </dependencySets>
</assembly>
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/Constant.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter;

public final class Constant {

    public static final String STANDALONE = "singleton";
    public static final String CLUSTER = "cluster";

    // the three supported redis data types
    public static final String WRITE_TYPE_STRING = "string";
    public static final String WRITE_TYPE_LIST = "list";
    public static final String WRITE_TYPE_HASH = "hash";

    // the two supported redis operations: delete and insert
    public static final String WRITE_MODE_DELETE = "delete";
    public static final String WRITE_MODE_INSERT = "insert";

    // push modes for writing a redis list: lpush, rpush, overwrite (default: overwrite)
    public static final String LIST_PUSH_TYPE_OVERWRITE = "overwrite";
    public static final String LIST_PUSH_TYPE_LPUSH = "lpush";
    public static final String LIST_PUSH_TYPE_RPUSH = "rpush";

}

--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/JedisClusterPipeline.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter;

import redis.clients.jedis.*;
import redis.clients.jedis.exceptions.JedisRedirectionException;
import redis.clients.util.JedisClusterCRC16;
import redis.clients.util.SafeEncoder;

import java.io.Closeable;
import java.lang.reflect.Field;
import java.util.*;

/**
 * JedisClusterPipeline
 * @Author: lijf@2345.com
 * @Date: 2019/1/22 16:29
 * @Version: 1.0
 */
public class JedisClusterPipeline extends PipelineBase implements Closeable {
    // Some fields have no public accessors, so reflection is the only way in.
    // Alternatively, subclass JedisCluster and JedisSlotBasedConnectionHandler to expose them.
    private static final Field FIELD_CONNECTION_HANDLER;
    private static final Field FIELD_CACHE;
    static {
        FIELD_CONNECTION_HANDLER = getField(BinaryJedisCluster.class, "connectionHandler");
        FIELD_CACHE = getField(JedisClusterConnectionHandler.class, "cache");
    }

    private JedisSlotBasedConnectionHandler connectionHandler;
    private JedisClusterInfoCache clusterInfoCache;
    /** the Client for each queued command, in submission order */
    private Queue<Client> clients = new LinkedList<>();
    /** connection cache, one Jedis per slot pool */
    private Map<JedisPool, Jedis> jedisMap = new HashMap<>();
    /** whether any data is still sitting in a send buffer */
    private boolean hasDataInBuf = false;

    /**
     * Build a JedisClusterPipeline on top of an existing JedisCluster instance.
     * @param jedisCluster jedisCluster
     * @return JedisClusterPipeline
     */
    public static JedisClusterPipeline pipelined(JedisCluster jedisCluster) {
        JedisClusterPipeline pipeline = new JedisClusterPipeline();
        pipeline.setJedisCluster(jedisCluster);
        return pipeline;
    }

    public void setJedisCluster(JedisCluster jedis) {
        connectionHandler = getValue(jedis, FIELD_CONNECTION_HANDLER);
        clusterInfoCache = getValue(connectionHandler, FIELD_CACHE);
    }

    /**
     * Refresh the cluster topology; call this whenever the cluster layout changes.
     */
    public void refreshCluster() {
        connectionHandler.renewSlotCache();
    }

    /**
     * Read back all pending replies. Unlike syncAndReturnAll(), sync() skips deserializing the reply data.
     */
    public void sync() {
        innerSync(null);
    }

    /**
     * Read back all pending replies and return them in command order.
     *
     * @return all results, in the order the commands were issued
     */
    public List<Object> syncAndReturnAll() {
        List<Object> responseList = new ArrayList<>();
        innerSync(responseList);
        return responseList;
    }

    private void innerSync(List<Object> formatted) {
        try {
            for (Client client : clients) {
                // sync() does not strictly need the parsed result, but without calling get() an error
                // such as JedisMovedDataException would go unnoticed, so get() is invoked to surface it.
                // Reading Response.data directly would skip the parsing cost, but there is no accessor
                // for it, and reaching in with yet more reflection is not worth it.
                Object data = generateResponse(client.getOne()).get();
                if (null != formatted) {
                    formatted.add(data);
                }
            }
        } catch (JedisRedirectionException jre) {
            refreshCluster();
            throw jre;
        } finally {
            // flush every client that has not executed yet, so a connection returned to the
            // pool cannot pollute later commands
            for (Jedis jedis : jedisMap.values()) {
                flushCachedData(jedis);
            }
            hasDataInBuf = false;
            close();
        }
    }

    @Override
    public void close() {
        clean();

        clients.clear();

        for (Jedis jedis : jedisMap.values()) {
            if (hasDataInBuf) {
                flushCachedData(jedis);
            }

            jedis.close();
        }

        jedisMap.clear();

        hasDataInBuf = false;
    }

    private void flushCachedData(Jedis jedis) {
        try {
            jedis.getClient().getAll();
        } catch (RuntimeException ex) {
            // once one client fails, the rest are likely to fail as well
        }
    }

    @Override
    protected Client getClient(String key) {
        byte[] bKey = SafeEncoder.encode(key);

        return getClient(bKey);
    }

    @Override
    protected Client getClient(byte[] key) {
        Jedis jedis = getJedis(JedisClusterCRC16.getSlot(key));

        Client client = jedis.getClient();
        clients.add(client);

        return client;
    }

    private Jedis getJedis(int slot) {
        JedisPool pool = clusterInfoCache.getSlotPool(slot);
        // look up the cached Jedis for this pool
        Jedis jedis = jedisMap.get(pool);
        if (null == jedis) {
            jedis = pool.getResource();
            jedisMap.put(pool, jedis);
        }

        hasDataInBuf = true;
        return jedis;
    }

    private static Field getField(Class<?> cls, String fieldName) {
        try {
            Field field = cls.getDeclaredField(fieldName);
            field.setAccessible(true);
            return field;
        } catch (NoSuchFieldException | SecurityException e) {
            throw new RuntimeException("cannot find or access field '" + fieldName + "' from " + cls.getName(), e);
        }
    }

    @SuppressWarnings({"unchecked"})
    private static <T> T getValue(Object obj, Field field) {
        try {
            return (T) field.get(obj);
        } catch (IllegalArgumentException | IllegalAccessException e) {
            throw new RuntimeException("get value fail", e);
        }
    }

}
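A minimal usage sketch of the class above (illustrative only, not part of the repo; the addresses are placeholders and null auth assumes an unsecured cluster):

    import com.alibaba.datax.plugin.writer.rediswriter.JedisClusterPipeline;
    import com.alibaba.datax.plugin.writer.rediswriter.RedisWriterHelper;
    import redis.clients.jedis.JedisCluster;

    public class PipelineUsageSketch {
        public static void main(String[] args) {
            JedisCluster cluster = RedisWriterHelper.getJedisCluster("host1:6379,host2:6379", null);
            JedisClusterPipeline pipeline = JedisClusterPipeline.pipelined(cluster);
            pipeline.set("k1", "v1"); // queued on the connection that owns k1's slot
            pipeline.set("k2", "v2"); // may go to a different node's connection
            pipeline.sync();          // drain replies and release the cached connections
            RedisWriterHelper.close(cluster);
        }
    }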
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/Key.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter;

public class Key {
    public final static String REDISMODE = "redisMode";
    public final static String ADDRESS = "address";
    public final static String AUTH = "auth";

    public final static String BATCH_SIZE = "batchSize";

    // redis data type to write; three types are supported: string, list, hash
    public final static String WRITE_TYPE = "writeType";

    // hive column config that supplies the redis key
    public final static String COLKEY = "colKey";

    // source column name
    public final static String COL_NAME = "name";
    // source column index
    public final static String COL_INDEX = "index";

    // hive column(s) that supply the redis value
    public final static String COLVALUE = "colValue";
    // prefix of the redis key, optional
    public final static String KEY_PREFIX = "keyPrefix";

    // suffix of the redis key, optional
    public final static String KEY_SUFFIX = "keySuffix";

    // user-defined literal redis key, not taken from a hive column
    public final static String STRING_KEY = "strKey";

    // TTL of the redis key, in seconds
    public final static String EXPIRE = "expire";

    // defaults to insert; delete is also supported
    public final static String WRITE_MODE = "writeMode";
    // per-type detail configuration
    public final static String CONFIG = "config";
    // push mode for the redis list type: lpush, rpush, overwrite
    public final static String LIST_PUSH_TYPE = "pushType";

    // delimiter used to split the source column value for the list type; only string source columns are supported
    public final static String LIST_VALUE_DELIMITER = "valueDelimiter";

    // hash fields to delete; only effective when deleting fields of a hash
    public final static String HASH_DELETE_FILEDS = "hashFields";

}
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/RedisWriteAbstract.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter;

import com.alibaba.datax.common.exception.CommonErrorCode;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.util.Configuration;
import org.apache.commons.lang3.StringUtils;
import redis.clients.jedis.PipelineBase;


/**
 * @author lijf@2345.com
 * @date 2020/5/19 15:56
 * @desc abstract base class for the redis writers
 */
public abstract class RedisWriteAbstract {
    protected Configuration configuration;
    protected PipelineBase pipelined;
    Object redisClient;
    protected int records;
    protected String keyPreffix;
    protected String keySuffix;
    protected Integer batchSize;
    protected Integer expire;
    protected String strKey;
    protected Integer keyIndex;
    protected Integer valueIndex;

    public RedisWriteAbstract(Configuration configuration) {
        this.configuration = configuration;
    }


    public PipelineBase getRedisPipelineBase(Configuration configuration) {
        String mode = configuration.getNecessaryValue(Key.REDISMODE, CommonErrorCode.CONFIG_ERROR);
        String addr = configuration.getNecessaryValue(Key.ADDRESS, CommonErrorCode.CONFIG_ERROR);
        String auth = configuration.getString(Key.AUTH);
        if (Constant.CLUSTER.equalsIgnoreCase(mode)) {
            redisClient = RedisWriterHelper.getJedisCluster(addr, auth);
        } else {
            redisClient = RedisWriterHelper.getJedis(addr, auth);
        }
        return RedisWriterHelper.getPipeLine(redisClient);
    }

    /**
     * Initialize the parameters shared by all writer types.
     */
    public void initCommonParams() {
        Configuration detailConfig = this.configuration.getConfiguration(Key.CONFIG);
        batchSize = detailConfig.getInt(Key.BATCH_SIZE, 1000);
        keyPreffix = detailConfig.getString(Key.KEY_PREFIX, "");
        keySuffix = detailConfig.getString(Key.KEY_SUFFIX, "");
        expire = detailConfig.getInt(Key.EXPIRE, Integer.MAX_VALUE);
        pipelined = getRedisPipelineBase(configuration);
    }

    /**
     * Validate and parse the parameters.
     */
    public void checkAndGetParams() {
        Configuration detailConfig = configuration.getConfiguration(Key.CONFIG);

        String colKey = detailConfig.getString(Key.COLKEY, null);
        String strKey = detailConfig.getString(Key.STRING_KEY, null);

        if (StringUtils.isBlank(colKey) && StringUtils.isBlank(strKey)) {
            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "strKey or colKey must not be empty! Please check the config");
        }
        if (StringUtils.isNotBlank(colKey) && StringUtils.isNotBlank(strKey)) {
            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "strKey and colKey must not both be set! Please check the config");
        }

        if (StringUtils.isNotBlank(colKey)) {
            keyIndex = detailConfig.getConfiguration(Key.COLKEY).getInt(Key.COL_INDEX);
        } else {
            this.strKey = strKey;
        }
        String colValue = detailConfig.getString(Key.COLVALUE, null);
        if (StringUtils.isNotBlank(colKey) && StringUtils.isBlank(colValue)) {
            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "colValue must not be empty! Please check the config");
        }
        String writeType = configuration.getString(Key.WRITE_TYPE);
        // For the hash type, colValue holds several columns, so valueIndex cannot be read here;
        // the HashTypeWriter subclass handles it on its own.
        if (!Constant.WRITE_TYPE_HASH.equalsIgnoreCase(writeType)) {
            valueIndex = detailConfig.getConfiguration(Key.COLVALUE).getInt(Key.COL_INDEX);
        }
    }

    /**
     * Add the incoming records to the pipeline.
     *
     * @param lineReceiver RecordReceiver
     */
    public abstract void addToPipLine(RecordReceiver lineReceiver);


    /**
     * Flush the pipeline to redis once batchSize records have accumulated.
     */
    public void syscData() {
        if (records >= batchSize) {
            RedisWriterHelper.syscData(pipelined);
            records = 0;
        }
    }


    public void syscAllData() {
        RedisWriterHelper.syscData(pipelined);
    }

    /**
     * Release resources.
     */
    public void close() {
        RedisWriterHelper.syscData(pipelined);
        RedisWriterHelper.close(redisClient);
    }
}
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/RedisWriter.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter;

import com.alibaba.datax.common.exception.CommonErrorCode;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.spi.Writer;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.plugin.writer.rediswriter.writer.DeleteWriter;
import com.alibaba.datax.plugin.writer.rediswriter.writer.HashTypeWriter;
import com.alibaba.datax.plugin.writer.rediswriter.writer.ListTypeWriter;
import com.alibaba.datax.plugin.writer.rediswriter.writer.StringTypeWriter;

import java.util.ArrayList;
import java.util.List;

public class RedisWriter extends Writer {
    public static class Job extends Writer.Job {
        private Configuration originalConfig = null;

        @Override
        public List<Configuration> split(int mandatoryNumber) {
            List<Configuration> splitResultConfigs = new ArrayList<>();
            for (int j = 0; j < mandatoryNumber; j++) {
                splitResultConfigs.add(originalConfig.clone());
            }
            return splitResultConfigs;
        }

        @Override
        public void init() {
            this.originalConfig = super.getPluginJobConf();
            RedisWriterHelper.checkConnection(originalConfig);
        }

        @Override
        public void destroy() {

        }
    }

    public static class Task extends Writer.Task {
        private Configuration taskConfig;
        RedisWriteAbstract writer;

        @Override
        public void startWrite(RecordReceiver lineReceiver) {
            writer.addToPipLine(lineReceiver);
            writer.syscData();
        }

        @Override
        public void init() {
            this.taskConfig = super.getPluginJobConf();
            String writeType = taskConfig.getString(Key.WRITE_TYPE);
            String writeMode = taskConfig.getString(Key.WRITE_MODE);
            // decide between delete and insert
            if (Constant.WRITE_MODE_DELETE.equalsIgnoreCase(writeMode)) {
                writer = new DeleteWriter(taskConfig);
            } else {
                // pick the writer for the target redis data type: string, list, hash
                switch (writeType) {
                    case Constant.WRITE_TYPE_HASH:
                        writer = new HashTypeWriter(taskConfig);
                        break;
                    case Constant.WRITE_TYPE_LIST:
                        writer = new ListTypeWriter(taskConfig);
                        break;
                    case Constant.WRITE_TYPE_STRING:
                        writer = new StringTypeWriter(taskConfig);
                        break;
                    default:
                        throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "rediswriter does not support this data type: " + writeType);
                }

            }
            writer.checkAndGetParams();
            writer.initCommonParams();
        }

        @Override
        public void destroy() {
            writer.syscAllData();
            writer.close();
        }
    }
}
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/RedisWriterHelper.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter;

import com.alibaba.datax.common.exception.CommonErrorCode;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.util.Configuration;
import org.apache.commons.lang3.StringUtils;
import redis.clients.jedis.*;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

public class RedisWriterHelper {


    /**
     * Check that redis is reachable.
     * @param originalConfig configuration read from the job json file
     */
    public static void checkConnection(Configuration originalConfig) {
        String mode = originalConfig.getNecessaryValue(Key.REDISMODE, CommonErrorCode.CONFIG_ERROR);
        String addr = originalConfig.getNecessaryValue(Key.ADDRESS, CommonErrorCode.CONFIG_ERROR);
        String auth = originalConfig.getString(Key.AUTH);

        if (Constant.CLUSTER.equalsIgnoreCase(mode)) {
            JedisCluster jedisCluster = getJedisCluster(addr, auth);
            jedisCluster.set("testConnect", "test");
            jedisCluster.expire("testConnect", 1);
            try {
                jedisCluster.close();
            } catch (IOException e) {
                e.printStackTrace();
            }

        } else if (Constant.STANDALONE.equalsIgnoreCase(mode)) {
            Jedis jedis = getJedis(addr, auth);
            jedis.set("testConnect", "test");
            jedis.expire("testConnect", 1);
            jedis.close();
        } else {
            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR,
                    String.format("Invalid configuration: redisMode [%s] must be singleton or cluster.", mode));

        }
    }

    /**
     * Get a Jedis client.
     * @param addr address, ip:port
     * @param auth password
     * @return Jedis
     */
    public static Jedis getJedis(String addr, String auth) {
        String[] split = addr.split(":");
        Jedis jedis = new Jedis(split[0], Integer.parseInt(split[1]));
        if (StringUtils.isNoneBlank(auth)) {
            jedis.auth(auth);
        }
        return jedis;
    }

    /**
     * Get a JedisCluster client.
     * @param addr addresses, ip:port,ip:port,ip:port
     * @param auth password
     * @return JedisCluster
     */
    public static JedisCluster getJedisCluster(String addr, String auth) {
        JedisPoolConfig jedisPoolConfig = new JedisPoolConfig();
        JedisCluster jedisCluster;
        Set<HostAndPort> nodes = new HashSet<>();
        String[] split = addr.split(",");
        for (int i = 0; i < split.length; i++) {
            String node = split[i];
            String[] hostPort = node.split(":");
            nodes.add(new HostAndPort(hostPort[0], Integer.parseInt(hostPort[1])));
        }
        if (StringUtils.isBlank(auth)) {
            jedisCluster = new JedisCluster(nodes, 10000, 10000, 3, jedisPoolConfig);
        } else {
            jedisCluster = new JedisCluster(nodes, 10000, 10000, 3, auth, jedisPoolConfig);
        }

        return jedisCluster;
    }


    /**
     * Get a pipeline for either client type.
     *
     * @param obj a Jedis or JedisCluster instance
     * @return the pipeline
     */
    public static PipelineBase getPipeLine(Object obj) {
        if (obj instanceof Jedis) {
            return ((Jedis) obj).pipelined();
        } else if (obj instanceof JedisCluster) {
            JedisCluster jedis = ((JedisCluster) obj);
            return JedisClusterPipeline.pipelined(jedis);
        }
        // fail fast instead of returning null and letting callers NPE later
        throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR,
                "unsupported redis client type: " + (obj == null ? "null" : obj.getClass().getName()));
    }

    /**
     * Flush pipelined commands to redis.
     *
     * @param obj the pipeline
     */
    public static void syscData(Object obj) {
        if (obj instanceof Pipeline) {
            ((Pipeline) obj).sync();

        } else if (obj instanceof JedisClusterPipeline) {
            ((JedisClusterPipeline) obj).sync();
        }
    }

    /**
     * Close the client resources.
     *
     * @param obj a Jedis or JedisCluster instance
     */
    public static void close(Object obj) {
        if (obj instanceof Jedis) {
            ((Jedis) obj).close();
        } else if (obj instanceof JedisCluster) {
            try {
                ((JedisCluster) obj).close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
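A quick sanity-check sketch for the helper in singleton mode, in the same spirit as src/test/java/JedisTest.java (the address is a placeholder and null auth assumes no password):

    import com.alibaba.datax.plugin.writer.rediswriter.RedisWriterHelper;
    import redis.clients.jedis.Jedis;
    import redis.clients.jedis.PipelineBase;

    public class HelperUsageSketch {
        public static void main(String[] args) {
            Jedis jedis = RedisWriterHelper.getJedis("127.0.0.1:6379", null);
            PipelineBase pipeline = RedisWriterHelper.getPipeLine(jedis); // a plain Pipeline for Jedis
            pipeline.set("datax:test", "1");
            RedisWriterHelper.syscData(pipeline); // Pipeline.sync() under the hood
            RedisWriterHelper.close(jedis);
        }
    }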
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/writer/DeleteWriter.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter.writer;

import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.exception.CommonErrorCode;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.plugin.writer.rediswriter.Key;
import com.alibaba.datax.plugin.writer.rediswriter.RedisWriteAbstract;
import org.apache.commons.lang3.StringUtils;

/**
 * @author lijf@2345.com
 * @date 2020/5/20 13:39
 * @desc delete redis keys or hash fields
 */
public class DeleteWriter extends RedisWriteAbstract {
    // names of the hash fields to delete
    String hashFileds;
    Integer keyIndex;
    String strKey;

    public DeleteWriter(Configuration configuration) {
        super(configuration);
    }

    @Override
    public void checkAndGetParams() {
        Configuration detailConfig = configuration.getConfiguration(Key.CONFIG);
        String colKey = detailConfig.getString(Key.COLKEY, null);
        this.strKey = detailConfig.getString(Key.STRING_KEY, null);
        if (StringUtils.isBlank(colKey) && StringUtils.isBlank(strKey)) {
            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "strKey or colKey must not be empty! Please check the config");
        }
        if (StringUtils.isNotBlank(colKey)) {
            this.keyIndex = detailConfig.getConfiguration(Key.COLKEY).getInt(Key.COL_INDEX);
        }
        hashFileds = detailConfig.getString(Key.HASH_DELETE_FILEDS, null);

    }

    @Override
    public void addToPipLine(RecordReceiver lineReceiver) {
        Record record;
        while ((record = lineReceiver.getFromReader()) != null) {
            // key comes from the configured source column, or else from the literal strKey
            String key = (null != keyIndex) ? record.getColumn(keyIndex).asString() : strKey;
            String redisKey = keyPreffix + key + keySuffix;
            if (null != hashFileds) {
                String[] fileds = hashFileds.split(",");
                pipelined.hdel(redisKey, fileds);
            } else {
                pipelined.del(redisKey);
            }
            records++;
        }
    }
}
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/writer/HashTypeWriter.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter.writer;

import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.plugin.writer.rediswriter.Key;
import com.alibaba.datax.plugin.writer.rediswriter.RedisWriteAbstract;

import java.util.List;

/**
 * @author lijf@2345.com
 * @date 2020/5/19 16:18
 * @desc write the hash type to redis
 */
public class HashTypeWriter extends RedisWriteAbstract {
    List<Configuration> hashFieldIndexs;

    public HashTypeWriter(Configuration configuration) {
        super(configuration);
    }

    @Override
    public void checkAndGetParams() {
        super.checkAndGetParams();
        Configuration detailConfig = configuration.getConfiguration(Key.CONFIG);
        // the hash type uses the source column names as the hash field names by default
        hashFieldIndexs = detailConfig.getListConfiguration(Key.COLVALUE);
    }

    @Override
    public void addToPipLine(RecordReceiver lineReceiver) {
        Record record;
        while ((record = lineReceiver.getFromReader()) != null) {
            String redisKey;
            if (null != keyIndex) {
                String key = record.getColumn(keyIndex).asString();
                redisKey = keyPreffix + key + keySuffix;
            } else {
                redisKey = keyPreffix + strKey + keySuffix;
            }
            // each configured colValue column becomes one hash field, named after the column
            for (Configuration hashFieldIndex : hashFieldIndexs) {
                String filed = hashFieldIndex.getString(Key.COL_NAME);
                Integer index = hashFieldIndex.getInt(Key.COL_INDEX);
                String value = record.getColumn(index).asString();
                pipelined.hset(redisKey, filed, value);
                records++;
            }
        }
    }

}
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/writer/ListTypeWriter.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter.writer;

import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.exception.CommonErrorCode;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.plugin.writer.rediswriter.Constant;
import com.alibaba.datax.plugin.writer.rediswriter.Key;
import com.alibaba.datax.plugin.writer.rediswriter.RedisWriteAbstract;
import org.apache.commons.lang3.StringUtils;

/**
 * @author lijf@2345.com
 * @date 2020/5/19 16:18
 * @desc write the list type to redis
 */
public class ListTypeWriter extends RedisWriteAbstract {
    String pushType;
    String valueDelimiter;

    public ListTypeWriter(Configuration configuration) {
        super(configuration);
    }

    @Override
    public void checkAndGetParams() {
        super.checkAndGetParams();
        Configuration detailConfig = this.configuration.getConfiguration(Key.CONFIG);
        pushType = detailConfig.getString(Key.LIST_PUSH_TYPE, Constant.LIST_PUSH_TYPE_OVERWRITE);
        valueDelimiter = detailConfig.getString(Key.LIST_VALUE_DELIMITER);
        if (StringUtils.isBlank(valueDelimiter)) {
            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "valueDelimiter must not be empty! Please check the config");
        }
        if (!Constant.LIST_PUSH_TYPE_LPUSH.equalsIgnoreCase(pushType) &&
                !Constant.LIST_PUSH_TYPE_RPUSH.equalsIgnoreCase(pushType) &&
                !Constant.LIST_PUSH_TYPE_OVERWRITE.equalsIgnoreCase(pushType)) {
            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "Invalid pushType! The list type only supports lpush, rpush and overwrite! Please check the config! pushType:" + pushType);
        }
    }

    @Override
    public void addToPipLine(RecordReceiver lineReceiver) {
        Record record;
        while ((record = lineReceiver.getFromReader()) != null) {
            String redisKey;
            String columnValue;
            if (null != keyIndex) {
                String key = record.getColumn(keyIndex).asString();
                redisKey = keyPreffix + key + keySuffix;
            } else {
                redisKey = keyPreffix + strKey + keySuffix;
            }
            columnValue = record.getColumn(valueIndex).asString();
            String[] redisValue = columnValue.split(valueDelimiter);
            switch (pushType) {
                case Constant.LIST_PUSH_TYPE_OVERWRITE:
                    // overwrite: drop the existing list first, then push the new elements
                    pipelined.del(redisKey);
                    pipelined.rpush(redisKey, redisValue);
                    break;
                case Constant.LIST_PUSH_TYPE_RPUSH:
                    pipelined.rpush(redisKey, redisValue);
                    break;
                case Constant.LIST_PUSH_TYPE_LPUSH:
                    pipelined.lpush(redisKey, redisValue);
                    break;
            }
            records++;
        }
    }
}
--------------------------------------------------------------------------------
/src/main/java/com/alibaba/datax/plugin/writer/rediswriter/writer/StringTypeWriter.java:
--------------------------------------------------------------------------------
package com.alibaba.datax.plugin.writer.rediswriter.writer;

import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.plugin.RecordReceiver;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.plugin.writer.rediswriter.RedisWriteAbstract;


/**
 * @author lijf@2345.com
 * @date 2020/5/19 16:15
 * @desc write the string type to redis
 */
public class StringTypeWriter extends RedisWriteAbstract {


    public StringTypeWriter(Configuration configuration) {
        super(configuration);
    }

    @Override
    public void checkAndGetParams() {
        super.checkAndGetParams();
    }

    @Override
    public void addToPipLine(RecordReceiver lineReceiver) {
        Record record;
        while ((record = lineReceiver.getFromReader()) != null) {
            String redisKey;
            if (null != keyIndex) {
                String key = record.getColumn(keyIndex).asString();
                redisKey = keyPreffix + key + keySuffix;
            } else {
                redisKey = keyPreffix + strKey + keySuffix;
            }
            String redisValue = record.getColumn(valueIndex).asString();
            pipelined.set(redisKey, redisValue);
            pipelined.expire(redisKey, expire);
            records++;
        }
    }

}

--------------------------------------------------------------------------------
/src/main/resources/plugin.json:
--------------------------------------------------------------------------------
{
    "name": "rediswriter",
    "class": "com.alibaba.datax.plugin.writer.rediswriter.RedisWriter",
    "description": "useScene: prod. mechanism: write records into redis string, list or hash structures through a jedis pipeline; writeMode delete removes keys or hash fields.",
    "developer": "2345.com"
}

--------------------------------------------------------------------------------
/src/main/resources/plugin_job_template.json:
--------------------------------------------------------------------------------
{
    "name": "rediswriter",
    "parameter": {
        "redisMode": "",
        "address": "",
        "auth": "",
        "writeType": "",
        "config": {
            "colKey": {},
            "colValue": {},
            "expire": 0,
            "keyPrefix": "",
            "keySuffix": ""
        }
    }
}

--------------------------------------------------------------------------------
/src/test/demo/data/create_hive_table.sql:
--------------------------------------------------------------------------------
-- After creating the table, upload the test data file hive_data.txt to the table's path
CREATE table redis_writer(uid int,channels string,name string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE;

--------------------------------------------------------------------------------
/src/test/demo/data/hive_data.txt:
--------------------------------------------------------------------------------
111	a,b,c	zhangsan
222	e,f,g	lisi
333	h,i,j	wangwu

--------------------------------------------------------------------------------
/src/test/demo/hive_delete_to_redis_hash_filed.json:
--------------------------------------------------------------------------------
{
    "job": {
        "setting": {
            "speed": {
                "channel": 3
            }
        },
        "content": [
            {
                "reader": {
                    "name": "hdfsreader",
                    "parameter": {
                        "path": "/user/hive/warehouse/test.db/redis_writer",
                        "defaultFS": "hdfs://xxxxxxxxxx:8020",
                        "column": [
                            {
                                "index": 0,
                                "name": "uid",
                                "type": "string"
                            },
                            {
                                "index": 1,
                                "name": "channels",
                                "type": "string"
                            },
                            {
                                "index": 2,
                                "name": "name",
                                "type": "string"
                            }
                        ],
                        "fileType": "text",
                        "encoding": "UTF-8",
                        "fieldDelimiter": "\t"
                    }
                },
                "writer": {
                    "name": "rediswriter",
                    "parameter": {
                        "redisMode": "cluster",
                        "address": "xxxxxx:6379,xxxxxx:6479,xxxxxx:6379,xxxxxx:6479,xxxxxx:6379,xxxxxx:6479",
                        "auth": "xxxxxx",
                        "writeType": "string",
                        "writeMode": "delete",
                        "config": {
                            "colKey": {"name": "uid", "index": 0},
                            "hashFields": "name",
                            "keyPrefix": "datax:"
                        }
                    }
                }
            }
        ]
    }
}

--------------------------------------------------------------------------------
/src/test/demo/hive_delete_to_redis_string.json:
--------------------------------------------------------------------------------
{
    "job": {
        "setting": {
            "speed": {
                "channel": 3
            }
        },
        "content": [
            {
                "reader": {
                    "name": "hdfsreader",
                    "parameter": {
                        "path": "/user/hive/warehouse/test.db/redis_writer",
                        "defaultFS": "hdfs://xxxxxx:8020",
                        "column": [
                            {
                                "index": 0,
                                "name": "uid",
                                "type": "string"
                            },
                            {
                                "index": 1,
                                "name": "channels",
                                "type": "string"
                            },
                            {
                                "index": 2,
                                "name": "name",
                                "type": "string"
                            }
                        ],
                        "fileType": "text",
                        "encoding": "UTF-8",
                        "fieldDelimiter": "\t"
                    }
                },
                "writer": {
                    "name": "rediswriter",
                    "parameter": {
                        "redisMode": "cluster",
                        "address": "xxxxxxx:6379,xxxxxxx:6479,xxxxxxx:6379,xxxxxxx:6479,xxxxxxx:6379,xxxxxxx:6479",
                        "auth": "xxxxxxx",
                        "writeType": "string",
                        "writeMode": "delete",
                        "config": {
                            "colKey": {"name": "uid", "index": 0},
                            "keyPrefix": "datax:"
                        }
                    }
                }
            }
        ]
    }
}
"reader": { 11 | "name": "hdfsreader", 12 | "parameter": { 13 | "path": "/user/hive/warehouse/test.db/redis_writer", 14 | "defaultFS": "hdfs://xxxxxx:8020", 15 | "column": [ 16 | { 17 | "index": 0, 18 | "name": "uid", 19 | "type": "string" 20 | }, 21 | { 22 | "index": 1, 23 | "name": "channels", 24 | "type": "string" 25 | }, 26 | { 27 | "index": 2, 28 | "name": "name", 29 | "type": "string" 30 | }], 31 | "fileType": "text", 32 | "encoding": "UTF-8", 33 | "fieldDelimiter": "\t" 34 | } 35 | }, 36 | "writer": { 37 | "name": "rediswriter", 38 | "parameter": { 39 | "redisMode": "cluster", 40 | "address": "xxxxxxx:6379,xxxxxxx:6479,xxxxxxx:6379,xxxxxxx:6479,xxxxxxx:6379,xxxxxxx:6479", 41 | "auth": "xxxxxxx", 42 | "writeType":"string", 43 | "writeMode": "delete", 44 | "config":{ 45 | "colKey":{"name":"uid","index":0}, 46 | "keyPrefix":"datax:" 47 | } 48 | } 49 | } 50 | } 51 | ] 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/test/demo/hive_to_redis_hash.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 3 6 | } 7 | }, 8 | "content": [ 9 | { 10 | "reader": { 11 | "name": "hdfsreader", 12 | "parameter": { 13 | "path": "/user/hive/warehouse/test.db/redis_writer", 14 | "defaultFS": "hdfs://xxxx:8020", 15 | "column": [ 16 | { 17 | "index": 0, 18 | "name": "uid", 19 | "type": "string" 20 | }, 21 | { 22 | "index": 1, 23 | "name": "channels", 24 | "type": "string" 25 | }, 26 | { 27 | "index": 2, 28 | "name": "name", 29 | "type": "string" 30 | }], 31 | "fileType": "text", 32 | "encoding": "UTF-8", 33 | "fieldDelimiter": "\t" 34 | } 35 | }, 36 | "writer": { 37 | "name": "rediswriter", 38 | "parameter": { 39 | "redisMode": "cluster", 40 | "address": "xxxxxx:6379,xxxxxx:6479,xxxxxx:6379,xxxxxx:6479,xxxxxx:6379,xxxxxx:6479", 41 | "auth": "Pye9WQAYsgetVrLw", 42 | "writeType":"hash", 43 | "config":{ 44 | "colKey":{"name":"uid","index":0}, 45 | "colValue":[{"name":"channels","index":1},{"name":"name","index":2}], 46 | "valueDelimiter": ",", 47 | "expire":300, 48 | "keyPrefix":"datax:hash:" 49 | } 50 | } 51 | } 52 | } 53 | ] 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/test/demo/hive_to_redis_list.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 3 6 | } 7 | }, 8 | "content": [ 9 | { 10 | "reader": { 11 | "name": "hdfsreader", 12 | "parameter": { 13 | "path": "/user/hive/warehouse/test.db/redis_writer", 14 | "defaultFS": "hdfs://xxxxxxxx:8020", 15 | "column": [ 16 | { 17 | "index": 0, 18 | "name": "uid", 19 | "type": "string" 20 | }, 21 | { 22 | "index": 1, 23 | "name": "channels", 24 | "type": "string" 25 | }, 26 | { 27 | "index": 2, 28 | "name": "name", 29 | "type": "string" 30 | }], 31 | "fileType": "text", 32 | "encoding": "UTF-8", 33 | "fieldDelimiter": "\t" 34 | } 35 | }, 36 | "writer": { 37 | "name": "rediswriter", 38 | "parameter": { 39 | "redisMode": "cluster", 40 | "address": "xxxxxxxx:6379,xxxxxxxx:6479,xxxxxxxx:6379,xxxxxxxx:6479,xxxxxxxx:6379,xxxxxxxx:6479", 41 | "auth": "xxxxxxxx", 42 | "writeType":"list", 43 | "config":{ 44 | "colKey":{"name":"uid","index":0}, 45 | "colValue":{"name":"channels","index":1}, 46 | "valueDelimiter": ",", 47 | "pushType": "overwrite", 48 | "expire":300, 49 | "keyPrefix":"datax:list:" 50 | } 51 | } 52 | } 53 | } 54 | ] 55 | } 56 | } 57 | 
--------------------------------------------------------------------------------
/src/test/demo/hive_to_redis_list.json:
--------------------------------------------------------------------------------
{
    "job": {
        "setting": {
            "speed": {
                "channel": 3
            }
        },
        "content": [
            {
                "reader": {
                    "name": "hdfsreader",
                    "parameter": {
                        "path": "/user/hive/warehouse/test.db/redis_writer",
                        "defaultFS": "hdfs://xxxxxxxx:8020",
                        "column": [
                            {
                                "index": 0,
                                "name": "uid",
                                "type": "string"
                            },
                            {
                                "index": 1,
                                "name": "channels",
                                "type": "string"
                            },
                            {
                                "index": 2,
                                "name": "name",
                                "type": "string"
                            }
                        ],
                        "fileType": "text",
                        "encoding": "UTF-8",
                        "fieldDelimiter": "\t"
                    }
                },
                "writer": {
                    "name": "rediswriter",
                    "parameter": {
                        "redisMode": "cluster",
                        "address": "xxxxxxxx:6379,xxxxxxxx:6479,xxxxxxxx:6379,xxxxxxxx:6479,xxxxxxxx:6379,xxxxxxxx:6479",
                        "auth": "xxxxxxxx",
                        "writeType": "list",
                        "config": {
                            "colKey": {"name": "uid", "index": 0},
                            "colValue": {"name": "channels", "index": 1},
                            "valueDelimiter": ",",
                            "pushType": "overwrite",
                            "expire": 300,
                            "keyPrefix": "datax:list:"
                        }
                    }
                }
            }
        ]
    }
}

--------------------------------------------------------------------------------
/src/test/demo/hive_to_redis_string.json:
--------------------------------------------------------------------------------
{
    "job": {
        "setting": {
            "speed": {
                "channel": 3
            }
        },
        "content": [
            {
                "reader": {
                    "name": "hdfsreader",
                    "parameter": {
                        "path": "/user/hive/warehouse/test.db/redis_writer",
                        "defaultFS": "hdfs://xxxxxxxxxxxxx:8020",
                        "column": [
                            {
                                "index": 0,
                                "name": "uid",
                                "type": "string"
                            },
                            {
                                "index": 1,
                                "name": "channels",
                                "type": "string"
                            },
                            {
                                "index": 2,
                                "name": "name",
                                "type": "string"
                            }
                        ],
                        "fileType": "text",
                        "encoding": "UTF-8",
                        "fieldDelimiter": "\t"
                    }
                },
                "writer": {
                    "name": "rediswriter",
                    "parameter": {
                        "redisMode": "cluster",
                        "address": "xxxxxxxxxxxxx:6379,xxxxxxxxxxxxx:6479,xxxxxxxxxxxxx:6379,xxxxxxxxxxxxx:6479,xxxxxxxxxxxxx:6379,xxxxxxxxxxxxx:6479",
                        "auth": "xxxxxx",
                        "writeType": "string",
                        "config": {
                            "colKey": {"name": "uid", "index": 0},
                            "colValue": {"name": "name", "index": 2},
                            "expire": 300,
                            "keyPrefix": "datax:string:"
                        }
                    }
                }
            }
        ]
    }
}

--------------------------------------------------------------------------------
/src/test/java/JedisTest.java:
--------------------------------------------------------------------------------
import com.alibaba.datax.plugin.writer.rediswriter.RedisWriterHelper;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.PipelineBase;

/**
 * @author lijf@2345.com
 * @date 2020/5/21 19:13
 * @desc
 */
public class JedisTest {
    public static void main(String[] args) {
        JedisCluster je = RedisWriterHelper.getJedisCluster("xxxxxx:6379,xxxxxx:6479,xxxxxx:6379,xxxxxx:6479,xxxxxx:6379,xxxxxx:6479", "xxxxxx");
        // PipelineBase pipeLine = RedisWriterHelper.getPipeLine(je);
        // pipeLine.set("111", "111");
        // pipeLine.set("222", "222");
        // pipeLine.set("333", "333");
        // RedisWriterHelper.syscData(pipeLine);
        String s1 = je.get("datax:333");

        System.out.println(s1);
    }
}
--------------------------------------------------------------------------------
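In the same spirit as JedisTest, a hedged sketch for checking what the hive_to_redis_hash.json demo wrote (class name, addresses and auth are placeholders, not part of the repo): the key is keyPrefix "datax:hash:" plus the uid column, and for uid 111 the hive_data.txt row maps to the fields channels=a,b,c and name=zhangsan.

    import com.alibaba.datax.plugin.writer.rediswriter.RedisWriterHelper;
    import redis.clients.jedis.JedisCluster;

    public class HashDemoCheck {
        public static void main(String[] args) {
            JedisCluster je = RedisWriterHelper.getJedisCluster("xxxxxx:6379,xxxxxx:6479", "xxxxxx");
            // expected after a demo run: {channels=a,b,c, name=zhangsan}
            System.out.println(je.hgetAll("datax:hash:111"));
        }
    }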