├── README.md ├── awesome-netty-common ├── pom.xml └── src │ └── main │ └── java │ └── org │ └── north │ └── netty │ └── common │ └── utils │ ├── ProtostuffHelper.java │ └── SerializeUtils.java ├── awesome-netty-elasticsearch └── pom.xml ├── awesome-netty-kafka ├── pom.xml └── src │ ├── main │ └── java │ │ └── com │ │ └── north │ │ └── netty │ │ └── kafka │ │ ├── KafkaClient.java │ │ ├── bean │ │ ├── AbstractKafkaResponse.java │ │ ├── KafkaRequest.java │ │ ├── KafkaRequestHeader.java │ │ ├── KafkaResponse.java │ │ ├── KafkaResponseHeader.java │ │ ├── broker │ │ │ └── Broker.java │ │ ├── fetch │ │ │ ├── AbortedTransaction.java │ │ │ ├── FetchPartitionHeader.java │ │ │ ├── FetchPartitionResp.java │ │ │ ├── FetchRequest.java │ │ │ ├── FetchResponse.java │ │ │ ├── FetchTopicPartitionRequest.java │ │ │ ├── FetchTopicRequest.java │ │ │ └── FetchTopicResponse.java │ │ ├── meta │ │ │ ├── KafkaMetaRequest.java │ │ │ └── KafkaMetaResponse.java │ │ ├── msg │ │ │ ├── ConsumerRecord.java │ │ │ ├── KafkaMsgRecordBatch.java │ │ │ └── KafkaMsgRecordV2.java │ │ ├── partition │ │ │ └── PartitionMateData.java │ │ ├── produce │ │ │ ├── PartitionData.java │ │ │ ├── PartitionResponse.java │ │ │ ├── ProduceRequest.java │ │ │ ├── ProduceResponse.java │ │ │ ├── Record.java │ │ │ ├── TopicProduceData.java │ │ │ └── TopicProduceRes.java │ │ └── topic │ │ │ └── TopicMetaData.java │ │ ├── caches │ │ └── RequestCacheCenter.java │ │ ├── codec │ │ └── KafkaResponseDecoder.java │ │ ├── config │ │ ├── KafkaConsumerConfig.java │ │ └── KafkaProduceConfig.java │ │ ├── enums │ │ ├── ApiKeys.java │ │ └── Errors.java │ │ └── utils │ │ ├── Crc32C.java │ │ ├── PureJavaCrc32C.java │ │ ├── SimplePartitioner.java │ │ ├── StringSerializer.java │ │ └── VarLengthUtils.java │ └── test │ └── java │ └── test │ └── kafkaClientTest.java ├── awesome-netty-mysql └── pom.xml ├── awesome-netty-redis ├── pom.xml └── src │ ├── main │ └── java │ │ └── com │ │ └── north │ │ └── netty │ │ └── redis │ │ ├── clients │ │ ├── AbstractRedisClient.java │ │ ├── RedisBinaryClient.java │ │ ├── RedisClient.java │ │ └── RedisStringClient.java │ │ ├── cmd │ │ ├── AbstractCmd.java │ │ ├── Cmd.java │ │ ├── CmdResp.java │ │ └── impl │ │ │ ├── getcmd │ │ │ ├── AbstractGetCmd.java │ │ │ ├── binary │ │ │ │ └── GetBinaryCmd.java │ │ │ └── str │ │ │ │ └── GetStringCmd.java │ │ │ └── setcmd │ │ │ ├── AbstractSetCmd.java │ │ │ ├── binary │ │ │ └── SetBinaryCmd.java │ │ │ └── str │ │ │ └── SetStringCmd.java │ │ ├── codecs │ │ ├── ByteBufToByteDecoder.java │ │ ├── ByteToByteBufEncoder.java │ │ └── RedisRespHandler.java │ │ ├── config │ │ └── RedisConfig.java │ │ ├── connections │ │ ├── ConnectionPool.java │ │ └── RedisConnection.java │ │ ├── enums │ │ ├── ClientType.java │ │ ├── ExpireMode.java │ │ └── Xmode.java │ │ ├── exceptions │ │ ├── AwesomeNettyRedisException.java │ │ └── FailedToGetConnectionException.java │ │ └── utils │ │ ├── CmdBuildUtils.java │ │ ├── EncodeUtils.java │ │ └── SymbolUtils.java │ └── test │ └── java │ └── org │ └── nort │ └── netty │ └── redis │ └── test │ ├── RedisBinaryClientTest.java │ └── RedisStringClientTest.java ├── awesome-netty-zk ├── pom.xml └── src │ ├── main │ └── java │ │ └── org │ │ └── north │ │ └── netty │ │ └── zk │ │ ├── NettyZkClient.java │ │ ├── bean │ │ ├── AbstractZkResonse.java │ │ ├── RequestHeader.java │ │ ├── ZkRequest.java │ │ ├── ZkResponse.java │ │ ├── create │ │ │ ├── ZkAcl.java │ │ │ ├── ZkAclId.java │ │ │ ├── ZkCreateRequest.java │ │ │ └── ZkCreateResponse.java │ │ ├── getchildren │ │ │ ├── ZkGetChildrenRequest.java 
│ │ │ └── ZkGetChildrenResponse.java │ │ └── login │ │ │ ├── ZkLoginRequest.java │ │ │ └── ZkLoginResp.java │ │ ├── factories │ │ └── ZkCodecFactories.java │ │ ├── registrys │ │ └── ZkCaches.java │ │ ├── utils │ │ ├── CreateMode.java │ │ └── OpCode.java │ │ └── zkcodec │ │ ├── ZkAbstractCodec.java │ │ ├── ZkCodec.java │ │ ├── createcodec │ │ └── ZkCreateCodec.java │ │ ├── getchildren │ │ └── ZkGetChildrenCodec.java │ │ └── login │ │ ├── ZkLoginCodec.java │ │ └── ZkLoginHandler.java │ └── test │ └── java │ └── zk │ └── ZkClientTest.java └── pom.xml /README.md: -------------------------------------------------------------------------------- 1 | ## 简介: 2 | 3 | > netty到底能干嘛? 很多刚开始接触或者正在想深入理解netty的同学, 4 | 都必须面临这个来自灵魂的拷问. 跟大多数netty的教程都从server端开始不一样, 5 | 这个项目反其道而行之, 从客户端出发, 通过使用netty跟kafka,zk,redis等等市面的开源框架的服务端进行交互, 6 | 让你更加深刻的理解netty在定制化协议方面的巨大优势的同时, 也可以让你理解这些开源框架的通讯原理, 7 | 从而也能理解netty在服务端的巨大优势 8 | ------------------------------ 9 | 10 | ## 文档: 11 | 12 | ### zookeeper 13 | [https://juejin.im/post/5dd296c0e51d4508182449a6](https://juejin.im/post/5dd296c0e51d4508182449a6) 14 | 15 | ### redis 16 | [https://juejin.im/post/5dd33ebde51d4508587c0d7a](https://juejin.im/post/5dd33ebde51d4508587c0d7a) 17 | 18 | ### kafka 19 | [https://juejin.im/post/5ddb5605e51d4523551669b3](https://juejin.im/post/5ddb5605e51d4523551669b3) 20 | 21 | 22 | 23 | #### 涉及到的框架包括但是不限于: 24 | - zookeeper 25 | - redis 26 | - kafka 27 | - mysql(未开始) 28 | - elasticsearch(未开始) 29 | - dubbo(未开始) 30 | -------------------------------------------------------------------------------- /awesome-netty-common/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | awesome-netty 7 | org.north.netty 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | awesome-netty-common 13 | 14 | 15 | io.netty 16 | netty-all 17 | 4.1.42.Final 18 | 19 | 20 | com.google.code.gson 21 | gson 22 | 2.2.4 23 | 24 | 25 | junit 26 | junit 27 | 4.12 28 | 29 | 30 | 31 | com.google.guava 32 | guava 33 | 25.0-jre 34 | 35 | 36 | 37 | com.dyuproject.protostuff 38 | protostuff-core 39 | 1.0.8 40 | 41 | 42 | 43 | com.dyuproject.protostuff 44 | protostuff-runtime 45 | 1.0.8 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | org.apache.maven.plugins 54 | maven-compiler-plugin 55 | 56 | 1.8 57 | 1.8 58 | 59 | 60 | 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /awesome-netty-common/src/main/java/org/north/netty/common/utils/ProtostuffHelper.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.common.utils; 2 | import com.dyuproject.protostuff.LinkedBuffer; 3 | import com.dyuproject.protostuff.ProtostuffIOUtil; 4 | import com.dyuproject.protostuff.Schema; 5 | import com.dyuproject.protostuff.runtime.RuntimeSchema; 6 | /** 7 | * protobuf 8 | * @author laihaohua 9 | */ 10 | public class ProtostuffHelper { 11 | public ProtostuffHelper() { 12 | } 13 | 14 | public static byte[] serializeObject(T object, Class clz) { 15 | Schema schema = RuntimeSchema.createFrom(clz); 16 | LinkedBuffer BUFF = LinkedBuffer.allocate(512); 17 | return ProtostuffIOUtil.toByteArray(object, schema, BUFF); 18 | } 19 | 20 | public static T deSerializeObject(byte[] object, Class clz) { 21 | RuntimeSchema schema = RuntimeSchema.createFrom(clz); 22 | 23 | Object t; 24 | try { 25 | t = clz.newInstance(); 26 | } catch (Exception var5) { 27 | var5.printStackTrace(); 28 | throw new RuntimeException("init object failed."); 29 | } 30 | 31 | ProtostuffIOUtil.mergeFrom(object, t, 
schema); 32 | return (T) t; 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /awesome-netty-common/src/main/java/org/north/netty/common/utils/SerializeUtils.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.common.utils; 2 | 3 | import com.google.common.collect.Lists; 4 | import io.netty.buffer.ByteBuf; 5 | 6 | import java.io.*; 7 | import java.nio.charset.StandardCharsets; 8 | import java.util.List; 9 | 10 | public class SerializeUtils { 11 | public static byte[] toByteArray(Object obj){ 12 | try { 13 | ByteArrayOutputStream byteArrayOS = new ByteArrayOutputStream(); 14 | ObjectOutputStream stream = new ObjectOutputStream(byteArrayOS); 15 | stream.writeObject(obj); 16 | stream.close(); 17 | return byteArrayOS.toByteArray(); 18 | } catch (IOException e) { 19 | e.printStackTrace(); 20 | } 21 | return null; 22 | } 23 | 24 | public Object byteArrayToObj(byte[] bytes) { 25 | Object obj = null; 26 | try { 27 | ByteArrayInputStream bis = new ByteArrayInputStream (bytes); 28 | ObjectInputStream ois = new ObjectInputStream (bis); 29 | obj = ois.readObject(); 30 | ois.close(); 31 | bis.close(); 32 | } catch (IOException ex) { 33 | ex.printStackTrace(); 34 | } catch (ClassNotFoundException ex) { 35 | ex.printStackTrace(); 36 | } 37 | return obj; 38 | } 39 | 40 | /** 41 | * 长度用int表示, 占4个字节 42 | * @param msg 43 | * @param out 44 | */ 45 | public static void writeStringToBuffer(String msg, ByteBuf out){ 46 | if (msg == null) { 47 | out.writeInt(-1); 48 | return; 49 | } 50 | byte [] bytes = msg.getBytes(StandardCharsets.UTF_8); 51 | // 字符串的长度 52 | out.writeInt(bytes.length); 53 | out.writeBytes(bytes); 54 | } 55 | 56 | /** 57 | * 长度用short表示 占2个字节 58 | * @param msg 59 | * @param out 60 | */ 61 | public static void writeStringToBuffer2(String msg, ByteBuf out){ 62 | if (msg == null) { 63 | out.writeShort(-1); 64 | return; 65 | } 66 | byte [] bytes = msg.getBytes(StandardCharsets.UTF_8); 67 | // 字符串的长度 68 | out.writeShort(bytes.length); 69 | out.writeBytes(bytes); 70 | } 71 | 72 | /** 73 | * 字符串长度是4个字节 74 | * @param in 75 | * @return 76 | */ 77 | public static String readStringToBuffer( ByteBuf in){ 78 | int strLen = in.readInt(); 79 | if(strLen < 0){ 80 | return null; 81 | } 82 | byte [] bytes = new byte[strLen]; 83 | in.readBytes(bytes); 84 | String s = new String(bytes, StandardCharsets.UTF_8); 85 | return s; 86 | } 87 | 88 | /** 89 | * 字符串长度是2个字节 90 | * @param in 91 | * @return 92 | */ 93 | public static String readStringToBuffer2( ByteBuf in){ 94 | int strLen = in.readShort(); 95 | if(strLen < 0){ 96 | return null; 97 | } 98 | byte [] bytes = new byte[strLen]; 99 | in.readBytes(bytes); 100 | String s = new String(bytes, StandardCharsets.UTF_8); 101 | return s; 102 | } 103 | public static void writeStringListToBuffer(List lists, ByteBuf out){ 104 | if(lists == null){ 105 | out.writeInt(-1); 106 | return; 107 | } 108 | out.writeInt(lists.size()); 109 | for(String s : lists){ 110 | writeStringToBuffer2(s, out); 111 | } 112 | 113 | } 114 | 115 | public static void writeByteArrToBuffer(byte [] buffer, ByteBuf out){ 116 | if (buffer == null) { 117 | out.writeInt(-1); 118 | return; 119 | } 120 | // 字符串的长度 121 | out.writeInt(buffer.length); 122 | out.writeBytes(buffer); 123 | } 124 | 125 | } 126 | -------------------------------------------------------------------------------- /awesome-netty-elasticsearch/pom.xml: -------------------------------------------------------------------------------- 1 
| 2 | 5 | 6 | awesome-netty 7 | org.north.netty 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | awesome-netty-elasticsearch 13 | 14 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /awesome-netty-kafka/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | awesome-netty 7 | org.north.netty 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | awesome-netty-kafka 13 | 14 | 15 | org.north.netty 16 | awesome-netty-common 17 | 1.0-SNAPSHOT 18 | 19 | 20 | 21 | 22 | 23 | org.apache.maven.plugins 24 | maven-compiler-plugin 25 | 26 | 8 27 | 8 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/KafkaClient.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka; 2 | 3 | import com.google.common.collect.Lists; 4 | import com.north.netty.kafka.bean.fetch.*; 5 | import com.north.netty.kafka.bean.meta.KafkaMetaRequest; 6 | import com.north.netty.kafka.bean.meta.KafkaMetaResponse; 7 | import com.north.netty.kafka.bean.msg.ConsumerRecord; 8 | import com.north.netty.kafka.bean.msg.KafkaMsgRecordBatch; 9 | import com.north.netty.kafka.bean.msg.KafkaMsgRecordV2; 10 | import com.north.netty.kafka.bean.produce.*; 11 | import com.north.netty.kafka.caches.RequestCacheCenter; 12 | import com.north.netty.kafka.codec.KafkaResponseDecoder; 13 | import com.north.netty.kafka.config.KafkaConsumerConfig; 14 | import com.north.netty.kafka.config.KafkaProduceConfig; 15 | import com.north.netty.kafka.enums.Errors; 16 | import com.north.netty.kafka.utils.SimplePartitioner; 17 | import com.north.netty.kafka.utils.StringSerializer; 18 | import io.netty.bootstrap.Bootstrap; 19 | import io.netty.buffer.ByteBuf; 20 | import io.netty.buffer.PooledByteBufAllocator; 21 | import io.netty.channel.*; 22 | import io.netty.channel.nio.NioEventLoopGroup; 23 | import io.netty.channel.socket.nio.NioSocketChannel; 24 | import io.netty.handler.codec.LengthFieldBasedFrameDecoder; 25 | import io.netty.handler.codec.LengthFieldPrepender; 26 | 27 | import java.util.ArrayList; 28 | import java.util.HashMap; 29 | import java.util.List; 30 | import java.util.Map; 31 | import java.util.concurrent.atomic.AtomicInteger; 32 | 33 | /** 34 | * @author laihaohua 35 | */ 36 | public class KafkaClient { 37 | private Channel channel; 38 | private String clientId; 39 | private RequestCacheCenter requestCacheCenter = new RequestCacheCenter(); 40 | private AtomicInteger requestId = new AtomicInteger(1); 41 | 42 | public KafkaClient(String clientId, String host, int port){ 43 | this.clientId = clientId; 44 | EventLoopGroup eventLoopGroup = new NioEventLoopGroup(); 45 | Bootstrap bootstrap = new Bootstrap(); 46 | bootstrap.option(ChannelOption.SO_KEEPALIVE, true); 47 | bootstrap.channel(NioSocketChannel.class); 48 | bootstrap.group(eventLoopGroup); 49 | bootstrap.handler(new ChannelInitializer() { 50 | @Override 51 | protected void initChannel(NioSocketChannel ch) throws Exception { 52 | ch.pipeline() 53 | .addLast(new LengthFieldPrepender(4)) 54 | .addLast(new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE,0,4,0,4)) 55 | .addLast(new KafkaResponseDecoder(requestCacheCenter)); 56 | } 57 | }); 58 | ChannelFuture channelFuture = null; 59 | try { 60 | channelFuture = bootstrap.connect(host, port).sync(); 61 | this.channel = channelFuture.channel(); 62 | } catch (InterruptedException e) { 63 | 
e.printStackTrace(); 64 | } 65 | 66 | } 67 | 68 | public KafkaMetaResponse fetchMataData(String topic){ 69 | Integer xId = requestId.getAndIncrement(); 70 | KafkaMetaRequest kafkaMetaRequest = new KafkaMetaRequest(clientId, xId); 71 | kafkaMetaRequest.setTopics(Lists.newArrayList(topic)); 72 | ByteBuf byteBuf = PooledByteBufAllocator.DEFAULT.buffer(); 73 | kafkaMetaRequest.serializable(byteBuf); 74 | try { 75 | requestCacheCenter.putKafkaResponse(xId, new KafkaMetaResponse()); 76 | this.channel.writeAndFlush(byteBuf).sync(); 77 | KafkaMetaResponse response = (KafkaMetaResponse)requestCacheCenter.waitForResp(xId, 400000); 78 | return response; 79 | } catch (InterruptedException e) { 80 | e.printStackTrace(); 81 | } 82 | return null; 83 | } 84 | public ProduceResponse send(KafkaProduceConfig config, String topic , String key, String val){ 85 | 86 | if(config == null || topic == null || topic.isEmpty() || val == null || val.isEmpty()){ 87 | throw new IllegalArgumentException("topic和val都不能为空"); 88 | } 89 | 90 | // 序列化器 key 和 val都简单地用String序列化器 91 | byte [] keyBytes = StringSerializer.getBytes(key); 92 | byte [] valBytes = StringSerializer.getBytes(val); 93 | 94 | // 分区器 95 | int partition = SimplePartitioner.getPartion(topic, keyBytes, valBytes); 96 | 97 | KafkaMsgRecordV2 kafkaMsgRecordV2 = new KafkaMsgRecordV2(keyBytes, valBytes , null); 98 | KafkaMsgRecordBatch kafkaMsgRecordBatch = new KafkaMsgRecordBatch(kafkaMsgRecordV2); 99 | 100 | Record record = new Record(); 101 | // 指定只发送到哪个分区 102 | record.setPartition(partition); 103 | record.setKafkaMsgRecordBatchList(new ArrayList<>()); 104 | record.getKafkaMsgRecordBatchList().add(kafkaMsgRecordBatch); 105 | 106 | PartitionData partitionData = new PartitionData(); 107 | partitionData.setRecordSset(record); 108 | 109 | TopicProduceData topicProduceData = new TopicProduceData(); 110 | topicProduceData.setTopic(topic); 111 | topicProduceData.setData(Lists.newArrayList(partitionData)); 112 | 113 | Integer xid = requestId.getAndIncrement(); 114 | ProduceRequest produceRequest = new ProduceRequest(clientId, xid); 115 | produceRequest.setAcks(config.getAck()); 116 | produceRequest.setTimeOut(config.getTimeout()); 117 | produceRequest.setTransactionalId(null); 118 | produceRequest.setTopicData(Lists.newArrayList(topicProduceData)); 119 | 120 | 121 | ByteBuf byteBuf = PooledByteBufAllocator.DEFAULT.buffer(); 122 | produceRequest.serializable(byteBuf); 123 | try { 124 | requestCacheCenter.putKafkaResponse(xid, new ProduceResponse()); 125 | this.channel.writeAndFlush(byteBuf).sync(); 126 | ProduceResponse response = (ProduceResponse)requestCacheCenter.waitForResp(xid, config.getTimeout()); 127 | return response; 128 | } catch (InterruptedException e) { 129 | e.printStackTrace(); 130 | } 131 | return null; 132 | 133 | } 134 | 135 | 136 | public Map> poll(KafkaConsumerConfig consumerConfig, String topic, int partition, long fetchOffset){ 137 | if(consumerConfig == null || topic == null){ 138 | throw new IllegalArgumentException("必要参数不能为空"); 139 | } 140 | 141 | Integer xid = requestId.getAndIncrement(); 142 | FetchRequest fetchRequest = new FetchRequest(this.clientId, xid); 143 | 144 | FetchTopicPartitionRequest fetchTopicPartitionRequest = new FetchTopicPartitionRequest(); 145 | 146 | fetchTopicPartitionRequest.setPartition(partition); 147 | fetchTopicPartitionRequest.setFetchOffset(fetchOffset); 148 | fetchTopicPartitionRequest.setLogStartOffset(0L); 149 | fetchTopicPartitionRequest.setMaxBytes(consumerConfig.getMaxBytes()); 150 | 151 | FetchTopicRequest 
fetchTopicRequest = new FetchTopicRequest(); 152 | fetchTopicRequest.setTopic(topic); 153 | fetchTopicRequest.setPartitions(new ArrayList<>()); 154 | fetchTopicRequest.getPartitions().add(fetchTopicPartitionRequest); 155 | 156 | fetchRequest.setReplicaId(-1); 157 | fetchRequest.setMaxBytes(consumerConfig.getMaxBytes()); 158 | fetchRequest.setMaxWaitTime(consumerConfig.getMaxWaitTime()); 159 | fetchRequest.setMinBytes(consumerConfig.getMinBytes()); 160 | byte b = 0; 161 | fetchRequest.setIsolationLevel(b); 162 | fetchRequest.setTopics(new ArrayList<>()); 163 | fetchRequest.getTopics().add(fetchTopicRequest); 164 | 165 | 166 | ByteBuf byteBuf = PooledByteBufAllocator.DEFAULT.buffer(); 167 | fetchRequest.serializable(byteBuf); 168 | 169 | 170 | try { 171 | requestCacheCenter.putKafkaResponse(xid, new FetchResponse()); 172 | this.channel.writeAndFlush(byteBuf).sync(); 173 | FetchResponse response = (FetchResponse)requestCacheCenter.waitForResp(xid, consumerConfig.getMaxWaitTime()); 174 | return parseResp(response); 175 | } catch (InterruptedException e) { 176 | e.printStackTrace(); 177 | } 178 | return null; 179 | 180 | } 181 | 182 | 183 | /** 184 | * kafka poll 返回的数据结构非常复杂, 这里解析出我们想要的数据 185 | * @param response 186 | * @return 187 | */ 188 | private Map> parseResp(FetchResponse response ){ 189 | if(response == null){ 190 | return null; 191 | } 192 | Map> partitionRecordMap = new HashMap<>(); 193 | for(FetchTopicResponse fetchTopicResponse : response.getResponses()){ 194 | // 由于我们这里的实现只会订阅一个topic, 所以这里没有用到这个 195 | String topic = fetchTopicResponse.getTopic(); 196 | List partitionResps = fetchTopicResponse.getPartitionResps(); 197 | assert partitionResps != null; 198 | // 遍历这个topic的每一个分区 199 | for(FetchPartitionResp partitionResp : partitionResps){ 200 | FetchPartitionHeader fetchPartitionHeader = partitionResp.getPartitionHeaders(); 201 | short errorCode = fetchPartitionHeader.getErrorCode(); 202 | if(errorCode != Errors.NONE.code()){ 203 | // 这个分区的响应有错误的话 抛出异常 204 | throw new IllegalArgumentException("broker 返回错误: " + Errors.forCode(errorCode).message()); 205 | } 206 | // 初始化这个分区的list 207 | Integer partition = fetchPartitionHeader.getPartition(); 208 | if(!partitionRecordMap.containsKey(partition)){ 209 | partitionRecordMap.putIfAbsent(partition, new ArrayList<>()); 210 | } 211 | Record record = partitionResp.getRecordSset(); 212 | assert record != null; 213 | List kafkaMsgRecordBatchList = record.getKafkaMsgRecordBatchList(); 214 | // kafka的响应可能会分成很多个批次, 所以这里要一个批次一个批次地处理 215 | for(KafkaMsgRecordBatch kafkaMsgRecordBatch : kafkaMsgRecordBatchList){ 216 | Long baseOffset = kafkaMsgRecordBatch.getBaseOffset(); 217 | long firstTimestamp = kafkaMsgRecordBatch.getFirstTimestamp(); 218 | List kafkaMsgRecordV2List = kafkaMsgRecordBatch.getMsgs(); 219 | for(KafkaMsgRecordV2 kafkaMsgRecordV2 : kafkaMsgRecordV2List){ 220 | ConsumerRecord consumerRecord = new ConsumerRecord(); 221 | long offset = baseOffset + kafkaMsgRecordV2.getOffsetDelta(); 222 | consumerRecord.setOffset(offset); 223 | long timestamp = firstTimestamp + kafkaMsgRecordV2.getTimestampDelta(); 224 | consumerRecord.setTimeStamp(timestamp); 225 | consumerRecord.setKey(StringSerializer.getString(kafkaMsgRecordV2.getKey())); 226 | consumerRecord.setVal(StringSerializer.getString(kafkaMsgRecordV2.getValues())); 227 | partitionRecordMap.get(partition).add(consumerRecord); 228 | } 229 | } 230 | 231 | } 232 | } 233 | return partitionRecordMap; 234 | } 235 | } 236 | -------------------------------------------------------------------------------- 
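KafkaClient above is the entry point of the kafka module: the constructor opens the Netty connection (LengthFieldPrepender / LengthFieldBasedFrameDecoder framing plus KafkaResponseDecoder), and fetchMataData, send and poll each build a request bean, register a pending response in RequestCacheCenter and block until the decoder completes it. Below is a minimal usage sketch of that flow. The broker address, topic name and the KafkaProduceConfig/KafkaConsumerConfig setter names are assumptions (only the getters appear in this dump); the KafkaClient methods themselves are taken as-is from the class above.

```java
import com.north.netty.kafka.KafkaClient;
import com.north.netty.kafka.bean.meta.KafkaMetaResponse;
import com.north.netty.kafka.bean.msg.ConsumerRecord;
import com.north.netty.kafka.bean.produce.ProduceResponse;
import com.north.netty.kafka.config.KafkaConsumerConfig;
import com.north.netty.kafka.config.KafkaProduceConfig;

import java.util.List;
import java.util.Map;

public class KafkaClientUsageSketch {
    public static void main(String[] args) {
        // The constructor connects immediately (see KafkaClient above).
        KafkaClient client = new KafkaClient("demo-client", "localhost", 9092);

        // 1. Metadata request: which brokers host the topic.
        KafkaMetaResponse meta = client.fetchMataData("test-topic");
        System.out.println("controllerId = " + meta.getControllerId());

        // 2. Produce request; acks/timeout are read from KafkaProduceConfig
        //    inside send(). The setter names below are assumed.
        KafkaProduceConfig produceConfig = new KafkaProduceConfig();
        produceConfig.setAck((short) 1);      // assumed setter
        produceConfig.setTimeout(30_000);     // assumed setter
        ProduceResponse produceResponse =
                client.send(produceConfig, "test-topic", "key-1", "hello");

        // 3. Fetch request: poll partition 0 starting at offset 0.
        //    The setter names below are assumed; poll() uses the matching getters.
        KafkaConsumerConfig consumerConfig = new KafkaConsumerConfig();
        consumerConfig.setMinBytes(1);            // assumed setter
        consumerConfig.setMaxBytes(1024 * 1024);  // assumed setter
        consumerConfig.setMaxWaitTime(5_000);     // assumed setter
        Map<Integer, List<ConsumerRecord>> records =
                client.poll(consumerConfig, "test-topic", 0, 0L);
        if (records != null) {
            records.forEach((partition, list) -> list.forEach(r ->
                    System.out.println(partition + "@" + r.getOffset() + ": " + r.getVal())));
        }
    }
}
```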
/awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/AbstractKafkaResponse.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public abstract class AbstractKafkaResponse implements KafkaResponse{ 7 | protected Integer correlationId; 8 | protected KafkaResponseHeader kafkaResponseHeader; 9 | 10 | public KafkaResponseHeader getKafkaResponseHeader() { 11 | return kafkaResponseHeader; 12 | } 13 | 14 | public void setKafkaResponseHeader(KafkaResponseHeader kafkaResponseHeader) { 15 | this.kafkaResponseHeader = kafkaResponseHeader; 16 | } 17 | 18 | public Integer getCorrelationId() { 19 | return correlationId; 20 | } 21 | 22 | public void setCorrelationId(Integer correlationId) { 23 | this.correlationId = correlationId; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/KafkaRequest.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean; 2 | 3 | 4 | import io.netty.buffer.ByteBuf; 5 | 6 | public interface KafkaRequest { 7 | public void serializable(ByteBuf out); 8 | } 9 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/KafkaRequestHeader.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import org.north.netty.common.utils.SerializeUtils; 5 | 6 | import java.io.Serializable; 7 | import java.util.Arrays; 8 | 9 | /** 10 | * @author laihaohua 11 | */ 12 | public class KafkaRequestHeader implements Serializable { 13 | private Short apiKey; 14 | private Short apiVersion; 15 | private Integer correlationId; 16 | private String clientId; 17 | public void serializable(ByteBuf out){ 18 | out.writeShort(apiKey); 19 | out.writeShort(apiVersion); 20 | out.writeInt(correlationId); 21 | SerializeUtils.writeStringToBuffer2(clientId, out); 22 | } 23 | 24 | public Short getApiKey() { 25 | return apiKey; 26 | } 27 | 28 | public void setApiKey(Short apiKey) { 29 | this.apiKey = apiKey; 30 | } 31 | 32 | public Short getApiVersion() { 33 | return apiVersion; 34 | } 35 | 36 | public void setApiVersion(Short apiVersion) { 37 | this.apiVersion = apiVersion; 38 | } 39 | 40 | public Integer getCorrelationId() { 41 | return correlationId; 42 | } 43 | 44 | public void setCorrelationId(Integer correlationId) { 45 | this.correlationId = correlationId; 46 | } 47 | 48 | public String getClientId() { 49 | return clientId; 50 | } 51 | 52 | public void setClientId(String clientId) { 53 | this.clientId = clientId; 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/KafkaResponse.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | /** 6 | * @author laihaohua 7 | */ 8 | public interface KafkaResponse { 9 | 10 | /** 11 | * 反序列化这个响应体 12 | * @param byteBuf 13 | */ 14 | void deserialize(ByteBuf byteBuf); 15 | 16 | } 17 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/KafkaResponseHeader.java: 
-------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | import java.io.Serializable; 6 | 7 | /** 8 | * @author laihaohua 9 | */ 10 | public class KafkaResponseHeader implements Serializable { 11 | private Integer correlationId; 12 | public void deserialize(ByteBuf byteBuf){ 13 | this.correlationId = byteBuf.readInt(); 14 | } 15 | 16 | public Integer getCorrelationId() { 17 | return correlationId; 18 | } 19 | 20 | public void setCorrelationId(Integer correlationId) { 21 | this.correlationId = correlationId; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/broker/Broker.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.broker; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import org.north.netty.common.utils.SerializeUtils; 5 | 6 | import java.io.Serializable; 7 | 8 | /** 9 | * @author laihaohua 10 | */ 11 | public class Broker implements Serializable { 12 | private Integer nodeId; 13 | private String host; 14 | private Integer port; 15 | private String rack; 16 | 17 | public Integer getNodeId() { 18 | return nodeId; 19 | } 20 | 21 | public void setNodeId(Integer nodeId) { 22 | this.nodeId = nodeId; 23 | } 24 | 25 | public String getHost() { 26 | return host; 27 | } 28 | 29 | public void setHost(String host) { 30 | this.host = host; 31 | } 32 | 33 | public Integer getPort() { 34 | return port; 35 | } 36 | 37 | public void setPort(Integer port) { 38 | this.port = port; 39 | } 40 | 41 | public String getRack() { 42 | return rack; 43 | } 44 | 45 | public void setRack(String rack) { 46 | this.rack = rack; 47 | } 48 | 49 | public void deserialize(ByteBuf byteBuf) { 50 | this.nodeId = byteBuf.readInt(); 51 | this.host = SerializeUtils.readStringToBuffer2(byteBuf); 52 | this.port = byteBuf.readInt(); 53 | this.rack = SerializeUtils.readStringToBuffer2(byteBuf); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/AbortedTransaction.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | public class AbortedTransaction { 6 | private Long producerId; 7 | private Long firstOffset; 8 | public void deserialize(ByteBuf in){ 9 | this.producerId = in.readLong(); 10 | this.firstOffset = in.readLong(); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/FetchPartitionHeader.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | import java.io.Serializable; 6 | import java.util.ArrayList; 7 | import java.util.List; 8 | 9 | public class FetchPartitionHeader implements Serializable { 10 | private Integer partition; 11 | private Short errorCode; 12 | private Long highWaterMark; 13 | private Long lastStableOffset; 14 | private Long logStartOffset; 15 | private List abortedTransactions; 16 | public void deserialize(ByteBuf in){ 17 | this.partition = in.readInt(); 18 | this.errorCode = in.readShort(); 19 | this.highWaterMark = in.readLong(); 20 | 
this.lastStableOffset = in.readLong(); 21 | this.logStartOffset = in.readLong(); 22 | int count = in.readInt(); 23 | if(count >= 0){ 24 | abortedTransactions = new ArrayList<>(count); 25 | for(int i =0 ; i < count; i++){ 26 | AbortedTransaction abortedTransaction = new AbortedTransaction(); 27 | abortedTransaction.deserialize(in); 28 | abortedTransactions.add(abortedTransaction); 29 | } 30 | } 31 | 32 | } 33 | 34 | public Integer getPartition() { 35 | return partition; 36 | } 37 | 38 | public void setPartition(Integer partition) { 39 | this.partition = partition; 40 | } 41 | 42 | public Short getErrorCode() { 43 | return errorCode; 44 | } 45 | 46 | public void setErrorCode(Short errorCode) { 47 | this.errorCode = errorCode; 48 | } 49 | 50 | public Long getHighWaterMark() { 51 | return highWaterMark; 52 | } 53 | 54 | public void setHighWaterMark(Long highWaterMark) { 55 | this.highWaterMark = highWaterMark; 56 | } 57 | 58 | public Long getLastStableOffset() { 59 | return lastStableOffset; 60 | } 61 | 62 | public void setLastStableOffset(Long lastStableOffset) { 63 | this.lastStableOffset = lastStableOffset; 64 | } 65 | 66 | public Long getLogStartOffset() { 67 | return logStartOffset; 68 | } 69 | 70 | public void setLogStartOffset(Long logStartOffset) { 71 | this.logStartOffset = logStartOffset; 72 | } 73 | 74 | public List getAbortedTransactions() { 75 | return abortedTransactions; 76 | } 77 | 78 | public void setAbortedTransactions(List abortedTransactions) { 79 | this.abortedTransactions = abortedTransactions; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/FetchPartitionResp.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | import com.north.netty.kafka.bean.produce.Record; 4 | import io.netty.buffer.ByteBuf; 5 | import org.north.netty.common.utils.SerializeUtils; 6 | 7 | import java.io.Serializable; 8 | import java.util.ArrayList; 9 | import java.util.Arrays; 10 | import java.util.List; 11 | 12 | public class FetchPartitionResp implements Serializable { 13 | private FetchPartitionHeader partitionHeaders; 14 | private Record recordSset; 15 | public void deserialize(ByteBuf in){ 16 | partitionHeaders = new FetchPartitionHeader(); 17 | partitionHeaders.deserialize(in); 18 | recordSset = new Record(); 19 | recordSset.deserialize(in); 20 | } 21 | 22 | public FetchPartitionHeader getPartitionHeaders() { 23 | return partitionHeaders; 24 | } 25 | 26 | public void setPartitionHeaders(FetchPartitionHeader partitionHeaders) { 27 | this.partitionHeaders = partitionHeaders; 28 | } 29 | 30 | public Record getRecordSset() { 31 | return recordSset; 32 | } 33 | 34 | public void setRecordSset(Record recordSset) { 35 | this.recordSset = recordSset; 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/FetchRequest.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | import com.north.netty.kafka.bean.KafkaRequest; 4 | import com.north.netty.kafka.bean.KafkaRequestHeader; 5 | import com.north.netty.kafka.enums.ApiKeys; 6 | import io.netty.buffer.ByteBuf; 7 | 8 | import java.io.Serializable; 9 | import java.util.List; 10 | 11 | public class FetchRequest implements Serializable, KafkaRequest { 12 | 13 | 
private KafkaRequestHeader header; 14 | public FetchRequest(String clientId, Integer correlationId){ 15 | super(); 16 | header = new KafkaRequestHeader(); 17 | header.setClientId(clientId); 18 | header.setCorrelationId(correlationId); 19 | header.setApiKey(ApiKeys.FETCH.id); 20 | header.setApiVersion(ApiKeys.FETCH.apiVersion); 21 | } 22 | 23 | /** 24 | * 副本的brokerId, 一般的消费者的话 直接用-1即可 25 | */ 26 | private Integer replicaId; 27 | /** 28 | * 等待响应返回的最大ms 29 | */ 30 | private Integer maxWaitTime; 31 | /** 32 | * 响应的最小字节数 33 | */ 34 | private Integer minBytes; 35 | /** 36 | * 响应的最大字节数 37 | */ 38 | private Integer maxBytes; 39 | /** 40 | * 事务隔离等级 0 读未提交 1 读已提交 41 | */ 42 | private Byte isolationLevel; 43 | /** 44 | * 要拉取的topic 45 | */ 46 | private List topics; 47 | 48 | @Override 49 | public void serializable(ByteBuf out){ 50 | header.serializable(out); 51 | out.writeInt(replicaId); 52 | out.writeInt(maxWaitTime); 53 | out.writeInt(minBytes); 54 | out.writeInt(maxBytes); 55 | out.writeByte(isolationLevel); 56 | if(topics == null){ 57 | out.writeInt(-1); 58 | }else { 59 | out.writeInt(topics.size()); 60 | for(FetchTopicRequest fetchTopicRequest : topics){ 61 | fetchTopicRequest.serializable(out); 62 | } 63 | } 64 | } 65 | 66 | 67 | public KafkaRequestHeader getHeader() { 68 | return header; 69 | } 70 | 71 | public void setHeader(KafkaRequestHeader header) { 72 | this.header = header; 73 | } 74 | 75 | public Integer getReplicaId() { 76 | return replicaId; 77 | } 78 | 79 | public void setReplicaId(Integer replicaId) { 80 | this.replicaId = replicaId; 81 | } 82 | 83 | public Integer getMaxWaitTime() { 84 | return maxWaitTime; 85 | } 86 | 87 | public void setMaxWaitTime(Integer maxWaitTime) { 88 | this.maxWaitTime = maxWaitTime; 89 | } 90 | 91 | public Integer getMinBytes() { 92 | return minBytes; 93 | } 94 | 95 | public void setMinBytes(Integer minBytes) { 96 | this.minBytes = minBytes; 97 | } 98 | 99 | public Integer getMaxBytes() { 100 | return maxBytes; 101 | } 102 | 103 | public void setMaxBytes(Integer maxBytes) { 104 | this.maxBytes = maxBytes; 105 | } 106 | 107 | public Byte getIsolationLevel() { 108 | return isolationLevel; 109 | } 110 | 111 | public void setIsolationLevel(Byte isolationLevel) { 112 | this.isolationLevel = isolationLevel; 113 | } 114 | 115 | public List getTopics() { 116 | return topics; 117 | } 118 | 119 | public void setTopics(List topics) { 120 | this.topics = topics; 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/FetchResponse.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | import com.north.netty.kafka.bean.AbstractKafkaResponse; 4 | import com.north.netty.kafka.bean.KafkaResponse; 5 | import io.netty.buffer.ByteBuf; 6 | 7 | import java.io.Serializable; 8 | import java.util.ArrayList; 9 | import java.util.Arrays; 10 | import java.util.List; 11 | 12 | public class FetchResponse extends AbstractKafkaResponse implements Serializable, KafkaResponse { 13 | /** 14 | * ms 15 | */ 16 | private Integer throttleTime; 17 | private List responses; 18 | 19 | @Override 20 | public void deserialize(ByteBuf in) { 21 | this.throttleTime = in.readInt(); 22 | int resCount = in.readInt(); 23 | if(resCount >= 0){ 24 | this.responses = new ArrayList<>(resCount); 25 | for(int i =0 ; i < resCount; i++){ 26 | FetchTopicResponse fetchTopicResponse = new FetchTopicResponse(); 27 | 
fetchTopicResponse.deserialize(in); 28 | responses.add(fetchTopicResponse); 29 | } 30 | } 31 | } 32 | 33 | public Integer getThrottleTime() { 34 | return throttleTime; 35 | } 36 | 37 | public void setThrottleTime(Integer throttleTime) { 38 | this.throttleTime = throttleTime; 39 | } 40 | 41 | public List getResponses() { 42 | return responses; 43 | } 44 | 45 | public void setResponses(List responses) { 46 | this.responses = responses; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/FetchTopicPartitionRequest.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | import java.io.Serializable; 6 | 7 | public class FetchTopicPartitionRequest implements Serializable { 8 | private Integer partition; 9 | private Long fetchOffset; 10 | private Long logStartOffset; 11 | private Integer maxBytes; 12 | public void serializable(ByteBuf out){ 13 | out.writeInt(partition); 14 | out.writeLong(fetchOffset); 15 | out.writeLong(logStartOffset); 16 | out.writeInt(maxBytes); 17 | } 18 | 19 | public Integer getPartition() { 20 | return partition; 21 | } 22 | 23 | public void setPartition(Integer partition) { 24 | this.partition = partition; 25 | } 26 | 27 | public Long getFetchOffset() { 28 | return fetchOffset; 29 | } 30 | 31 | public void setFetchOffset(Long fetchOffset) { 32 | this.fetchOffset = fetchOffset; 33 | } 34 | 35 | public Long getLogStartOffset() { 36 | return logStartOffset; 37 | } 38 | 39 | public void setLogStartOffset(Long logStartOffset) { 40 | this.logStartOffset = logStartOffset; 41 | } 42 | 43 | public Integer getMaxBytes() { 44 | return maxBytes; 45 | } 46 | 47 | public void setMaxBytes(Integer maxBytes) { 48 | this.maxBytes = maxBytes; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/FetchTopicRequest.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import org.north.netty.common.utils.SerializeUtils; 5 | 6 | import java.io.Serializable; 7 | import java.util.List; 8 | 9 | public class FetchTopicRequest implements Serializable { 10 | private String topic; 11 | private List partitions; 12 | public void serializable(ByteBuf out){ 13 | SerializeUtils.writeStringToBuffer2(topic, out); 14 | if(partitions == null){ 15 | out.writeInt(-1); 16 | }else { 17 | out.writeInt(partitions.size()); 18 | for(FetchTopicPartitionRequest fetchTopicRequest : partitions){ 19 | fetchTopicRequest.serializable(out); 20 | } 21 | } 22 | } 23 | 24 | public String getTopic() { 25 | return topic; 26 | } 27 | 28 | public void setTopic(String topic) { 29 | this.topic = topic; 30 | } 31 | 32 | public List getPartitions() { 33 | return partitions; 34 | } 35 | 36 | public void setPartitions(List partitions) { 37 | this.partitions = partitions; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/fetch/FetchTopicResponse.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.fetch; 2 | 3 | 4 | import io.netty.buffer.ByteBuf; 5 | import 
org.north.netty.common.utils.SerializeUtils; 6 | 7 | import java.io.Serializable; 8 | import java.util.ArrayList; 9 | import java.util.List; 10 | 11 | public class FetchTopicResponse implements Serializable { 12 | private String topic; 13 | private List partitionResps; 14 | 15 | public void deserialize(ByteBuf in){ 16 | this.topic = SerializeUtils.readStringToBuffer2(in); 17 | int partitionResCount = in.readInt(); 18 | if(partitionResCount > 0){ 19 | this.partitionResps = new ArrayList<>(partitionResCount); 20 | for(int i = 0; i < partitionResCount; i++){ 21 | FetchPartitionResp fetchPartitionResp = new FetchPartitionResp(); 22 | fetchPartitionResp.deserialize(in); 23 | partitionResps.add(fetchPartitionResp); 24 | } 25 | } 26 | } 27 | 28 | public String getTopic() { 29 | return topic; 30 | } 31 | 32 | public void setTopic(String topic) { 33 | this.topic = topic; 34 | } 35 | 36 | public List getPartitionResps() { 37 | return partitionResps; 38 | } 39 | 40 | public void setPartitionResps(List partitionResps) { 41 | this.partitionResps = partitionResps; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/meta/KafkaMetaRequest.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.meta; 2 | 3 | import com.north.netty.kafka.bean.KafkaRequest; 4 | import com.north.netty.kafka.bean.KafkaRequestHeader; 5 | import com.north.netty.kafka.enums.ApiKeys; 6 | import io.netty.buffer.ByteBuf; 7 | import org.north.netty.common.utils.SerializeUtils; 8 | 9 | import java.io.Serializable; 10 | import java.util.List; 11 | 12 | /** 13 | * @author laihaohua 14 | */ 15 | public class KafkaMetaRequest implements Serializable, KafkaRequest { 16 | private List topics; 17 | private boolean allowAutoTopicCreation = true; 18 | private KafkaRequestHeader header; 19 | public KafkaMetaRequest(String clientId, Integer correlationId){ 20 | super(); 21 | header = new KafkaRequestHeader(); 22 | header.setClientId(clientId); 23 | header.setCorrelationId(correlationId); 24 | header.setApiKey(ApiKeys.METADATA.id); 25 | header.setApiVersion(ApiKeys.METADATA.apiVersion); 26 | } 27 | public List getTopics() { 28 | return topics; 29 | } 30 | 31 | public void setTopics(List topics) { 32 | this.topics = topics; 33 | } 34 | 35 | public boolean isAllowAutoTopicCreation() { 36 | return allowAutoTopicCreation; 37 | } 38 | 39 | public void setAllowAutoTopicCreation(boolean allowAutoTopicCreation) { 40 | this.allowAutoTopicCreation = allowAutoTopicCreation; 41 | } 42 | 43 | @Override 44 | public void serializable(ByteBuf out){ 45 | header.serializable(out); 46 | SerializeUtils.writeStringListToBuffer(topics, out); 47 | out.writeBoolean(allowAutoTopicCreation); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/meta/KafkaMetaResponse.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.meta; 2 | 3 | import com.north.netty.kafka.bean.AbstractKafkaResponse; 4 | import com.north.netty.kafka.bean.KafkaResponse; 5 | import com.north.netty.kafka.bean.broker.Broker; 6 | import com.north.netty.kafka.bean.topic.TopicMetaData; 7 | import io.netty.buffer.ByteBuf; 8 | 9 | import java.io.Serializable; 10 | import java.util.ArrayList; 11 | import java.util.List; 12 | 13 | /** 14 | * @author 
laihaohua 15 | */ 16 | public class KafkaMetaResponse extends AbstractKafkaResponse implements Serializable, KafkaResponse { 17 | private Integer throttleTimeMs; 18 | private List brokers; 19 | private Integer controllerId = -1; 20 | /** 21 | * v2+的版本才有这个字段 22 | */ 23 | private String clusterId; 24 | private List topicMetadata; 25 | 26 | 27 | public Integer getThrottleTimeMs() { 28 | return throttleTimeMs; 29 | } 30 | 31 | public void setThrottleTimeMs(Integer throttleTimeMs) { 32 | this.throttleTimeMs = throttleTimeMs; 33 | } 34 | 35 | public List getBrokers() { 36 | return brokers; 37 | } 38 | 39 | public void setBrokers(List brokers) { 40 | this.brokers = brokers; 41 | } 42 | 43 | public Integer getControllerId() { 44 | return controllerId; 45 | } 46 | 47 | public void setControllerId(Integer controllerId) { 48 | this.controllerId = controllerId; 49 | } 50 | 51 | public String getClusterId() { 52 | return clusterId; 53 | } 54 | 55 | public void setClusterId(String clusterId) { 56 | this.clusterId = clusterId; 57 | } 58 | 59 | public List getTopicMetadata() { 60 | return topicMetadata; 61 | } 62 | 63 | public void setTopicMetadata(List topicMetadata) { 64 | this.topicMetadata = topicMetadata; 65 | } 66 | 67 | @Override 68 | public void deserialize(ByteBuf byteBuf) { 69 | int brokerIdCount = byteBuf.readInt(); 70 | if(brokerIdCount >= 0){ 71 | brokers = new ArrayList<>(brokerIdCount); 72 | for(int i =0 ; i < brokerIdCount; i++){ 73 | Broker broker = new Broker(); 74 | broker.deserialize(byteBuf); 75 | brokers.add(broker); 76 | } 77 | } 78 | this.controllerId = byteBuf.readInt(); 79 | int topicCount = byteBuf.readInt(); 80 | if(topicCount >= 0){ 81 | topicMetadata = new ArrayList<>(topicCount); 82 | for(int i =0 ; i < topicCount; i++){ 83 | TopicMetaData topicMetaData = new TopicMetaData(); 84 | topicMetaData.deserialize(byteBuf); 85 | topicMetadata.add(topicMetaData); 86 | } 87 | } 88 | } 89 | 90 | 91 | } 92 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/msg/ConsumerRecord.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.msg; 2 | 3 | import java.io.Serializable; 4 | 5 | public class ConsumerRecord implements Serializable { 6 | private long offset; 7 | private long timeStamp; 8 | private String key; 9 | private String val; 10 | 11 | public long getOffset() { 12 | return offset; 13 | } 14 | 15 | public void setOffset(long offset) { 16 | this.offset = offset; 17 | } 18 | 19 | public long getTimeStamp() { 20 | return timeStamp; 21 | } 22 | 23 | public void setTimeStamp(long timeStamp) { 24 | this.timeStamp = timeStamp; 25 | } 26 | 27 | public String getKey() { 28 | return key; 29 | } 30 | 31 | public void setKey(String key) { 32 | this.key = key; 33 | } 34 | 35 | public String getVal() { 36 | return val; 37 | } 38 | 39 | public void setVal(String val) { 40 | this.val = val; 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/msg/KafkaMsgRecordBatch.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.msg; 2 | 3 | import com.google.common.collect.Lists; 4 | import com.north.netty.kafka.utils.Crc32C; 5 | import io.netty.buffer.ByteBuf; 6 | import io.netty.buffer.PooledByteBufAllocator; 7 | 8 | import java.io.Serializable; 9 | import 
java.util.ArrayList; 10 | import java.util.List; 11 | 12 | public class KafkaMsgRecordBatch implements Serializable { 13 | private int totalSize; 14 | public KafkaMsgRecordBatch(){ 15 | 16 | } 17 | public KafkaMsgRecordBatch(KafkaMsgRecordV2 kafkaMsgRecordV2){ 18 | this.msgs = Lists.newArrayList(kafkaMsgRecordV2); 19 | this.baseOffset = 0L; 20 | this.firstTimestamp = this.maxTimestamp = System.currentTimeMillis(); 21 | /** 22 | * 61 是KafkaMsgRecordBatch整个消息头部的大小, 12是baseOffset和length的长度和 23 | * KafkaMsgRecordBatch的长度就是 头部+消息体-12 24 | */ 25 | this.length = 61 + kafkaMsgRecordV2.getMsgSize() - 12; 26 | this.totalSize = 61 + kafkaMsgRecordV2.getMsgSize(); 27 | } 28 | 29 | public void deserialize(ByteBuf in){ 30 | this.baseOffset = in.readLong(); 31 | this.length = in.readInt(); 32 | this.partitionLeaderVersion = in.readInt(); 33 | this.magic = in.readByte(); 34 | this.crc = in.readInt(); 35 | this.attributes = in.readShort(); 36 | this.lastOffsetDelta = in.readInt(); 37 | this.firstTimestamp = in.readLong(); 38 | this.maxTimestamp = in.readLong(); 39 | this.producerId = in.readLong(); 40 | this.epoch = in.readShort(); 41 | this.sequence = in.readInt(); 42 | this.numRecords = in.readInt(); 43 | if(numRecords >= 0){ 44 | msgs = new ArrayList<>(numRecords); 45 | for(int i=0; i msgs; 150 | 151 | public int getTotalSize() { 152 | return totalSize; 153 | } 154 | 155 | public void setTotalSize(int totalSize) { 156 | this.totalSize = totalSize; 157 | } 158 | 159 | public Long getBaseOffset() { 160 | return baseOffset; 161 | } 162 | 163 | public void setBaseOffset(Long baseOffset) { 164 | this.baseOffset = baseOffset; 165 | } 166 | 167 | public Integer getLength() { 168 | return length; 169 | } 170 | 171 | public void setLength(Integer length) { 172 | this.length = length; 173 | } 174 | 175 | public Integer getPartitionLeaderVersion() { 176 | return partitionLeaderVersion; 177 | } 178 | 179 | public void setPartitionLeaderVersion(Integer partitionLeaderVersion) { 180 | this.partitionLeaderVersion = partitionLeaderVersion; 181 | } 182 | 183 | public byte getMagic() { 184 | return magic; 185 | } 186 | 187 | public void setMagic(byte magic) { 188 | this.magic = magic; 189 | } 190 | 191 | public Integer getCrc() { 192 | return crc; 193 | } 194 | 195 | public void setCrc(Integer crc) { 196 | this.crc = crc; 197 | } 198 | 199 | public short getAttributes() { 200 | return attributes; 201 | } 202 | 203 | public void setAttributes(short attributes) { 204 | this.attributes = attributes; 205 | } 206 | 207 | public int getLastOffsetDelta() { 208 | return lastOffsetDelta; 209 | } 210 | 211 | public void setLastOffsetDelta(int lastOffsetDelta) { 212 | this.lastOffsetDelta = lastOffsetDelta; 213 | } 214 | 215 | public long getFirstTimestamp() { 216 | return firstTimestamp; 217 | } 218 | 219 | public void setFirstTimestamp(long firstTimestamp) { 220 | this.firstTimestamp = firstTimestamp; 221 | } 222 | 223 | public long getMaxTimestamp() { 224 | return maxTimestamp; 225 | } 226 | 227 | public void setMaxTimestamp(long maxTimestamp) { 228 | this.maxTimestamp = maxTimestamp; 229 | } 230 | 231 | public long getProducerId() { 232 | return producerId; 233 | } 234 | 235 | public void setProducerId(long producerId) { 236 | this.producerId = producerId; 237 | } 238 | 239 | public short getEpoch() { 240 | return epoch; 241 | } 242 | 243 | public void setEpoch(short epoch) { 244 | this.epoch = epoch; 245 | } 246 | 247 | public int getSequence() { 248 | return sequence; 249 | } 250 | 251 | public void setSequence(int sequence) 
{ 252 | this.sequence = sequence; 253 | } 254 | 255 | public int getNumRecords() { 256 | return numRecords; 257 | } 258 | 259 | public void setNumRecords(int numRecords) { 260 | this.numRecords = numRecords; 261 | } 262 | 263 | public List getMsgs() { 264 | return msgs; 265 | } 266 | 267 | public void setMsgs(List msgs) { 268 | this.msgs = msgs; 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/msg/KafkaMsgRecordV2.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.msg; 2 | 3 | import com.north.netty.kafka.utils.VarLengthUtils; 4 | import io.netty.buffer.ByteBuf; 5 | 6 | import java.io.Serializable; 7 | import java.nio.charset.StandardCharsets; 8 | import java.util.HashMap; 9 | import java.util.Map; 10 | 11 | /** 12 | * @author laihaohua 13 | */ 14 | public class KafkaMsgRecordV2 implements Serializable { 15 | 16 | private int msgBodySize; 17 | 18 | public KafkaMsgRecordV2(){ 19 | 20 | } 21 | public KafkaMsgRecordV2(byte [] key, byte [] values, Map headers){ 22 | this.key = key; 23 | this.values = values; 24 | if (headers == null){ 25 | headers = new HashMap<>(); 26 | } 27 | this.headers = headers; 28 | calcMsgSize(); 29 | 30 | } 31 | private void calcMsgSize(){ 32 | int size = 0; 33 | // attributes属性的大小 34 | size += 1; 35 | // offsetDelta的大小 36 | size += VarLengthUtils.sizeOfVarint(this.offsetDelta); 37 | // timestampDelta的大小 38 | size += VarLengthUtils.sizeOfVarlong(this.timestampDelta); 39 | if(this.key == null){ 40 | // -1 占1位 41 | size += VarLengthUtils.NULL_VARINT_SIZE_BYTES; 42 | }else{ 43 | size += VarLengthUtils.sizeOfVarint(this.key.length); 44 | size += this.key.length; 45 | } 46 | if(this.values == null){ 47 | // -1 占1位 48 | size += VarLengthUtils.NULL_VARINT_SIZE_BYTES; 49 | }else{ 50 | size += VarLengthUtils.sizeOfVarint(this.values.length); 51 | size += this.values.length; 52 | } 53 | size += VarLengthUtils.sizeOfVarint(headers.size()); 54 | for (Map.Entry header : headers.entrySet()) { 55 | String headerKey = header.getKey(); 56 | if (headerKey == null) { 57 | throw new IllegalArgumentException("Invalid null header key found in headers"); 58 | } 59 | 60 | int headerKeySize = VarLengthUtils.utf8Length(headerKey); 61 | size += VarLengthUtils.sizeOfVarint(headerKeySize) + headerKeySize; 62 | 63 | byte[] headerValue = header.getValue(); 64 | if (headerValue == null) { 65 | size += VarLengthUtils.NULL_VARINT_SIZE_BYTES; 66 | } else { 67 | size += VarLengthUtils.sizeOfVarint(headerValue.length) + headerValue.length; 68 | } 69 | } 70 | 71 | this.msgBodySize = size; 72 | this.msgSize = VarLengthUtils.sizeOfVarint(size) + size; 73 | } 74 | public void serializable(ByteBuf out){ 75 | VarLengthUtils.writeVarint(msgBodySize, out); 76 | out.writeByte(attributes); 77 | VarLengthUtils.writeVarlong(timestampDelta, out); 78 | VarLengthUtils.writeVarint(offsetDelta, out); 79 | if(key == null){ 80 | VarLengthUtils.writeVarint(-1, out); 81 | }else{ 82 | VarLengthUtils.writeVarint(key.length, out); 83 | out.writeBytes(key); 84 | } 85 | if(values == null){ 86 | VarLengthUtils.writeVarint(-1, out); 87 | }else{ 88 | VarLengthUtils.writeVarint(values.length, out); 89 | out.writeBytes(values); 90 | } 91 | 92 | VarLengthUtils.writeVarint(headers.size(), out); 93 | 94 | for (Map.Entry header : headers.entrySet()) { 95 | String headerKey = header.getKey(); 96 | if (headerKey == null) { 97 | throw new 
IllegalArgumentException("Invalid null header key found in headers"); 98 | } 99 | 100 | byte[] utf8Bytes = headerKey.getBytes(StandardCharsets.UTF_8); 101 | VarLengthUtils.writeVarint(utf8Bytes.length, out); 102 | out.writeBytes(utf8Bytes); 103 | 104 | byte[] headerValue = header.getValue(); 105 | if (headerValue == null) { 106 | VarLengthUtils.writeVarint(-1, out); 107 | } else { 108 | VarLengthUtils.writeVarint(headerValue.length, out); 109 | out.writeBytes(headerValue); 110 | } 111 | } 112 | 113 | } 114 | public void deserialize(ByteBuf in){ 115 | this.msgBodySize = VarLengthUtils.readVarint(in); 116 | this.attributes = in.readByte(); 117 | timestampDelta = VarLengthUtils.readVarlong(in); 118 | offsetDelta = VarLengthUtils.readVarint(in); 119 | int keyLen = VarLengthUtils.readVarint(in); 120 | if(keyLen >= 0){ 121 | this.key = new byte[keyLen]; 122 | in.readBytes(key); 123 | } 124 | 125 | int valueLen = VarLengthUtils.readVarint(in); 126 | if(valueLen >= 0){ 127 | this.values = new byte[valueLen]; 128 | in.readBytes(values); 129 | } 130 | int headerSize = VarLengthUtils.readVarint(in); 131 | if(headerSize >= 0){ 132 | headers = new HashMap<>(headerSize); 133 | for(int i=0; i < headerSize; i++){ 134 | keyLen = VarLengthUtils.readVarint(in); 135 | byte [] bs = new byte[keyLen]; 136 | in.readBytes(bs); 137 | String key = new String(bs); 138 | valueLen = VarLengthUtils.readVarint(in); 139 | bs = new byte[valueLen]; 140 | in.readBytes(bs); 141 | headers.put(key, bs); 142 | } 143 | } 144 | } 145 | /** 146 | * 消息总长度 147 | */ 148 | private int msgSize; 149 | /** 150 | * 1字节的属性 我们这里没有用到压缩 所以写死为0 151 | */ 152 | private byte attributes = 0; 153 | /** 154 | * 当前消息中的时间戳 与该批次中的第一条消息的时间戳的差值 155 | * 在我们这里的实现里,每个批次只发送一条消息, 所以这个值固定为0 156 | */ 157 | private long timestampDelta = 0; 158 | 159 | /** 160 | * 当前消息的offset与该批次中的第一条消息的offset的差值 161 | * 在我们这里的实现里,每个批次只发送一条消息, 所以这个值固定为0 162 | */ 163 | private int offsetDelta = 0; 164 | /** 165 | * 该消息的key 166 | */ 167 | private byte [] key; 168 | /** 169 | * 该消息的value 170 | */ 171 | private byte [] values; 172 | 173 | /** 174 | * 该消息的headers 175 | */ 176 | private Map headers; 177 | 178 | 179 | public int getMsgSize() { 180 | return msgSize; 181 | } 182 | 183 | public void setMsgSize(int msgSize) { 184 | this.msgSize = msgSize; 185 | } 186 | 187 | public byte getAttributes() { 188 | return attributes; 189 | } 190 | 191 | public void setAttributes(byte attributes) { 192 | this.attributes = attributes; 193 | } 194 | 195 | public long getTimestampDelta() { 196 | return timestampDelta; 197 | } 198 | 199 | public void setTimestampDelta(long timestampDelta) { 200 | this.timestampDelta = timestampDelta; 201 | } 202 | 203 | public int getOffsetDelta() { 204 | return offsetDelta; 205 | } 206 | 207 | public void setOffsetDelta(int offsetDelta) { 208 | this.offsetDelta = offsetDelta; 209 | } 210 | 211 | public byte[] getKey() { 212 | return key; 213 | } 214 | 215 | public void setKey(byte[] key) { 216 | this.key = key; 217 | } 218 | 219 | public byte[] getValues() { 220 | return values; 221 | } 222 | 223 | public void setValues(byte[] values) { 224 | this.values = values; 225 | } 226 | 227 | public Map getHeaders() { 228 | return headers; 229 | } 230 | 231 | public void setHeaders(Map headers) { 232 | this.headers = headers; 233 | } 234 | } 235 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/partition/PartitionMateData.java: 
-------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.partition; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | import java.io.Serializable; 6 | import java.util.ArrayList; 7 | import java.util.List; 8 | 9 | /** 10 | * @author laihaohua 11 | */ 12 | public class PartitionMateData implements Serializable { 13 | private Short errorCode; 14 | private Integer partitionId; 15 | private Integer leader; 16 | private List replicas; 17 | private List isr; 18 | /** 19 | * v2版本才有offline_replicas 20 | */ 21 | private List offlineReplicas; 22 | 23 | public Short getErrorCode() { 24 | return errorCode; 25 | } 26 | 27 | public void setErrorCode(Short errorCode) { 28 | this.errorCode = errorCode; 29 | } 30 | 31 | public Integer getPartitionId() { 32 | return partitionId; 33 | } 34 | 35 | public void setPartitionId(Integer partitionId) { 36 | this.partitionId = partitionId; 37 | } 38 | 39 | public Integer getLeader() { 40 | return leader; 41 | } 42 | 43 | public void setLeader(Integer leader) { 44 | this.leader = leader; 45 | } 46 | 47 | public List getReplicas() { 48 | return replicas; 49 | } 50 | 51 | public void setReplicas(List replicas) { 52 | this.replicas = replicas; 53 | } 54 | 55 | public List getIsr() { 56 | return isr; 57 | } 58 | 59 | public void setIsr(List isr) { 60 | this.isr = isr; 61 | } 62 | 63 | public List getOfflineReplicas() { 64 | return offlineReplicas; 65 | } 66 | 67 | public void setOfflineReplicas(List offlineReplicas) { 68 | this.offlineReplicas = offlineReplicas; 69 | } 70 | 71 | public void deserialize(ByteBuf byteBuf) { 72 | this.errorCode = byteBuf.readShort(); 73 | this.partitionId = byteBuf.readInt(); 74 | this.leader = byteBuf.readInt(); 75 | int replicasCount = byteBuf.readInt(); 76 | if(replicasCount >= 0){ 77 | replicas = new ArrayList<>(replicasCount); 78 | for(int i=0; i< replicasCount; i++){ 79 | replicas.add(byteBuf.readInt()); 80 | } 81 | } 82 | int isrCount = byteBuf.readInt(); 83 | if(isrCount >= 0){ 84 | isr = new ArrayList<>(isrCount); 85 | for(int i=0; i< isrCount; i++){ 86 | isr.add(byteBuf.readInt()); 87 | } 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/produce/PartitionData.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.produce; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | import java.io.Serializable; 6 | 7 | public class PartitionData implements Serializable { 8 | private Record recordSset; 9 | public void serializable(ByteBuf out){ 10 | recordSset.serializable(out); 11 | } 12 | 13 | public Record getRecordSset() { 14 | return recordSset; 15 | } 16 | 17 | public void setRecordSset(Record recordSset) { 18 | this.recordSset = recordSset; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/produce/PartitionResponse.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.produce; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | 5 | import java.io.Serializable; 6 | 7 | public class PartitionResponse implements Serializable { 8 | private Integer partitionId; 9 | private Short errorCode; 10 | private Long baseOffset; 11 | private Long logAppendTime; 12 | private Long logStartOffset; 13 | public void deserialize(ByteBuf 
byteBuf) { 14 | this.partitionId = byteBuf.readInt(); 15 | this.errorCode = byteBuf.readShort(); 16 | this.baseOffset = byteBuf.readLong(); 17 | this.logAppendTime = byteBuf.readLong(); 18 | this.logStartOffset = byteBuf.readLong(); 19 | } 20 | 21 | public Integer getPartitionId() { 22 | return partitionId; 23 | } 24 | 25 | public void setPartitionId(Integer partitionId) { 26 | this.partitionId = partitionId; 27 | } 28 | 29 | public Short getErrorCode() { 30 | return errorCode; 31 | } 32 | 33 | public void setErrorCode(Short errorCode) { 34 | this.errorCode = errorCode; 35 | } 36 | 37 | public Long getBaseOffset() { 38 | return baseOffset; 39 | } 40 | 41 | public void setBaseOffset(Long baseOffset) { 42 | this.baseOffset = baseOffset; 43 | } 44 | 45 | public Long getLogAppendTime() { 46 | return logAppendTime; 47 | } 48 | 49 | public void setLogAppendTime(Long logAppendTime) { 50 | this.logAppendTime = logAppendTime; 51 | } 52 | 53 | public Long getLogStartOffset() { 54 | return logStartOffset; 55 | } 56 | 57 | public void setLogStartOffset(Long logStartOffset) { 58 | this.logStartOffset = logStartOffset; 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/produce/ProduceRequest.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.produce; 2 | 3 | import com.north.netty.kafka.bean.KafkaRequest; 4 | import com.north.netty.kafka.bean.KafkaRequestHeader; 5 | import com.north.netty.kafka.enums.ApiKeys; 6 | import io.netty.buffer.ByteBuf; 7 | import org.north.netty.common.utils.SerializeUtils; 8 | 9 | import java.util.Arrays; 10 | import java.util.List; 11 | 12 | public class ProduceRequest implements KafkaRequest { 13 | private String transactionalId; 14 | private Short acks; 15 | private Integer timeOut; 16 | private List topicData; 17 | private KafkaRequestHeader requestHeader; 18 | public ProduceRequest(String clientId, Integer correlationId) { 19 | super(); 20 | this.requestHeader = new KafkaRequestHeader(); 21 | this.requestHeader.setClientId(clientId); 22 | this.requestHeader.setCorrelationId(correlationId); 23 | this.requestHeader.setApiKey(ApiKeys.PRODUCE.id); 24 | this.requestHeader.setApiVersion(ApiKeys.PRODUCE.apiVersion); 25 | 26 | } 27 | 28 | @Override 29 | public void serializable(ByteBuf out) { 30 | requestHeader.serializable(out); 31 | SerializeUtils.writeStringToBuffer2(transactionalId, out); 32 | out.writeShort(acks); 33 | out.writeInt(timeOut); 34 | if(topicData == null){ 35 | out.writeInt(-1); 36 | }else { 37 | out.writeInt(topicData.size()); 38 | for(TopicProduceData topicProduceData: topicData){ 39 | topicProduceData.serializable(out); 40 | 41 | } 42 | } 43 | 44 | } 45 | 46 | public String getTransactionalId() { 47 | return transactionalId; 48 | } 49 | 50 | public void setTransactionalId(String transactionalId) { 51 | this.transactionalId = transactionalId; 52 | } 53 | 54 | public Short getAcks() { 55 | return acks; 56 | } 57 | 58 | public void setAcks(Short acks) { 59 | this.acks = acks; 60 | } 61 | 62 | public Integer getTimeOut() { 63 | return timeOut; 64 | } 65 | 66 | public void setTimeOut(Integer timeOut) { 67 | this.timeOut = timeOut; 68 | } 69 | 70 | public List getTopicData() { 71 | return topicData; 72 | } 73 | 74 | public void setTopicData(List topicData) { 75 | this.topicData = topicData; 76 | } 77 | } 78 | 
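ProduceRequest.serializable above leans on SerializeUtils.writeStringToBuffer2 from awesome-netty-common, which is not reproduced in this section. The Kafka protocol encodes a nullable STRING as a 2-byte length (-1 for null) followed by the UTF-8 bytes, and the hypothetical helper below sketches that convention so the produce layout (transactional_id, acks, timeout, topic array) is easier to follow; it is an assumption about what the utility does, not the project's actual code.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.StandardCharsets;

/** Illustrative stand-in for the nullable-string helper used by the request beans. */
public class NullableStringSketch {
    static void writeString(String s, ByteBuf out) {
        if (s == null) {
            out.writeShort(-1);                    // -1 length marks a null string on the wire
            return;
        }
        byte[] utf8 = s.getBytes(StandardCharsets.UTF_8);
        out.writeShort(utf8.length);
        out.writeBytes(utf8);
    }

    static String readString(ByteBuf in) {
        int len = in.readShort();
        if (len < 0) {
            return null;
        }
        byte[] utf8 = new byte[len];
        in.readBytes(utf8);
        return new String(utf8, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        ByteBuf buf = Unpooled.buffer();
        writeString(null, buf);                    // transactionalId is typically null for a plain producer
        writeString("testTopic123", buf);
        System.out.println(readString(buf) + " / " + readString(buf));
    }
}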
-------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/produce/ProduceResponse.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.produce; 2 | 3 | import com.north.netty.kafka.bean.AbstractKafkaResponse; 4 | import com.north.netty.kafka.bean.KafkaResponse; 5 | import io.netty.buffer.ByteBuf; 6 | 7 | import java.io.Serializable; 8 | import java.util.ArrayList; 9 | import java.util.List; 10 | 11 | public class ProduceResponse extends AbstractKafkaResponse implements Serializable, KafkaResponse { 12 | private List topicProduceResList; 13 | private Integer throttleTimeMs; 14 | 15 | @Override 16 | public void deserialize(ByteBuf byteBuf) { 17 | int topicCount = byteBuf.readInt(); 18 | if(topicCount >= 0){ 19 | topicProduceResList = new ArrayList<>(topicCount); 20 | for(int i=0; i < topicCount; i++){ 21 | TopicProduceRes topicProduceRes = new TopicProduceRes(); 22 | topicProduceRes.deserialize(byteBuf); 23 | topicProduceResList.add(topicProduceRes); 24 | } 25 | } 26 | this.throttleTimeMs = byteBuf.readInt(); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/produce/Record.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.produce; 2 | 3 | import com.google.gson.Gson; 4 | import com.north.netty.kafka.bean.msg.KafkaMsgRecordBatch; 5 | import io.netty.buffer.ByteBuf; 6 | 7 | import java.io.Serializable; 8 | import java.util.ArrayList; 9 | import java.util.List; 10 | 11 | public class Record implements Serializable { 12 | private Integer partition; 13 | private List kafkaMsgRecordBatchList; 14 | 15 | public void serializable(ByteBuf out){ 16 | out.writeInt(partition); 17 | if(kafkaMsgRecordBatchList != null){ 18 | for(KafkaMsgRecordBatch kafkaMsgRecordBatch : kafkaMsgRecordBatchList){ 19 | if(kafkaMsgRecordBatch == null){ 20 | out.writeInt(-1); 21 | }else{ 22 | out.writeInt(kafkaMsgRecordBatch.getTotalSize()); 23 | kafkaMsgRecordBatch.serializable(out); 24 | } 25 | } 26 | } 27 | 28 | 29 | } 30 | public void deserialize(ByteBuf in){ 31 | int totalSize = in.readInt(); 32 | kafkaMsgRecordBatchList = new ArrayList<>(); 33 | while(in.readerIndex() != in.writerIndex()){ 34 | KafkaMsgRecordBatch kafkaMsgRecordBatch = new KafkaMsgRecordBatch(); 35 | kafkaMsgRecordBatch.deserialize(in); 36 | kafkaMsgRecordBatchList.add(kafkaMsgRecordBatch); 37 | } 38 | } 39 | 40 | public Integer getPartition() { 41 | return partition; 42 | } 43 | 44 | public void setPartition(Integer partition) { 45 | this.partition = partition; 46 | } 47 | 48 | public List getKafkaMsgRecordBatchList() { 49 | return kafkaMsgRecordBatchList; 50 | } 51 | 52 | public void setKafkaMsgRecordBatchList(List kafkaMsgRecordBatchList) { 53 | this.kafkaMsgRecordBatchList = kafkaMsgRecordBatchList; 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/produce/TopicProduceData.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.produce; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import org.north.netty.common.utils.SerializeUtils; 5 | 6 | import java.io.Serializable; 7 | import java.util.List; 8 | 9 | public class TopicProduceData 
implements Serializable { 10 | private String topic; 11 | private List data; 12 | public void serializable(ByteBuf out){ 13 | SerializeUtils.writeStringToBuffer2(topic, out); 14 | if(data == null){ 15 | out.writeInt(-1); 16 | }else{ 17 | out.writeInt(data.size()); 18 | for(PartitionData partitionData : data){ 19 | partitionData.serializable(out); 20 | } 21 | } 22 | } 23 | 24 | public String getTopic() { 25 | return topic; 26 | } 27 | 28 | public void setTopic(String topic) { 29 | this.topic = topic; 30 | } 31 | 32 | public List getData() { 33 | return data; 34 | } 35 | 36 | public void setData(List data) { 37 | this.data = data; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/produce/TopicProduceRes.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.produce; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import org.north.netty.common.utils.SerializeUtils; 5 | 6 | import java.io.Serializable; 7 | import java.util.ArrayList; 8 | import java.util.List; 9 | 10 | public class TopicProduceRes implements Serializable { 11 | private String topic; 12 | private List partitionResponseList; 13 | public void deserialize(ByteBuf byteBuf) { 14 | this.topic = SerializeUtils.readStringToBuffer2(byteBuf); 15 | int partitionCount = byteBuf.readInt(); 16 | if(partitionCount >= 0){ 17 | partitionResponseList = new ArrayList<>(); 18 | for(int i = 0; i < partitionCount; i++){ 19 | PartitionResponse partitionResponse = new PartitionResponse(); 20 | partitionResponse.deserialize(byteBuf); 21 | partitionResponseList.add(partitionResponse); 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/bean/topic/TopicMetaData.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.bean.topic; 2 | 3 | import com.north.netty.kafka.bean.partition.PartitionMateData; 4 | import io.netty.buffer.ByteBuf; 5 | import org.north.netty.common.utils.SerializeUtils; 6 | 7 | import java.io.Serializable; 8 | import java.util.ArrayList; 9 | import java.util.List; 10 | 11 | /** 12 | * @author laihaohua 13 | */ 14 | public class TopicMetaData implements Serializable { 15 | private Short errorCode; 16 | private String topicName; 17 | /** 18 | * 是否是内部的topic 19 | */ 20 | private Boolean isInternal; 21 | 22 | private List partitionMateDataList; 23 | 24 | 25 | public Short getErrorCode() { 26 | return errorCode; 27 | } 28 | 29 | public void setErrorCode(Short errorCode) { 30 | this.errorCode = errorCode; 31 | } 32 | 33 | public String getTopicName() { 34 | return topicName; 35 | } 36 | 37 | public void setTopicName(String topicName) { 38 | this.topicName = topicName; 39 | } 40 | 41 | public Boolean getInternal() { 42 | return isInternal; 43 | } 44 | 45 | public void setInternal(Boolean internal) { 46 | isInternal = internal; 47 | } 48 | 49 | public List getPartitionMateDataList() { 50 | return partitionMateDataList; 51 | } 52 | 53 | public void setPartitionMateDataList(List partitionMateDataList) { 54 | this.partitionMateDataList = partitionMateDataList; 55 | } 56 | 57 | public void deserialize(ByteBuf byteBuf) { 58 | this.errorCode = byteBuf.readShort(); 59 | this.topicName = SerializeUtils.readStringToBuffer2(byteBuf); 60 | this.isInternal = byteBuf.readBoolean(); 61 | int count = 
byteBuf.readInt(); 62 | if(count >= 0){ 63 | partitionMateDataList = new ArrayList<>(count); 64 | for(int i=0; i < count; i++){ 65 | PartitionMateData partitionMateData = new PartitionMateData(); 66 | partitionMateData.deserialize(byteBuf); 67 | partitionMateDataList.add(partitionMateData); 68 | } 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/caches/RequestCacheCenter.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.caches; 2 | 3 | import com.google.common.cache.Cache; 4 | import com.google.common.cache.CacheBuilder; 5 | import com.north.netty.kafka.bean.AbstractKafkaResponse; 6 | 7 | import java.util.concurrent.TimeUnit; 8 | 9 | /** 10 | * @author laihaohua 11 | */ 12 | public final class RequestCacheCenter { 13 | private Cache cache = CacheBuilder.newBuilder() 14 | .concurrencyLevel(Runtime.getRuntime().availableProcessors()) 15 | .expireAfterAccess(5, TimeUnit.MINUTES) 16 | .build(); 17 | 18 | public void putKafkaResponse(Integer correlationId, AbstractKafkaResponse kafkaResponse){ 19 | cache.put(correlationId, kafkaResponse); 20 | } 21 | 22 | public AbstractKafkaResponse getKafkaResponse(Integer correlationId){ 23 | return cache.getIfPresent(correlationId); 24 | } 25 | 26 | public AbstractKafkaResponse waitForResp(Integer correlationId, long timeoutMs){ 27 | long bt = System.currentTimeMillis(); 28 | AbstractKafkaResponse response = null; 29 | while(response == null || response.getCorrelationId() == null){ 30 | long et = System.currentTimeMillis(); 31 | if(et > bt + timeoutMs){ 32 | return null; 33 | } 34 | response = cache.getIfPresent(correlationId); 35 | } 36 | return response; 37 | } 38 | 39 | 40 | } 41 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/codec/KafkaResponseDecoder.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.codec; 2 | 3 | import com.north.netty.kafka.bean.AbstractKafkaResponse; 4 | import com.north.netty.kafka.bean.KafkaResponseHeader; 5 | import com.north.netty.kafka.caches.RequestCacheCenter; 6 | import io.netty.buffer.ByteBuf; 7 | import io.netty.channel.ChannelHandlerContext; 8 | import io.netty.handler.codec.ByteToMessageDecoder; 9 | 10 | import java.util.List; 11 | 12 | public class KafkaResponseDecoder extends ByteToMessageDecoder { 13 | private RequestCacheCenter requestCacheCenter; 14 | public KafkaResponseDecoder(RequestCacheCenter requestCacheCenter){ 15 | this.requestCacheCenter = requestCacheCenter; 16 | } 17 | @Override 18 | protected void decode(ChannelHandlerContext ctx, ByteBuf in, List out) throws Exception { 19 | KafkaResponseHeader kafkaResponseHeader = new KafkaResponseHeader(); 20 | kafkaResponseHeader.deserialize(in); 21 | if(kafkaResponseHeader.getCorrelationId() == null){ 22 | throw new IllegalStateException("服务端返回的correlationId 为null"); 23 | } 24 | AbstractKafkaResponse abstractKafkaResponse = requestCacheCenter.getKafkaResponse(kafkaResponseHeader.getCorrelationId()); 25 | if(abstractKafkaResponse == null){ 26 | throw new IllegalStateException("服务端返回的correlationId不是本客户端发送的"); 27 | } 28 | abstractKafkaResponse.deserialize(in); 29 | abstractKafkaResponse.setKafkaResponseHeader(kafkaResponseHeader); 30 | abstractKafkaResponse.setCorrelationId(kafkaResponseHeader.getCorrelationId()); 31 | 
requestCacheCenter.putKafkaResponse(kafkaResponseHeader.getCorrelationId(), abstractKafkaResponse); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/config/KafkaConsumerConfig.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.config; 2 | 3 | import java.io.Serializable; 4 | 5 | public class KafkaConsumerConfig implements Serializable { 6 | /** 7 | * 等待响应返回的最大ms 8 | */ 9 | private Integer maxWaitTime; 10 | /** 11 | * 响应的最小字节数 12 | */ 13 | private Integer minBytes; 14 | /** 15 | * 响应的最大字节数 16 | */ 17 | private Integer maxBytes; 18 | 19 | public Integer getMaxWaitTime() { 20 | return maxWaitTime; 21 | } 22 | 23 | public void setMaxWaitTime(Integer maxWaitTime) { 24 | this.maxWaitTime = maxWaitTime; 25 | } 26 | 27 | public Integer getMinBytes() { 28 | return minBytes; 29 | } 30 | 31 | public void setMinBytes(Integer minBytes) { 32 | this.minBytes = minBytes; 33 | } 34 | 35 | public Integer getMaxBytes() { 36 | return maxBytes; 37 | } 38 | 39 | public void setMaxBytes(Integer maxBytes) { 40 | this.maxBytes = maxBytes; 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/config/KafkaProduceConfig.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.config; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public class KafkaProduceConfig { 7 | /** 8 | * 发送消息的确认模式, 默认就是-1(all) 9 | */ 10 | private short ack = -1; 11 | 12 | /** 13 | * 超时时间, 默认30秒 14 | */ 15 | private int timeout = 30000; 16 | 17 | public short getAck() { 18 | return ack; 19 | } 20 | 21 | public void setAck(short ack) { 22 | this.ack = ack; 23 | } 24 | 25 | public int getTimeout() { 26 | return timeout; 27 | } 28 | 29 | public void setTimeout(int timeout) { 30 | this.timeout = timeout; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/enums/ApiKeys.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | package com.north.netty.kafka.enums; 18 | 19 | /** 20 | * Identifiers for all the Kafka APIs 21 | */ 22 | public enum ApiKeys { 23 | /** 24 | * 发送消息 25 | */ 26 | PRODUCE(0, "Produce", (short) 5), 27 | 28 | /** 29 | * fetch 消息 30 | */ 31 | FETCH(1, "Fetch", (short)6), 32 | /** 33 | * 拉取元数据 34 | */ 35 | METADATA(3, "Metadata", (short) 1); 36 | 37 | public final short id; 38 | 39 | public final String name; 40 | 41 | public short apiVersion; 42 | 43 | ApiKeys(int id, String name, short apiVersion) { 44 | this.id = (short) id; 45 | this.name = name; 46 | this.apiVersion = apiVersion; 47 | } 48 | 49 | 50 | } 51 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/enums/Errors.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | package com.north.netty.kafka.enums; 18 | 19 | 20 | import java.util.HashMap; 21 | import java.util.Map; 22 | 23 | /** 24 | * This class contains all the client-server errors--those errors that must be sent from the server to the client. These 25 | * are thus part of the protocol. The names can be changed but the error code cannot. 26 | * 27 | * Note that client library will convert an unknown error code to the non-retriable Exception if the client library 28 | * version is old and does not recognize the newly-added error code. Therefore when a new server-side error is added, 29 | * we may need extra logic to convert the new error code to another existing error code before sending the response back to 30 | * the client if the request version suggests that the client may not recognize the new error code. 31 | * 32 | * Do not add exceptions that occur only on the client or only on the server here. 
33 | */ 34 | public enum Errors { 35 | UNKNOWN_SERVER_ERROR(-1, "The server experienced an unexpected error when processing the request", 36 | new ApiExceptionBuilder() { 37 | @Override 38 | public Exception build(String message) { 39 | return new Exception(message); 40 | } 41 | }), 42 | NONE(0, null, 43 | new ApiExceptionBuilder() { 44 | @Override 45 | public Exception build(String message) { 46 | return null; 47 | } 48 | }), 49 | OFFSET_OUT_OF_RANGE(1, "The requested offset is not within the range of offsets maintained by the server.", 50 | new ApiExceptionBuilder() { 51 | @Override 52 | public Exception build(String message) { 53 | return new Exception(message); 54 | } 55 | }), 56 | CORRUPT_MESSAGE(2, "This message has failed its CRC checksum, exceeds the valid size, or is otherwise corrupt.", 57 | new ApiExceptionBuilder() { 58 | @Override 59 | public Exception build(String message) { 60 | return new Exception(message); 61 | } 62 | }), 63 | UNKNOWN_TOPIC_OR_PARTITION(3, "This server does not host this topic-partition.", 64 | new ApiExceptionBuilder() { 65 | @Override 66 | public Exception build(String message) { 67 | return new Exception(message); 68 | } 69 | }), 70 | INVALID_FETCH_SIZE(4, "The requested fetch size is invalid.", 71 | new ApiExceptionBuilder() { 72 | @Override 73 | public Exception build(String message) { 74 | return new Exception(message); 75 | } 76 | }), 77 | LEADER_NOT_AVAILABLE(5, "There is no leader for this topic-partition as we are in the middle of a leadership election.", 78 | new ApiExceptionBuilder() { 79 | @Override 80 | public Exception build(String message) { 81 | return new Exception(message); 82 | } 83 | }), 84 | NOT_LEADER_FOR_PARTITION(6, "This server is not the leader for that topic-partition.", 85 | new ApiExceptionBuilder() { 86 | @Override 87 | public Exception build(String message) { 88 | return new Exception(message); 89 | } 90 | }), 91 | REQUEST_TIMED_OUT(7, "The request timed out.", 92 | new ApiExceptionBuilder() { 93 | @Override 94 | public Exception build(String message) { 95 | return new Exception(message); 96 | } 97 | }), 98 | BROKER_NOT_AVAILABLE(8, "The broker is not available.", 99 | new ApiExceptionBuilder() { 100 | @Override 101 | public Exception build(String message) { 102 | return new Exception(message); 103 | } 104 | }), 105 | REPLICA_NOT_AVAILABLE(9, "The replica is not available for the requested topic-partition", 106 | new ApiExceptionBuilder() { 107 | @Override 108 | public Exception build(String message) { 109 | return new Exception(message); 110 | } 111 | }), 112 | MESSAGE_TOO_LARGE(10, "The request included a message larger than the max message size the server will accept.", 113 | new ApiExceptionBuilder() { 114 | @Override 115 | public Exception build(String message) { 116 | return new Exception(message); 117 | } 118 | }), 119 | STALE_CONTROLLER_EPOCH(11, "The controller moved to another broker.", 120 | new ApiExceptionBuilder() { 121 | @Override 122 | public Exception build(String message) { 123 | return new Exception(message); 124 | } 125 | }), 126 | OFFSET_METADATA_TOO_LARGE(12, "The metadata field of the offset request was too large.", 127 | new ApiExceptionBuilder() { 128 | @Override 129 | public Exception build(String message) { 130 | return new Exception(message); 131 | } 132 | }), 133 | NETWORK_EXCEPTION(13, "The server disconnected before a response was received.", 134 | new ApiExceptionBuilder() { 135 | @Override 136 | public Exception build(String message) { 137 | return new Exception(message); 138 | } 139 | }), 
140 | COORDINATOR_LOAD_IN_PROGRESS(14, "The coordinator is loading and hence can't process requests.", 141 | new ApiExceptionBuilder() { 142 | @Override 143 | public Exception build(String message) { 144 | return new Exception(message); 145 | } 146 | }), 147 | COORDINATOR_NOT_AVAILABLE(15, "The coordinator is not available.", 148 | new ApiExceptionBuilder() { 149 | @Override 150 | public Exception build(String message) { 151 | return new Exception(message); 152 | } 153 | }), 154 | NOT_COORDINATOR(16, "This is not the correct coordinator.", 155 | new ApiExceptionBuilder() { 156 | @Override 157 | public Exception build(String message) { 158 | return new Exception(message); 159 | } 160 | }), 161 | INVALID_TOPIC_EXCEPTION(17, "The request attempted to perform an operation on an invalid topic.", 162 | new ApiExceptionBuilder() { 163 | @Override 164 | public Exception build(String message) { 165 | return new Exception(message); 166 | } 167 | }), 168 | RECORD_LIST_TOO_LARGE(18, "The request included message batch larger than the configured segment size on the server.", 169 | new ApiExceptionBuilder() { 170 | @Override 171 | public Exception build(String message) { 172 | return new Exception(message); 173 | } 174 | }), 175 | NOT_ENOUGH_REPLICAS(19, "Messages are rejected since there are fewer in-sync replicas than required.", 176 | new ApiExceptionBuilder() { 177 | @Override 178 | public Exception build(String message) { 179 | return new Exception(message); 180 | } 181 | }), 182 | NOT_ENOUGH_REPLICAS_AFTER_APPEND(20, "Messages are written to the log, but to fewer in-sync replicas than required.", 183 | new ApiExceptionBuilder() { 184 | @Override 185 | public Exception build(String message) { 186 | return new Exception(message); 187 | } 188 | }), 189 | INVALID_REQUIRED_ACKS(21, "Produce request specified an invalid value for required acks.", 190 | new ApiExceptionBuilder() { 191 | @Override 192 | public Exception build(String message) { 193 | return new Exception(message); 194 | } 195 | }), 196 | ILLEGAL_GENERATION(22, "Specified group generation id is not valid.", 197 | new ApiExceptionBuilder() { 198 | @Override 199 | public Exception build(String message) { 200 | return new Exception(message); 201 | } 202 | }), 203 | INCONSISTENT_GROUP_PROTOCOL(23, 204 | "The group member's supported protocols are incompatible with those of existing members" + 205 | " or first group member tried to join with empty protocol type or empty protocol list.", 206 | new ApiExceptionBuilder() { 207 | @Override 208 | public Exception build(String message) { 209 | return new Exception(message); 210 | } 211 | }), 212 | INVALID_GROUP_ID(24, "The configured groupId is invalid", 213 | new ApiExceptionBuilder() { 214 | @Override 215 | public Exception build(String message) { 216 | return new Exception(message); 217 | } 218 | }), 219 | UNKNOWN_MEMBER_ID(25, "The coordinator is not aware of this member.", 220 | new ApiExceptionBuilder() { 221 | @Override 222 | public Exception build(String message) { 223 | return new Exception(message); 224 | } 225 | }), 226 | INVALID_SESSION_TIMEOUT(26, 227 | "The session timeout is not within the range allowed by the broker " + 228 | "(as configured by group.min.session.timeout.ms and group.max.session.timeout.ms).", 229 | new ApiExceptionBuilder() { 230 | @Override 231 | public Exception build(String message) { 232 | return new Exception(message); 233 | } 234 | }), 235 | REBALANCE_IN_PROGRESS(27, "The group is rebalancing, so a rejoin is needed.", 236 | new ApiExceptionBuilder() { 237 | 
@Override 238 | public Exception build(String message) { 239 | return new Exception(message); 240 | } 241 | }), 242 | INVALID_COMMIT_OFFSET_SIZE(28, "The committing offset data size is not valid", 243 | new ApiExceptionBuilder() { 244 | @Override 245 | public Exception build(String message) { 246 | return new Exception(message); 247 | } 248 | }), 249 | TOPIC_AUTHORIZATION_FAILED(29, "Topic authorization failed.", 250 | new ApiExceptionBuilder() { 251 | @Override 252 | public Exception build(String message) { 253 | return new Exception(message); 254 | } 255 | }), 256 | GROUP_AUTHORIZATION_FAILED(30, "Group authorization failed.", 257 | new ApiExceptionBuilder() { 258 | @Override 259 | public Exception build(String message) { 260 | return new Exception(message); 261 | } 262 | }), 263 | CLUSTER_AUTHORIZATION_FAILED(31, "Cluster authorization failed.", 264 | new ApiExceptionBuilder() { 265 | @Override 266 | public Exception build(String message) { 267 | return new Exception(message); 268 | } 269 | }), 270 | INVALID_TIMESTAMP(32, "The timestamp of the message is out of acceptable range.", 271 | new ApiExceptionBuilder() { 272 | @Override 273 | public Exception build(String message) { 274 | return new Exception(message); 275 | } 276 | }), 277 | UNSUPPORTED_SASL_MECHANISM(33, "The broker does not support the requested SASL mechanism.", 278 | new ApiExceptionBuilder() { 279 | @Override 280 | public Exception build(String message) { 281 | return new Exception(message); 282 | } 283 | }), 284 | ILLEGAL_SASL_STATE(34, "Request is not valid given the current SASL state.", 285 | new ApiExceptionBuilder() { 286 | @Override 287 | public Exception build(String message) { 288 | return new Exception(message); 289 | } 290 | }), 291 | UNSUPPORTED_VERSION(35, "The version of API is not supported.", 292 | new ApiExceptionBuilder() { 293 | @Override 294 | public Exception build(String message) { 295 | return new Exception(message); 296 | } 297 | }), 298 | TOPIC_ALREADY_EXISTS(36, "Topic with this name already exists.", 299 | new ApiExceptionBuilder() { 300 | @Override 301 | public Exception build(String message) { 302 | return new Exception(message); 303 | } 304 | }), 305 | INVALID_PARTITIONS(37, "Number of partitions is invalid.", 306 | new ApiExceptionBuilder() { 307 | @Override 308 | public Exception build(String message) { 309 | return new Exception(message); 310 | } 311 | }), 312 | INVALID_REPLICATION_FACTOR(38, "Replication-factor is invalid.", 313 | new ApiExceptionBuilder() { 314 | @Override 315 | public Exception build(String message) { 316 | return new Exception(message); 317 | } 318 | }), 319 | INVALID_REPLICA_ASSIGNMENT(39, "Replica assignment is invalid.", 320 | new ApiExceptionBuilder() { 321 | @Override 322 | public Exception build(String message) { 323 | return new Exception(message); 324 | } 325 | }), 326 | INVALID_CONFIG(40, "Configuration is invalid.", 327 | new ApiExceptionBuilder() { 328 | @Override 329 | public Exception build(String message) { 330 | return new Exception(message); 331 | } 332 | }), 333 | NOT_CONTROLLER(41, "This is not the correct controller for this cluster.", 334 | new ApiExceptionBuilder() { 335 | @Override 336 | public Exception build(String message) { 337 | return new Exception(message); 338 | } 339 | }), 340 | INVALID_REQUEST(42, "This most likely occurs because of a request being malformed by the " + 341 | "client library or the message was sent to an incompatible broker. 
See the broker logs " + 342 | "for more details.", 343 | new ApiExceptionBuilder() { 344 | @Override 345 | public Exception build(String message) { 346 | return new Exception(message); 347 | } 348 | }), 349 | UNSUPPORTED_FOR_MESSAGE_FORMAT(43, "The message format version on the broker does not support the request.", 350 | new ApiExceptionBuilder() { 351 | @Override 352 | public Exception build(String message) { 353 | return new Exception(message); 354 | } 355 | }), 356 | POLICY_VIOLATION(44, "Request parameters do not satisfy the configured policy.", 357 | new ApiExceptionBuilder() { 358 | @Override 359 | public Exception build(String message) { 360 | return new Exception(message); 361 | } 362 | }), 363 | OUT_OF_ORDER_SEQUENCE_NUMBER(45, "The broker received an out of order sequence number", 364 | new ApiExceptionBuilder() { 365 | @Override 366 | public Exception build(String message) { 367 | return new Exception(message); 368 | } 369 | }), 370 | DUPLICATE_SEQUENCE_NUMBER(46, "The broker received a duplicate sequence number", 371 | new ApiExceptionBuilder() { 372 | @Override 373 | public Exception build(String message) { 374 | return new Exception(message); 375 | } 376 | }), 377 | INVALID_PRODUCER_EPOCH(47, "Producer attempted an operation with an old epoch. Either there is a newer producer " + 378 | "with the same transactionalId, or the producer's transaction has been expired by the broker.", 379 | new ApiExceptionBuilder() { 380 | @Override 381 | public Exception build(String message) { 382 | return new Exception(message); 383 | } 384 | }), 385 | INVALID_TXN_STATE(48, "The producer attempted a transactional operation in an invalid state", 386 | new ApiExceptionBuilder() { 387 | @Override 388 | public Exception build(String message) { 389 | return new Exception(message); 390 | } 391 | }), 392 | INVALID_PRODUCER_ID_MAPPING(49, "The producer attempted to use a producer id which is not currently assigned to " + 393 | "its transactional id", 394 | new ApiExceptionBuilder() { 395 | @Override 396 | public Exception build(String message) { 397 | return new Exception(message); 398 | } 399 | }), 400 | INVALID_TRANSACTION_TIMEOUT(50, "The transaction timeout is larger than the maximum value allowed by " + 401 | "the broker (as configured by max.transaction.timeout.ms).", 402 | new ApiExceptionBuilder() { 403 | @Override 404 | public Exception build(String message) { 405 | return new Exception(message); 406 | } 407 | }), 408 | CONCURRENT_TRANSACTIONS(51, "The producer attempted to update a transaction " + 409 | "while another concurrent operation on the same transaction was ongoing", 410 | new ApiExceptionBuilder() { 411 | @Override 412 | public Exception build(String message) { 413 | return new Exception(message); 414 | } 415 | }), 416 | TRANSACTION_COORDINATOR_FENCED(52, "Indicates that the transaction coordinator sending a WriteTxnMarker " + 417 | "is no longer the current coordinator for a given producer", 418 | new ApiExceptionBuilder() { 419 | @Override 420 | public Exception build(String message) { 421 | return new Exception(message); 422 | } 423 | }), 424 | TRANSACTIONAL_ID_AUTHORIZATION_FAILED(53, "Transactional Id authorization failed", 425 | new ApiExceptionBuilder() { 426 | @Override 427 | public Exception build(String message) { 428 | return new Exception(message); 429 | } 430 | }), 431 | SECURITY_DISABLED(54, "Security features are disabled.", new ApiExceptionBuilder() { 432 | @Override 433 | public Exception build(String message) { 434 | return new Exception(message); 435 | } 436 | 
}), 437 | OPERATION_NOT_ATTEMPTED(55, "The broker did not attempt to execute this operation. This may happen for batched RPCs " + 438 | "where some operations in the batch failed, causing the broker to respond without trying the rest.", 439 | new ApiExceptionBuilder() { 440 | @Override 441 | public Exception build(String message) { 442 | return new Exception(message); 443 | } 444 | }), 445 | KAFKA_STORAGE_ERROR(56, "Disk error when trying to access log file on the disk.", 446 | new ApiExceptionBuilder() { 447 | @Override 448 | public Exception build(String message) { 449 | return new Exception(message); 450 | } 451 | }), 452 | LOG_DIR_NOT_FOUND(57, "The user-specified log directory is not found in the broker config.", 453 | new ApiExceptionBuilder() { 454 | @Override 455 | public Exception build(String message) { 456 | return new Exception(message); 457 | } 458 | }), 459 | SASL_AUTHENTICATION_FAILED(58, "SASL Authentication failed.", 460 | new ApiExceptionBuilder() { 461 | @Override 462 | public Exception build(String message) { 463 | return new Exception(message); 464 | } 465 | }), 466 | UNKNOWN_PRODUCER_ID(59, "This exception is raised by the broker if it could not locate the producer metadata " + 467 | "associated with the producerId in question. This could happen if, for instance, the producer's records " + 468 | "were deleted because their retention time had elapsed. Once the last records of the producerId are " + 469 | "removed, the producer's metadata is removed from the broker, and future appends by the producer will " + 470 | "return this exception.", 471 | new ApiExceptionBuilder() { 472 | @Override 473 | public Exception build(String message) { 474 | return new Exception(message); 475 | } 476 | }), 477 | REASSIGNMENT_IN_PROGRESS(60, "A partition reassignment is in progress", 478 | new ApiExceptionBuilder() { 479 | @Override 480 | public Exception build(String message) { 481 | return new Exception(message); 482 | } 483 | }); 484 | 485 | private interface ApiExceptionBuilder { 486 | Exception build(String message); 487 | } 488 | 489 | 490 | private static Map, Errors> classToError = new HashMap<>(); 491 | private static Map codeToError = new HashMap<>(); 492 | 493 | static { 494 | for (Errors error : Errors.values()) { 495 | codeToError.put(error.code(), error); 496 | if (error.exception != null) 497 | classToError.put(error.exception.getClass(), error); 498 | } 499 | } 500 | 501 | private final short code; 502 | private final ApiExceptionBuilder builder; 503 | private final Exception exception; 504 | 505 | Errors(int code, String defaultExceptionString, ApiExceptionBuilder builder) { 506 | this.code = (short) code; 507 | this.builder = builder; 508 | this.exception = builder.build(defaultExceptionString); 509 | } 510 | 511 | /** 512 | * An instance of the exception 513 | */ 514 | public Exception exception() { 515 | return this.exception; 516 | } 517 | 518 | /** 519 | * Create an instance of the Exception that contains the given error message. 520 | * 521 | * @param message The message string to set. 522 | * @return The exception. 523 | */ 524 | public Exception exception(String message) { 525 | if (message == null) { 526 | // If no error message was specified, return an exception with the default error message. 527 | return exception; 528 | } 529 | // Return an exception with the given error message. 530 | return builder.build(message); 531 | } 532 | 533 | /** 534 | * Returns the class name of the exception or null if this is {@code Errors.NONE}. 
535 | */ 536 | public String exceptionName() { 537 | return exception == null ? null : exception.getClass().getName(); 538 | } 539 | 540 | /** 541 | * The error code for the exception 542 | */ 543 | public short code() { 544 | return this.code; 545 | } 546 | 547 | 548 | 549 | /** 550 | * Get a friendly description of the error (if one is available). 551 | * @return the error message 552 | */ 553 | public String message() { 554 | if (exception != null) 555 | return exception.getMessage(); 556 | return toString(); 557 | } 558 | 559 | /** 560 | * Throw the exception if there is one 561 | */ 562 | public static Errors forCode(short code) { 563 | Errors error = codeToError.get(code); 564 | if (error != null) { 565 | return error; 566 | } else { 567 | return UNKNOWN_SERVER_ERROR; 568 | } 569 | } 570 | 571 | /** 572 | * Return the error instance associated with this exception or any of its superclasses (or UNKNOWN if there is none). 573 | * If there are multiple matches in the class hierarchy, the first match starting from the bottom is used. 574 | */ 575 | public static Errors forException(Throwable t) { 576 | Class clazz = t.getClass(); 577 | while (clazz != null) { 578 | Errors error = classToError.get(clazz); 579 | if (error != null) 580 | return error; 581 | clazz = clazz.getSuperclass(); 582 | } 583 | return UNKNOWN_SERVER_ERROR; 584 | } 585 | 586 | } 587 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/utils/Crc32C.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package com.north.netty.kafka.utils; 19 | 20 | import java.util.zip.Checksum; 21 | 22 | /** 23 | * A class that can be used to compute the CRC32C (Castagnoli) of a ByteBuffer or array of bytes. 24 | * 25 | * We use java.util.zip.CRC32C (introduced in Java 9) if it is available and fallback to PureJavaCrc32C, otherwise. 26 | * java.util.zip.CRC32C is significantly faster on reasonably modern CPUs as it uses the CRC32 instruction introduced 27 | * in SSE4.2. 28 | * 29 | * NOTE: This class is intended for INTERNAL usage only within Kafka. 
30 | */ 31 | public final class Crc32C { 32 | 33 | private static final ChecksumFactory CHECKSUM_FACTORY = new PureJavaChecksumFactory(); 34 | 35 | 36 | private Crc32C() {} 37 | 38 | /** 39 | * Compute the CRC32C (Castagnoli) of the segment of the byte array given by the specified size and offset 40 | * 41 | * @param bytes The bytes to checksum 42 | * @param offset the offset at which to begin the checksum computation 43 | * @param size the number of bytes to checksum 44 | * @return The CRC32C 45 | */ 46 | public static long compute(byte[] bytes, int offset, int size) { 47 | Checksum crc = create(); 48 | crc.update(bytes, offset, size); 49 | return crc.getValue(); 50 | } 51 | 52 | 53 | public static Checksum create() { 54 | return CHECKSUM_FACTORY.create(); 55 | } 56 | 57 | private interface ChecksumFactory { 58 | Checksum create(); 59 | } 60 | 61 | private static class PureJavaChecksumFactory implements ChecksumFactory { 62 | @Override 63 | public Checksum create() { 64 | return new PureJavaCrc32C(); 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/utils/SimplePartitioner.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.utils; 2 | 3 | public class SimplePartitioner { 4 | public static int getPartion(String topic, byte [] key, byte[] val){ 5 | // 简单地返回0 6 | return 0; 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/utils/StringSerializer.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.kafka.utils; 2 | 3 | import java.io.UnsupportedEncodingException; 4 | 5 | public class StringSerializer { 6 | 7 | public static byte[] getBytes(String str){ 8 | if(str == null){ 9 | return null; 10 | } 11 | try { 12 | return str.getBytes("UTF8"); 13 | } catch (UnsupportedEncodingException e) { 14 | e.printStackTrace(); 15 | } 16 | return null; 17 | } 18 | 19 | public static String getString(byte [] bytes) { 20 | if(bytes == null){ 21 | return null; 22 | } 23 | try { 24 | return new String(bytes, "UTF8"); 25 | } catch (UnsupportedEncodingException e) { 26 | e.printStackTrace(); 27 | } 28 | return null; 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/main/java/com/north/netty/kafka/utils/VarLengthUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | package com.north.netty.kafka.utils; 18 | 19 | import io.netty.buffer.ByteBuf; 20 | 21 | /** 22 | * 可变长度变量的转化工具 23 | * 24 | * @author kafka 25 | */ 26 | public final class VarLengthUtils { 27 | 28 | public static final int NULL_VARINT_SIZE_BYTES = VarLengthUtils.sizeOfVarint(-1); 29 | 30 | private VarLengthUtils() {} 31 | 32 | 33 | /** 34 | * Read an integer stored in variable-length format using zig-zag decoding from 35 | * ByteBuf 36 | * 37 | * @param buffer The buffer to read from 38 | * @return The integer read 39 | * 40 | * @throws IllegalArgumentException if variable-length value does not terminate after 5 bytes have been read 41 | */ 42 | public static int readVarint(ByteBuf buffer) { 43 | int value = 0; 44 | int i = 0; 45 | int b; 46 | while (((b = buffer.readByte()) & 0x80) != 0) { 47 | value |= (b & 0x7f) << i; 48 | i += 7; 49 | if (i > 28) { 50 | throw new IllegalArgumentException("illegal value : " + value); 51 | } 52 | } 53 | value |= b << i; 54 | return (value >>> 1) ^ -(value & 1); 55 | } 56 | 57 | 58 | /** 59 | * Read a long stored in variable-length format using zig-zag decoding from 60 | * ByteBuf 61 | * 62 | * @param buffer The buffer to read from 63 | * @return The long value read 64 | * 65 | * @throws IllegalArgumentException if variable-length value does not terminate after 10 bytes have been read 66 | */ 67 | public static long readVarlong(ByteBuf buffer) { 68 | long value = 0L; 69 | int i = 0; 70 | long b; 71 | while (((b = buffer.readByte()) & 0x80) != 0) { 72 | value |= (b & 0x7f) << i; 73 | i += 7; 74 | if (i > 63) { 75 | throw new IllegalArgumentException("illegal value : " + value); 76 | } 77 | } 78 | value |= b << i; 79 | return (value >>> 1) ^ -(value & 1); 80 | } 81 | 82 | 83 | public static void writeVarint(int value, ByteBuf out){ 84 | int v = (value << 1) ^ (value >> 31); 85 | while ((v & 0xffffff80) != 0L) { 86 | out.writeByte((v & 0x7f) | 0x80); 87 | v >>>= 7; 88 | } 89 | out.writeByte((byte) v); 90 | } 91 | 92 | 93 | 94 | public static void writeVarlong(long value, ByteBuf out){ 95 | long v = (value << 1) ^ (value >> 63); 96 | while ((v & 0xffffffffffffff80L) != 0L) { 97 | out.writeByte(((int) v & 0x7f) | 0x80); 98 | v >>>= 7; 99 | } 100 | out.writeByte((byte) v); 101 | } 102 | 103 | 104 | 105 | /** 106 | * Number of bytes needed to encode an integer in variable-length format. 107 | * 108 | * @param value The signed value 109 | */ 110 | public static int sizeOfVarint(int value) { 111 | int v = (value << 1) ^ (value >> 31); 112 | int bytes = 1; 113 | while ((v & 0xffffff80) != 0L) { 114 | bytes += 1; 115 | v >>>= 7; 116 | } 117 | return bytes; 118 | } 119 | 120 | /** 121 | * Get the length for UTF8-encoding a string without encoding it first 122 | * 123 | * @param s The string to calculate the length for 124 | * @return The length when serialized 125 | */ 126 | public static int utf8Length(CharSequence s) { 127 | int count = 0; 128 | for (int i = 0, len = s.length(); i < len; i++) { 129 | char ch = s.charAt(i); 130 | if (ch <= 0x7F) { 131 | count++; 132 | } else if (ch <= 0x7FF) { 133 | count += 2; 134 | } else if (Character.isHighSurrogate(ch)) { 135 | count += 4; 136 | ++i; 137 | } else { 138 | count += 3; 139 | } 140 | } 141 | return count; 142 | } 143 | 144 | /** 145 | * Number of bytes needed to encode a long in variable-length format. 
146 | * 147 | * @param value The signed value 148 | */ 149 | public static int sizeOfVarlong(long value) { 150 | long v = (value << 1) ^ (value >> 63); 151 | int bytes = 1; 152 | while ((v & 0xffffffffffffff80L) != 0L) { 153 | bytes += 1; 154 | v >>>= 7; 155 | } 156 | return bytes; 157 | } 158 | 159 | } 160 | -------------------------------------------------------------------------------- /awesome-netty-kafka/src/test/java/test/kafkaClientTest.java: -------------------------------------------------------------------------------- 1 | package test; 2 | 3 | import com.google.gson.Gson; 4 | import com.north.netty.kafka.KafkaClient; 5 | import com.north.netty.kafka.bean.fetch.FetchPartitionHeader; 6 | import com.north.netty.kafka.bean.fetch.FetchPartitionResp; 7 | import com.north.netty.kafka.bean.fetch.FetchResponse; 8 | import com.north.netty.kafka.bean.fetch.FetchTopicResponse; 9 | import com.north.netty.kafka.bean.meta.KafkaMetaResponse; 10 | import com.north.netty.kafka.bean.msg.ConsumerRecord; 11 | import com.north.netty.kafka.bean.msg.KafkaMsgRecordBatch; 12 | import com.north.netty.kafka.bean.msg.KafkaMsgRecordV2; 13 | import com.north.netty.kafka.bean.produce.ProduceResponse; 14 | import com.north.netty.kafka.bean.produce.Record; 15 | import com.north.netty.kafka.config.KafkaConsumerConfig; 16 | import com.north.netty.kafka.config.KafkaProduceConfig; 17 | import com.north.netty.kafka.enums.Errors; 18 | import com.north.netty.kafka.utils.StringSerializer; 19 | import org.junit.Test; 20 | 21 | import java.util.*; 22 | 23 | public class kafkaClientTest { 24 | private final static String host = "localhost"; 25 | private final static int port = 9092; 26 | private final static String topic = "testTopic123"; 27 | 28 | @Test 29 | public void testMetaData(){ 30 | KafkaClient kafkaClient = new KafkaClient("fetchMata-client", host, port); 31 | KafkaMetaResponse response = kafkaClient.fetchMataData(topic); 32 | assert response != null; 33 | System.out.println(new Gson().toJson(response)); 34 | } 35 | 36 | @Test 37 | public void testProducer(){ 38 | KafkaClient kafkaClient = new KafkaClient("producer-111", host, port); 39 | KafkaProduceConfig kafkaConfig = new KafkaProduceConfig(); 40 | // 注意这里设置为0时, broker不会响应任何数据, 但是消息实际上是发送到broker了的 41 | short ack = -1; 42 | kafkaConfig.setAck(ack); 43 | kafkaConfig.setTimeout(30000); 44 | ProduceResponse response = kafkaClient.send(kafkaConfig, topic,"testKey","helloWorld1113"); 45 | assert ack == 0 || response != null; 46 | System.out.println(new Gson().toJson(response)); 47 | } 48 | 49 | @Test 50 | public void testConsumer(){ 51 | // 如果broker上不存在这个topic的话, 直接消费可能会报错, 可以fetch一下metadata, 或先生产消息 52 | // testMetaData(); 53 | // testProducer(); 54 | KafkaClient kafkaClient = new KafkaClient("consumer-111", host, port); 55 | KafkaConsumerConfig consumerConfig = new KafkaConsumerConfig(); 56 | consumerConfig.setMaxBytes(Integer.MAX_VALUE); 57 | consumerConfig.setMaxWaitTime(30000); 58 | consumerConfig.setMinBytes(1); 59 | Map> response = kafkaClient.poll(consumerConfig, topic, 0, 0L); 60 | assert response != null && response.size() > 0; 61 | Set>> entrySet =response.entrySet(); 62 | for(Map.Entry> entry : entrySet){ 63 | Integer partition = entry.getKey(); 64 | System.out.println("partition" + partition + "的数据:"); 65 | for(ConsumerRecord consumerRecord : entry.getValue()){ 66 | System.out.println(new Gson().toJson(consumerRecord)); 67 | } 68 | } 69 | 70 | } 71 | } 72 | -------------------------------------------------------------------------------- 
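The test above exercises the full request/response path: KafkaClient writes a request carrying a fresh correlationId, registers a placeholder response in RequestCacheCenter, and waitForResp polls the Guava cache until KafkaResponseDecoder has filled it in. A common alternative to that polling loop is to park a CompletableFuture per correlationId and complete it from the decoder; the sketch below shows that variant under hypothetical names, and it is not how the repo implements it.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

/** Hypothetical future-based alternative to RequestCacheCenter's polling loop. */
public class PendingRequests<T> {
    private final Map<Integer, CompletableFuture<T>> pending = new ConcurrentHashMap<>();

    /** Called before the request is written to the channel. */
    public CompletableFuture<T> register(int correlationId) {
        CompletableFuture<T> future = new CompletableFuture<>();
        pending.put(correlationId, future);
        return future;
    }

    /** Called by the response decoder once the matching response has been parsed. */
    public void complete(int correlationId, T response) {
        CompletableFuture<T> future = pending.remove(correlationId);
        if (future != null) {
            future.complete(response);
        }
    }

    /** Blocks the calling thread instead of spinning on repeated cache lookups. */
    public T await(CompletableFuture<T> future, long timeoutMs) throws Exception {
        return future.get(timeoutMs, TimeUnit.MILLISECONDS);
    }
}

A caller would register the future, write the request, and then await it with a timeout, so the waiting thread sleeps until the decoder completes the future rather than burning CPU in a while loop.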
/awesome-netty-mysql/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | awesome-netty 7 | org.north.netty 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | awesome-netty-mysql 13 | 14 | 15 | -------------------------------------------------------------------------------- /awesome-netty-redis/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | awesome-netty 7 | org.north.netty 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | awesome-netty-redis 13 | 14 | 15 | 16 | org.apache.maven.plugins 17 | maven-compiler-plugin 18 | 19 | 1.8 20 | 1.8 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | org.north.netty 29 | awesome-netty-common 30 | 1.0-SNAPSHOT 31 | 32 | 33 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/clients/AbstractRedisClient.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.clients; 2 | 3 | import com.north.netty.redis.cmd.Cmd; 4 | import com.north.netty.redis.cmd.CmdResp; 5 | import com.north.netty.redis.config.RedisConfig; 6 | import com.north.netty.redis.connections.ConnectionPool; 7 | import com.north.netty.redis.connections.RedisConnection; 8 | import com.north.netty.redis.enums.ClientType; 9 | import com.north.netty.redis.exceptions.FailedToGetConnectionException; 10 | 11 | /** 12 | * @author laihaohua 13 | */ 14 | public abstract class AbstractRedisClient implements RedisClient { 15 | private ConnectionPool connectionPool; 16 | 17 | protected AbstractRedisClient(ClientType clientType){ 18 | connectionPool = new ConnectionPool<>(clientType); 19 | } 20 | 21 | 22 | protected RETURN invokeCmd(Cmd cmd, CmdResp cmdResp) throws FailedToGetConnectionException{ 23 | RedisConnection connection = null; 24 | try{ 25 | // 构建RESP结构体 26 | T data = cmd.build(); 27 | // 从连接池中borrow连接 28 | connection = connectionPool.borrowConnection(); 29 | if(connectionPool.checkChannel(connection)){ 30 | // 要锁定这个连接 31 | connection.lock(); 32 | try{ 33 | // 发送命令 34 | connection.writeAndFlush(data).sync(); 35 | // 获取命令的返回结果 36 | return cmdResp.parseResp(connection.getResp(RedisConfig.TIME_OUT_MS)); 37 | }catch (Exception e){ 38 | e.printStackTrace(); 39 | }finally { 40 | // 释放锁 41 | connection.unlock(); 42 | } 43 | }else{ 44 | throw new FailedToGetConnectionException("can not get connection form connection pool"); 45 | } 46 | }catch (Exception e){ 47 | e.printStackTrace(); 48 | }finally { 49 | if(connectionPool.checkChannel(connection)){ 50 | // 把连接放回到连接池 51 | connectionPool.returnConnection(connection); 52 | } 53 | } 54 | return null; 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/clients/RedisBinaryClient.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.clients; 2 | 3 | import com.north.netty.redis.cmd.impl.getcmd.binary.GetBinaryCmd; 4 | import com.north.netty.redis.cmd.impl.getcmd.str.GetStringCmd; 5 | import com.north.netty.redis.cmd.impl.setcmd.binary.SetBinaryCmd; 6 | import com.north.netty.redis.enums.ClientType; 7 | import com.north.netty.redis.enums.ExpireMode; 8 | import com.north.netty.redis.enums.Xmode; 9 | import com.north.netty.redis.exceptions.FailedToGetConnectionException; 10 | import io.netty.buffer.ByteBuf; 11 | import io.netty.buffer.PooledByteBufAllocator; 12 | 13 | 
/** 14 | * @author laihaohua 15 | */ 16 | public class RedisBinaryClient extends AbstractRedisClient implements RedisClient{ 17 | private static class InstanceHolder { 18 | static final RedisBinaryClient REDIS_CLIENT = new RedisBinaryClient(); 19 | } 20 | 21 | protected RedisBinaryClient() { 22 | super(ClientType.BINARY); 23 | } 24 | public static RedisBinaryClient getInstance(){ 25 | return InstanceHolder.REDIS_CLIENT; 26 | } 27 | 28 | 29 | @Override 30 | public boolean set(byte [] key, byte [] v){ 31 | SetBinaryCmd setCmd = new SetBinaryCmd(key, v); 32 | try { 33 | return invokeCmd(setCmd, setCmd); 34 | } catch (FailedToGetConnectionException e) { 35 | e.printStackTrace(); 36 | } 37 | return false; 38 | 39 | } 40 | 41 | @Override 42 | public boolean setNx(byte[] key, byte[] v) { 43 | SetBinaryCmd setCmd = new SetBinaryCmd(key, v, Xmode.NX); 44 | try { 45 | return invokeCmd(setCmd, setCmd); 46 | } catch (FailedToGetConnectionException e) { 47 | e.printStackTrace(); 48 | } 49 | return false; 50 | } 51 | 52 | @Override 53 | public boolean setWithExpireTime(byte[] key, byte[] v, long seconds) { 54 | SetBinaryCmd setCmd = new SetBinaryCmd(key, v, ExpireMode.EX, seconds); 55 | try { 56 | return invokeCmd(setCmd, setCmd); 57 | } catch (FailedToGetConnectionException e) { 58 | e.printStackTrace(); 59 | } 60 | return false; 61 | } 62 | 63 | @Override 64 | public boolean set(byte[] key, byte[] v, ExpireMode expireMode, long expireTime, Xmode x) { 65 | SetBinaryCmd setCmd = new SetBinaryCmd(key, v, ExpireMode.EX, expireTime, x); 66 | try { 67 | return invokeCmd(setCmd, setCmd); 68 | } catch (FailedToGetConnectionException e) { 69 | e.printStackTrace(); 70 | } 71 | return false; 72 | } 73 | 74 | @Override 75 | public byte[] get(byte [] key){ 76 | GetBinaryCmd getBinaryCmd = new GetBinaryCmd(key); 77 | try { 78 | byte [] resp = invokeCmd(getBinaryCmd, getBinaryCmd); 79 | return resp; 80 | } catch (FailedToGetConnectionException e) { 81 | e.printStackTrace(); 82 | } 83 | return null; 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/clients/RedisClient.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.clients; 2 | 3 | import com.north.netty.redis.enums.ExpireMode; 4 | import com.north.netty.redis.enums.Xmode; 5 | 6 | /** 7 | * @author laihaohua 8 | */ 9 | public interface RedisClient { 10 | /** 11 | * set 命令 没有过期时间 12 | * @param key 13 | * @param v 14 | * @return 15 | */ 16 | boolean set(T key, T v); 17 | /** 18 | * SETNX 命令 19 | * @param key 20 | * @param v 21 | * @return 22 | */ 23 | boolean setNx(T key, T v); 24 | 25 | 26 | /** 27 | * 带有过期时间的set命令 28 | * @param key 29 | * @param v 30 | * @param seconds 31 | * @return 32 | */ 33 | boolean setWithExpireTime(T key, T v, long seconds); 34 | 35 | /** 36 | * set key value [EX seconds] [PX milliseconds] [NX|XX] 37 | * @param key 38 | * @param v 39 | * @param expireMode 40 | * @param expireTime 41 | * @param x 42 | * @return 43 | */ 44 | boolean set(T key, T v, ExpireMode expireMode, long expireTime, Xmode x); 45 | 46 | /** 47 | * get命令 48 | * @param key 49 | * @return 50 | */ 51 | T get(T key); 52 | 53 | 54 | } 55 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/clients/RedisStringClient.java: -------------------------------------------------------------------------------- 1 | package 
com.north.netty.redis.clients; 2 | 3 | import com.north.netty.redis.cmd.impl.getcmd.str.GetStringCmd; 4 | import com.north.netty.redis.cmd.impl.setcmd.str.SetStringCmd; 5 | import com.north.netty.redis.enums.ClientType; 6 | import com.north.netty.redis.enums.ExpireMode; 7 | import com.north.netty.redis.enums.Xmode; 8 | import com.north.netty.redis.exceptions.FailedToGetConnectionException; 9 | 10 | /** 11 | * @author laihaohua 12 | */ 13 | public class RedisStringClient extends AbstractRedisClient implements RedisClient { 14 | protected RedisStringClient() { 15 | super(ClientType.STRING); 16 | } 17 | 18 | private static class InstanceHolder{ 19 | public static final RedisStringClient CLIENT = new RedisStringClient(); 20 | } 21 | 22 | public static RedisStringClient getInstance(){ 23 | return InstanceHolder.CLIENT; 24 | } 25 | 26 | @Override 27 | public boolean set(String key, String v){ 28 | SetStringCmd setCmd = new SetStringCmd(key, v); 29 | try { 30 | return invokeCmd(setCmd, setCmd); 31 | } catch (FailedToGetConnectionException e) { 32 | e.printStackTrace(); 33 | } 34 | return false; 35 | } 36 | 37 | @Override 38 | public boolean setNx(String key, String v) { 39 | SetStringCmd setCmd = new SetStringCmd(key, v, Xmode.NX); 40 | try { 41 | return invokeCmd(setCmd, setCmd); 42 | } catch (FailedToGetConnectionException e) { 43 | e.printStackTrace(); 44 | } 45 | return false; 46 | } 47 | 48 | @Override 49 | public boolean setWithExpireTime(String key, String v, long seconds) { 50 | SetStringCmd setCmd = new SetStringCmd(key, v,ExpireMode.EX, seconds); 51 | try { 52 | return invokeCmd(setCmd, setCmd); 53 | } catch (FailedToGetConnectionException e) { 54 | e.printStackTrace(); 55 | } 56 | return false; 57 | } 58 | 59 | @Override 60 | public boolean set(String key, String v, ExpireMode expireMode, long expireTime, Xmode x) { 61 | SetStringCmd setCmd = new SetStringCmd(key, v,ExpireMode.EX, expireTime, Xmode.NX); 62 | try { 63 | return invokeCmd(setCmd, setCmd); 64 | } catch (FailedToGetConnectionException e) { 65 | e.printStackTrace(); 66 | } 67 | return false; 68 | } 69 | 70 | @Override 71 | public String get(String key){ 72 | GetStringCmd getStringCmd = new GetStringCmd(key); 73 | try { 74 | return invokeCmd(getStringCmd, getStringCmd); 75 | } catch (FailedToGetConnectionException e) { 76 | e.printStackTrace(); 77 | } 78 | return null; 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/AbstractCmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd; 2 | 3 | import java.util.List; 4 | 5 | /** 6 | * @author laihaohua 7 | */ 8 | public abstract class AbstractCmd implements Cmd { 9 | /** 10 | * 具体是什么命令, 例如get set等待 11 | */ 12 | protected String cmd; 13 | /** 14 | * 这个命令后面的参数 15 | */ 16 | protected List paramList; 17 | /** 18 | * redis命令 19 | * @return 20 | */ 21 | protected abstract String getCmd(); 22 | 23 | } 24 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/Cmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public interface Cmd { 7 | /** 8 | * 构建RESP 协议 9 | * @return 10 | */ 11 | PT build(); 12 | } 13 | -------------------------------------------------------------------------------- 
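A quick illustration of the RESP framing these command classes exchange with Redis. This is a standalone sketch that mirrors the "*<argc>\r\n" plus "$<len>\r\n<arg>\r\n" layout built by CmdBuildUtils further below; it is not the project's own utility:

public class RespSetExample {
    public static void main(String[] args) {
        String[] parts = {"set", "foo", "bar"};
        StringBuilder resp = new StringBuilder("*").append(parts.length).append("\r\n");
        for (String p : parts) {
            // every argument is a bulk string: $<byte length>\r\n<bytes>\r\n
            resp.append("$").append(p.getBytes().length).append("\r\n").append(p).append("\r\n");
        }
        System.out.print(resp);   // *3 $3 set $3 foo $3 bar, each element followed by CRLF
    }
}

A successful SET comes back as the simple string "+OK\r\n", which is exactly the leading '+' that SetStringCmd.parseResp and SetBinaryCmd.parseResp test for.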
/awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/CmdResp.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd; 2 | 3 | 4 | /** 5 | * @author laihaohua 6 | */ 7 | public interface CmdResp { 8 | /** 9 | * 解析redis的resp 10 | * @param resp 11 | * @return 12 | */ 13 | RETURN parseResp(PARAM resp); 14 | } 15 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/impl/getcmd/AbstractGetCmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd.impl.getcmd; 2 | 3 | import com.north.netty.redis.cmd.AbstractCmd; 4 | 5 | import java.util.ArrayList; 6 | 7 | /** 8 | * get命令的抽象类 9 | * 命令参数 10 | * get key 11 | * @author laihaohua 12 | */ 13 | public abstract class AbstractGetCmd extends AbstractCmd{ 14 | 15 | public AbstractGetCmd(T key){ 16 | super(); 17 | super.paramList = new ArrayList<>(); 18 | paramList.add(key); 19 | } 20 | 21 | @Override 22 | protected String getCmd() { 23 | return super.cmd = "get"; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/impl/getcmd/binary/GetBinaryCmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd.impl.getcmd.binary; 2 | 3 | import com.north.netty.redis.cmd.CmdResp; 4 | import com.north.netty.redis.cmd.impl.getcmd.AbstractGetCmd; 5 | import com.north.netty.redis.utils.CmdBuildUtils; 6 | 7 | import java.util.Arrays; 8 | 9 | /** 10 | * @author laihaohua 11 | */ 12 | public class GetBinaryCmd extends AbstractGetCmd implements CmdResp { 13 | 14 | public GetBinaryCmd(byte[] key) { 15 | super(key); 16 | } 17 | 18 | @Override 19 | public byte[] build() { 20 | return CmdBuildUtils.buildBinary(getCmd(), paramList); 21 | } 22 | 23 | @Override 24 | public byte[] parseResp(byte[] resp) { 25 | if(resp == null){ 26 | return null; 27 | } 28 | int index = 1; 29 | int len = (int)resp[index++] - 48; 30 | if(len <= 0){ 31 | return null; 32 | } 33 | // 跳过 \r\n 34 | index += 2; 35 | return Arrays.copyOfRange(resp, index, index + len); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/impl/getcmd/str/GetStringCmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd.impl.getcmd.str; 2 | 3 | import com.north.netty.redis.cmd.CmdResp; 4 | import com.north.netty.redis.cmd.impl.getcmd.AbstractGetCmd; 5 | import com.north.netty.redis.utils.CmdBuildUtils; 6 | import com.north.netty.redis.utils.SymbolUtils; 7 | 8 | /** 9 | * @author laihaohua 10 | */ 11 | public class GetStringCmd extends AbstractGetCmd implements CmdResp { 12 | public GetStringCmd(String key) { 13 | super(key); 14 | } 15 | 16 | @Override 17 | public String build() { 18 | return CmdBuildUtils.buildString(getCmd(), paramList); 19 | } 20 | 21 | @Override 22 | public String parseResp(String resp) { 23 | if(resp == null){ 24 | return null; 25 | } 26 | // 跳过第一位符号位 27 | int index = 1; 28 | int len = 0; 29 | char ch = resp.charAt(index); 30 | while(ch != SymbolUtils.CRLF.charAt(0)) { 31 | len = (len*10)+(ch - '0'); 32 | ch = resp.charAt(++index); 33 | } 34 | if(len <= 0){ 35 | return null; 36 | } 37 | index+=2; 38 | return 
resp.substring(index, index + len); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/impl/setcmd/AbstractSetCmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd.impl.setcmd; 2 | 3 | import com.north.netty.redis.cmd.AbstractCmd; 4 | 5 | import java.util.ArrayList; 6 | 7 | /** 8 | * set命令的抽象类 9 | * 命令参数 10 | * set key value [EX seconds] [PX milliseconds] [NX|XX] 11 | * @author laihaohua 12 | */ 13 | public abstract class AbstractSetCmd extends AbstractCmd { 14 | 15 | public AbstractSetCmd(T key, T value, T expireMode, T expireTime, T xmode){ 16 | super(); 17 | super.paramList = new ArrayList<>(); 18 | paramList.add(key); 19 | paramList.add(value); 20 | // 设置了过期时间 21 | if(expireMode != null){ 22 | paramList.add(expireMode); 23 | paramList.add(expireTime); 24 | } 25 | // 设置了 XX或NX 26 | if(xmode != null){ 27 | paramList.add(xmode); 28 | } 29 | } 30 | 31 | @Override 32 | protected String getCmd() { 33 | return super.cmd = "set"; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/impl/setcmd/binary/SetBinaryCmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd.impl.setcmd.binary; 2 | 3 | import com.north.netty.redis.cmd.CmdResp; 4 | import com.north.netty.redis.cmd.impl.setcmd.AbstractSetCmd; 5 | import com.north.netty.redis.enums.ExpireMode; 6 | import com.north.netty.redis.enums.Xmode; 7 | import com.north.netty.redis.utils.CmdBuildUtils; 8 | import com.north.netty.redis.utils.EncodeUtils; 9 | import com.north.netty.redis.utils.SymbolUtils; 10 | 11 | /** 12 | * 命令参数 13 | * set key value [EX seconds] [PX milliseconds] [NX|XX] 14 | * @author laihaohua 15 | */ 16 | public class SetBinaryCmd extends AbstractSetCmd implements CmdResp { 17 | /** 18 | * 没有过期时间 19 | * @param key 20 | * @param value 21 | */ 22 | public SetBinaryCmd(byte [] key, byte [] value){ 23 | this(key, value, null, 0, null); 24 | } 25 | public SetBinaryCmd(byte [] key, byte [] value, ExpireMode expireMode, long expireTime){ 26 | this(key, value, expireMode, expireTime, null); 27 | 28 | } 29 | public SetBinaryCmd(byte [] key, byte [] value, Xmode xmode){ 30 | this(key, value, null, 0, xmode); 31 | } 32 | public SetBinaryCmd(byte [] key, byte [] value, ExpireMode expireMode, long expireTime, Xmode xmode){ 33 | super( key, 34 | value, 35 | EncodeUtils.getBytes(expireMode), 36 | String.valueOf(expireTime).getBytes(), 37 | EncodeUtils.getBytes(xmode)); 38 | } 39 | 40 | @Override 41 | public byte[] build() { 42 | return CmdBuildUtils.buildBinary(getCmd(), paramList); 43 | } 44 | 45 | @Override 46 | public Boolean parseResp(byte[] resp){ 47 | if(resp == null){ 48 | return false; 49 | } 50 | if(resp[0] == SymbolUtils.OK_PLUS_BYTE[0]){ 51 | return true; 52 | } 53 | System.err.println("resp = [" + new String(resp) + "]"); 54 | return false; 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/cmd/impl/setcmd/str/SetStringCmd.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.cmd.impl.setcmd.str; 2 | 3 | import com.north.netty.redis.cmd.CmdResp; 4 | import 
com.north.netty.redis.cmd.impl.setcmd.AbstractSetCmd; 5 | import com.north.netty.redis.enums.ExpireMode; 6 | import com.north.netty.redis.enums.Xmode; 7 | import com.north.netty.redis.utils.CmdBuildUtils; 8 | import com.north.netty.redis.utils.SymbolUtils; 9 | 10 | /** 11 | * 命令参数 12 | * set key value [EX seconds] [PX milliseconds] [NX|XX] 13 | * @author laihaohua 14 | */ 15 | public class SetStringCmd extends AbstractSetCmd implements CmdResp { 16 | /** 17 | * 没有过期时间 18 | * @param key 19 | * @param value 20 | */ 21 | public SetStringCmd(String key, String value){ 22 | this(key, value, null, 0, null); 23 | } 24 | 25 | /** 26 | * 27 | * @param key 28 | * @param value 29 | * @param expireMode 30 | * @param expireTime 31 | */ 32 | public SetStringCmd(String key, String value, ExpireMode expireMode, long expireTime){ 33 | this(key, value, expireMode, expireTime, null); 34 | 35 | } 36 | public SetStringCmd(String key, String value, Xmode xmode){ 37 | this(key, value, null, 0, xmode); 38 | } 39 | public SetStringCmd(String key, String value, ExpireMode expireMode, long expireTime, Xmode xmode){ 40 | super( key, 41 | value , 42 | expireMode == null ? null : expireMode.getType(), 43 | String.valueOf(expireTime), 44 | xmode == null ? null : xmode.getType() ); 45 | } 46 | 47 | 48 | /** 49 | * 构建请求参数RESP 50 | * @return 51 | */ 52 | @Override 53 | public String build() { 54 | return CmdBuildUtils.buildString(getCmd(), paramList); 55 | } 56 | 57 | /** 58 | * 解析redis返回的RESP 59 | * @param resp 60 | * @return 61 | */ 62 | @Override 63 | public Boolean parseResp(String resp) { 64 | char ch = resp.charAt(0); 65 | // 一般返回 +OK 就是成功 66 | if(ch == SymbolUtils.OK_PLUS.charAt(0)){ 67 | return true; 68 | } 69 | // 其他的都是失败 70 | return false; 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/codecs/ByteBufToByteDecoder.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.codecs; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import io.netty.channel.ChannelHandlerContext; 5 | import io.netty.handler.codec.ByteToMessageDecoder; 6 | 7 | import java.util.List; 8 | 9 | /** 10 | * @author laihaohua 11 | */ 12 | public class ByteBufToByteDecoder extends ByteToMessageDecoder { 13 | @Override 14 | protected void decode(ChannelHandlerContext ctx, ByteBuf in, List out) throws Exception { 15 | byte [] bytes = new byte[in.writerIndex()]; 16 | in.readBytes(bytes); 17 | out.add(bytes); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/codecs/ByteToByteBufEncoder.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.codecs; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import io.netty.channel.ChannelHandlerContext; 5 | import io.netty.handler.codec.MessageToByteEncoder; 6 | 7 | /** 8 | * @author laihaohua 9 | */ 10 | public class ByteToByteBufEncoder extends MessageToByteEncoder { 11 | 12 | @Override 13 | protected void encode(ChannelHandlerContext ctx, byte[] msg, ByteBuf out) throws Exception { 14 | if(msg == null){ 15 | return; 16 | } 17 | out.writeBytes(msg); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/codecs/RedisRespHandler.java: 
-------------------------------------------------------------------------------- 1 | package com.north.netty.redis.codecs; 2 | 3 | import io.netty.channel.ChannelHandlerContext; 4 | import io.netty.channel.ChannelInboundHandlerAdapter; 5 | 6 | import java.util.concurrent.SynchronousQueue; 7 | 8 | /** 9 | * @author laihaohua 10 | */ 11 | public class RedisRespHandler extends ChannelInboundHandlerAdapter { 12 | private SynchronousQueue synchronousQueue; 13 | 14 | public RedisRespHandler(SynchronousQueue synchronousQueue) { 15 | this.synchronousQueue = synchronousQueue; 16 | } 17 | 18 | @Override 19 | public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { 20 | super.channelRead(ctx, msg); 21 | synchronousQueue.put((T)msg); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/config/RedisConfig.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.config; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public class RedisConfig { 7 | public static String host = "localhost"; 8 | public static int port = 6379; 9 | /** 10 | * 每个client创建的连接数 11 | */ 12 | public static int connectionCount = 10; 13 | 14 | /** 15 | * 超时时间 ms 16 | */ 17 | public static int TIME_OUT_MS = 5000; 18 | } 19 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/connections/ConnectionPool.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.connections; 2 | 3 | import com.north.netty.redis.codecs.ByteBufToByteDecoder; 4 | import com.north.netty.redis.codecs.ByteToByteBufEncoder; 5 | import com.north.netty.redis.codecs.RedisRespHandler; 6 | import com.north.netty.redis.config.RedisConfig; 7 | import com.north.netty.redis.enums.ClientType; 8 | import io.netty.bootstrap.Bootstrap; 9 | import io.netty.channel.*; 10 | import io.netty.channel.nio.NioEventLoopGroup; 11 | import io.netty.channel.socket.nio.NioSocketChannel; 12 | import io.netty.handler.codec.string.StringDecoder; 13 | import io.netty.handler.codec.string.StringEncoder; 14 | 15 | import java.util.concurrent.BlockingQueue; 16 | import java.util.concurrent.LinkedBlockingQueue; 17 | import java.util.concurrent.SynchronousQueue; 18 | 19 | /** 20 | * @author laihaohua 21 | */ 22 | public class ConnectionPool { 23 | private BlockingQueue> connections; 24 | 25 | public ConnectionPool(ClientType clientType){ 26 | this.connections = new LinkedBlockingQueue<>(RedisConfig.connectionCount); 27 | init(clientType); 28 | } 29 | private void init(final ClientType clientType){ 30 | Bootstrap bootstrap = new Bootstrap(); 31 | EventLoopGroup eventLoopGroup = new NioEventLoopGroup(); 32 | bootstrap.group(eventLoopGroup); 33 | bootstrap.channel(NioSocketChannel.class); 34 | bootstrap.option(ChannelOption.SO_KEEPALIVE, true); 35 | 36 | try { 37 | int count = RedisConfig.connectionCount; 38 | int tryCount = RedisConfig.connectionCount * 2; 39 | while(connections.size() < count && tryCount-- > 0){ 40 | // 公平的同步队列 传到RedisRespHandler中用于异步获取返回的数据 41 | final SynchronousQueue synchronousQueue = new SynchronousQueue<>(true); 42 | bootstrap.handler(new ChannelInitializer() { 43 | @Override 44 | protected void initChannel(NioSocketChannel ch) throws Exception { 45 | if(clientType.equals(ClientType.STRING)){ 46 | // string 类型的的kv 47 | ch.pipeline() 48 | 
.addLast("stringEncoder", new StringEncoder()) 49 | .addLast("stringDecoder", new StringDecoder()) 50 | .addLast("redisRespHandler", new RedisRespHandler<>(synchronousQueue)); 51 | }else if(clientType.equals(ClientType.BINARY)){ 52 | // byte[] 类型的kv, 发送时先用byteToByteBufEncoder 转成ByteBuf, 发送到redis, 53 | // 返回数据的处理链 byteBufToByteDecoder -> redisRespHandler 54 | ch.pipeline() 55 | .addLast("byteBufToByteDecoder", new ByteBufToByteDecoder()) 56 | .addLast("redisRespHandler", new RedisRespHandler<>(synchronousQueue)); 57 | ch.pipeline().addLast("byteToByteBufEncoder", new ByteToByteBufEncoder()); 58 | }else{ 59 | throw new IllegalArgumentException(); 60 | } 61 | } 62 | }); 63 | ChannelFuture channelFuture = bootstrap.connect(RedisConfig.host, RedisConfig.port).sync(); 64 | Channel channel = channelFuture.channel(); 65 | if(channel.isActive()){ 66 | String name = "connect-" + connections.size(); 67 | this.connections.add(new RedisConnection<>(name, (NioSocketChannel)channel, synchronousQueue)); 68 | } 69 | } 70 | if(connections.size() != count){ 71 | throw new IllegalStateException(""); 72 | } 73 | } catch (InterruptedException e) { 74 | e.printStackTrace(); 75 | } 76 | } 77 | 78 | public RedisConnection borrowConnection(){ 79 | try { 80 | RedisConnection connection = connections.take(); 81 | System.out.println("borrowConnection :" + connection.getName()); 82 | return connection; 83 | } catch (InterruptedException e) { 84 | e.printStackTrace(); 85 | } 86 | return null; 87 | } 88 | 89 | public void returnConnection(RedisConnection channel){ 90 | // 清除 91 | channel.cleanUp(); 92 | boolean flag = connections.offer(channel); 93 | if(!flag){ 94 | // try again 95 | flag = connections.offer(channel); 96 | } 97 | if(!flag){ 98 | channel.disconnect(); 99 | channel.close(); 100 | } 101 | } 102 | 103 | public boolean checkChannel(RedisConnection channel){ 104 | return channel != null && channel.isActive(); 105 | } 106 | 107 | } 108 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/connections/RedisConnection.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.connections; 2 | 3 | import io.netty.channel.ChannelFuture; 4 | import io.netty.channel.socket.nio.NioSocketChannel; 5 | 6 | import java.util.concurrent.SynchronousQueue; 7 | import java.util.concurrent.TimeUnit; 8 | import java.util.concurrent.locks.Lock; 9 | import java.util.concurrent.locks.ReentrantLock; 10 | 11 | /** 12 | * @author laihaohua 13 | */ 14 | public class RedisConnection{ 15 | private NioSocketChannel socketChannel; 16 | private Lock lock = new ReentrantLock(); 17 | private SynchronousQueue synchronousQueue; 18 | private String name; 19 | public RedisConnection(String name, NioSocketChannel socketChannel, SynchronousQueue synchronousQueue){ 20 | this.name = name; 21 | this.socketChannel = socketChannel; 22 | this.synchronousQueue = synchronousQueue; 23 | } 24 | public void cleanUp(){ 25 | } 26 | public T getResp(long timeout) throws InterruptedException { 27 | return synchronousQueue.poll(timeout, TimeUnit.MILLISECONDS); 28 | } 29 | 30 | public String getName() { 31 | return name; 32 | } 33 | 34 | public void setName(String name) { 35 | this.name = name; 36 | } 37 | 38 | public void lock() { 39 | lock.lock(); 40 | } 41 | public void unlock() { 42 | lock.unlock(); 43 | } 44 | 45 | /***********************代理channel的几个方法*******************************/ 46 | public ChannelFuture 
writeAndFlush(Object msg) { 47 | return socketChannel.writeAndFlush(msg); 48 | } 49 | 50 | public void disconnect() { 51 | socketChannel.disconnect(); 52 | } 53 | 54 | public void close() { 55 | socketChannel.close(); 56 | } 57 | 58 | public boolean isActive() { 59 | return socketChannel.isActive(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/enums/ClientType.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.enums; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | 7 | public enum ClientType { 8 | /** 9 | * 字符串 10 | */ 11 | STRING, 12 | /** 13 | * 二进制 14 | */ 15 | BINARY; 16 | } 17 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/enums/ExpireMode.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.enums; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | 7 | public enum ExpireMode { 8 | /** 9 | * 过期时间是毫秒 10 | */ 11 | PX("PX"), 12 | /** 13 | * 过期时间是秒 14 | */ 15 | EX("EX"); 16 | private String type; 17 | ExpireMode(String type){ 18 | this.type = type; 19 | } 20 | 21 | public String getType() { 22 | return type; 23 | } 24 | 25 | public void setType(String type) { 26 | this.type = type; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/enums/Xmode.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.enums; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | 7 | public enum Xmode { 8 | /** 9 | * key存在的时候才创建 10 | */ 11 | XX("XX"), 12 | /** 13 | * key不存在的时候才创建 14 | */ 15 | NX("EX"); 16 | private String type; 17 | Xmode(String type){ 18 | this.type = type; 19 | } 20 | 21 | public String getType() { 22 | return type; 23 | } 24 | 25 | public void setType(String type) { 26 | this.type = type; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/exceptions/AwesomeNettyRedisException.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.exceptions; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public class AwesomeNettyRedisException extends Exception { 7 | public AwesomeNettyRedisException(String message) { 8 | super(message); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/exceptions/FailedToGetConnectionException.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.exceptions; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public class FailedToGetConnectionException extends AwesomeNettyRedisException { 7 | public FailedToGetConnectionException(String message) { 8 | super(message); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/utils/CmdBuildUtils.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.utils; 2 | 3 | import io.netty.buffer.ByteBuf; 4 | import 
io.netty.buffer.PooledByteBufAllocator; 5 | 6 | import java.util.List; 7 | 8 | /** 9 | * @author laihaohua 10 | */ 11 | public class CmdBuildUtils { 12 | /** 13 | * 构建成的RESP文本是: 14 | * "*arrLen\r\n" 15 | * +"$cmdLen\r\n" + "cmd\r\n" 16 | * +"$param0Len\r\n" + "param0\r\n" 17 | * +"$param1Len\r\n" + "param1\r\n" 18 | * +"$param2Len\r\n" + "param2\r\n" 19 | * ...... 20 | * +"$paramnLen\r\n" + "paramn\r\n" 21 | * @return 22 | */ 23 | public static byte[] buildBinary(String cmd, List paramList) { 24 | ByteBuf byteBuf = PooledByteBufAllocator.DEFAULT.buffer(); 25 | /*****************先写 *arrLen\r\n ***********************/ 26 | byteBuf.writeBytes(SymbolUtils.ARRAY_STAR_BYTE); 27 | // 整个命令数组长度 1(cmd) + paramList.size() 28 | int cmdLen = 1 + (paramList == null ? 0 : paramList.size()); 29 | byteBuf.writeBytes(EncodeUtils.getBytes(cmdLen)); 30 | byteBuf.writeBytes(SymbolUtils.CRLF_BYTE); 31 | /*****************写 *arrLen\r\n 结束 **********************/ 32 | 33 | /*************** "$cmdLen\r\n" + "cmd\r\n" 开始 *******************/ 34 | byte [] bytes = cmd.getBytes(); 35 | // $美元符号 36 | byteBuf.writeBytes(SymbolUtils.BULK_DOLLAR_BYTE); 37 | // cmd的长度 38 | byteBuf.writeBytes(EncodeUtils.getBytes(bytes.length)); 39 | // CRLF 40 | byteBuf.writeBytes(SymbolUtils.CRLF_BYTE); 41 | // cmd的内容 42 | byteBuf.writeBytes(bytes); 43 | byteBuf.writeBytes(SymbolUtils.CRLF_BYTE); 44 | /******************* "$cmdLen\r\n" + "cmd\r\n" 结束 **************************/ 45 | 46 | 47 | /*************** $paramNLen\r\n" + "paramN\r\n" 开始 *******************/ 48 | for(byte[] bs : paramList){ 49 | byteBuf.writeBytes(SymbolUtils.BULK_DOLLAR_BYTE); 50 | byteBuf.writeBytes(EncodeUtils.getBytes(bs.length)); 51 | byteBuf.writeBytes(SymbolUtils.CRLF_BYTE); 52 | byteBuf.writeBytes(bs); 53 | byteBuf.writeBytes(SymbolUtils.CRLF_BYTE); 54 | } 55 | /*************** $paramNLen\r\n" + "paramN\r\n" 结束 *******************/ 56 | bytes = new byte[byteBuf.writerIndex()]; 57 | byteBuf.readBytes(bytes); 58 | byteBuf.release(); 59 | return bytes; 60 | } 61 | 62 | /** 63 | * 构建成的RESP文本是: 64 | * "*arrLen\r\n" 65 | * +"$cmdLen\r\n" + "cmd\r\n" 66 | * +"$param0Len\r\n" + "param0\r\n" 67 | * +"$param1Len\r\n" + "param1\r\n" 68 | * +"$param2Len\r\n" + "param2\r\n" 69 | * ...... 70 | * +"$paramnLen\r\n" + "paramn\r\n" 71 | * @return 72 | */ 73 | public static String buildString(String cmd, List paramList) { 74 | StringBuilder s = new StringBuilder(); 75 | /*****************先写 *arrLen\r\n ***********************/ 76 | s.append(SymbolUtils.ARRAY_STAR); 77 | // 整个命令数组长度 1(cmd) + paramList.size() 78 | int cmdLen = 1 + (paramList == null ? 
0 : paramList.size()); 79 | s.append(cmdLen); 80 | s.append(SymbolUtils.CRLF); 81 | /*****************写 *arrLen\r\n 结束 **********************/ 82 | 83 | /*************** "$cmdLen\r\n" + "cmd\r\n" 开始 *******************/ 84 | byte [] bytes = cmd.getBytes(); 85 | // $美元符号 86 | s.append(SymbolUtils.BULK_DOLLAR); 87 | // cmd的长度 88 | s.append(bytes.length); 89 | // CRLF 90 | s.append(SymbolUtils.CRLF); 91 | // cmd的内容 92 | s.append(cmd); 93 | s.append(SymbolUtils.CRLF); 94 | /******************* "$cmdLen\r\n" + "cmd\r\n" 结束 **************************/ 95 | 96 | /*************** $paramNLen\r\n" + "paramN\r\n" 开始 *******************/ 97 | for(String o : paramList){ 98 | bytes = o.toString().getBytes(); 99 | s.append(SymbolUtils.BULK_DOLLAR); 100 | s.append(bytes.length); 101 | s.append(SymbolUtils.CRLF); 102 | s.append(o); 103 | s.append(SymbolUtils.CRLF); 104 | } 105 | /*************** $paramNLen\r\n" + "paramN\r\n" 结束 *******************/ 106 | return s.toString(); 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/utils/EncodeUtils.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.utils; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public class EncodeUtils { 7 | public static byte[] getBytes(Object object){ 8 | if(object == null){ 9 | return null; 10 | } 11 | return String.valueOf(object).getBytes(); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/main/java/com/north/netty/redis/utils/SymbolUtils.java: -------------------------------------------------------------------------------- 1 | package com.north.netty.redis.utils; 2 | 3 | /** 4 | * @author laihaohua 5 | */ 6 | public class SymbolUtils { 7 | /** 8 | * 普通字符串 以+开始 9 | */ 10 | public static final String OK_PLUS = "+"; 11 | public static final byte [] OK_PLUS_BYTE = new byte[]{(byte)'+'}; 12 | /** 13 | * 异常或error 以-开始 14 | */ 15 | public static final String ERR_MINUS = "-"; 16 | public static final byte [] ERR_MINUS_BYTE = new byte[]{(byte)'-'}; 17 | /** 18 | * 整数 以:开始 19 | */ 20 | public static final String INT_COLON = ":"; 21 | public static final byte [] INT_COLON_BYTE = new byte[]{(byte)':'}; 22 | /** 23 | * 数组 以*开始 24 | */ 25 | public static final String ARRAY_STAR = "*"; 26 | public static final byte [] ARRAY_STAR_BYTE = new byte[]{(byte)'*'}; 27 | /** 28 | * bulk 字符串 以$开始 29 | */ 30 | public static final String BULK_DOLLAR = "$"; 31 | public static final byte [] BULK_DOLLAR_BYTE = new byte[]{(byte)'$'}; 32 | /** 33 | * CRLF 34 | */ 35 | public static final String CRLF = "\r\n"; 36 | public static final byte [] CRLF_BYTE = new byte[]{(byte)'\r',(byte)'\n'}; 37 | 38 | } 39 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/test/java/org/nort/netty/redis/test/RedisBinaryClientTest.java: -------------------------------------------------------------------------------- 1 | package org.nort.netty.redis.test; 2 | 3 | import com.north.netty.redis.clients.RedisBinaryClient; 4 | import org.junit.FixMethodOrder; 5 | import org.junit.Test; 6 | import org.junit.runners.MethodSorters; 7 | import org.north.netty.common.utils.ProtostuffHelper; 8 | import org.north.netty.common.utils.SerializeUtils; 9 | 10 | import java.io.Serializable; 11 | import java.util.HashMap; 12 | 13 | @FixMethodOrder(MethodSorters.NAME_ASCENDING) 14 | public class 
RedisBinaryClientTest { 15 | 16 | public static final Key KEY = new Key("j"); 17 | public static final Val VAL = new Val("V"); 18 | @Test 19 | public void test001(){ 20 | RedisBinaryClient redisClient = RedisBinaryClient.getInstance(); 21 | boolean b = redisClient.set(ProtostuffHelper.serializeObject(KEY, Key.class), ProtostuffHelper.serializeObject(VAL, Val.class)); 22 | assert b; 23 | } 24 | 25 | @Test 26 | public void test002(){ 27 | RedisBinaryClient redisClient = RedisBinaryClient.getInstance(); 28 | byte [] bytes = redisClient.get(ProtostuffHelper.serializeObject(KEY, Key.class)); 29 | Val val = ProtostuffHelper.deSerializeObject(bytes, Val.class); 30 | assert val.getVal().equals(VAL.getVal()); 31 | } 32 | 33 | 34 | 35 | public static class Key implements Serializable{ 36 | public Key(){ 37 | 38 | } 39 | private Object key; 40 | public Key(Object key){ 41 | this.key = key; 42 | } 43 | 44 | public Object getKey() { 45 | return key; 46 | } 47 | 48 | public void setKey(Object key) { 49 | this.key = key; 50 | } 51 | } 52 | public static class Val implements Serializable{ 53 | public Val(){ 54 | 55 | } 56 | private Object val; 57 | public Val(Object val){ 58 | this.val = val; 59 | } 60 | 61 | public Object getVal() { 62 | return val; 63 | } 64 | 65 | public void setVal(Object val) { 66 | this.val = val; 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /awesome-netty-redis/src/test/java/org/nort/netty/redis/test/RedisStringClientTest.java: -------------------------------------------------------------------------------- 1 | package org.nort.netty.redis.test; 2 | 3 | 4 | import com.north.netty.redis.clients.RedisStringClient; 5 | import org.junit.FixMethodOrder; 6 | import org.junit.Test; 7 | import org.junit.runners.MethodSorters; 8 | 9 | @FixMethodOrder(MethodSorters.NAME_ASCENDING) 10 | public class RedisStringClientTest { 11 | public static final String KEY = "TEST"; 12 | public static final String VAL = "YES"; 13 | @Test 14 | public void test001(){ 15 | RedisStringClient redisStringClient = RedisStringClient.getInstance(); 16 | boolean b = redisStringClient.set(KEY, VAL); 17 | assert b; 18 | } 19 | 20 | 21 | 22 | @Test 23 | public void test002(){ 24 | RedisStringClient redisStringClient = RedisStringClient.getInstance(); 25 | String str = redisStringClient.get(KEY); 26 | assert VAL.equalsIgnoreCase(str); 27 | } 28 | 29 | 30 | 31 | 32 | } 33 | -------------------------------------------------------------------------------- /awesome-netty-zk/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | awesome-netty 7 | org.north.netty 8 | 1.0-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | awesome-netty-zk 13 | 14 | 15 | org.north.netty 16 | awesome-netty-common 17 | 1.0-SNAPSHOT 18 | 19 | 20 | 21 | 22 | 23 | org.apache.maven.plugins 24 | maven-compiler-plugin 25 | 26 | 8 27 | 8 28 | 29 | 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/NettyZkClient.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk; 2 | 3 | 4 | import com.google.gson.Gson; 5 | import io.netty.bootstrap.Bootstrap; 6 | import io.netty.channel.*; 7 | import io.netty.channel.nio.NioEventLoopGroup; 8 | import io.netty.channel.socket.nio.NioSocketChannel; 9 | import io.netty.handler.codec.LengthFieldBasedFrameDecoder; 10 | import 
io.netty.handler.codec.LengthFieldPrepender; 11 | import org.north.netty.common.utils.SerializeUtils; 12 | import org.north.netty.zk.bean.create.ZkCreateResponse; 13 | import org.north.netty.zk.bean.create.ZkAcl; 14 | import org.north.netty.zk.bean.create.ZkAclId; 15 | import org.north.netty.zk.bean.create.ZkCreateRequest; 16 | import org.north.netty.zk.bean.getchildren.ZkGetChildrenRequest; 17 | import org.north.netty.zk.bean.getchildren.ZkGetChildrenResponse; 18 | import org.north.netty.zk.bean.login.ZkLoginRequest; 19 | import org.north.netty.zk.bean.login.ZkLoginResp; 20 | import org.north.netty.zk.registrys.ZkCaches; 21 | import org.north.netty.zk.utils.CreateMode; 22 | import org.north.netty.zk.utils.OpCode; 23 | import org.north.netty.zk.zkcodec.login.ZkLoginCodec; 24 | import org.north.netty.zk.zkcodec.login.ZkLoginHandler; 25 | 26 | import java.net.InetSocketAddress; 27 | import java.net.SocketAddress; 28 | import java.util.ArrayList; 29 | import java.util.List; 30 | import java.util.concurrent.TimeUnit; 31 | import java.util.concurrent.TimeoutException; 32 | import java.util.concurrent.atomic.AtomicInteger; 33 | 34 | /** 35 | * @author laihaohua 36 | */ 37 | public class NettyZkClient { 38 | private int timeout; 39 | private String zkServer; 40 | private int zkPort; 41 | private String passWord; 42 | private Channel channel; 43 | private final ZkCaches zkRegistry = new ZkCaches(); 44 | private final ZkLoginHandler zkLoginHandler = new ZkLoginHandler(zkRegistry); 45 | private final AtomicInteger atomicIntegerXid = new AtomicInteger(1); 46 | 47 | 48 | public NettyZkClient(int timeout) throws Exception { 49 | this("localhost", 2181, "", timeout); 50 | } 51 | 52 | public NettyZkClient(String zkServer, int zkPort, String passWord, int timeout) throws Exception { 53 | this.zkServer = zkServer; 54 | this.zkPort = zkPort; 55 | this.passWord = passWord; 56 | this.timeout = timeout; 57 | boolean isConnectToServer = connectToServer(); 58 | if(!isConnectToServer){ 59 | throw new Exception("can not connect to server: " + zkServer + ":" + zkPort); 60 | } 61 | ZkLoginResp zkLoginResp = login(); 62 | // login响应中的sessionId大于0才算成功建立连接 63 | if(zkLoginResp == null || zkLoginResp.getSessionId() == 0){ 64 | throw new Exception("login to server: " + zkServer + ":" + zkPort + " failed , resp : " + new Gson().toJson(zkLoginResp)); 65 | } 66 | } 67 | 68 | private boolean connectToServer(){ 69 | Bootstrap bootstrap = new Bootstrap(); 70 | EventLoopGroup eventLoopGroup = new NioEventLoopGroup(2); 71 | bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class); 72 | bootstrap.handler(new ChannelInitializer() { 73 | @Override 74 | protected void initChannel(NioSocketChannel nioSocketChannel) throws Exception { 75 | nioSocketChannel.pipeline() 76 | // 解码器, 将HEADER-CONTENT格式的报文解析成只包含CONTENT 77 | .addLast(ZkLoginHandler.LOGIN_LENGTH_FIELD_BASED_FRAME_DECODER,new LengthFieldBasedFrameDecoder(2048, 0, 4, 0, 4)) 78 | // 编码器, 给报文加上一个4个字节大小的HEADER 79 | .addLast(ZkLoginHandler.LOGIN_LENGTH_FIELD_PREPENDER, new LengthFieldPrepender(4)) 80 | // 编码解码器 编码ZkLoginRequest , 解码ZkLoginResp 81 | .addLast(ZkLoginHandler.ZK_LOGIN_CODEC, new ZkLoginCodec()) 82 | // login的handler, remove该channel所有的handler, 然后返回resp 83 | .addLast(ZkLoginHandler.ZK_LOGIN_HANDLER,zkLoginHandler) 84 | ; 85 | } 86 | }); 87 | bootstrap.option(ChannelOption.SO_KEEPALIVE, true); 88 | try { 89 | SocketAddress socketAddress = new InetSocketAddress(zkServer, zkPort); 90 | ChannelFuture channelFuture = bootstrap.connect(socketAddress).sync(); 91 | 
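/*
 * The two framing handlers registered above implement a plain [4-byte length][payload]
 * wire format, the same framing ZooKeeper uses for its packets. A self-contained Netty
 * sketch of that round trip (illustration only, not part of this project):
 */
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import io.netty.handler.codec.LengthFieldPrepender;

class LengthFramingDemo {
    public static void main(String[] args) {
        // outbound: LengthFieldPrepender(4) prepends the payload size as a 4-byte header
        EmbeddedChannel out = new EmbeddedChannel(new LengthFieldPrepender(4));
        out.writeOutbound(Unpooled.copiedBuffer(new byte[]{1, 2, 3}));
        ByteBuf framed = Unpooled.buffer();
        for (ByteBuf part; (part = out.readOutbound()) != null; ) {
            framed.writeBytes(part);   // the prepender may emit header and payload separately
        }
        System.out.println("framed: " + framed.readableBytes() + " bytes");   // 7 = 4 + 3

        // inbound: read the 4-byte header, strip it, emit only the 3-byte payload
        EmbeddedChannel in = new EmbeddedChannel(
                new LengthFieldBasedFrameDecoder(2048, 0, 4, 0, 4));
        in.writeInbound(framed);
        ByteBuf payload = in.readInbound();
        System.out.println("payload: " + payload.readableBytes() + " bytes"); // 3
    }
}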
this.channel = channelFuture.channel(); 92 | return channel != null && channel.isActive(); 93 | } catch (InterruptedException e) { 94 | e.printStackTrace(); 95 | } 96 | return false; 97 | } 98 | private boolean isSocketConnected(){ 99 | if(channel != null && channel.isActive()){ 100 | return true; 101 | } 102 | return connectToServer(); 103 | } 104 | 105 | /** 106 | * 0. header, 4个字节, 表示报文的大小 107 | * 108 | * 1. 4个字节的protocolVersion(Integer, 默认为0即可) 109 | * 2. 8个字节的lastZxidSeen(Long , 默认为0即可) 110 | * 3. 4个字节的超时时间(Integer) 111 | * 4. 8个字节的sessionId(Long, 默认为0) 112 | * 5. 4个字节, 表示passwd的长度n 113 | * 6. n个字节 114 | * @return 115 | */ 116 | private ZkLoginResp login() throws Exception { 117 | if(!isSocketConnected()){ 118 | throw new Exception("can not connect to server: " + zkServer + ":" + zkPort); 119 | } 120 | ZkLoginRequest zkLoginRequest = new ZkLoginRequest(); 121 | int protocolVersion = 0; 122 | zkLoginRequest.setProtocolVersion(protocolVersion); 123 | Long lastZxidSeen = 0L; 124 | zkLoginRequest.setLastZxidSeen(lastZxidSeen); 125 | zkLoginRequest.setTimeout(timeout); 126 | Long sessionId = 0L; 127 | zkLoginRequest.setSessionId(sessionId); 128 | zkLoginRequest.setPassword(passWord); 129 | zkLoginRequest.setReadOnly(true); 130 | this.channel.writeAndFlush(zkLoginRequest); 131 | long t = System.currentTimeMillis(); 132 | while(!zkLoginHandler.getIsLogon()){ 133 | long t2 = System.currentTimeMillis(); 134 | if(t2 > t + timeout){ 135 | throw new Exception("login to server: " + zkServer + ":" + zkPort + " timeout after " + timeout + "ms"); 136 | } 137 | } 138 | return zkLoginHandler.getZkLoginResp(); 139 | } 140 | public List getChildren(String path){ 141 | ZkGetChildrenRequest zkGetChildrenRequest = new ZkGetChildrenRequest(); 142 | zkGetChildrenRequest.setPath(path); 143 | zkGetChildrenRequest.setWatch(false); 144 | zkGetChildrenRequest.setType(OpCode.GET_CHILDREN); 145 | int xid = atomicIntegerXid.getAndIncrement(); 146 | zkGetChildrenRequest.setXid(xid); 147 | ChannelFuture channelFuture = this.channel.writeAndFlush(zkGetChildrenRequest); 148 | try { 149 | ZkGetChildrenResponse response = (ZkGetChildrenResponse)zkRegistry.getResp(xid, this.timeout, TimeUnit.MILLISECONDS); 150 | return response == null ? 
null : response.getChildren(); 151 | } catch (TimeoutException e) { 152 | e.printStackTrace(); 153 | } 154 | return null; 155 | } 156 | public ZkCreateResponse create(String path, Object data, CreateMode mode){ 157 | ZkCreateRequest createRequest = new ZkCreateRequest(); 158 | createRequest.setPath(path); 159 | createRequest.setData(SerializeUtils.toByteArray(data)); 160 | int xid = atomicIntegerXid.getAndIncrement(); 161 | createRequest.setXid(xid); 162 | createRequest.setType(OpCode.CREATE); 163 | /*******************/ 164 | ZkAclId zkAclId = new ZkAclId(); 165 | // 所有人都有权限 166 | zkAclId.setId("anyone"); 167 | zkAclId.setScheme("world"); 168 | ZkAcl zkAcl = new ZkAcl(); 169 | // 拥有read write update delete admin权限 170 | zkAcl.setPerms(31); 171 | zkAcl.setId(zkAclId); 172 | /*******************/ 173 | createRequest.setAcl(new ArrayList<>()); 174 | createRequest.getAcl().add(zkAcl); 175 | createRequest.setFlags(mode.getFlag()); 176 | ChannelFuture channelFuture = this.channel.writeAndFlush(createRequest); 177 | try { 178 | ZkCreateResponse response = (ZkCreateResponse)zkRegistry.getResp(xid, this.timeout, TimeUnit.MILLISECONDS); 179 | return response; 180 | } catch (TimeoutException e) { 181 | e.printStackTrace(); 182 | } 183 | return null; 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/AbstractZkResonse.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean; 2 | 3 | public abstract class AbstractZkResonse { 4 | int xid; 5 | long zxid; 6 | int err; 7 | 8 | public void setXid(int xid) { 9 | this.xid = xid; 10 | } 11 | 12 | public void setZxid(long zxid) { 13 | this.zxid = zxid; 14 | } 15 | 16 | public void setErr(int err) { 17 | this.err = err; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/RequestHeader.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean; 2 | 3 | import java.io.Serializable; 4 | 5 | public class RequestHeader implements Serializable { 6 | private int xid; 7 | private int type; 8 | 9 | public int getXid() { 10 | return xid; 11 | } 12 | 13 | public void setXid(int xid) { 14 | this.xid = xid; 15 | } 16 | 17 | public int getType() { 18 | return type; 19 | } 20 | 21 | public void setType(int type) { 22 | this.type = type; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/ZkRequest.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean; 2 | 3 | import java.io.Serializable; 4 | 5 | public interface ZkRequest extends Serializable { 6 | int getRequestId(); 7 | } 8 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/ZkResponse.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean; 2 | 3 | import java.io.Serializable; 4 | 5 | public interface ZkResponse extends Serializable{ 6 | } 7 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/create/ZkAcl.java: 
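/*
 * On the perms value 31 used together with the world:anyone id in NettyZkClient.create()
 * above: each ZooKeeper ACL permission is one bit, and 31 is simply all five bits set.
 * A standalone illustration (not this project's code); the constants mirror ZooKeeper's
 * read/write/create/delete/admin bits:
 */
class ZkPermsDemo {
    static final int READ = 1, WRITE = 2, CREATE = 4, DELETE = 8, ADMIN = 16;

    public static void main(String[] args) {
        int all = READ | WRITE | CREATE | DELETE | ADMIN;
        System.out.println(all);   // 31, i.e. "anyone may do everything" as configured above
    }
}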
-------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.create; 2 | 3 | import java.io.Serializable; 4 | 5 | public class ZkAcl implements Serializable { 6 | private int perms; 7 | private ZkAclId id; 8 | 9 | public int getPerms() { 10 | return perms; 11 | } 12 | 13 | public void setPerms(int perms) { 14 | this.perms = perms; 15 | } 16 | 17 | public ZkAclId getId() { 18 | return id; 19 | } 20 | 21 | public void setId(ZkAclId id) { 22 | this.id = id; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/create/ZkAclId.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.create; 2 | 3 | import java.io.Serializable; 4 | 5 | public class ZkAclId implements Serializable { 6 | private String scheme; 7 | private String id; 8 | 9 | public String getScheme() { 10 | return scheme; 11 | } 12 | 13 | public void setScheme(String scheme) { 14 | this.scheme = scheme; 15 | } 16 | 17 | public String getId() { 18 | return id; 19 | } 20 | 21 | public void setId(String id) { 22 | this.id = id; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/create/ZkCreateRequest.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.create; 2 | 3 | import org.north.netty.zk.bean.RequestHeader; 4 | import org.north.netty.zk.bean.ZkRequest; 5 | 6 | import java.util.List; 7 | 8 | public class ZkCreateRequest extends RequestHeader implements ZkRequest { 9 | private String path; 10 | private byte[] data; 11 | private java.util.List acl; 12 | private int flags; 13 | 14 | public String getPath() { 15 | return path; 16 | } 17 | 18 | public void setPath(String path) { 19 | this.path = path; 20 | } 21 | 22 | public byte[] getData() { 23 | return data; 24 | } 25 | 26 | public void setData(byte[] data) { 27 | this.data = data; 28 | } 29 | 30 | public List getAcl() { 31 | return acl; 32 | } 33 | 34 | public void setAcl(List acl) { 35 | this.acl = acl; 36 | } 37 | 38 | public int getFlags() { 39 | return flags; 40 | } 41 | 42 | public void setFlags(int flags) { 43 | this.flags = flags; 44 | } 45 | 46 | @Override 47 | public int getRequestId() { 48 | return getXid(); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/create/ZkCreateResponse.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.create; 2 | 3 | import org.north.netty.zk.bean.AbstractZkResonse; 4 | import org.north.netty.zk.bean.ZkResponse; 5 | 6 | public class ZkCreateResponse extends AbstractZkResonse implements ZkResponse { 7 | private String path; 8 | 9 | public String getPath() { 10 | return path; 11 | } 12 | 13 | public void setPath(String path) { 14 | this.path = path; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/getchildren/ZkGetChildrenRequest.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.getchildren; 2 | 3 | import org.north.netty.zk.bean.RequestHeader; 4 | import org.north.netty.zk.bean.ZkRequest; 5 | 6 | 
import java.io.Serializable; 7 | 8 | /** 9 | * @author laihaohua 10 | */ 11 | public class ZkGetChildrenRequest extends RequestHeader implements ZkRequest { 12 | private String path; 13 | private boolean watch; 14 | 15 | public String getPath() { 16 | return path; 17 | } 18 | 19 | public void setPath(String path) { 20 | this.path = path; 21 | } 22 | 23 | public boolean isWatch() { 24 | return watch; 25 | } 26 | 27 | public void setWatch(boolean watch) { 28 | this.watch = watch; 29 | } 30 | 31 | @Override 32 | public int getRequestId() { 33 | return super.getXid(); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/getchildren/ZkGetChildrenResponse.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.getchildren; 2 | 3 | import org.north.netty.zk.bean.AbstractZkResonse; 4 | import org.north.netty.zk.bean.ZkResponse; 5 | 6 | import java.util.List; 7 | 8 | /** 9 | * @author laihaohua 10 | */ 11 | public class ZkGetChildrenResponse extends AbstractZkResonse implements ZkResponse { 12 | private java.util.List children; 13 | 14 | public List getChildren() { 15 | return children; 16 | } 17 | 18 | public void setChildren(List children) { 19 | this.children = children; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/login/ZkLoginRequest.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.login; 2 | 3 | import java.io.Serializable; 4 | 5 | /** 6 | * @author laihaohua 7 | */ 8 | public class ZkLoginRequest implements Serializable { 9 | private Integer protocolVersion; 10 | private Long lastZxidSeen; 11 | private int timeout; 12 | private Long sessionId; 13 | private String password; 14 | private boolean readOnly = true; 15 | 16 | public Integer getProtocolVersion() { 17 | return protocolVersion; 18 | } 19 | 20 | public void setProtocolVersion(Integer protocolVersion) { 21 | this.protocolVersion = protocolVersion; 22 | } 23 | 24 | public Long getLastZxidSeen() { 25 | return lastZxidSeen; 26 | } 27 | 28 | public void setLastZxidSeen(Long lastZxidSeen) { 29 | this.lastZxidSeen = lastZxidSeen; 30 | } 31 | 32 | public int getTimeout() { 33 | return timeout; 34 | } 35 | 36 | public void setTimeout(int timeout) { 37 | this.timeout = timeout; 38 | } 39 | 40 | public Long getSessionId() { 41 | return sessionId; 42 | } 43 | 44 | public void setSessionId(Long sessionId) { 45 | this.sessionId = sessionId; 46 | } 47 | 48 | public String getPassword() { 49 | return password; 50 | } 51 | 52 | public void setPassword(String password) { 53 | this.password = password; 54 | } 55 | 56 | public boolean isReadOnly() { 57 | return readOnly; 58 | } 59 | 60 | public void setReadOnly(boolean readOnly) { 61 | this.readOnly = readOnly; 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/bean/login/ZkLoginResp.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.bean.login; 2 | 3 | import java.io.Serializable; 4 | 5 | public class ZkLoginResp implements Serializable { 6 | private Integer protocolVersion; 7 | private int timeout; 8 | private Long sessionId; 9 | private String password; 10 | private boolean readOnly; 11 | 
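/*
 * A hedged usage sketch for the client defined above. It assumes a ZooKeeper server is
 * reachable on localhost:2181 with an empty password, matching the defaults used by
 * NettyZkClient; the znode path and the 5000 ms timeout are illustrative values only:
 */
import org.north.netty.zk.NettyZkClient;
import org.north.netty.zk.bean.create.ZkCreateResponse;
import org.north.netty.zk.utils.CreateMode;

class NettyZkClientUsage {
    public static void main(String[] args) throws Exception {
        NettyZkClient client = new NettyZkClient("localhost", 2181, "", 5000);
        ZkCreateResponse created = client.create("/awesome-netty-demo", "hello", CreateMode.PERSISTENT);
        System.out.println("created: " + (created == null ? "timed out" : created.getPath()));
        System.out.println("children of /: " + client.getChildren("/"));
    }
}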
12 | public Integer getProtocolVersion() { 13 | return protocolVersion; 14 | } 15 | 16 | public void setProtocolVersion(Integer protocolVersion) { 17 | this.protocolVersion = protocolVersion; 18 | } 19 | 20 | public int getTimeout() { 21 | return timeout; 22 | } 23 | 24 | public void setTimeout(int timeout) { 25 | this.timeout = timeout; 26 | } 27 | 28 | public Long getSessionId() { 29 | return sessionId; 30 | } 31 | 32 | public void setSessionId(Long sessionId) { 33 | this.sessionId = sessionId; 34 | } 35 | 36 | public String getPassword() { 37 | return password; 38 | } 39 | 40 | public void setPassword(String password) { 41 | this.password = password; 42 | } 43 | 44 | public boolean isReadOnly() { 45 | return readOnly; 46 | } 47 | 48 | public void setReadOnly(boolean readOnly) { 49 | this.readOnly = readOnly; 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/factories/ZkCodecFactories.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.factories; 2 | 3 | import io.netty.handler.codec.ByteToMessageCodec; 4 | import org.north.netty.zk.bean.create.ZkCreateRequest; 5 | import org.north.netty.zk.bean.getchildren.ZkGetChildrenRequest; 6 | import org.north.netty.zk.bean.ZkRequest; 7 | import org.north.netty.zk.registrys.ZkCaches; 8 | import org.north.netty.zk.zkcodec.createcodec.ZkCreateCodec; 9 | import org.north.netty.zk.zkcodec.getchildren.ZkGetChildrenCodec; 10 | 11 | /** 12 | * codecs 工厂 13 | * @author laihaohua 14 | */ 15 | public class ZkCodecFactories { 16 | public static ByteToMessageCodec getCodec(ZkRequest zkRequest, ZkCaches codecRegistry) throws IllegalAccessException { 17 | if(zkRequest instanceof ZkGetChildrenRequest){ 18 | return new ZkGetChildrenCodec(codecRegistry); 19 | }else if(zkRequest instanceof ZkCreateRequest){ 20 | return new ZkCreateCodec(codecRegistry); 21 | } 22 | 23 | throw new IllegalAccessException("cannot find codecs for " + zkRequest.getClass()); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/registrys/ZkCaches.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.registrys; 2 | 3 | import com.google.common.cache.Cache; 4 | import com.google.common.cache.CacheBuilder; 5 | import io.netty.handler.codec.ByteToMessageCodec; 6 | import org.north.netty.zk.bean.ZkResponse; 7 | 8 | import java.util.concurrent.TimeUnit; 9 | import java.util.concurrent.TimeoutException; 10 | 11 | /** 12 | * 一个缓存中心, 保存zkClient实例中每个xid对应的codec 13 | * @author laihaohua 14 | */ 15 | public final class ZkCaches { 16 | private final Cache codecMap = CacheBuilder.newBuilder() 17 | .concurrencyLevel(Runtime.getRuntime().availableProcessors()) 18 | .expireAfterAccess(5, TimeUnit.MINUTES) 19 | .build(); 20 | private final Cache respMap = CacheBuilder.newBuilder() 21 | .concurrencyLevel(Runtime.getRuntime().availableProcessors()) 22 | .expireAfterAccess(5, TimeUnit.MINUTES) 23 | .build(); 24 | 25 | public void putCodec(Integer requestId, ByteToMessageCodec codec){ 26 | codecMap.put(requestId, codec); 27 | } 28 | 29 | public ByteToMessageCodec getCodec(Integer requestId){ 30 | return codecMap.getIfPresent(requestId); 31 | } 32 | 33 | public void removeCodec(Integer requestId){ 34 | codecMap.invalidate(requestId); 35 | } 36 | public void putResp(Integer requestId, 
ZkResponse zkResponse){ 37 | respMap.put(requestId, zkResponse); 38 | } 39 | 40 | public ZkResponse getResp(Integer requestId, long duration, TimeUnit unit) throws TimeoutException { 41 | long t = System.currentTimeMillis(); 42 | long timeout = TimeUnit.MILLISECONDS.convert(duration, unit); 43 | ZkResponse response = null; 44 | while((response = respMap.getIfPresent(requestId)) == null){ 45 | long t2 = System.currentTimeMillis(); 46 | if(t2 > (t + timeout)){ 47 | // remove when timeout 48 | removeResp(requestId); 49 | throw new TimeoutException("get resp timeout after " + timeout + " ms "); 50 | } 51 | } 52 | return response; 53 | } 54 | 55 | public void removeResp(Integer requestId){ 56 | respMap.invalidate(requestId); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/utils/CreateMode.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.utils; 2 | 3 | public enum CreateMode { 4 | /** 5 | * 永久性节点 6 | */ 7 | PERSISTENT (0), 8 | /** 9 | * 永久性顺序节点 10 | */ 11 | PERSISTENT_SEQUENTIAL (2), 12 | /** 13 | * 临时节点 14 | */ 15 | EPHEMERAL (1), 16 | /** 17 | * 临时顺序节点 18 | */ 19 | EPHEMERAL_SEQUENTIAL (3); 20 | private int flag; 21 | CreateMode(int flag){ 22 | this.flag = flag; 23 | } 24 | 25 | public int getFlag() { 26 | return flag; 27 | } 28 | 29 | public void setFlag(int flag) { 30 | this.flag = flag; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/utils/OpCode.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.utils; 2 | 3 | public class OpCode { 4 | public static final int NOTIFICATION = 0; 5 | 6 | public static final int CREATE = 1; 7 | 8 | public static final int DELETE = 2; 9 | 10 | public static final int EXISTS = 3; 11 | 12 | public static final int GET_DATA = 4; 13 | 14 | public static final int SET_DATA = 5; 15 | 16 | public static final int GETACL = 6; 17 | 18 | public static final int SETACL = 7; 19 | 20 | public static final int GET_CHILDREN = 8; 21 | 22 | public static final int SYNC = 9; 23 | 24 | public static final int PING = 11; 25 | 26 | public static final int GET_CHILDREN2 = 12; 27 | 28 | public static final int CHECK = 13; 29 | 30 | public static final int MULTI = 14; 31 | 32 | public static final int AUTH = 100; 33 | 34 | public static final int SET_WATCHES = 101; 35 | 36 | public static final int SASL = 102; 37 | 38 | public static final int CREATE_SESSION = -10; 39 | 40 | public static final int CLOSE_SESSION = -11; 41 | 42 | public static final int ERROR = -1; 43 | } 44 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/ZkAbstractCodec.java: -------------------------------------------------------------------------------- 1 | package org.north.netty.zk.zkcodec; 2 | 3 | import io.netty.handler.codec.ByteToMessageCodec; 4 | import org.north.netty.zk.registrys.ZkCaches; 5 | 6 | public abstract class ZkAbstractCodec extends ByteToMessageCodec { 7 | protected ZkCaches codecRegistry; 8 | public ZkAbstractCodec(ZkCaches codecRegistry){ 9 | this.codecRegistry = codecRegistry; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/ZkCodec.java: 
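/*
 * ZkCaches.getResp above spin-loops on the Guava cache until the decoder publishes a
 * response under the request's xid. A common alternative, shown here purely for comparison
 * and not what this project does, is to park the caller on a per-xid future:
 */
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

class XidFutureRegistry {
    private final Map<Integer, CompletableFuture<Object>> pending = new ConcurrentHashMap<>();

    CompletableFuture<Object> register(int xid) {
        return pending.computeIfAbsent(xid, k -> new CompletableFuture<>());
    }

    /** Called by the decoder once the response for this xid has been parsed. */
    void complete(int xid, Object resp) {
        CompletableFuture<Object> f = pending.remove(xid);
        if (f != null) {
            f.complete(resp);
        }
    }

    /** Blocks the calling thread without spinning until the response arrives or times out. */
    Object await(int xid, long timeoutMs) throws Exception {
        return register(xid).get(timeoutMs, TimeUnit.MILLISECONDS);
    }
}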
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/utils/CreateMode.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.utils;

public enum CreateMode {
    /**
     * Persistent node
     */
    PERSISTENT(0),
    /**
     * Persistent sequential node
     */
    PERSISTENT_SEQUENTIAL(2),
    /**
     * Ephemeral node
     */
    EPHEMERAL(1),
    /**
     * Ephemeral sequential node
     */
    EPHEMERAL_SEQUENTIAL(3);

    private int flag;

    CreateMode(int flag) {
        this.flag = flag;
    }

    public int getFlag() {
        return flag;
    }

    public void setFlag(int flag) {
        this.flag = flag;
    }
}
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/utils/OpCode.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.utils;

public class OpCode {
    public static final int NOTIFICATION = 0;

    public static final int CREATE = 1;

    public static final int DELETE = 2;

    public static final int EXISTS = 3;

    public static final int GET_DATA = 4;

    public static final int SET_DATA = 5;

    public static final int GETACL = 6;

    public static final int SETACL = 7;

    public static final int GET_CHILDREN = 8;

    public static final int SYNC = 9;

    public static final int PING = 11;

    public static final int GET_CHILDREN2 = 12;

    public static final int CHECK = 13;

    public static final int MULTI = 14;

    public static final int AUTH = 100;

    public static final int SET_WATCHES = 101;

    public static final int SASL = 102;

    public static final int CREATE_SESSION = -10;

    public static final int CLOSE_SESSION = -11;

    public static final int ERROR = -1;
}
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/ZkAbstractCodec.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.zkcodec;

import io.netty.handler.codec.ByteToMessageCodec;
import org.north.netty.zk.registrys.ZkCaches;

public abstract class ZkAbstractCodec<T> extends ByteToMessageCodec<T> {
    protected ZkCaches codecRegistry;

    public ZkAbstractCodec(ZkCaches codecRegistry) {
        this.codecRegistry = codecRegistry;
    }
}
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/ZkCodec.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.zkcodec;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageCodec;
import org.north.netty.zk.bean.ZkRequest;
import org.north.netty.zk.factories.ZkCodecFactories;
import org.north.netty.zk.registrys.ZkCaches;

import java.lang.reflect.Method;
import java.util.List;

/**
 * @author laihaohua
 */
public class ZkCodec extends ZkAbstractCodec<ZkRequest> {

    public ZkCodec(ZkCaches codecRegistry) {
        super(codecRegistry);
    }

    @Override
    protected void encode(ChannelHandlerContext channelHandlerContext, ZkRequest zkRequest, ByteBuf byteBuf) throws Exception {
        // look up the request-specific codec and call its protected encode(...) reflectively
        ByteToMessageCodec codec = ZkCodecFactories.getCodec(zkRequest, codecRegistry);
        Method method = codec.getClass().getDeclaredMethod("encode", ChannelHandlerContext.class, Object.class, ByteBuf.class);
        method.setAccessible(true);
        int requestId = zkRequest.getRequestId();
        if (requestId == 0) {
            throw new Exception("requestId can not be zero");
        }
        codecRegistry.putCodec(requestId, codec);
        try {
            method.invoke(codec, channelHandlerContext, zkRequest, byteBuf);
        } catch (Exception e) {
            e.printStackTrace();
            codecRegistry.removeCodec(requestId);
        }
    }

    @Override
    protected void decode(ChannelHandlerContext channelHandlerContext, ByteBuf byteBuf, List<Object> list) throws Exception {
        int xid = byteBuf.readInt();
        ByteToMessageCodec codec = codecRegistry.getCodec(xid);
        if (codec == null) {
            throw new IllegalAccessException("cannot find codec for xid = " + xid);
        }
        // restore the readerIndex so the request-specific codec can read the xid again
        byteBuf.readerIndex(byteBuf.readerIndex() - 4);
        Method method = codec.getClass().getDeclaredMethod("decode", ChannelHandlerContext.class, ByteBuf.class, List.class);
        method.setAccessible(true);
        method.invoke(codec, channelHandlerContext, byteBuf, list);
        codecRegistry.removeCodec(xid);
    }
}
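ZkCodec.decode above peeks at the 4-byte xid and then rewinds readerIndex by hand before delegating. The same peek-and-rewind can be expressed with ByteBuf's markReaderIndex()/resetReaderIndex() pair; the variant below is only a sketch of that substitution, intended as a drop-in replacement for the decode method of the ZkCodec class above, with the rest of the flow unchanged.

    // Hedged sketch: equivalent decode using mark/reset instead of readerIndex arithmetic.
    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        in.markReaderIndex();               // remember the position before peeking
        int xid = in.readInt();             // peek the 4-byte xid
        ByteToMessageCodec codec = codecRegistry.getCodec(xid);
        if (codec == null) {
            throw new IllegalAccessException("cannot find codec for xid = " + xid);
        }
        in.resetReaderIndex();              // rewind so the delegate codec sees the xid again
        Method method = codec.getClass().getDeclaredMethod("decode",
                ChannelHandlerContext.class, ByteBuf.class, List.class);
        method.setAccessible(true);
        method.invoke(codec, ctx, in, out);
        codecRegistry.removeCodec(xid);
    }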
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/createcodec/ZkCreateCodec.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.zkcodec.createcodec;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import org.north.netty.common.utils.SerializeUtils;
import org.north.netty.zk.bean.create.ZkAcl;
import org.north.netty.zk.bean.create.ZkAclId;
import org.north.netty.zk.bean.create.ZkCreateRequest;
import org.north.netty.zk.bean.create.ZkCreateResponse;
import org.north.netty.zk.registrys.ZkCaches;
import org.north.netty.zk.zkcodec.ZkAbstractCodec;

import java.util.List;

/**
 * @author laihaohua
 */
public class ZkCreateCodec extends ZkAbstractCodec<ZkCreateRequest> {
    public ZkCreateCodec(ZkCaches codecRegistry) {
        super(codecRegistry);
    }

    @Override
    protected void encode(ChannelHandlerContext ctx, ZkCreateRequest msg, ByteBuf out) throws Exception {
        out.writeInt(msg.getXid());
        out.writeInt(msg.getType());
        String path = msg.getPath();
        SerializeUtils.writeStringToBuffer(path, out);
        byte[] bytes = msg.getData();
        SerializeUtils.writeByteArrToBuffer(bytes, out);
        List<ZkAcl> list = msg.getAcl();
        if (list == null) {
            out.writeInt(-1);
        } else {
            out.writeInt(list.size());
            for (ZkAcl acl : list) {
                out.writeInt(acl.getPerms());
                ZkAclId zkAclId = acl.getId();
                SerializeUtils.writeStringToBuffer(zkAclId.getScheme(), out);
                SerializeUtils.writeStringToBuffer(zkAclId.getId(), out);
            }
        }
        out.writeInt(msg.getFlags());
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        ZkCreateResponse zkCreateResponse = new ZkCreateResponse();
        int xid = in.readInt();
        long zxid = in.readLong();
        int err = in.readInt();
        zkCreateResponse.setXid(xid);
        zkCreateResponse.setZxid(zxid);
        zkCreateResponse.setErr(err);
        if (err == 0) {
            String path = SerializeUtils.readStringToBuffer(in);
            out.add(path);
            zkCreateResponse.setPath(path);
        }
        // publish the response through the registry so the caller blocked on this xid can pick it up
        codecRegistry.putResp(xid, zkCreateResponse);
    }
}
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/getchildren/ZkGetChildrenCodec.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.zkcodec.getchildren;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import org.north.netty.common.utils.SerializeUtils;
import org.north.netty.zk.bean.getchildren.ZkGetChildrenRequest;
import org.north.netty.zk.bean.getchildren.ZkGetChildrenResponse;
import org.north.netty.zk.registrys.ZkCaches;
import org.north.netty.zk.zkcodec.ZkAbstractCodec;

import java.util.ArrayList;
import java.util.List;

/**
 * @author laihaohua
 */
public class ZkGetChildrenCodec extends ZkAbstractCodec<ZkGetChildrenRequest> {
    public ZkGetChildrenCodec(ZkCaches codecRegistry) {
        super(codecRegistry);
    }

    @Override
    protected void encode(ChannelHandlerContext ctx, ZkGetChildrenRequest msg, ByteBuf out) throws Exception {
        out.writeInt(msg.getXid());
        out.writeInt(msg.getType());
        String path = msg.getPath();
        SerializeUtils.writeStringToBuffer(path, out);
        out.writeBoolean(msg.isWatch());
    }

    /**
     * Wire layout of the reply:
     *   4-byte xid
     *   8-byte zxid
     *   4-byte err
     *   if err == 0 the response body follows:
     *     4-byte len, the number of children
     *     len strings (each one a 4-byte length followed by the content bytes)
     * @param ctx
     * @param in
     * @param out
     * @throws Exception
     */
    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        ZkGetChildrenResponse zkGetChildrenResponse = new ZkGetChildrenResponse();
        int xid = in.readInt();
        long zXid = in.readLong();
        int err = in.readInt();
        zkGetChildrenResponse.setXid(xid);
        zkGetChildrenResponse.setZxid(zXid);
        zkGetChildrenResponse.setErr(err);
        if (err == 0) {
            int listLen = in.readInt();
            if (listLen < 0) {
                codecRegistry.putResp(xid, zkGetChildrenResponse);
                return;
            }
            List<String> list = new ArrayList<>(listLen);
            while (listLen-- > 0) {
                String s = SerializeUtils.readStringToBuffer(in);
                list.add(s);
            }
            zkGetChildrenResponse.setChildren(list);
        }
        codecRegistry.putResp(xid, zkGetChildrenResponse);
    }
}
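The decode javadoc above spells out the getChildren reply layout. The sketch below hand-builds such a reply and pushes it through ZkGetChildrenCodec with Netty's EmbeddedChannel to show the layout end to end; the xid, zxid, and child names are made-up sample values, and the getChildren() accessor on ZkGetChildrenResponse is assumed from the setter used in the codec.

// Hedged sketch: decoding a hand-built getChildren reply through the codec above.
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
import org.north.netty.common.utils.SerializeUtils;
import org.north.netty.zk.bean.getchildren.ZkGetChildrenResponse;
import org.north.netty.zk.registrys.ZkCaches;
import org.north.netty.zk.zkcodec.getchildren.ZkGetChildrenCodec;

import java.util.concurrent.TimeUnit;

public class GetChildrenDecodeExample {
    public static void main(String[] args) throws Exception {
        ZkCaches caches = new ZkCaches();
        EmbeddedChannel channel = new EmbeddedChannel(new ZkGetChildrenCodec(caches));

        ByteBuf reply = Unpooled.buffer();
        reply.writeInt(1);                                        // 4-byte xid (sample value)
        reply.writeLong(100L);                                    // 8-byte zxid (sample value)
        reply.writeInt(0);                                        // 4-byte err, 0 = OK
        reply.writeInt(2);                                        // 4-byte child count
        SerializeUtils.writeStringToBuffer("zookeeper", reply);   // each child: 4-byte length + bytes
        SerializeUtils.writeStringToBuffer("as0000000001", reply);

        channel.writeInbound(reply);                              // runs ZkGetChildrenCodec.decode

        // the decoder publishes the result into ZkCaches keyed by xid
        ZkGetChildrenResponse resp = (ZkGetChildrenResponse) caches.getResp(1, 1, TimeUnit.SECONDS);
        System.out.println(resp.getChildren());                   // [zookeeper, as0000000001]
    }
}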
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/login/ZkLoginCodec.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.zkcodec.login;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageCodec;
import org.north.netty.common.utils.SerializeUtils;
import org.north.netty.zk.bean.login.ZkLoginRequest;
import org.north.netty.zk.bean.login.ZkLoginResp;

import java.util.List;

/**
 * @author laihaohua
 */
public class ZkLoginCodec extends ByteToMessageCodec<ZkLoginRequest> {

    public ZkLoginCodec() {
    }

    @Override
    protected void encode(ChannelHandlerContext ctx, ZkLoginRequest msg, ByteBuf outByteBuf) throws Exception {
        outByteBuf.writeInt(msg.getProtocolVersion());
        outByteBuf.writeLong(msg.getLastZxidSeen());
        outByteBuf.writeInt(msg.getTimeout());
        outByteBuf.writeLong(msg.getSessionId());
        String passWord = msg.getPassword();
        SerializeUtils.writeStringToBuffer(passWord, outByteBuf);
        outByteBuf.writeBoolean(msg.isReadOnly());
    }

    /**
     * @param ctx
     * @param in
     * @param out
     * @throws Exception
     */
    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        ZkLoginResp zkLoginResp = new ZkLoginResp();
        zkLoginResp.setProtocolVersion(in.readInt());
        zkLoginResp.setTimeout(in.readInt());
        zkLoginResp.setSessionId(in.readLong());
        String password = SerializeUtils.readStringToBuffer(in);
        zkLoginResp.setPassword(password);
        zkLoginResp.setReadOnly(in.readBoolean());
        out.add(zkLoginResp);
    }
}
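ZooKeeper frames every packet with a 4-byte length prefix, which is why the login phase pairs this codec with a LengthFieldBasedFrameDecoder and LengthFieldPrepender, named by the constants in ZkLoginHandler (the next file). The initializer below is a sketch of how that login pipeline could be assembled, not the literal bootstrap code in NettyZkClient; the handler order and frame-decoder parameters are assumptions based on that standard framing.

// Hedged sketch: assembling the login-phase pipeline around ZkLoginCodec.
import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import io.netty.handler.codec.LengthFieldPrepender;
import org.north.netty.zk.registrys.ZkCaches;
import org.north.netty.zk.zkcodec.login.ZkLoginCodec;
import org.north.netty.zk.zkcodec.login.ZkLoginHandler;

public class LoginPipelineExample extends ChannelInitializer<SocketChannel> {
    private final ZkCaches caches = new ZkCaches();

    @Override
    protected void initChannel(SocketChannel ch) {
        // strip the 4-byte length prefix from inbound frames
        ch.pipeline().addLast(ZkLoginHandler.LOGIN_LENGTH_FIELD_BASED_FRAME_DECODER,
                new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4, 0, 4));
        // prepend a 4-byte length to outbound frames
        ch.pipeline().addLast(ZkLoginHandler.LOGIN_LENGTH_FIELD_PREPENDER,
                new LengthFieldPrepender(4));
        // encode ZkLoginRequest / decode ZkLoginResp
        ch.pipeline().addLast(ZkLoginHandler.ZK_LOGIN_CODEC, new ZkLoginCodec());
        // swaps itself out for the generic ZkCodec once the session is established
        ch.pipeline().addLast(ZkLoginHandler.ZK_LOGIN_HANDLER, new ZkLoginHandler(caches));
    }
}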
--------------------------------------------------------------------------------
/awesome-netty-zk/src/main/java/org/north/netty/zk/zkcodec/login/ZkLoginHandler.java:
--------------------------------------------------------------------------------
package org.north.netty.zk.zkcodec.login;

import com.google.gson.Gson;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.north.netty.zk.bean.login.ZkLoginResp;
import org.north.netty.zk.registrys.ZkCaches;
import org.north.netty.zk.zkcodec.ZkCodec;

import java.util.List;

/**
 * @author laihaohua
 */
public class ZkLoginHandler extends MessageToMessageDecoder<ZkLoginResp> {
    private Gson gson = new Gson();
    public static final String LOGIN_LENGTH_FIELD_BASED_FRAME_DECODER = "LOGIN_LENGTH_FIELD_BASED_FRAME_DECODER";
    public static final String LOGIN_LENGTH_FIELD_PREPENDER = "LOGIN_LENGTH_FIELD_PREPENDER";
    public static final String ZK_LOGIN_CODEC = "ZK_LOGIN_CODEC";
    public static final String ZK_LOGIN_HANDLER = "ZK_LOGIN_HANDLER";

    private volatile boolean isLogon;
    private volatile ZkLoginResp zkLoginResp;
    private ZkCaches codecRegistry;

    public ZkLoginHandler(ZkCaches codecRegistry) {
        this.codecRegistry = codecRegistry;
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, ZkLoginResp zkLoginResp, List<Object> out) throws Exception {
        System.out.println("msg = [" + gson.toJson(zkLoginResp) + "]");
        // remove the two login-only handlers
        ctx.pipeline().remove(ZK_LOGIN_CODEC);
        ctx.pipeline().remove(ZK_LOGIN_HANDLER);
        // add the generic ZkCodec for all subsequent requests
        ctx.pipeline().addLast(new ZkCodec(codecRegistry));
        // expose the response body
        setZkLoginResp(zkLoginResp);
        // flag the login as successful
        setIsLogon(true);
    }

    public boolean getIsLogon() {
        return isLogon;
    }

    public void setIsLogon(boolean isLogon) {
        this.isLogon = isLogon;
    }

    public ZkLoginResp getZkLoginResp() {
        return zkLoginResp;
    }

    public void setZkLoginResp(ZkLoginResp zkLoginResp) {
        this.zkLoginResp = zkLoginResp;
    }
}
--------------------------------------------------------------------------------
/awesome-netty-zk/src/test/java/zk/ZkClientTest.java:
--------------------------------------------------------------------------------
package zk;

import com.google.gson.Gson;
import org.junit.Test;
import org.north.netty.zk.NettyZkClient;
import org.north.netty.zk.bean.create.ZkCreateResponse;
import org.north.netty.zk.utils.CreateMode;

import java.util.List;

public class ZkClientTest {
    @Test
    public void testZkClient() throws Exception {
        // the NettyZkClient constructor calls login() to establish a session with the server
        NettyZkClient nettyZkClient = new NettyZkClient(30000);

        // create an ephemeral sequential node
        ZkCreateResponse createResponse = nettyZkClient.create("/as", 12312, CreateMode.EPHEMERAL_SEQUENTIAL);
        System.out.println(new Gson().toJson(createResponse));

        // list all children under /
        List<String> list = nettyZkClient.getChildren("/");
        System.out.println(new Gson().toJson(list));
    }
}
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.north.netty</groupId>
    <artifactId>awesome-netty</artifactId>
    <packaging>pom</packaging>
    <version>1.0-SNAPSHOT</version>
    <modules>
        <module>awesome-netty-zk</module>
        <module>awesome-netty-redis</module>
        <module>awesome-netty-kafka</module>
        <module>awesome-netty-mysql</module>
        <module>awesome-netty-elasticsearch</module>
        <module>awesome-netty-common</module>
    </modules>
</project>
--------------------------------------------------------------------------------