├── src ├── main │ ├── java │ │ └── org │ │ │ └── fh │ │ │ └── gae │ │ │ └── das │ │ │ ├── template │ │ │ ├── DasSerializable.java │ │ │ ├── vo │ │ │ │ ├── Column.java │ │ │ │ ├── Template.java │ │ │ │ └── Table.java │ │ │ ├── level │ │ │ │ ├── DasLevel.java │ │ │ │ └── TextDasLevel.java │ │ │ ├── DasTable.java │ │ │ ├── OpType.java │ │ │ ├── DasTemplate.java │ │ │ └── TemplateHolder.java │ │ │ ├── sender │ │ │ ├── DasSender.java │ │ │ ├── kafka │ │ │ │ ├── KafkaSender.java │ │ │ │ └── KafkaConfig.java │ │ │ └── file │ │ │ │ └── FileSender.java │ │ │ ├── mysql │ │ │ ├── listener │ │ │ │ ├── BizListener.java │ │ │ │ ├── DemoTableListener.java │ │ │ │ └── AggregationListener.java │ │ │ ├── binlog │ │ │ │ ├── BinlogPositionStore.java │ │ │ │ ├── BinlogPosition.java │ │ │ │ └── FileBinlogPositionStore.java │ │ │ ├── MysqlRowData.java │ │ │ ├── MysqlBinlogConfig.java │ │ │ └── BinlogClient.java │ │ │ ├── exception │ │ │ ├── DasStoreException.java │ │ │ ├── GaeDasException.java │ │ │ └── InvalidDasTemplateException.java │ │ │ ├── ha │ │ │ ├── heartbeat │ │ │ │ ├── BeatTimeHolder.java │ │ │ │ ├── JsonRequestDecoder.java │ │ │ │ ├── BeatTask.java │ │ │ │ ├── BeatMessage.java │ │ │ │ └── BeatHandler.java │ │ │ ├── NettyUtils.java │ │ │ ├── HaServer.java │ │ │ └── CoordinationService.java │ │ │ ├── DasApp.java │ │ │ ├── utils │ │ │ └── GaeCollectionUtils.java │ │ │ └── DasAppEventListener.java │ └── resources │ │ ├── application-peer1.yml │ │ ├── application-peer2.yml │ │ ├── application.yml │ │ └── template.json └── test │ └── java │ └── org │ └── fh │ └── gae │ └── das │ └── test │ ├── TemplateHolderTest.java │ └── TextDasLevelTest.java ├── index └── incr.idx ├── README.md └── pom.xml /src/main/java/org/fh/gae/das/template/DasSerializable.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template; 2 | 3 | public interface DasSerializable { 4 | byte[] serialize(); 5 | } 6 | -------------------------------------------------------------------------------- /src/main/resources/application-peer1.yml: -------------------------------------------------------------------------------- 1 | das: 2 | mysql: 3 | server-id: 1 4 | 5 | ha: 6 | port: 10000 7 | peer-host: 127.0.0.1 8 | peer-port: 10001 9 | -------------------------------------------------------------------------------- /src/main/resources/application-peer2.yml: -------------------------------------------------------------------------------- 1 | das: 2 | mysql: 3 | server-id: 2 4 | 5 | ha: 6 | port: 10001 7 | peer-host: 127.0.0.1 8 | peer-port: 10000 9 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/sender/DasSender.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.sender; 2 | 3 | import org.fh.gae.das.template.DasSerializable; 4 | 5 | public interface DasSender { 6 | void send(DasSerializable data); 7 | } 8 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/listener/BizListener.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql.listener; 2 | 3 | import org.fh.gae.das.mysql.MysqlRowData; 4 | 5 | public interface BizListener { 6 | void onEvent(MysqlRowData eventData); 7 | } 8 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/vo/Column.java: 
-------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template.vo; 2 | 3 | import lombok.Data; 4 | import lombok.NoArgsConstructor; 5 | 6 | @Data 7 | @NoArgsConstructor 8 | public class Column { 9 | private String column; 10 | 11 | private int position; 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/exception/DasStoreException.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.exception; 2 | 3 | public class DasStoreException extends GaeDasException { 4 | public DasStoreException(String msg) { 5 | super(msg); 6 | } 7 | 8 | public DasStoreException() { 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/exception/GaeDasException.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.exception; 2 | 3 | public class GaeDasException extends RuntimeException { 4 | public GaeDasException(String msg) { 5 | super(msg); 6 | } 7 | 8 | public GaeDasException() { 9 | 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/heartbeat/BeatTimeHolder.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha.heartbeat; 2 | 3 | import org.fh.gae.das.mysql.binlog.BinlogPosition; 4 | 5 | public class BeatTimeHolder { 6 | public static volatile long lastBeat = 0; 7 | 8 | public static volatile BinlogPosition position; 9 | } 10 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/vo/Template.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template.vo; 2 | 3 | import lombok.Data; 4 | import lombok.NoArgsConstructor; 5 | 6 | import java.util.List; 7 | 8 | @Data 9 | @NoArgsConstructor 10 | public class Template { 11 | private String database; 12 | private List tableList; 13 | } 14 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/exception/InvalidDasTemplateException.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.exception; 2 | 3 | public class InvalidDasTemplateException extends GaeDasException { 4 | public InvalidDasTemplateException(String msg) { 5 | super(msg); 6 | } 7 | 8 | public InvalidDasTemplateException() { 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/test/java/org/fh/gae/das/test/TemplateHolderTest.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.test; 2 | 3 | import org.fh.gae.das.template.TemplateHolder; 4 | import org.junit.Test; 5 | 6 | public class TemplateHolderTest { 7 | @Test 8 | public void testLoad() { 9 | TemplateHolder holder = new TemplateHolder(); 10 | holder.loadJson("template.json"); 11 | int i = 0; 12 | 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/vo/Table.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template.vo; 2 | 3 | import lombok.Data; 4 | import lombok.NoArgsConstructor; 5 | 6 | 
import java.util.List; 7 | 8 | @Data 9 | @NoArgsConstructor 10 | public class Table { 11 | private String tableName; 12 | private Integer level; 13 | 14 | private List<Column> insert; 15 | private List<Column> update; 16 | private List<Column> delete; 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/DasApp.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class DasApp { 8 | public static void main(String[] args) { 9 | SpringApplication app = new SpringApplication(DasApp.class); 10 | app.addListeners(new DasAppEventListener()); 11 | app.run(args); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/binlog/BinlogPositionStore.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql.binlog; 2 | 3 | public interface BinlogPositionStore { 4 | /** 5 | * 加载上次保存的binlog pos 6 | * @return 7 | */ 8 | BinlogPosition load(); 9 | 10 | /** 11 | * 保存当前binlog位置 12 | * @param binlogPosition 13 | * @return 14 | */ 15 | int save(BinlogPosition binlogPosition); 16 | 17 | /** 18 | * 获取当前binlog位置 19 | * @return 20 | */ 21 | BinlogPosition extract(); 22 | } 23 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/MysqlRowData.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql; 2 | 3 | import com.github.shyiko.mysql.binlog.event.EventType; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import org.fh.gae.das.template.DasTable; 7 | 8 | import java.util.Map; 9 | 10 | @Data 11 | @NoArgsConstructor 12 | public class MysqlRowData { 13 | private DasTable table; 14 | 15 | private EventType eventType; 16 | 17 | private Map<String, String> after; 18 | 19 | private Map<String, String> before; 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/level/DasLevel.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template.level; 2 | 3 | import lombok.Data; 4 | import org.fh.gae.das.template.DasSerializable; 5 | import org.fh.gae.das.template.DasTable; 6 | import org.fh.gae.das.template.OpType; 7 | 8 | import java.util.HashMap; 9 | import java.util.Map; 10 | 11 | @Data 12 | public abstract class DasLevel implements DasSerializable { 13 | protected DasTable table; 14 | 15 | protected OpType opType; 16 | 17 | protected Map<String, String> fieldValueMap = new HashMap<>(); 18 | } 19 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/DasTable.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.HashMap; 6 | import java.util.List; 7 | import java.util.Map; 8 | 9 | @Data 10 | public class DasTable { 11 | private String tableName; 12 | 13 | private String level; 14 | 15 | /** 16 | * 操作类型->字段顺序 17 | */ 18 | private Map<OpType, List<String>> opTypeFieldSetMap = new HashMap<>(); 19 | 20 | /** 21 | * 字段位置->字段名 22 | */ 23 | private Map<Integer, String> posMap = new HashMap<>(); 24 | } 25 |
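The two maps above drive the whole pipeline: opTypeFieldSetMap fixes the output column order for each operation type, and posMap translates binlog column positions back into column names. Below is a minimal, self-contained sketch of how they fit together with TextDasLevel (defined further down); the class name DasTableSketch and the literal values are invented for illustration, and it mirrors what DasTemplate.parse() and TextDasLevelTest do:

```
import org.fh.gae.das.template.DasTable;
import org.fh.gae.das.template.OpType;
import org.fh.gae.das.template.level.TextDasLevel;

import java.util.Arrays;

public class DasTableSketch {
    public static void main(String[] args) {
        DasTable table = new DasTable();
        table.setTableName("new_table");
        table.setLevel("1");
        // column order for INSERT events, as declared in template.json
        table.getOpTypeFieldSetMap().put(OpType.ADD, Arrays.asList("id", "name", "age"));
        // binlog column position -> column name, as TemplateHolder loads it from INFORMATION_SCHEMA
        table.getPosMap().put(0, "id");
        table.getPosMap().put(1, "name");
        table.getPosMap().put(2, "age");

        TextDasLevel level = new TextDasLevel();
        level.setTable(table);
        level.setOpType(OpType.ADD);
        level.getFieldValueMap().put("id", "1");
        level.getFieldValueMap().put("name", "gae");
        level.getFieldValueMap().put("age", "24");

        // prints level, op ordinal, then the configured columns, tab-separated: 1\t0\t1\tgae\t24
        System.out.println(new String(level.serialize()));
    }
}
```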
-------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/utils/GaeCollectionUtils.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.utils; 2 | 3 | import java.util.Map; 4 | import java.util.function.Supplier; 5 | 6 | public class GaeCollectionUtils { 7 | private GaeCollectionUtils() { 8 | 9 | } 10 | 11 | /** 12 | * 从Map中按key取值, 如果不存在则创建,入map, 并返回创建的对象 13 | * @param key 14 | * @param map 15 | * @param factory 16 | * @return 17 | */ 18 | public static R getAndCreateIfNeed(T key, Map map, Supplier factory) { 19 | R ret = map.get(key); 20 | if (null == ret) { 21 | ret = factory.get(); 22 | map.put(key, ret); 23 | } 24 | 25 | return ret; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /index/incr.idx: -------------------------------------------------------------------------------- 1 | 1 1 7 22333 2333 2 | 1 1 8 cat1ss 2 3 | 1 1 9 hellokitty 50 4 | 1 1 9 hellokittys 50 5 | 1 1 9 hellokittyss 50 6 | 1 1 9 hellokittysss 50 7 | 1 1 9 hellokittyssss 50 8 | 1 1 10 darks 1 9 | 1 1 8 cat1ssa 2 10 | 1 1 10 darkss 1 11 | 1 1 10 dark 1 12 | 1 1 10 dark 1 13 | 1 1 9 hellokittyssss 50 14 | 1 1 7 22333 2333 15 | 1 1 8 cat1ssa 2 16 | 1 1 6 name223 2 17 | 1 1 5 name1 1 18 | 1 1 5 name1 2 19 | 1 1 10 dark 12 20 | 1 1 10 dark 12 21 | 1 1 12 dark souls 3 22 | 1 1 12 dark souls 4 23 | 1 0 13 cod13 13 24 | 1 1 13 cod13 14 25 | 1 0 6 apple 26 | 1 1 6 apples 27 | 2 2 5 aaa 28 | 2 0 7 pear 29 | 2 2 1 a 30 | 2 2 4 efffs 31 | 1 2 4 brucess 201 32 | 1 2 5 name1 2 33 | 1 2 3 test 30 34 | 1 2 8 cat1ssa 2 35 | 1 1 7 22333 23332 36 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/MysqlBinlogConfig.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql; 2 | 3 | import lombok.Data; 4 | import lombok.NoArgsConstructor; 5 | import org.springframework.boot.context.properties.ConfigurationProperties; 6 | import org.springframework.stereotype.Component; 7 | 8 | @Component 9 | @ConfigurationProperties(prefix = "das.mysql") 10 | @Data 11 | @NoArgsConstructor 12 | public class MysqlBinlogConfig { 13 | private String host = "localhost"; 14 | 15 | private Integer port = 3306; 16 | 17 | private String username = "root"; 18 | 19 | private String password = ""; 20 | 21 | private long serverId = 1L; 22 | 23 | /** 24 | * binlog文件名 25 | */ 26 | private String binlogName = ""; 27 | 28 | /** 29 | * binlog偏移量 30 | */ 31 | private Long position = -1L; 32 | 33 | /** 34 | * binlog文件和偏移量保存位置 35 | */ 36 | private String binlogPositionFile = "binlog.pos"; 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/sender/kafka/KafkaSender.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.sender.kafka; 2 | 3 | import org.fh.gae.das.sender.DasSender; 4 | import org.fh.gae.das.template.DasSerializable; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.beans.factory.annotation.Value; 7 | import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; 8 | import org.springframework.kafka.core.KafkaTemplate; 9 | import org.springframework.stereotype.Component; 10 | 11 | @Component 12 | @ConditionalOnProperty(prefix = "das.store.kafka", name = "enable", matchIfMissing = 
false, havingValue = "true") 13 | public class KafkaSender implements DasSender { 14 | @Value("${das.store.kafka.topic}") 15 | private String topic; 16 | 17 | @Autowired 18 | private KafkaTemplate kafkaTemplate; 19 | 20 | @Override 21 | public void send(DasSerializable data) { 22 | kafkaTemplate.send(topic, new String(data.serialize())); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | application: 3 | name: GAE-DAS 4 | 5 | datasource: 6 | driver-class-name: com.mysql.jdbc.Driver 7 | url: jdbc:mysql://localhost:3306/gae-das 8 | username: root 9 | 10 | profiles: 11 | active: peer1 12 | 13 | 14 | das: 15 | mysql: 16 | host: 127.0.0.1 17 | port: 3306 18 | username: root 19 | password: 20 | 21 | # binlog文件名 22 | binlog-name: "" 23 | # binlog偏移量, -1表示从当前位置开始 24 | position: -1 25 | # binlog同步点保存文件名, 下次启动时会从该点继续 26 | binlog-position-file: binlog.pos 27 | 28 | 29 | # 增量索引配置 30 | store: 31 | # 文件存储 32 | file: 33 | enable: true 34 | # 文件名 35 | path: incr.idx 36 | 37 | kafka: 38 | enable: false 39 | topic: gae-idx 40 | addr: 127.0.0.1:8092 41 | 42 | ha: 43 | beat-interval: 5000 44 | 45 | logging: 46 | level: 47 | org.fh.gae.das: info 48 | org.springframework: error 49 | pattern: 50 | console: "[%d{HH:mm:ss}] [%thread] %-5level %logger{36} - %msg%n" 51 | 52 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/OpType.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template; 2 | 3 | import com.github.shyiko.mysql.binlog.event.EventType; 4 | import org.fh.gae.das.exception.InvalidDasTemplateException; 5 | 6 | public enum OpType { 7 | ADD, 8 | UPDATE, 9 | DELETE, 10 | OTHER; 11 | 12 | public static OpType of(String str) { 13 | switch (str) { 14 | case "insert": 15 | return ADD; 16 | 17 | case "update": 18 | return UPDATE; 19 | 20 | case "delete": 21 | return DELETE; 22 | } 23 | 24 | throw new InvalidDasTemplateException("invalid type: " + str); 25 | } 26 | 27 | public static OpType of(EventType eventType) { 28 | switch (eventType) { 29 | case WRITE_ROWS: 30 | return ADD; 31 | 32 | case UPDATE_ROWS: 33 | return UPDATE; 34 | 35 | case DELETE_ROWS: 36 | return DELETE; 37 | 38 | default: 39 | return OTHER; 40 | 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/DasAppEventListener.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.fh.gae.das.ha.HaServer; 5 | import org.springframework.context.ApplicationListener; 6 | import org.springframework.context.event.ApplicationContextEvent; 7 | import org.springframework.context.event.ContextClosedEvent; 8 | import org.springframework.context.event.ContextRefreshedEvent; 9 | 10 | @Slf4j 11 | public class DasAppEventListener implements ApplicationListener { 12 | @Override 13 | public void onApplicationEvent(ApplicationContextEvent event) { 14 | // 上下文关闭时 15 | if (event instanceof ContextClosedEvent) { 16 | return; 17 | } 18 | 19 | // 上下文初始化完毕 20 | if (event instanceof ContextRefreshedEvent) { 21 | HaServer haServer = event.getApplicationContext().getBean(HaServer.class); 22 | 23 | // 启动心跳服务器 24 | haServer.start(); 25 | // 
coordinationService.startBinlogClient(); 26 | // 启动binlog client 27 | // binlogClient.connect(); 28 | } 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /src/main/resources/template.json: -------------------------------------------------------------------------------- 1 | { 2 | "database": "gae-das", 3 | "tableList": [ 4 | { 5 | "tableName": "new_table", 6 | "level": 1, 7 | 8 | "insert": [ 9 | {"column": "id"}, 10 | {"column": "name"}, 11 | {"column": "age"} 12 | ], 13 | "update": [ 14 | {"column": "id"}, 15 | {"column": "name"}, 16 | {"column": "age"} 17 | ], 18 | "delete": [ 19 | {"column": "id"}, 20 | {"column": "name"}, 21 | {"column": "age"} 22 | ] 23 | }, 24 | { 25 | "tableName": "acc", 26 | "level": 2, 27 | 28 | "insert": [ 29 | {"column": "id"}, 30 | {"column": "name"} 31 | ], 32 | "update": [ 33 | {"column": "id"}, 34 | {"column": "name"} 35 | ], 36 | "delete": [ 37 | {"column": "id"}, 38 | {"column": "name"} 39 | ] 40 | } 41 | 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/level/TextDasLevel.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template.level; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.fh.gae.das.template.OpType; 5 | 6 | @Slf4j 7 | public class TextDasLevel extends DasLevel { 8 | @Override 9 | public byte[] serialize() { 10 | // 操作类型 11 | OpType opType = getOpType(); 12 | 13 | int valueSize = getFieldValueMap().size(); 14 | 15 | StringBuilder sb = new StringBuilder(valueSize * 10); 16 | sb.append(getTable().getLevel()).append("\t"); 17 | sb.append(opType.ordinal()).append("\t"); 18 | 19 | // 遍历当前层级对应的操作类型的所有字段 20 | for (String fieldName : getTable().getOpTypeFieldSetMap().get(opType)) { 21 | // 取出字段值 22 | String fieldValue = getFieldValueMap().get(fieldName); 23 | if (null == fieldValue) { 24 | log.warn("field {} have no value", fieldName); 25 | continue; 26 | } 27 | 28 | sb.append(fieldValue); 29 | sb.append("\t"); 30 | } 31 | 32 | int len = sb.length(); 33 | sb.replace(len - 1, len, ""); 34 | 35 | return sb.toString().getBytes(); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/heartbeat/JsonRequestDecoder.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha.heartbeat; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import io.netty.channel.ChannelHandler; 5 | import io.netty.channel.ChannelHandlerContext; 6 | import io.netty.handler.codec.MessageToMessageDecoder; 7 | import io.netty.handler.codec.http.FullHttpRequest; 8 | import lombok.extern.slf4j.Slf4j; 9 | import org.springframework.stereotype.Component; 10 | 11 | import java.nio.charset.Charset; 12 | import java.util.List; 13 | 14 | @Component 15 | @ChannelHandler.Sharable 16 | @Slf4j 17 | public class JsonRequestDecoder extends MessageToMessageDecoder { 18 | @Override 19 | protected void decode(ChannelHandlerContext ctx, FullHttpRequest msg, List out) throws Exception { 20 | // 只允许POST请求 21 | boolean isPost = msg.method().name().equals("POST"); 22 | if (false == isPost) { 23 | ctx.close(); 24 | return; 25 | } 26 | 27 | // 取出body 28 | byte[] body = msg.content().copy().array(); 29 | 30 | 31 | // 反序列化 32 | BeatMessage requestObj = JSON.parseObject(body, 0, body.length, Charset.forName("utf-8"), BeatMessage.class); 33 | out.add(requestObj); 34 | 35 
| // log.info("heatbeat\t{}", requestObj); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/binlog/BinlogPosition.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql.binlog; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | import org.fh.gae.das.template.DasSerializable; 7 | import org.springframework.util.StringUtils; 8 | 9 | @Data 10 | @AllArgsConstructor 11 | @NoArgsConstructor 12 | public class BinlogPosition implements DasSerializable { 13 | private String binlogName = ""; 14 | 15 | private long position = -1; 16 | 17 | public String toString() { 18 | return this.binlogName + ":" + position; 19 | } 20 | 21 | @Override 22 | public byte[] serialize() { 23 | return toString().getBytes(); 24 | } 25 | 26 | public static BinlogPosition deserialize(byte[] buf, int start, int len) { 27 | return deserialize(new String(buf, start, len)); 28 | } 29 | 30 | public static BinlogPosition deserialize(String data) { 31 | if (StringUtils.isEmpty(data)) { 32 | return null; 33 | } 34 | 35 | String[] terms = data.split(":"); 36 | if (terms.length != 2) { 37 | return null; 38 | } 39 | 40 | String name = terms[0]; 41 | long pos = Long.valueOf(terms[1]); 42 | 43 | return new BinlogPosition(name, pos); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/heartbeat/BeatTask.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha.heartbeat; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.fh.gae.das.ha.CoordinationService; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.stereotype.Component; 7 | 8 | import java.util.Date; 9 | import java.util.concurrent.TimeUnit; 10 | 11 | @Component 12 | @Slf4j 13 | public class BeatTask implements Runnable { 14 | @Autowired 15 | private CoordinationService coordinationService; 16 | 17 | @Override 18 | public void run() { 19 | if (null == coordinationService.status()) { 20 | // 尝试变成master 21 | coordinationService.startBinlogClient(); 22 | 23 | return; 24 | } 25 | 26 | if (CoordinationService.Status.MASTER == coordinationService.status()) { 27 | // 发送心跳 28 | log.info("sending heartbeat"); 29 | coordinationService.heartbeat(); 30 | 31 | return; 32 | } 33 | 34 | if (CoordinationService.Status.SLAVE == coordinationService.status()) { 35 | // 检查上次心跳间隔 36 | if (new Date().getTime() - BeatTimeHolder.lastBeat >= TimeUnit.SECONDS.toMillis(5)) { 37 | log.info("no beat received past 5s, change to master"); 38 | coordinationService.startBinlogClient(); 39 | } 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/test/java/org/fh/gae/das/test/TextDasLevelTest.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.test; 2 | 3 | import org.fh.gae.das.template.level.DasLevel; 4 | import org.fh.gae.das.template.DasTable; 5 | import org.fh.gae.das.template.OpType; 6 | import org.fh.gae.das.template.level.TextDasLevel; 7 | import org.junit.Test; 8 | 9 | import java.util.ArrayList; 10 | import java.util.HashMap; 11 | import java.util.List; 12 | import java.util.Map; 13 | 14 | public class TextDasLevelTest { 15 | @Test 16 | public void testSerialize() { 17 | DasLevel dasLevel = 
new TextDasLevel(); 18 | dasLevel.setOpType(OpType.ADD); 19 | 20 | Map> fields = new HashMap<>(); 21 | List fieldList = new ArrayList<>(); 22 | fieldList.add("id"); 23 | fieldList.add("name"); 24 | fieldList.add("age"); 25 | fields.put(OpType.ADD, fieldList); 26 | DasTable table = new DasTable(); 27 | table.setOpTypeFieldSetMap(fields); 28 | table.setTableName("gae"); 29 | table.setLevel("1"); 30 | dasLevel.setTable(table); 31 | 32 | Map values = new HashMap<>(); 33 | values.put("name", "gae"); 34 | values.put("id", "1"); 35 | values.put("age", "24"); 36 | dasLevel.setFieldValueMap(values); 37 | 38 | byte[] buf = dasLevel.serialize(); 39 | System.out.println(new String(buf, 0, buf.length)); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/heartbeat/BeatMessage.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha.heartbeat; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | * 心跳数据定义 9 | */ 10 | @Data 11 | @NoArgsConstructor 12 | @AllArgsConstructor 13 | public class BeatMessage { 14 | private int type; 15 | 16 | private String binlog; 17 | 18 | private long position; 19 | 20 | public static class Builder { 21 | public static BeatMessage buildReport(String binlog, long pos) { 22 | return new BeatMessage(MessageType.REPORT.code, binlog, pos); 23 | } 24 | 25 | public static BeatMessage buildAck() { 26 | return new BeatMessage(MessageType.ACK.code, "", 0); 27 | } 28 | 29 | public static BeatMessage buildUnknown() { 30 | return new BeatMessage(MessageType.UNKNOWN.code, "", 0); 31 | } 32 | 33 | public static BeatMessage buildMaster() { 34 | return new BeatMessage(MessageType.MASTER.code, "", 0); 35 | } 36 | } 37 | 38 | public static enum MessageType { 39 | REPORT(1), 40 | ACK(0), 41 | MASTER(2), 42 | 43 | UNKNOWN(-1); 44 | 45 | private int code; 46 | 47 | MessageType(int code) { 48 | this.code = code; 49 | } 50 | 51 | public int code() { 52 | return this.code; 53 | } 54 | 55 | public static MessageType of(int code) { 56 | switch (code) { 57 | case 1: 58 | return REPORT; 59 | case 0: 60 | return ACK; 61 | case 2: 62 | return MASTER; 63 | } 64 | 65 | return UNKNOWN; 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/binlog/FileBinlogPositionStore.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql.binlog; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.fh.gae.das.mysql.BinlogClient; 5 | import org.fh.gae.das.mysql.MysqlBinlogConfig; 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.stereotype.Component; 8 | 9 | import java.io.FileInputStream; 10 | import java.io.FileOutputStream; 11 | 12 | @Component 13 | @Slf4j 14 | public class FileBinlogPositionStore implements BinlogPositionStore { 15 | @Autowired 16 | protected BinlogClient client; 17 | 18 | @Autowired 19 | private MysqlBinlogConfig config; 20 | 21 | private byte[] loadBuf = new byte[512]; 22 | 23 | @Override 24 | public BinlogPosition load() { 25 | try (FileInputStream in = new FileInputStream(config.getBinlogPositionFile())) { 26 | int len = in.read(loadBuf); 27 | return BinlogPosition.deserialize(loadBuf, 0, len); 28 | 29 | } catch (Exception e) { 30 | log.error("binlog position failed to load, {}", e.getMessage()); 
31 | } 32 | 33 | return null; 34 | } 35 | 36 | @Override 37 | public int save(BinlogPosition binlogPosition) { 38 | try (FileOutputStream out = new FileOutputStream(config.getBinlogPositionFile())) { 39 | out.write(binlogPosition.serialize()); 40 | 41 | } catch (Exception e) { 42 | log.error("binlog position failed to save, {}", e); 43 | 44 | return -1; 45 | } 46 | 47 | 48 | log.debug("binlog position {}:{} saved", binlogPosition.getBinlogName(), binlogPosition.getPosition()); 49 | return 0; 50 | } 51 | 52 | @Override 53 | public BinlogPosition extract() { 54 | return new BinlogPosition( 55 | client.getBinlogName(), 56 | client.getBinlogPos() 57 | ); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/sender/kafka/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.sender.kafka; 2 | 3 | import org.apache.kafka.clients.producer.ProducerConfig; 4 | import org.apache.kafka.common.serialization.StringSerializer; 5 | import org.springframework.beans.factory.annotation.Value; 6 | import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; 7 | import org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | import org.springframework.kafka.core.DefaultKafkaProducerFactory; 10 | import org.springframework.kafka.core.KafkaTemplate; 11 | import org.springframework.kafka.core.ProducerFactory; 12 | 13 | import java.util.HashMap; 14 | import java.util.Map; 15 | 16 | @Configuration 17 | @ConditionalOnProperty(prefix = "das.store.kafka", name = "enable", matchIfMissing = false, havingValue = "true") 18 | public class KafkaConfig { 19 | @Value("${das.store.kafka.addr}") 20 | private String kafkaAddr; 21 | 22 | public Map<String, Object> producerConfigs() { 23 | Map<String, Object> props = new HashMap<>(); 24 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaAddr); 25 | props.put(ProducerConfig.RETRIES_CONFIG, 0); 26 | props.put(ProducerConfig.BATCH_SIZE_CONFIG, 4096); 27 | props.put(ProducerConfig.LINGER_MS_CONFIG, 1); 28 | props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 40960); 29 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 30 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 31 | return props; 32 | } 33 | 34 | public ProducerFactory<String, String> producerFactory() { 35 | return new DefaultKafkaProducerFactory<>(producerConfigs()); 36 | } 37 | 38 | @Bean 39 | public KafkaTemplate<String, String> kafkaTemplate() { 40 | return new KafkaTemplate<>(producerFactory()); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/NettyUtils.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import io.netty.buffer.Unpooled; 5 | import io.netty.handler.codec.http.DefaultFullHttpRequest; 6 | import io.netty.handler.codec.http.DefaultFullHttpResponse; 7 | import io.netty.handler.codec.http.FullHttpRequest; 8 | import io.netty.handler.codec.http.FullHttpResponse; 9 | import io.netty.handler.codec.http.HttpHeaderNames; 10 | import io.netty.handler.codec.http.HttpMethod; 11 | import io.netty.handler.codec.http.HttpResponseStatus; 12 | import io.netty.handler.codec.http.HttpVersion; 13 | import org.fh.gae.das.ha.heartbeat.BeatMessage; 14 | 15 | public class NettyUtils { 16 | 17 | public static 
FullHttpRequest buildRequest(BeatMessage msg) { 18 | String respJson = JSON.toJSONString(msg); 19 | 20 | FullHttpRequest request = new DefaultFullHttpRequest( 21 | HttpVersion.HTTP_1_1, 22 | HttpMethod.POST, 23 | "/", 24 | Unpooled.wrappedBuffer(respJson.getBytes()) 25 | ); 26 | 27 | request.headers().set( 28 | HttpHeaderNames.CONTENT_TYPE.toString(), 29 | "application/json;charset=utf8" 30 | ); 31 | request.headers().set( 32 | HttpHeaderNames.CONTENT_LENGTH.toString(), 33 | request.content().readableBytes() 34 | ); 35 | 36 | return request; 37 | } 38 | 39 | public static FullHttpResponse buildResponse(BeatMessage msg) { 40 | String respJson = JSON.toJSONString(msg); 41 | // byte[] buf = JSON.toJSONBytes(bidResponse, jsonSnakeConfig); 42 | 43 | FullHttpResponse response = new DefaultFullHttpResponse( 44 | HttpVersion.HTTP_1_1, 45 | HttpResponseStatus.OK, 46 | Unpooled.wrappedBuffer(respJson.getBytes()) 47 | ); 48 | 49 | response.headers().set( 50 | HttpHeaderNames.CONTENT_TYPE.toString(), 51 | "application/json;charset=utf8" 52 | ); 53 | 54 | return response; 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/listener/DemoTableListener.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql.listener; 2 | 3 | import com.github.shyiko.mysql.binlog.event.EventType; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.fh.gae.das.mysql.MysqlRowData; 6 | import org.fh.gae.das.sender.file.FileSender; 7 | import org.fh.gae.das.template.DasTable; 8 | import org.fh.gae.das.template.OpType; 9 | import org.fh.gae.das.template.level.DasLevel; 10 | import org.fh.gae.das.template.level.TextDasLevel; 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.stereotype.Component; 13 | 14 | import javax.annotation.PostConstruct; 15 | import java.util.List; 16 | import java.util.Map; 17 | 18 | @Component 19 | @Slf4j 20 | public class DemoTableListener implements BizListener { 21 | @Autowired 22 | private AggregationListener aggregationListener; 23 | 24 | @Autowired 25 | private FileSender store; 26 | 27 | @PostConstruct 28 | private void register() { 29 | aggregationListener.register("gae-das", "acc", this); 30 | aggregationListener.register("gae-das", "new_table", this); 31 | } 32 | 33 | @Override 34 | public void onEvent(MysqlRowData eventData) { 35 | log.info(eventData.toString()); 36 | 37 | DasTable table = eventData.getTable(); 38 | EventType eventType = eventData.getEventType(); 39 | 40 | // 构造层级对象 41 | DasLevel level = new TextDasLevel(); 42 | level.setTable(table); 43 | OpType opType = OpType.of(eventType); 44 | level.setOpType(opType); 45 | 46 | // 取出模板中该操作对应的字段列表 47 | List fieldList = table.getOpTypeFieldSetMap().get(opType); 48 | if (null == fieldList) { 49 | log.warn("{} not support for {}", opType, table.getTableName()); 50 | return; 51 | } 52 | 53 | for (Map.Entry entry : eventData.getAfter().entrySet()) { 54 | String colName = entry.getKey(); 55 | String colValue = entry.getValue(); 56 | 57 | level.getFieldValueMap().put(colName, colValue); 58 | } 59 | 60 | store.send(level); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/DasTemplate.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template; 2 | 3 | import lombok.Data; 4 | import 
org.fh.gae.das.template.vo.Column; 5 | import org.fh.gae.das.template.vo.Table; 6 | import org.fh.gae.das.template.vo.Template; 7 | import org.fh.gae.das.utils.GaeCollectionUtils; 8 | 9 | import java.util.ArrayList; 10 | import java.util.HashMap; 11 | import java.util.List; 12 | import java.util.Map; 13 | 14 | @Data 15 | public class DasTemplate { 16 | private String database; 17 | 18 | private Map<String, DasTable> tableMap = new HashMap<>(); 19 | 20 | public static DasTemplate parse(Template temp) { 21 | DasTemplate dasTemplate = new DasTemplate(); 22 | dasTemplate.setDatabase(temp.getDatabase()); 23 | 24 | // 遍历表 25 | for (Table table : temp.getTableList()) { 26 | String name = table.getTableName(); 27 | Integer level = table.getLevel(); 28 | 29 | DasTable dasTable = new DasTable(); 30 | dasTable.setTableName(name); 31 | dasTable.setLevel(level.toString()); 32 | dasTemplate.tableMap.put(name, dasTable); 33 | 34 | // 遍历列 35 | Map<OpType, List<String>> opTypeFieldSetMap = dasTable.getOpTypeFieldSetMap(); 36 | 37 | for (Column column : table.getInsert()) { 38 | String colName = column.getColumn(); 39 | 40 | GaeCollectionUtils.getAndCreateIfNeed( 41 | OpType.ADD, 42 | opTypeFieldSetMap, 43 | () -> new ArrayList<>() 44 | ).add(colName); 45 | } 46 | 47 | for (Column column : table.getUpdate()) { 48 | String colName = column.getColumn(); 49 | 50 | GaeCollectionUtils.getAndCreateIfNeed( 51 | OpType.UPDATE, 52 | opTypeFieldSetMap, 53 | () -> new ArrayList<>() 54 | ).add(colName); 55 | } 56 | 57 | for (Column column : table.getDelete()) { 58 | String colName = column.getColumn(); 59 | 60 | GaeCollectionUtils.getAndCreateIfNeed( 61 | OpType.DELETE, 62 | opTypeFieldSetMap, 63 | () -> new ArrayList<>() 64 | ).add(colName); 65 | } 66 | } 67 | 68 | return dasTemplate; 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/HaServer.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha; 2 | 3 | import io.netty.bootstrap.ServerBootstrap; 4 | import io.netty.channel.ChannelFuture; 5 | import io.netty.channel.ChannelInitializer; 6 | import io.netty.channel.nio.NioEventLoopGroup; 7 | import io.netty.channel.socket.SocketChannel; 8 | import io.netty.channel.socket.nio.NioServerSocketChannel; 9 | import io.netty.handler.codec.http.HttpObjectAggregator; 10 | import io.netty.handler.codec.http.HttpServerCodec; 11 | import io.netty.handler.timeout.IdleStateHandler; 12 | import lombok.extern.slf4j.Slf4j; 13 | import org.fh.gae.das.ha.heartbeat.BeatHandler; 14 | import org.fh.gae.das.ha.heartbeat.BeatTask; 15 | import org.fh.gae.das.ha.heartbeat.JsonRequestDecoder; 16 | import org.springframework.beans.factory.annotation.Autowired; 17 | import org.springframework.beans.factory.annotation.Value; 18 | import org.springframework.stereotype.Component; 19 | 20 | import java.util.Random; 21 | import java.util.concurrent.TimeUnit; 22 | 23 | @Slf4j 24 | @Component 25 | public class HaServer { 26 | @Autowired 27 | private JsonRequestDecoder decoder; 28 | 29 | @Autowired 30 | private BeatHandler beatHandler; 31 | 32 | @Autowired 33 | private BeatTask beatTask; 34 | 35 | @Value("${das.ha.port}") 36 | private int port; 37 | 38 | @Value("${das.ha.beat-interval}") 39 | private int beatInterval; 40 | 41 | private NioEventLoopGroup group = new NioEventLoopGroup(1); 42 | 43 | private NioEventLoopGroup scheduleGroup = new NioEventLoopGroup(1); 44 | 45 | public void start() { 46 | try { 47 | ServerBootstrap boot = new 
ServerBootstrap(); 48 | boot.group(group) 49 | .channel(NioServerSocketChannel.class) 50 | .localAddress("127.0.0.1", port) 51 | .childHandler(new ChannelInitializer() { 52 | @Override 53 | protected void initChannel(SocketChannel socketChannel) { 54 | socketChannel.pipeline().addLast("codec", new HttpServerCodec()); 55 | socketChannel.pipeline().addLast("aggregator", new HttpObjectAggregator(512 * 1024)); 56 | socketChannel.pipeline().addLast("jsonDecoder", decoder); 57 | socketChannel.pipeline().addLast("handler", beatHandler); 58 | } 59 | }); 60 | 61 | ChannelFuture f = boot.bind().sync(); 62 | 63 | log.info("start receiving heartbeat at {}:{}", "127.0.0.1", port); 64 | startBeat(); 65 | 66 | // f.channel().closeFuture().sync(); 67 | 68 | } catch (Exception e) { 69 | e.printStackTrace(); 70 | group.shutdownGracefully(); 71 | scheduleGroup.shutdownGracefully(); 72 | } 73 | } 74 | 75 | private void startBeat() { 76 | Random random = new Random(); 77 | scheduleGroup.scheduleAtFixedRate(beatTask, random.nextInt(1000), beatInterval, TimeUnit.MILLISECONDS); 78 | } 79 | 80 | } 81 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/template/TemplateHolder.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.template; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.fh.gae.das.exception.InvalidDasTemplateException; 6 | import org.fh.gae.das.template.vo.Template; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.jdbc.core.JdbcTemplate; 9 | import org.springframework.stereotype.Component; 10 | 11 | import javax.annotation.PostConstruct; 12 | import java.io.IOException; 13 | import java.io.InputStream; 14 | import java.nio.charset.Charset; 15 | import java.util.List; 16 | import java.util.Map; 17 | 18 | @Component 19 | @Slf4j 20 | public class TemplateHolder { 21 | private DasTemplate dasTemplate; 22 | 23 | @Autowired 24 | private JdbcTemplate jdbcTemplate; 25 | 26 | private String SQL_SCHEMA = "SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = ? 
AND TABLE_NAME = ?"; 27 | 28 | @PostConstruct 29 | private void init() { 30 | loadJson("template.json"); 31 | } 32 | 33 | /** 34 | * 加载配置文件 35 | * @param path 36 | */ 37 | public void loadJson(String path) { 38 | ClassLoader cl = Thread.currentThread().getContextClassLoader(); 39 | InputStream inStream = cl.getResourceAsStream(path); 40 | 41 | try { 42 | Template template = JSON.parseObject(inStream, Charset.defaultCharset(), Template.class); 43 | this.dasTemplate = DasTemplate.parse(template); 44 | loadMeta(); 45 | 46 | } catch (IOException e) { 47 | log.error(e.getMessage()); 48 | throw new InvalidDasTemplateException("fail to parse json file"); 49 | } 50 | } 51 | 52 | public DasTable getTable(String tableName) { 53 | return dasTemplate.getTableMap().get(tableName); 54 | } 55 | 56 | /** 57 | * 查询模板中每张表的schema信息,因为binlog中不包含列名信息 58 | */ 59 | private void loadMeta() { 60 | String db = dasTemplate.getDatabase(); 61 | 62 | 63 | for (Map.Entry entry : dasTemplate.getTableMap().entrySet()) { 64 | DasTable table = entry.getValue(); 65 | String tableName = table.getTableName(); 66 | 67 | List updateFields = table.getOpTypeFieldSetMap().get(OpType.UPDATE); 68 | List insertFields = table.getOpTypeFieldSetMap().get(OpType.ADD); 69 | List deleteFields = table.getOpTypeFieldSetMap().get(OpType.DELETE); 70 | 71 | jdbcTemplate.query(SQL_SCHEMA, new Object[]{db, tableName}, (rs, i) -> { 72 | int pos = rs.getInt("ORDINAL_POSITION"); 73 | String colName = rs.getString("COLUMN_NAME"); 74 | 75 | if (updateFields.contains(colName) 76 | || insertFields.contains(colName) 77 | || deleteFields.contains(colName)) { 78 | 79 | table.getPosMap().put(pos - 1, colName); 80 | } 81 | 82 | return null; 83 | }); 84 | } 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GAE-DAS 2 | 监听mysql binlog并生成GAE使用的增量索引, 输出至本地文件或kafka,支持**主从热备**高可用方案。 3 | 4 | GAE-DAS使用[mysql-binlog-connector](https://github.com/shyiko/mysql-binlog-connector-java)库进行binlog监听。程序启动连接成功后会查询`information_schema`库的`columns`表来读取指定数据库所有表的元数据(列名,位置,数据类型等),并使用该数据解析`TABLE_MAP`事件。因此,当数据表结构发生变化时,最好重启一下DAS。(除非新增列、位于所有列的最后且该列不涉及索引生成) 5 | 6 | 7 | 8 | 项目还处于开发状态。 9 | 10 | ## 架构示意 11 | 12 | ![das](http://ovbyjzegm.bkt.clouddn.com/das.png) 13 | 14 | - BinlogClient 15 | 16 | [mysql-binlog-connector](https://github.com/shyiko/mysql-binlog-connector-java)提供,与Mysql通讯。 17 | 18 | - TemplateHolder 19 | 20 | 解析用户配置的模板,创建并持有`DasTemplate` 对象 21 | 22 | - AggregationListener 23 | 24 | 将`TABLE_MAP`和`UPDATE/INSERT/DELETE EVENT`封装成`MysqlRowData`并传递给下游业务监听器 25 | 26 | - BizListener 27 | 28 | 用户实现的业务监听器,需先向`AggregationListener`进行注册,注册时声明自己对哪个库、哪张表感兴趣;同一个BizListener可以注册多次。`AggregationListener`在收到相关表的事件后会触发`onEvent()`方法。 29 | 30 | ## 双机热备 31 | 32 | GAE-DAS支持双机热备的高可用部署方案,同时部署两个实例,当一台挂掉后另一台可自动接替: 33 | 34 | ![HA](http://ovbyjzegm.bkt.clouddn.com/das-ha.png) 35 | 36 | 37 | 38 | - 使用主从模式时,两个实例部署后会自动"商讨"谁主谁从 39 | - 确定主从关系后, master定时向slave发送心跳,心跳包中包含当前master的binlog位置信息 40 | - 当slave超过一定时间未收到心跳时,会自动从上次心跳包里的binlog位置开始监听binlog,变为master 41 | - 原master修复上线后,会自动变为slave 42 | 43 | 44 | 45 | ## 模板 46 | 47 | 通过配置模板`template.json`来指定索引如何生成(配置对哪些表中的哪些字段感兴趣): 48 | ``` 49 | { 50 | "database": "gae-das", # 库名 51 | "tableList": [ 52 | { 53 | "tableName": "new_table", # 表名 54 | "level": 1, # 该表所在层级 55 | 56 | # 对 insert 操作感兴趣 57 | "insert": [ 58 | # 输出索引记录的字段顺序 59 | # 此例为 60 | # id\tname\tage 61 | {"column": "id"}, 62 | {"column": "name"}, 63 | {"column": "age"} 64 | ], 
65 | "update": [ 66 | {"column": "id"}, 67 | {"column": "name"}, 68 | {"column": "age"} 69 | ], 70 | "delete": [ 71 | {"column": "id"}, 72 | {"column": "name"}, 73 | {"column": "age"} 74 | ] 75 | }, 76 | { 77 | "tableName": "acc", 78 | "level": 2, 79 | 80 | "insert": [ 81 | {"column": "id"}, 82 | {"column": "name"} 83 | ], 84 | "update": [ 85 | {"column": "id"}, 86 | {"column": "name"} 87 | ], 88 | "delete": [ 89 | {"column": "id"}, 90 | {"column": "name"} 91 | ] 92 | } 93 | 94 | ] 95 | } 96 | ``` 97 | 输出的索引文件为词表(`\t`分隔): 98 | ``` 99 | # 层级 # 操作类型(0:insert, 1:update, 2: delete) # 数据项(模板中的column) 100 | 1 0 6 apple 101 | 1 1 6 apples 102 | 2 2 5 aaa 103 | 2 0 7 pear 104 | ``` 105 | mysql连接信息和binlog同步设置见`application.yml`文件 106 | 107 | ## 构建运行 108 | 增量索引写到文件时, 除mysql连接信息和`template.json`外不需要额外配置: 109 | ``` 110 | mvn clean package -Dmaven.test.skip=true 111 | java -jar gae-das.jar 112 | ``` 113 | 增量索引写入kafka时, 需要配置kafka相关信息(见`application.yml`中`das.store.kafka`相关配置) 114 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/sender/file/FileSender.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.sender.file; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.fh.gae.das.exception.DasStoreException; 5 | import org.fh.gae.das.sender.DasSender; 6 | import org.fh.gae.das.template.DasSerializable; 7 | import org.springframework.beans.factory.annotation.Value; 8 | import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; 9 | import org.springframework.stereotype.Component; 10 | 11 | import javax.annotation.PostConstruct; 12 | import java.io.BufferedOutputStream; 13 | import java.io.File; 14 | import java.io.FileNotFoundException; 15 | import java.io.FileOutputStream; 16 | import java.io.IOException; 17 | 18 | /** 19 | * 将增量数据保存到文件中; 20 | * 非线程安全,时刻只能有一个线程运行 21 | */ 22 | @Component 23 | @ConditionalOnProperty(prefix = "das.store.file", name = "enable", matchIfMissing = true, havingValue = "true") 24 | @Slf4j 25 | public class FileSender implements DasSender { 26 | @Value("${das.store.file.path}") 27 | private String filename; 28 | 29 | /** 30 | * 增量文件索引 31 | */ 32 | private int fileIndex = 0; 33 | 34 | /** 35 | * 当前增量文件行数 36 | */ 37 | private int rowNumber = 0; 38 | 39 | /** 40 | * 一个增量文件最大行数 41 | */ 42 | private int maxRowNumber = 10 * 10000; 43 | // private int maxRowNumber = 2; 44 | 45 | private BufferedOutputStream out; 46 | 47 | @PostConstruct 48 | private void init() throws IOException { 49 | findNextFileIndex(); 50 | 51 | String name = filename + "." + fileIndex; 52 | log.info("index file:{}", name); 53 | 54 | FileOutputStream fos = new FileOutputStream(name, true); 55 | out = new BufferedOutputStream(fos); 56 | } 57 | 58 | @Override 59 | public void send(DasSerializable data) { 60 | checkRowNumber(); 61 | byte[] buf = data.serialize(); 62 | 63 | try { 64 | out.write(buf); 65 | out.write('\n'); 66 | out.flush(); 67 | 68 | ++rowNumber; 69 | 70 | } catch (IOException e) { 71 | log.error(e.getMessage()); 72 | throw new DasStoreException("fail to write file"); 73 | } 74 | } 75 | 76 | private void findNextFileIndex() { 77 | for (int ix = 0; ix < 10000; ++ix) { 78 | File f = new File(this.filename + "." 
+ ix); 79 | if (!f.exists()) { 80 | this.fileIndex = ix; 81 | break; 82 | } 83 | } 84 | } 85 | 86 | /** 87 | * 如果行数大于最大值, 则创建新文件, 文件索引数加1 88 | */ 89 | private void checkRowNumber() { 90 | if (rowNumber < maxRowNumber) { 91 | return; 92 | } 93 | 94 | findNextFileIndex(); 95 | try { 96 | String name = this.filename + "." + this.fileIndex; 97 | 98 | FileOutputStream fos = new FileOutputStream(name, true); 99 | out = new BufferedOutputStream(fos); 100 | 101 | this.rowNumber = 0; 102 | 103 | log.info("switch to new index file: {}", name); 104 | 105 | } catch (FileNotFoundException e) { 106 | e.printStackTrace(); 107 | throw new IllegalStateException(e.getMessage()); 108 | } 109 | 110 | 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/mysql/BinlogClient.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.mysql; 2 | 3 | import com.github.shyiko.mysql.binlog.BinaryLogClient; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.fh.gae.das.ha.CoordinationService; 6 | import org.fh.gae.das.ha.heartbeat.BeatTimeHolder; 7 | import org.fh.gae.das.mysql.binlog.BinlogPosition; 8 | import org.fh.gae.das.mysql.binlog.BinlogPositionStore; 9 | import org.fh.gae.das.mysql.listener.AggregationListener; 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.stereotype.Component; 12 | 13 | import javax.annotation.PostConstruct; 14 | import java.io.IOException; 15 | 16 | /** 17 | * 连接mysql 18 | */ 19 | @Component 20 | @Slf4j 21 | public class BinlogClient { 22 | @Autowired 23 | private MysqlBinlogConfig config; 24 | 25 | private BinaryLogClient client; 26 | 27 | @Autowired 28 | private BinlogPositionStore positionStore; 29 | 30 | @Autowired 31 | private AggregationListener listener; 32 | 33 | @Autowired 34 | private CoordinationService coordinationService; 35 | 36 | public void connect() { 37 | new Thread(() -> { 38 | client = new BinaryLogClient( 39 | config.getHost(), 40 | config.getPort(), 41 | config.getUsername(), 42 | config.getPassword() 43 | ); 44 | 45 | BinlogPosition position = resetBinlogPositionInOrder(); 46 | if (null != position) { 47 | log.info("starting from previous position {}", position); 48 | } 49 | 50 | 51 | client.registerEventListener(listener); 52 | client.setServerId(config.getServerId()); 53 | 54 | 55 | try { 56 | log.info("connecting to mysql"); 57 | client.connect(); 58 | log.info("connection to mysql closed"); 59 | 60 | } catch (IOException e) { 61 | e.printStackTrace(); 62 | } 63 | 64 | }).start(); 65 | } 66 | 67 | public void close() { 68 | try { 69 | client.disconnect(); 70 | } catch (IOException e) { 71 | e.printStackTrace(); 72 | } 73 | } 74 | 75 | private BinlogPosition resetBinlogPositionInOrder() { 76 | // 先检查是否有心跳包中的位置信息 77 | if (null != BeatTimeHolder.position) { 78 | return BeatTimeHolder.position; 79 | } 80 | 81 | // 从文件中加载 82 | BinlogPosition binlogPosition = positionStore.load(); 83 | if (null != binlogPosition) { 84 | client.setBinlogFilename(binlogPosition.getBinlogName()); 85 | client.setBinlogPosition(binlogPosition.getPosition()); 86 | return binlogPosition; 87 | } 88 | 89 | // 从配置文件中加载 90 | String binlogName = config.getBinlogName(); 91 | long pos = config.getPosition().longValue(); 92 | if (!binlogName.isEmpty() && -1 != pos) { 93 | client.setBinlogFilename(binlogName); 94 | client.setBinlogPosition(pos); 95 | 96 | return new BinlogPosition(binlogName, pos); 97 | } 98 | 99 | 
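// No saved position from a heartbeat, the position file, or application.yml:
// returning null leaves the BinaryLogClient unconfigured, so streaming starts
// from the master's current binlog position (the same behaviour application.yml
// describes for position: -1).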
return null; 100 | } 101 | 102 | public long getBinlogPos() { 103 | return client.getBinlogPosition(); 104 | } 105 | 106 | public String getBinlogName() { 107 | return client.getBinlogFilename(); 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | org.fh 8 | gae-das 9 | 1.0-SNAPSHOT 10 | 11 | 12 | org.springframework.boot 13 | spring-boot-starter-parent 14 | 1.5.2.RELEASE 15 | 16 | 17 | 18 | 19 | org.springframework.boot 20 | spring-boot-starter 21 | 22 | 23 | 24 | org.projectlombok 25 | lombok 26 | 27 | 28 | 29 | mysql 30 | mysql-connector-java 31 | 32 | 33 | 34 | org.springframework.kafka 35 | spring-kafka 36 | 37 | 38 | 39 | org.springframework.boot 40 | spring-boot-starter-jdbc 41 | 42 | 43 | 44 | io.netty 45 | netty-all 46 | 4.1.14.Final 47 | 48 | 49 | 50 | org.apache.httpcomponents 51 | httpclient 52 | 53 | 54 | 55 | com.github.shyiko 56 | mysql-binlog-connector-java 57 | 0.13.0 58 | 59 | 60 | 61 | 62 | com.alibaba 63 | fastjson 64 | 1.2.40 65 | 66 | 67 | 68 | 69 | 70 | junit 71 | junit 72 | test 73 | 74 | 75 | 76 | org.springframework.boot 77 | spring-boot-configuration-processor 78 | 79 | 80 | 81 | 82 | 83 | 84 | gae-das 85 | 86 | 87 | org.apache.maven.plugins 88 | maven-compiler-plugin 89 | 90 | 1.8 91 | 1.8 92 | 93 | 94 | 95 | org.springframework.boot 96 | spring-boot-maven-plugin 97 | 98 | 99 | 100 | repackage 101 | 102 | 103 | 104 | 105 | 106 | 107 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/heartbeat/BeatHandler.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha.heartbeat; 2 | 3 | import io.netty.channel.ChannelHandler; 4 | import io.netty.channel.ChannelHandlerContext; 5 | import io.netty.channel.ChannelInboundHandlerAdapter; 6 | import io.netty.handler.timeout.IdleStateEvent; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.fh.gae.das.ha.CoordinationService; 9 | import org.fh.gae.das.ha.NettyUtils; 10 | import org.fh.gae.das.mysql.binlog.BinlogPosition; 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.stereotype.Component; 13 | import org.springframework.util.StringUtils; 14 | 15 | import java.util.Date; 16 | 17 | /** 18 | * 处理master的心跳包 19 | */ 20 | @Component 21 | @ChannelHandler.Sharable 22 | @Slf4j 23 | public class BeatHandler extends ChannelInboundHandlerAdapter { 24 | @Autowired 25 | private CoordinationService coordinationService; 26 | 27 | 28 | @Override 29 | public void channelRead(ChannelHandlerContext ctx, Object o) throws Exception { 30 | BeatMessage msg = (BeatMessage) o; 31 | if (BeatMessage.MessageType.REPORT == BeatMessage.MessageType.of(msg.getType())) { 32 | log.info("heartbeat received, {}:{}", msg.getBinlog(), msg.getPosition()); 33 | BeatTimeHolder.lastBeat = new Date().getTime(); 34 | 35 | // 已经是slave状态 36 | // 发送ACK 37 | if (CoordinationService.Status.SLAVE == coordinationService.status()) { 38 | if (!StringUtils.isEmpty(msg.getBinlog())) { 39 | BeatTimeHolder.position = new BinlogPosition(msg.getBinlog(), msg.getPosition()); 40 | } 41 | 42 | ctx.writeAndFlush(NettyUtils.buildResponse(BeatMessage.Builder.buildAck())); 43 | ctx.close(); 44 | 45 | return; 46 | } 47 | 48 | // 已经是master状态 49 | if (CoordinationService.Status.MASTER == coordinationService.status()) { 50 | 
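// Both nodes currently believe they are MASTER; answering with a MASTER message
// makes the reporting peer drop back to SLAVE (see the MASTER branch in
// CoordinationService.sendReport()).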
ctx.writeAndFlush(NettyUtils.buildResponse(BeatMessage.Builder.buildMaster())); 51 | ctx.close(); 52 | return; 53 | } 54 | 55 | // 无状态 56 | // 尝试变成slave 57 | boolean result = coordinationService.trySlave(); 58 | if (result) { 59 | log.info("changed status to SLAVE"); 60 | 61 | if (!StringUtils.isEmpty(msg.getBinlog())) { 62 | BeatTimeHolder.position = new BinlogPosition(msg.getBinlog(), msg.getPosition()); 63 | } 64 | 65 | ctx.writeAndFlush(NettyUtils.buildResponse(BeatMessage.Builder.buildAck())); 66 | ctx.close(); 67 | 68 | return; 69 | 70 | } 71 | 72 | ctx.writeAndFlush(NettyUtils.buildResponse(BeatMessage.Builder.buildUnknown())); 73 | ctx.close(); 74 | return; 75 | } 76 | 77 | ctx.writeAndFlush(NettyUtils.buildResponse(BeatMessage.Builder.buildUnknown())); 78 | ctx.close(); 79 | } 80 | 81 | @Override 82 | public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { 83 | System.out.println(evt); 84 | if (evt instanceof IdleStateEvent) { 85 | IdleStateEvent e = (IdleStateEvent) evt; 86 | 87 | switch (e.state()) { 88 | case READER_IDLE: 89 | // takeOver(ctx); 90 | break; 91 | 92 | case WRITER_IDLE: 93 | // sendBeat(ctx); 94 | break; 95 | } 96 | } 97 | } 98 | 99 | 100 | 101 | @Override 102 | public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { 103 | log.error(cause.getMessage()); 104 | ctx.close(); 105 | } 106 | 107 | } 108 | -------------------------------------------------------------------------------- /src/main/java/org/fh/gae/das/ha/CoordinationService.java: -------------------------------------------------------------------------------- 1 | package org.fh.gae.das.ha; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.http.HttpEntity; 6 | import org.apache.http.client.config.RequestConfig; 7 | import org.apache.http.client.methods.CloseableHttpResponse; 8 | import org.apache.http.client.methods.HttpPost; 9 | import org.apache.http.entity.StringEntity; 10 | import org.apache.http.impl.client.CloseableHttpClient; 11 | import org.apache.http.impl.client.HttpClients; 12 | import org.apache.http.util.EntityUtils; 13 | import org.fh.gae.das.ha.heartbeat.BeatMessage; 14 | import org.fh.gae.das.mysql.BinlogClient; 15 | import org.fh.gae.das.mysql.binlog.BinlogPosition; 16 | import org.fh.gae.das.mysql.binlog.BinlogPositionStore; 17 | import org.springframework.beans.factory.annotation.Autowired; 18 | import org.springframework.beans.factory.annotation.Value; 19 | import org.springframework.stereotype.Component; 20 | 21 | import javax.annotation.PostConstruct; 22 | import java.nio.charset.Charset; 23 | import java.util.Random; 24 | 25 | /** 26 | * 协调服务, 与peer节点"讨论"谁是master 27 | */ 28 | @Component 29 | @Slf4j 30 | public class CoordinationService { 31 | private CloseableHttpClient httpClient; 32 | 33 | @Value("${das.ha.peer-host}") 34 | private String peerHost; 35 | 36 | @Value("${das.ha.peer-port}") 37 | private int peerPort; 38 | 39 | @Autowired 40 | private BinlogClient binlogClient; 41 | 42 | @Autowired 43 | private BinlogPositionStore positionStore; 44 | 45 | /** 46 | * 当前实例状态 47 | */ 48 | private volatile Status status; 49 | 50 | @PostConstruct 51 | private void initClient() { 52 | httpClient = HttpClients.createDefault(); 53 | } 54 | 55 | /** 56 | * 尝试让自己变成master, 如果成功则启动binlog client, 失败则变成slave 57 | */ 58 | public void startBinlogClient() { 59 | boolean master = tryMaster(); 60 | if (master) { 61 | log.info("change status to MASTER"); 62 | binlogClient.connect(); 63 | 
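// This node now owns binlog consumption; BeatTask (scheduled by HaServer.startBeat())
// will report the latest saved binlog position to the peer on every beat interval.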
63 |             return;
64 |         }
65 |
66 |         boolean slave = trySlave();
67 |         if (slave) {
68 |             log.info("change status to SLAVE");
69 |         }
70 |     }
71 |
72 |     /**
73 |      * Try to become the master.
74 |      *
75 |      * @return true if this instance successfully became the master
76 |      */
77 |     public synchronized boolean tryMaster() {
78 |         log.info("trying MASTER...");
79 |         boolean master = doTryMaster("", -100);
80 |         if (master) {
81 |             this.status = Status.MASTER;
82 |             return true;
83 |         }
84 |
85 |         return false;
86 |     }
87 |
88 |     /**
89 |      * Send a heartbeat carrying the current binlog position.
90 |      * @return true if the peer acknowledged the heartbeat
91 |      */
92 |     public boolean heartbeat() {
93 |         BinlogPosition position = positionStore.load();
94 |         return sendReport(position.getBinlogName(), position.getPosition());
95 |     }
96 |
97 |     public void changeStatus(Status newStatus) {
98 |         if (this.status == Status.SLAVE && newStatus == Status.MASTER) {
99 |             binlogClient.connect();
100 |
101 |         } else if (this.status == Status.MASTER && newStatus == Status.SLAVE) {
102 |             binlogClient.close();
103 |         }
104 |
105 |         this.status = newStatus;
106 |     }
107 |
108 |     private boolean doTryMaster(String binlog, long pos) {
109 |         return sendReport(binlog, pos);
110 |     }
111 |
112 |     /**
113 |      * Try to become the slave.
114 |      * @return true on success
115 |      */
116 |     public synchronized boolean trySlave() {
117 |         if (null == status) {
118 |             status = Status.SLAVE;
119 |             return true;
120 |         }
121 |
122 |         return false;
123 |     }
124 |
125 |     public synchronized Status status() {
126 |         return this.status;
127 |     }
128 |
129 |     /**
130 |      * Send an HTTP request reporting this instance's binlog position to the peer.
131 |      * @param binlog current binlog file name
132 |      * @param pos current binlog position
133 |      * @return true if this instance may become master, false otherwise
134 |      */
135 |     private boolean sendReport(String binlog, long pos) {
136 |         BeatMessage msg = BeatMessage.Builder.buildReport(binlog, pos);
137 |
138 |         HttpPost post = new HttpPost("http://" + peerHost + ":" + peerPort);
139 |         StringEntity body = new StringEntity(JSON.toJSONString(msg), Charset.defaultCharset());
140 |         post.setEntity(body);
141 |
142 |         Random random = new Random();
143 |         int timeout = random.nextInt(1500) + 100;
144 |         RequestConfig config = RequestConfig.custom()
145 |                 .setConnectTimeout(timeout)
146 |                 .setSocketTimeout(timeout)
147 |                 .setConnectionRequestTimeout(timeout)
148 |                 .build();
149 |         post.setConfig(config);
150 |
151 |
152 |         try (CloseableHttpResponse response = httpClient.execute(post)) {
153 |             HttpEntity entity = response.getEntity();
154 |             String json = EntityUtils.toString(entity);
155 |
156 |             BeatMessage respMsg = JSON.parseObject(json, BeatMessage.class);
157 |             // peer acknowledged
158 |             if (BeatMessage.MessageType.ACK.code() == respMsg.getType()) {
159 |                 log.info("ACK received");
160 |                 return true;
161 |             }
162 |
163 |             // the peer is already in MASTER state
164 |             if (BeatMessage.MessageType.MASTER.code() == respMsg.getType()) {
165 |                 this.status = Status.SLAVE;
166 |                 return false;
167 |             }
168 |
169 |         } catch (Exception e) {
170 |             // the peer could not be reached
171 |             log.warn(e.getMessage());
172 |             return true;
173 |         }
174 |
175 |         return false;
176 |     }
177 |
178 |
179 |     public enum Status {
180 |         MASTER,
181 |         SLAVE;
182 |     }
183 | }
184 |
--------------------------------------------------------------------------------
/src/main/java/org/fh/gae/das/mysql/listener/AggregationListener.java:
--------------------------------------------------------------------------------
1 | package org.fh.gae.das.mysql.listener;
2 |
3 | import com.github.shyiko.mysql.binlog.BinaryLogClient;
4 | import com.github.shyiko.mysql.binlog.event.DeleteRowsEventData;
5 | import com.github.shyiko.mysql.binlog.event.Event;
6 | import com.github.shyiko.mysql.binlog.event.EventData;
7 | import com.github.shyiko.mysql.binlog.event.EventType;
8 | import com.github.shyiko.mysql.binlog.event.TableMapEventData;
9 | import com.github.shyiko.mysql.binlog.event.UpdateRowsEventData;
10 | import com.github.shyiko.mysql.binlog.event.WriteRowsEventData;
11 | import lombok.extern.slf4j.Slf4j;
12 | import org.fh.gae.das.mysql.MysqlRowData;
13 | import org.fh.gae.das.mysql.binlog.BinlogPositionStore;
14 | import org.fh.gae.das.template.DasTable;
15 | import org.fh.gae.das.template.TemplateHolder;
16 | import org.springframework.beans.factory.annotation.Autowired;
17 | import org.springframework.stereotype.Component;
18 | import org.springframework.util.StringUtils;
19 |
20 | import java.io.Serializable;
21 | import java.util.Collections;
22 | import java.util.HashMap;
23 | import java.util.List;
24 | import java.util.Map;
25 | import java.util.stream.Collectors;
26 |
27 |
28 | /**
29 |  * Aggregation listener.
30 |  * It has two responsibilities: first, it caches TABLE_MAP events so that the following ROW events can be associated with the preceding TABLE_MAP;
31 |  * second, it holds the business listeners; every business listener must be registered through the register() method.
32 |  */
33 | @Component
34 | @Slf4j
35 | public class AggregationListener implements BinaryLogClient.EventListener {
36 |     private String dbName;
37 |     private String tableName;
38 |
39 |     @Autowired
40 |     private TemplateHolder templateHolder;
41 |
42 |     @Autowired
43 |     private BinlogPositionStore positionStore;
44 |
45 |     private Map<String, BizListener> listenerMap = new HashMap<>();
46 |
47 |
48 |     /**
49 |      * Register a listener.
50 |      * The same listener may call this method several times to subscribe to different databases and tables.
51 |      *
52 |      * @param dbName name of the database of interest
53 |      * @param tableName name of the table of interest
54 |      * @param listener the business listener
55 |      */
56 |     public void register(String dbName, String tableName, BizListener listener) {
57 |         this.listenerMap.put(genKey(dbName, tableName), listener);
58 |     }
59 |
60 |     protected String genKey(String dbName, String tableName) {
61 |         return dbName + ":" + tableName;
62 |     }
63 |
64 |     @Override
65 |     public void onEvent(Event event) {
66 |         // persist the binlog position
67 |         positionStore.save(positionStore.extract());
68 |
69 |         EventType type = event.getHeader().getEventType();
70 |         log.debug("event type: {}", type);
71 |
72 |         // cache the table name and database name
73 |         if (type == EventType.TABLE_MAP) {
74 |             onTableMap(event);
75 |             return;
76 |         }
77 |
78 |         if (type != EventType.UPDATE_ROWS
79 |                 && type != EventType.WRITE_ROWS
80 |                 && type != EventType.DELETE_ROWS) {
81 |             return;
82 |         }
83 |
84 |         // table and database metadata are required before dispatching
85 |         if (StringUtils.isEmpty(dbName) || StringUtils.isEmpty(tableName)) {
86 |             log.error("no meta data event");
87 |             return;
88 |         }
89 |
90 |         // find the listener interested in the current table
91 |         String key = genKey(this.dbName, this.tableName);
92 |         BizListener listener = this.listenerMap.get(key);
93 |         if (null == listener) {
94 |             log.debug("skip {}", key);
95 |             return;
96 |         }
97 |
98 |         log.info("trigger event {}", type.name());
99 |
100 |         try {
101 |             MysqlRowData rowData = buildRowData(event.getData());
102 |             if (null == rowData) {
103 |                 return;
104 |             }
105 |
106 |             rowData.setEventType(type);
107 |             listener.onEvent(rowData);
108 |
109 |         } catch (Exception e) {
110 |             log.error(e.getMessage());
111 |
112 |         } finally {
113 |             this.dbName = "";
114 |             this.tableName = "";
115 |         }
116 |
117 |     }
118 |
119 |     private void onTableMap(Event event) {
120 |         TableMapEventData data = event.getData();
121 |         this.tableName = data.getTable();
122 |         this.dbName = data.getDatabase();
123 |     }
124 |
125 |     /**
126 |      * Extract the list of the latest row values from the binlog event object.
127 |      * @param eventData binlog event data
128 |      * @return list of row value arrays
129 |      */
130 |     private List<Serializable[]> getAfterValues(EventData eventData) {
131 |         if (eventData instanceof WriteRowsEventData) {
132 |             return ((WriteRowsEventData) eventData).getRows();
133 |         }
134 |
135 |         if (eventData instanceof UpdateRowsEventData) {
136 |             return ((UpdateRowsEventData) eventData).getRows().stream()
137 |                     .map(entry -> entry.getValue())
138 |                     .collect(Collectors.toList());
139 |         }
140 |
141 |         if (eventData instanceof DeleteRowsEventData) {
142 |             return ((DeleteRowsEventData) eventData).getRows();
143 |         }
144 |
145 |         return Collections.emptyList();
146 |     }
147 |
148 |     /**
149 |      * Convert the binlog event data object into a MysqlRowData object.
150 |      * @param eventData binlog event data
151 |      * @return the assembled row data, or null if the table is not covered by the template
152 |      */
153 |     private MysqlRowData buildRowData(EventData eventData) {
154 |         DasTable table = templateHolder.getTable(tableName);
155 |         if (null == table) {
156 |             log.warn("table {} not found", tableName);
157 |             return null;
158 |         }
159 |
160 |         Map<String, String> afterMap = new HashMap<>();
161 |         // iterate over the rows
162 |         for (Serializable[] after : getAfterValues(eventData)) {
163 |             // extract the new values
164 |             int colLen = after.length;
165 |
166 |             // iterate over the values
167 |             for (int ix = 0; ix < colLen; ++ix) {
168 |                 // look up the column name for the current position
169 |                 String colName = table.getPosMap().get(ix);
170 |                 // if absent, this column is not of interest
171 |                 if (null == colName) {
172 |                     if (log.isDebugEnabled()) {
173 |                         log.debug("ignore position: {}", ix);
174 |                     }
175 |
176 |                     continue;
177 |                 }
178 |
179 |                 String colValue = after[ix].toString();
180 |
181 |                 afterMap.put(colName, colValue);
182 |             }
183 |         }
184 |
185 |         MysqlRowData rowData = new MysqlRowData();
186 |         rowData.setAfter(afterMap);
187 |         rowData.setTable(table);
188 |
189 |         return rowData;
190 |
191 |     }
192 |
193 |
194 | }
195 |
--------------------------------------------------------------------------------
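Note: business listeners are wired to AggregationListener at runtime through register(). The repository's own DemoTableListener is not included above, so the following is only a minimal sketch of how such a listener could look; the class name and the "gae"/"demo_table" database and table names are placeholders, and it assumes MysqlRowData exposes getters for the fields that AggregationListener sets.

package org.fh.gae.das.mysql.listener;

import lombok.extern.slf4j.Slf4j;
import org.fh.gae.das.mysql.MysqlRowData;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

/**
 * Hypothetical example of a business listener: subscribes to a single table
 * and logs the column values assembled by AggregationListener.
 */
@Component
@Slf4j
public class ExampleTableListener implements BizListener {
    @Autowired
    private AggregationListener aggregationListener;

    @PostConstruct
    public void subscribe() {
        // the key is built as "<database>:<table>" by AggregationListener.genKey()
        aggregationListener.register("gae", "demo_table", this);
    }

    @Override
    public void onEvent(MysqlRowData eventData) {
        // getAfter() contains only the columns declared for this table in the DAS template
        log.info("row event {} -> {}", eventData.getEventType(), eventData.getAfter());
    }
}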