├── .gitignore ├── README.md ├── pom.xml └── src └── main ├── java └── com │ └── isacc │ └── datax │ ├── DataxApplication.java │ ├── api │ ├── controller │ │ └── v1 │ │ │ ├── AzkabanController.java │ │ │ ├── DataxSyncSiteController.java │ │ │ └── HiveController.java │ └── dto │ │ ├── ApiResult.java │ │ ├── DataxSyncDTO.java │ │ ├── Hive2Hive.java │ │ ├── Hive2Mysql.java │ │ ├── Hive2Oracle.java │ │ ├── HiveInfoDTO.java │ │ ├── Mysql2Hive.java │ │ ├── Mysql2Mysql.java │ │ ├── Mysql2Oracle.java │ │ ├── Oracle2Hive.java │ │ ├── Oracle2Mysql.java │ │ └── Oracle2Oracle.java │ ├── app │ └── service │ │ ├── AzkabanService.java │ │ ├── BaseDataxService.java │ │ ├── DataxHandler.java │ │ ├── DataxSyncService.java │ │ ├── HiveService.java │ │ ├── MysqlService.java │ │ └── impl │ │ ├── AzkabanServiceImpl.java │ │ ├── BaseDataxServiceImpl.java │ │ ├── DataxSyncServiceImpl.java │ │ ├── Hive2HiveHandler.java │ │ ├── Hive2MysqlHandler.java │ │ ├── Hive2OracleHandler.java │ │ ├── HiveServiceImpl.java │ │ ├── Mysql2HiveHandler.java │ │ ├── Mysql2MysqlHandler.java │ │ ├── Mysql2OracleHandler.java │ │ ├── MysqlServiceImpl.java │ │ ├── Oracle2HiveHandler.java │ │ ├── Oracle2MysqlHandler.java │ │ └── Oracle2OracleHandler.java │ ├── domain │ ├── entity │ │ ├── DataxSync.java │ │ ├── datax │ │ │ ├── BaseDatax.java │ │ │ ├── DataxSetting.java │ │ │ ├── HivePartition.java │ │ │ └── MysqlInfo.java │ │ ├── reader │ │ │ ├── BaseReader.java │ │ │ ├── hdfsreader │ │ │ │ ├── HdfsColumn.java │ │ │ │ ├── HdfsFileTypeEnum.java │ │ │ │ └── HdfsReader.java │ │ │ ├── mysqlreader │ │ │ │ ├── MysqlReader.java │ │ │ │ └── ReaderConnection.java │ │ │ └── oraclereader │ │ │ │ └── OracleReader.java │ │ └── writer │ │ │ ├── BaseWriter.java │ │ │ ├── hdfswiter │ │ │ ├── HdfsCompressEnum.java │ │ │ ├── HdfsWriter.java │ │ │ └── HdfsWriterModeEnum.java │ │ │ ├── mysqlwriter │ │ │ ├── MysqlWriter.java │ │ │ ├── MysqlWriterModeEnum.java │ │ │ └── WriterConnection.java │ │ │ └── oraclewriter │ │ │ └── OracleWriter.java │ └── repository │ │ ├── DataxSyncRepository.java │ │ └── MysqlRepository.java │ └── infra │ ├── annotation │ └── DataxHandlerType.java │ ├── config │ ├── AzkabanProperties.java │ ├── CustomSimpleClientHttpRequestFactory.java │ ├── DataxHandlerContext.java │ ├── DataxHandlerProcessor.java │ ├── DataxProperties.java │ ├── JdbcTemplateConfig.java │ ├── RedisConfiguration.java │ ├── RestTemplateConfig.java │ └── SwaggerConfig.java │ ├── constant │ ├── Constants.java │ ├── DataxHandlerTypeConstants.java │ └── DataxParameterConstants.java │ ├── converter │ ├── ConvertorI.java │ └── DataxSyncConverter.java │ ├── dataobject │ └── DataxSyncDO.java │ ├── mapper │ ├── DataxSyncMapper.java │ └── MysqlSimpleMapper.java │ ├── repository │ └── impl │ │ ├── DataxSyncRepositoryImpl.java │ │ └── MysqlRepositoryImpl.java │ └── util │ ├── ApplicationContextHelper.java │ ├── BeanUtil.java │ ├── DataxUtil.java │ ├── FreemarkerUtil.java │ ├── GenerateDataModelUtil.java │ ├── HdfsUtil.java │ ├── SftpUtil.java │ └── ZipUtil.java └── resources ├── META-INF └── additional-spring-configuration-metadata.json ├── application-template.yml ├── application.yml ├── dataxJob.job ├── logback.xml ├── mapper ├── DataxSyncMapper.xml └── MysqlMapper.xml ├── sql └── datax_sync.sql └── templates ├── hive2hive.ftl ├── hive2mysql.ftl ├── hive2oracle.ftl ├── mysql2hive_querySql.ftl ├── mysql2hive_where.ftl ├── mysql2mysql_querySql.ftl ├── mysql2mysql_where.ftl ├── mysql2oracle_querySql.ftl ├── mysql2oracle_where.ftl ├── oracle2hive_querySql.ftl ├── 
oracle2hive_where.ftl ├── oracle2mysql_querySql.ftl ├── oracle2mysql_where.ftl ├── oracle2oracle_querySql.ftl └── oracle2oracle_where.ftl

/.gitignore:
--------------------------------------------------------------------------------
HELP.md
/target/
!.mvn/wrapper/maven-wrapper.jar

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
/build/

### VS Code ###
.vscode/
/logs/
/mvnw
/mvnw.cmd
/src/main/azkaban/
/src/main/datax/
/src/test/
/src/main/resources/application-dev.yml
/src/main/resources/application-hdsp.yml
/src/main/resources/bootstrap-*.yml

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# DEPRECATED!!! No longer maintained!!!

The project has been refactored, optimized and migrated to datax-admin.

[datax-admin repository](https://github.com/thestyleofme/datax-admin.git)

---

# common-datax
A general-purpose data synchronization microservice built on Alibaba DataX. A front-end page can be built on top of it, and data is synchronized automatically according to the configured reader and writer.

This project is only suitable when the number of data sources is small. If there are many data sources, refer to the design notes below.

---

Alibaba DataX has a few drawbacks:
- **Not automated enough**
- **The job JSON has to be written by hand**
- **Jobs have to be run manually**

Development time is precious, so this project provides:
- **A generic RESTful interface for data extraction**
- **Automatic creation of Hive databases, tables and partitions on HDFS**
- **Automatic generation of the job JSON from FreeMarker templates**
- **Automatic job execution via Python**
- **Azkaban integration for scheduling**

Example: MySQL to Hive

Select the MySQL tables and columns to synchronize and enter the target Hive database/table/partition information. The Hive database, table and partitions do not have to be created in advance; they are created automatically from the source MySQL table and its column types. FreeMarker then generates the job JSON and Azkaban schedules the execution: creating the project, uploading the zip and running the flow all happen automatically and can be monitored on the Azkaban page. Direct remote execution via Python is also supported.

The design above is implemented with the strategy pattern. With only a few data sources synchronizing with each other (say Hive/MySQL/Oracle), the strategy pattern works well, but with many data sources it becomes unwieldy and the number of handler classes grows roughly with the square of the number of source/target types. To keep development and maintenance manageable, the strategy pattern should be dropped in favour of the approach below; that code has not been pushed here (laziness on my part).

### Design notes (to be compared with the strategy-pattern version, i.e. the current project); the new implementation may be pushed later:
- Drop FreeMarker; the DTO uses a Map directly, carrying the reader, writer and setting information
- Define WriterService/ReaderService interfaces whose methods handle the reader/writer part of the JSON
- One class per reader/writer (located via reflection) generates the reader/writer part of the JSON; the setting part is added at the end to produce a complete DataX Job

Pseudocode:
```
DataxSyncDTO
/**
 * Sync information, carrying the following three keys
 * @see BaseReader subclasses
 * @see BaseWriter subclasses
 * @see Job.Setting
 */
private Map<String, Object> sync;

ReaderService/WriterService
e.g. MysqlReaderService/MysqlWriterService implementations
public interface ReaderService {
    /**
     * Parse the reader part
     *
     * @param tenantId     tenant id
     * @param datasourceId data source id
     * @param reader       reader json
     * @return json
     */
    String parseReader(Long tenantId, Long datasourceId, String reader);
}

The concrete implementation class is found by name via reflection and serializes the reader/writer part of the json.

Finally everything is combined into the DataX job json.

The class the final DataX job json maps to:
public class Job {
    private Setting setting;
    private List<Content> content;
    public static class Setting {
        private Speed speed;
        private ErrorLimit errorLimit;
    }
    public static class Speed {
        private String record;
        private String channel;
        private String speedByte;
    }
    public static class ErrorLimit {
        private String record;
        private String percentage;
    }
    public static class Content {
        private Reader reader;
        private Writer writer;
    }
    public static class Reader {
        private String name;
        private Object parameter;
    }
    public static class Writer {
        private String name;
        private Object parameter;
    }
}
```
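For reference, a minimal sketch of the reflection-based lookup described above: one ReaderService implementation per reader type, resolved by naming convention. The base package and class names are illustrative assumptions only; this code is not part of the repository.

```
import java.util.Locale;

/** The interface from the pseudocode above, repeated so the sketch is self-contained. */
interface ReaderService {
    String parseReader(Long tenantId, Long datasourceId, String reader);
}

/**
 * Resolves the ReaderService implementation for a reader type by naming
 * convention, using reflection as described in the design notes.
 * The base package and the "XxxReaderService" naming rule are assumptions.
 */
final class ReaderServiceResolver {

    private static final String BASE_PACKAGE = "com.example.datax.reader.";

    ReaderService resolve(String readerType) {
        // "mysql" -> "MysqlReaderService"
        String simpleName = readerType.substring(0, 1).toUpperCase(Locale.ROOT)
                + readerType.substring(1).toLowerCase(Locale.ROOT)
                + "ReaderService";
        try {
            Class<?> clazz = Class.forName(BASE_PACKAGE + simpleName);
            return (ReaderService) clazz.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            throw new IllegalArgumentException("no ReaderService implementation for type: " + readerType, e);
        }
    }
}
```

A WriterService would be resolved the same way, and the reader/writer fragments would then be combined with the Setting part into the Job class shown in the pseudocode.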
### done:
- Pairwise synchronization between Oracle, MySQL and Hive
- Import of local CSV files into Hive, with partition support
- Extraction jobs executed as Python scripts through Azkaban
- A single RESTful interface that covers every synchronization type

### todo:
- A table to record synchronization history
- Download of the generated JSON file
- Azkaban scheduled (cron) execution, etc.
- Data source management for MySQL and Hive, so that connection details do not have to be supplied on every request
- Groovy scripts
---

## Notes

#### Edit the configuration file application-template.yml

1. Adjust the data sources to match your own environment

> Do not change the data source names; only change the username, password and url to your own

2. DataX settings
```
# every path value here must end with /
datax:
  home: ${DATAX_HOME:/usr/local/DataX/target/datax/datax/}
  host: ${DATAX_HOST:datax01}
  port: 22
  # the user must have permission to operate on HDFS
  username: ${DATAX_USERNAME:hadoop}
  password: ${DATAX_PASSWORD:hadoop}
  uploadDicPath: ${DATAX_JSON_FILE_HOME:/home/hadoop/datax/}
```
3. Azkaban URL. Azkaban is optional, but this project schedules jobs through Azkaban by default
```
azkaban:
  host: ${AZKABAN_HOST:http://192.168.43.221:8081}
  username: ${AZKABAN_USERNAME:azkaban}
  password: ${AZKABAN_PASSWORD:azkaban}
```
#### Select the active profile

> Rename application-template.yml to application-dev.yml; application.yml selects the profile to activate

```
spring:
  profiles:
    active: ${SPRING_PROFILES_ACTIVE:dev}
```
#### Swagger address
> http://localhost:10024/swagger-ui.html
---

## Usage example

> Here mysql2Hive means synchronizing from MySQL to Hive; it can be replaced with mysql2Mysql, hive2Hive, oracle2Hive and so on (camelCase).

### 1. mysql2hive example
Imports MySQL data into Hive, with partition support.
>
> POST http://localhost:10024/v1/datax-syncs/execute
>
> Example body

```
{
    "syncName": "mysql2hive_test_0625_where",
    "syncDescription": "mysql2hive_test_0625_where",
    "sourceDatasourceType": "mysql",
    "sourceDatasourceId": "1",
    "writeDatasourceType": "hadoop_hive_2",
    "writeDatasourceId": "1",
    "jsonFileName": "mysql2hive_test_0625_where.json",
    "mysql2Hive": {
        "setting": {
            "speed": {
                "channel": 3
            },
            "errorLimit": {
                "record": 0,
                "percentage": 0.02
            }
        },
        "reader": {
            "splitPk": "",
            "username": "root",
            "password": "root",
            "column": [
                "id",
                "username"
            ],
            "connection": [{
                "table": [
                    "userinfo"
                ],
                "jdbcUrl": [
                    "jdbc:mysql://hadoop04:3306/common_datax?useUnicode=true&characterEncoding=utf-8&useSSL=false"
                ]
            }],
            "where": "2 > 1"
        },
        "writer": {
            "defaultFS": "hdfs://hadoop04:9000",
            "fileType": "text",
            "path": "/user/hive/warehouse/test.db/userinfo",
            "fileName": "userinfo",
            "column": [
                {
                    "name": "id",
                    "type": "BIGINT"
                },
                {
                    "name": "username",
                    "type": "STRING"
                }
            ],
            "writeMode": "append",
            "fieldDelimiter": "\t",
            "compress": "",
            "hadoopConfig": {
            },
            "haveKerberos": false,
            "kerberosKeytabFilePath": "",
            "kerberosPrincipal": ""
        }
    }
}
```
> path can also be an HDFS partition path. The partition does not need to be created in advance; it is created automatically, for example:

```
"path": "/user/hive/warehouse/test.db/userinfo_dts/dt1=A1/dt2=B2"
```
> In that case the partitioned table userinfo_dts is created in Hive automatically with two partition columns, and the data is loaded into the dt1=A1, dt2=B2 partition.

--------------------------------------------------------------------------------
/src/main/java/com/isacc/datax/DataxApplication.java:
-------------------------------------------------------------------------------- 1 | package com.isacc.datax; 2 | 3 | import org.mybatis.spring.annotation.MapperScan; 4 | import org.springframework.boot.SpringApplication; 5 | import org.springframework.boot.autoconfigure.SpringBootApplication; 6 | import org.springframework.boot.autoconfigure.freemarker.FreeMarkerAutoConfiguration; 7 | import org.springframework.cloud.client.discovery.EnableDiscoveryClient; 8 | import org.springframework.cloud.openfeign.EnableFeignClients; 9 | 10 | /** 11 | *
12 | * DataX application entry point 13 | *
14 | * 15 | * @author isacc 2019/04/28 17:19 16 | */ 17 | @SpringBootApplication(exclude = {FreeMarkerAutoConfiguration.class}) 18 | @MapperScan("com.isacc.datax.infra.mapper") 19 | @EnableDiscoveryClient 20 | @EnableFeignClients 21 | public class DataxApplication { 22 | 23 | public static void main(String[] args) { 24 | SpringApplication.run(DataxApplication.class, args); 25 | } 26 | 27 | } 28 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/controller/v1/AzkabanController.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.controller.v1; 2 | 3 | import com.isacc.datax.api.dto.ApiResult; 4 | import com.isacc.datax.app.service.AzkabanService; 5 | import org.springframework.web.bind.annotation.PostMapping; 6 | import org.springframework.web.bind.annotation.RequestMapping; 7 | import org.springframework.web.bind.annotation.RestController; 8 | 9 | /** 10 | *
11 | * description 12 | *
13 | * 14 | * @author isacc 2019/05/14 13:49 15 | */ 16 | @RestController("azkabanController.v1") 17 | @RequestMapping("/azkaban") 18 | public class AzkabanController { 19 | 20 | private final AzkabanService azkabanService; 21 | 22 | public AzkabanController(AzkabanService azkabanService) { 23 | this.azkabanService = azkabanService; 24 | } 25 | 26 | @PostMapping("/execution") 27 | public ApiResult executeDataxJob(String projectName, String description, String zipPath) { 28 | return azkabanService.executeDataxJob(projectName, description, zipPath); 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/controller/v1/DataxSyncSiteController.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.controller.v1; 2 | 3 | import com.baomidou.mybatisplus.core.metadata.IPage; 4 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 5 | import com.isacc.datax.api.dto.ApiResult; 6 | import com.isacc.datax.api.dto.DataxSyncDTO; 7 | import com.isacc.datax.app.service.DataxSyncService; 8 | import com.isacc.datax.domain.entity.DataxSync; 9 | import com.isacc.datax.domain.repository.DataxSyncRepository; 10 | import com.isacc.datax.infra.constant.Constants; 11 | import io.swagger.annotations.ApiOperation; 12 | import org.springframework.data.domain.Sort; 13 | import org.springframework.data.web.SortDefault; 14 | import org.springframework.web.bind.annotation.*; 15 | import springfox.documentation.annotations.ApiIgnore; 16 | 17 | /** 18 | * 数据同步表 管理 API 19 | * 20 | * @author isacc 2019-05-17 14:07:48 21 | */ 22 | @RestController("dataxSyncSiteController.v1") 23 | @RequestMapping("/v1/datax-syncs") 24 | public class DataxSyncSiteController { 25 | 26 | private final DataxSyncService dataxSyncService; 27 | private final DataxSyncRepository dataxSyncRepository; 28 | 29 | public DataxSyncSiteController(DataxSyncService dataxSyncService, DataxSyncRepository dataxSyncRepository) { 30 | this.dataxSyncService = dataxSyncService; 31 | this.dataxSyncRepository = dataxSyncRepository; 32 | } 33 | 34 | @ApiOperation(value = "数据同步表列表") 35 | @GetMapping 36 | public IPage list(DataxSyncDTO dataxSyncDTO, @ApiIgnore @SortDefault(value = DataxSync.FIELD_SYNC_ID, 37 | direction = Sort.Direction.DESC) Page dataxSyncPage) { 38 | dataxSyncDTO.setTenantId(Constants.DEFAULT_TENANT_ID); 39 | return dataxSyncRepository.pageAndSortDTO(dataxSyncPage, dataxSyncDTO); 40 | } 41 | 42 | @ApiOperation(value = "数据同步表明细") 43 | @GetMapping("/{syncId}") 44 | public DataxSyncDTO detail(@PathVariable Long syncId) { 45 | return dataxSyncRepository.selectByPrimaryKey(syncId); 46 | } 47 | 48 | @ApiOperation(value = "新增数据同步任务") 49 | @PostMapping 50 | public DataxSyncDTO create(@RequestBody DataxSyncDTO dataxSyncDTO) { 51 | dataxSyncDTO.setTenantId(Constants.DEFAULT_TENANT_ID); 52 | return dataxSyncRepository.insertSelectiveDTO(dataxSyncDTO); 53 | } 54 | 55 | @ApiOperation(value = "获取DataX任务执行命令") 56 | @GetMapping("/datax-command") 57 | public String generateDataxCommand() { 58 | return dataxSyncService.generateDataxCommand(); 59 | } 60 | 61 | @ApiOperation(value = "执行datax同步任务") 62 | @PostMapping("/execute") 63 | public ApiResult execute(@RequestBody DataxSyncDTO dataxSyncDTO) { 64 | dataxSyncDTO.setTenantId(Constants.DEFAULT_TENANT_ID); 65 | return dataxSyncService.execute(dataxSyncDTO); 66 | } 67 | 68 | @ApiOperation(value = "校验datax同步任务名称以及json文件名称是否重复") 69 | @PostMapping("/check") 70 | 
public ApiResult checkSyncNameAndJsonFileName(@RequestBody DataxSyncDTO dataxSyncDTO) { 71 | dataxSyncDTO.setTenantId(Constants.DEFAULT_TENANT_ID); 72 | return dataxSyncService.checkSyncNameAndJsonFileName(dataxSyncDTO); 73 | } 74 | 75 | @ApiOperation(value = "修改数据同步任务") 76 | @PutMapping 77 | public DataxSyncDTO update(@RequestBody DataxSyncDTO dataxSyncDTO) { 78 | dataxSyncDTO.setTenantId(Constants.DEFAULT_TENANT_ID); 79 | return dataxSyncRepository.updateSelectiveDTO(dataxSyncDTO); 80 | } 81 | 82 | @ApiOperation(value = "删除数据同步表") 83 | @DeleteMapping 84 | public ApiResult remove(@RequestBody DataxSyncDTO dataxSyncDTO) { 85 | dataxSyncDTO.setTenantId(Constants.DEFAULT_TENANT_ID); 86 | return dataxSyncService.deleteDataxSync(dataxSyncDTO); 87 | } 88 | 89 | } 90 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/controller/v1/HiveController.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.controller.v1; 2 | 3 | import javax.validation.constraints.NotNull; 4 | 5 | import com.isacc.datax.api.dto.ApiResult; 6 | import com.isacc.datax.api.dto.HiveInfoDTO; 7 | import com.isacc.datax.app.service.HiveService; 8 | import org.springframework.beans.factory.annotation.Autowired; 9 | import org.springframework.web.bind.annotation.*; 10 | 11 | /** 12 | *
13 | * Hive CRUD 14 | *
15 | * 16 | * @author isacc 2019/04/28 19:34 17 | */ 18 | @RestController("hiveController.v1") 19 | @RequestMapping("/hive") 20 | public class HiveController { 21 | 22 | private final HiveService hiveService; 23 | 24 | @Autowired 25 | public HiveController(HiveService hiveService) { 26 | this.hiveService = hiveService; 27 | } 28 | 29 | 30 | @PostMapping("/table") 31 | public ApiResult createTable(@RequestBody HiveInfoDTO hiveInfoDTO) { 32 | return hiveService.createTable(hiveInfoDTO); 33 | } 34 | 35 | @PostMapping("table/partition") 36 | public ApiResult addPartition(@RequestBody HiveInfoDTO hiveInfoDTO) { 37 | return hiveService.addPartition(hiveInfoDTO); 38 | } 39 | 40 | 41 | @DeleteMapping("/table") 42 | public ApiResult deleteTable(@RequestBody HiveInfoDTO hiveInfoDTO) { 43 | return hiveService.deleteTable(hiveInfoDTO); 44 | } 45 | 46 | @GetMapping("/database") 47 | public ApiResult createTable(@NotNull String databaseName) { 48 | return hiveService.createDatabase(databaseName); 49 | } 50 | 51 | @DeleteMapping("/database") 52 | public ApiResult deleteDatabase(@NotNull String databaseName) { 53 | return hiveService.deleteDatabase(databaseName); 54 | } 55 | 56 | 57 | } 58 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/ApiResult.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import lombok.*; 5 | import org.apache.http.HttpStatus; 6 | 7 | /** 8 | *
9 | * Wrapper class for API call results 10 | *
11 | * 12 | * @author isacc 2019/04/29 0:59 13 | */ 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @AllArgsConstructor 18 | @JsonInclude(JsonInclude.Include.NON_NULL) 19 | public class ApiResult { 20 | 21 | /** 22 | * 返回给前台的状态码 23 | */ 24 | private Integer code; 25 | /** 26 | * 请求是否成功 27 | */ 28 | private Boolean result; 29 | /** 30 | * 返回给前台的信息 31 | */ 32 | private String message; 33 | /** 34 | * 返回给前台的数据 35 | */ 36 | private T content; 37 | 38 | private ApiResult(Integer code, Boolean result) { 39 | this.code = code; 40 | this.result = result; 41 | } 42 | 43 | public static ApiResult initSuccess() { 44 | return new ApiResult<>(HttpStatus.SC_OK, true); 45 | } 46 | 47 | public static ApiResult initFailure() { 48 | return new ApiResult<>(HttpStatus.SC_INTERNAL_SERVER_ERROR, false); 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/DataxSyncDTO.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import java.util.Date; 4 | 5 | import com.fasterxml.jackson.annotation.JsonInclude; 6 | import io.swagger.annotations.ApiModel; 7 | import lombok.*; 8 | 9 | /** 10 | * 数据同步表 11 | * 12 | * @author isacc 2019-05-17 14:07:48 13 | */ 14 | @Data 15 | @Builder 16 | @NoArgsConstructor 17 | @AllArgsConstructor 18 | @EqualsAndHashCode(callSuper = false) 19 | @ApiModel("数据同步表") 20 | @JsonInclude(value = JsonInclude.Include.NON_NULL) 21 | public class DataxSyncDTO { 22 | 23 | public static final String FIELD_SYNC_ID = "syncId"; 24 | public static final String FIELD_SYNC_NAME = "syncName"; 25 | public static final String FIELD_SYNC_DESCRIPTION = "syncDescription"; 26 | public static final String FIELD_SOURCE_DATASOURCE_TYPE = "sourceDatasourceType"; 27 | public static final String FIELD_SOURCE_DATASOURCE_ID = "sourceDatasourceId"; 28 | public static final String FIELD_WRITE_DATASOURCE_TYPE = "writeDatasourceType"; 29 | public static final String FIELD_WRITE_DATASOURCE_ID = "writeDatasourceId"; 30 | public static final String FIELD_JSON_FILE_NAME = "jsonFileName"; 31 | public static final String FIELD_SETTING_INFO = "settingInfo"; 32 | public static final String FIELD_TENANT_ID = "tenantId"; 33 | public static final String FIELD_OBJECT_VERSION_NUMBER = "objectVersionNumber"; 34 | public static final String FIELD_CREATION_DATE = "creationDate"; 35 | public static final String FIELD_CREATED_BY = "createdBy"; 36 | public static final String FIELD_LAST_UPDATED_BY = "lastUpdatedBy"; 37 | public static final String FIELD_LAST_UPDATE_DATE = "lastUpdateDate"; 38 | 39 | // 40 | // 业务方法(按public protected private顺序排列) 41 | // ------------------------------------------------------------------------------ 42 | 43 | // 44 | // 数据库字段 45 | // ------------------------------------------------------------------------------ 46 | 47 | private Long syncId; 48 | private String syncName; 49 | private String syncDescription; 50 | private String sourceDatasourceType; 51 | private Long sourceDatasourceId; 52 | private String writeDatasourceType; 53 | private Long writeDatasourceId; 54 | private String jsonFileName; 55 | private Byte[] settingInfo; 56 | /** 57 | * 租户ID 58 | */ 59 | private Long tenantId; 60 | private Long objectVersionNumber; 61 | private Date creationDate; 62 | private Long createdBy; 63 | private Long lastUpdatedBy; 64 | private Date lastUpdateDate; 65 | 66 | /** 67 | * 支持的datax同步任务 68 | */ 69 | private Mysql2Mysql 
mysql2Mysql; 70 | private Mysql2Hive mysql2Hive; 71 | private Mysql2Oracle mysql2Oracle; 72 | private Hive2Hive hive2Hive; 73 | private Hive2Mysql hive2Mysql; 74 | private Hive2Oracle hive2Oracle; 75 | private Oracle2Hive oracle2Hive; 76 | private Oracle2Oracle oracle2Oracle; 77 | private Oracle2Mysql oracle2Mysql; 78 | 79 | } 80 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Hive2Hive.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.hdfsreader.HdfsReader; 6 | import com.isacc.datax.domain.entity.writer.hdfswiter.HdfsWriter; 7 | import lombok.*; 8 | 9 | /** 10 | *
11 | * Hive2HiveDTO 12 | *
13 | * 14 | * @author isacc 2019/05/07 14:12 15 | */ 16 | @Builder 17 | @Data 18 | @EqualsAndHashCode(callSuper = false) 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | @JsonInclude(JsonInclude.Include.NON_NULL) 22 | public class Hive2Hive extends BaseDatax { 23 | /** 24 | * DataX HdfsReader 25 | */ 26 | private HdfsReader reader; 27 | /** 28 | * DataX HdfsWriter 29 | */ 30 | private HdfsWriter writer; 31 | /** 32 | * csv导入时,csv的本地路径 33 | */ 34 | private String csvPath; 35 | } 36 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Hive2Mysql.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.hdfsreader.HdfsReader; 6 | import com.isacc.datax.domain.entity.writer.mysqlwriter.MysqlWriter; 7 | import lombok.*; 8 | 9 | /** 10 | *
11 | * Hive2MysqlDTO 12 | *
13 | * 14 | * @author isacc 2019/05/07 14:12 15 | */ 16 | @Builder 17 | @Data 18 | @EqualsAndHashCode(callSuper = false) 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | @JsonInclude(JsonInclude.Include.NON_NULL) 22 | public class Hive2Mysql extends BaseDatax { 23 | /** 24 | * DataX HdfsReader 25 | */ 26 | private HdfsReader reader; 27 | /** 28 | * DataX MysqlWriter 29 | */ 30 | private MysqlWriter writer; 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Hive2Oracle.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.hdfsreader.HdfsReader; 6 | import com.isacc.datax.domain.entity.writer.oraclewriter.OracleWriter; 7 | import lombok.*; 8 | 9 | /** 10 | * description 11 | * 12 | * @author isacc 2019/05/29 14:31 13 | */ 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class Hive2Oracle extends BaseDatax { 21 | 22 | /** 23 | * hdfsreader 24 | */ 25 | private HdfsReader reader; 26 | /** 27 | * oraclewriter 28 | */ 29 | private OracleWriter writer; 30 | 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/HiveInfoDTO.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import java.util.List; 4 | 5 | import com.fasterxml.jackson.annotation.JsonInclude; 6 | import com.isacc.datax.domain.entity.datax.HivePartition; 7 | import com.isacc.datax.domain.entity.reader.hdfsreader.HdfsColumn; 8 | import lombok.*; 9 | 10 | /** 11 | *
12 | * Hive information 13 | *
14 | * 15 | * @author isacc 2019/04/28 19:33 16 | */ 17 | @Builder 18 | @Data 19 | @EqualsAndHashCode(callSuper = false) 20 | @NoArgsConstructor 21 | @AllArgsConstructor 22 | @JsonInclude(JsonInclude.Include.NON_NULL) 23 | public class HiveInfoDTO { 24 | 25 | /** 26 | * Hive数据库 27 | */ 28 | private String databaseName; 29 | /** 30 | * Hive表名 31 | */ 32 | private String tableName; 33 | /** 34 | * Hive表字段 35 | */ 36 | private List columns; 37 | /** 38 | * 字段分割符 39 | */ 40 | private String fieldDelimiter; 41 | /** 42 | * 文件的类型,目前只支持用户配置为"text"、"orc"、"rc"、"seq"、"csv" 43 | */ 44 | private String fileType; 45 | /** 46 | * 分区值 47 | */ 48 | private List partitionList; 49 | } 50 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Mysql2Hive.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.mysqlreader.MysqlReader; 6 | import com.isacc.datax.domain.entity.writer.hdfswiter.HdfsWriter; 7 | import lombok.*; 8 | 9 | /** 10 | *
11 | * DataX wrapper 12 | *
13 | * 14 | * @author isacc 2019/04/29 13:44 15 | */ 16 | @Builder 17 | @Data 18 | @EqualsAndHashCode(callSuper = false) 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | @JsonInclude(JsonInclude.Include.NON_NULL) 22 | public class Mysql2Hive extends BaseDatax { 23 | 24 | /** 25 | * DataX MysqlReader 26 | */ 27 | private MysqlReader reader; 28 | /** 29 | * DataX HdfsWriter 30 | */ 31 | private HdfsWriter writer; 32 | 33 | } 34 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Mysql2Mysql.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.mysqlreader.MysqlReader; 6 | import com.isacc.datax.domain.entity.writer.mysqlwriter.MysqlWriter; 7 | import lombok.*; 8 | 9 | /** 10 | *
11 | * description 12 | *
13 | * 14 | * @author isacc 2019/05/22 11:35 15 | */ 16 | @Builder 17 | @Data 18 | @EqualsAndHashCode(callSuper = false) 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | @JsonInclude(JsonInclude.Include.NON_NULL) 22 | public class Mysql2Mysql extends BaseDatax { 23 | /** 24 | * DataX MysqlReader 25 | */ 26 | private MysqlReader reader; 27 | /** 28 | * DataX MysqlWriter 29 | */ 30 | private MysqlWriter writer; 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Mysql2Oracle.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.mysqlreader.MysqlReader; 6 | import com.isacc.datax.domain.entity.writer.oraclewriter.OracleWriter; 7 | import lombok.*; 8 | 9 | /** 10 | * Mysql2Oracle 11 | * 12 | * @author isacc 2019/05/29 15:02 13 | */ 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class Mysql2Oracle extends BaseDatax { 21 | 22 | /** 23 | * mysqlreader 24 | */ 25 | private MysqlReader reader; 26 | /** 27 | * oraclewriter 28 | */ 29 | private OracleWriter writer; 30 | 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Oracle2Hive.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.oraclereader.OracleReader; 6 | import com.isacc.datax.domain.entity.writer.hdfswiter.HdfsWriter; 7 | import lombok.*; 8 | 9 | /** 10 | * description 11 | * 12 | * @author isacc 2019/05/28 11:28 13 | */ 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class Oracle2Hive extends BaseDatax { 21 | 22 | /** 23 | * oraclereader 24 | */ 25 | private OracleReader reader; 26 | /** 27 | * hdfswriter 28 | */ 29 | private HdfsWriter writer; 30 | } 31 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/api/dto/Oracle2Mysql.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.oraclereader.OracleReader; 6 | import com.isacc.datax.domain.entity.writer.mysqlwriter.MysqlWriter; 7 | import lombok.*; 8 | 9 | /** 10 | * description 11 | * 12 | * @author isacc 2019/05/28 11:28 13 | */ 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class Oracle2Mysql extends BaseDatax { 21 | 22 | /** 23 | * oraclereader 24 | */ 25 | private OracleReader reader; 26 | /** 27 | * mysqlwriter 28 | */ 29 | private MysqlWriter writer; 30 | } 31 | -------------------------------------------------------------------------------- 
/src/main/java/com/isacc/datax/api/dto/Oracle2Oracle.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.datax.BaseDatax; 5 | import com.isacc.datax.domain.entity.reader.oraclereader.OracleReader; 6 | import com.isacc.datax.domain.entity.writer.oraclewriter.OracleWriter; 7 | import lombok.*; 8 | 9 | /** 10 | * description 11 | * 12 | * @author isacc 2019/05/28 11:28 13 | */ 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class Oracle2Oracle extends BaseDatax { 21 | 22 | /** 23 | * oraclereader 24 | */ 25 | private OracleReader reader; 26 | /** 27 | * oraclewriter 28 | */ 29 | private OracleWriter writer; 30 | } 31 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/AzkabanService.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service; 2 | 3 | 4 | import com.isacc.datax.api.dto.ApiResult; 5 | 6 | /** 7 | *
8 | * description 9 | *
10 | * 11 | * @author isacc 2019/05/20 14:02 12 | */ 13 | public interface AzkabanService { 14 | 15 | /** 16 | * azkaban执行datax job 17 | * 18 | * @param projectName 要创建的项目名称 19 | * @param description 要创建的项目描述 20 | * @param zipPath 要上传的zip包路径 21 | * @return com.hand.datax.api.dto.ApiResult 22 | * @author isacc 2019/5/14 20:33 23 | */ 24 | ApiResult executeDataxJob(String projectName, String description, String zipPath); 25 | 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/BaseDataxService.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service; 2 | 3 | import java.util.Map; 4 | 5 | import com.isacc.datax.api.dto.ApiResult; 6 | import com.isacc.datax.api.dto.DataxSyncDTO; 7 | import com.isacc.datax.infra.config.AzkabanProperties; 8 | import com.isacc.datax.infra.config.DataxProperties; 9 | 10 | 11 | /** 12 | *
13 | * description 14 | *
15 | * 16 | * @author isacc 2019/05/18 0:47 17 | */ 18 | public interface BaseDataxService { 19 | 20 | /** 21 | * 生成json文件,上传到datax服务器 22 | * 23 | * @param dataModel freemarker data model 24 | * @param template 模板名称 25 | * @param jsonFileName datax生成的json file名称 26 | * @param dataxProperties Datax相关信息 27 | * @return com.hand.hdsp.datax.api.dto.ApiResult 28 | * @author isacc 2019/5/18 1:00 29 | */ 30 | ApiResult generateJsonFileAndUpload(Map dataModel, String template, String jsonFileName, DataxProperties dataxProperties); 31 | 32 | /** 33 | * 生成azkaban创建项目的zip 34 | * 35 | * @param jsonFileName datax json file name 36 | * @param azkabanProperties AzkabanProperties 37 | * @param dataxProperties DataxProperties 38 | * @return com.hand.hdsp.datax.api.dto.ApiResult 39 | * @author isacc 2019/5/20 23:42 40 | */ 41 | ApiResult generateAzkabanZip(String jsonFileName, AzkabanProperties azkabanProperties, DataxProperties dataxProperties); 42 | 43 | /** 44 | * 写datax json文件信息到表 45 | * 46 | * @param dataxSyncDTO DataxSyncDTO 47 | * @param dataxProperties DataxProperties 48 | * @param azkabanProperties AzkabanProperties 49 | * @return com.hand.hdsp.datax.api.dto.ApiResult 50 | * @author isacc 2019/5/21 13:38 51 | */ 52 | ApiResult writeDataxSettingInfo(DataxSyncDTO dataxSyncDTO, DataxProperties dataxProperties, AzkabanProperties azkabanProperties); 53 | 54 | /** 55 | * 删除datax服务器上json文件 56 | * 57 | * @param dataxProperties DataxProperties 58 | * @param dataxSyncDTO DataxSyncDTO 59 | * @return com.hand.hdsp.datax.api.dto.ApiResult 60 | * @author isacc 2019/5/21 15:29 61 | */ 62 | ApiResult deleteDataxJsonFile(DataxProperties dataxProperties, DataxSyncDTO dataxSyncDTO); 63 | 64 | } 65 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/DataxHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service; 2 | 3 | 4 | import com.isacc.datax.api.dto.ApiResult; 5 | import com.isacc.datax.api.dto.DataxSyncDTO; 6 | 7 | /** 8 | *
9 | * description 10 | *
11 | * 12 | * @author isacc 2019/05/23 9:23 13 | */ 14 | public interface DataxHandler { 15 | 16 | /** 17 | * 按照datax同步任务类型进行相应处理 18 | * 19 | * @param dataxSyncDTO DataxSyncDTO 20 | * @return com.hand.hdsp.datax.api.dto.ApiResult 21 | * @author isacc 2019/5/23 9:24 22 | */ 23 | ApiResult handle(DataxSyncDTO dataxSyncDTO); 24 | 25 | } 26 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/DataxSyncService.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service; 2 | 3 | import com.isacc.datax.api.dto.ApiResult; 4 | import com.isacc.datax.api.dto.DataxSyncDTO; 5 | 6 | /** 7 | * 数据同步表应用服务 8 | * 9 | * @author isacc 2019-05-17 14:07:48 10 | */ 11 | public interface DataxSyncService extends BaseDataxService { 12 | 13 | /** 14 | * 执行datax数据同步 15 | * 16 | * @param dataxSyncDTO DataxSyncDTO 17 | * @return com.hand.hdsp.datax.api.dto.ApiResult 18 | * @author HP_USER 2019/5/23 11:24 19 | */ 20 | ApiResult execute(DataxSyncDTO dataxSyncDTO); 21 | 22 | /** 23 | * 生成datax任务执行命令 24 | * 25 | * @return java.lang.String 26 | * @author isacc 2019/5/31 11:47 27 | */ 28 | String generateDataxCommand(); 29 | 30 | /** 31 | * 删除datax同步任务 32 | * 33 | * @param dataxSyncDTO DataxSyncDTO 34 | * @return com.isacc.datax.api.dto.ApiResult 35 | * @author isacc 2019/6/25 16:30 36 | */ 37 | ApiResult deleteDataxSync(DataxSyncDTO dataxSyncDTO); 38 | 39 | /** 40 | * 校验datax同步任务名称以及json文件名称是否重复 41 | * 42 | * @param dataxSyncDTO DataxSyncDTO 43 | * @return com.isacc.datax.api.dto.ApiResult 44 | * @author isacc 2019/6/25 16:33 45 | */ 46 | ApiResult checkSyncNameAndJsonFileName(DataxSyncDTO dataxSyncDTO); 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/HiveService.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service; 2 | 3 | import com.isacc.datax.api.dto.ApiResult; 4 | import com.isacc.datax.api.dto.HiveInfoDTO; 5 | 6 | /** 7 | *
8 | * Hive CRUD 9 | *
10 | * 11 | * @author isacc 2019/04/28 19:38 12 | */ 13 | public interface HiveService { 14 | 15 | /** 16 | * 创建Hive表 17 | * 18 | * @param hiveInfoDTO HiveInfoDTO 19 | * @return com.isacc.datax.api.dto.ApiResult 20 | * @author isacc 2019-04-28 19:44 21 | */ 22 | ApiResult createTable(HiveInfoDTO hiveInfoDTO); 23 | 24 | /** 25 | * 新建Hive数据库 26 | * 27 | * @param databaseName 数据库名称 28 | * @return com.isacc.datax.api.dto.ApiResult 29 | * @author isacc 2019-04-29 9:58 30 | */ 31 | ApiResult createDatabase(String databaseName); 32 | 33 | /** 34 | * 为hive分区表增加分区 35 | * 36 | * @param hiveInfoDTO HiveInfoDTO 37 | * @return com.isacc.datax.api.dto.ApiResult 38 | * @author isacc 2019/5/9 14:24 39 | */ 40 | ApiResult addPartition(HiveInfoDTO hiveInfoDTO); 41 | 42 | /** 43 | * 删除Hive表 44 | * 45 | * @param hiveInfoDTO HiveInfoDTO 46 | * @return com.isacc.datax.api.dto.ApiResult 47 | * @author isacc 2019-04-29 11:28 48 | */ 49 | ApiResult deleteTable(HiveInfoDTO hiveInfoDTO); 50 | 51 | /** 52 | * 删除Hive数据库 53 | * 54 | * @param databaseName 数据库名称 55 | * @return com.isacc.datax.api.dto.ApiResult 56 | * @author isacc 2019-04-29 9:58 57 | */ 58 | ApiResult deleteDatabase(String databaseName); 59 | } 60 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/MysqlService.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service; 2 | 3 | 4 | /** 5 | *
6 | * Mysql CRUD 7 | *
8 | * 9 | * @author isacc 2019/04/29 19:42 10 | */ 11 | public interface MysqlService { 12 | 13 | 14 | } 15 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/Hive2MysqlHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Optional; 6 | 7 | import com.isacc.datax.api.dto.ApiResult; 8 | import com.isacc.datax.api.dto.DataxSyncDTO; 9 | import com.isacc.datax.api.dto.Hive2Mysql; 10 | import com.isacc.datax.app.service.DataxHandler; 11 | import com.isacc.datax.infra.annotation.DataxHandlerType; 12 | import com.isacc.datax.infra.config.DataxProperties; 13 | import com.isacc.datax.infra.constant.DataxHandlerTypeConstants; 14 | import com.isacc.datax.infra.constant.DataxParameterConstants; 15 | import com.isacc.datax.infra.util.GenerateDataModelUtil; 16 | import lombok.extern.slf4j.Slf4j; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | *
21 | * Handler for syncing Hive data to MySQL 22 | *
23 | * 24 | * @author isacc 2019/05/27 17:28 25 | */ 26 | @Service 27 | @Slf4j 28 | @DataxHandlerType(DataxHandlerTypeConstants.HIVE2MYSQL) 29 | public class Hive2MysqlHandler extends BaseDataxServiceImpl implements DataxHandler { 30 | 31 | private final DataxProperties dataxProperties; 32 | 33 | public Hive2MysqlHandler(DataxProperties dataxProperties) { 34 | this.dataxProperties = dataxProperties; 35 | } 36 | 37 | @Override 38 | public ApiResult handle(DataxSyncDTO dataxSyncDTO) { 39 | return this.hive2Mysql(dataxSyncDTO); 40 | } 41 | 42 | private ApiResult hive2Mysql(DataxSyncDTO dataxSyncDTO) { 43 | Hive2Mysql hive2Mysql = dataxSyncDTO.getHive2Mysql(); 44 | ApiResult failureResult = ApiResult.initFailure(); 45 | if (!Optional.ofNullable(hive2Mysql).isPresent()) { 46 | failureResult.setMessage("DataxSyncDTO.hive2Mysql is null!"); 47 | return failureResult; 48 | } 49 | String jsonFileName = dataxSyncDTO.getJsonFileName(); 50 | final String template = dataxProperties.getHive2MysqlTemplate(); 51 | final Map dataModelHive2Mysql = this.generateDataModelHive2Mysql(hive2Mysql); 52 | final ApiResult generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelHive2Mysql, template, jsonFileName, dataxProperties); 53 | if (!generateJsonFileAndUploadResult.getResult()) { 54 | return generateJsonFileAndUploadResult; 55 | } 56 | return ApiResult.initSuccess(); 57 | } 58 | 59 | /** 60 | * @param hive2Mysql Hive2Mysql 61 | * @return java.util.Map 62 | * @author isacc 2019/5/27 20:45 63 | */ 64 | private Map generateDataModelHive2Mysql(Hive2Mysql hive2Mysql) { 65 | final HashMap root = new HashMap<>(16); 66 | // 通用的 67 | root.put(DataxParameterConstants.SETTING, hive2Mysql.getSetting()); 68 | // hdfsreader 参数部分 69 | GenerateDataModelUtil.commonHdfsReader(root, hive2Mysql.getReader()); 70 | // mysqlwriter 参数部分 71 | root.put(DataxParameterConstants.MYSQL_WRITE_MODE, hive2Mysql.getWriter().getWriteMode()); 72 | return GenerateDataModelUtil.commonMysqlWriter(root, hive2Mysql.getWriter()); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/Hive2OracleHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Optional; 6 | 7 | import com.isacc.datax.api.dto.ApiResult; 8 | import com.isacc.datax.api.dto.DataxSyncDTO; 9 | import com.isacc.datax.api.dto.Hive2Oracle; 10 | import com.isacc.datax.app.service.DataxHandler; 11 | import com.isacc.datax.infra.annotation.DataxHandlerType; 12 | import com.isacc.datax.infra.config.DataxProperties; 13 | import com.isacc.datax.infra.constant.DataxHandlerTypeConstants; 14 | import com.isacc.datax.infra.constant.DataxParameterConstants; 15 | import com.isacc.datax.infra.util.GenerateDataModelUtil; 16 | import lombok.extern.slf4j.Slf4j; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | * hive数据同步到oracle处理类 21 | * 22 | * @author isacc 2019/05/29 14:34 23 | */ 24 | @Service 25 | @Slf4j 26 | @DataxHandlerType(DataxHandlerTypeConstants.HIVE2ORACLE) 27 | public class Hive2OracleHandler extends BaseDataxServiceImpl implements DataxHandler { 28 | 29 | private final DataxProperties dataxProperties; 30 | 31 | public Hive2OracleHandler(DataxProperties dataxProperties) { 32 | this.dataxProperties = dataxProperties; 33 | } 34 | 35 | @Override 36 | public ApiResult handle(DataxSyncDTO dataxSyncDTO) { 
37 | return this.hive2Oracle(dataxSyncDTO); 38 | } 39 | 40 | private ApiResult hive2Oracle(DataxSyncDTO dataxSyncDTO) { 41 | final Hive2Oracle hive2Oracle = dataxSyncDTO.getHive2Oracle(); 42 | ApiResult failureResult = ApiResult.initFailure(); 43 | if (!Optional.ofNullable(hive2Oracle).isPresent()) { 44 | failureResult.setMessage("DataxSyncDTO.hive2Oracle is null!"); 45 | return failureResult; 46 | } 47 | String jsonFileName = dataxSyncDTO.getJsonFileName(); 48 | final String template = dataxProperties.getHive2OracleTemplate(); 49 | final Map dataModelHive2Mysql = this.generateDataModelHive2Oracle(hive2Oracle); 50 | final ApiResult generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelHive2Mysql, template, jsonFileName, dataxProperties); 51 | if (!generateJsonFileAndUploadResult.getResult()) { 52 | return generateJsonFileAndUploadResult; 53 | } 54 | return ApiResult.initSuccess(); 55 | } 56 | 57 | private Map generateDataModelHive2Oracle(Hive2Oracle hive2Oracle) { 58 | final HashMap root = new HashMap<>(16); 59 | // 通用的 60 | root.put(DataxParameterConstants.SETTING, hive2Oracle.getSetting()); 61 | // reader 62 | GenerateDataModelUtil.commonHdfsReader(root, hive2Oracle.getReader()); 63 | // writer 64 | return GenerateDataModelUtil.commonOracleWriter(root, hive2Oracle.getWriter()); 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/Mysql2MysqlHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Optional; 6 | 7 | import com.isacc.datax.api.dto.ApiResult; 8 | import com.isacc.datax.api.dto.DataxSyncDTO; 9 | import com.isacc.datax.api.dto.Mysql2Mysql; 10 | import com.isacc.datax.app.service.DataxHandler; 11 | import com.isacc.datax.infra.annotation.DataxHandlerType; 12 | import com.isacc.datax.infra.config.DataxProperties; 13 | import com.isacc.datax.infra.constant.DataxHandlerTypeConstants; 14 | import com.isacc.datax.infra.constant.DataxParameterConstants; 15 | import com.isacc.datax.infra.util.GenerateDataModelUtil; 16 | import lombok.extern.slf4j.Slf4j; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | *
21 | * description 22 | *
23 | * 24 | * @author isacc 2019/05/23 11:29 25 | */ 26 | @Service 27 | @Slf4j 28 | @DataxHandlerType(DataxHandlerTypeConstants.MYSQL2MYSQL) 29 | public class Mysql2MysqlHandler extends BaseDataxServiceImpl implements DataxHandler { 30 | 31 | private final DataxProperties dataxProperties; 32 | 33 | public Mysql2MysqlHandler(DataxProperties dataxProperties) { 34 | this.dataxProperties = dataxProperties; 35 | } 36 | 37 | @Override 38 | public ApiResult handle(DataxSyncDTO dataxSyncDTO) { 39 | return this.mysql2Mysql(dataxSyncDTO); 40 | } 41 | 42 | private ApiResult mysql2Mysql(DataxSyncDTO dataxSyncDTO) { 43 | final Mysql2Mysql mysql2Mysql = dataxSyncDTO.getMysql2Mysql(); 44 | ApiResult failureResult = ApiResult.initFailure(); 45 | if (!Optional.ofNullable(mysql2Mysql).isPresent()) { 46 | failureResult.setMessage("DataxSyncDTO.mysql2Mysql is null!"); 47 | return failureResult; 48 | } 49 | // 判断是where还是querySql 50 | ApiResult generateJsonFileAndUploadResult; 51 | String jsonFileName = dataxSyncDTO.getJsonFileName(); 52 | if (Optional.ofNullable(mysql2Mysql.getReader().getWhere()).isPresent()) { 53 | // where模式 54 | final String whereTemplate = dataxProperties.getMysql2Mysql().getWhereTemplate(); 55 | Map dataModelWhere = generateDataModelMysql2MysqlWhere(mysql2Mysql); 56 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelWhere, whereTemplate, jsonFileName, dataxProperties); 57 | } else { 58 | // querySql模式 59 | final String querySqlTemplate = dataxProperties.getMysql2Mysql().getQuerySqlTemplate(); 60 | Map dataModelQuery = this.generateDataModelMysql2MysqlQuery(mysql2Mysql); 61 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelQuery, querySqlTemplate, jsonFileName, dataxProperties); 62 | } 63 | return generateJsonFileAndUploadResult; 64 | } 65 | 66 | /** 67 | * 生成mysql2mysql_querySql的freemarker data model 68 | * 69 | * @param mysql2Mysql Mysql2MysqlDTO 70 | * @return java.util.Map 71 | * @author isacc 2019/5/22 14:25 72 | */ 73 | private Map generateDataModelMysql2MysqlQuery(Mysql2Mysql mysql2Mysql) { 74 | Map root = new HashMap<>(16); 75 | // setting 76 | root.put(DataxParameterConstants.SETTING, mysql2Mysql.getSetting()); 77 | // reader 78 | GenerateDataModelUtil.commonMysqlReader(root, mysql2Mysql.getReader()); 79 | // writer 80 | root.put(DataxParameterConstants.MYSQL_WRITE_MODE, mysql2Mysql.getWriter().getWriteMode()); 81 | return GenerateDataModelUtil.commonMysqlWriter(root, mysql2Mysql.getWriter()); 82 | } 83 | 84 | /** 85 | * 生成mysql2mysql_where的freemarker data model 86 | * 87 | * @param mysql2Mysql Mysql2MysqlDTO 88 | * @return java.util.Map 89 | * @author isacc 2019/5/22 14:25 90 | */ 91 | private Map generateDataModelMysql2MysqlWhere(Mysql2Mysql mysql2Mysql) { 92 | Map root = generateDataModelMysql2MysqlQuery(mysql2Mysql); 93 | // reader 94 | root.put(DataxParameterConstants.MYSQL_READER_COLUMN, mysql2Mysql.getReader().getColumn()); 95 | root.put(DataxParameterConstants.MYSQL_READER_WHERE, mysql2Mysql.getReader().getWhere()); 96 | // writer 97 | root.put(DataxParameterConstants.MYSQL_WRITE_MODE, mysql2Mysql.getWriter().getWriteMode()); 98 | return root; 99 | } 100 | 101 | } 102 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/Mysql2OracleHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import 
java.util.Optional; 6 | 7 | import com.isacc.datax.api.dto.ApiResult; 8 | import com.isacc.datax.api.dto.DataxSyncDTO; 9 | import com.isacc.datax.api.dto.Mysql2Oracle; 10 | import com.isacc.datax.app.service.DataxHandler; 11 | import com.isacc.datax.infra.annotation.DataxHandlerType; 12 | import com.isacc.datax.infra.config.DataxProperties; 13 | import com.isacc.datax.infra.constant.DataxHandlerTypeConstants; 14 | import com.isacc.datax.infra.constant.DataxParameterConstants; 15 | import com.isacc.datax.infra.util.GenerateDataModelUtil; 16 | import lombok.extern.slf4j.Slf4j; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | * mysql数据同步到oracle 21 | * 22 | * @author isacc 2019/05/29 15:04 23 | */ 24 | @Service 25 | @Slf4j 26 | @DataxHandlerType(DataxHandlerTypeConstants.MYSQL2ORACLE) 27 | public class Mysql2OracleHandler extends BaseDataxServiceImpl implements DataxHandler { 28 | 29 | private final DataxProperties dataxProperties; 30 | 31 | public Mysql2OracleHandler(DataxProperties dataxProperties) { 32 | this.dataxProperties = dataxProperties; 33 | } 34 | 35 | @Override 36 | public ApiResult handle(DataxSyncDTO dataxSyncDTO) { 37 | return this.mysql2Oracle(dataxSyncDTO); 38 | } 39 | 40 | private ApiResult mysql2Oracle(DataxSyncDTO dataxSyncDTO){ 41 | final Mysql2Oracle mysql2Oracle = dataxSyncDTO.getMysql2Oracle(); 42 | ApiResult failureResult = ApiResult.initFailure(); 43 | if (!Optional.ofNullable(mysql2Oracle).isPresent()) { 44 | failureResult.setMessage("DataxSyncDTO.mysql2Oracle is null!"); 45 | return failureResult; 46 | } 47 | // 判断是where还是querySql 48 | ApiResult generateJsonFileAndUploadResult; 49 | String jsonFileName = dataxSyncDTO.getJsonFileName(); 50 | if (Optional.ofNullable(mysql2Oracle.getReader().getWhere()).isPresent()) { 51 | // where模式 52 | final String whereTemplate = dataxProperties.getMysql2Oracle().getWhereTemplate(); 53 | Map dataModelWhere = generateDataModelMysql2OracleWhere(mysql2Oracle); 54 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelWhere, whereTemplate, jsonFileName, dataxProperties); 55 | } else { 56 | // querySql模式 57 | final String querySqlTemplate = dataxProperties.getMysql2Oracle().getQuerySqlTemplate(); 58 | Map dataModelQuery = this.generateDataModelMysql2OracleQuery(mysql2Oracle); 59 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelQuery, querySqlTemplate, jsonFileName, dataxProperties); 60 | } 61 | return generateJsonFileAndUploadResult; 62 | } 63 | 64 | private Map generateDataModelMysql2OracleQuery(Mysql2Oracle mysql2Oracle) { 65 | Map root = new HashMap<>(16); 66 | // setting 67 | root.put(DataxParameterConstants.SETTING, mysql2Oracle.getSetting()); 68 | // reader 69 | GenerateDataModelUtil.commonMysqlReader(root, mysql2Oracle.getReader()); 70 | // writer 71 | return GenerateDataModelUtil.commonOracleWriter(root, mysql2Oracle.getWriter()); 72 | } 73 | 74 | private Map generateDataModelMysql2OracleWhere(Mysql2Oracle mysql2Oracle) { 75 | Map root = generateDataModelMysql2OracleQuery(mysql2Oracle); 76 | // reader 77 | root.put(DataxParameterConstants.MYSQL_READER_COLUMN, mysql2Oracle.getReader().getColumn()); 78 | root.put(DataxParameterConstants.MYSQL_READER_WHERE, mysql2Oracle.getReader().getWhere()); 79 | // writer 80 | return root; 81 | } 82 | 83 | 84 | } 85 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/MysqlServiceImpl.java: 
-------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import com.isacc.datax.app.service.MysqlService; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.springframework.stereotype.Service; 6 | 7 | /** 8 | *
9 | * Mysql Service Impl 10 | *
11 | * 12 | * @author isacc 2019/04/29 19:44 13 | */ 14 | @Service 15 | @Slf4j 16 | public class MysqlServiceImpl implements MysqlService { 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/Oracle2HiveHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Optional; 6 | 7 | import com.isacc.datax.api.dto.ApiResult; 8 | import com.isacc.datax.api.dto.DataxSyncDTO; 9 | import com.isacc.datax.api.dto.Oracle2Hive; 10 | import com.isacc.datax.app.service.DataxHandler; 11 | import com.isacc.datax.infra.annotation.DataxHandlerType; 12 | import com.isacc.datax.infra.config.DataxProperties; 13 | import com.isacc.datax.infra.constant.DataxHandlerTypeConstants; 14 | import com.isacc.datax.infra.constant.DataxParameterConstants; 15 | import com.isacc.datax.infra.util.GenerateDataModelUtil; 16 | import lombok.extern.slf4j.Slf4j; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | * oracle数据同步到hive 21 | * 22 | * @author isacc 2019/05/28 11:21 23 | */ 24 | @Service 25 | @Slf4j 26 | @DataxHandlerType(DataxHandlerTypeConstants.ORACLE2HIVE) 27 | public class Oracle2HiveHandler extends BaseDataxServiceImpl implements DataxHandler { 28 | 29 | private final DataxProperties dataxProperties; 30 | 31 | public Oracle2HiveHandler(DataxProperties dataxProperties) { 32 | this.dataxProperties = dataxProperties; 33 | } 34 | 35 | @Override 36 | public ApiResult handle(DataxSyncDTO dataxSyncDTO) { 37 | return this.oracle2Hive(dataxSyncDTO); 38 | } 39 | 40 | private ApiResult oracle2Hive(DataxSyncDTO dataxSyncDTO) { 41 | final Oracle2Hive oracle2Hive = dataxSyncDTO.getOracle2Hive(); 42 | ApiResult failureResult = ApiResult.initFailure(); 43 | if (!Optional.ofNullable(oracle2Hive).isPresent()) { 44 | failureResult.setMessage("DataxSyncDTO.oracle2Hive is null!"); 45 | return failureResult; 46 | } 47 | // 判断是where还是querySql 48 | ApiResult generateJsonFileAndUploadResult; 49 | String jsonFileName = dataxSyncDTO.getJsonFileName(); 50 | if (Optional.ofNullable(oracle2Hive.getReader().getWhere()).isPresent()) { 51 | // where模式 52 | final String whereTemplate = dataxProperties.getOracle2Hive().getWhereTemplate(); 53 | Map dataModelWhere = this.generateDataModelOracle2HiveWhere(oracle2Hive); 54 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelWhere, whereTemplate, jsonFileName, dataxProperties); 55 | } else { 56 | // querySql模式 57 | final String querySqlTemplate = dataxProperties.getOracle2Hive().getQuerySqlTemplate(); 58 | Map dataModelQuerySql = this.generateDataModelOracle2HiveQuerySql(oracle2Hive); 59 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelQuerySql, querySqlTemplate, jsonFileName, dataxProperties); 60 | } 61 | return generateJsonFileAndUploadResult; 62 | } 63 | 64 | /** 65 | * 生成Oracle2HiveQuerySql的freemarker data model 66 | * 67 | * @param oracle2Hive Oracle2Hive 68 | * @return java.util.Map 69 | * @author isacc 2019/5/28 11:44 70 | */ 71 | private Map generateDataModelOracle2HiveQuerySql(Oracle2Hive oracle2Hive) { 72 | Map root = new HashMap<>(16); 73 | // setting 74 | root.put(DataxParameterConstants.SETTING, oracle2Hive.getSetting()); 75 | // reader 76 | GenerateDataModelUtil.commonOracleReader(root, oracle2Hive.getReader()); 77 | // writer 78 | 
root.put(DataxParameterConstants.HDFS_WRITER_MODE, oracle2Hive.getWriter().getWriteMode()); 79 | return GenerateDataModelUtil.commonHdfsWriter(root, oracle2Hive.getWriter()); 80 | } 81 | 82 | /** 83 | * 生成Oracle2HiveWhere的freemarker data model 84 | * 85 | * @param oracle2Hive Oracle2Hive 86 | * @return java.util.Map 87 | * @author isacc 2019/5/28 11:44 88 | */ 89 | private Map generateDataModelOracle2HiveWhere(Oracle2Hive oracle2Hive) { 90 | final HashMap root = new HashMap<>(16); 91 | // 通用的 92 | root.put(DataxParameterConstants.SETTING, oracle2Hive.getSetting()); 93 | GenerateDataModelUtil.commonOracleReader(root, oracle2Hive.getReader()); 94 | // oracle 95 | root.put(DataxParameterConstants.ORACLE_READER_COLUMN, oracle2Hive.getReader().getColumn()); 96 | root.put(DataxParameterConstants.ORACLE_READER_WHERE, oracle2Hive.getReader().getWhere()); 97 | // hdfs 98 | root.put(DataxParameterConstants.HDFS_WRITER_MODE, oracle2Hive.getWriter().getWriteMode()); 99 | return GenerateDataModelUtil.commonHdfsWriter(root, oracle2Hive.getWriter()); 100 | } 101 | 102 | } 103 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/Oracle2MysqlHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Optional; 6 | 7 | import com.isacc.datax.api.dto.ApiResult; 8 | import com.isacc.datax.api.dto.DataxSyncDTO; 9 | import com.isacc.datax.api.dto.Oracle2Mysql; 10 | import com.isacc.datax.app.service.DataxHandler; 11 | import com.isacc.datax.infra.annotation.DataxHandlerType; 12 | import com.isacc.datax.infra.config.DataxProperties; 13 | import com.isacc.datax.infra.constant.DataxHandlerTypeConstants; 14 | import com.isacc.datax.infra.constant.DataxParameterConstants; 15 | import com.isacc.datax.infra.util.GenerateDataModelUtil; 16 | import lombok.extern.slf4j.Slf4j; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | * description 21 | * 22 | * @author isacc 2019/05/29 11:49 23 | */ 24 | @Service 25 | @Slf4j 26 | @DataxHandlerType(DataxHandlerTypeConstants.ORACLE2MYSQL) 27 | public class Oracle2MysqlHandler extends BaseDataxServiceImpl implements DataxHandler { 28 | 29 | private final DataxProperties dataxProperties; 30 | 31 | public Oracle2MysqlHandler(DataxProperties dataxProperties) { 32 | this.dataxProperties = dataxProperties; 33 | } 34 | 35 | @Override 36 | public ApiResult handle(DataxSyncDTO dataxSyncDTO) { 37 | return this.oracle2Mysql(dataxSyncDTO); 38 | } 39 | 40 | private ApiResult oracle2Mysql(DataxSyncDTO dataxSyncDTO) { 41 | final Oracle2Mysql oracle2Mysql = dataxSyncDTO.getOracle2Mysql(); 42 | ApiResult failureResult = ApiResult.initFailure(); 43 | if (!Optional.ofNullable(oracle2Mysql).isPresent()) { 44 | failureResult.setMessage("DataxSyncDTO.oracle2Mysql is null!"); 45 | return failureResult; 46 | } 47 | // 判断是where还是querySql 48 | ApiResult generateJsonFileAndUploadResult; 49 | String jsonFileName = dataxSyncDTO.getJsonFileName(); 50 | if (Optional.ofNullable(oracle2Mysql.getReader().getWhere()).isPresent()) { 51 | // where模式 52 | final String whereTemplate = dataxProperties.getOracle2Mysql().getWhereTemplate(); 53 | Map dataModelWhere = this.generateDataModelOracle2MysqlWhere(oracle2Mysql); 54 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelWhere, whereTemplate, jsonFileName, dataxProperties); 55 | } else { 56 
| // querySql模式 57 | final String querySqlTemplate = dataxProperties.getOracle2Mysql().getQuerySqlTemplate(); 58 | Map dataModelQuerySql = this.generateDataModelOracle2MysqlQuerySql(oracle2Mysql); 59 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelQuerySql, querySqlTemplate, jsonFileName, dataxProperties); 60 | } 61 | return generateJsonFileAndUploadResult; 62 | } 63 | 64 | /** 65 | * 生成Oracle2MysqlQuerySql的freemarker data model 66 | * 67 | * @param oracle2Mysql Oracle2Mysql 68 | * @return java.util.Map 69 | * @author isacc 2019/5/29 11:44 70 | */ 71 | private Map generateDataModelOracle2MysqlQuerySql(Oracle2Mysql oracle2Mysql) { 72 | Map root = new HashMap<>(16); 73 | // setting 74 | root.put(DataxParameterConstants.SETTING, oracle2Mysql.getSetting()); 75 | // reader 76 | GenerateDataModelUtil.commonOracleReader(root, oracle2Mysql.getReader()); 77 | // writer 78 | root.put(DataxParameterConstants.MYSQL_WRITE_MODE, oracle2Mysql.getWriter().getWriteMode()); 79 | return GenerateDataModelUtil.commonMysqlWriter(root, oracle2Mysql.getWriter()); 80 | } 81 | 82 | /** 83 | * 生成Oracle2MysqlWhere的freemarker data model 84 | * 85 | * @param oracle2Mysql Oracle2Mysql 86 | * @return java.util.Map 87 | * @author isacc 2019/5/29 11:44 88 | */ 89 | private Map generateDataModelOracle2MysqlWhere(Oracle2Mysql oracle2Mysql) { 90 | final HashMap root = new HashMap<>(16); 91 | // 通用的 92 | root.put(DataxParameterConstants.SETTING, oracle2Mysql.getSetting()); 93 | // reader 94 | GenerateDataModelUtil.commonOracleReader(root, oracle2Mysql.getReader()); 95 | root.put(DataxParameterConstants.ORACLE_READER_WHERE, oracle2Mysql.getReader().getWhere()); 96 | // writer 97 | root.put(DataxParameterConstants.MYSQL_WRITE_MODE, oracle2Mysql.getWriter().getWriteMode()); 98 | return GenerateDataModelUtil.commonMysqlWriter(root, oracle2Mysql.getWriter()); 99 | } 100 | 101 | } 102 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/app/service/impl/Oracle2OracleHandler.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.app.service.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Optional; 6 | 7 | import com.isacc.datax.api.dto.ApiResult; 8 | import com.isacc.datax.api.dto.DataxSyncDTO; 9 | import com.isacc.datax.api.dto.Oracle2Oracle; 10 | import com.isacc.datax.app.service.DataxHandler; 11 | import com.isacc.datax.infra.annotation.DataxHandlerType; 12 | import com.isacc.datax.infra.config.DataxProperties; 13 | import com.isacc.datax.infra.constant.DataxHandlerTypeConstants; 14 | import com.isacc.datax.infra.constant.DataxParameterConstants; 15 | import com.isacc.datax.infra.util.GenerateDataModelUtil; 16 | import lombok.extern.slf4j.Slf4j; 17 | import org.springframework.stereotype.Service; 18 | 19 | /** 20 | * oracle数据同步到oracle 21 | * 22 | * @author isacc 2019/05/29 11:21 23 | */ 24 | @Service 25 | @Slf4j 26 | @DataxHandlerType(DataxHandlerTypeConstants.ORACLE2ORACLE) 27 | public class Oracle2OracleHandler extends BaseDataxServiceImpl implements DataxHandler { 28 | 29 | private final DataxProperties dataxProperties; 30 | 31 | public Oracle2OracleHandler(DataxProperties dataxProperties) { 32 | this.dataxProperties = dataxProperties; 33 | } 34 | 35 | @Override 36 | public ApiResult handle(DataxSyncDTO dataxSyncDTO) { 37 | return this.oracle2Oracle(dataxSyncDTO); 38 | } 39 | 40 | private ApiResult oracle2Oracle(DataxSyncDTO dataxSyncDTO) { 
41 | final Oracle2Oracle oracle2Oracle = dataxSyncDTO.getOracle2Oracle(); 42 | ApiResult failureResult = ApiResult.initFailure(); 43 | if (!Optional.ofNullable(oracle2Oracle).isPresent()) { 44 | failureResult.setMessage("DataxSyncDTO.oracle2Oracle is null!"); 45 | return failureResult; 46 | } 47 | // 判断是where还是querySql 48 | ApiResult generateJsonFileAndUploadResult; 49 | String jsonFileName = dataxSyncDTO.getJsonFileName(); 50 | if (Optional.ofNullable(oracle2Oracle.getReader().getWhere()).isPresent()) { 51 | // where模式 52 | final String whereTemplate = dataxProperties.getOracle2Oracle().getWhereTemplate(); 53 | Map dataModelWhere = this.generateDataModelOracle2OracleWhere(oracle2Oracle); 54 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelWhere, whereTemplate, jsonFileName, dataxProperties); 55 | } else { 56 | // querySql模式 57 | final String querySqlTemplate = dataxProperties.getOracle2Oracle().getQuerySqlTemplate(); 58 | Map dataModelQuerySql = this.generateDataModelOracle2OracleQuerySql(oracle2Oracle); 59 | generateJsonFileAndUploadResult = this.generateJsonFileAndUpload(dataModelQuerySql, querySqlTemplate, jsonFileName, dataxProperties); 60 | } 61 | return generateJsonFileAndUploadResult; 62 | } 63 | 64 | /** 65 | * 生成Oracle2OracleQuerySql的freemarker data model 66 | * 67 | * @param oracle2Oracle Oracle2Oracles 68 | * @return java.util.Map 69 | * @author isacc 2019/5/29 11:44 70 | */ 71 | private Map generateDataModelOracle2OracleQuerySql(Oracle2Oracle oracle2Oracle) { 72 | Map root = new HashMap<>(16); 73 | // setting 74 | root.put(DataxParameterConstants.SETTING, oracle2Oracle.getSetting()); 75 | // reader 76 | GenerateDataModelUtil.commonOracleReader(root, oracle2Oracle.getReader()); 77 | // writer 78 | return GenerateDataModelUtil.commonOracleWriter(root, oracle2Oracle.getWriter()); 79 | } 80 | 81 | /** 82 | * 生成Oracle2OracleWhere的freemarker data model 83 | * 84 | * @param oracle2Oracle Oracle2Oracle 85 | * @return java.util.Map 86 | * @author isacc 2019/5/29 11:44 87 | */ 88 | private Map generateDataModelOracle2OracleWhere(Oracle2Oracle oracle2Oracle) { 89 | final HashMap root = new HashMap<>(16); 90 | // 通用的 91 | root.put(DataxParameterConstants.SETTING, oracle2Oracle.getSetting()); 92 | // reader 93 | GenerateDataModelUtil.commonOracleReader(root, oracle2Oracle.getReader()); 94 | root.put(DataxParameterConstants.ORACLE_READER_WHERE, oracle2Oracle.getReader().getWhere()); 95 | // writer 96 | return GenerateDataModelUtil.commonOracleWriter(root, oracle2Oracle.getWriter()); 97 | } 98 | 99 | } 100 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/DataxSync.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity; 2 | 3 | import java.util.Date; 4 | import javax.validation.constraints.NotBlank; 5 | import javax.validation.constraints.NotNull; 6 | 7 | import com.baomidou.mybatisplus.annotation.IdType; 8 | import com.baomidou.mybatisplus.annotation.TableId; 9 | import com.baomidou.mybatisplus.annotation.TableName; 10 | import com.baomidou.mybatisplus.annotation.Version; 11 | import io.swagger.annotations.ApiModel; 12 | import io.swagger.annotations.ApiModelProperty; 13 | import lombok.*; 14 | 15 | /** 16 | * 数据同步表 17 | * 18 | * @author isacc 2019-05-17 14:07:48 19 | */ 20 | @Data 21 | @Builder 22 | @NoArgsConstructor 23 | @AllArgsConstructor 24 | @EqualsAndHashCode(callSuper = false) 25 | @ApiModel("数据同步表") 26 | 
@TableName(value = "datax_sync") 27 | public class DataxSync { 28 | 29 | public static final String FIELD_SYNC_ID = "syncId"; 30 | public static final String FIELD_SYNC_NAME = "syncName"; 31 | public static final String FIELD_SYNC_DESCRIPTION = "syncDescription"; 32 | public static final String FIELD_SOURCE_DATASOURCE_TYPE = "sourceDatasourceType"; 33 | public static final String FIELD_SOURCE_DATASOURCE_ID = "sourceDatasourceId"; 34 | public static final String FIELD_WRITE_DATASOURCE_TYPE = "writeDatasourceType"; 35 | public static final String FIELD_WRITE_DATASOURCE_ID = "writeDatasourceId"; 36 | public static final String FIELD_JSON_FILE_NAME ="jsonFileName"; 37 | public static final String FIELD_SETTING_INFO = "settingInfo"; 38 | public static final String FIELD_TENANT_ID = "tenantId"; 39 | public static final String FIELD_OBJECT_VERSION_NUMBER = "objectVersionNumber"; 40 | public static final String FIELD_CREATION_DATE = "creationDate"; 41 | public static final String FIELD_CREATED_BY = "createdBy"; 42 | public static final String FIELD_LAST_UPDATED_BY = "lastUpdatedBy"; 43 | public static final String FIELD_LAST_UPDATE_DATE = "lastUpdateDate"; 44 | 45 | // 46 | // 业务方法(按public protected private顺序排列) 47 | // ------------------------------------------------------------------------------ 48 | 49 | // 50 | // 数据库字段 51 | // ------------------------------------------------------------------------------ 52 | 53 | @ApiModelProperty("表ID,主键,供其他表做外键") 54 | @TableId(type = IdType.AUTO) 55 | private Long syncId; 56 | @ApiModelProperty(value = "同步名称", required = true) 57 | @NotBlank 58 | private String syncName; 59 | @ApiModelProperty(value = "同步描述", required = true) 60 | @NotBlank 61 | private String syncDescription; 62 | @ApiModelProperty(value = "来源数据源类型,快码:HDSP.DATASOURCE_TYPE", required = true) 63 | @NotBlank 64 | private String sourceDatasourceType; 65 | @ApiModelProperty(value = "来源数据源ID,关联HDSP_CORE_DATASOURCE.DATASOURCE_ID", required = true) 66 | @NotNull 67 | private Long sourceDatasourceId; 68 | @ApiModelProperty(value = "写入数据源类型,快码:HDSP.DATASOURCE_TYPE", required = true) 69 | @NotBlank 70 | private String writeDatasourceType; 71 | @ApiModelProperty(value = "写入数据源ID,关联HDSP_CORE_DATASOURCE.DATASOURCE_ID", required = true) 72 | @NotNull 73 | private Long writeDatasourceId; 74 | @ApiModelProperty(value = "生成的Datax Json文件名称", required = true) 75 | @NotBlank 76 | private String jsonFileName; 77 | @ApiModelProperty(value = "调度任务ID,关联HDSP_DISP_JOB.JOB_ID,回写") 78 | private Byte[] settingInfo; 79 | @ApiModelProperty(value = "租户ID") 80 | private Long tenantId; 81 | @ApiModelProperty(value = "版本号", required = true) 82 | @NotNull 83 | @Version 84 | private Long objectVersionNumber; 85 | @ApiModelProperty(required = true) 86 | @NotNull 87 | private Date creationDate; 88 | @ApiModelProperty(required = true) 89 | @NotNull 90 | private Long createdBy; 91 | @ApiModelProperty(required = true) 92 | @NotNull 93 | private Long lastUpdatedBy; 94 | @ApiModelProperty(required = true) 95 | @NotNull 96 | private Date lastUpdateDate; 97 | 98 | } 99 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/datax/BaseDatax.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.datax; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import lombok.AllArgsConstructor; 5 | import lombok.Data; 6 | import lombok.EqualsAndHashCode; 7 | import 
lombok.NoArgsConstructor; 8 | 9 | /** 10 | *
<p> 11 | * Base DataX 12 | * </p>
13 | * 14 | * @author isacc 2019/04/29 14:03 15 | */ 16 | @Data 17 | @EqualsAndHashCode(callSuper = false) 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | @JsonInclude(JsonInclude.Include.NON_NULL) 21 | public class BaseDatax { 22 | 23 | /** 24 | * DataX Setting 25 | */ 26 | private DataxSetting setting; 27 | 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/datax/DataxSetting.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.datax; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import lombok.*; 5 | 6 | /** 7 | *
<p> 8 | * DataX Setting 9 | * </p>
10 | * 11 | * @author isacc 2019/04/29 13:59 12 | */ 13 | @SuppressWarnings("WeakerAccess") 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class DataxSetting { 21 | 22 | private DataxSpeed speed; 23 | private ErrorLimit errorLimit; 24 | 25 | @Builder 26 | @Data 27 | @EqualsAndHashCode(callSuper = false) 28 | @NoArgsConstructor 29 | @AllArgsConstructor 30 | @JsonInclude(JsonInclude.Include.NON_NULL) 31 | public static class ErrorLimit { 32 | 33 | /** 34 | * record 35 | */ 36 | private String record; 37 | /** 38 | * percentage 39 | */ 40 | private String percentage; 41 | } 42 | 43 | @Builder 44 | @Data 45 | @EqualsAndHashCode(callSuper = false) 46 | @NoArgsConstructor 47 | @AllArgsConstructor 48 | @JsonInclude(JsonInclude.Include.NON_NULL) 49 | public static class DataxSpeed { 50 | /** 51 | * record 52 | */ 53 | private String record; 54 | /** 55 | * channel 56 | */ 57 | private String channel; 58 | /** 59 | * byte 60 | */ 61 | private String speedByte; 62 | 63 | 64 | } 65 | 66 | } 67 | 68 | 69 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/datax/HivePartition.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.datax; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import lombok.*; 5 | 6 | /** 7 | *
<p> 8 | * Hive分区信息 9 | * </p>
10 | * 11 | * @author isacc 2019/05/09 13:46 12 | */ 13 | @Builder 14 | @Data 15 | @EqualsAndHashCode(callSuper = false) 16 | @NoArgsConstructor 17 | @AllArgsConstructor 18 | @JsonInclude(JsonInclude.Include.NON_NULL) 19 | public class HivePartition { 20 | 21 | /** 22 | * 分区类型 23 | */ 24 | @Builder.Default 25 | private String type = "STRING"; 26 | /** 27 | * 分区名称 28 | */ 29 | private String name; 30 | /** 31 | * 分区值 32 | */ 33 | private String value; 34 | 35 | } 36 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/datax/MysqlInfo.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.datax; 2 | 3 | import java.util.List; 4 | 5 | import com.fasterxml.jackson.annotation.JsonInclude; 6 | import lombok.*; 7 | 8 | /** 9 | *
<p> 10 | * mysqlReader获取mysql的信息 11 | * </p>
12 | * 13 | * @author isacc 2019/05/10 16:29 14 | */ 15 | @Builder 16 | @Data 17 | @EqualsAndHashCode(callSuper = false) 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | @JsonInclude(JsonInclude.Include.NON_NULL) 21 | public class MysqlInfo { 22 | 23 | /** 24 | * 数据库名 25 | */ 26 | private String databaseName; 27 | /** 28 | * 库下的表集合 29 | */ 30 | private List tableList; 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/reader/BaseReader.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.reader; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import lombok.AllArgsConstructor; 5 | import lombok.Data; 6 | import lombok.EqualsAndHashCode; 7 | import lombok.NoArgsConstructor; 8 | 9 | /** 10 | *
<p> 11 | * DataX Reader 12 | * </p>
13 | * 14 | * @author isacc 2019/04/29 14:03 15 | */ 16 | @Data 17 | @EqualsAndHashCode(callSuper = false) 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | @JsonInclude(JsonInclude.Include.NON_NULL) 21 | public class BaseReader { 22 | /** 23 | * DataX reader插件名称 24 | */ 25 | protected String name; 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/reader/hdfsreader/HdfsColumn.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.reader.hdfsreader; 2 | 3 | 4 | import com.fasterxml.jackson.annotation.JsonInclude; 5 | import lombok.*; 6 | 7 | /** 8 | *
<p> 9 | * 指定Column信息,type必须填写,index/value必须选择其一 10 | * </p>
11 | * 12 | * @author isacc 2019/04/28 15:05 13 | */ 14 | @Builder 15 | @Data 16 | @EqualsAndHashCode(callSuper = false) 17 | @NoArgsConstructor 18 | @AllArgsConstructor 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class HdfsColumn { 21 | 22 | /** 23 | * index指定当前列来自于文本第几列(以0开始) 24 | */ 25 | private Integer index; 26 | /** 27 | * type指定源数据的类型 28 | */ 29 | private String type; 30 | /** 31 | * value指定当前类型为常量,不从源头文件读取数据,而是根据value值自动生成对应的列 32 | */ 33 | private String value; 34 | /** 35 | * 列名 36 | */ 37 | private String name; 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/reader/hdfsreader/HdfsFileTypeEnum.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.reader.hdfsreader; 2 | 3 | 4 | /** 5 | *
<p> 6 | * hdfs file type 7 | * </p>
8 | * 9 | * @author isacc 2019/04/28 14:59 10 | */ 11 | public enum HdfsFileTypeEnum { 12 | /** 13 | * textfile文件格式 14 | */ 15 | TEXT("TEXTFILE"), 16 | /** 17 | * orcfile文件格式 18 | */ 19 | ORC("ORC"), 20 | /** 21 | * rcfile文件格式 22 | */ 23 | RC("RCFILE"), 24 | /** 25 | * sequence file文件格式 26 | */ 27 | SEQ("SEQUENCEFILE"), 28 | /** 29 | * 普通hdfs文件格式(逻辑二维表) 30 | */ 31 | CSV("CSV"); 32 | 33 | /** 34 | * hdfs file type 35 | */ 36 | private String fileType; 37 | 38 | HdfsFileTypeEnum(String fileType) { 39 | this.fileType = fileType; 40 | } 41 | 42 | public String getFileType() { 43 | return fileType; 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/reader/hdfsreader/HdfsReader.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.reader.hdfsreader; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.reader.BaseReader; 5 | import lombok.*; 6 | 7 | import java.util.List; 8 | import java.util.Map; 9 | import javax.validation.constraints.NotBlank; 10 | import javax.validation.constraints.NotEmpty; 11 | 12 | /** 13 | *
<p> 14 | * hdfsreader插件的参数封装 15 | * </p>
16 | * 17 | * @author isacc 2019/04/28 14:43 18 | */ 19 | @Builder 20 | @Data 21 | @EqualsAndHashCode(callSuper = false) 22 | @NoArgsConstructor 23 | @AllArgsConstructor 24 | @JsonInclude(JsonInclude.Include.NON_NULL) 25 | public class HdfsReader extends BaseReader { 26 | 27 | /** 28 | * 要读取的文件路径,如果要读取多个文件,可以使用正则表达式"*",只支持"*"和"?"作为文件通配符 29 | */ 30 | @NotBlank 31 | private String path; 32 | /** 33 | * Hadoop hdfs文件系统namenode节点地址 34 | */ 35 | @NotBlank 36 | private String defaultFS; 37 | /** 38 | * 文件的类型,目前只支持用户配置为"text"、"orc"、"rc"、"seq"、"csv" 39 | */ 40 | @NotBlank 41 | private String fileType; 42 | /** 43 | * 读取字段列表 44 | */ 45 | @NotEmpty 46 | private List column; 47 | /** 48 | * 读取的字段分隔符,默认为',' 49 | * HdfsReader在读取orcfile时,用户无需指定字段分割符 50 | */ 51 | private String fieldDelimiter; 52 | /** 53 | * 读取文件的编码配置 54 | * 默认值:utf-8 55 | */ 56 | private String encoding; 57 | /** 58 | * 文本文件中无法使用标准字符串定义null(空指针),DataX提供nullFormat定义哪些字符串可以表示为null 59 | * 例如配置,nullFormat:"\N",那么如果源头数据是"\N",DataX视作null字段 60 | */ 61 | private String nullFormat; 62 | /** 63 | * 是否有Kerberos认证,默认false 64 | * 若配置true,则配置项kerberosKeytabFilePath,kerberosPrincipal为必填 65 | */ 66 | private Boolean haveKerberos; 67 | /** 68 | * Kerberos认证 keytab文件路径,绝对路径 69 | */ 70 | private String kerberosKeytabFilePath; 71 | /** 72 | * Kerberos认证Principal名,如xxxx/hadoopclient@xxx.xxx 73 | */ 74 | private String kerberosPrincipal; 75 | /** 76 | * 当fileType(文件类型)为csv下的文件压缩方式,目前仅支持 gzip、bz2、zip、lzo、lzo_deflate、hadoop-snappy、framing-snappy压缩 77 | * orc文件类型下无需填写 78 | */ 79 | private String compress; 80 | /** 81 | * 配置与Hadoop相关的一些高级参数,比如HA的配置 82 | */ 83 | private Map hadoopConfig; 84 | /** 85 | * 读取CSV类型文件参数配置 86 | */ 87 | private Map csvReaderConfig; 88 | 89 | } 90 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/reader/mysqlreader/MysqlReader.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.reader.mysqlreader; 2 | 3 | 4 | import com.fasterxml.jackson.annotation.JsonInclude; 5 | import com.isacc.datax.domain.entity.reader.BaseReader; 6 | import lombok.AllArgsConstructor; 7 | import lombok.Data; 8 | import lombok.NoArgsConstructor; 9 | 10 | import lombok.*; 11 | 12 | import java.util.List; 13 | import javax.validation.constraints.NotBlank; 14 | import javax.validation.constraints.NotEmpty; 15 | 16 | /** 17 | *
<p> 18 | * DataX Mysql插件的parameter封装 19 | * </p>
20 | * 21 | * @author isacc 2019/04/28 10:34 22 | */ 23 | @Builder 24 | @Data 25 | @EqualsAndHashCode(callSuper = false) 26 | @NoArgsConstructor 27 | @AllArgsConstructor 28 | @JsonInclude(JsonInclude.Include.NON_NULL) 29 | public class MysqlReader extends BaseReader { 30 | 31 | /** 32 | * 数据源的用户名 33 | */ 34 | @NotBlank 35 | private String username; 36 | /** 37 | * 数据源指定用户名的密码 38 | */ 39 | @NotBlank 40 | private String password; 41 | /** 42 | * 所配置的表中需要同步的列名集合 43 | * 全选时值为:['*'] 44 | */ 45 | @NotEmpty 46 | private List column; 47 | /** 48 | * 使用splitPk代表的字段进行数据分片,仅支持整形数据切分,不支持浮点、字符串、日期等其他类型 49 | */ 50 | private String splitPk; 51 | /** 52 | * mysql连接信息 53 | */ 54 | @NotEmpty 55 | private List connection; 56 | /** 57 | * 筛选条件,MysqlReader根据指定的column、table、where条件拼接SQL,并根据这个SQL进行数据抽取 58 | */ 59 | private String where; 60 | } 61 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/reader/mysqlreader/ReaderConnection.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.reader.mysqlreader; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import lombok.*; 5 | 6 | import java.util.List; 7 | import javax.validation.constraints.NotEmpty; 8 | 9 | /** 10 | *
<p> 11 | * DataX Mysql,Oracle插件的connection封装 12 | * </p>
13 | * 14 | * @author isacc 2019/04/28 10:34 15 | */ 16 | @Builder 17 | @Data 18 | @EqualsAndHashCode(callSuper = false) 19 | @NoArgsConstructor 20 | @AllArgsConstructor 21 | @JsonInclude(JsonInclude.Include.NON_NULL) 22 | public class ReaderConnection { 23 | 24 | /** 25 | * 需要同步的表,支持同一schema下多张表同时抽取 26 | */ 27 | @NotEmpty 28 | private List table; 29 | /** 30 | * 支持一个库填写多个连接地址,依次探测ip的可连接性,直到选择一个合法的IP 31 | */ 32 | @NotEmpty 33 | private List jdbcUrl; 34 | /** 35 | * 当用户配置querySql时,reader直接忽略table、column、where条件的配置 36 | */ 37 | @NotEmpty 38 | private List querySql; 39 | 40 | } 41 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/reader/oraclereader/OracleReader.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.reader.oraclereader; 2 | 3 | import java.util.List; 4 | 5 | import javax.validation.constraints.NotBlank; 6 | import javax.validation.constraints.NotEmpty; 7 | 8 | import com.fasterxml.jackson.annotation.JsonInclude; 9 | import com.isacc.datax.domain.entity.reader.BaseReader; 10 | import com.isacc.datax.domain.entity.reader.mysqlreader.ReaderConnection; 11 | import lombok.*; 12 | 13 | /** 14 | * DataX oraclereader封装 15 | * 16 | * @author isacc 2019/05/28 10:20 17 | */ 18 | @Builder 19 | @Data 20 | @EqualsAndHashCode(callSuper = false) 21 | @NoArgsConstructor 22 | @AllArgsConstructor 23 | @JsonInclude(JsonInclude.Include.NON_NULL) 24 | public class OracleReader extends BaseReader { 25 | 26 | /** 27 | * 数据库连接用户名 28 | */ 29 | @NotBlank 30 | private String username; 31 | /** 32 | * 数据库连接密码 33 | */ 34 | @NotBlank 35 | private String password; 36 | /** 37 | * 所配置的表中需要同步的列名集合 38 | * 全选时值为:['*'] 39 | */ 40 | @NotEmpty 41 | private List column; 42 | /** 43 | * 使用splitPk代表的字段进行数据分片,仅支持整形数据切分,不支持浮点、字符串、日期等其他类型 44 | */ 45 | private String splitPk; 46 | /** 47 | * 筛选条件,Reader根据指定的column、table、where条件拼接SQL,并根据这个SQL进行数据抽取 48 | */ 49 | private String where; 50 | /** 51 | * oracle连接信息 52 | */ 53 | @NotEmpty 54 | private List connection; 55 | /** 56 | * 插件和数据库服务器端每次批量数据获取条数,该值决定了DataX和服务器端的网络交互次数,能够较大的提升数据抽取性能 57 | * 注意,该值过大(>2048)可能造成DataX进程OOM 58 | * 默认值:1024 59 | */ 60 | private Long fetchSize; 61 | /** 62 | * 控制写入数据的时间格式,时区等的配置 63 | */ 64 | private List session; 65 | 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/BaseWriter.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import lombok.AllArgsConstructor; 5 | import lombok.Data; 6 | import lombok.EqualsAndHashCode; 7 | import lombok.NoArgsConstructor; 8 | 9 | /** 10 | *
<p> 11 | * DataX Writer 12 | * </p>
13 | * 14 | * @author isacc 2019/04/29 14:03 15 | */ 16 | @Data 17 | @EqualsAndHashCode(callSuper = false) 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | @JsonInclude(JsonInclude.Include.NON_NULL) 21 | public class BaseWriter { 22 | /** 23 | * DataX writer插件名称 24 | */ 25 | protected String name; 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/hdfswiter/HdfsCompressEnum.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer.hdfswiter; 2 | 3 | /** 4 | *
<p> 5 | * Hdfs压缩类型 6 | * </p>
7 | * 8 | * @author isacc 2019/04/29 16:10 9 | */ 10 | public enum HdfsCompressEnum { 11 | /** 12 | * gzip 13 | */ 14 | GZIP("gzip"), 15 | /** 16 | * bz2 17 | */ 18 | BZ2("bz2"), 19 | /** 20 | * zip 21 | */ 22 | ZIP("zip"), 23 | /** 24 | * lzo 25 | */ 26 | LZO("lzo"), 27 | /** 28 | * lzo_deflate 29 | */ 30 | LZO_DEFLATE("lzo_deflate"), 31 | /** 32 | * hadoop上的snappy stream format 33 | */ 34 | HADOOP_SNAPPY("hadoop-snappy"), 35 | /** 36 | * google建议的snappy stream format 37 | */ 38 | FRAMING_SNAPPY("framing-snappy"), 39 | /** 40 | * bzip2 41 | */ 42 | BZIP2("bzip2"), 43 | /** 44 | * NONE 45 | */ 46 | NONE("NONE"), 47 | /** 48 | * SNAPPY(需要用户安装SnappyCodec) 49 | */ 50 | SNAPPY("SNAPPY"); 51 | 52 | /** 53 | * 压缩类型 54 | */ 55 | private String type; 56 | 57 | HdfsCompressEnum(String type) { 58 | this.type = type; 59 | } 60 | 61 | public String getType() { 62 | return type; 63 | } 64 | 65 | 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/hdfswiter/HdfsWriter.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer.hdfswiter; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.isacc.datax.domain.entity.reader.hdfsreader.HdfsColumn; 5 | import com.isacc.datax.domain.entity.writer.BaseWriter; 6 | import lombok.*; 7 | 8 | import java.util.List; 9 | import java.util.Map; 10 | import javax.validation.constraints.NotBlank; 11 | import javax.validation.constraints.NotEmpty; 12 | 13 | /** 14 | *
<p> 15 | * DataX Hdfs Writer插件的parameter封装 16 | * </p>
17 | * 18 | * @author isacc 2019/04/29 15:15 19 | */ 20 | @Builder 21 | @Data 22 | @EqualsAndHashCode(callSuper = false) 23 | @NoArgsConstructor 24 | @AllArgsConstructor 25 | @JsonInclude(JsonInclude.Include.NON_NULL) 26 | public class HdfsWriter extends BaseWriter { 27 | 28 | /** 29 | * 要读取的文件路径,如果要读取多个文件,可以使用正则表达式"*",只支持"*"和"?"作为文件通配符 30 | */ 31 | @NotBlank 32 | private String path; 33 | /** 34 | * Hadoop hdfs文件系统namenode节点地址 35 | */ 36 | @NotBlank 37 | private String defaultFS; 38 | /** 39 | * 文件的类型,目前只支持用户配置为"text"、"orc" 40 | */ 41 | @NotBlank 42 | private String fileType; 43 | /** 44 | * HdfsWriter写入时的文件名 45 | */ 46 | @NotBlank 47 | private String fileName; 48 | /** 49 | * 写入数据的字段 50 | */ 51 | @NotEmpty 52 | private List column; 53 | /** 54 | * hdfswriter写入前数据清理处理模式: 55 | */ 56 | @NotBlank 57 | private String writeMode; 58 | /** 59 | * hdfswriter写入时的字段分隔符 60 | */ 61 | @NotBlank 62 | private String fieldDelimiter; 63 | /** 64 | * text类型文件支持压缩类型有gzip、bzip2;orc类型文件支持的压缩类型有NONE、SNAPPY(需要用户安装SnappyCodec) 65 | */ 66 | private String compress; 67 | /** 68 | * 读取文件的编码配置 69 | * 默认值:utf-8 70 | */ 71 | private String encoding; 72 | /** 73 | * 配置与Hadoop相关的一些高级参数,比如HA的配置 74 | */ 75 | private Map hadoopConfig; 76 | /** 77 | * 是否有Kerberos认证,默认false 78 | * 若配置true,则配置项kerberosKeytabFilePath,kerberosPrincipal为必填 79 | */ 80 | private Boolean haveKerberos; 81 | /** 82 | * Kerberos认证 keytab文件路径,绝对路径 83 | */ 84 | private String kerberosKeytabFilePath; 85 | /** 86 | * Kerberos认证Principal名,如xxxx/hadoopclient@xxx.xxx 87 | */ 88 | private String kerberosPrincipal; 89 | 90 | } 91 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/hdfswiter/HdfsWriterModeEnum.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer.hdfswiter; 2 | 3 | /** 4 | *
<p> 5 | * HdfsWriter Mode 6 | * </p>
7 | * 8 | * @author isacc 2019/04/28 14:25 9 | */ 10 | public enum HdfsWriterModeEnum { 11 | /** 12 | * append,写入前不做任何处理,DataX hdfswriter直接使用filename写入,并保证文件名不冲突。 13 | */ 14 | APPEND("append"), 15 | /** 16 | * nonConflict,如果目录下有fileName前缀的文件,直接报错。 17 | */ 18 | NON_CONFLICT("nonConflict"); 19 | 20 | /** 21 | * hdfs write mode 22 | */ 23 | private String writeMode; 24 | 25 | public String getWriteMode() { 26 | return writeMode; 27 | } 28 | 29 | HdfsWriterModeEnum(String writeMode) { 30 | this.writeMode = writeMode; 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/mysqlwriter/MysqlWriter.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer.mysqlwriter; 2 | 3 | 4 | import com.fasterxml.jackson.annotation.JsonInclude; 5 | import com.isacc.datax.domain.entity.writer.BaseWriter; 6 | import lombok.*; 7 | 8 | import javax.validation.constraints.NotBlank; 9 | import javax.validation.constraints.NotEmpty; 10 | import javax.validation.constraints.NotNull; 11 | import java.util.List; 12 | 13 | /** 14 | *
<p> 15 | * DataX Mysql插件的parameter封装 16 | * </p>
17 | * 18 | * @author isacc 2019/04/28 10:34 19 | */ 20 | @Builder 21 | @Data 22 | @EqualsAndHashCode(callSuper = false) 23 | @NoArgsConstructor 24 | @AllArgsConstructor 25 | @JsonInclude(JsonInclude.Include.NON_NULL) 26 | public class MysqlWriter extends BaseWriter { 27 | 28 | /** 29 | * 数据源的用户名 30 | */ 31 | @NotBlank 32 | private String username; 33 | /** 34 | * 数据源指定用户名的密码 35 | */ 36 | @NotBlank 37 | private String password; 38 | /** 39 | * 所配置的表中需要同步的列名集合 40 | * 全选时值为:['*'] 41 | */ 42 | @NotEmpty 43 | private List column; 44 | /** 45 | * Mysql连接信息 46 | */ 47 | @NotNull 48 | private List connection; 49 | /** 50 | * DataX在获取Mysql连接时,执行session指定的SQL语句,修改当前connection session属性 51 | */ 52 | private List session; 53 | /** 54 | * 写入数据到目的表前,会先执行这里的标准语句 55 | */ 56 | private List preSql; 57 | /** 58 | * 写入数据到目的表后,会执行这里的标准语句 59 | */ 60 | private List postSql; 61 | /** 62 | * 控制写入数据到目标表采用 insert into 或者 replace into 或者 ON DUPLICATE KEY UPDATE 语句 63 | * 所有选项:insert/replace/update 64 | * 默认值:insert 65 | */ 66 | @NotBlank 67 | private String writeMode; 68 | /** 69 | * 一次性批量提交的记录数大小,该值可以极大减少DataX与Mysql的网络交互次数,并提升整体吞吐量 70 | * 但是该值设置过大可能会造成DataX运行进程OOM情况。 71 | * 默认值:1024 72 | */ 73 | private Long batchSize; 74 | 75 | } 76 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/mysqlwriter/MysqlWriterModeEnum.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer.mysqlwriter; 2 | 3 | /** 4 | *
<p> 5 | * MysqlWriter Mode 6 | * </p>
7 | * 8 | @author isacc 2019/04/28 14:25 9 | */ 10 | public enum MysqlWriterModeEnum { 11 | /** 12 | * insert into 13 | */ 14 | INSERT("insert"), 15 | /** 16 | * replace into 17 | */ 18 | REPLACE("replace"), 19 | /** 20 | * on duplicate key update 21 | */ 22 | UPDATE("update"); 23 | 24 | /** 25 | * mysql write mode 26 | */ 27 | private String writeMode; 28 | 29 | public String getWriteMode() { 30 | return writeMode; 31 | } 32 | 33 | MysqlWriterModeEnum(String writeMode) { 34 | this.writeMode = writeMode; 35 | } 36 | 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/mysqlwriter/WriterConnection.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer.mysqlwriter; 2 | 3 | import java.util.List; 4 | 5 | import javax.validation.constraints.NotBlank; 6 | import javax.validation.constraints.NotEmpty; 7 | 8 | import com.fasterxml.jackson.annotation.JsonInclude; 9 | import lombok.*; 10 | 11 | 12 | /** 13 | *
<p> 14 | * DataX Mysql Oracle插件的connection封装 15 | * </p>
16 | * 17 | * @author isacc 2019/04/28 10:34 18 | */ 19 | @Builder 20 | @Data 21 | @EqualsAndHashCode(callSuper = false) 22 | @NoArgsConstructor 23 | @AllArgsConstructor 24 | @JsonInclude(JsonInclude.Include.NON_NULL) 25 | public class WriterConnection { 26 | 27 | /** 28 | * 需要同步的表,支持同一schema下多张表同时抽取 29 | */ 30 | @NotEmpty 31 | private List table; 32 | /** 33 | * DataX 会在你提供的 jdbcUrl 后面追加如下属性: 34 | * yearIsDateType=false&zeroDateTimeBehavior=convertToNull&rewriteBatchedStatements=true 35 | */ 36 | @NotBlank 37 | private String jdbcUrl; 38 | 39 | } 40 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/entity/writer/oraclewriter/OracleWriter.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.entity.writer.oraclewriter; 2 | 3 | import java.util.List; 4 | 5 | import javax.validation.constraints.NotBlank; 6 | import javax.validation.constraints.NotEmpty; 7 | import javax.validation.constraints.NotNull; 8 | 9 | import com.fasterxml.jackson.annotation.JsonInclude; 10 | import com.isacc.datax.domain.entity.writer.BaseWriter; 11 | import com.isacc.datax.domain.entity.writer.mysqlwriter.WriterConnection; 12 | import lombok.*; 13 | 14 | /** 15 | * DataX oraclewriter封装 16 | * 17 | * @author isacc 2019/05/28 10:55 18 | */ 19 | @Builder 20 | @Data 21 | @EqualsAndHashCode(callSuper = false) 22 | @NoArgsConstructor 23 | @AllArgsConstructor 24 | @JsonInclude(JsonInclude.Include.NON_NULL) 25 | public class OracleWriter extends BaseWriter { 26 | 27 | /** 28 | * 目的数据库的用户名 29 | */ 30 | @NotBlank 31 | private String username; 32 | /** 33 | * 目的数据库的密码 34 | */ 35 | @NotBlank 36 | private String password; 37 | /** 38 | * 所配置的表中需要同步的列名集合 39 | * 全选时值为:['*'] 40 | */ 41 | @NotEmpty 42 | private List column; 43 | /** 44 | * 写入数据到目的表前,会先执行这里的标准语句 45 | */ 46 | private List preSql; 47 | /** 48 | * 写入数据到目的表后,会执行这里的标准语句 49 | */ 50 | private List postSql; 51 | /** 52 | * 一次性批量提交的记录数大小,该值可以极大减少DataX与oracle的网络交互次数,并提升整体吞吐量 53 | * 但是该值设置过大可能会造成DataX运行进程OOM情况。 54 | * 默认值:1024 55 | */ 56 | private Long batchSize; 57 | /** 58 | * 设置oracle连接时的session信息 59 | * 如: "alter session set nls_date_format = 'dd.mm.yyyy hh24:mi:ss';" 60 | */ 61 | private List session; 62 | /** 63 | * Oracle连接信息 64 | */ 65 | @NotNull 66 | private List connection; 67 | 68 | } 69 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/repository/DataxSyncRepository.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.repository; 2 | 3 | import com.baomidou.mybatisplus.core.metadata.IPage; 4 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 5 | import com.isacc.datax.api.dto.DataxSyncDTO; 6 | import com.isacc.datax.domain.entity.DataxSync; 7 | 8 | /** 9 | * 数据同步表资源库 10 | * 11 | * @author isacc 2019-05-17 14:07:48 12 | */ 13 | @SuppressWarnings("UnusedReturnValue") 14 | public interface DataxSyncRepository { 15 | 16 | /** 17 | * 往同步表插入数据 18 | * 19 | * @param dataxSyncDTO DataxSyncDTO 20 | * @return com.isacc.datax.api.dto.DataxSyncDTO 21 | * @author isacc 2019/6/25 9:49 22 | */ 23 | DataxSyncDTO insertSelectiveDTO(DataxSyncDTO dataxSyncDTO); 24 | 25 | /** 26 | * 更新同步表数据 27 | * 28 | * @param dataxSyncDTO DataxSyncDTO 29 | * @return com.isacc.datax.api.dto.DataxSyncDTO 30 | * @author isacc 2019/6/25 9:49 31 | */ 32 | DataxSyncDTO updateSelectiveDTO(DataxSyncDTO dataxSyncDTO); 
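The reader and writer entities above mirror the parameter blocks of the corresponding DataX plugins, and all of them are annotated with @JsonInclude(JsonInclude.Include.NON_NULL), so any optional plugin parameter that is left unset is simply dropped when such an entity is serialized with Jackson. Below is a minimal, hypothetical sketch of that behaviour: the demo class and every value in it are invented for illustration, and only the entity and field names come from the OracleWriter and WriterConnection classes shown above.

```java
import java.util.Collections;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.isacc.datax.domain.entity.writer.mysqlwriter.WriterConnection;
import com.isacc.datax.domain.entity.writer.oraclewriter.OracleWriter;

public class OracleWriterJsonDemo {

    public static void main(String[] args) throws Exception {
        // Assemble a writer entity with the Lombok-generated builder (all values are made up).
        OracleWriter writer = OracleWriter.builder()
                .username("demo_user")
                .password("demo_password")
                .column(Collections.singletonList("*"))
                .connection(Collections.singletonList(WriterConnection.builder()
                        .table(Collections.singletonList("DEMO_TARGET_TABLE"))
                        .jdbcUrl("jdbc:oracle:thin:@//demo-host:1521/demo")
                        .build()))
                .build();
        // name is declared on BaseWriter, so it is set through the inherited setter
        // rather than through the subclass builder.
        writer.setName("oraclewriter");
        // Optional parameters such as preSql/postSql/batchSize/session were never set,
        // so @JsonInclude(NON_NULL) keeps them out of the serialized output entirely.
        System.out.println(new ObjectMapper().writeValueAsString(writer));
    }
}
```

The same annotation sits on the reader entities, so a request only has to carry the parameters a given job actually uses.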
33 | 34 | /** 35 | * 分页条件查询同步列表 36 | * 37 | * @param dataxSyncPage page 38 | * @param dataxSyncDTO DataxSyncDTO 39 | * @return com.baomidou.mybatisplus.core.metadata.IPage 40 | * @author isacc 2019/6/25 16:23 41 | */ 42 | IPage pageAndSortDTO(Page dataxSyncPage, DataxSyncDTO dataxSyncDTO); 43 | 44 | /** 45 | * 根据主键查询同步信息 46 | * 47 | * @param syncId 主键ID 48 | * @return com.isacc.datax.api.dto.DataxSyncDTO 49 | * @author isacc 2019/6/25 16:27 50 | */ 51 | DataxSyncDTO selectByPrimaryKey(Long syncId); 52 | 53 | /** 54 | * 删除 55 | * 56 | * @param syncId 主键id 57 | * @return int 58 | * @author isacc 2019/6/25 16:34 59 | */ 60 | int deleteByPrimaryKey(Long syncId); 61 | 62 | /** 63 | * 条件统计总数 64 | * 65 | * @param dataxSync DataxSync 66 | * @return int 67 | * @author isacc 2019/6/25 16:34 68 | */ 69 | int selectCount(DataxSync dataxSync); 70 | } 71 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/domain/repository/MysqlRepository.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.domain.repository; 2 | 3 | import com.isacc.datax.api.dto.ApiResult; 4 | import com.isacc.datax.api.dto.Hive2Hive; 5 | 6 | /** 7 | *
<p> 8 | * Mysql Repository 9 | * </p>
10 | * 11 | * @author isacc 2019/04/29 19:46 12 | */ 13 | public interface MysqlRepository { 14 | 15 | /** 16 | * 校验Hive数据库是否存在 reader and writer 17 | * 18 | * @param hive2Hive Hive2Hive 19 | * @return com.isacc.datax.api.dto.ApiResult 20 | * @author isacc 2019-05-07 15:22 21 | */ 22 | ApiResult checkHiveDbAndTable(Hive2Hive hive2Hive); 23 | 24 | /** 25 | * 校验Hive数据库是否存在 writer 26 | * 27 | * @param hive2Hive Hive2Hive 28 | * @return com.isacc.datax.api.dto.ApiResult 29 | * @author isacc 2019-05-07 15:22 30 | */ 31 | ApiResult checkWriterHiveDbAndTable(Hive2Hive hive2Hive); 32 | 33 | } 34 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/annotation/DataxHandlerType.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.annotation; 2 | 3 | import java.lang.annotation.*; 4 | 5 | /** 6 | *
<p> 7 | * 自定义注解,用于标识datax同步任务的类型 8 | * </p>
9 | * 10 | * @author isacc 2019/05/23 9:19 11 | */ 12 | @Target(ElementType.TYPE) 13 | @Retention(RetentionPolicy.RUNTIME) 14 | @Documented 15 | @Inherited 16 | public @interface DataxHandlerType { 17 | String value(); 18 | } 19 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/AzkabanProperties.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import lombok.Data; 4 | import org.springframework.boot.context.properties.ConfigurationProperties; 5 | import org.springframework.context.annotation.Configuration; 6 | import org.springframework.stereotype.Component; 7 | 8 | /** 9 | *
<p> 10 | * Azkaban Properties 11 | * </p>
12 | * 13 | * @author isacc 2019/05/13 22:11 14 | */ 15 | @Component 16 | @Data 17 | @Configuration 18 | @ConfigurationProperties(prefix = AzkabanProperties.PROPERTY_PREFIX) 19 | public class AzkabanProperties { 20 | 21 | public static final String PROPERTY_PREFIX = "azkaban"; 22 | 23 | /** 24 | * 主机名 25 | */ 26 | private String host; 27 | /** 28 | * 用户名 29 | */ 30 | private String username; 31 | /** 32 | * 密码 33 | */ 34 | private String password; 35 | /** 36 | * 本地存储路径 37 | */ 38 | private String localDicPath; 39 | /** 40 | * template名称 41 | */ 42 | private String templateName; 43 | /** 44 | * dataxJob.job的path 45 | */ 46 | private String dataxJob; 47 | /** 48 | * dataxParams.properties 49 | */ 50 | private String dataxProperties; 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/CustomSimpleClientHttpRequestFactory.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import java.io.IOException; 4 | import java.net.HttpURLConnection; 5 | import java.security.KeyManagementException; 6 | import java.security.NoSuchAlgorithmException; 7 | import java.security.SecureRandom; 8 | import java.security.cert.X509Certificate; 9 | import javax.net.ssl.*; 10 | 11 | import org.springframework.http.client.SimpleClientHttpRequestFactory; 12 | 13 | /** 14 | *
<p> 15 | * SimpleClientHttpRequestFactory that skips SSL certificate and hostname verification 16 | * </p>
17 | * 18 | * @author isacc 2019/05/14 14:27 19 | */ 20 | public class CustomSimpleClientHttpRequestFactory extends SimpleClientHttpRequestFactory { 21 | 22 | @SuppressWarnings("NullableProblems") 23 | @Override 24 | protected void prepareConnection(HttpURLConnection connection, String httpMethod) 25 | throws IOException { 26 | if (connection instanceof HttpsURLConnection) { 27 | prepareHttpsConnection((HttpsURLConnection) connection); 28 | } 29 | super.prepareConnection(connection, httpMethod); 30 | } 31 | 32 | private void prepareHttpsConnection(HttpsURLConnection connection) { 33 | connection.setHostnameVerifier(new SkipHostnameVerifier()); 34 | try { 35 | connection.setSSLSocketFactory(createSslSocketFactory()); 36 | } catch (Exception ex) { 37 | // Ignore 38 | } 39 | } 40 | 41 | private SSLSocketFactory createSslSocketFactory() throws NoSuchAlgorithmException, KeyManagementException { 42 | SSLContext context = SSLContext.getInstance("TLSv1.2"); 43 | context.init(null, new TrustManager[]{new SkipX509TrustManager()}, 44 | new SecureRandom()); 45 | return context.getSocketFactory(); 46 | } 47 | 48 | private class SkipHostnameVerifier implements HostnameVerifier { 49 | 50 | @Override 51 | public boolean verify(String s, SSLSession sslSession) { 52 | return true; 53 | } 54 | 55 | } 56 | 57 | private static class SkipX509TrustManager implements X509TrustManager { 58 | 59 | @Override 60 | public X509Certificate[] getAcceptedIssuers() { 61 | return new X509Certificate[0]; 62 | } 63 | 64 | @Override 65 | public void checkClientTrusted(X509Certificate[] chain, String authType) { 66 | } 67 | 68 | @Override 69 | public void checkServerTrusted(X509Certificate[] chain, String authType) { 70 | } 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/DataxHandlerContext.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import java.util.Map; 4 | import java.util.Optional; 5 | 6 | import com.isacc.datax.app.service.DataxHandler; 7 | import com.isacc.datax.infra.util.BeanUtil; 8 | 9 | 10 | /** 11 | *
<p> 12 | * DataxHandlerContext 13 | * </p>
14 | * 15 | * @author isacc 2019/05/23 9:40 16 | */ 17 | @SuppressWarnings({"unchecked"}) 18 | public class DataxHandlerContext { 19 | 20 | private Map handlerMap; 21 | 22 | public DataxHandlerContext(Map handlerMap) { 23 | this.handlerMap = handlerMap; 24 | } 25 | 26 | public DataxHandler getInstance(String type) { 27 | Class clazz = Optional.of(handlerMap.get(type)).orElseThrow(IllegalArgumentException::new); 28 | return (DataxHandler) BeanUtil.getBean(clazz); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/DataxHandlerProcessor.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import java.util.HashMap; 4 | import java.util.Optional; 5 | 6 | import com.google.common.collect.Maps; 7 | import com.isacc.datax.infra.annotation.DataxHandlerType; 8 | import org.springframework.beans.factory.config.BeanPostProcessor; 9 | import org.springframework.context.annotation.Bean; 10 | import org.springframework.context.annotation.Configuration; 11 | import org.springframework.core.annotation.AnnotationUtils; 12 | 13 | /** 14 | *
<p> 15 | * @DataxHandlerType注解扫描 16 | * </p>
17 | * 18 | * @author isacc 2019/05/23 23:19 19 | */ 20 | @Configuration 21 | public class DataxHandlerProcessor implements BeanPostProcessor { 22 | 23 | private HashMap handlerMap = Maps.newHashMapWithExpectedSize(16); 24 | 25 | @SuppressWarnings("NullableProblems") 26 | @Override 27 | public Object postProcessBeforeInitialization(Object bean, String beanName) { 28 | return bean; 29 | } 30 | 31 | @Override 32 | public Object postProcessAfterInitialization(Object bean, String beanName) { 33 | DataxHandlerType annotation = AnnotationUtils.findAnnotation(bean.getClass(), DataxHandlerType.class); 34 | Optional.ofNullable(annotation).ifPresent(o -> handlerMap.put(o.value(), bean.getClass())); 35 | return bean; 36 | } 37 | 38 | @Bean 39 | public DataxHandlerContext dataxHandlerContext() { 40 | return new DataxHandlerContext(handlerMap); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/DataxProperties.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import lombok.Data; 4 | import org.springframework.boot.context.properties.ConfigurationProperties; 5 | import org.springframework.context.annotation.Configuration; 6 | import org.springframework.stereotype.Component; 7 | 8 | /** 9 | *
<p> 10 | * Datax Properties 11 | * </p>
12 | * 13 | * @author isacc 2019/05/05 17:19 14 | */ 15 | @Component 16 | @Data 17 | @Configuration 18 | @ConfigurationProperties(prefix = DataxProperties.PROPERTY_PREFIX) 19 | public class DataxProperties { 20 | 21 | public static final String PROPERTY_PREFIX = "datax"; 22 | /** 23 | * datax home 24 | */ 25 | private String home; 26 | /** 27 | * datax server host 28 | */ 29 | private String host; 30 | /** 31 | * datax server port 32 | */ 33 | private String port; 34 | /** 35 | * datax server username 36 | */ 37 | private String username; 38 | /** 39 | * datax server username 40 | */ 41 | private String password; 42 | /** 43 | * 生成的datax json文件上传到datax服务器目录 44 | */ 45 | private String uploadDicPath; 46 | /** 47 | * 本地生成的datax json文件在目录 48 | */ 49 | private String localDicPath; 50 | /** 51 | * freemarker文件所在目录 52 | */ 53 | private String basePackagePath; 54 | 55 | private Mysql2Hive mysql2Hive; 56 | private Mysql2Mysql mysql2Mysql; 57 | private Mysql2Oracle mysql2Oracle; 58 | private String hive2HiveTemplate; 59 | private String hive2MysqlTemplate; 60 | private String hive2OracleTemplate; 61 | private Oracle2Hive oracle2Hive; 62 | private Oracle2Oracle oracle2Oracle; 63 | private Oracle2Mysql oracle2Mysql; 64 | 65 | @SuppressWarnings("WeakerAccess") 66 | @Data 67 | public static class Mysql2Hive { 68 | /** 69 | * mysql2hive的freemarker模板文件名称,使用where 70 | */ 71 | private String whereTemplate; 72 | /** 73 | * mysql2hive的freemarker模板文件名称,使用querySql 74 | */ 75 | private String querySqlTemplate; 76 | } 77 | 78 | @SuppressWarnings("WeakerAccess") 79 | @Data 80 | public static class Mysql2Oracle { 81 | /** 82 | * mysql2oracle的freemarker模板文件名称,使用where 83 | */ 84 | private String whereTemplate; 85 | /** 86 | * mysql2oracle的freemarker模板文件名称,使用querySql 87 | */ 88 | private String querySqlTemplate; 89 | } 90 | 91 | @SuppressWarnings("WeakerAccess") 92 | @Data 93 | public static class Oracle2Hive { 94 | /** 95 | * oracle2hive的freemarker模板文件名称,使用where 96 | */ 97 | private String whereTemplate; 98 | /** 99 | * oracle2hive的freemarker模板文件名称,使用querySql 100 | */ 101 | private String querySqlTemplate; 102 | } 103 | 104 | @SuppressWarnings("WeakerAccess") 105 | @Data 106 | public static class Mysql2Mysql { 107 | /** 108 | * mysql2mysql的freemarker模板文件名称,使用where 109 | */ 110 | private String whereTemplate; 111 | /** 112 | * mysql2mysql的freemarker模板文件名称,使用querySql 113 | */ 114 | private String querySqlTemplate; 115 | } 116 | 117 | @SuppressWarnings("WeakerAccess") 118 | @Data 119 | public static class Oracle2Oracle { 120 | /** 121 | * oracle2oracle的freemarker模板文件名称,使用where 122 | */ 123 | private String whereTemplate; 124 | /** 125 | * oracle2oracle的freemarker模板文件名称,使用querySql 126 | */ 127 | private String querySqlTemplate; 128 | } 129 | 130 | @SuppressWarnings("WeakerAccess") 131 | @Data 132 | public static class Oracle2Mysql { 133 | /** 134 | * oracle2mysql的freemarker模板文件名称,使用where 135 | */ 136 | private String whereTemplate; 137 | /** 138 | * oracle2mysql的freemarker模板文件名称,使用querySql 139 | */ 140 | private String querySqlTemplate; 141 | } 142 | 143 | } 144 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/JdbcTemplateConfig.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | 4 | import javax.sql.DataSource; 5 | 6 | import com.baomidou.dynamic.datasource.DynamicRoutingDataSource; 7 | import org.springframework.context.annotation.Bean; 8 | import 
org.springframework.context.annotation.Configuration; 9 | import org.springframework.jdbc.core.JdbcTemplate; 10 | 11 | /** 12 | *
<p> 13 | * JdbcTemplateConfig 14 | * </p>
15 | * 16 | * @author isacc 2019/06/04 20:52 17 | */ 18 | @Configuration 19 | public class JdbcTemplateConfig { 20 | 21 | @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") 22 | @Bean 23 | public JdbcTemplate hiveJdbcTemplate(DataSource dataSource) { 24 | return new JdbcTemplate(((DynamicRoutingDataSource) dataSource).getDataSource("hive")); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/RedisConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import com.fasterxml.jackson.annotation.JsonAutoDetect; 4 | import com.fasterxml.jackson.annotation.PropertyAccessor; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import org.springframework.context.annotation.Bean; 7 | import org.springframework.context.annotation.Configuration; 8 | import org.springframework.data.redis.connection.RedisConnectionFactory; 9 | import org.springframework.data.redis.core.RedisTemplate; 10 | import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer; 11 | import org.springframework.data.redis.serializer.StringRedisSerializer; 12 | 13 | /** 14 | *
<p> 15 | * redisTemplate用来操作对象 16 | * </p>
17 | * 18 | * @author isacc 2019/05/15 15:59 19 | */ 20 | @Configuration 21 | public class RedisConfiguration { 22 | 23 | @Bean 24 | public RedisTemplate redisTemplate(RedisConnectionFactory redisConnectionFactory) { 25 | RedisTemplate redisTemplate = new RedisTemplate<>(); 26 | redisTemplate.setConnectionFactory(redisConnectionFactory); 27 | // 使用Jackson2JsonRedisSerialize 替换默认序列化 28 | Jackson2JsonRedisSerializer jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer<>(Object.class); 29 | ObjectMapper objectMapper = new ObjectMapper(); 30 | objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); 31 | objectMapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL); 32 | jackson2JsonRedisSerializer.setObjectMapper(objectMapper); 33 | // 设置value的序列化规则和 key的序列化规则 34 | redisTemplate.setValueSerializer(jackson2JsonRedisSerializer); 35 | redisTemplate.setKeySerializer(new StringRedisSerializer()); 36 | redisTemplate.afterPropertiesSet(); 37 | return redisTemplate; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/RestTemplateConfig.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import org.springframework.context.annotation.Bean; 4 | import org.springframework.context.annotation.Configuration; 5 | import org.springframework.http.client.ClientHttpRequestFactory; 6 | import org.springframework.web.client.RestTemplate; 7 | 8 | /** 9 | *
<p> 10 | * RestTemplate configuration using the custom request factory 11 | * </p>
12 | * 13 | * @author isacc 2019/05/13 22:00 14 | */ 15 | @Configuration 16 | public class RestTemplateConfig { 17 | 18 | @Bean 19 | public RestTemplate cusRestTemplate(ClientHttpRequestFactory customSimpleClientHttpRequestFactory) { 20 | return new RestTemplate(customSimpleClientHttpRequestFactory); 21 | } 22 | 23 | @Bean 24 | public ClientHttpRequestFactory customSimpleClientHttpRequestFactory() { 25 | CustomSimpleClientHttpRequestFactory factory = new CustomSimpleClientHttpRequestFactory(); 26 | factory.setReadTimeout(5000); 27 | factory.setConnectTimeout(15000); 28 | return factory; 29 | } 30 | 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/config/SwaggerConfig.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.config; 2 | 3 | import org.springframework.context.annotation.Bean; 4 | import org.springframework.context.annotation.Configuration; 5 | import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; 6 | import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport; 7 | import springfox.documentation.builders.ApiInfoBuilder; 8 | import springfox.documentation.builders.PathSelectors; 9 | import springfox.documentation.builders.RequestHandlerSelectors; 10 | import springfox.documentation.service.ApiInfo; 11 | import springfox.documentation.service.Contact; 12 | import springfox.documentation.spi.DocumentationType; 13 | import springfox.documentation.spring.web.plugins.Docket; 14 | import springfox.documentation.swagger2.annotations.EnableSwagger2; 15 | 16 | /** 17 | *

18 | * description 19 | *

20 | * 21 | * @author headers 2019/05/16 17:32 22 | */ 23 | @Configuration 24 | @EnableSwagger2 25 | public class SwaggerConfig extends WebMvcConfigurationSupport { 26 | 27 | @Override 28 | public void addResourceHandlers(ResourceHandlerRegistry registry) { 29 | registry.addResourceHandler("swagger-ui.html") 30 | .addResourceLocations("classpath:/META-INF/resources/"); 31 | registry.addResourceHandler("/webjars/**") 32 | .addResourceLocations("classpath:/META-INF/resources/webjars/"); 33 | } 34 | 35 | @Bean 36 | public Docket api() { 37 | return new Docket(DocumentationType.SWAGGER_2) 38 | .groupName("v1") 39 | .select() 40 | .apis(RequestHandlerSelectors.basePackage("com.isacc.datax")) 41 | .paths(PathSelectors.any()) 42 | .build() 43 | .apiInfo(apiInfo()); 44 | } 45 | 46 | private ApiInfo apiInfo() { 47 | return new ApiInfoBuilder() 48 | .title("common-datax") 49 | .description("基于DataX的通用数据同步调度平台") 50 | .contact(new Contact("isacc", "https://github.com/codingdebugallday/common-datax", "codingdebugallday@163.com")) 51 | .version("1.0") 52 | .build(); 53 | } 54 | } -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/constant/Constants.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.constant; 2 | 3 | import java.util.Locale; 4 | 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | 7 | /** 8 | *

9 | * Constant configuration 10 | *

11 | * 12 | * @author isacc 2019/04/28 20:29 13 | */ 14 | @SuppressWarnings("unused") 15 | public final class Constants { 16 | private Constants() { 17 | throw new IllegalStateException(); 18 | } 19 | 20 | public static final Long DEFAULT_TENANT_ID = 0L; 21 | public static final String PAGE = "0"; 22 | public static final String SIZE = "10"; 23 | public static final String PAGE_FIELD_NAME = "page"; 24 | public static final String SIZE_FIELD_NAME = "size"; 25 | public static final int NEGATIVE_ONE = -1; 26 | public static final int PAGE_NUM = 0; 27 | public static final int PAGE_SIZE = 10; 28 | public static final String FIELD_BODY = "body"; 29 | public static final String FIELD_CONTENT = "content"; 30 | public static final Locale DEFAULT_LOCALE = Locale.CHINA; 31 | public static final String DEFAULT_LOCALE_STR = Locale.CHINA.toString(); 32 | public static final String FIELD_MSG = "message"; 33 | public static final String FIELD_FAILED = "failed"; 34 | public static final String FIELD_SUCCESS = "success"; 35 | public static final String FIELD_ERROR_MSG = "errorMsg"; 36 | public static final String DEFAULT_CHARSET = "UTF-8"; 37 | public static final String DEFAULT_ENV = "dev"; 38 | public static final ObjectMapper MAPPER = new ObjectMapper(); 39 | public static final String DEFAULT_CROWN_CODE = "+86"; 40 | public static final String DB_IS_NOT_EXIST = "DB_IS_NOT_EXIST"; 41 | public static final String TBL_IS_NOT_EXIST = "TBL_IS_NOT_EXIST"; 42 | 43 | public static final class Symbol { 44 | private Symbol() { 45 | throw new IllegalStateException(); 46 | } 47 | 48 | public static final String SIGH = "!"; 49 | public static final String AT = "@"; 50 | public static final String WELL = "#"; 51 | public static final String DOLLAR = "$"; 52 | public static final String RMB = "¥"; 53 | public static final String SPACE = " "; 54 | public static final String LB = System.getProperty("line.separator"); 55 | public static final String PERCENTAGE = "%"; 56 | public static final String AND = "&"; 57 | public static final String STAR = "*"; 58 | public static final String MIDDLE_LINE = "-"; 59 | public static final String LOWER_LINE = "_"; 60 | public static final String EQUAL = "="; 61 | public static final String PLUS = "+"; 62 | public static final String COLON = ":"; 63 | public static final String SEMICOLON = ";"; 64 | public static final String COMMA = ","; 65 | public static final String POINT = "."; 66 | public static final String SLASH = "/"; 67 | public static final String DOUBLE_SLASH = "//"; 68 | public static final String BACKSLASH = "\\"; 69 | public static final String SINGLE_QUOTE = "\""; 70 | public static final String QUESTION = "?"; 71 | public static final String LEFT_BIG_BRACE = "{"; 72 | public static final String LEFT_BRACE = "("; 73 | public static final String RIGHT_BIG_BRACE = "}"; 74 | public static final String RIGHT_BRACE = ")"; 75 | public static final String LEFT_MIDDLE_BRACE = "["; 76 | public static final String RIGHT_MIDDLE_BRACE = "]"; 77 | public static final String BACKQUOTE = "`"; 78 | } 79 | 80 | public static final class HeaderParam { 81 | private HeaderParam() { 82 | throw new IllegalStateException(); 83 | } 84 | 85 | public static final String REQUEST_HEADER_PARAM_PREFIX = "param-"; 86 | } 87 | 88 | public static final class Digital { 89 | private Digital() { 90 | throw new IllegalStateException(); 91 | } 92 | 93 | public static final int NEGATIVE_ONE = -1; 94 | public static final int ZERO = 0; 95 | public static final int ONE = 1; 96 | public static final int TWO = 
2; 97 | public static final int FOUR = 4; 98 | public static final int EIGHT = 8; 99 | public static final int SIXTEEN = 16; 100 | } 101 | 102 | public static final class Flag { 103 | private Flag() { 104 | throw new IllegalStateException(); 105 | } 106 | 107 | public static final Integer YES = 1; 108 | public static final Integer NO = 0; 109 | } 110 | 111 | public static final class Pattern { 112 | private Pattern() { 113 | throw new IllegalStateException(); 114 | } 115 | 116 | public static final String DATE = "yyyy-MM-dd"; 117 | public static final String DATETIME = "yyyy-MM-dd HH:mm:ss"; 118 | public static final String DATETIME_MM = "yyyy-MM-dd HH:mm"; 119 | public static final String DATETIME_SSS = "yyyy-MM-dd HH:mm:ss.SSS"; 120 | public static final String TIME = "HH:mm"; 121 | public static final String TIME_SS = "HH:mm:ss"; 122 | public static final String SYS_DATE = "yyyy/MM/dd"; 123 | public static final String SYS_DATETIME = "yyyy/MM/dd HH:mm:ss"; 124 | public static final String SYS_DATETIME_MM = "yyyy/MM/dd HH:mm"; 125 | public static final String SYS_DATETIME_SSS = "yyyy/MM/dd HH:mm:ss.SSS"; 126 | public static final String NONE_DATE = "yyyyMMdd"; 127 | public static final String NONE_DATETIME = "yyyyMMddHHmmss"; 128 | public static final String NONE_DATETIME_MM = "yyyyMMddHHmm"; 129 | public static final String NONE_DATETIME_SSS = "yyyyMMddHHmmssSSS"; 130 | public static final String CST_DATETIME = "EEE MMM dd HH:mm:ss 'CST' yyyy"; 131 | public static final String NONE_DECIMAL = "0"; 132 | public static final String ONE_DECIMAL = "0.0"; 133 | public static final String TWO_DECIMAL = "0.00"; 134 | public static final String TB_NONE_DECIMAL = "#,##0"; 135 | public static final String TB_ONE_DECIMAL = "#,##0.0"; 136 | public static final String TB_TWO_DECIMAL = "#,##0.00"; 137 | } 138 | 139 | } 140 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/constant/DataxHandlerTypeConstants.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.constant; 2 | 3 | /** 4 | *

5 | * DataX data sync task types, in the format source-target (all lowercase) 6 | *

7 | * 8 | * @author isacc 2019/05/23 13:50 9 | */ 10 | public final class DataxHandlerTypeConstants { 11 | 12 | private DataxHandlerTypeConstants() { 13 | throw new IllegalStateException("constant class!"); 14 | } 15 | 16 | public static final String MYSQL2MYSQL = "MYSQL-MYSQL"; 17 | public static final String MYSQL2HIVE = "MYSQL-HADOOP_HIVE_2"; 18 | public static final String MYSQL2ORACLE = "MYSQL-ORACLE"; 19 | public static final String HIVE2HIVE = "HADOOP_HIVE_2-HADOOP_HIVE_2"; 20 | public static final String HIVE2MYSQL = "HADOOP_HIVE_2-MYSQL"; 21 | public static final String HIVE2ORACLE = "HADOOP_HIVE_2-ORACLE"; 22 | public static final String ORACLE2HIVE = "ORACLE-HADOOP_HIVE_2"; 23 | public static final String ORACLE2MYSQL = "ORACLE-MYSQL"; 24 | public static final String ORACLE2ORACLE = "ORACLE-ORACLE"; 25 | 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/constant/DataxParameterConstants.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.constant; 2 | 3 | /** 4 | *

5 | * Parameter constants for the various DataX readers and writers 6 | *

7 | * 8 | * @author isacc 2019/05/22 13:50 9 | */ 10 | public final class DataxParameterConstants { 11 | 12 | private DataxParameterConstants() { 13 | throw new IllegalStateException("constant class!"); 14 | } 15 | 16 | public static final String SETTING = "setting"; 17 | /** 18 | * mysql 19 | */ 20 | public static final String MYSQL_READER_USERNAME = "mysqlreaderUsername"; 21 | public static final String MYSQL_READER_PASSWORD = "mysqlreaderPassword"; 22 | public static final String MYSQL_READER_CONNECTION = "mysqlreaderConnection"; 23 | public static final String MYSQL_READER_SPLIT_PK = "mysqlreaderSplitPk"; 24 | public static final String MYSQL_READER_WHERE = "mysqlreaderWhere"; 25 | public static final String MYSQL_READER_COLUMN = "mysqlreaderColumn"; 26 | public static final String MYSQL_WRITE_MODE = "writeMode"; 27 | public static final String MYSQL_WRITER_USERNAME = "mysqlwriterUsername"; 28 | public static final String MYSQL_WRITER_PASSWORD = "mysqlwriterPassword"; 29 | public static final String MYSQL_WRITER_COLUMN = "mysqlwriterColumn"; 30 | public static final String MYSQL_WRITER_BATCH_SIZE = "mysqlwriterBatchSize"; 31 | public static final String MYSQL_WRITER_SESSION = "mysqlwriterSession"; 32 | public static final String MYSQL_WRITER_PRE_SQL = "mysqlwriterPreSql"; 33 | public static final String MYSQL_WRITER_CONNECTION = "mysqlwriterConnection"; 34 | public static final String MYSQL_WRITER_POST_SQL = "mysqlwriterPostSql"; 35 | 36 | /** 37 | * hdfs 38 | */ 39 | public static final String HDFS_READER_COLUMN = "hdfsreaderColumn"; 40 | public static final String HDFS_READER_DEFAULT_FS = "hdfsreaderDefaultFS"; 41 | public static final String HDFS_READER_FILE_TYPE = "hdfsreaderFileType"; 42 | public static final String HDFS_READER_PATH = "hdfsreaderPath"; 43 | public static final String HDFS_READER_FIELD_DELIMITER = "hdfsreaderFieldDelimiter"; 44 | public static final String HDFS_READER_COMPRESS = "hdfsreaderCompress"; 45 | public static final String HDFS_READER_NULL_FORMAT = "hdfsreaderNullFormat"; 46 | public static final String HDFS_READER_HADOOP_CONFIG = "hdfsreaderHadoopConfig"; 47 | public static final String HDFS_READER_HAVE_KERBEROS = "hdfsreaderHaveKerberos"; 48 | public static final String HDFS_READER_KERBEROS_KEYTAB_FILE_PATH = "hdfsreaderKerberosKeytabFilePath"; 49 | public static final String HDFS_READER_KERBEROS_PRINCIPAL = "hdfsreaderKerberosPrincipal"; 50 | public static final String HDFS_READER_CSV_READER_CONFIG = "hdfsreaderCsvReaderConfig"; 51 | public static final String HDFS_WRITER_COLUMN = "hdfswriterColumn"; 52 | public static final String HDFS_WRITER_MODE = "writeMode"; 53 | public static final String HDFS_WRITER_DEFAULT_FS = "hdfswriterDefaultFS"; 54 | public static final String HDFS_WRITER_FILE_TYPE = "hdfswriterFileType"; 55 | public static final String HDFS_WRITER_PATH = "hdfswriterPath"; 56 | public static final String HDFS_WRITER_FILE_NAME = "hdfswriterFileName"; 57 | public static final String HDFS_WRITER_FIELD_DELIMITER = "hdfswriterFieldDelimiter"; 58 | public static final String HDFS_WRITER_COMPRESS = "hdfswriterCompress"; 59 | public static final String HDFS_WRITER_HADOOP_CONFIG = "hdfswriterHadoopConfig"; 60 | public static final String HDFS_WRITER_HAVE_KERBEROS = "hdfswriterHaveKerberos"; 61 | public static final String HDFS_WRITER_KERBEROS_KEYTAB_FILE_PATH = "hdfswriterKerberosKeytabFilePath"; 62 | public static final String HDFS_WRITER_KERBEROS_PRINCIPAL = "hdfswriterKerberosPrincipal"; 63 | 64 | /** 65 | * oracle 66 | */ 67 | public static 
final String ORACLE_READER_USERNAME = "oraclereaderUsername"; 68 | public static final String ORACLE_READER_PASSWORD = "oraclereaderPassword"; 69 | public static final String ORACLE_READER_COLUMN = "oraclereaderColumn"; 70 | public static final String ORACLE_READER_SPLIT_PK = "oraclereaderSplitPk"; 71 | public static final String ORACLE_READER_WHERE = "oraclereaderWhere"; 72 | public static final String ORACLE_READER_CONNECTION = "oraclereaderConnection"; 73 | public static final String ORACLE_READER_FETCH_SIZE = "oraclereaderFetchSize"; 74 | public static final String ORACLE_READER_SESSION = "oraclereaderSession"; 75 | public static final String ORACLE_WRITER_USERNAME = "oraclewriterUsername"; 76 | public static final String ORACLE_WRITER_PASSWORD = "oraclewriterPassword"; 77 | public static final String ORACLE_WRITER_COLUMN = "oraclewriterColumn"; 78 | public static final String ORACLE_WRITER_PRE_SQL = "oraclewriterPreSql"; 79 | public static final String ORACLE_WRITER_POST_SQL = "oraclewriterPostSql"; 80 | public static final String ORACLE_WRITER_BATCH_SIZE = "oraclewriterBatchSize"; 81 | public static final String ORACLE_WRITER_SESSION = "oraclewriterSession"; 82 | public static final String ORACLE_WRITER_CONNECTION = "oraclewriterConnection"; 83 | 84 | 85 | } 86 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/converter/ConvertorI.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.converter; 2 | 3 | /** 4 | * description 5 | * 6 | * @author isacc 2019/06/03 14:02 7 | */ 8 | public interface ConvertorI { 9 | 10 | /** 11 | * dtoToEntity 12 | * 13 | * @param dto DTO 14 | * @return E 15 | * @author isacc 2019/6/3 14:03 16 | */ 17 | default E dtoToEntity(T dto) { 18 | return null; 19 | } 20 | 21 | /** 22 | * entityToDto 23 | * 24 | * @param entity Entity 25 | * @return DTO 26 | * @author isacc 2019/6/3 14:03 27 | */ 28 | default T entityToDto(E entity) { 29 | return null; 30 | } 31 | 32 | /** 33 | * doToEntity 34 | * 35 | * @param dataObject DO 36 | * @return E 37 | * @author isacc 2019/6/3 14:03 38 | */ 39 | default E doToEntity(D dataObject) { 40 | return null; 41 | } 42 | 43 | /** 44 | * entityToDo 45 | * 46 | * @param entity Entity 47 | * @return Entity 48 | * @author isacc 2019/6/3 14:03 49 | */ 50 | default D entityToDo(E entity) { 51 | return null; 52 | } 53 | 54 | /** 55 | * doToDto 56 | * 57 | * @param dataObject DO 58 | * @return DTO 59 | * @author isacc 2019/6/3 14:03 60 | */ 61 | default T doToDto(D dataObject) { 62 | return null; 63 | } 64 | 65 | /** 66 | * dtoToDo 67 | * 68 | * @param dto DTO 69 | * @return DO 70 | * @author isacc 2019/6/3 14:03 71 | */ 72 | default D dtoToDo(T dto) { 73 | return null; 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/converter/DataxSyncConverter.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.converter; 2 | 3 | import java.util.Optional; 4 | 5 | import com.isacc.datax.api.dto.DataxSyncDTO; 6 | import com.isacc.datax.domain.entity.DataxSync; 7 | import com.isacc.datax.infra.dataobject.DataxSyncDO; 8 | import org.springframework.beans.BeanUtils; 9 | import org.springframework.stereotype.Component; 10 | 11 | /** 12 | * 数据同步表 13 | * 14 | * @author isacc 2019-05-17 14:07:48 15 | */ 16 | @Component 17 | public class DataxSyncConverter implements 
ConvertorI { 18 | 19 | 20 | @Override 21 | public DataxSync dtoToEntity(DataxSyncDTO dto) { 22 | return Optional.ofNullable(dto).map(o -> { 23 | DataxSync entity = DataxSync.builder().build(); 24 | BeanUtils.copyProperties(dto, entity); 25 | return entity; 26 | }).orElseThrow(IllegalArgumentException::new); 27 | } 28 | 29 | @Override 30 | public DataxSyncDTO entityToDto(DataxSync entity) { 31 | return Optional.ofNullable(entity).map(o -> { 32 | DataxSyncDTO dto = DataxSyncDTO.builder().build(); 33 | BeanUtils.copyProperties(entity, dto); 34 | return dto; 35 | }).orElseThrow(IllegalArgumentException::new); 36 | } 37 | } -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/dataobject/DataxSyncDO.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.dataobject; 2 | 3 | import java.util.Date; 4 | import javax.validation.constraints.NotBlank; 5 | import javax.validation.constraints.NotNull; 6 | 7 | import io.swagger.annotations.ApiModel; 8 | import io.swagger.annotations.ApiModelProperty; 9 | import lombok.*; 10 | 11 | /** 12 | * 数据同步表 13 | * 14 | * @author isacc 2019-05-17 14:07:48 15 | */ 16 | @Data 17 | @Builder 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | @EqualsAndHashCode(callSuper = false) 21 | @ApiModel("数据同步表") 22 | public class DataxSyncDO { 23 | 24 | public static final String FIELD_SYNC_ID = "syncId"; 25 | public static final String FIELD_SYNC_NAME = "syncName"; 26 | public static final String FIELD_SYNC_DESCRIPTION = "syncDescription"; 27 | public static final String FIELD_SOURCE_DATASOURCE_TYPE = "sourceDatasourceType"; 28 | public static final String FIELD_SOURCE_DATASOURCE_ID = "sourceDatasourceId"; 29 | public static final String FIELD_WRITE_DATASOURCE_TYPE = "writeDatasourceType"; 30 | public static final String FIELD_WRITE_DATASOURCE_ID = "writeDatasourceId"; 31 | public static final String FIELD_JSON_FILE_NAME ="jsonFileName"; 32 | public static final String FIELD_JOB_ID = "jobId"; 33 | public static final String FIELD_WORKFLOW_ID = "workflowId"; 34 | public static final String FIELD_SETTING_INFO = "settingInfo"; 35 | public static final String FIELD_TENANT_ID = "tenantId"; 36 | public static final String FIELD_OBJECT_VERSION_NUMBER = "objectVersionNumber"; 37 | public static final String FIELD_CREATION_DATE = "creationDate"; 38 | public static final String FIELD_CREATED_BY = "createdBy"; 39 | public static final String FIELD_LAST_UPDATED_BY = "lastUpdatedBy"; 40 | public static final String FIELD_LAST_UPDATE_DATE = "lastUpdateDate"; 41 | 42 | // 43 | // 业务方法(按public protected private顺序排列) 44 | // ------------------------------------------------------------------------------ 45 | 46 | // 47 | // 数据库字段 48 | // ------------------------------------------------------------------------------ 49 | 50 | @ApiModelProperty("表ID,主键,供其他表做外键") 51 | private Long syncId; 52 | @ApiModelProperty(value = "同步名称", required = true) 53 | @NotBlank 54 | private String syncName; 55 | @ApiModelProperty(value = "同步描述", required = true) 56 | @NotBlank 57 | private String syncDescription; 58 | @ApiModelProperty(value = "来源数据源类型,快码:HDSP.DATASOURCE_TYPE", required = true) 59 | @NotBlank 60 | private String sourceDatasourceType; 61 | @ApiModelProperty(value = "来源数据源ID,关联HDSP_CORE_DATASOURCE.DATASOURCE_ID", required = true) 62 | @NotNull 63 | private Long sourceDatasourceId; 64 | @ApiModelProperty(value = "写入数据源类型,快码:HDSP.DATASOURCE_TYPE", required = true) 65 | 
@NotBlank 66 | private String writeDatasourceType; 67 | @ApiModelProperty(value = "写入数据源ID,关联HDSP_CORE_DATASOURCE.DATASOURCE_ID", required = true) 68 | @NotNull 69 | private Long writeDatasourceId; 70 | @ApiModelProperty(value = "生成的Datax Json文件名称", required = true) 71 | @NotBlank 72 | private String jsonFileName; 73 | @ApiModelProperty(value = "调度任务ID,关联HDSP_DISP_JOB.JOB_ID,回写") 74 | private Long jobId; 75 | @ApiModelProperty(value = "调度任务流ID,关联HDSP_DISP_WORKFLOW.WORKFLOW_ID,回写") 76 | private Long workflowId; 77 | @ApiModelProperty(value = "数据配置信息") 78 | private Byte[] settingInfo; 79 | @ApiModelProperty(value = "租户ID") 80 | private Long tenantId; 81 | @ApiModelProperty(value = "版本号", required = true) 82 | @NotNull 83 | private Long objectVersionNumber; 84 | @ApiModelProperty(required = true) 85 | @NotNull 86 | private Date creationDate; 87 | @ApiModelProperty(required = true) 88 | @NotNull 89 | private Long createdBy; 90 | @ApiModelProperty(required = true) 91 | @NotNull 92 | private Long lastUpdatedBy; 93 | @ApiModelProperty(required = true) 94 | @NotNull 95 | private Date lastUpdateDate; 96 | 97 | } 98 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/mapper/DataxSyncMapper.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.mapper; 2 | 3 | import com.baomidou.dynamic.datasource.annotation.DS; 4 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 5 | import com.isacc.datax.domain.entity.DataxSync; 6 | 7 | /** 8 | * 数据同步表Mapper 9 | * 10 | * @author isacc 2019-05-17 14:07:48 11 | */ 12 | @DS("mysql_1") 13 | public interface DataxSyncMapper extends BaseMapper { 14 | } 15 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/mapper/MysqlSimpleMapper.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.mapper; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import com.baomidou.dynamic.datasource.annotation.DS; 7 | import com.isacc.datax.domain.entity.reader.hdfsreader.HdfsColumn; 8 | import org.apache.ibatis.annotations.ResultType; 9 | import org.apache.ibatis.annotations.Select; 10 | 11 | /** 12 | *

13 | * Mysql Simple Mapper 14 | *

15 | * 16 | * @author isacc 2019/04/29 20:24 17 | */ 18 | @SuppressWarnings("unused") 19 | @DS("mysql") 20 | public interface MysqlSimpleMapper { 21 | 22 | /** 23 | * 是否存在数据库 24 | * 25 | * @param databaseName Mysql数据库名称 26 | * @return java.lang.Integer 27 | * @author isacc 2019-04-29 20:09 28 | */ 29 | @Select("SELECT COUNT(*) FROM information_schema.SCHEMATA " + 30 | "WHERE SCHEMA_NAME = #{databaseName}") 31 | Integer mysqlDbIsExist(String databaseName); 32 | 33 | /** 34 | * 根据mysql数据库和表名查询表字段相关信息 35 | * 36 | * @param databaseName Mysql数据库名称 37 | * @param tableName Mysql表名称 38 | * @return java.util.List> 39 | * @author isacc 2019/5/10 14:33 40 | */ 41 | @Select("SELECT COLUMN_NAME, ORDINAL_POSITION, DATA_TYPE, COLUMN_TYPE " + 42 | "FROM information_schema. COLUMNS " + 43 | "WHERE " + 44 | "TABLE_SCHEMA = #{databaseName} AND TABLE_NAME = #{tableName}") 45 | List> mysqlTableColumnInfo(String databaseName, String tableName); 46 | 47 | /** 48 | * mysql表字段映射hive表字段 49 | * 50 | * @param databaseName Mysql数据库名称 51 | * @param tableName Mysql表名称 52 | * @return java.util.List> 53 | * @author isacc 2019/5/10 14:33 54 | */ 55 | @Select("SELECT " + 56 | "column_name name, " + 57 | "CASE " + 58 | "WHEN NUMERIC_PRECISION IS NOT NULL " + 59 | "AND (" + 60 | "data_type = 'decimal' " + 61 | "OR data_type = 'numeric' " + 62 | ") THEN " + 63 | "concat( " + 64 | "'decimal(', " + 65 | "NUMERIC_PRECISION, " + 66 | "',', " + 67 | "NUMERIC_SCALE, " + 68 | "')' " + 69 | ") " + 70 | "WHEN (" + 71 | "CHARACTER_MAXIMUM_LENGTH IS NOT NULL " + 72 | "OR data_type = 'uniqueidentifier' " + 73 | ") " + 74 | "AND data_type NOT LIKE '%text%' THEN " + 75 | "'string' " + 76 | "WHEN data_type = 'datetime' THEN " + 77 | "'timestamp' " + 78 | "WHEN data_type = 'money' THEN " + 79 | "'decimal(9,2)' " + 80 | "WHEN data_type = 'tinyint' THEN " + 81 | "'int' " + 82 | "ELSE " + 83 | "data_type " + 84 | "END AS type " + 85 | "FROM " + 86 | "information_schema. 
COLUMNS " + 87 | "WHERE " + 88 | "table_schema = #{databaseName} " + 89 | "AND table_name = #{tableName}") 90 | List mysqlColumn2HiveColumn(String databaseName, String tableName); 91 | 92 | /** 93 | * 判断指定数据库下指定表是否存在 94 | * 95 | * @param databaseName Mysql数据库名称 96 | * @param tableName Mysql表名称 97 | * @return java.lang.Integer 98 | * @author isacc 2019-04-29 21:20 99 | */ 100 | @Select("SELECT count(*) FROM information_schema.TABLES " + 101 | "WHERE table_schema=#{databaseName} and table_name=#{tableName}") 102 | Integer mysqlTblIsExist(String databaseName, String tableName); 103 | 104 | /** 105 | * 查询Hive所有数据库 106 | * 107 | * @return java.util.List> 108 | * @author isacc 2019-04-30 10:48 109 | */ 110 | @DS("mysql_hivemeta") 111 | @Select("select DB_ID,`DESC`,DB_LOCATION_URI,`NAME`,OWNER_NAME,OWNER_TYPE from dbs") 112 | @ResultType(Map.class) 113 | List> allHiveDatabases(); 114 | 115 | /** 116 | * 根据Hive数据库名称查看是否存在该数据库 117 | * 118 | * @param hiveDbName hive db name 119 | * @return java.util.Map 120 | * @author isacc 2019-05-07 15:54 121 | */ 122 | @DS("mysql_hivemeta") 123 | @Select("select DB_ID,`DESC`,DB_LOCATION_URI,`NAME`,OWNER_NAME,OWNER_TYPE from dbs " + 124 | "where `NAME` = #{hiveDbName}") 125 | @ResultType(Map.class) 126 | Map hiveDbIsExist(String hiveDbName); 127 | 128 | /** 129 | * 根据Hive数据库ID查看是否存在该表 130 | * 131 | * @param dbId hive数据库id 132 | * @param hiveTblName hive数据表名称 133 | * @return java.util.Map 134 | * @author isacc 2019-05-07 16:08 135 | */ 136 | @DS("mysql_hivemeta") 137 | @Select("SELECT " + 138 | "TBL_ID,CREATE_TIME,DB_ID LAST_ACCESS_TIME,OWNER RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED " + 139 | "FROM " + 140 | "tbls " + 141 | "WHERE " + 142 | "DB_ID = #{dbId} " + 143 | "AND TBL_NAME = #{hiveTblName}") 144 | @ResultType(Map.class) 145 | Map hiveTblIsExist(Long dbId, String hiveTblName); 146 | 147 | /** 148 | * 查询指定Hive数据库下所有表 149 | * 150 | * @param dbId Hive数据库ID 151 | * @return java.util.List> 152 | * @author isacc 2019-04-30 10:54 153 | */ 154 | @DS("mysql_hivemeta") 155 | @Select("SELECT " + 156 | "TBL_ID,CREATE_TIME,DB_ID LAST_ACCESS_TIME,OWNER RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED " + 157 | "FROM " + 158 | "tbls " + 159 | "WHERE " + 160 | "DB_ID = #{dbId}") 161 | @ResultType(Map.class) 162 | List> allHiveTableByDatabase(Long dbId); 163 | 164 | } 165 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/repository/impl/DataxSyncRepositoryImpl.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.repository.impl; 2 | 3 | 4 | import java.util.ArrayList; 5 | 6 | import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; 7 | import com.baomidou.mybatisplus.core.metadata.IPage; 8 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 9 | import com.isacc.datax.api.dto.DataxSyncDTO; 10 | import com.isacc.datax.domain.entity.DataxSync; 11 | import com.isacc.datax.domain.repository.DataxSyncRepository; 12 | import com.isacc.datax.infra.converter.DataxSyncConverter; 13 | import com.isacc.datax.infra.mapper.DataxSyncMapper; 14 | import org.springframework.beans.BeanUtils; 15 | import org.springframework.stereotype.Component; 16 | 17 | /** 18 | * 数据同步表 资源库实现 19 | * 20 | * @author isacc 2019-05-17 14:07:48 21 | */ 22 | @Component 23 | public class DataxSyncRepositoryImpl implements DataxSyncRepository { 24 | 25 | private 
final DataxSyncMapper dataxSyncMapper; 26 | private final DataxSyncConverter dataxSyncConverter; 27 | 28 | public DataxSyncRepositoryImpl(DataxSyncMapper dataxSyncMapper, DataxSyncConverter dataxSyncConverter) { 29 | this.dataxSyncMapper = dataxSyncMapper; 30 | this.dataxSyncConverter = dataxSyncConverter; 31 | } 32 | 33 | @Override 34 | public DataxSyncDTO insertSelectiveDTO(DataxSyncDTO dataxSyncDTO) { 35 | final DataxSync dataxSync = dataxSyncConverter.dtoToEntity(dataxSyncDTO); 36 | dataxSyncMapper.insert(dataxSync); 37 | return dataxSyncConverter.entityToDto(dataxSync); 38 | } 39 | 40 | @Override 41 | public DataxSyncDTO updateSelectiveDTO(DataxSyncDTO dataxSyncDTO) { 42 | final DataxSync dataxSync = dataxSyncConverter.dtoToEntity(dataxSyncDTO); 43 | dataxSyncMapper.updateById(dataxSync); 44 | return dataxSyncDTO; 45 | } 46 | 47 | @Override 48 | public IPage pageAndSortDTO(Page dataxSyncPage, DataxSyncDTO dataxSyncDTO) { 49 | final QueryWrapper queryWrapper = this.commonQueryWrapper(dataxSyncDTO); 50 | final IPage entityPage = dataxSyncMapper.selectPage(dataxSyncPage, queryWrapper); 51 | final ArrayList dtoList = new ArrayList<>(); 52 | entityPage.getRecords().forEach(entity -> dtoList.add(dataxSyncConverter.entityToDto(entity))); 53 | final Page dtoPage = new Page<>(); 54 | BeanUtils.copyProperties(entityPage, dtoPage); 55 | dtoPage.setRecords(dtoList); 56 | return dtoPage; 57 | } 58 | 59 | @Override 60 | public DataxSyncDTO selectByPrimaryKey(Long syncId) { 61 | final DataxSync dataxSync = dataxSyncMapper.selectById(syncId); 62 | return dataxSyncConverter.entityToDto(dataxSync); 63 | } 64 | 65 | @Override 66 | public int deleteByPrimaryKey(Long syncId) { 67 | return dataxSyncMapper.deleteById(syncId); 68 | } 69 | 70 | private QueryWrapper commonQueryWrapper(DataxSyncDTO dataxSyncDTO) { 71 | final QueryWrapper queryWrapper = new QueryWrapper<>(); 72 | queryWrapper.or().like("SYNC_NAME", dataxSyncDTO.getSyncName()); 73 | queryWrapper.or().like("JSON_FILE_NAME", dataxSyncDTO.getJsonFileName()); 74 | queryWrapper.or().eq("TENANT_ID", dataxSyncDTO.getTenantId()); 75 | queryWrapper.or().eq("SOURCE_DATASOURCE_TYPE", dataxSyncDTO.getSourceDatasourceType()); 76 | queryWrapper.or().eq("WRITE_DATASOURCE_TYPE", dataxSyncDTO.getWriteDatasourceType()); 77 | return queryWrapper; 78 | } 79 | 80 | private QueryWrapper checkQueryWrapper(DataxSyncDTO dataxSyncDTO) { 81 | final QueryWrapper queryWrapper = new QueryWrapper<>(); 82 | queryWrapper.or().eq("SYNC_NAME", dataxSyncDTO.getSyncName()); 83 | queryWrapper.or().eq("JSON_FILE_NAME", dataxSyncDTO.getJsonFileName()); 84 | return queryWrapper; 85 | } 86 | 87 | @Override 88 | public int selectCount(DataxSync dataxSync) { 89 | final QueryWrapper queryWrapper = this.checkQueryWrapper(dataxSyncConverter.entityToDto(dataxSync)); 90 | return dataxSyncMapper.selectCount(queryWrapper); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/repository/impl/MysqlRepositoryImpl.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.repository.impl; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | import java.util.Objects; 6 | import javax.validation.constraints.NotBlank; 7 | 8 | import com.isacc.datax.api.dto.ApiResult; 9 | import com.isacc.datax.api.dto.Hive2Hive; 10 | import com.isacc.datax.api.dto.HiveInfoDTO; 11 | import com.isacc.datax.domain.repository.MysqlRepository; 12 | import 
com.isacc.datax.infra.constant.Constants; 13 | import com.isacc.datax.infra.mapper.MysqlSimpleMapper; 14 | import com.isacc.datax.infra.util.DataxUtil; 15 | import lombok.extern.slf4j.Slf4j; 16 | import org.springframework.beans.factory.annotation.Autowired; 17 | import org.springframework.stereotype.Component; 18 | 19 | /** 20 | *

21 | * Mysql Repository Impl 22 | *

23 | * 24 | * @author isacc 2019/04/29 19:49 25 | */ 26 | @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") 27 | @Component 28 | @Slf4j 29 | public class MysqlRepositoryImpl implements MysqlRepository { 30 | 31 | private final MysqlSimpleMapper mysqlSimpleMapper; 32 | 33 | @Autowired 34 | public MysqlRepositoryImpl(MysqlSimpleMapper mysqlSimpleMapper) { 35 | this.mysqlSimpleMapper = mysqlSimpleMapper; 36 | } 37 | 38 | @Override 39 | public ApiResult checkHiveDbAndTable(Hive2Hive hive2Hive) { 40 | // reader 41 | @NotBlank final String readerPath = hive2Hive.getReader().getPath(); 42 | final ApiResult readerApiResult = this.justCheckHiveDbAndTbl(readerPath); 43 | if (!readerApiResult.getResult()) { 44 | return readerApiResult; 45 | } 46 | // writer 47 | return checkWriterHiveDbAndTable(hive2Hive); 48 | } 49 | 50 | @Override 51 | public ApiResult checkWriterHiveDbAndTable(Hive2Hive hive2Hive) { 52 | // writer 53 | @NotBlank final String writerPath = hive2Hive.getWriter().getPath(); 54 | final ApiResult writerApiResult = this.justCheckHiveDbAndTbl(writerPath); 55 | if (!writerApiResult.getResult()) { 56 | // 改变状态,返回再接着操作 57 | writerApiResult.setResult(true); 58 | return writerApiResult; 59 | } 60 | return ApiResult.initSuccess(); 61 | } 62 | 63 | private ApiResult justCheckHiveDbAndTbl(String path) { 64 | HiveInfoDTO hiveInfo = (HiveInfoDTO) DataxUtil.getHiveInfoFromPath(path).getContent(); 65 | String hiveDbName = hiveInfo.getDatabaseName(); 66 | String hiveTblName = hiveInfo.getTableName(); 67 | final Map hiveDbInfoMap = mysqlSimpleMapper.hiveDbIsExist(hiveDbName); 68 | final HashMap map = new HashMap<>(3); 69 | map.put("hiveDbName", hiveDbName); 70 | map.put("hiveTblName", hiveTblName); 71 | if (Objects.isNull(hiveDbInfoMap)) { 72 | final ApiResult failureApiResult = ApiResult.initFailure(); 73 | failureApiResult.setMessage(String.format("path路径错误,不存在该hive数据库:%s!", hiveDbName)); 74 | map.put("errorType", Constants.DB_IS_NOT_EXIST); 75 | failureApiResult.setContent(map); 76 | return failureApiResult; 77 | } 78 | final Long dbId = Long.valueOf(String.valueOf(hiveDbInfoMap.get("DB_ID"))); 79 | final Map hiveTblInfoMap = mysqlSimpleMapper.hiveTblIsExist(dbId, hiveTblName); 80 | if (Objects.isNull(hiveTblInfoMap)) { 81 | final ApiResult failureApiResult = ApiResult.initFailure(); 82 | failureApiResult.setMessage(String.format("path路径错误,%s数据库下不存在表:%s!", hiveDbName, hiveTblName)); 83 | map.put("errorType", Constants.TBL_IS_NOT_EXIST); 84 | failureApiResult.setContent(map); 85 | return failureApiResult; 86 | } 87 | return ApiResult.initSuccess(); 88 | } 89 | 90 | } 91 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/util/ApplicationContextHelper.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.util; 2 | 3 | import org.springframework.beans.factory.support.DefaultListableBeanFactory; 4 | import org.springframework.context.ApplicationContext; 5 | import org.springframework.context.ApplicationContextAware; 6 | import org.springframework.context.support.AbstractRefreshableApplicationContext; 7 | import org.springframework.context.support.GenericApplicationContext; 8 | 9 | /** 10 | *

11 | * description 12 | *

13 | * 14 | * @author isacc 2019/10/15 17:53 15 | * @since 1.0 16 | */ 17 | public class ApplicationContextHelper implements ApplicationContextAware { 18 | 19 | 20 | private static DefaultListableBeanFactory springFactory; 21 | 22 | private static ApplicationContext context; 23 | 24 | 25 | @SuppressWarnings("NullableProblems") 26 | @Override 27 | public void setApplicationContext(ApplicationContext applicationContext) { 28 | context = applicationContext; 29 | if (applicationContext instanceof AbstractRefreshableApplicationContext) { 30 | AbstractRefreshableApplicationContext springContext = 31 | (AbstractRefreshableApplicationContext) applicationContext; 32 | springFactory = (DefaultListableBeanFactory) springContext.getBeanFactory(); 33 | } else if (applicationContext instanceof GenericApplicationContext) { 34 | GenericApplicationContext springContext = (GenericApplicationContext) applicationContext; 35 | springFactory = springContext.getDefaultListableBeanFactory(); 36 | } 37 | } 38 | 39 | public static DefaultListableBeanFactory getSpringFactory() { 40 | return springFactory; 41 | } 42 | 43 | public static ApplicationContext getContext() { 44 | return context; 45 | } 46 | } -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/util/BeanUtil.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.util; 2 | 3 | import org.springframework.context.ApplicationContext; 4 | import org.springframework.context.ApplicationContextAware; 5 | import org.springframework.stereotype.Component; 6 | 7 | /** 8 | *

9 | * Obtains Spring beans 10 | *

11 | * 12 | * @author isacc 2019/05/23 11:08 13 | */ 14 | @SuppressWarnings("unused") 15 | @Component 16 | public class BeanUtil implements ApplicationContextAware { 17 | 18 | /** 19 | * 上下文对象实例 20 | */ 21 | private static ApplicationContext applicationContext; 22 | 23 | @Override 24 | public void setApplicationContext(ApplicationContext applicationContext) { 25 | BeanUtil.applicationContext = applicationContext; 26 | } 27 | 28 | private static ApplicationContext getApplicationContext() { 29 | return applicationContext; 30 | } 31 | 32 | public static Object getBean(String name) { 33 | return getApplicationContext().getBean(name); 34 | } 35 | 36 | public static T getBean(Class clazz) { 37 | return getApplicationContext().getBean(clazz); 38 | } 39 | 40 | public static T getBean(String name, Class clazz) { 41 | return getApplicationContext().getBean(name, clazz); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/util/DataxUtil.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.util; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Arrays; 5 | import java.util.List; 6 | import java.util.regex.Matcher; 7 | import java.util.regex.Pattern; 8 | import java.util.stream.Collectors; 9 | 10 | import com.isacc.datax.api.dto.ApiResult; 11 | import com.isacc.datax.api.dto.HiveInfoDTO; 12 | import com.isacc.datax.domain.entity.datax.HivePartition; 13 | import com.isacc.datax.domain.entity.reader.hdfsreader.HdfsFileTypeEnum; 14 | import com.isacc.datax.domain.entity.writer.hdfswiter.HdfsWriterModeEnum; 15 | import com.isacc.datax.infra.constant.Constants; 16 | 17 | /** 18 | *

19 | * DataX util 20 | *

21 | * 22 | * @author isacc 2019/05/07 17:08 23 | */ 24 | public class DataxUtil { 25 | 26 | private static Pattern compileEqual = Pattern.compile("="); 27 | private static Pattern compileDb = Pattern.compile(".db"); 28 | 29 | private DataxUtil() { 30 | throw new IllegalStateException("Utility class"); 31 | } 32 | 33 | /** 34 | * 检验hdfs文件类型 35 | * 36 | * @param fileTypes Array[String] 37 | * @return com.isacc.datax.api.dto.ApiResult 38 | * @author isacc 2019/5/9 16:23 39 | */ 40 | public static ApiResult checkHdfsFileType(String... fileTypes) { 41 | for (String fileType : fileTypes) { 42 | List fileTypeInfo = Arrays.stream(HdfsFileTypeEnum.values()).filter(hdfsFileTypeEnum -> fileType.equalsIgnoreCase(hdfsFileTypeEnum.name())).collect(Collectors.toList()); 43 | if (fileTypeInfo.isEmpty()) { 44 | final ApiResult failureApiResult = ApiResult.initFailure(); 45 | failureApiResult.setMessage("datax doesn't have this fileType: " + fileType); 46 | return failureApiResult; 47 | } 48 | } 49 | return ApiResult.initSuccess(); 50 | } 51 | 52 | /** 53 | * 检验字段分隔符 54 | * 55 | * @param fieldDelimiter fieldDelimiter 56 | * @return com.isacc.datax.api.dto.ApiResult 57 | * @author isacc 2019/5/9 16:23 58 | */ 59 | public static ApiResult checkFieldDelimiter(String fieldDelimiter) { 60 | final int length = fieldDelimiter.replace(Constants.Symbol.BACKSLASH, "").replace(Constants.Symbol.SLASH, "").length(); 61 | if (length != 1) { 62 | final ApiResult failureApiResult = ApiResult.initFailure(); 63 | failureApiResult.setMessage(String.format("datax supports only single-character field delimiter, which you configure as : [%s]", fieldDelimiter)); 64 | return failureApiResult; 65 | } 66 | return ApiResult.initSuccess(); 67 | } 68 | 69 | /** 70 | * 检验WriteMode 71 | * 72 | * @param writeMode writeMode 73 | * @return com.isacc.datax.api.dto.ApiResult 74 | * @author isacc 2019/5/9 16:22 75 | */ 76 | public static ApiResult checkWriteMode(String writeMode) { 77 | List writeModeInfo = Arrays.stream(HdfsWriterModeEnum.values()).filter(hdfsWriterModeEnum -> writeMode.equalsIgnoreCase(hdfsWriterModeEnum.getWriteMode())).collect(Collectors.toList()); 78 | if (writeModeInfo.isEmpty()) { 79 | final ApiResult failureApiResult = ApiResult.initFailure(); 80 | failureApiResult.setMessage("datax doesn't have this writerMode: " + writeMode); 81 | return failureApiResult; 82 | } 83 | return ApiResult.initSuccess(); 84 | } 85 | 86 | /** 87 | * 解析hdfs路径获取分区信息 88 | * 89 | * @param path hdfs路径 90 | * @return java.util.List 91 | * @author isacc 2019/5/9 16:21 92 | */ 93 | public static List partitionList(String path) { 94 | List partitionList = new ArrayList<>(3); 95 | // 判断path是否含有分区信息 96 | if (path.contains(Constants.Symbol.EQUAL)) { 97 | // 有分区信息 98 | Matcher matcher = compileEqual.matcher(path); 99 | HivePartition partition; 100 | while (matcher.find()) { 101 | partition = HivePartition.builder().build(); 102 | String before = path.substring(0, matcher.start()); 103 | String partitionName = before.substring(before.lastIndexOf('/') + 1); 104 | partition.setName(partitionName); 105 | String after = path.substring(matcher.end()); 106 | String partitionValue = after.substring(0, after.indexOf('/') == -1 ? 
after.length() : after.indexOf('/')); 107 | partition.setValue(partitionValue); 108 | partitionList.add(partition); 109 | } 110 | } 111 | return partitionList; 112 | } 113 | 114 | /** 115 | * 通过hdfs路径获取hive信息 116 | * 117 | * @param path hdfs信息 118 | * @return com.isacc.datax.api.dto.ApiResult 119 | * @author isacc 2019/5/9 17:29 120 | */ 121 | public static ApiResult getHiveInfoFromPath(String path) { 122 | ApiResult successApiResult = ApiResult.initSuccess(); 123 | Matcher matcher = compileDb.matcher(path); 124 | String hiveDbName; 125 | String hiveTblName; 126 | if (!matcher.find()) { 127 | ApiResult pathApiResult = ApiResult.initFailure(); 128 | pathApiResult.setMessage("path中找不到hive数据库信息!"); 129 | pathApiResult.setContent(path); 130 | return pathApiResult; 131 | } else { 132 | String dbPath = path.substring(0, matcher.start()); 133 | hiveDbName = dbPath.substring(dbPath.lastIndexOf(Constants.Symbol.SLASH) + 1); 134 | String tblPath = path.substring(matcher.end() + 1); 135 | hiveTblName = tblPath.substring(0, !tblPath.contains(Constants.Symbol.SLASH) ? tblPath.length() : tblPath.indexOf(Constants.Symbol.SLASH)); 136 | } 137 | HiveInfoDTO hiveInfoDTO = HiveInfoDTO.builder().databaseName(hiveDbName).tableName(hiveTblName).build(); 138 | successApiResult.setContent(hiveInfoDTO); 139 | return successApiResult; 140 | } 141 | 142 | 143 | 144 | } 145 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/util/FreemarkerUtil.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.util; 2 | 3 | import java.io.File; 4 | import java.util.Locale; 5 | import java.util.Map; 6 | 7 | import com.isacc.datax.DataxApplication; 8 | import com.isacc.datax.api.dto.ApiResult; 9 | import com.isacc.datax.infra.config.DataxProperties; 10 | import freemarker.template.Configuration; 11 | import freemarker.template.Template; 12 | import lombok.extern.slf4j.Slf4j; 13 | import org.apache.commons.io.FileUtils; 14 | import org.apache.commons.io.output.FileWriterWithEncoding; 15 | 16 | /** 17 | *

18 | * Freemarker Utils 19 | *

20 | * 21 | * @author isacc 2019/05/05 14:25 22 | */ 23 | @Slf4j 24 | public class FreemarkerUtil { 25 | 26 | private FreemarkerUtil() { 27 | throw new IllegalStateException("Utility class"); 28 | } 29 | 30 | private static Configuration getConfiguration(String basePackagePath) { 31 | Configuration cfg = new Configuration(Configuration.VERSION_2_3_26); 32 | cfg.setNumberFormat("#"); 33 | cfg.setClassForTemplateLoading(DataxApplication.class, basePackagePath); 34 | return cfg; 35 | } 36 | 37 | public static ApiResult createJsonFile(Map root, DataxProperties dataxProperties, String templateName, String jsonFileName) { 38 | final ApiResult successApiResult = ApiResult.initSuccess(); 39 | final ApiResult failureApiResult = ApiResult.initFailure(); 40 | try { 41 | Configuration cfg = FreemarkerUtil.getConfiguration(dataxProperties.getBasePackagePath()); 42 | Template template = cfg.getTemplate(templateName, Locale.CHINA); 43 | final File file = new File(dataxProperties.getLocalDicPath() + jsonFileName); 44 | FileUtils.touch(file); 45 | FileWriterWithEncoding writer = new FileWriterWithEncoding(file, "UTF-8"); 46 | template.process(root, writer); 47 | writer.close(); 48 | successApiResult.setContent(file); 49 | return successApiResult; 50 | } catch (Exception e) { 51 | log.error("create json file failure!", e); 52 | failureApiResult.setMessage("create json file failure!"); 53 | failureApiResult.setContent(e.getMessage()); 54 | return failureApiResult; 55 | } 56 | } 57 | 58 | 59 | } 60 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/util/HdfsUtil.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.util; 2 | 3 | 4 | import java.io.IOException; 5 | import java.net.URI; 6 | import java.net.URISyntaxException; 7 | import java.util.Objects; 8 | 9 | import com.isacc.datax.api.dto.ApiResult; 10 | import lombok.extern.slf4j.Slf4j; 11 | import org.apache.hadoop.conf.Configuration; 12 | import org.apache.hadoop.fs.FileSystem; 13 | import org.apache.hadoop.fs.Path; 14 | 15 | /** 16 | *

17 | * description 18 | *

19 | * 20 | * @author isacc 2019/05/13 11:38 21 | */ 22 | @SuppressWarnings("unused") 23 | @Slf4j 24 | public class HdfsUtil implements AutoCloseable { 25 | 26 | private static FileSystem fileSystem = null; 27 | 28 | private static void getFileSystem(String nameNode, String user) throws URISyntaxException, IOException, InterruptedException { 29 | /* 30 | * Configuration参数对象的机制: 31 | * 构造时,会加载jar包中的默认配置 xx-default.xml 32 | * 再加载 用户配置xx-site.xml ,覆盖掉默认参数 33 | * 构造完成之后,还可以conf.set("p","v"),会再次覆盖用户配置文件中的参数值 34 | * new Configuration()会从项目的classpath中加载core-default.xml hdfs-default.xml core-site.xml hdfs-site.xml等文件 35 | */ 36 | Configuration conf = new Configuration(); 37 | /* 38 | * 指定本客户端上传文件到hdfs时需要保存的副本数为:2 39 | * conf.set("dfs.replication", "2") 40 | */ 41 | // 指定本客户端上传文件到hdfs时切块的规格大小:128M 42 | conf.set("dfs.blocksize", "128m"); 43 | fileSystem = FileSystem.get(new URI(nameNode), conf, user); 44 | } 45 | 46 | 47 | public static ApiResult upload(String nameNode, String user, String source, String target) { 48 | ApiResult successApiResult = ApiResult.initSuccess(); 49 | try { 50 | HdfsUtil.getFileSystem(nameNode, user); 51 | fileSystem.mkdirs(new Path(target)); 52 | fileSystem.copyFromLocalFile(new Path(source), new Path(target)); 53 | } catch (Exception e) { 54 | ApiResult failureApiResult = ApiResult.initFailure(); 55 | failureApiResult.setMessage(String.format("上传csv文件失败!%n%s", e.getMessage())); 56 | return failureApiResult; 57 | } 58 | return successApiResult; 59 | } 60 | 61 | @Override 62 | public void close() throws Exception { 63 | if (!Objects.isNull(fileSystem)) { 64 | fileSystem.close(); 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/main/java/com/isacc/datax/infra/util/ZipUtil.java: -------------------------------------------------------------------------------- 1 | package com.isacc.datax.infra.util; 2 | 3 | import java.io.File; 4 | import java.io.FileInputStream; 5 | import java.io.IOException; 6 | import java.io.OutputStream; 7 | import java.time.LocalDateTime; 8 | import java.util.List; 9 | import java.util.zip.ZipEntry; 10 | import java.util.zip.ZipOutputStream; 11 | 12 | import jline.internal.Log; 13 | 14 | /** 15 | *

16 | * description 17 | *

18 | * 19 | * @author isacc 2019/05/14 17:11 20 | */ 21 | public class ZipUtil { 22 | 23 | private static final int BUFFER_SIZE = 2 * 1024; 24 | 25 | private ZipUtil() { 26 | throw new IllegalStateException("Utility class"); 27 | } 28 | 29 | public static void toZip(List srcFiles, OutputStream out) { 30 | try (ZipOutputStream zos = new ZipOutputStream(out)) { 31 | for (File file : srcFiles) { 32 | byte[] buf = new byte[BUFFER_SIZE]; 33 | zos.putNextEntry(new ZipEntry(file.getName())); 34 | int len; 35 | try (FileInputStream in = new FileInputStream(file)) { 36 | while ((len = in.read(buf)) != -1) { 37 | zos.write(buf, 0, len); 38 | } 39 | } 40 | } 41 | } catch (IOException e) { 42 | Log.error("zip files fail!", e); 43 | } 44 | } 45 | 46 | public static String generateFileName() { 47 | final LocalDateTime now = LocalDateTime.now(); 48 | final String localDate = now.toLocalDate().toString(); 49 | final String localTime = now.toLocalTime().toString().replace(':', '-').replace('.', '-'); 50 | return "dataxJob" + "-" + localDate + "-" + localTime; 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /src/main/resources/META-INF/additional-spring-configuration-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": [ 3 | { 4 | "name": "datax.mysql2Hive.whereTemplate", 5 | "type": "java.lang.String", 6 | "description": "mysql条件查询数据同步到Hive." 7 | }, 8 | { 9 | "name": "datax.mysql2Hive.querySqlTemplate", 10 | "type": "java.lang.String", 11 | "description": "mysql自定义sql查询数据同步到Hive." 12 | }, 13 | { 14 | "name": "datax.mysql2Mysql.whereTemplate", 15 | "type": "java.lang.String", 16 | "description": "mysql条件查询数据同步到mysql." 17 | }, 18 | { 19 | "name": "datax.mysql2Mysql.querySqlTemplate", 20 | "type": "java.lang.String", 21 | "description": "mysql自定义sql查询数据同步到mysql." 22 | }, 23 | { 24 | "name": "datax.oracle2Hive.whereTemplate", 25 | "type": "java.lang.String", 26 | "description": "oracle条件查询数据同步到hive." 27 | }, 28 | { 29 | "name": "datax.oracle2Hive.querySqlTemplate", 30 | "type": "java.lang.String", 31 | "description": "oracle自定义sql查询数据同步到hive." 32 | }, 33 | { 34 | "name": "datax.oracle2Oracle.whereTemplate", 35 | "type": "java.lang.String", 36 | "description": "oracle条件查询数据同步到oracle." 37 | }, 38 | { 39 | "name": "datax.oracle2Oracle.querySqlTemplate", 40 | "type": "java.lang.String", 41 | "description": "oracle自定义sql查询数据同步到oracle." 42 | }, 43 | { 44 | "name": "datax.oracle2Mysql.whereTemplate", 45 | "type": "java.lang.String", 46 | "description": "oracle条件查询数据同步到mysql." 47 | }, 48 | { 49 | "name": "datax.oracle2Mysql.querySqlTemplate", 50 | "type": "java.lang.String", 51 | "description": "oracle自定义sql查询数据同步到mysql." 52 | }, 53 | { 54 | "name": "datax.mysql2Oracle.whereTemplate", 55 | "type": "java.lang.String", 56 | "description": "mysql条件查询数据同步到oracle." 57 | }, 58 | { 59 | "name": "datax.mysql2Oracle.querySqlTemplate", 60 | "type": "java.lang.String", 61 | "description": "mysql自定义sql查询数据同步到mysql." 
62 | } 63 | ] } -------------------------------------------------------------------------------- /src/main/resources/application-template.yml: -------------------------------------------------------------------------------- 1 | server: 2 | port: 10024 3 | eureka: 4 | instance: 5 | # 以IP注册到注册中心 6 | preferIpAddress: ${EUREKA_INSTANCE_PREFER_IP_ADDRESS:true} 7 | leaseRenewalIntervalInSeconds: 10 8 | leaseExpirationDurationInSeconds: 30 9 | # 服务的一些元数据信息 10 | metadata-map: 11 | VERSION: 0.1.0 12 | client: 13 | serviceUrl: 14 | # 注册中心地址 15 | defaultZone: ${EUREKA_DEFAULT_ZONE:http://ip:port/eureka} 16 | registryFetchIntervalSeconds: 10 17 | disable-delta: true 18 | enabled: false # 测试时不启动eureka 19 | spring: 20 | application: 21 | name: ${SPRING_APPLICATION_NAME:common-datax} 22 | cloud: 23 | config: 24 | fail-fast: false 25 | # 是否启用配置中心 26 | enabled: ${SPRING_CLOUD_CONFIG_ENABLED:false} 27 | # 配置中心地址 28 | uri: ${SPRING_CLOUD_CONFIG_URI:http://ip:port} 29 | retry: 30 | # 最大重试次数 31 | maxAttempts: 6 32 | multiplier: 1.1 33 | # 重试间隔时间 34 | maxInterval: 2000 35 | # 标签 36 | label: ${SPRING_CLOUD_CONFIG_LABEL:} 37 | resources: 38 | chain: 39 | cache: false # 开发时使用,resource文件夹下的资源不进行缓存处理,即每次都需要去加载 40 | datasource: 41 | dynamic: 42 | primary: mysql 43 | datasource: 44 | hive: 45 | username: xxxx # 注意用户名,使用hive用户没操作hdfs的权限 46 | password: xxxx 47 | driver-class-name: org.apache.hive.jdbc.HiveDriver 48 | url: jdbc:hive2://ip:port/db 49 | mysql_hivemeta: 50 | username: xxxx 51 | password: xxxx 52 | driver-class-name: com.mysql.jdbc.Driver 53 | url: jdbc:mysql://ip:port/db?useUnicode=true&characterEncoding=utf-8&useSSL=false 54 | mysql_1: 55 | username: xxxx 56 | password: xxxx 57 | driver-class-name: com.mysql.jdbc.Driver 58 | url: jdbc:mysql://ip:port/db?useUnicode=true&characterEncoding=utf-8&useSSL=false 59 | mysql_2: 60 | username: xxxx 61 | password: xxxx 62 | driver-class-name: com.mysql.jdbc.Driver 63 | url: jdbc:mysql://ip:port/db?useUnicode=true&characterEncoding=utf-8&useSSL=false 64 | type: com.zaxxer.hikari.HikariDataSource 65 | hikari: 66 | minimum-idle: 10 67 | maximum-pool-size: 15 68 | auto-commit: true 69 | idle-timeout: 30000 70 | pool-name: ExpendHikariCP 71 | max-lifetime: 1800000 72 | connection-timeout: 30000 73 | connection-test-query: SELECT 1 74 | 75 | redis: 76 | host: ${SPRING_REDIS_HOST:localhost} 77 | port: ${SPRING_REDIS_PORT:6379} 78 | database: ${SPRING_REDIS_DATABASE:15} 79 | password: ${SPRING_REDIS_PASSWORD:} 80 | jedis: 81 | pool: 82 | # 资源池中最大连接数 83 | # 默认8,-1表示无限制;可根据服务并发redis情况及服务端的支持上限调整 84 | max-active: 8 85 | # 资源池运行最大空闲的连接数 86 | # 默认8,-1表示无限制;可根据服务并发redis情况及服务端的支持上限调整,一般建议和max-active保持一致,避免资源伸缩带来的开销 87 | max-idle: 50 88 | # 当资源池连接用尽后,调用者的最大等待时间(单位为毫秒) 89 | # 默认 -1 表示永不超时,设置5秒 90 | max-wait: 5000 91 | 92 | logging: 93 | level: 94 | com.isacc.datax: debug 95 | com.baomidou.dynamic: debug 96 | file: logs/common-datax.log 97 | 98 | mybatis: 99 | mapperLocations: classpath*:/mapper/*.xml 100 | configuration: 101 | mapUnderscoreToCamelCase: true 102 | 103 | # 这里只要是路径,后面都加上/ 104 | datax: 105 | home: ${DATAX_HOME:/path/} 106 | host: ${DATAX_HOST:ip} 107 | port: 22 108 | # 要操作hdfs,用户要有权限 109 | username: ${DATAX_USERNAME:hive} 110 | password: ${DATAX_PASSWORD:hive} 111 | basePackagePath: /templates/ 112 | uploadDicPath: ${DATAX_JSON_FILE_HOME:/path/} 113 | localDicPath: datax/ 114 | mysql2Hive: 115 | whereTemplate: mysql2hive_where.ftl 116 | querySqlTemplate: mysql2hive_querySql.ftl 117 | mysql2Mysql: 118 | whereTemplate: mysql2mysql_where.ftl 119 | querySqlTemplate: 
mysql2mysql_querySql.ftl 120 | mysql2Oracle: 121 | whereTemplate: mysql2oracle_where.ftl 122 | querySqlTemplate: mysql2oracle_querySql.ftl 123 | hive2HiveTemplate: hive2hive.ftl 124 | hive2MysqlTemplate: hive2mysql.ftl 125 | hive2OracleTemplate: hive2oracle.ftl 126 | oracle2Hive: 127 | whereTemplate: oracle2hive_where.ftl 128 | querySqlTemplate: oracle2hive_querySql.ftl 129 | oracle2Oracle: 130 | whereTemplate: oracle2oracle_where.ftl 131 | querySqlTemplate: oracle2oracle_querySql.ftl 132 | oracle2Mysql: 133 | whereTemplate: oracle2mysql_where.ftl 134 | querySqlTemplate: oracle2mysql_querySql.ftl 135 | 136 | azkaban: 137 | host: ${AZKABAN_HOST:http://ip:port} 138 | username: ${AZKABAN_USERNAME:azkaban} 139 | password: ${AZKABAN_PASSWORD:azkaban} 140 | localDicPath: azkaban/ 141 | dataxJob: dataxJob.job 142 | dataxProperties: dataxParams.properties -------------------------------------------------------------------------------- /src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | profiles: 3 | active: ${SPRING_PROFILES_ACTIVE:dev} # 指定哪个配置文件生效 -------------------------------------------------------------------------------- /src/main/resources/dataxJob.job: -------------------------------------------------------------------------------- 1 | # datax.job 2 | type=command 3 | command=python ${DATAX_HOME}bin/datax.py ${DATAX_JSON_FILE_NAME} -------------------------------------------------------------------------------- /src/main/resources/mapper/DataxSyncMapper.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /src/main/resources/mapper/MysqlMapper.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /src/main/resources/sql/datax_sync.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Navicat Premium Data Transfer 3 | 4 | Source Server : xxxxxxxxxxxxxxxxxxxx 5 | Source Server Type : MySQL 6 | Source Server Version : 50724 7 | Source Host : xxxxxxxxxxxxxxxxxxxx 8 | Source Schema : common_datax 9 | 10 | Target Server Type : MySQL 11 | Target Server Version : 50724 12 | File Encoding : 65001 13 | 14 | Date: 04/06/2019 21:28:53 15 | */ 16 | 17 | SET NAMES utf8mb4; 18 | SET FOREIGN_KEY_CHECKS = 0; 19 | 20 | -- ---------------------------- 21 | -- Table structure for datax_sync 22 | -- ---------------------------- 23 | DROP TABLE IF EXISTS `datax_sync`; 24 | CREATE TABLE `datax_sync` ( 25 | `SYNC_ID` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '表ID,主键,供其他表做外键', 26 | `SYNC_NAME` varchar(80) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '同步名称', 27 | `SYNC_DESCRIPTION` varchar(240) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '同步描述', 28 | `SOURCE_DATASOURCE_TYPE` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '来源数据源类型,快码:HDSP.DATASOURCE_TYPE', 29 | `SOURCE_DATASOURCE_ID` bigint(20) NULL DEFAULT NULL COMMENT '来源数据源ID,关联HDSP_CORE_DATASOURCE.DATASOURCE_ID', 30 | `WRITE_DATASOURCE_TYPE` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '写入数据源类型,快码:HDSP.DATASOURCE_TYPE', 31 | `WRITE_DATASOURCE_ID` bigint(20) NULL 
DEFAULT NULL COMMENT '写入数据源ID,关联HDSP_CORE_DATASOURCE.DATASOURCE_ID', 32 | `JSON_FILE_NAME` varchar(80) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL, 33 | `SETTING_INFO` blob NULL COMMENT '数据配置信息', 34 | `TENANT_ID` bigint(20) NULL DEFAULT NULL COMMENT '租户ID', 35 | `OBJECT_VERSION_NUMBER` bigint(20) NOT NULL DEFAULT 1 COMMENT '版本号', 36 | `CREATION_DATE` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0), 37 | `CREATED_BY` int(11) NOT NULL DEFAULT -1, 38 | `LAST_UPDATED_BY` int(11) NOT NULL DEFAULT -1, 39 | `LAST_UPDATE_DATE` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0), 40 | PRIMARY KEY (`SYNC_ID`) USING BTREE, 41 | UNIQUE INDEX `SYNC_ID`(`SYNC_ID`) USING BTREE, 42 | UNIQUE INDEX `SYNC_NAME`(`SYNC_NAME`) USING BTREE, 43 | UNIQUE INDEX `JSON_FILE_NAME`(`JSON_FILE_NAME`) USING BTREE 44 | ) ENGINE = InnoDB AUTO_INCREMENT = 8 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin COMMENT = '数据同步表' ROW_FORMAT = Dynamic; 45 | 46 | SET FOREIGN_KEY_CHECKS = 1; 47 | -------------------------------------------------------------------------------- /src/main/resources/templates/hive2hive.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "hdfsreader", 16 | "parameter": { 17 | "path": "${hdfsreaderPath}", 18 | "defaultFS": "${hdfsreaderDefaultFS}", 19 | "column": [ 20 | <#list hdfsreaderColumn as column> 21 | { 22 | "type": "${column.type}", 23 | "index": "${column.index}" 24 | }<#if column_has_next>, 25 | 26 | ], 27 | "fileType": "${hdfsreaderFileType}", 28 | "fieldDelimiter": "${hdfsreaderFieldDelimiter}", 29 | "nullFormat": "${hdfsreaderNullFormat!}", 30 | "compress": "${hdfsreaderCompress!}", 31 | "hadoopConfig": { 32 | <#if hdfsreaderHadoopConfig??> 33 | <#list hdfsreaderHadoopConfig as key, value> 34 | "${key}": "${value}"<#if key_has_next>, 35 | 36 | 37 | }, 38 | "csvReaderConfig": { 39 | <#if hdfsreaderCsvReaderConfig??> 40 | <#list hdfsreaderCsvReaderConfig as key, value> 41 | "${key}": "${value}"<#if key_has_next>, 42 | 43 | 44 | }, 45 | "haveKerberos": ${hdfsreaderHaveKerberos?then("true","false")}, 46 | "kerberosKeytabFilePath": "${hdfsreaderKerberosKeytabFilePath!}", 47 | "kerberosPrincipal": "${hdfsreaderKerberosPrincipal!}" 48 | } 49 | }, 50 | "writer": { 51 | "name": "hdfswriter", 52 | "parameter": { 53 | "defaultFS": "${hdfswriterDefaultFS}", 54 | "fileType": "${hdfswriterFileType}", 55 | "path": "${hdfswriterPath}", 56 | "fileName": "${hdfswriterFileName}", 57 | "column": [ 58 | <#list hdfswriterColumn as column> 59 | { 60 | "name": "${column.name}", 61 | "type": "${column.type}" 62 | }<#if column_has_next>, 63 | 64 | ], 65 | "writeMode": "${writeMode}", 66 | "fieldDelimiter": "${hdfswriterFieldDelimiter}", 67 | "compress": "${hdfswriterCompress!}", 68 | "hadoopConfig": { 69 | <#if hdfswriterHadoopConfig??> 70 | <#list hdfswriterHadoopConfig as key, value> 71 | "${key}": "${value}"<#if key_has_next>, 72 | 73 | 74 | }, 75 | "haveKerberos": ${hdfswriterHaveKerberos?then("true","false")}, 76 | "kerberosKeytabFilePath": "${hdfswriterKerberosKeytabFilePath!}", 77 | "kerberosPrincipal": "${hdfswriterKerberosPrincipal!}" 78 | } 79 | } 80 | } 81 | ] 82 | } 83 | } -------------------------------------------------------------------------------- 
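The FreeMarker templates such as hive2hive.ftl above are only half the story: at runtime the service fills them with a data model built from the request DTO and writes the result out as a concrete DataX job JSON file under datax.localDicPath. (The closing </#if>/</#list> directives appear to have been dropped from these listings; the real templates must balance every directive for FreeMarker to parse them.) The repository's FreemarkerUtil is not reproduced here, but rendering hive2hive.ftl presumably boils down to something like the sketch below; every value in the data model is invented for illustration, and only the keys the template requires unconditionally are populated.

```
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Sketch only: render hive2hive.ftl into a concrete DataX job JSON file. */
public class Hive2HiveRenderSketch {

    public static void main(String[] args) throws IOException, TemplateException {
        Configuration cfg = new Configuration(Configuration.VERSION_2_3_28);
        // "/templates" mirrors the datax.basePackagePath value in application-template.yml
        cfg.setClassForTemplateLoading(Hive2HiveRenderSketch.class, "/templates");
        cfg.setDefaultEncoding("UTF-8");

        Map<String, Object> model = new HashMap<>();
        // hdfsreader side (all values illustrative)
        model.put("hdfsreaderPath", "/warehouse/src_db.db/src_table/*");
        model.put("hdfsreaderDefaultFS", "hdfs://namenode:8020");
        model.put("hdfsreaderColumn", columns("index", "0", "string", "1", "bigint"));
        model.put("hdfsreaderFileType", "text");
        model.put("hdfsreaderFieldDelimiter", ",");
        model.put("hdfsreaderHaveKerberos", Boolean.FALSE);
        // hdfswriter side
        model.put("hdfswriterDefaultFS", "hdfs://namenode:8020");
        model.put("hdfswriterFileType", "text");
        model.put("hdfswriterPath", "/warehouse/target_db.db/target_table");
        model.put("hdfswriterFileName", "target_table");
        model.put("hdfswriterColumn", columns("name", "id", "string", "age", "bigint"));
        model.put("writeMode", "append");
        model.put("hdfswriterFieldDelimiter", ",");
        model.put("hdfswriterHaveKerberos", Boolean.FALSE);

        Template template = cfg.getTemplate("hive2hive.ftl");
        // the service presumably writes the file under datax.localDicPath before uploading it
        try (Writer out = new FileWriter("hive2hive_demo.json")) {
            template.process(model, out);
        }
    }

    /** Builds the list-of-maps shape the template iterates over (keyName/value, "type"/type pairs). */
    private static List<Map<String, String>> columns(String keyName, String... pairs) {
        List<Map<String, String>> list = new ArrayList<>();
        for (int i = 0; i < pairs.length; i += 2) {
            Map<String, String> col = new HashMap<>();
            col.put(keyName, pairs[i]);
            col.put("type", pairs[i + 1]);
            list.add(col);
        }
        return list;
    }
}
```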
/src/main/resources/templates/hive2mysql.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "hdfsreader", 16 | "parameter": { 17 | "path": "${hdfsreaderPath}", 18 | "defaultFS": "${hdfsreaderDefaultFS}", 19 | "column": [ 20 | <#list hdfsreaderColumn as column> 21 | { 22 | "type": "${column.type}", 23 | "index": "${column.index}" 24 | }<#if column_has_next>, 25 | 26 | ], 27 | "fileType": "${hdfsreaderFileType}", 28 | "fieldDelimiter": "${hdfsreaderFieldDelimiter}", 29 | "nullFormat": "${hdfsreaderNullFormat!}", 30 | "compress": "${hdfsreaderCompress!}", 31 | "hadoopConfig": { 32 | <#if hdfsreaderHadoopConfig??> 33 | <#list hdfsreaderHadoopConfig as key, value> 34 | "${key}": "${value}"<#if key_has_next>, 35 | 36 | 37 | }, 38 | "csvReaderConfig": { 39 | <#if hdfsreaderCsvReaderConfig??> 40 | <#list hdfsreaderCsvReaderConfig as key, value> 41 | "${key}": "${value}"<#if key_has_next>, 42 | 43 | 44 | }, 45 | "haveKerberos": ${hdfsreaderHaveKerberos?then("true","false")}, 46 | "kerberosKeytabFilePath": "${hdfsreaderKerberosKeytabFilePath!}", 47 | "kerberosPrincipal": "${hdfsreaderKerberosPrincipal!}" 48 | } 49 | }, 50 | "writer": { 51 | "name": "mysqlwriter", 52 | "parameter": { 53 | "writeMode": "${writeMode}", 54 | "username": "${mysqlwriterUsername}", 55 | "password": "${mysqlwriterPassword}", 56 | "batchSize": ${mysqlwriterBatchSize!'1024'}, 57 | "column": [ 58 | <#list mysqlwriterColumn as column> 59 | "${column}"<#if column_has_next>, 60 | 61 | ], 62 | "session": [ 63 | <#if mysqlwriterSession??> 64 | <#list mysqlwriterSession as session> 65 | "${session}"<#if session_has_next>, 66 | 67 | 68 | ], 69 | "preSql": [ 70 | <#if mysqlwriterPreSql??> 71 | <#list mysqlwriterPreSql as preSql> 72 | "${preSql}"<#if preSql_has_next>, 73 | 74 | 75 | ], 76 | "connection": [ 77 | <#list mysqlwriterConnection as conn> 78 | { 79 | "table": [ 80 | <#list conn.table as tbl> 81 | "${tbl}"<#if tbl_has_next>, 82 | 83 | ], 84 | "jdbcUrl": "${conn.jdbcUrl}" 85 | }<#if conn_has_next>, 86 | 87 | ], 88 | "postSql": [ 89 | <#if mysqlwriterPostSql??> 90 | <#list mysqlwriterPostSql as postSql> 91 | "${postSql}"<#if postSql_has_next>, 92 | 93 | 94 | ] 95 | } 96 | } 97 | } 98 | ] 99 | } 100 | } -------------------------------------------------------------------------------- /src/main/resources/templates/hive2oracle.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "hdfsreader", 16 | "parameter": { 17 | "path": "${hdfsreaderPath}", 18 | "defaultFS": "${hdfsreaderDefaultFS}", 19 | "column": [ 20 | <#list hdfsreaderColumn as column> 21 | { 22 | "type": "${column.type}", 23 | "index": "${column.index}" 24 | }<#if column_has_next>, 25 | 26 | ], 27 | "fileType": "${hdfsreaderFileType}", 28 | "fieldDelimiter": "${hdfsreaderFieldDelimiter}", 29 | "nullFormat": "${hdfsreaderNullFormat!}", 30 | "compress": "${hdfsreaderCompress!}", 31 | "hadoopConfig": { 32 | <#if 
hdfsreaderHadoopConfig??> 33 | <#list hdfsreaderHadoopConfig as key, value> 34 | "${key}": "${value}"<#if key_has_next>, 35 | 36 | 37 | }, 38 | "csvReaderConfig": { 39 | <#if hdfsreaderCsvReaderConfig??> 40 | <#list hdfsreaderCsvReaderConfig as key, value> 41 | "${key}": "${value}"<#if key_has_next>, 42 | 43 | 44 | }, 45 | "haveKerberos": ${hdfsreaderHaveKerberos?then("true","false")}, 46 | "kerberosKeytabFilePath": "${hdfsreaderKerberosKeytabFilePath!}", 47 | "kerberosPrincipal": "${hdfsreaderKerberosPrincipal!}" 48 | } 49 | }, 50 | "writer": { 51 | "name": "oraclewriter", 52 | "parameter": { 53 | "username": "${oraclewriterUsername}", 54 | "password": "${oraclewriterPassword}", 55 | "batchSize": ${oraclewriterBatchSize!'1024'}, 56 | "column": [ 57 | <#list oraclewriterColumn as column> 58 | "${column}"<#if column_has_next>, 59 | 60 | ], 61 | "session": [ 62 | <#if oraclewriterSession??> 63 | <#list oraclewriterSession as session> 64 | "${session}"<#if session_has_next>, 65 | 66 | 67 | ], 68 | "preSql": [ 69 | <#if oraclewriterPreSql??> 70 | <#list oraclewriterPreSql as preSql> 71 | "${preSql}"<#if preSql_has_next>, 72 | 73 | 74 | ], 75 | "connection": [ 76 | <#list oraclewriterConnection as conn> 77 | { 78 | "table": [ 79 | <#list conn.table as tbl> 80 | "${tbl}"<#if tbl_has_next>, 81 | 82 | ], 83 | "jdbcUrl": "${conn.jdbcUrl}" 84 | }<#if conn_has_next>, 85 | 86 | ], 87 | "postSql": [ 88 | <#if oraclewriterPostSql??> 89 | <#list oraclewriterPostSql as postSql> 90 | "${postSql}"<#if postSql_has_next>, 91 | 92 | 93 | ] 94 | } 95 | } 96 | } 97 | ] 98 | } 99 | } -------------------------------------------------------------------------------- /src/main/resources/templates/mysql2hive_querySql.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "mysqlreader", 16 | "parameter": { 17 | "username": "${mysqlreaderUsername}", 18 | "password": "${mysqlreaderPassword}", 19 | "splitPk": "${mysqlreaderSplitPk!}", 20 | "connection": [ 21 | <#list mysqlreaderConnection as conn> 22 | { 23 | "querySql": [ 24 | <#list conn.querySql as sql> 25 | "${sql}"<#if sql_has_next>, 26 | 27 | ], 28 | "jdbcUrl": [ 29 | <#list conn.jdbcUrl as url> 30 | "${url}"<#if url_has_next>, 31 | 32 | ] 33 | }<#if conn_has_next>, 34 | 35 | ] 36 | } 37 | }, 38 | "writer": { 39 | "name": "hdfswriter", 40 | "parameter": { 41 | "defaultFS": "${hdfswriterDefaultFS}", 42 | "fileType": "${hdfswriterFileType}", 43 | "path": "${hdfswriterPath}", 44 | "fileName": "${hdfswriterFileName}", 45 | "column": [ 46 | <#list hdfswriterColumn as column> 47 | { 48 | "name": "${column.name}", 49 | "type": "${column.type}" 50 | }<#if column_has_next>, 51 | 52 | ], 53 | "writeMode": "${writeMode}", 54 | "fieldDelimiter": "${hdfswriterFieldDelimiter}", 55 | "compress": "${hdfswriterCompress!}", 56 | "hadoopConfig": { 57 | <#if hdfswriterHadoopConfig??> 58 | <#list hdfswriterHadoopConfig as key, value> 59 | "${key}": "${value}"<#if key_has_next>, 60 | 61 | 62 | }, 63 | "haveKerberos": ${hdfswriterHaveKerberos?then("true","false")}, 64 | "kerberosKeytabFilePath": "${hdfswriterKerberosKeytabFilePath!}", 65 | "kerberosPrincipal": "${hdfswriterKerberosPrincipal!}" 66 | } 67 | } 68 | } 69 | ] 70 | } 71 | } 
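Each relational reader comes in two template flavours: the *_querySql.ftl variant above takes fully formed SELECT statements, while the *_where.ftl variant that follows takes a column list, table names and a where clause. The data-model keys are exactly the placeholders visible in the templates; the repository's GenerateDataModelUtil presumably assembles something equivalent to the hedged sketch below (class name and values are invented, the hdfswriter half has the same shape as in the hive2hive sketch earlier).

```
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/** Sketch of the reader-side data model for the mysql2hive templates (values are illustrative). */
public class Mysql2HiveReaderModelSketch {

    /** Shape expected by mysql2hive_querySql.ftl: complete SELECT statements per connection. */
    public static Map<String, Object> querySqlReaderModel() {
        Map<String, Object> connection = new HashMap<>();
        connection.put("querySql", Arrays.asList("select id, name, age from user_info where age > 18"));
        connection.put("jdbcUrl", Arrays.asList("jdbc:mysql://ip:port/db?useUnicode=true&characterEncoding=utf-8&useSSL=false"));

        Map<String, Object> model = new HashMap<>();
        model.put("mysqlreaderUsername", "xxxx");
        model.put("mysqlreaderPassword", "xxxx");
        model.put("mysqlreaderSplitPk", "id");
        model.put("mysqlreaderConnection", Arrays.asList(connection));
        return model;
    }

    /** Shape expected by mysql2hive_where.ftl: column list, tables and a where clause instead of querySql. */
    public static Map<String, Object> whereReaderModel() {
        Map<String, Object> connection = new HashMap<>();
        connection.put("table", Arrays.asList("user_info"));
        connection.put("jdbcUrl", Arrays.asList("jdbc:mysql://ip:port/db?useUnicode=true&characterEncoding=utf-8&useSSL=false"));

        Map<String, Object> model = new HashMap<>();
        model.put("mysqlreaderUsername", "xxxx");
        model.put("mysqlreaderPassword", "xxxx");
        model.put("mysqlreaderSplitPk", "id");
        model.put("mysqlreaderColumn", Arrays.asList("id", "name", "age"));
        model.put("mysqlreaderConnection", Arrays.asList(connection));
        model.put("mysqlreaderWhere", "age > 18");
        return model;
    }
}
```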
-------------------------------------------------------------------------------- /src/main/resources/templates/mysql2hive_where.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "mysqlreader", 16 | "parameter": { 17 | "username": "${mysqlreaderUsername}", 18 | "password": "${mysqlreaderPassword}", 19 | "splitPk": "${mysqlreaderSplitPk!}", 20 | "column": [ 21 | <#list mysqlreaderColumn as column> 22 | "${column}"<#if column_has_next>, 23 | 24 | ], 25 | "connection": [ 26 | <#list mysqlreaderConnection as conn> 27 | { 28 | "table": [ 29 | <#list conn.table as tbl> 30 | "${tbl}"<#if tbl_has_next>, 31 | 32 | ], 33 | "jdbcUrl": [ 34 | <#list conn.jdbcUrl as url> 35 | "${url}"<#if url_has_next>, 36 | 37 | ] 38 | }<#if conn_has_next>, 39 | 40 | ], 41 | "where":"${mysqlreaderWhere}" 42 | } 43 | }, 44 | "writer": { 45 | "name": "hdfswriter", 46 | "parameter": { 47 | "defaultFS": "${hdfswriterDefaultFS}", 48 | "fileType": "${hdfswriterFileType}", 49 | "path": "${hdfswriterPath}", 50 | "fileName": "${hdfswriterFileName}", 51 | "column": [ 52 | <#list hdfswriterColumn as column> 53 | { 54 | "name": "${column.name}", 55 | "type": "${column.type}" 56 | }<#if column_has_next>, 57 | 58 | ], 59 | "writeMode": "${writeMode}", 60 | "fieldDelimiter": "${hdfswriterFieldDelimiter}", 61 | "compress": "${hdfswriterCompress!}", 62 | "hadoopConfig": { 63 | <#if hdfswriterHadoopConfig??> 64 | <#list hdfswriterHadoopConfig as key, value> 65 | "${key}": "${value}"<#if key_has_next>, 66 | 67 | 68 | }, 69 | "haveKerberos": ${hdfswriterHaveKerberos?then("true","false")}, 70 | "kerberosKeytabFilePath": "${hdfswriterKerberosKeytabFilePath!}", 71 | "kerberosPrincipal": "${hdfswriterKerberosPrincipal!}" 72 | } 73 | } 74 | } 75 | ] 76 | } 77 | } -------------------------------------------------------------------------------- /src/main/resources/templates/mysql2mysql_querySql.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "mysqlreader", 16 | "parameter": { 17 | "username": "${mysqlreaderUsername}", 18 | "password": "${mysqlreaderPassword}", 19 | "splitPk": "${mysqlreaderSplitPk!}", 20 | "connection": [ 21 | <#list mysqlreaderConnection as conn> 22 | { 23 | "querySql": [ 24 | <#list conn.querySql as sql> 25 | "${sql}"<#if sql_has_next>, 26 | 27 | ], 28 | "jdbcUrl": [ 29 | <#list conn.jdbcUrl as url> 30 | "${url}"<#if url_has_next>, 31 | 32 | ] 33 | }<#if conn_has_next>, 34 | 35 | ] 36 | } 37 | }, 38 | "writer": { 39 | "name": "mysqlwriter", 40 | "parameter": { 41 | "writeMode": "${writeMode}", 42 | "username": "${mysqlwriterUsername}", 43 | "password": "${mysqlwriterPassword}", 44 | "batchSize": ${mysqlwriterBatchSize!'1024'}, 45 | "column": [ 46 | <#list mysqlwriterColumn as column> 47 | "${column}"<#if column_has_next>, 48 | 49 | ], 50 | "session": [ 51 | <#if mysqlwriterSession??> 52 | <#list mysqlwriterSession as session> 53 | "${session}"<#if 
session_has_next>, 54 | 55 | 56 | ], 57 | "preSql": [ 58 | <#if mysqlwriterPreSql??> 59 | <#list mysqlwriterPreSql as preSql> 60 | "${preSql}"<#if preSql_has_next>, 61 | 62 | 63 | ], 64 | "connection": [ 65 | <#list mysqlwriterConnection as conn> 66 | { 67 | "table": [ 68 | <#list conn.table as tbl> 69 | "${tbl}"<#if tbl_has_next>, 70 | 71 | ], 72 | "jdbcUrl": "${conn.jdbcUrl}" 73 | }<#if conn_has_next>, 74 | 75 | ], 76 | "postSql": [ 77 | <#if mysqlwriterPostSql??> 78 | <#list mysqlwriterPostSql as postSql> 79 | "${postSql}"<#if postSql_has_next>, 80 | 81 | 82 | ] 83 | } 84 | } 85 | } 86 | ] 87 | } 88 | } -------------------------------------------------------------------------------- /src/main/resources/templates/mysql2mysql_where.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "mysqlreader", 16 | "parameter": { 17 | "username": "${mysqlreaderUsername}", 18 | "password": "${mysqlreaderPassword}", 19 | "splitPk": "${mysqlreaderSplitPk!}", 20 | "column": [ 21 | <#list mysqlreaderColumn as column> 22 | "${column}"<#if column_has_next>, 23 | 24 | ], 25 | "connection": [ 26 | <#list mysqlreaderConnection as conn> 27 | { 28 | "table": [ 29 | <#list conn.table as tbl> 30 | "${tbl}"<#if tbl_has_next>, 31 | 32 | ], 33 | "jdbcUrl": [ 34 | <#list conn.jdbcUrl as url> 35 | "${url}"<#if url_has_next>, 36 | 37 | ] 38 | }<#if conn_has_next>, 39 | 40 | ], 41 | "where":"${mysqlreaderWhere}" 42 | } 43 | }, 44 | "writer": { 45 | "name": "mysqlwriter", 46 | "parameter": { 47 | "writeMode": "${writeMode}", 48 | "username": "${mysqlwriterUsername}", 49 | "password": "${mysqlwriterPassword}", 50 | "batchSize": ${mysqlwriterBatchSize!'1024'}, 51 | "column": [ 52 | <#list mysqlwriterColumn as column> 53 | "${column}"<#if column_has_next>, 54 | 55 | ], 56 | "session": [ 57 | <#if mysqlwriterSession??> 58 | <#list mysqlwriterSession as session> 59 | "${session}"<#if session_has_next>, 60 | 61 | 62 | ], 63 | "preSql": [ 64 | <#if mysqlwriterPreSql??> 65 | <#list mysqlwriterPreSql as preSql> 66 | "${preSql}"<#if preSql_has_next>, 67 | 68 | 69 | ], 70 | "connection": [ 71 | <#list mysqlwriterConnection as conn> 72 | { 73 | "table": [ 74 | <#list conn.table as tbl> 75 | "${tbl}"<#if tbl_has_next>, 76 | 77 | ], 78 | "jdbcUrl": "${conn.jdbcUrl}" 79 | }<#if conn_has_next>, 80 | 81 | ], 82 | "postSql": [ 83 | <#if mysqlwriterPostSql??> 84 | <#list mysqlwriterPostSql as postSql> 85 | "${postSql}"<#if postSql_has_next>, 86 | 87 | 88 | ] 89 | } 90 | } 91 | } 92 | ] 93 | } 94 | } -------------------------------------------------------------------------------- /src/main/resources/templates/mysql2oracle_querySql.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "mysqlreader", 16 | "parameter": { 17 | "username": "${mysqlreaderUsername}", 18 | "password": "${mysqlreaderPassword}", 19 | "splitPk": "${mysqlreaderSplitPk!}", 20 | "connection": [ 21 | <#list 
mysqlreaderConnection as conn> 22 | { 23 | "querySql": [ 24 | <#list conn.querySql as sql> 25 | "${sql}"<#if sql_has_next>, 26 | 27 | ], 28 | "jdbcUrl": [ 29 | <#list conn.jdbcUrl as url> 30 | "${url}"<#if url_has_next>, 31 | 32 | ] 33 | }<#if conn_has_next>, 34 | 35 | ] 36 | } 37 | }, 38 | "writer": { 39 | "name": "oraclewriter", 40 | "parameter": { 41 | "username": "${oraclewriterUsername}", 42 | "password": "${oraclewriterPassword}", 43 | "batchSize": ${oraclewriterBatchSize!'1024'}, 44 | "column": [ 45 | <#list oraclewriterColumn as column> 46 | "${column}"<#if column_has_next>, 47 | 48 | ], 49 | "session": [ 50 | <#if oraclewriterSession??> 51 | <#list oraclewriterSession as session> 52 | "${session}"<#if session_has_next>, 53 | 54 | 55 | ], 56 | "preSql": [ 57 | <#if oraclewriterPreSql??> 58 | <#list oraclewriterPreSql as preSql> 59 | "${preSql}"<#if preSql_has_next>, 60 | 61 | 62 | ], 63 | "connection": [ 64 | <#list oraclewriterConnection as conn> 65 | { 66 | "table": [ 67 | <#list conn.table as tbl> 68 | "${tbl}"<#if tbl_has_next>, 69 | 70 | ], 71 | "jdbcUrl": "${conn.jdbcUrl}" 72 | }<#if conn_has_next>, 73 | 74 | ], 75 | "postSql": [ 76 | <#if oraclewriterPostSql??> 77 | <#list oraclewriterPostSql as postSql> 78 | "${postSql}"<#if postSql_has_next>, 79 | 80 | 81 | ] 82 | } 83 | } 84 | } 85 | ] 86 | } 87 | } -------------------------------------------------------------------------------- /src/main/resources/templates/mysql2oracle_where.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "mysqlreader", 16 | "parameter": { 17 | "username": "${mysqlreaderUsername}", 18 | "password": "${mysqlreaderPassword}", 19 | "splitPk": "${mysqlreaderSplitPk!}", 20 | "column": [ 21 | <#list mysqlreaderColumn as column> 22 | "${column}"<#if column_has_next>, 23 | 24 | ], 25 | "connection": [ 26 | <#list mysqlreaderConnection as conn> 27 | { 28 | "table": [ 29 | <#list conn.table as tbl> 30 | "${tbl}"<#if tbl_has_next>, 31 | 32 | ], 33 | "jdbcUrl": [ 34 | <#list conn.jdbcUrl as url> 35 | "${url}"<#if url_has_next>, 36 | 37 | ] 38 | }<#if conn_has_next>, 39 | 40 | ], 41 | "where":"${mysqlreaderWhere}" 42 | } 43 | }, 44 | "writer": { 45 | "name": "oraclewriter", 46 | "parameter": { 47 | "username": "${oraclewriterUsername}", 48 | "password": "${oraclewriterPassword}", 49 | "batchSize": ${oraclewriterBatchSize!'1024'}, 50 | "column": [ 51 | <#list oraclewriterColumn as column> 52 | "${column}"<#if column_has_next>, 53 | 54 | ], 55 | "session": [ 56 | <#if oraclewriterSession??> 57 | <#list oraclewriterSession as session> 58 | "${session}"<#if session_has_next>, 59 | 60 | 61 | ], 62 | "preSql": [ 63 | <#if oraclewriterPreSql??> 64 | <#list oraclewriterPreSql as preSql> 65 | "${preSql}"<#if preSql_has_next>, 66 | 67 | 68 | ], 69 | "connection": [ 70 | <#list oraclewriterConnection as conn> 71 | { 72 | "table": [ 73 | <#list conn.table as tbl> 74 | "${tbl}"<#if tbl_has_next>, 75 | 76 | ], 77 | "jdbcUrl": "${conn.jdbcUrl}" 78 | }<#if conn_has_next>, 79 | 80 | ], 81 | "postSql": [ 82 | <#if oraclewriterPostSql??> 83 | <#list oraclewriterPostSql as postSql> 84 | "${postSql}"<#if postSql_has_next>, 85 | 86 | 87 | ] 88 | } 89 | } 90 | } 91 | ] 92 | } 93 | } 
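Once a job JSON has been rendered from one of these templates it still has to reach the DataX host and be executed there. The datax section of application-template.yml (host, port, username, password, home, uploadDicPath) and the command in dataxJob.job suggest the flow: upload the JSON over SFTP, then run python {datax.home}bin/datax.py against it. The repository's SftpUtil/BaseDataxServiceImpl are not reproduced here; the sketch below shows one plausible shape of that step using JSch, with all host names, credentials and paths invented.

```
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

/** Sketch only: push a rendered job JSON to the DataX host and run it there. */
public class RemoteDataxRunSketch {

    public static void main(String[] args) throws JSchException, SftpException, IOException {
        // these mirror the datax.* keys in application-template.yml; all values are placeholders
        String host = "datax-host";
        int port = 22;
        String username = "hive";
        String password = "hive";
        String dataxHome = "/opt/datax/";        // datax.home, ends with /
        String uploadDir = "/opt/datax/job/";    // datax.uploadDicPath, ends with /
        String jsonName = "mysql2hive_demo.json";

        JSch jsch = new JSch();
        Session session = jsch.getSession(username, host, port);
        session.setPassword(password);
        session.setConfig("StrictHostKeyChecking", "no");
        session.connect();

        // 1. upload the generated JSON from the local datax/ directory
        ChannelSftp sftp = (ChannelSftp) session.openChannel("sftp");
        sftp.connect();
        sftp.put("datax/" + jsonName, uploadDir + jsonName);
        sftp.disconnect();

        // 2. run it, same command shape as dataxJob.job: python ${DATAX_HOME}bin/datax.py ${DATAX_JSON_FILE_NAME}
        ChannelExec exec = (ChannelExec) session.openChannel("exec");
        exec.setCommand("python " + dataxHome + "bin/datax.py " + uploadDir + jsonName);
        exec.setErrStream(System.err);
        InputStream in = exec.getInputStream();
        exec.connect();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        exec.disconnect();
        session.disconnect();
    }
}
```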
-------------------------------------------------------------------------------- /src/main/resources/templates/oracle2hive_querySql.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "oraclereader", 16 | "parameter": { 17 | "username": "${oraclereaderUsername}", 18 | "password": "${oraclereaderPassword}", 19 | "splitPk": "${oraclereaderSplitPk!}", 20 | "connection": [ 21 | <#list oraclereaderConnection as conn> 22 | { 23 | "querySql": [ 24 | <#list conn.querySql as sql> 25 | "${sql}"<#if sql_has_next>, 26 | 27 | ], 28 | "jdbcUrl": [ 29 | <#list conn.jdbcUrl as url> 30 | "${url}"<#if url_has_next>, 31 | 32 | ] 33 | }<#if conn_has_next>, 34 | 35 | ], 36 | "fetchSize": ${oraclereaderFetchSize!'1024'}, 37 | "session": [ 38 | <#if oracleReaderSession??> 39 | <#list oracleReaderSession as session> 40 | "${session}"<#if session_has_next>, 41 | 42 | 43 | ] 44 | } 45 | }, 46 | "writer": { 47 | "name": "hdfswriter", 48 | "parameter": { 49 | "defaultFS": "${hdfswriterDefaultFS}", 50 | "fileType": "${hdfswriterFileType}", 51 | "path": "${hdfswriterPath}", 52 | "fileName": "${hdfswriterFileName}", 53 | "column": [ 54 | <#list hdfswriterColumn as column> 55 | { 56 | "name": "${column.name}", 57 | "type": "${column.type}" 58 | }<#if column_has_next>, 59 | 60 | ], 61 | "writeMode": "${writeMode}", 62 | "fieldDelimiter": "${hdfswriterFieldDelimiter}", 63 | "compress": "${hdfswriterCompress!}", 64 | "hadoopConfig": { 65 | <#if hdfswriterHadoopConfig??> 66 | <#list hdfswriterHadoopConfig as key, value> 67 | "${key}": "${value}"<#if key_has_next>, 68 | 69 | 70 | }, 71 | "haveKerberos": ${hdfswriterHaveKerberos?then("true","false")}, 72 | "kerberosKeytabFilePath": "${hdfswriterKerberosKeytabFilePath!}", 73 | "kerberosPrincipal": "${hdfswriterKerberosPrincipal!}" 74 | } 75 | } 76 | } 77 | ] 78 | } 79 | } -------------------------------------------------------------------------------- /src/main/resources/templates/oracle2hive_where.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "oraclereader", 16 | "parameter": { 17 | "username": "${oraclereaderUsername}", 18 | "password": "${oraclereaderPassword}", 19 | "splitPk": "${oraclereaderSplitPk!}", 20 | "column": [ 21 | <#list oraclereaderColumn as column> 22 | "${column}"<#if column_has_next>, 23 | 24 | ], 25 | "connection": [ 26 | <#list oraclereaderConnection as conn> 27 | { 28 | "table": [ 29 | <#list conn.table as tbl> 30 | "${tbl}"<#if tbl_has_next>, 31 | 32 | ], 33 | "jdbcUrl": [ 34 | <#list conn.jdbcUrl as url> 35 | "${url}"<#if url_has_next>, 36 | 37 | ] 38 | }<#if conn_has_next>, 39 | 40 | ], 41 | "where":"${oraclereaderWhere}", 42 | "fetchSize": ${oraclereaderFetchSize!'1024'}, 43 | "session": [ 44 | <#if oracleReaderSession??> 45 | <#list oracleReaderSession as session> 46 | "${session}"<#if session_has_next>, 47 | 48 | 49 | ] 50 | } 51 | }, 52 | "writer": { 53 | "name": "hdfswriter", 54 
| "parameter": { 55 | "defaultFS": "${hdfswriterDefaultFS}", 56 | "fileType": "${hdfswriterFileType}", 57 | "path": "${hdfswriterPath}", 58 | "fileName": "${hdfswriterFileName}", 59 | "column": [ 60 | <#list hdfswriterColumn as column> 61 | { 62 | "name": "${column.name}", 63 | "type": "${column.type}" 64 | }<#if column_has_next>, 65 | 66 | ], 67 | "writeMode": "${writeMode}", 68 | "fieldDelimiter": "${hdfswriterFieldDelimiter}", 69 | "compress": "${hdfswriterCompress!}", 70 | "hadoopConfig": { 71 | <#if hdfswriterHadoopConfig??> 72 | <#list hdfswriterHadoopConfig as key, value> 73 | "${key}": "${value}"<#if key_has_next>, 74 | 75 | 76 | }, 77 | "haveKerberos": ${hdfswriterHaveKerberos?then("true","false")}, 78 | "kerberosKeytabFilePath": "${hdfswriterKerberosKeytabFilePath!}", 79 | "kerberosPrincipal": "${hdfswriterKerberosPrincipal!}" 80 | } 81 | } 82 | } 83 | ] 84 | } 85 | } -------------------------------------------------------------------------------- /src/main/resources/templates/oracle2mysql_querySql.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "oraclereader", 16 | "parameter": { 17 | "username": "${oraclereaderUsername}", 18 | "password": "${oraclereaderPassword}", 19 | "splitPk": "${oraclereaderSplitPk!}", 20 | "connection": [ 21 | <#list oraclereaderConnection as conn> 22 | { 23 | "querySql": [ 24 | <#list conn.querySql as sql> 25 | "${sql}"<#if sql_has_next>, 26 | 27 | ], 28 | "jdbcUrl": [ 29 | <#list conn.jdbcUrl as url> 30 | "${url}"<#if url_has_next>, 31 | 32 | ] 33 | }<#if conn_has_next>, 34 | 35 | ], 36 | "fetchSize": ${oraclereaderFetchSize!'1024'}, 37 | "session": [ 38 | <#if oracleReaderSession??> 39 | <#list oracleReaderSession as session> 40 | "${session}"<#if session_has_next>, 41 | 42 | 43 | ] 44 | } 45 | }, 46 | "writer": { 47 | "name": "mysqlwriter", 48 | "parameter": { 49 | "writeMode": "${writeMode}", 50 | "username": "${mysqlwriterUsername}", 51 | "password": "${mysqlwriterPassword}", 52 | "batchSize": ${mysqlwriterBatchSize!'1024'}, 53 | "column": [ 54 | <#list mysqlwriterColumn as column> 55 | "${column}"<#if column_has_next>, 56 | 57 | ], 58 | "session": [ 59 | <#if mysqlwriterSession??> 60 | <#list mysqlwriterSession as session> 61 | "${session}"<#if session_has_next>, 62 | 63 | 64 | ], 65 | "preSql": [ 66 | <#if mysqlwriterPreSql??> 67 | <#list mysqlwriterPreSql as preSql> 68 | "${preSql}"<#if preSql_has_next>, 69 | 70 | 71 | ], 72 | "connection": [ 73 | <#list mysqlwriterConnection as conn> 74 | { 75 | "table": [ 76 | <#list conn.table as tbl> 77 | "${tbl}"<#if tbl_has_next>, 78 | 79 | ], 80 | "jdbcUrl": "${conn.jdbcUrl}" 81 | }<#if conn_has_next>, 82 | 83 | ], 84 | "postSql": [ 85 | <#if mysqlwriterPostSql??> 86 | <#list mysqlwriterPostSql as postSql> 87 | "${postSql}"<#if postSql_has_next>, 88 | 89 | 90 | ] 91 | } 92 | } 93 | } 94 | ] 95 | } 96 | } -------------------------------------------------------------------------------- /src/main/resources/templates/oracle2mysql_where.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": 
${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "oraclereader", 16 | "parameter": { 17 | "username": "${oraclereaderUsername}", 18 | "password": "${oraclereaderPassword}", 19 | "splitPk": "${oraclereaderSplitPk!}", 20 | "column": [ 21 | <#list oraclereaderColumn as column> 22 | "${column}"<#if column_has_next>, 23 | 24 | ], 25 | "connection": [ 26 | <#list oraclereaderConnection as conn> 27 | { 28 | "table": [ 29 | <#list conn.table as tbl> 30 | "${tbl}"<#if tbl_has_next>, 31 | 32 | ], 33 | "jdbcUrl": [ 34 | <#list conn.jdbcUrl as url> 35 | "${url}"<#if url_has_next>, 36 | 37 | ] 38 | }<#if conn_has_next>, 39 | 40 | ], 41 | "where":"${oraclereaderWhere}", 42 | "fetchSize": ${oraclereaderFetchSize!'1024'}, 43 | "session": [ 44 | <#if oracleReaderSession??> 45 | <#list oracleReaderSession as session> 46 | "${session}"<#if session_has_next>, 47 | 48 | 49 | ] 50 | } 51 | }, 52 | "writer": { 53 | "name": "mysqlwriter", 54 | "parameter": { 55 | "writeMode": "${writeMode}", 56 | "username": "${mysqlwriterUsername}", 57 | "password": "${mysqlwriterPassword}", 58 | "batchSize": ${mysqlwriterBatchSize!'1024'}, 59 | "column": [ 60 | <#list mysqlwriterColumn as column> 61 | "${column}"<#if column_has_next>, 62 | 63 | ], 64 | "session": [ 65 | <#if mysqlwriterSession??> 66 | <#list mysqlwriterSession as session> 67 | "${session}"<#if session_has_next>, 68 | 69 | 70 | ], 71 | "preSql": [ 72 | <#if mysqlwriterPreSql??> 73 | <#list mysqlwriterPreSql as preSql> 74 | "${preSql}"<#if preSql_has_next>, 75 | 76 | 77 | ], 78 | "connection": [ 79 | <#list mysqlwriterConnection as conn> 80 | { 81 | "table": [ 82 | <#list conn.table as tbl> 83 | "${tbl}"<#if tbl_has_next>, 84 | 85 | ], 86 | "jdbcUrl": "${conn.jdbcUrl}" 87 | }<#if conn_has_next>, 88 | 89 | ], 90 | "postSql": [ 91 | <#if mysqlwriterPostSql??> 92 | <#list mysqlwriterPostSql as postSql> 93 | "${postSql}"<#if postSql_has_next>, 94 | 95 | 96 | ] 97 | } 98 | } 99 | } 100 | ] 101 | } 102 | } -------------------------------------------------------------------------------- /src/main/resources/templates/oracle2oracle_querySql.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "oraclereader", 16 | "parameter": { 17 | "username": "${oraclereaderUsername}", 18 | "password": "${oraclereaderPassword}", 19 | "splitPk": "${oraclereaderSplitPk!}", 20 | "connection": [ 21 | <#list oraclereaderConnection as conn> 22 | { 23 | "querySql": [ 24 | <#list conn.querySql as sql> 25 | "${sql}"<#if sql_has_next>, 26 | 27 | ], 28 | "jdbcUrl": [ 29 | <#list conn.jdbcUrl as url> 30 | "${url}"<#if url_has_next>, 31 | 32 | ] 33 | }<#if conn_has_next>, 34 | 35 | ], 36 | "fetchSize": ${oraclereaderFetchSize!'1024'}, 37 | "session": [ 38 | <#if oracleReaderSession??> 39 | <#list oracleReaderSession as session> 40 | "${session}"<#if session_has_next>, 41 | 42 | 43 | ] 44 | } 45 | }, 46 | "writer": { 47 | "name": "oraclewriter", 48 | "parameter": { 49 | "username": "${oraclewriterUsername}", 50 | "password": "${oraclewriterPassword}", 51 | "batchSize": ${oraclewriterBatchSize!'1024'}, 52 | "column": [ 53 | <#list oraclewriterColumn as 
column> 54 | "${column}"<#if column_has_next>, 55 | 56 | ], 57 | "session": [ 58 | <#if oraclewriterSession??> 59 | <#list oraclewriterSession as session> 60 | "${session}"<#if session_has_next>, 61 | 62 | 63 | ], 64 | "preSql": [ 65 | <#if oraclewriterPreSql??> 66 | <#list oraclewriterPreSql as preSql> 67 | "${preSql}"<#if preSql_has_next>, 68 | 69 | 70 | ], 71 | "connection": [ 72 | <#list oraclewriterConnection as conn> 73 | { 74 | "table": [ 75 | <#list conn.table as tbl> 76 | "${tbl}"<#if tbl_has_next>, 77 | 78 | ], 79 | "jdbcUrl": "${conn.jdbcUrl}" 80 | }<#if conn_has_next>, 81 | 82 | ], 83 | "postSql": [ 84 | <#if oraclewriterPostSql??> 85 | <#list oraclewriterPostSql as postSql> 86 | "${postSql}"<#if postSql_has_next>, 87 | 88 | 89 | ] 90 | } 91 | } 92 | } 93 | ] 94 | } 95 | } -------------------------------------------------------------------------------- /src/main/resources/templates/oracle2oracle_where.ftl: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": ${(setting.speed.channel)!"3"} 6 | }, 7 | "errorLimit": { 8 | "record": ${(setting.errorLimit.record)!"0"}, 9 | "percentage": ${(setting.errorLimit.percentage)!"0.02"} 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "oraclereader", 16 | "parameter": { 17 | "username": "${oraclereaderUsername}", 18 | "password": "${oraclereaderPassword}", 19 | "splitPk": "${oraclereaderSplitPk!}", 20 | "column": [ 21 | <#list oraclereaderColumn as column> 22 | "${column}"<#if column_has_next>, 23 | 24 | ], 25 | "connection": [ 26 | <#list oraclereaderConnection as conn> 27 | { 28 | "table": [ 29 | <#list conn.table as tbl> 30 | "${tbl}"<#if tbl_has_next>, 31 | 32 | ], 33 | "jdbcUrl": [ 34 | <#list conn.jdbcUrl as url> 35 | "${url}"<#if url_has_next>, 36 | 37 | ] 38 | }<#if conn_has_next>, 39 | 40 | ], 41 | "where":"${oraclereaderWhere}", 42 | "fetchSize": ${oraclereaderFetchSize!'1024'}, 43 | "session": [ 44 | <#if oracleReaderSession??> 45 | <#list oracleReaderSession as session> 46 | "${session}"<#if session_has_next>, 47 | 48 | 49 | ] 50 | } 51 | }, 52 | "writer": { 53 | "name": "oraclewriter", 54 | "parameter": { 55 | "username": "${oraclewriterUsername}", 56 | "password": "${oraclewriterPassword}", 57 | "batchSize": ${oraclewriterBatchSize!'1024'}, 58 | "column": [ 59 | <#list oraclewriterColumn as column> 60 | "${column}"<#if column_has_next>, 61 | 62 | ], 63 | "session": [ 64 | <#if oraclewriterSession??> 65 | <#list oraclewriterSession as session> 66 | "${session}"<#if session_has_next>, 67 | 68 | 69 | ], 70 | "preSql": [ 71 | <#if oraclewriterPreSql??> 72 | <#list oraclewriterPreSql as preSql> 73 | "${preSql}"<#if preSql_has_next>, 74 | 75 | 76 | ], 77 | "connection": [ 78 | <#list oraclewriterConnection as conn> 79 | { 80 | "table": [ 81 | <#list conn.table as tbl> 82 | "${tbl}"<#if tbl_has_next>, 83 | 84 | ], 85 | "jdbcUrl": "${conn.jdbcUrl}" 86 | }<#if conn_has_next>, 87 | 88 | ], 89 | "postSql": [ 90 | <#if oraclewriterPostSql??> 91 | <#list oraclewriterPostSql as postSql> 92 | "${postSql}"<#if postSql_has_next>, 93 | 94 | 95 | ] 96 | } 97 | } 98 | } 99 | ] 100 | } 101 | } --------------------------------------------------------------------------------
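For the Azkaban-scheduled path, the azkaban section of application-template.yml names two artefacts, dataxJob.job and dataxParams.properties, and dataxJob.job substitutes ${DATAX_HOME} and ${DATAX_JSON_FILE_NAME} at run time. Since Azkaban executes flows from an uploaded project zip, the service presumably writes those two parameters into dataxParams.properties and zips it together with the job file before uploading (ZipUtil/AzkabanServiceImpl in the source tree). A minimal sketch of that packaging step, with invented values and paths, could look like this:

```
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/** Sketch only: build an Azkaban project zip containing dataxJob.job and dataxParams.properties. */
public class AzkabanJobZipSketch {

    public static void main(String[] args) throws IOException {
        // the two placeholders referenced by dataxJob.job; values are illustrative
        Properties dataxParams = new Properties();
        dataxParams.setProperty("DATAX_HOME", "/opt/datax/");
        dataxParams.setProperty("DATAX_JSON_FILE_NAME", "/opt/datax/job/mysql2hive_demo.json");

        ByteArrayOutputStream paramsBytes = new ByteArrayOutputStream();
        dataxParams.store(paramsBytes, "parameters consumed by dataxJob.job");

        // azkaban.localDicPath is "azkaban/"; the zip name is illustrative
        try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream("azkaban/dataxJob.zip"))) {
            // 1. the job definition, copied verbatim from src/main/resources/dataxJob.job
            zip.putNextEntry(new ZipEntry("dataxJob.job"));
            try (InputStream job = new FileInputStream("src/main/resources/dataxJob.job")) {
                byte[] buffer = new byte[4096];
                int read;
                while ((read = job.read(buffer)) != -1) {
                    zip.write(buffer, 0, read);
                }
            }
            zip.closeEntry();

            // 2. the properties file Azkaban uses to resolve ${...} in the job file
            zip.putNextEntry(new ZipEntry("dataxParams.properties"));
            zip.write(paramsBytes.toByteArray());
            zip.closeEntry();
        }
    }
}
```

The resulting zip is what the Azkaban service presumably uploads before triggering the flow; the HTTP calls themselves are not shown here.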