├── README.md ├── assets └── Metastore.png ├── gen-java └── org │ └── apache │ └── ranger │ └── binding │ └── metastore │ └── thrift │ ├── MetaStoreHistoryService.java │ ├── MetaStoreUpdateService.java │ ├── MetaStoreUpdateServiceVersion.java │ ├── TErrorCode.java │ ├── TFetchUpdatesRequest.java │ ├── TFetchUpdatesResponse.java │ ├── TOperation.java │ ├── TStatus.java │ ├── TTableType.java │ ├── TUpdateDelta.java │ ├── TUpdateMetadataRequest.java │ └── TUpdateMetadataResponse.java ├── hive-tools.properties ├── log4j.properties ├── pom.xml ├── script ├── DelMetaData.sh ├── MetastoreChangelog.sh ├── merge2hivecluster1.sh ├── merge2hivecluster2.sh ├── merge2hivecluster3.sh ├── merge2hivecluster4.sh └── merge2hivecluster5.sh └── src └── main ├── java ├── com │ └── netease │ │ └── hivetools │ │ ├── Main.java │ │ ├── apps │ │ ├── DelMetaData.java │ │ ├── HdfsTest.java │ │ ├── HiveTest.java │ │ ├── Mammut.java │ │ ├── MetaDataMerge.java │ │ ├── MetastoreChangelog.java │ │ └── SchemaToMetaBean.java │ │ ├── mammut │ │ └── PfHiveSite.java │ │ ├── mappers │ │ ├── MammutMapper.java │ │ └── MetaDataMapper.java │ │ ├── meta │ │ ├── BucketingCols.java │ │ ├── Cds.java │ │ ├── ColumnsV2.java │ │ ├── CompactionQueue.java │ │ ├── CompletedTxnComponents.java │ │ ├── DatabaseParams.java │ │ ├── DbPrivs.java │ │ ├── Dbs.java │ │ ├── DelegationTokens.java │ │ ├── FuncRu.java │ │ ├── Funcs.java │ │ ├── GlobalPrivs.java │ │ ├── HiveLocks.java │ │ ├── Idxs.java │ │ ├── IndexParams.java │ │ ├── MasterKeys.java │ │ ├── NextCompactionQueueId.java │ │ ├── NextLockId.java │ │ ├── NextTxnId.java │ │ ├── NotificationLog.java │ │ ├── NotificationSequence.java │ │ ├── NucleusTables.java │ │ ├── PartColPrivs.java │ │ ├── PartColStats.java │ │ ├── PartPrivs.java │ │ ├── PartitionEvents.java │ │ ├── PartitionKeyVals.java │ │ ├── PartitionKeys.java │ │ ├── PartitionParams.java │ │ ├── Partitions.java │ │ ├── RoleMap.java │ │ ├── Roles.java │ │ ├── SdParams.java │ │ ├── Sds.java │ │ ├── SequenceTable.java │ │ ├── SerdeParams.java │ │ ├── Serdes.java │ │ ├── SkewedColNames.java │ │ ├── SkewedColValueLocMap.java │ │ ├── SkewedStringList.java │ │ ├── SkewedStringListValues.java │ │ ├── SkewedValues.java │ │ ├── SortCols.java │ │ ├── TabColStats.java │ │ ├── TableParams.java │ │ ├── TblColPrivs.java │ │ ├── TblPrivs.java │ │ ├── Tbls.java │ │ ├── TxnComponents.java │ │ ├── Txns.java │ │ ├── TypeFields.java │ │ ├── Types.java │ │ └── Version.java │ │ └── service │ │ └── MyBatisUtil.java └── org │ └── apache │ └── ranger │ └── binding │ └── metastore │ └── thrift │ ├── MetaStoreHistoryService.java │ ├── MetaStoreUpdateService.java │ ├── MetaStoreUpdateServiceVersion.java │ ├── TErrorCode.java │ ├── TFetchUpdatesRequest.java │ ├── TFetchUpdatesResponse.java │ ├── TOperation.java │ ├── TStatus.java │ ├── TTableType.java │ ├── TUpdateDelta.java │ ├── TUpdateMetadataRequest.java │ └── TUpdateMetadataResponse.java ├── resources ├── Hive-DDL-MySQL-CREATE.sql ├── MammutMapper.xml ├── MetaDataMapper.xml ├── hive-tools.properties ├── log4j.properties └── mybatis-config.xml └── thrift └── MetastoreUpdater.thrift /README.md: -------------------------------------------------------------------------------- 
1 | ## hive-tools Project Overview
2 |
3 | NetEase Group runs several hundred Hive clusters of various sizes. To meet the NetEase Mammut big-data platform's requirement for unified metadata management, we need to merge the metadata of several independent Hive clusters without moving the data files in HDFS. For example, the metadata of hive2, hive3, and hive4 can all be merged into hive1's metadata MySQL, after which the data of hive2, hive3, and hive4 can be processed from hive1.
4 |
5 | Our first thought was Hive's built-in EXPORT command, which exports the data and metadata of a given database or table to a local or HDFS directory, after which the IMPORT command can load that metadata and data into a new Hive warehouse. But the following problems make this approach unsuitable for our scenario:
6 |
7 | 1. We do not need to import the data itself again;
8 | 2. Each of our Hive warehouses has up to a hundred thousand tables and tens of millions of partitions, so it is impossible to name every partition in an IMPORT command;
9 | 3. In our tests the IMPORT command is also very slow, and after a sporadic import failure there is no way to roll back the partially imported metadata; the only option is to run drop table by hand in Hive, but our production Hive is configured to delete the data together with a dropped table, which is unacceptable;
10 |
11 | So we decided to build our own Hive metadata migration and merge tool, with the following requirements:
12 |
13 | 1. Migrate all metadata of one Hive cluster into a target Hive cluster without moving any data;
14 | 2. If the migration fails, roll back to the state before the metadata import;
15 | 3. The source Hive service may be stopped, but the target Hive service must stay online during the migration;
16 | 4. Keep the migration under ten minutes to limit the business impact on the migrating party;
17 |
18 | ## Why merging metadata is hard
19 |
20 | Hive's metadata (the metastore) is usually stored in a MySQL database. In hive-1.2.1 the metadata is spread over 54 tables, for example `DBS` for database names, `TBLS` for table names, `PARTITIONS` for partition information, and so on.
21 |
22 | ### The metadata tables have very complex dependencies
23 |
24 | ![Metastore](assets/Metastore.png)
25 |
26 | These 54 metadata tables are bound together by strong primary/foreign key dependencies through their `ID` columns, for example:
27 |
28 | 1. The `DB_ID` column of `DBS` is referenced as a foreign key by more than 20 tables;
29 | 2. The `TBL_ID` column of `TBLS` is referenced as a foreign key by more than 20 tables;
30 | 3. In `TBLS`, `DB_ID` is a foreign key into `DBS`, and `SD_ID` is a foreign key into `SDS`;
31 | 4. In `PARTITIONS`, `TBL_ID` is a foreign key into `TBLS`, and `SD_ID` is a foreign key into `SDS`;
32 | 5. In `DATABASE_PARAMS`, `DB_ID` is a foreign key into `DBS`;
33 |
34 | This nesting links the tables into chains such as [DBS]=>[TBLS]=>[PARTITIONS]=>[PARTITION_KEY_VALS], and there are four or five such chains nested five or more levels deep. This creates the following problems for a metadata merge:
35 |
36 | 1. Every primary key ID in the source Hive must be changed, otherwise it will collide with an existing primary key ID in the target Hive and the merge will fail;
37 | 2. After the source IDs are changed, the source's own primary/foreign key relationships must still hold, which means the keys of every related table must be changed in exactly the same way: if an ID in DBS changes from 1 to 100, then DB_ID in TBLS, PARTITIONS, and every other child table must also change from 1 to 100;
38 | 3. Tables must be imported in dependency order: parent tables first, then child tables, then grandchild tables, and so on, or the import cannot succeed;
39 |
40 | ### Rewriting the primary and foreign key IDs
41 |
42 | We solved the ID-rewriting problem with a simple trick:
43 |
44 | 1. Query the maximum ID of every table in the target Hive, and add it to every ID of the corresponding table in the source Hive to produce the ID used after import. The formula is: new ID = source ID + target max ID. Because every table applies the same rule, the program never needs to track the parent/child key relationships between the tables.
45 | 2. The one remaining risk is that a new DB is created in the target Hive while an online import is running, causing a DB_ID collision. To guard against this we add a skip value on every import, so the formula becomes: new ID = source ID + target max ID + skip value (100).
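To make the rule concrete, here is a minimal JDBC sketch of an offset-based copy (illustration only: the real tool goes through mybatis mappers, and the class name, connection settings, and the reduced column list are ours). Each ID sequence gets its own offset, a child table's foreign key is shifted by its parent table's offset, and parent tables are copied before child tables:

```java
import java.sql.*;

// Illustration of the ID-offset rule: every ID in a source table is shifted by
// (max ID of the corresponding target table + a skip value), so all primary and
// foreign keys stay consistent without tracking the relationships one by one.
public class IdOffsetSketch {
    static final long SKIP = 100; // guards against IDs created in the target during an online import

    public static void main(String[] args) throws SQLException {
        // Hypothetical URLs and credentials; the real tool reads them from hive-tools.properties.
        try (Connection src = DriverManager.getConnection("jdbc:mysql://src-host:3306/exchange_db", "u", "p");
             Connection dst = DriverManager.getConnection("jdbc:mysql://dst-host:3306/hivecluster1", "u", "p")) {
            long dbOffset  = maxId(dst, "DBS", "DB_ID") + SKIP;   // offset for the DBS ID sequence
            long tblOffset = maxId(dst, "TBLS", "TBL_ID") + SKIP; // offset for the TBLS ID sequence
            // ... one offset per table; DBS rows would be copied before TBLS rows.
            // Only the key columns are shown; a real copy carries every column.
            try (Statement q = src.createStatement();
                 ResultSet rs = q.executeQuery("SELECT TBL_ID, DB_ID, TBL_NAME FROM TBLS");
                 PreparedStatement ins = dst.prepareStatement(
                         "INSERT INTO TBLS (TBL_ID, DB_ID, TBL_NAME) VALUES (?, ?, ?)")) {
                while (rs.next()) {
                    ins.setLong(1, rs.getLong("TBL_ID") + tblOffset); // new ID = source ID + target max ID + skip
                    ins.setLong(2, rs.getLong("DB_ID") + dbOffset);   // FK shifted by the parent table's offset
                    ins.setString(3, rs.getString("TBL_NAME"));
                    ins.executeUpdate();
                }
            }
        }
    }

    static long maxId(Connection c, String table, String idColumn) throws SQLException {
        try (Statement s = c.createStatement();
             ResultSet rs = s.executeQuery("SELECT COALESCE(MAX(" + idColumn + "), 0) FROM " + table)) {
            rs.next();
            return rs.getLong(1);
        }
    }
}
```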
47 | ### Database operations
48 |
49 | We use mybatis to operate on the two MySQL databases, source and target: the metadata is read from the source MySQL, its primary/foreign key IDs are rewritten by the rules above, and the rows are inserted into the target MySQL.
50 |
51 | 1. mybatis needs a bean class per table to operate on it, and typing out all 54 of them by hand would be both tiring and error-prone, so we looked for a shortcut: we use `druid` to parse Hive's table-creation DDL and `codemodel` to automatically generate the 54 corresponding Java bean classes. See the code in `com.netease.hivetools.apps.SchemaToMetaBean`.
52 |
53 |
54 |
55 | ### Metadata migration procedure
56 |
57 | 1. Step 1: back up the target and source databases before the migration.
58 |
59 | 2. Step 2: import the source database's metadata into the temporary database exchange_db. The temporary database is needed because the source hive cluster is still serving online traffic and the ID sequences in its metadata tables keep moving; hive-tools only supports keeping the destination database online.
60 |
61 | 3. The temporary database exchange_db makes it possible to drop the unneeded hive dbs, and its fixed database name standardizes the whole migration procedure, lowering the chance of mistakes from hand-edited command parameters.
62 |
63 | 4. Configure the JDBC settings for the source and destination databases in the hive-tools.properties file:
64 |
65 | ```shell
66 | # exchange_db
67 | exchange_db.jdbc.driverClassName=com.mysql.jdbc.Driver
68 | exchange_db.jdbc.url=jdbc:mysql://10.172.121.126:3306/hivecluster1?useUnicode=true&characterEncoding=utf8&autoReconnect=true&allowMultiQueries=true
69 | exchange_db.jdbc.username=src_hive
70 | exchange_db.jdbc.password=abcdefg
71 |
72 | # dest_hive
73 | dest_hive.jdbc.driverClassName=com.mysql.jdbc.Driver
74 | dest_hive.jdbc.url=jdbc:mysql://10.172.121.126:3306/hivecluster1?useUnicode=true&characterEncoding=utf8&autoReconnect=true&allowMultiQueries=true
75 | dest_hive.jdbc.username=dest_hive
76 | dest_hive.jdbc.password=abcdefg
77 | ```
78 |
79 | 5. Run the metadata migration command:
80 |
81 | ```shell
82 | export SOURCE_NAME=exchange_db
83 | export DEST_NAME=dest_hive
84 |
85 | /home/hadoop/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.MetaDataMerge --s=$SOURCE_NAME --d=$DEST_NAME
86 | ```
87 |
88 | 6. Before migrating any metadata, hive-tools checks the source and destination metastores for hive dbs with the same name; if any are found, it aborts the migration and reports them.
89 |
90 | 7. Run the command that deletes the duplicated databases:
91 |
92 | ```shell
93 | # Edit the DEL_DB parameter in the script (comma-separated database names; default must be deleted) and DEL_TBL (empty deletes all tables)
94 | export SOURCE=exchange_db
95 | export DEL_DB=default,nisp_nhids,real,azkaban_autotest_db
96 | export DEL_TBL=
97 |
98 | ~/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.DelMetaData --s=$SOURCE --d=$DEL_DB --t=$DEL_TBL
99 | ```
100 |
101 | 8. Run the metadata migration command again.
102 |
103 | 9. Check the console or log-file output of the migration command. If the merge failed, remove the migrated metadata by running the delete command for the affected hive dbs against the destination database; if there were no errors, use a hive client to verify that the newly migrated metadata works in the destination database.
104 |
105 | 10. Following this migration procedure strictly, we have used hive-tools to migrate and merge a large number of hive metastores inside NetEase Group, with almost no problems.
106 |
107 | ## compile
108 |
109 | mvn clean compile package -Dmaven.test.skip=true
110 |
111 |
112 |
113 | ## history
114 |
115 |
116 | Release Notes - Hive-tools - Version 0.1.4
117 |
118 | * [hive-tools-0.1.5]
119 | MetaDataMerge add update SEQUENCE_TABLE NO
120 |
121 | * [hive-tools-0.1.4]
122 | MetastoreChangelog -z=zkHost -c=changelog -d=database -t=table
123 | thrift -gen java src/main/thrift/MetastoreUpdater.thrift
124 |
125 | * [hive-tools-0.1.3]
126 | - delete database metadata database_name/table_name support % wildcard
127 |
128 | * [hive-tools-0.1.2]
129 | - hdfs proxy user test
130 |
131 | * [hive-tools-0.1.1]
132 | - delete database metadata
133 |
134 | * [hive-tools-0.1.0]
135 | - hive meta schema convert to java bean
136 | - multiple hive meta merge -------------------------------------------------------------------------------- /assets/Metastore.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NetEase/hive-tools/da3338bff95fddcfc9aa6b9e10b28d13b6666d7e/assets/Metastore.png -------------------------------------------------------------------------------- /gen-java/org/apache/ranger/binding/metastore/thrift/MetaStoreUpdateServiceVersion.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum MetaStoreUpdateServiceVersion implements org.apache.thrift.TEnum { 15 | V1(0); 16 | 17 | private final int value; 18 | 19 | private MetaStoreUpdateServiceVersion(int value) { 20 | this.value = value; 21 | } 22 | 23 | /** 24 | * Get the integer value of this enum value, as defined in the Thrift IDL. 25 | */ 26 | public int getValue() { 27 | return value; 28 | } 29 | 30 | /** 31 | * Find a the enum type by its integer value, as defined in the Thrift IDL. 32 | * @return null if the value is not found.
33 | */ 34 | public static MetaStoreUpdateServiceVersion findByValue(int value) { 35 | switch (value) { 36 | case 0: 37 | return V1; 38 | default: 39 | return null; 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /gen-java/org/apache/ranger/binding/metastore/thrift/TErrorCode.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum TErrorCode implements org.apache.thrift.TEnum { 15 | OK(0), 16 | ERROR(1), 17 | INVALID(2); 18 | 19 | private final int value; 20 | 21 | private TErrorCode(int value) { 22 | this.value = value; 23 | } 24 | 25 | /** 26 | * Get the integer value of this enum value, as defined in the Thrift IDL. 27 | */ 28 | public int getValue() { 29 | return value; 30 | } 31 | 32 | /** 33 | * Find a the enum type by its integer value, as defined in the Thrift IDL. 34 | * @return null if the value is not found. 35 | */ 36 | public static TErrorCode findByValue(int value) { 37 | switch (value) { 38 | case 0: 39 | return OK; 40 | case 1: 41 | return ERROR; 42 | case 2: 43 | return INVALID; 44 | default: 45 | return null; 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /gen-java/org/apache/ranger/binding/metastore/thrift/TOperation.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum TOperation implements org.apache.thrift.TEnum { 15 | CREATE_DATABASE(0), 16 | DROP_DATABASE(1), 17 | CREATE_TABLE(2), 18 | DROP_TABLE(3), 19 | ALTER_TABLE(4), 20 | REMAME_TABLE(5), 21 | ADD_PARTITION(6), 22 | DROP_PARTITION(7), 23 | ALTER_PARTITION(8), 24 | RENAME_PARTITION(9), 25 | INIT_ID(10); 26 | 27 | private final int value; 28 | 29 | private TOperation(int value) { 30 | this.value = value; 31 | } 32 | 33 | /** 34 | * Get the integer value of this enum value, as defined in the Thrift IDL. 35 | */ 36 | public int getValue() { 37 | return value; 38 | } 39 | 40 | /** 41 | * Find a the enum type by its integer value, as defined in the Thrift IDL. 42 | * @return null if the value is not found. 
43 | */ 44 | public static TOperation findByValue(int value) { 45 | switch (value) { 46 | case 0: 47 | return CREATE_DATABASE; 48 | case 1: 49 | return DROP_DATABASE; 50 | case 2: 51 | return CREATE_TABLE; 52 | case 3: 53 | return DROP_TABLE; 54 | case 4: 55 | return ALTER_TABLE; 56 | case 5: 57 | return REMAME_TABLE; 58 | case 6: 59 | return ADD_PARTITION; 60 | case 7: 61 | return DROP_PARTITION; 62 | case 8: 63 | return ALTER_PARTITION; 64 | case 9: 65 | return RENAME_PARTITION; 66 | case 10: 67 | return INIT_ID; 68 | default: 69 | return null; 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /gen-java/org/apache/ranger/binding/metastore/thrift/TTableType.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum TTableType implements org.apache.thrift.TEnum { 15 | TABLE(0), 16 | VIEW(1); 17 | 18 | private final int value; 19 | 20 | private TTableType(int value) { 21 | this.value = value; 22 | } 23 | 24 | /** 25 | * Get the integer value of this enum value, as defined in the Thrift IDL. 26 | */ 27 | public int getValue() { 28 | return value; 29 | } 30 | 31 | /** 32 | * Find a the enum type by its integer value, as defined in the Thrift IDL. 33 | * @return null if the value is not found. 34 | */ 35 | public static TTableType findByValue(int value) { 36 | switch (value) { 37 | case 0: 38 | return TABLE; 39 | case 1: 40 | return VIEW; 41 | default: 42 | return null; 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /hive-tools.properties: -------------------------------------------------------------------------------- 
1 | # Step 1: back up all databases
2 | # hadoop519 > cd /home/hadoop/database-auto-backup/
3 | # hadoop519 > ./autoBackupDB.sh
4 | # Step 2: empty the exchange_db database in the MySQL instance on hadoop357
5 | # hadoop357 > mysql -uroot -proot -Dexchange_db;
6 | # mysql > use exchange_db;
7 | # mysql > drop database exchange_db;
8 | # mysql > create database exchange_db;
9 | # mysql > exit;
10 | # Step 3: with the metadata MySQL IP, username and password provided by the user, dump the database via mysqldump (apply for RDS permission)
11 | # hadoop357 > mysqldump -hMysqlHostIP -uUserName -pPassword --single-transaction MysqlDatabaseName > MysqlDatabaseName.sql;
12 | # Step 4: import the user's metadata into exchange_db
13 | # hadoop357 > mysql -uroot -proot -Dexchange_db;
14 | # mysql > use exchange_db;
15 | # mysql > source ./MysqlDatabaseName.sql
16 | # Step 5: check for database names that clash with the DEST database, and ask the business owner to rename the clashing databases
17 | # : the cluster that owns the HDFS files of the user's Hive tables determines which hive cluster to import into
18 | # hadoop357 > cd /home/hadoop/hive-tools/
19 | # hadoop357 > ./merge2hivecluster[1-5].sh
20 | # hadoop357 > if any database names clash, the clashing names are printed
21 | # Step 6: delete the surplus DBs from exchange_db
22 | # hadoop357 > cd /home/hadoop/hive-tools/
23 | # hadoop357 > vi delMetaData.sh
24 | # edit the DEL_DB parameter in the script (comma-separated database names; default must be deleted) and DEL_TBL (empty deletes all tables)
25 | # hadoop357 > ./delMetaData.sh
26 | # Step 7: re-run the metadata merge script
27 | # hadoop357 > cd /home/hadoop/hive-tools/
28 | # hadoop357 > ./merge2hivecluster[1-5].sh
29 | # Step 8: check the merge log, and test through hive whether the import succeeded
30 |
31 | ########################################### online hive cluster mysql
################################################################ 32 | # online_cluster1 33 | online_cluster1.jdbc.driverClassName=com.mysql.jdbc.Driver 34 | online_cluster1.jdbc.url=jdbc:mysql://10.172.121.126:3306/hivecluster1?useUnicode=true&characterEncoding=utf8&autoReconnect=true&allowMultiQueries=true 35 | online_cluster1.jdbc.username=hivecluster1 36 | online_cluster1.jdbc.password=SIpFGv2KgChQ 37 | 38 | ############################################### Intermediate temporary database ############################################################ 39 | 40 | # exchange_db 41 | exchange_db.jdbc.driverClassName=com.mysql.jdbc.Driver 42 | exchange_db.jdbc.url=jdbc:mysql://10.120.232.16:3306/exchange_db?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 43 | exchange_db.jdbc.username=exchange_db 44 | exchange_db.jdbc.password=exchange_db -------------------------------------------------------------------------------- /log4j.properties: -------------------------------------------------------------------------------- 1 | #定义输出格式 2 | ConversionPattern=%d %-p %l - %m%n 3 | 4 | log4j.rootLogger=INFO,Console,DailyFile 5 | log4j.logger.com.cnblogs.lzrabbit=DEBUG 6 | log4j.logger.org.springframework=ERROR 7 | log4j.logger.org.mybatis=ERROR 8 | log4j.logger.org.apache.ibatis=ERROR 9 | log4j.logger.org.quartz=ERROR 10 | log4j.logger.org.apache.axis2=ERROR 11 | log4j.logger.org.apache.axiom=ERROR 12 | log4j.logger.org.apache=ERROR 13 | log4j.logger.httpclient=ERROR 14 | #log4j.additivity.org.springframework=false 15 | #Console 16 | log4j.appender.Console=org.apache.log4j.ConsoleAppender 17 | log4j.appender.Console.Threshold=DEBUG 18 | log4j.appender.Console.Target=System.out 19 | log4j.appender.Console.layout=org.apache.log4j.PatternLayout 20 | log4j.appender.Console.layout.ConversionPattern=${ConversionPattern} 21 | #log4j.appender.Console.encoding=UTF-8 22 | 23 | #org.apache.log4j.DailyRollingFileAppender 24 | log4j.appender.DailyFile=org.apache.log4j.DailyRollingFileAppender 25 | log4j.appender.DailyFile.DatePattern=.yyyy-MM-dd 26 | log4j.appender.DailyFile.File=logs/hive-tools.log 27 | log4j.appender.DailyFile.Append=true 28 | log4j.appender.DailyFile.Threshold=DEBUG 29 | log4j.appender.DailyFile.layout=org.apache.log4j.PatternLayout 30 | log4j.appender.DailyFile.layout.ConversionPattern=${ConversionPattern} 31 | log4j.appender.DailyFile.encoding=UTF-8 32 | 33 | # %c 输出日志信息所属的类的全名 34 | # %d 输出日志时间点的日期或时间,默认格式为ISO8601,也可以在其后指定格式,比如:%d{yyy-MM-dd HH:mm:ss},输出类似:2002-10-18- 22:10:28 35 | # %f 输出日志信息所属的类的类名 36 | # %l 输出日志事件的发生位置,即输出日志信息的语句处于它所在的类的第几行 37 | # %m 输出代码中指定的信息,如log(message)中的message 38 | # %n 输出一个回车换行符,Windows平台为“rn”,Unix平台为“n” 39 | # %p 输出优先级,即DEBUG,INFO,WARN,ERROR,FATAL。如果是调用debug()输出的,则为DEBUG,依此类推 40 | # %r 输出自应用启动到输出该日志信息所耗费的毫秒数 41 | # %t 输出产生该日志事件的线程名 -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 4.0.0 22 | com.netease 23 | hive-tools 24 | 0.1.6 25 | 26 | UTF-8 27 | UTF-8 28 | 1.7 29 | 2.11.7 30 | 2.11 31 | 1.7.16 32 | 1.2.17 33 | 1.6.0 34 | 20.0-rc1 35 | 0.8.0 36 | 10.12.1.1 37 | 10.12.1.1 38 | 3.2.8 39 | 2.6 40 | 1.0.19 41 | 5.1.39 42 | 3.8.1 43 | 44 | 64m 45 | 512m 46 | 512m 47 | 48 | 49 | 50 | apache.snapshots.https 51 | Apache Development Snapshot Repository 52 | https://repository.apache.org/content/repositories/snapshots 53 | 54 | true 55 | 56 | 57 | 58 | apache.public.https 59 | Apache Development Snapshot Repository 60 | 
https://repository.apache.org/content/repositories/public 61 | 62 | true 63 | 64 | 65 | false 66 | 67 | 68 | 69 | 70 | 71 | com.alibaba 72 | druid 73 | ${druid.version} 74 | 75 | 76 | com.sun.codemodel 77 | codemodel 78 | ${codemodel.version} 79 | 80 | 81 | com.google.guava 82 | guava 83 | ${guava.version} 84 | 85 | 86 | org.apache.calcite 87 | calcite-core 88 | ${calcite.version} 89 | 90 | 91 | org.apache.calcite 92 | calcite-avatica 93 | ${calcite.version} 94 | 95 | 96 | org.apache.calcite 97 | calcite-linq4j 98 | ${calcite.version} 99 | 100 | 101 | net.sf.jsqlparser 102 | jsqlparser 103 | ${jsqlparser.version} 104 | 105 | 106 | log4j 107 | log4j 108 | ${log4j.version} 109 | 110 | 111 | org.apache.derby 112 | derby 113 | ${derby.version} 114 | 115 | 116 | org.apache.derby 117 | derbyclient 118 | ${derbyclient.version} 119 | 120 | 121 | mysql 122 | mysql-connector-java 123 | ${mysql-connector-java.version} 124 | 125 | 126 | org.mybatis 127 | mybatis 128 | ${mybatis.version} 129 | 130 | 131 | junit 132 | junit 133 | ${junit.version} 134 | test 135 | 136 | 137 | org.apache.commons 138 | commons-math3 139 | 3.5 140 | 141 | 142 | commons-cli 143 | commons-cli 144 | 1.2 145 | 146 | 147 | org.apache.hadoop 148 | hadoop-common 149 | 2.7.3 150 | 151 | 152 | org.apache.hive 153 | hive-jdbc 154 | 1.2.1 155 | 156 | 157 | junit 158 | junit 159 | 4.12 160 | 161 | 162 | org.apache.curator 163 | curator-client 164 | 2.6.0 165 | 166 | 167 | org.apache.curator 168 | curator-framework 169 | 2.6.0 170 | 171 | 172 | org.apache.curator 173 | curator-recipes 174 | 2.6.0 175 | 176 | 177 | 178 | 179 | 180 | 191 | 192 | 193 | *.sql 194 | *.json 195 | hive-tools.sh 196 | hive-tools.properties 197 | log4j.properties 198 | 199 | src/main/resources 200 | true 201 | 202 | 203 | 204 | 205 | org.apache.maven.plugins 206 | maven-compiler-plugin 207 | 3.3 208 | 209 | ${java.version} 210 | ${java.version} 211 | 212 | 213 | 214 | org.apache.maven.plugins 215 | maven-jar-plugin 216 | 217 | 218 | 219 | true 220 | com.netease.hivetools.apps.MetaDataMerge 221 | 222 | 223 | 224 | 225 | 226 | maven-assembly-plugin 227 | 2.2-beta-5 228 | 229 | false 230 | 231 | 232 | com.netease.hivetools.apps.MetaDataMerge 233 | 234 | 235 | 236 | jar-with-dependencies 237 | 238 | 239 | 240 | 241 | make-assembly 242 | package 243 | 244 | single 245 | 246 | 247 | 248 | 249 | 250 | 251 | org.apache.maven.plugins 252 | maven-resources-plugin 253 | 2.6 254 | 255 | 256 | copy-resources 257 | package 258 | 259 | copy-resources 260 | 261 | 262 | UTF-8 263 | ${project.build.directory}/classes 264 | 265 | 266 | src/main/resources/ 267 | false 268 | 269 | *.json 270 | *.sql 271 | hive-tools.sh 272 | hive-tools.properties 273 | log4j.properties 274 | 275 | 276 | 277 | 278 | 279 | 280 | copy-command 281 | package 282 | 283 | copy-resources 284 | 285 | 286 | UTF-8 287 | ${project.build.directory} 288 | 289 | 290 | src/main/resources/ 291 | true 292 | 293 | hive-tools.sh 294 | hive-tools.properties 295 | log4j.properties 296 | 297 | 298 | 299 | 300 | 301 | 302 | 303 | 304 | 305 | 306 | -------------------------------------------------------------------------------- /script/DelMetaData.sh: -------------------------------------------------------------------------------- 1 | export SOURCE=exchange_db 2 | export DEL_DB=default,nisp_nhids,real,azkaban_autotest_db 3 | export DEL_TBL= 4 | 5 | ~/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.DelMetaData --s=$SOURCE --d=$DEL_DB --t=$DEL_TBL 
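# Argument reference (see com.netease.hivetools.apps.DelMetaData): --s names the
# source datasource and must be exchange_db; --d lists the databases to delete,
# separated by commas; --t filters by table name, and an empty value deletes
# every table in the listed databases.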
-------------------------------------------------------------------------------- /script/MetastoreChangelog.sh: -------------------------------------------------------------------------------- 1 | export ZK_HOST=hadoop944.hz.163.org 2 | export ZK_PATH=/hive-metastore-changelog/hive-cluster3 3 | export FILTE_DATABASE=beauty_dw 4 | export FILTE_TABLE=ods_beauty 5 | #export DEBUG='-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000' 6 | 7 | /home/hadoop/java-current/jre/bin/java ${DEBUG} -cp "./hive-tools-current.jar" com.netease.hivetools.apps.MetastoreChangelog --z=${ZK_HOST} --c=${ZK_PATH} --d=${FILTE_DATABASE} --t=${FILTE_TABLE} 8 | -------------------------------------------------------------------------------- /script/merge2hivecluster1.sh: -------------------------------------------------------------------------------- 1 | export SOURCE_NAME=exchange_db 2 | export DEST_NAME=online_cluster1 3 | 4 | /home/hadoop/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.MetaDataMerge --s=$SOURCE_NAME --d=$DEST_NAME -------------------------------------------------------------------------------- /script/merge2hivecluster2.sh: -------------------------------------------------------------------------------- 1 | export SOURCE_NAME=exchange_db 2 | export DEST_NAME=online_cluster2 3 | 4 | /home/hadoop/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.MetaDataMerge --s=$SOURCE_NAME --d=$DEST_NAME -------------------------------------------------------------------------------- /script/merge2hivecluster3.sh: -------------------------------------------------------------------------------- 1 | export SOURCE_NAME=exchange_db 2 | export DEST_NAME=online_cluster3 3 | 4 | /home/hadoop/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.MetaDataMerge --s=$SOURCE_NAME --d=$DEST_NAME -------------------------------------------------------------------------------- /script/merge2hivecluster4.sh: -------------------------------------------------------------------------------- 1 | export SOURCE_NAME=exchange_db 2 | export DEST_NAME=online_cluster4 3 | 4 | /home/hadoop/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.MetaDataMerge --s=$SOURCE_NAME --d=$DEST_NAME -------------------------------------------------------------------------------- /script/merge2hivecluster5.sh: -------------------------------------------------------------------------------- 1 | export SOURCE_NAME=exchange_db 2 | export DEST_NAME=online_cluster5 3 | 4 | /home/hadoop/java-current/jre/bin/java -cp "./hive-tools-current.jar" com.netease.hivetools.apps.MetaDataMerge --s=$SOURCE_NAME --d=$DEST_NAME -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/Main.java: -------------------------------------------------------------------------------- 1 | package com.netease.hivetools; 2 | 3 | 4 | import com.netease.hivetools.apps.DelMetaData; 5 | import com.netease.hivetools.apps.Mammut; 6 | import com.netease.hivetools.apps.MetaDataMerge; 7 | import com.netease.hivetools.apps.SchemaToMetaBean; 8 | import org.apache.commons.cli.*; 9 | import org.apache.log4j.Logger; 10 | import org.apache.log4j.PropertyConfigurator; 11 | import org.junit.Test; 12 | 13 | import java.sql.Connection; 14 | import java.sql.DriverManager; 15 | import java.sql.PreparedStatement; 16 | import java.sql.ResultSet; 17 | import java.util.ArrayList; 18 | 19 | public class 
Main { 20 | private static final Logger logger = Logger.getLogger(Main.class.getName()); 21 | 22 | public static void main(String[] args) { 23 | PropertyConfigurator.configure("log4j.properties"); 24 | 25 | // test(args); 26 | // cliCommond(args); 27 | } 28 | 29 | static private void cliCommond(String[] args) { 30 | Options opt = new Options(); 31 | opt.addOption(OptionBuilder.withLongOpt("p") 32 | .withDescription("处理函数名称") 33 | .withValueSeparator('=') 34 | .hasArg() 35 | .create()); 36 | opt.addOption("h", "help", false, "打印命令行帮助"); 37 | 38 | String formatstr = "hive-tools --p=[MetaDataMerge|SchemaToMetaBean|Mammut|DelMetaData] [-h/--help]"; 39 | 40 | HelpFormatter formatter = new HelpFormatter(); 41 | CommandLineParser parser = new PosixParser(); 42 | CommandLine cl = null; 43 | try { 44 | for (int i = 0; i < args.length; i ++) 45 | logger.debug("args[" +i +"] : " + args[i]); 46 | 47 | cl = parser.parse(opt, args); 48 | } catch (ParseException e) { 49 | formatter.printHelp(formatstr, opt); 50 | } 51 | if (cl.hasOption("h")) { 52 | HelpFormatter hf = new HelpFormatter(); 53 | hf.printHelp(formatstr, "", opt, ""); 54 | System.exit(1); 55 | } 56 | 57 | if (false == cl.hasOption("p")) { 58 | System.out.println("missing --t arg"); 59 | HelpFormatter hf = new HelpFormatter(); 60 | hf.printHelp(formatstr, "", opt, ""); 61 | System.exit(1); 62 | } 63 | 64 | String procName = cl.getOptionValue("p"); 65 | if (procName.equalsIgnoreCase("MetaDataMerge")) { 66 | MetaDataMerge.main(args); 67 | } else if (procName.equalsIgnoreCase("SchemaToMetaBean")) { 68 | SchemaToMetaBean.main(args); 69 | } else if (procName.equalsIgnoreCase("Mammut")) { 70 | Mammut.main(args); 71 | } else if (procName.equalsIgnoreCase("DelMetaData")) { 72 | DelMetaData.main(args); 73 | } else { 74 | System.out.println("error --p arg"); 75 | HelpFormatter hf = new HelpFormatter(); 76 | hf.printHelp(formatstr, "", opt, ""); 77 | System.exit(1); 78 | } 79 | } 80 | 81 | static class TabInfo{ 82 | public String tblId = ""; 83 | public String tblName = ""; 84 | public String tblType = ""; 85 | public String tblLocation = ""; 86 | public String partName = ""; 87 | public String partLocation = ""; 88 | 89 | public TabInfo() {} 90 | 91 | @Override 92 | public String toString() { 93 | return "tblId = " + tblId + ", tblName = " + tblName +", tblType = " + tblType +", tblLocation = " + tblLocation +", partName = " + partName +", partLocation = " + partLocation; 94 | } 95 | } 96 | 97 | @Test 98 | public static void test(String[] args) 99 | { 100 | String url = "jdbc:mysql://10.120.232.16:3306/haitao1201?useUnicode=true&characterEncoding=UTF-8"; 101 | try 102 | { 103 | Class.forName("com.mysql.jdbc.Driver").newInstance(); 104 | Connection c = DriverManager.getConnection(url, "haitao1201", "haitao1201"); 105 | PreparedStatement p = c.prepareStatement("select TBLS.TBL_ID, TBLS.TBL_NAME, TBLS.TBL_TYPE, SDS.LOCATION from TBLS, SDS where TBLS.SD_ID = SDS.SD_ID;"); // limit 300 106 | p.execute(); 107 | ResultSet rs = p.getResultSet(); 108 | ArrayList tabInfos = new ArrayList<>(); 109 | while (!rs.isLast()) 110 | { 111 | if (!rs.next()) 112 | break; 113 | 114 | TabInfo tabInfo = new TabInfo(); 115 | tabInfo.tblId = rs.getString(1); 116 | tabInfo.tblName = rs.getString(2); 117 | tabInfo.tblType = rs.getString(3); 118 | tabInfo.tblLocation = rs.getString(4)==null?"":rs.getString(4); 119 | 120 | tabInfos.add(tabInfo); 121 | } 122 | rs.close(); 123 | 124 | for (TabInfo tabInfo : tabInfos) { 125 | String sql = "select SDS.LOCATION, PARTITIONS.PART_NAME 
from SDS, PARTITIONS where PARTITIONS.SD_ID = SDS.SD_ID and TBL_ID = " + tabInfo.tblId + " limit 1"; 126 | p = c.prepareStatement(sql); 127 | p.execute(); 128 | rs = p.getResultSet(); 129 | while (!rs.isLast()) 130 | { 131 | if (!rs.next()) 132 | break; 133 | tabInfo.partLocation = rs.getString(1)==null?"":rs.getString(1); 134 | tabInfo.partName = rs.getString(2)==null?"":rs.getString(2); 135 | } 136 | rs.close(); 137 | } 138 | 139 | int count = 0, notsame = 0; 140 | for (TabInfo tabInfo : tabInfos) { 141 | count ++; 142 | boolean samePath = tabInfo.partLocation.startsWith(tabInfo.tblLocation); 143 | if (samePath) { 144 | // System.out.println("Y " + tabInfo.toString()); 145 | } else if(!samePath && !tabInfo.partLocation.isEmpty()) { 146 | notsame ++; 147 | System.out.println("N " + tabInfo.toString()); 148 | } 149 | } 150 | System.out.println("总数: " + count + ", 不相同的: " + notsame); 151 | } 152 | catch (Exception e) 153 | { 154 | e.printStackTrace(); 155 | } 156 | } 157 | 158 | } 159 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/apps/DelMetaData.java: -------------------------------------------------------------------------------- 1 | package com.netease.hivetools.apps; 2 | 3 | import com.netease.hivetools.mappers.MetaDataMapper; 4 | import com.netease.hivetools.meta.Dbs; 5 | import com.netease.hivetools.meta.Tbls; 6 | import com.netease.hivetools.service.MyBatisUtil; 7 | import org.apache.commons.cli.*; 8 | import org.apache.log4j.Logger; 9 | import org.apache.log4j.PropertyConfigurator; 10 | 11 | import java.util.HashMap; 12 | import java.util.List; 13 | import java.util.Map; 14 | import java.util.Scanner; 15 | 16 | /** 17 | * Created by hzliuxun on 17/1/22. 18 | */ 19 | public class DelMetaData { 20 | private static final Logger logger = Logger.getLogger(MetaDataMerge.class.getName()); 21 | 22 | private static String del_database_name = ""; 23 | private static String del_table_name = ""; 24 | public static void main(String[] args) { 25 | PropertyConfigurator.configure("log4j.properties"); 26 | 27 | cliCommond(args); 28 | 29 | Scanner sc = new Scanner(System.in); 30 | String useInput = ""; 31 | while (!useInput.equals("Y")) { 32 | System.err.println("请先备份数据库!"); 33 | System.err.println("删除数据源 " + MyBatisUtil.sourceName + " 中的数据库 " + del_database_name + ", 表 " + del_table_name + " 请输入[Y/n] : "); 34 | 35 | useInput = sc.nextLine(); 36 | if (useInput.equalsIgnoreCase("n")) { 37 | System.exit(1); 38 | } 39 | } 40 | 41 | String[] delDbNames = del_database_name.split(","); 42 | for (String delDbName : delDbNames) { 43 | deleteMetaData(delDbName); 44 | } 45 | } 46 | 47 | static void deleteMetaData(String delDbName) { 48 | System.out.println("==> deleteMetaData(" + delDbName + ")"); 49 | MetaDataMapper sourceMetaData = new MetaDataMapper(MyBatisUtil.sourceName); 50 | 51 | Map params = new HashMap(); 52 | params.put("database_name", delDbName); 53 | List dbs = (List) sourceMetaData.getTableRecords("DBS", params); 54 | for(Object object : dbs){ 55 | params.put("db_id", ((Dbs) object).getDbId().toString()); 56 | List tables = (List) sourceMetaData.getTableRecords("TBLS", params); 57 | for(Object table : tables){ 58 | if (del_table_name.isEmpty()) { 59 | // delete all table 60 | System.out.println("删除表名 = " + ((Tbls) table).getTblName() + ", ID = " + + ((Tbls) table).getTblId()); 61 | sourceMetaData.deleteTable((Tbls) table); 62 | } else if (del_table_name.equalsIgnoreCase(((Tbls) table).getTblName())) { 63 | // delete match 
table 64 | System.out.println("删除表名 = " + ((Tbls) table).getTblName() + ", ID = " + + ((Tbls) table).getTblId()); 65 | sourceMetaData.deleteTable((Tbls) table); 66 | break; 67 | } 68 | } 69 | sourceMetaData.deleteDatabase((Dbs) object); 70 | } 71 | System.out.println("<== deleteMetaData(" + delDbName + ")"); 72 | } 73 | 74 | static private void cliCommond(String[] args) { 75 | Options opt = new Options(); 76 | opt.addOption("h", "help", false, "打印命令行帮助"); 77 | opt.addOption(OptionBuilder.withLongOpt("p") 78 | .withDescription("处理函数名称") 79 | .withValueSeparator('=') 80 | .hasArg() 81 | .create()); 82 | opt.addOption(OptionBuilder.withLongOpt("s") 83 | .withDescription("元数据库") 84 | .withValueSeparator('=') 85 | .hasArg() 86 | .create()); 87 | opt.addOption(OptionBuilder.withLongOpt("d") 88 | .withDescription("数据库名") 89 | .withValueSeparator('=') 90 | .hasArg() 91 | .create()); 92 | opt.addOption(OptionBuilder.withLongOpt("t") 93 | .withDescription("表名") 94 | .withValueSeparator('=') 95 | .hasArg() 96 | .create()); 97 | 98 | String formatstr = "DelMetaData --s= --d= --t= [-h/--help]"; 99 | 100 | HelpFormatter formatter = new HelpFormatter(); 101 | CommandLineParser parser = new PosixParser(); 102 | CommandLine cl = null; 103 | try { 104 | // 处理Options和参数 105 | cl = parser.parse(opt, args); 106 | } catch (ParseException e) { 107 | formatter.printHelp(formatstr, opt); // 如果发生异常,则打印出帮助信息 108 | } 109 | // 如果包含有-h或--help,则打印出帮助信息 110 | if (cl.hasOption("h")) { 111 | HelpFormatter hf = new HelpFormatter(); 112 | hf.printHelp(formatstr, "", opt, ""); 113 | System.exit(1); 114 | } 115 | if( cl.hasOption("s") ) { 116 | String tempDb = cl.getOptionValue("s"); 117 | if (!tempDb.equalsIgnoreCase("exchange_db")) { 118 | System.out.println("错误! 待删除的数据源名称不是 exchange_db ??"); 119 | System.exit(1); 120 | } 121 | MyBatisUtil.sourceName = tempDb; 122 | } else { 123 | System.out.println("missing --s arg"); 124 | HelpFormatter hf = new HelpFormatter(); 125 | hf.printHelp(formatstr, "", opt, ""); 126 | System.exit(1); 127 | } 128 | 129 | if( cl.hasOption("d") ) { 130 | del_database_name = cl.getOptionValue("d"); 131 | } else { 132 | System.out.println("missing --d arg"); 133 | HelpFormatter hf = new HelpFormatter(); 134 | hf.printHelp(formatstr, "", opt, ""); 135 | System.exit(1); 136 | } 137 | 138 | if( cl.hasOption("t") ) { 139 | del_table_name = cl.getOptionValue("t"); 140 | } else { 141 | System.out.println("missing --t arg"); 142 | HelpFormatter hf = new HelpFormatter(); 143 | hf.printHelp(formatstr, "", opt, ""); 144 | System.exit(1); 145 | } 146 | 147 | logger.debug("MyBatisUtil.sourceName : " + MyBatisUtil.sourceName + ", del_database_name: " + del_database_name + ", del_table_name: " + del_table_name); 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/apps/HdfsTest.java: -------------------------------------------------------------------------------- 1 | package com.netease.hivetools.apps; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.IOException; 5 | import java.io.InputStream; 6 | import java.io.InputStreamReader; 7 | import java.io.OutputStreamWriter; 8 | import java.security.PrivilegedExceptionAction; 9 | import java.util.Scanner; 10 | 11 | import org.apache.commons.cli.*; 12 | import org.apache.commons.io.IOUtils; 13 | import org.apache.hadoop.conf.Configuration; 14 | import org.apache.hadoop.fs.FSDataOutputStream; 15 | import org.apache.hadoop.fs.FileStatus; 16 | import 
org.apache.hadoop.fs.FileSystem; 17 | import org.apache.hadoop.fs.Path; 18 | import org.apache.hadoop.security.UserGroupInformation; 19 | import org.apache.log4j.Logger; 20 | 21 | /** 22 | * Created by hzliuxun on 17/2/6. 23 | */ 24 | public class HdfsTest { 25 | private static final Logger logger = Logger.getLogger(HdfsTest.class.getName()); 26 | 27 | public static String _proxyUser = ""; 28 | public static String _filePath = "/"; 29 | public static String _hadoop_cluster = "hadoop"; 30 | public static String _command = ""; 31 | public static Configuration _hadoop_conf = new Configuration(); 32 | 33 | public static void main(String[] argv) throws Exception { 34 | cliCommond(argv); 35 | 36 | Configuration conf = new Configuration(); 37 | HdfsTest hdfsClient = new HdfsTest(); 38 | 39 | hdfsClient.proxyTest(); 40 | } 41 | 42 | 43 | static private void cliCommond(String[] args) { 44 | Options opt = new Options(); 45 | opt.addOption("h", "help", false, "打印命令行帮助"); 46 | opt.addOption(OptionBuilder.withLongOpt("p") 47 | .withDescription("处理函数名称") 48 | .withValueSeparator('=') 49 | .hasArg() 50 | .create()); 51 | opt.addOption(OptionBuilder.withLongOpt("p") 52 | .withDescription("被代理用户名") 53 | .withValueSeparator('=') 54 | .hasArg() 55 | .create()); 56 | opt.addOption(OptionBuilder.withLongOpt("c") 57 | .withDescription("命令") 58 | .withValueSeparator('=') 59 | .hasArg() 60 | .create()); 61 | opt.addOption(OptionBuilder.withLongOpt("a") 62 | .withDescription("HDFS路径") 63 | .withValueSeparator('=') 64 | .hasArg() 65 | .create()); 66 | opt.addOption(OptionBuilder.withLongOpt("b") 67 | .withDescription("线上集群:hadoop | DS测试集群:ds") 68 | .withValueSeparator('=') 69 | .hasArg() 70 | .create()); 71 | 72 | String formatstr = "HdfsTest --p= --c= --a= --b= [-h/--help]"; 73 | 74 | HelpFormatter formatter = new HelpFormatter(); 75 | CommandLineParser parser = new PosixParser(); 76 | CommandLine cl = null; 77 | try { 78 | // 处理Options和参数 79 | cl = parser.parse(opt, args); 80 | } catch (ParseException e) { 81 | formatter.printHelp(formatstr, opt); // 如果发生异常,则打印出帮助信息 82 | } 83 | // 如果包含有-h或--help,则打印出帮助信息 84 | if (cl.hasOption("h")) { 85 | HelpFormatter hf = new HelpFormatter(); 86 | hf.printHelp(formatstr, "", opt, ""); 87 | System.exit(1); 88 | } 89 | if( cl.hasOption("p") ) { 90 | _proxyUser = cl.getOptionValue("p"); 91 | } else { 92 | System.out.println("missing --p arg"); 93 | HelpFormatter hf = new HelpFormatter(); 94 | hf.printHelp(formatstr, "", opt, ""); 95 | System.exit(1); 96 | } 97 | 98 | if( cl.hasOption("c") ) { 99 | _command = cl.getOptionValue("c"); 100 | } else { 101 | System.out.println("missing --c arg"); 102 | HelpFormatter hf = new HelpFormatter(); 103 | hf.printHelp(formatstr, "", opt, ""); 104 | System.exit(1); 105 | } 106 | 107 | if( cl.hasOption("a") ) { 108 | _filePath = cl.getOptionValue("a"); 109 | } else { 110 | System.out.println("missing --a arg"); 111 | HelpFormatter hf = new HelpFormatter(); 112 | hf.printHelp(formatstr, "", opt, ""); 113 | System.exit(1); 114 | } 115 | 116 | if( cl.hasOption("b") ) { 117 | _hadoop_cluster = cl.getOptionValue("b"); 118 | } else { 119 | System.out.println("missing --b arg"); 120 | HelpFormatter hf = new HelpFormatter(); 121 | hf.printHelp(formatstr, "", opt, ""); 122 | System.exit(1); 123 | } 124 | 125 | logger.debug("被代理用户 : " + _proxyUser + ", hadoop集群 : " + _hadoop_cluster + ", 运行HDFS命令: " + _command + ", HDFS路径: " + _filePath); 126 | } 127 | 128 | private void kerberosTest(FileSystem fs, String path) throws IOException { 129 | FileStatus[] 
fsStatus = fs.listStatus(new Path("/")); 130 | for (int i = 0; i < fsStatus.length; i++) { 131 | System.out.println(fsStatus[i].getPath().toString()); 132 | } 133 | } 134 | 135 | public void proxyTest() throws Exception { 136 | UserGroupInformation ugi = null; 137 | System.setProperty("java.security.krb5.conf", "krb5.conf"); 138 | if (_hadoop_cluster.equalsIgnoreCase("hadoop")) { 139 | _hadoop_conf.addResource("hadoop357.lt.163.org/core-site.xml"); 140 | _hadoop_conf.addResource("hadoop357.lt.163.org/hdfs-site.xml"); 141 | UserGroupInformation.setConfiguration(_hadoop_conf); 142 | UserGroupInformation.loginUserFromKeytab("hadoop/admin@HADOOP.HZ.NETEASE.COM", "/home/hadoop/yarn/conf/hadoop.keytab"); 143 | 144 | ugi = UserGroupInformation.createProxyUser(_proxyUser, UserGroupInformation.getLoginUser()); 145 | } else { 146 | _hadoop_conf.addResource("classb-ds-bigdata11.server.163.org/core-site.xml"); 147 | _hadoop_conf.addResource("classb-ds-bigdata11.server.163.org/hdfs-site.xml"); 148 | UserGroupInformation.setConfiguration(_hadoop_conf); 149 | UserGroupInformation.loginUserFromKeytab("hive/classb-ds-bigdata4.server.163.org@IF.HZ.NETEASE.COM", "/home/hadoop/yarn/conf/hive.keytab"); 150 | 151 | ugi = UserGroupInformation.createProxyUser(_proxyUser, UserGroupInformation.getLoginUser()); 152 | } 153 | 154 | System.out.println(" >>> Login User is: " + UserGroupInformation.getLoginUser().toString()); 155 | System.out.println(" >>> Proxy user is: " + ugi.getUserName()); 156 | System.out.println(" >>> current User is: " + UserGroupInformation.getCurrentUser().toString()); 157 | System.out.println(" >>> The credential is: " + ugi.getCredentials().toString()); 158 | 159 | ugi.doAs(new PrivilegedExceptionAction() { 160 | public Void run() throws Exception { 161 | System.out.println(">>> doAs current User is: " + UserGroupInformation.getCurrentUser().toString()); 162 | 163 | // _hadoop_conf.set("hadoop.security.authentication", "Kerberos"); 164 | // _hadoop_conf.addResource("hadoop357.lt.163.org/core-site.xml"); 165 | // _hadoop_conf.addResource("hadoop357.lt.163.org/hdfs-site.xml"); 166 | FileSystem dfs = FileSystem.get(_hadoop_conf); 167 | if (HdfsTest._command.equals("read")) { 168 | HdfsTest.this.read(dfs, HdfsTest._filePath); 169 | } else if (HdfsTest._command.equals("write")) { 170 | HdfsTest.this.write(dfs, HdfsTest._filePath); 171 | } else if (HdfsTest._command.equals("append")) { 172 | HdfsTest.this.append(dfs, HdfsTest._filePath); 173 | } else if (HdfsTest._command.equals("replicate")) { 174 | HdfsTest.this.setReplication(dfs, HdfsTest._filePath); 175 | } else if (HdfsTest._command.equals("status")) { 176 | HdfsTest.this.getStatus(dfs, HdfsTest._filePath); 177 | } else if (HdfsTest._command.equals("delete")) { 178 | HdfsTest.this.delete(dfs, HdfsTest._filePath); 179 | } else if (HdfsTest._command.equals("ls")) { 180 | HdfsTest.this.ls(dfs, HdfsTest._filePath); 181 | } else if (HdfsTest._command.equals("kerberos")) { 182 | HdfsTest.this.kerberosTest(dfs, HdfsTest._filePath); 183 | } else if (HdfsTest._command.equals("mkdir")) { 184 | HdfsTest.this.mkdir(dfs, HdfsTest._filePath); 185 | } 186 | return null; 187 | } 188 | }); 189 | } 190 | 191 | private void read(FileSystem fs, String path) throws IOException { 192 | System.out.println(" >>>Entering HelloHDFS.read()"); 193 | InputStream in = null; 194 | try { 195 | if (fs.exists(new Path(path)) != true) { 196 | System.out.println("/t>>>" + path + " is not exists!"); return; 197 | } 198 | in = fs.open(new Path(path)); 199 | IOUtils.copy(in, 
System.out); 200 | } finally { 201 | IOUtils.closeQuietly(in); 202 | } 203 | System.out.println(" <<<Exiting HelloHDFS.read()"); 204 | } 205 | 206 | private void mkdir(FileSystem dfs, String filePath) throws IOException { 207 | System.out.println(" >>>Entering HelloHDFS.mkdir()"); 208 | 209 | Path path = new Path(filePath); 210 | dfs.mkdirs(path); 211 | 212 | System.out.println(" <<<Exiting HelloHDFS.mkdir()"); 213 | } 214 | 215 | private void write(FileSystem dfs, String filePath) throws IOException { 216 | System.out.println(" >>>Entering HelloHDFS.create()"); 217 | 218 | Path path = new Path(filePath); 219 | FSDataOutputStream out = dfs.create(path); 220 | try { 221 | String words = "123456"; 222 | out.writeBytes(words); 223 | out.write(words.getBytes("UTF-8")); 224 | 225 | out.close(); 226 | } finally { 227 | out.close(); 228 | } 229 | System.out.println(" <<<Exiting HelloHDFS.create()"); 230 | } 231 | 232 | private void append(FileSystem fs, String filePath) throws IOException { 233 | System.out.println(" >>>Entering HelloHDFS.append()"); 234 | OutputStreamWriter out = null; 235 | try { 236 | out = new OutputStreamWriter(fs.append(new Path(filePath))); 237 | BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); 238 | String line = br.readLine(); 239 | while (line != null) { 240 | line = br.readLine(); 241 | if (line.equals("endfile")) { 242 | break; 243 | } 244 | IOUtils.write(line, out); 245 | } 246 | } finally { 247 | out.close(); 248 | } 249 | System.out.println(" <<<Exiting HelloHDFS.append()"); 250 | } 251 | 252 | private void getStatus(FileSystem fs, String filePath) throws IOException { 253 | System.out.println(" >>>Entering HelloHDFS.getStatus()"); 254 | FileStatus[] fileStatusList = fs.listStatus(new Path(filePath)); 255 | for (FileStatus fileStatus : fileStatusList) { 256 | System.out.println(fileStatus); 257 | } 258 | System.out.println(" <<<Exiting HelloHDFS.getStatus()"); 259 | } 260 | 261 | private void delete(FileSystem fs, String filePath) throws IOException { 262 | System.out.println(" >>>Entering HelloHDFS.delete()"); 263 | Path path = new Path(filePath); 264 | if (fs.exists(path)) { 265 | fs.delete(path, true); 266 | } 267 | System.out.println(" <<<Exiting HelloHDFS.delete()"); 268 | } 269 | 270 | private void setReplication(FileSystem dfs, String filePath) throws IOException { 271 | System.out.println(" >>>Entering HelloHDFS.setReplicationFactor()"); 272 | Scanner reader = new Scanner(System.in); 273 | 274 | System.out.println("Enter replication factor:"); 275 | short replicationFactor = reader.nextShort(); 276 | dfs.setReplication(new Path(filePath), replicationFactor); 277 | System.out.println(" <<<Exiting HelloHDFS.setReplicationFactor()"); 278 | } 279 | 280 | private void ls(FileSystem dfs, String filePattern) throws IOException { 281 | System.out.println(" >>>Entering HelloHDFS.search()"); 282 | 283 | FileStatus[] fsStatus = dfs.listStatus(new Path(filePattern)); 284 | for (int i = 0; i < fsStatus.length; i++) { 285 | System.out.println(fsStatus[i].getPath().toString()); 286 | } 287 | System.out.println(" <<<Exiting HelloHDFS.search()"); 288 | } 289 | } -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/apps/HiveTest.java: -------------------------------------------------------------------------------- [lines 1-66 not recoverable from this dump] 67 | ugi.doAs(new PrivilegedExceptionAction<Void>() { 68 | 69 | public Void run() throws Exception { 70 | */ 71 | Class.forName("org.apache.hive.jdbc.HiveDriver"); 72 | Connection conn1 = DriverManager.getConnection( 73 | "jdbc:hive2://hadoop354.lt.163.org:10000/default;principal=hive/app-20.photo.163.org@HADOOP.HZ.NETEASE.COM"); 74 | 75 | Connection conn = DriverManager.getConnection( 76 | "jdbc:hive2://hadoop354.lt.163.org:10000/" + defDbName + 77 | ";principal=hive/app-20.photo.163.org@HADOOP.HZ.NETEASE.COM;hive.server2.proxy.user=" + "hive" + 78 | "#ranger.user.name=" + rangerUser); 79 | Statement statement = conn.createStatement(); 80 | String sql = "show tables"; 81 | System.out.println("Running: " + sql); 82 | ResultSet res = statement.executeQuery(sql); 83 | System.out.println("Runned"); 84 | while (res.next()) { 85 | System.out.println(String.valueOf(res.getMetaData())); 86 | } 87 | /* 88 | return null; 89 | } 90 | }); 91 | */ 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/apps/Mammut.java: -------------------------------------------------------------------------------- 1 | package com.netease.hivetools.apps; 2 | 3 | import com.netease.hivetools.mammut.PfHiveSite; 4 | import com.netease.hivetools.mappers.MammutMapper; 5 | import com.netease.hivetools.service.MyBatisUtil; 6 | import org.apache.log4j.Logger; 7 | import org.apache.log4j.PropertyConfigurator; 8 | 9 | import java.util.List; 10 | 11 | /** 12 | * Created by hzliuxun on 16/11/11.
13 | */ 14 | public class Mammut { 15 | private static final Logger logger = Logger.getLogger(Mammut.class.getName()); 16 | 17 | public static void main(String[] args) { 18 | PropertyConfigurator.configure("log4j.properties"); 19 | 20 | MyBatisUtil.sourceName = "mammut"; 21 | MammutMapper mammutMapper = new MammutMapper("mammut"); 22 | List list = mammutMapper.getPfHivesite(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/apps/MetastoreChangelog.java: -------------------------------------------------------------------------------- 1 | package com.netease.hivetools.apps; 2 | 3 | import org.apache.commons.cli.*; 4 | import org.apache.commons.logging.Log; 5 | import org.apache.commons.logging.LogFactory; 6 | import org.apache.curator.framework.CuratorFrameworkFactory; 7 | import org.apache.curator.framework.api.ACLProvider; 8 | import org.apache.curator.framework.api.GetChildrenBuilder; 9 | import org.apache.curator.framework.state.ConnectionState; 10 | import org.apache.curator.framework.state.ConnectionStateListener; 11 | import org.apache.curator.retry.RetryNTimes; 12 | import org.apache.hadoop.security.UserGroupInformation; 13 | import org.apache.log4j.Logger; 14 | import org.apache.log4j.PropertyConfigurator; 15 | import org.apache.curator.framework.CuratorFramework; 16 | import org.apache.ranger.binding.metastore.thrift.TUpdateMetadataRequest; 17 | import org.apache.thrift.protocol.TProtocol; 18 | import org.apache.thrift.transport.TMemoryBuffer; 19 | import org.apache.zookeeper.ZooDefs; 20 | import org.apache.zookeeper.data.ACL; 21 | import org.apache.zookeeper.data.Stat; 22 | 23 | import java.io.IOException; 24 | import java.util.ArrayList; 25 | import java.util.Calendar; 26 | import java.util.Date; 27 | import java.util.List; 28 | 29 | /** 30 | * Created by hzliuxun on 17/4/1. 
31 | */ 32 | 33 | class zkListener implements ConnectionStateListener { 34 | public static final Log LOGGER = LogFactory.getLog(MetastoreChangelog.class); 35 | 36 | @Override 37 | public void stateChanged(CuratorFramework client, ConnectionState state) { 38 | switch (state) { 39 | case LOST: 40 | LOGGER.error("DistributedLock lost session with zookeeper"); 41 | break; 42 | case CONNECTED: 43 | LOGGER.warn("DistributedLock connected with zookeeper"); 44 | break; 45 | case RECONNECTED: 46 | LOGGER.warn("DistributedLock reconnected with zookeeper"); 47 | break; 48 | } 49 | } 50 | } 51 | 52 | public class MetastoreChangelog { 53 | private static final Logger LOGGER = Logger.getLogger(MetastoreChangelog.class.getName()); 54 | 55 | protected static CuratorFramework zkClient; 56 | private static zkListener listener = null; 57 | private static String zkHost = ""; 58 | private static String zkPath = ""; 59 | private static String filte_database = ""; 60 | private static String filte_table = ""; 61 | private final static String MAX_ID_FILE_NAME = "/maxid"; 62 | private final static String LOCK_RELATIVE_PATH = "/lock"; 63 | 64 | public static void main(String[] args) { 65 | PropertyConfigurator.configure("log4j.properties"); 66 | 67 | cliCommond(args); 68 | 69 | try { 70 | setUpZooKeeperAuth(); 71 | getSingletonClient(); 72 | 73 | // deleteZNodeData(); 74 | listZNodeData(); 75 | } catch (IOException e) { 76 | LOGGER.error(e.getMessage()); 77 | } catch (Exception e) { 78 | e.printStackTrace(); 79 | } 80 | } 81 | 82 | private static void setUpZooKeeperAuth() throws IOException { 83 | /* 84 | if (UserGroupInformation.isSecurityEnabled()) { 85 | String principal = hiveConf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL); 86 | if (StringUtils.isEmpty(principal)) { 87 | throw new IOException("Hive Metastore Kerberos principal is empty"); 88 | } 89 | String keyTabFile = hiveConf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_KEYTAB_FILE); 90 | if (StringUtils.isEmpty(keyTabFile)) { 91 | throw new IOException("Hive Metastore Kerberos keytab is empty"); 92 | } 93 | // Install the JAAS Configuration for the runtime 94 | Utils.setZookeeperClientKerberosJaasConfig(principal, keyTabFile); 95 | } 96 | */ 97 | } 98 | 99 | private final static ACLProvider zooKeeperAclProvider = new ACLProvider() { 100 | List nodeAcls = new ArrayList(); 101 | 102 | @Override 103 | public List getDefaultAcl() { 104 | if (UserGroupInformation.isSecurityEnabled()) { 105 | // Read all to the world 106 | nodeAcls.addAll(ZooDefs.Ids.READ_ACL_UNSAFE); 107 | // Create/Delete/Write/Admin to the authenticated user 108 | nodeAcls.add(new ACL(ZooDefs.Perms.ALL, ZooDefs.Ids.AUTH_IDS)); 109 | } else { 110 | // ACLs for znodes on a non-kerberized cluster 111 | // Create/Read/Delete/Write/Admin to the world 112 | nodeAcls.addAll(ZooDefs.Ids.OPEN_ACL_UNSAFE); 113 | } 114 | return nodeAcls; 115 | } 116 | 117 | @Override 118 | public List getAclForPath(String path) { 119 | return getDefaultAcl(); 120 | } 121 | }; 122 | 123 | private static void getSingletonClient() throws Exception { 124 | if (zkClient == null) { 125 | synchronized (MetastoreChangelog.class) { 126 | if (zkClient == null) { 127 | zkClient = 128 | CuratorFrameworkFactory 129 | .builder() 130 | .connectString(zkHost) 131 | // .aclProvider(zooKeeperAclProvider) 132 | .retryPolicy( 133 | new RetryNTimes(3, 3000)) 134 | .build(); 135 | listener = new zkListener(); 136 | zkClient.getConnectionStateListenable().addListener(listener); 137 | zkClient.start(); 138 | } 139 | } 140 | } 141 | } 
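// listZNodeData() below walks the changelog znodes under zkPath: it skips the
// /maxid and /lock bookkeeping nodes, deserializes each remaining child into a
// TUpdateMetadataRequest via TJSONProtocol, and logs the entries whose
// database/table names match the --d/--t filters.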
142 | 143 | private static void listZNodeData() { 144 | if(LOGGER.isDebugEnabled()) { 145 | LOGGER.debug("==> writeZNodeData()"); 146 | } 147 | 148 | try { 149 | GetChildrenBuilder childrenBuilder = zkClient.getChildren(); 150 | List children = childrenBuilder.forPath(zkPath); 151 | 152 | int index = 0; 153 | for (String child : children) { 154 | child = "/" + child; 155 | if (child.equalsIgnoreCase(LOCK_RELATIVE_PATH) 156 | || child.equalsIgnoreCase(MAX_ID_FILE_NAME)) { 157 | // do not delete maxid and lock file 158 | continue; 159 | } 160 | String childPath = zkPath + child; 161 | Stat stat = zkClient.checkExists().forPath(childPath); 162 | if (null != stat ) { 163 | byte[] bytes = zkClient.getData().forPath(childPath); 164 | TUpdateMetadataRequest tUpdateMetadataRequest = new TUpdateMetadataRequest(); 165 | TMemoryBuffer tmb = new TMemoryBuffer(8); 166 | tmb.write(bytes); 167 | TProtocol tp = new org.apache.thrift.protocol.TJSONProtocol(tmb); 168 | tUpdateMetadataRequest.read(tp); 169 | 170 | if (tUpdateMetadataRequest.getDeltas() == null) { 171 | continue; 172 | } else { 173 | for (int i = 0; i < tUpdateMetadataRequest.getDeltas().size(); i++) { 174 | String dbName = tUpdateMetadataRequest.getDeltas().get(i).getDatabase(); 175 | String tabName = tUpdateMetadataRequest.getDeltas().get(i).getTable(); 176 | 177 | if (!filte_database.isEmpty() && !dbName.contains(filte_database)) { 178 | continue; 179 | } 180 | if (!filte_table.isEmpty() && !tabName.contains(filte_table)) { 181 | continue; 182 | } 183 | LOGGER.debug(" --- " + childPath + " --- "); 184 | LOGGER.debug(tUpdateMetadataRequest.toString()); 185 | // LOGGER.debug(tUpdateMetadataRequest.getDeltas().get(i).toString()); 186 | break; 187 | } 188 | } 189 | } 190 | } 191 | } catch (Exception e) { 192 | e.printStackTrace(); 193 | } finally { 194 | } 195 | 196 | if(LOGGER.isDebugEnabled()) { 197 | LOGGER.debug("<== writeZNodeData()"); 198 | } 199 | } 200 | 201 | private static void deleteZNodeData() { 202 | if(LOGGER.isDebugEnabled()) { 203 | LOGGER.debug("==> writeZNodeData()"); 204 | } 205 | 206 | Date now = new Date(); 207 | Calendar calendar = Calendar.getInstance(); 208 | calendar.setTime(now); 209 | calendar.set(Calendar.DATE, calendar.get(Calendar.DATE) - 3); 210 | Long day3Time = calendar.getTime().getTime(); 211 | 212 | int deleted = Integer.parseInt(filte_table); 213 | 214 | try { 215 | GetChildrenBuilder childrenBuilder = zkClient.getChildren(); 216 | List children = childrenBuilder.forPath(zkPath); 217 | 218 | for (String child : children) { 219 | if (deleted -- <= 0) { 220 | return; 221 | } 222 | child = "/" + child; 223 | if (child.equalsIgnoreCase(LOCK_RELATIVE_PATH) 224 | || child.equalsIgnoreCase(MAX_ID_FILE_NAME)) { 225 | // do not delete maxid and lock file 226 | continue; 227 | } 228 | String childPath = zkPath + child; 229 | Stat stat = zkClient.checkExists().forPath(childPath); 230 | if (null != stat ) { 231 | if (stat.getMtime() < day3Time) { 232 | LOGGER.debug("delete " + childPath); 233 | zkClient.delete().forPath(childPath); 234 | } 235 | } 236 | } 237 | } catch (Exception e) { 238 | e.printStackTrace(); 239 | } finally { 240 | } 241 | 242 | if(LOGGER.isDebugEnabled()) { 243 | LOGGER.debug("<== writeZNodeData()"); 244 | } 245 | } 246 | 247 | static private void cliCommond(String[] args) { 248 | Options opt = new Options(); 249 | opt.addOption("h", "help", false, "打印命令行帮助"); 250 | opt.addOption(OptionBuilder.withLongOpt("z") 251 | .withDescription("zookeeper服务器地址") 252 | .withValueSeparator('=') 253 | .hasArg() 
247 |   private static void cliCommand(String[] args) {
248 |     Options opt = new Options();
249 |     opt.addOption("h", "help", false, "print command-line help");
250 |     opt.addOption(OptionBuilder.withLongOpt("z")
251 |         .withDescription("ZooKeeper server address")
252 |         .withValueSeparator('=')
253 |         .hasArg()
254 |         .create());
255 |     opt.addOption(OptionBuilder.withLongOpt("c")
256 |         .withDescription("MetastoreChangelog path in ZooKeeper")
257 |         .withValueSeparator('=')
258 |         .hasArg()
259 |         .create());
260 |     opt.addOption(OptionBuilder.withLongOpt("d")
261 |         .withDescription("database name")
262 |         .withValueSeparator('=')
263 |         .hasOptionalArg()
264 |         .create());
265 |     opt.addOption(OptionBuilder.withLongOpt("t")
266 |         .withDescription("table name")
267 |         .withValueSeparator('=')
268 |         .hasOptionalArg()
269 |         .create());
270 | 
271 |     String formatstr = "MetastoreChangelog --z=<zkHost> --c=<zkPath> --d=<database> --t=<table> [-h/--help]";
272 | 
273 |     HelpFormatter formatter = new HelpFormatter();
274 |     CommandLineParser parser = new PosixParser();
275 |     CommandLine cl = null;
276 |     try {
277 |       // parse the options and arguments
278 |       cl = parser.parse(opt, args);
279 |     } catch (ParseException e) {
280 |       formatter.printHelp(formatstr, opt); System.exit(1); // print help and exit; otherwise cl stays null below
281 |     }
282 |     // print help if -h or --help was given
283 |     if (cl.hasOption("h")) {
284 |       HelpFormatter hf = new HelpFormatter();
285 |       hf.printHelp(formatstr, "", opt, "");
286 |       System.exit(1);
287 |     }
288 |     if (cl.hasOption("z")) {
289 |       zkHost = cl.getOptionValue("z");
290 |     } else {
291 |       System.out.println("missing --z arg");
292 |       HelpFormatter hf = new HelpFormatter();
293 |       hf.printHelp(formatstr, "", opt, "");
294 |       System.exit(1);
295 |     }
296 |     if (cl.hasOption("c")) {
297 |       zkPath = cl.getOptionValue("c");
298 |     } else {
299 |       System.out.println("missing --c arg");
300 |       HelpFormatter hf = new HelpFormatter();
301 |       hf.printHelp(formatstr, "", opt, "");
302 |       System.exit(1);
303 |     }
304 |     if (cl.hasOption("d")) {
305 |       filterDatabase = cl.getOptionValue("d");
306 |     }
307 | 
308 |     if (cl.hasOption("t")) {
309 |       filterTable = cl.getOptionValue("t");
310 |     }
311 |   }
312 | }
313 | 
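For reference, the decode that listZNodeData() applies to each changelog znode can be exercised on its own. A minimal sketch, assuming the generated thrift classes from gen-java are on the classpath and the payload has already been read from ZooKeeper (the class name DecodeChangelogEntry and the "{}" stand-in payload are illustrative, not part of the tool):

    import org.apache.thrift.TException;
    import org.apache.thrift.protocol.TJSONProtocol;
    import org.apache.thrift.transport.TMemoryBuffer;
    import org.apache.ranger.binding.metastore.thrift.TUpdateDelta;
    import org.apache.ranger.binding.metastore.thrift.TUpdateMetadataRequest;

    public class DecodeChangelogEntry {
        // Deserialize one JSON-encoded TUpdateMetadataRequest and return it.
        static TUpdateMetadataRequest decode(byte[] payload) throws TException {
            TMemoryBuffer buffer = new TMemoryBuffer(payload.length); // grows as needed anyway
            buffer.write(payload);
            TUpdateMetadataRequest request = new TUpdateMetadataRequest();
            request.read(new TJSONProtocol(buffer));
            return request;
        }

        public static void main(String[] args) throws TException {
            byte[] payload = "{}".getBytes(); // stand-in; a real payload comes from zkClient.getData()
            TUpdateMetadataRequest request = decode(payload);
            if (request.getDeltas() != null) {
                for (TUpdateDelta delta : request.getDeltas()) {
                    System.out.println(delta.getDatabase() + "." + delta.getTable());
                }
            }
        }
    }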
--------------------------------------------------------------------------------
/src/main/java/com/netease/hivetools/apps/SchemaToMetaBean.java:
--------------------------------------------------------------------------------
1 | package com.netease.hivetools.apps;
2 | 
3 | import com.alibaba.druid.sql.SQLUtils;
4 | import com.alibaba.druid.sql.ast.SQLDataType;
5 | import com.alibaba.druid.sql.ast.SQLStatement;
6 | import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
7 | import com.alibaba.druid.sql.ast.statement.SQLTableElement;
8 | import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
9 | import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlSchemaStatVisitor;
10 | import com.alibaba.druid.util.JdbcConstants;
11 | import com.sun.codemodel.*;
12 | import org.apache.log4j.Logger;
13 | 
14 | import java.io.*;
15 | import java.sql.Blob;
16 | import java.util.ArrayList;
17 | import java.util.List;
18 | import java.util.regex.Matcher;
19 | import java.util.regex.Pattern;
20 | 
21 | /**
22 |  * Created by hzliuxun on 16/10/20.
23 |  */
24 | public class SchemaToMetaBean {
25 |   private static final Logger logger = Logger.getLogger(SchemaToMetaBean.class.getName());
26 | 
27 |   public static void main(String[] args) {
28 | 
29 |     String fileContext = readSchemaFile(System.getProperty("user.dir") + "/src/main/resources/Hive-DDL-MySQL-CREATE.sql");
30 | 
31 |     List<String> createStatement = new ArrayList<>();
32 | 
33 |     // match every "CREATE TABLE ... ENGINE=InnoDB DEFAULT CHARSET=latin1;" statement
34 |     // in the schema file (read as one long line, see readSchemaFile below)
35 |     String regex = "CREATE TABLE.*?ENGINE=InnoDB DEFAULT CHARSET=latin1;";
36 | 
37 |     Pattern pattern = Pattern.compile(regex);
38 |     Matcher matcher = pattern.matcher(fileContext);
39 |     while (matcher.find()) {
40 |       String statement = matcher.group();
41 |       createStatement.add(statement);
42 |       schemaToJavaBean(statement);
43 |     }
44 |   }
45 | 
46 |   static String readSchemaFile(String fileName) {
47 |     File file = new File(fileName);
48 |     BufferedReader reader = null;
49 |     StringBuffer sbFileContext = new StringBuffer("");
50 |     try {
51 |       reader = new BufferedReader(new FileReader(file));
52 |       String tempString = null;
53 |       while ((tempString = reader.readLine()) != null) {
54 |         sbFileContext.append(tempString); // newlines are dropped on purpose so each DDL statement is matchable as one line
55 |       }
56 |       reader.close();
57 |     } catch (IOException e) {
58 |       e.printStackTrace();
59 |     } finally {
60 |       if (reader != null) {
61 |         try {
62 |           reader.close();
63 |         } catch (IOException e1) {
64 |         }
65 |       }
66 |     }
67 | 
68 |     return sbFileContext.toString();
69 |   }
70 | 
71 |   public static void schemaToJavaBean(String statement) {
72 |     try {
73 |       String result = SQLUtils.format(statement, JdbcConstants.MYSQL);
74 |       logger.info(result);
75 |       List<SQLStatement> stmtList = SQLUtils.parseStatements(statement, JdbcConstants.MYSQL);
76 |       for (int i = 0; i < stmtList.size(); i++) {
77 |         SQLStatement sqlStatement = stmtList.get(i);
78 | 
79 |         MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
80 |         sqlStatement.accept(visitor);
81 | 
82 |         MySqlCreateTableStatement mySqlCreateTableStatement = (MySqlCreateTableStatement) sqlStatement;
83 | 
84 |         String tableName = mySqlCreateTableStatement.getTableSource().toString().toUpperCase().replaceAll("`", "");
85 |         tableName = formatTableColumnName(tableName, true);
86 | 
87 |         JCodeModel jCodeModel = new JCodeModel();
88 |         File destDir = new File(System.getProperty("user.dir") + "/src/main/java/");
89 |         JPackage jPackage = jCodeModel._package("com.netease.hivetools.meta");
90 |         JDefinedClass jDefinedClass = jPackage._class(JMod.PUBLIC, tableName, ClassType.CLASS);
91 | 
92 |         // "init" method: note it is generated as a void method named after the class, not a real constructor
93 |         JMethod initMethod = jDefinedClass.method(JMod.PUBLIC, jCodeModel.VOID, tableName);
94 | 
95 |         List<SQLTableElement> tableElementList = mySqlCreateTableStatement.getTableElementList();
96 |         for (SQLTableElement element : tableElementList) {
97 |           if (!(element instanceof SQLColumnDefinition)) {
98 |             continue;
99 |           }
100 | 
101 |           String eleName = ((SQLColumnDefinition) element).getName().toString().replaceAll("`", "").toLowerCase();
102 |           String colName = formatTableColumnName(eleName, false);
103 |           String ColName = formatTableColumnName(eleName, true);
104 |           SQLDataType colDataType = ((SQLColumnDefinition) element).getDataType();
105 | 
106 |           Class<?> dataTypeClass = null;
107 |           if (colDataType.getName().equals("string")
108 |               || colDataType.getName().equals("varchar")
109 |               || colDataType.getName().equals("mediumtext")
110 |               || colDataType.getName().equals("text")
111 |               || colDataType.getName().equals("char")) {
112 | 
113 |             dataTypeClass = String.class;
114 |           } else if (colDataType.getName().equals("blob")) {
115 |             dataTypeClass = Blob.class;
116 |           } else if (colDataType.getName().equals("int")) {
117 |             dataTypeClass = Long.class;
118 |           } else if (colDataType.getName().equals("tinyint")
119 |               || colDataType.getName().equals("mediumint")
120 |               || colDataType.getName().equals("smallint")) {
121 |             dataTypeClass = Integer.class;
122 |           } else if (colDataType.getName().equals("bit")) {
123 |             dataTypeClass = Boolean.class;
124 |           } else if (colDataType.getName().equals("bigint")) {
125 |             dataTypeClass = Long.class;
126 |           } else if (colDataType.getName().equals("float")) {
127 |             dataTypeClass = Float.class;
128 |           } else if (colDataType.getName().equals("double")) {
129 |             dataTypeClass = Double.class;
130 |           } else {
131 |             System.out.println("unknown data type : " + colDataType.toString());
132 |             continue;
133 |           }
134 | 
135 |           // field definition
136 |           JFieldVar jFieldVar = jDefinedClass.field(JMod.PRIVATE, dataTypeClass, eleName);
137 | 
138 |           // setter
139 |           JMethod setMethod = jDefinedClass.method(JMod.PUBLIC, jCodeModel.VOID, "set" + ColName);
140 |           setMethod.param(dataTypeClass, colName + "_");
141 |           JBlock setBlock = setMethod.body();
142 |           JFieldRef setFieldRef = JExpr.ref(colName + "_");
143 |           setBlock.assign(jFieldVar, setFieldRef);
144 | 
145 |           // getter
146 |           JMethod getMethod = jDefinedClass.method(JMod.PUBLIC, dataTypeClass, "get" + ColName);
147 |           JBlock getBlock = getMethod.body();
148 |           JFieldRef getFieldRef = JExpr.ref(eleName);
149 |           getBlock._return(getFieldRef);
150 |         }
151 |         jCodeModel.build(destDir);
152 |       }
153 |     } catch (Exception ex) {
154 |       ex.printStackTrace();
155 |     }
156 |   }
157 | 
158 |   public static String formatTableColumnName(String name, boolean firstUpper) {
159 |     // format an underscore_separated name into CamelCase (first word capitalized only when firstUpper is set)
160 |     String[] names = name.split("_");
161 |     String newName = "";
162 |     int numNames = names.length;
163 | 
164 |     if (numNames < 2) {
165 |       if (!firstUpper) {
166 |         newName = name.toLowerCase();
167 |       } else {
168 |         String firstChar = name.substring(0, 1);
169 | 
170 |         String tmp = name.substring(1, name.length()).toLowerCase();
171 |         newName = firstChar.toUpperCase() + tmp;
172 |       }
173 |     } else {
174 |       for (int n = 0; n < names.length; n++) {
175 |         String tmp = names[n];
176 |         tmp = tmp.toLowerCase();
177 | 
178 |         if (!firstUpper && n == 0) {
179 |           newName = newName + tmp;
180 |           continue;
181 |         }
182 |         int len = tmp.length();
183 |         String firstChar = tmp.substring(0, 1);
184 |         newName = newName + firstChar.toUpperCase() + tmp.substring(1, len);
185 |       }
186 |     }
187 | 
188 |     return newName;
189 |   }
190 | }
191 | 
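To make the generator concrete: formatTableColumnName("part_key_val", true) yields "PartKeyVal" and formatTableColumnName("part_key_val", false) yields "partKeyVal", so fields keep the lowercased column name while accessors are camel-cased. A hypothetical two-column table (DEMO_ID bigint, DEMO_NAME varchar; DEMO_TABLE is made up for illustration) would come out as:

    public class DemoTable {
        private Long demo_id;
        private String demo_name;
        public void DemoTable() { }  // the generated "init" method; a void method, not a constructor
        public void setDemoId(Long demoId_) { demo_id = demoId_; }
        public Long getDemoId() { return demo_id; }
        public void setDemoName(String demoName_) { demo_name = demoName_; }
        public String getDemoName() { return demo_name; }
    }

This is exactly the shape of the generated beans under com.netease.hivetools.meta later in this listing, including their void pseudo-constructors.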
--------------------------------------------------------------------------------
/src/main/java/com/netease/hivetools/mammut/PfHiveSite.java:
--------------------------------------------------------------------------------
1 | package com.netease.hivetools.mammut;
2 | 
3 | /**
4 |  * Created by hzliuxun on 16/11/11.
5 |  */
6 | public class PfHiveSite {
7 |   public String id = "";
8 |   public String name = "";
9 |   public String email = "";
10 |   public String cluster = "";
11 |   public String xml = "";
12 |   public String connectionURL = "";
13 |   public String connectionUserName = "";
14 |   public String connectionPassword = "";
15 |   public String product = "";
16 | 
17 |   public String toString() {
18 |     return "id=" + this.id + ", cluster=" + this.cluster + ", product=" + this.product + ", name=" + this.name
19 |         + ", email=" + this.email + ", connectionURL=" + this.connectionURL
20 |         + ", connectionUserName=" + this.connectionUserName + ", connectionPassword=" + this.connectionPassword;
21 |   }
22 | }
23 | 
--------------------------------------------------------------------------------
/src/main/java/com/netease/hivetools/mappers/MammutMapper.java:
--------------------------------------------------------------------------------
1 | package com.netease.hivetools.mappers;
2 | 
3 | import com.netease.hivetools.mammut.PfHiveSite;
4 | import com.netease.hivetools.service.MyBatisUtil;
5 | import org.apache.hadoop.conf.Configuration;
6 | import org.apache.ibatis.session.SqlSession;
7 | import org.apache.log4j.Logger;
8 | 
9 | import java.io.ByteArrayInputStream;
10 | import java.io.InputStream;
11 | import java.util.List;
12 | 
13 | 
14 | public class MammutMapper {
15 |   private static final Logger logger = Logger.getLogger(MammutMapper.class.getName());
16 |   private String sourceName;
17 | 
18 |   public MammutMapper(String sourceName) {
19 |     this.sourceName = sourceName;
20 |   }
21 | 
22 |   public List<PfHiveSite> getPfHivesite() {
23 |     logger.info("getPfHivesite >>>>>> ");
24 |     List<PfHiveSite> list = null;
25 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
26 |     try {
27 |       String statement = "com.netease.hivetools.mappers.MammutMapper.getPfHivesite";
28 |       list = sqlSession.selectList(statement);
29 |     } catch (Exception e) {
30 |       e.printStackTrace();
31 |       logger.error(e.getMessage());
32 |     } finally {
33 |       sqlSession.close();
34 |     }
35 | 
36 |     if (list == null) return list; // the query failed; nothing to post-process
37 |     for (PfHiveSite pfHiveSite : list) {
38 |       Configuration conf = new Configuration(true);
39 |       InputStream inputStream = new ByteArrayInputStream(pfHiveSite.xml.getBytes());
40 |       conf.addResource(inputStream);
41 | 
42 |       pfHiveSite.connectionURL = conf.get("javax.jdo.option.ConnectionURL");
43 |       pfHiveSite.connectionUserName = conf.get("javax.jdo.option.ConnectionUserName");
44 |       pfHiveSite.connectionPassword = conf.get("javax.jdo.option.ConnectionPassword");
45 | 
46 |       logger.info(pfHiveSite.toString()); // note: toString() includes the plaintext connection password
47 |     }
48 |     logger.info("getPfHivesite <<<<<<<< ");
49 |     return list;
50 |   }
51 | }
52 | 
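Each row's hive-site.xml blob is parsed with Hadoop's Configuration to recover that cluster's metastore JDBC settings. A minimal calling sketch (the "mammut" data-source name is an assumption here; the real keys live in hive-tools.properties):

    // Illustrative only: list every cluster's metastore JDBC URL.
    MammutMapper mapper = new MammutMapper("mammut"); // assumed data-source name
    List<PfHiveSite> sites = mapper.getPfHivesite();
    if (sites != null) {
        for (PfHiveSite site : sites) {
            System.out.println(site.cluster + " -> " + site.connectionURL);
        }
    }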
--------------------------------------------------------------------------------
/src/main/java/com/netease/hivetools/mappers/MetaDataMapper.java:
--------------------------------------------------------------------------------
1 | package com.netease.hivetools.mappers;
2 | 
3 | import com.netease.hivetools.apps.SchemaToMetaBean;
4 | import com.netease.hivetools.meta.Dbs;
5 | import com.netease.hivetools.meta.SerdeParams;
6 | import com.netease.hivetools.meta.Tbls;
7 | import com.netease.hivetools.service.MyBatisUtil;
8 | import org.apache.ibatis.session.SqlSession;
9 | import org.apache.log4j.Logger;
10 | 
11 | 
12 | import java.util.*;
13 | 
14 | 
15 | public class MetaDataMapper {
16 |   private static final Logger logger = Logger.getLogger(MetaDataMapper.class.getName());
17 |   private String sourceName;
18 | 
19 |   public MetaDataMapper(String sourceName) {
20 |     this.sourceName = sourceName;
21 |   }
22 | 
23 |   // table => Table
24 |   private String formatTableName(String tabName) {
25 |     String firstChar = tabName.substring(0, 1);
26 |     String tmp = tabName.substring(1, tabName.length()).toLowerCase();
27 |     tabName = firstChar.toUpperCase() + tmp;
28 | 
29 |     return tabName;
30 |   }
31 | 
32 |   public List<Object> getDbsRecords(String tabName) {
33 |     List<Object> list = null;
34 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
35 |     try {
36 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.getTabsRecords";
37 |       list = sqlSession.selectList(statement);
38 |     } catch (Exception e) {
39 |       e.printStackTrace();
40 |       logger.error(e.getMessage());
41 |     } finally {
42 |       sqlSession.close();
43 |     }
44 |     return list;
45 |   }
46 | 
47 |   public List<Object> getTableRecords(String tabName, Map<String, Object> params) {
48 |     List<Object> list = null;
49 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
50 |     try {
51 |       tabName = SchemaToMetaBean.formatTableColumnName(tabName, true);
52 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.get" + tabName + "Records";
53 | 
54 |       if (null == params) {
55 |         // default: match every database and db_id
56 |         params = new HashMap<String, Object>();
57 |         params.put("database_name", "%");
58 |         params.put("db_id", "%");
59 |       }
60 | 
61 |       list = sqlSession.selectList(statement, params);
62 |     } catch (Exception e) {
63 |       e.printStackTrace();
64 |       logger.error(e.getMessage());
65 |     } finally {
66 |       sqlSession.close();
67 |     }
68 |     return list;
69 |   }
70 | 
71 |   public List<Object> getPagingTableRecords(String tabName, HashMap<String, Object> mapPagingId) {
72 |     List<Object> list = null;
73 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
74 |     try {
75 |       tabName = SchemaToMetaBean.formatTableColumnName(tabName, true);
76 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.getPaging" + tabName + "Records";
77 |       Map<String, Object> params = new HashMap<String, Object>();
78 |       params.put("mapPagindId", mapPagingId); // key spelling must match MetaDataMapper.xml
79 |       list = sqlSession.selectList(statement, params);
80 |     } catch (Exception e) {
81 |       e.printStackTrace();
82 |       logger.error(e.getMessage());
83 |     } finally {
84 |       sqlSession.close();
85 |     }
86 |     return list;
87 |   }
88 | 
89 |   public int getTableMaxId(String tabName) {
90 |     int maxId = 0;
91 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
92 |     try {
93 |       tabName = SchemaToMetaBean.formatTableColumnName(tabName, true);
94 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.get" + tabName + "MaxId";
95 |       maxId = sqlSession.selectOne(statement);
96 |       logger.info("getTableMaxId " + tabName + " maxId = " + maxId);
97 |     } catch (Exception e) {
98 |       e.printStackTrace();
99 |       logger.error(e.getMessage());
100 |     } finally {
101 |       sqlSession.close();
102 |     }
103 |     return maxId;
104 |   }
105 | 
106 |   public boolean deleteTable(Tbls tbls) {
107 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
108 |     try {
109 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.deleteTbls";
110 |       int delCount = sqlSession.delete(statement, tbls);
111 |       sqlSession.commit();
112 |       logger.info("--- deleteTable[" + tbls.getTblName() + "] --- delete count = " + delCount);
113 |     } catch (Exception e) {
114 |       e.printStackTrace();
115 |       logger.error(e.getMessage());
116 |       return false;
117 |     } finally {
118 |       sqlSession.close();
119 |     }
120 |     return true;
121 |   }
122 | 
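  // All of the per-table statements above are resolved by name: the physical table name
  // (e.g. "TBLS") is camel-cased to "Tbls" and appended to a prefix such as "get...Records",
  // "get...MaxId" or "getPaging...Records", which must match a statement id in
  // MetaDataMapper.xml. Adding a new metastore table therefore means adding the matching
  // statements to the XML rather than touching this class.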
123 |   public boolean deleteDatabase(Dbs dbs) {
124 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
125 |     try {
126 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.deleteDbs";
127 |       int delCount = sqlSession.delete(statement, dbs);
128 |       sqlSession.commit();
129 |       logger.info("=== deleteDatabase[" + dbs.getName() + "] === delete count = " + delCount);
130 |     } catch (Exception e) {
131 |       e.printStackTrace();
132 |       logger.error(e.getMessage());
133 |       return false;
134 |     } finally {
135 |       sqlSession.close();
136 |     }
137 |     return true;
138 |   }
139 | 
140 |   public int getTableMinId(String tabName) {
141 |     int minId = 0;
142 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
143 |     try {
144 |       tabName = SchemaToMetaBean.formatTableColumnName(tabName, true);
145 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.get" + tabName + "MinId";
146 |       minId = sqlSession.selectOne(statement);
147 |       logger.info("getTableMinId " + tabName + " minId = " + minId);
148 |     } catch (Exception e) {
149 |       e.printStackTrace();
150 |       logger.error(e.getMessage());
151 |     } finally {
152 |       sqlSession.close();
153 |     }
154 |     return minId;
155 |   }
156 | 
157 |   public int batchInsert(String tabName, List<Object> list, HashMap<String, Object> mapPlusId) {
158 |     int numInsert = 0;
159 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
160 |     List<Object> tmpSubList = null;
161 |     try {
162 |       String tableName = SchemaToMetaBean.formatTableColumnName(tabName, true);
163 |       logger.info("batch insert into table " + tabName + " plusId = " + mapPlusId.get(tabName));
164 | 
165 | 
166 | 
167 | 
168 | 
169 |       List<List<Object>> splitList = MetaDataMapper.getSplitList(list, 100);
170 |       int index = 0;
171 |       for (List<Object> subList : splitList) {
172 |         Map<String, Object> params = new HashMap<String, Object>();
173 |         params.put("mapPlusId", mapPlusId);
174 |         params.put("list", subList);
175 | 
176 |         tmpSubList = subList;
177 | 
178 |         String statement = "com.netease.hivetools.mappers.MetaDataMapper.batchInsert" + tableName;
179 |         numInsert += sqlSession.insert(statement, params);
180 |         int progress = ((index++ + 1) * 100 / splitList.size());
181 |         logger.info("batch insert into table " + tabName + " progress [" + progress + "%]");
182 |         sqlSession.commit();
183 |       }
184 |       logger.info("batch insert into table " + tabName + " total records [" + list.size() + "]");
185 |     } catch (Exception e) {
186 |       e.printStackTrace();
187 |       logger.error(e.getMessage());
188 | 
189 |       if (tmpSubList != null) for (Object object : tmpSubList) {
190 |         if (object.getClass() == SerdeParams.class) {
191 |           SerdeParams serdeParams = (SerdeParams) object;
192 |           logger.error("SerdeId=" + serdeParams.getSerdeId() + ", ParamKey=" + serdeParams.getParamKey() + ", ParamValue=" + serdeParams.getParamValue());
193 |         }
194 |       }
195 | 
196 |       sqlSession.rollback();
197 |       numInsert = -1;
198 |     } finally {
199 |       sqlSession.close();
200 |     }
201 | 
202 |     return numInsert;
203 |   }
204 | 
205 |   public boolean updateSequenceTable() {
206 |     int count = 0;
207 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
208 |     try {
209 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.updateSequenceTable";
210 |       count = sqlSession.update(statement);
211 |       sqlSession.commit();
212 |       logger.info("update SequenceTable count = " + count);
213 |     } catch (Exception e) {
214 |       e.printStackTrace();
215 |       logger.error(e.getMessage());
216 |     } finally {
217 |       sqlSession.close();
218 |     }
219 |     return count > 0;
220 |   }
221 | 
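  // batchInsert() above writes in chunks of 100 and commits per chunk, so a table with
  // 1,234 rows is sent as 13 INSERT statements (twelve full chunks plus one of 34 rows).
  // The mapPlusId map carries a per-table id offset, which the batchInsert/rollback
  // statements apply so that rows merged from different source metastores do not collide
  // on primary key in the target.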
222 |   public int rollback(String tabName, HashMap<String, Object> mapPlusId) {
223 |     int numRollback = 0;
224 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
225 |     try {
226 |       String tableName = SchemaToMetaBean.formatTableColumnName(tabName, true);
227 |       logger.info("rolling back metadata table " + tabName + " plusId = " + mapPlusId.get(tabName));
228 | 
229 |       String statement = "com.netease.hivetools.mappers.MetaDataMapper.rollback" + tableName;
230 |       Map<String, Object> params = new HashMap<String, Object>();
231 |       params.put("mapPlusId", mapPlusId);
232 |       numRollback = sqlSession.delete(statement, params);
233 |       logger.info("rolled back metadata table " + tabName + ", records [" + numRollback + "]");
234 |       sqlSession.commit();
235 |     } catch (Exception e) {
236 |       e.printStackTrace();
237 |       logger.error(e.getMessage());
238 |       sqlSession.rollback();
239 |       numRollback = -1;
240 |     } finally {
241 |       sqlSession.close();
242 |     }
243 | 
244 |     return numRollback;
245 |   }
246 | 
247 |   public List<Object> checkUniqueKey(String tabName, List<Object> list) {
248 |     List<Object> listUniqueKey = new ArrayList<Object>();
249 |     SqlSession sqlSession = MyBatisUtil.getSqlSessionFactory(this.sourceName).openSession();
250 |     try {
251 |       String tableName = SchemaToMetaBean.formatTableColumnName(tabName, true);
252 |       logger.info("checking unique-key conflicts for table " + tabName);
253 | 
254 |       List<List<Object>> splitList = MetaDataMapper.getSplitList(list, 100);
255 |       int index = 0;
256 |       for (List<Object> subList : splitList) {
257 |         Map<String, Object> params = new HashMap<String, Object>();
258 |         params.put("list", subList);
259 | 
260 |         String statement = "com.netease.hivetools.mappers.MetaDataMapper.checkUniqueKey" + tableName;
261 |         List<Object> listTmp = sqlSession.selectList(statement, params);
262 |         if (listTmp != null) {
263 |           listUniqueKey.addAll(listTmp);
264 |         }
265 |         int progress = ((index++ + 1) * 100 / splitList.size());
266 |         logger.info("unique-key check, table " + tabName + " progress [" + progress + "%]");
267 |       }
268 |       logger.info("unique-key check, table " + tabName + " records [" + list.size() + "]");
269 |       sqlSession.commit();
270 |     } catch (Exception e) {
271 |       e.printStackTrace();
272 |       logger.error(e.getMessage());
273 |       sqlSession.rollback();
274 |     } finally {
275 |       sqlSession.close();
276 |     }
277 | 
278 |     return listUniqueKey;
279 |   }
280 | 
281 |   public static List<List<Object>> getSplitList(List<Object> list, int size)
282 |   {
283 |     List<List<Object>> returnList = new ArrayList<List<Object>>();
284 |     int listSize = list.size();
285 |     int num = listSize % size == 0 ? listSize / size : (listSize / size + 1);
286 |     int start = 0;
287 |     int end = 0;
288 |     for (int i = 1; i <= num; i++) {
289 |       start = (i - 1) * size;
290 |       end = Math.min(i * size, listSize);
291 |       returnList.add(list.subList(start, end));
292 |     }
293 |     return returnList;
294 |   }
295 | }
296 | 
--------------------------------------------------------------------------------
/src/main/java/com/netease/hivetools/meta/BucketingCols.java:
--------------------------------------------------------------------------------
1 | 
2 | package com.netease.hivetools.meta;
3 | 
4 | 
5 | public class BucketingCols {
6 | 
7 |   private Long sd_id;
8 |   private String bucket_col_name;
9 |   private Long integer_idx;
10 | 
11 |   public void BucketingCols() {
12 |   }
13 | 
14 |   public void setSdId(Long sdId_) {
15 |     sd_id = sdId_;
16 |   }
17 | 
18 |   public Long getSdId() {
19 |     return sd_id;
20 |   }
21 | 
22 |   public void setBucketColName(String bucketColName_) {
23 |     bucket_col_name = bucketColName_;
24 |   }
25 | 
26 |   public String getBucketColName() {
27 |     return bucket_col_name;
28 |   }
29 | 
30 |   public void setIntegerIdx(Long integerIdx_) {
31 |     integer_idx = integerIdx_;
32 |   }
33 | 
34 |   public Long getIntegerIdx() {
35 |     return integer_idx;
36 |   }
37 | 
38 | }
39 | 
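Taken together, the MetaDataMapper methods support the merge workflow described in the README: read a table from a source metastore, shift its ids by a per-table offset, batch-insert into the target, and roll the target back if anything fails. A hypothetical driver loop, for illustration only (MetaDataMerge.java is the real entry point and is not reproduced here; the data-source names and table list are assumptions):

    MetaDataMapper source = new MetaDataMapper("hive2"); // assumed data-source names
    MetaDataMapper target = new MetaDataMapper("hive1");
    HashMap<String, Object> plusId = new HashMap<>();
    for (String table : new String[] {"DBS", "TBLS", "SDS"}) {   // merge order matters in practice
        plusId.put(table, (long) target.getTableMaxId(table));   // offset = current max id in the target
        List<Object> rows = source.getTableRecords(table, null);
        if (target.batchInsert(table, rows, plusId) < 0) {
            target.rollback(table, plusId);                      // undo this table's partial insert
            break;
        }
    }
    target.updateSequenceTable();                                // bump SEQUENCE_TABLE past the new max ids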
-------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Cds.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Cds { 6 | 7 | private Long cd_id; 8 | 9 | public void Cds() { 10 | } 11 | 12 | public void setCdId(Long cdId_) { 13 | cd_id = cdId_; 14 | } 15 | 16 | public Long getCdId() { 17 | return cd_id; 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/ColumnsV2.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class ColumnsV2 { 6 | 7 | private Long cd_id; 8 | private String comment; 9 | private String column_name; 10 | private String type_name; 11 | private Long integer_idx; 12 | 13 | public void ColumnsV2() { 14 | } 15 | 16 | public void setCdId(Long cdId_) { 17 | cd_id = cdId_; 18 | } 19 | 20 | public Long getCdId() { 21 | return cd_id; 22 | } 23 | 24 | public void setComment(String comment_) { 25 | comment = comment_; 26 | } 27 | 28 | public String getComment() { 29 | return comment; 30 | } 31 | 32 | public void setColumnName(String columnName_) { 33 | column_name = columnName_; 34 | } 35 | 36 | public String getColumnName() { 37 | return column_name; 38 | } 39 | 40 | public void setTypeName(String typeName_) { 41 | type_name = typeName_; 42 | } 43 | 44 | public String getTypeName() { 45 | return type_name; 46 | } 47 | 48 | public void setIntegerIdx(Long integerIdx_) { 49 | integer_idx = integerIdx_; 50 | } 51 | 52 | public Long getIntegerIdx() { 53 | return integer_idx; 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/CompactionQueue.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class CompactionQueue { 6 | 7 | private Long cq_id; 8 | private String cq_database; 9 | private String cq_table; 10 | private String cq_partition; 11 | private String cq_state; 12 | private String cq_type; 13 | private String cq_worker_id; 14 | private Long cq_start; 15 | private String cq_run_as; 16 | 17 | public void CompactionQueue() { 18 | } 19 | 20 | public void setCqId(Long cqId_) { 21 | cq_id = cqId_; 22 | } 23 | 24 | public Long getCqId() { 25 | return cq_id; 26 | } 27 | 28 | public void setCqDatabase(String cqDatabase_) { 29 | cq_database = cqDatabase_; 30 | } 31 | 32 | public String getCqDatabase() { 33 | return cq_database; 34 | } 35 | 36 | public void setCqTable(String cqTable_) { 37 | cq_table = cqTable_; 38 | } 39 | 40 | public String getCqTable() { 41 | return cq_table; 42 | } 43 | 44 | public void setCqPartition(String cqPartition_) { 45 | cq_partition = cqPartition_; 46 | } 47 | 48 | public String getCqPartition() { 49 | return cq_partition; 50 | } 51 | 52 | public void setCqState(String cqState_) { 53 | cq_state = cqState_; 54 | } 55 | 56 | public String getCqState() { 57 | return cq_state; 58 | } 59 | 60 | public void setCqType(String cqType_) { 61 | cq_type = cqType_; 62 | } 63 | 64 | public String getCqType() { 65 | return cq_type; 66 | } 67 | 68 | public void setCqWorkerId(String cqWorkerId_) { 69 | cq_worker_id = cqWorkerId_; 70 | } 71 | 72 | public String getCqWorkerId() { 73 | return cq_worker_id; 74 | } 75 | 76 | public void 
setCqStart(Long cqStart_) { 77 | cq_start = cqStart_; 78 | } 79 | 80 | public Long getCqStart() { 81 | return cq_start; 82 | } 83 | 84 | public void setCqRunAs(String cqRunAs_) { 85 | cq_run_as = cqRunAs_; 86 | } 87 | 88 | public String getCqRunAs() { 89 | return cq_run_as; 90 | } 91 | 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/CompletedTxnComponents.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class CompletedTxnComponents { 6 | 7 | private Long ctc_txnid; 8 | private String ctc_database; 9 | private String ctc_table; 10 | private String ctc_partition; 11 | 12 | public void CompletedTxnComponents() { 13 | } 14 | 15 | public void setCtcTxnid(Long ctcTxnid_) { 16 | ctc_txnid = ctcTxnid_; 17 | } 18 | 19 | public Long getCtcTxnid() { 20 | return ctc_txnid; 21 | } 22 | 23 | public void setCtcDatabase(String ctcDatabase_) { 24 | ctc_database = ctcDatabase_; 25 | } 26 | 27 | public String getCtcDatabase() { 28 | return ctc_database; 29 | } 30 | 31 | public void setCtcTable(String ctcTable_) { 32 | ctc_table = ctcTable_; 33 | } 34 | 35 | public String getCtcTable() { 36 | return ctc_table; 37 | } 38 | 39 | public void setCtcPartition(String ctcPartition_) { 40 | ctc_partition = ctcPartition_; 41 | } 42 | 43 | public String getCtcPartition() { 44 | return ctc_partition; 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/DatabaseParams.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class DatabaseParams { 6 | 7 | private Long db_id; 8 | private String param_key; 9 | private String param_value; 10 | 11 | public void DatabaseParams() { 12 | } 13 | 14 | public void setDbId(Long dbId_) { 15 | db_id = dbId_; 16 | } 17 | 18 | public Long getDbId() { 19 | return db_id; 20 | } 21 | 22 | public void setParamKey(String paramKey_) { 23 | param_key = paramKey_; 24 | } 25 | 26 | public String getParamKey() { 27 | return param_key; 28 | } 29 | 30 | public void setParamValue(String paramValue_) { 31 | param_value = paramValue_; 32 | } 33 | 34 | public String getParamValue() { 35 | return param_value; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/DbPrivs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class DbPrivs { 6 | 7 | private Long db_grant_id; 8 | private Long create_time; 9 | private Long db_id; 10 | private Integer grant_option; 11 | private String grantor; 12 | private String grantor_type; 13 | private String principal_name; 14 | private String principal_type; 15 | private String db_priv; 16 | 17 | public void DbPrivs() { 18 | } 19 | 20 | public void setDbGrantId(Long dbGrantId_) { 21 | db_grant_id = dbGrantId_; 22 | } 23 | 24 | public Long getDbGrantId() { 25 | return db_grant_id; 26 | } 27 | 28 | public void setCreateTime(Long createTime_) { 29 | create_time = createTime_; 30 | } 31 | 32 | public Long getCreateTime() { 33 | return create_time; 34 | } 35 | 36 | public void setDbId(Long dbId_) { 37 | db_id = dbId_; 38 | } 39 | 40 | public Long getDbId() { 41 | return db_id; 42 | } 43 | 44 | public void 
setGrantOption(Integer grantOption_) { 45 | grant_option = grantOption_; 46 | } 47 | 48 | public Integer getGrantOption() { 49 | return grant_option; 50 | } 51 | 52 | public void setGrantor(String grantor_) { 53 | grantor = grantor_; 54 | } 55 | 56 | public String getGrantor() { 57 | return grantor; 58 | } 59 | 60 | public void setGrantorType(String grantorType_) { 61 | grantor_type = grantorType_; 62 | } 63 | 64 | public String getGrantorType() { 65 | return grantor_type; 66 | } 67 | 68 | public void setPrincipalName(String principalName_) { 69 | principal_name = principalName_; 70 | } 71 | 72 | public String getPrincipalName() { 73 | return principal_name; 74 | } 75 | 76 | public void setPrincipalType(String principalType_) { 77 | principal_type = principalType_; 78 | } 79 | 80 | public String getPrincipalType() { 81 | return principal_type; 82 | } 83 | 84 | public void setDbPriv(String dbPriv_) { 85 | db_priv = dbPriv_; 86 | } 87 | 88 | public String getDbPriv() { 89 | return db_priv; 90 | } 91 | 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Dbs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Dbs { 6 | 7 | private Long db_id; 8 | private String desc; 9 | private String db_location_uri; 10 | private String name; 11 | private String owner_name; 12 | private String owner_type; 13 | 14 | public void Dbs() { 15 | } 16 | 17 | public void setDbId(Long dbId_) { 18 | db_id = dbId_; 19 | } 20 | 21 | public Long getDbId() { 22 | return db_id; 23 | } 24 | 25 | public void setDesc(String desc_) { 26 | desc = desc_; 27 | } 28 | 29 | public String getDesc() { 30 | return desc; 31 | } 32 | 33 | public void setDbLocationUri(String dbLocationUri_) { 34 | db_location_uri = dbLocationUri_; 35 | } 36 | 37 | public String getDbLocationUri() { 38 | return db_location_uri; 39 | } 40 | 41 | public void setName(String name_) { 42 | name = name_; 43 | } 44 | 45 | public String getName() { 46 | return name; 47 | } 48 | 49 | public void setOwnerName(String ownerName_) { 50 | owner_name = ownerName_; 51 | } 52 | 53 | public String getOwnerName() { 54 | return owner_name; 55 | } 56 | 57 | public void setOwnerType(String ownerType_) { 58 | owner_type = ownerType_; 59 | } 60 | 61 | public String getOwnerType() { 62 | return owner_type; 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/DelegationTokens.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class DelegationTokens { 6 | 7 | private String token_ident; 8 | private String token; 9 | 10 | public void DelegationTokens() { 11 | } 12 | 13 | public void setTokenIdent(String tokenIdent_) { 14 | token_ident = tokenIdent_; 15 | } 16 | 17 | public String getTokenIdent() { 18 | return token_ident; 19 | } 20 | 21 | public void setToken(String token_) { 22 | token = token_; 23 | } 24 | 25 | public String getToken() { 26 | return token; 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/FuncRu.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class FuncRu { 6 | 7 | private Long func_id; 8 | 
private Long resource_type; 9 | private String resource_uri; 10 | private Long integer_idx; 11 | 12 | public void FuncRu() { 13 | } 14 | 15 | public void setFuncId(Long funcId_) { 16 | func_id = funcId_; 17 | } 18 | 19 | public Long getFuncId() { 20 | return func_id; 21 | } 22 | 23 | public void setResourceType(Long resourceType_) { 24 | resource_type = resourceType_; 25 | } 26 | 27 | public Long getResourceType() { 28 | return resource_type; 29 | } 30 | 31 | public void setResourceUri(String resourceUri_) { 32 | resource_uri = resourceUri_; 33 | } 34 | 35 | public String getResourceUri() { 36 | return resource_uri; 37 | } 38 | 39 | public void setIntegerIdx(Long integerIdx_) { 40 | integer_idx = integerIdx_; 41 | } 42 | 43 | public Long getIntegerIdx() { 44 | return integer_idx; 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Funcs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Funcs { 6 | 7 | private Long func_id; 8 | private String class_name; 9 | private Long create_time; 10 | private Long db_id; 11 | private String func_name; 12 | private Long func_type; 13 | private String owner_name; 14 | private String owner_type; 15 | 16 | public void Funcs() { 17 | } 18 | 19 | public void setFuncId(Long funcId_) { 20 | func_id = funcId_; 21 | } 22 | 23 | public Long getFuncId() { 24 | return func_id; 25 | } 26 | 27 | public void setClassName(String className_) { 28 | class_name = className_; 29 | } 30 | 31 | public String getClassName() { 32 | return class_name; 33 | } 34 | 35 | public void setCreateTime(Long createTime_) { 36 | create_time = createTime_; 37 | } 38 | 39 | public Long getCreateTime() { 40 | return create_time; 41 | } 42 | 43 | public void setDbId(Long dbId_) { 44 | db_id = dbId_; 45 | } 46 | 47 | public Long getDbId() { 48 | return db_id; 49 | } 50 | 51 | public void setFuncName(String funcName_) { 52 | func_name = funcName_; 53 | } 54 | 55 | public String getFuncName() { 56 | return func_name; 57 | } 58 | 59 | public void setFuncType(Long funcType_) { 60 | func_type = funcType_; 61 | } 62 | 63 | public Long getFuncType() { 64 | return func_type; 65 | } 66 | 67 | public void setOwnerName(String ownerName_) { 68 | owner_name = ownerName_; 69 | } 70 | 71 | public String getOwnerName() { 72 | return owner_name; 73 | } 74 | 75 | public void setOwnerType(String ownerType_) { 76 | owner_type = ownerType_; 77 | } 78 | 79 | public String getOwnerType() { 80 | return owner_type; 81 | } 82 | 83 | } 84 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/GlobalPrivs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class GlobalPrivs { 6 | 7 | private Long user_grant_id; 8 | private Long create_time; 9 | private Integer grant_option; 10 | private String grantor; 11 | private String grantor_type; 12 | private String principal_name; 13 | private String principal_type; 14 | private String user_priv; 15 | 16 | public void GlobalPrivs() { 17 | } 18 | 19 | public void setUserGrantId(Long userGrantId_) { 20 | user_grant_id = userGrantId_; 21 | } 22 | 23 | public Long getUserGrantId() { 24 | return user_grant_id; 25 | } 26 | 27 | public void setCreateTime(Long createTime_) { 28 | create_time = createTime_; 29 | } 30 | 31 | 
public Long getCreateTime() { 32 | return create_time; 33 | } 34 | 35 | public void setGrantOption(Integer grantOption_) { 36 | grant_option = grantOption_; 37 | } 38 | 39 | public Integer getGrantOption() { 40 | return grant_option; 41 | } 42 | 43 | public void setGrantor(String grantor_) { 44 | grantor = grantor_; 45 | } 46 | 47 | public String getGrantor() { 48 | return grantor; 49 | } 50 | 51 | public void setGrantorType(String grantorType_) { 52 | grantor_type = grantorType_; 53 | } 54 | 55 | public String getGrantorType() { 56 | return grantor_type; 57 | } 58 | 59 | public void setPrincipalName(String principalName_) { 60 | principal_name = principalName_; 61 | } 62 | 63 | public String getPrincipalName() { 64 | return principal_name; 65 | } 66 | 67 | public void setPrincipalType(String principalType_) { 68 | principal_type = principalType_; 69 | } 70 | 71 | public String getPrincipalType() { 72 | return principal_type; 73 | } 74 | 75 | public void setUserPriv(String userPriv_) { 76 | user_priv = userPriv_; 77 | } 78 | 79 | public String getUserPriv() { 80 | return user_priv; 81 | } 82 | 83 | } 84 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/HiveLocks.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class HiveLocks { 6 | 7 | private Long hl_lock_ext_id; 8 | private Long hl_lock_int_id; 9 | private Long hl_txnid; 10 | private String hl_db; 11 | private String hl_table; 12 | private String hl_partition; 13 | private String hl_lock_state; 14 | private String hl_lock_type; 15 | private Long hl_last_heartbeat; 16 | private Long hl_acquired_at; 17 | private String hl_user; 18 | private String hl_host; 19 | 20 | public void HiveLocks() { 21 | } 22 | 23 | public void setHlLockExtId(Long hlLockExtId_) { 24 | hl_lock_ext_id = hlLockExtId_; 25 | } 26 | 27 | public Long getHlLockExtId() { 28 | return hl_lock_ext_id; 29 | } 30 | 31 | public void setHlLockIntId(Long hlLockIntId_) { 32 | hl_lock_int_id = hlLockIntId_; 33 | } 34 | 35 | public Long getHlLockIntId() { 36 | return hl_lock_int_id; 37 | } 38 | 39 | public void setHlTxnid(Long hlTxnid_) { 40 | hl_txnid = hlTxnid_; 41 | } 42 | 43 | public Long getHlTxnid() { 44 | return hl_txnid; 45 | } 46 | 47 | public void setHlDb(String hlDb_) { 48 | hl_db = hlDb_; 49 | } 50 | 51 | public String getHlDb() { 52 | return hl_db; 53 | } 54 | 55 | public void setHlTable(String hlTable_) { 56 | hl_table = hlTable_; 57 | } 58 | 59 | public String getHlTable() { 60 | return hl_table; 61 | } 62 | 63 | public void setHlPartition(String hlPartition_) { 64 | hl_partition = hlPartition_; 65 | } 66 | 67 | public String getHlPartition() { 68 | return hl_partition; 69 | } 70 | 71 | public void setHlLockState(String hlLockState_) { 72 | hl_lock_state = hlLockState_; 73 | } 74 | 75 | public String getHlLockState() { 76 | return hl_lock_state; 77 | } 78 | 79 | public void setHlLockType(String hlLockType_) { 80 | hl_lock_type = hlLockType_; 81 | } 82 | 83 | public String getHlLockType() { 84 | return hl_lock_type; 85 | } 86 | 87 | public void setHlLastHeartbeat(Long hlLastHeartbeat_) { 88 | hl_last_heartbeat = hlLastHeartbeat_; 89 | } 90 | 91 | public Long getHlLastHeartbeat() { 92 | return hl_last_heartbeat; 93 | } 94 | 95 | public void setHlAcquiredAt(Long hlAcquiredAt_) { 96 | hl_acquired_at = hlAcquiredAt_; 97 | } 98 | 99 | public Long getHlAcquiredAt() { 100 | return hl_acquired_at; 101 | } 
102 | 103 | public void setHlUser(String hlUser_) { 104 | hl_user = hlUser_; 105 | } 106 | 107 | public String getHlUser() { 108 | return hl_user; 109 | } 110 | 111 | public void setHlHost(String hlHost_) { 112 | hl_host = hlHost_; 113 | } 114 | 115 | public String getHlHost() { 116 | return hl_host; 117 | } 118 | 119 | } 120 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Idxs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Idxs { 6 | 7 | private Long index_id; 8 | private Long create_time; 9 | private Boolean deferred_rebuild; 10 | private String index_handler_class; 11 | private String index_name; 12 | private Long index_tbl_id; 13 | private Long last_access_time; 14 | private Long orig_tbl_id; 15 | private Long sd_id; 16 | 17 | public void Idxs() { 18 | } 19 | 20 | public void setIndexId(Long indexId_) { 21 | index_id = indexId_; 22 | } 23 | 24 | public Long getIndexId() { 25 | return index_id; 26 | } 27 | 28 | public void setCreateTime(Long createTime_) { 29 | create_time = createTime_; 30 | } 31 | 32 | public Long getCreateTime() { 33 | return create_time; 34 | } 35 | 36 | public void setDeferredRebuild(Boolean deferredRebuild_) { 37 | deferred_rebuild = deferredRebuild_; 38 | } 39 | 40 | public Boolean getDeferredRebuild() { 41 | return deferred_rebuild; 42 | } 43 | 44 | public void setIndexHandlerClass(String indexHandlerClass_) { 45 | index_handler_class = indexHandlerClass_; 46 | } 47 | 48 | public String getIndexHandlerClass() { 49 | return index_handler_class; 50 | } 51 | 52 | public void setIndexName(String indexName_) { 53 | index_name = indexName_; 54 | } 55 | 56 | public String getIndexName() { 57 | return index_name; 58 | } 59 | 60 | public void setIndexTblId(Long indexTblId_) { 61 | index_tbl_id = indexTblId_; 62 | } 63 | 64 | public Long getIndexTblId() { 65 | return index_tbl_id; 66 | } 67 | 68 | public void setLastAccessTime(Long lastAccessTime_) { 69 | last_access_time = lastAccessTime_; 70 | } 71 | 72 | public Long getLastAccessTime() { 73 | return last_access_time; 74 | } 75 | 76 | public void setOrigTblId(Long origTblId_) { 77 | orig_tbl_id = origTblId_; 78 | } 79 | 80 | public Long getOrigTblId() { 81 | return orig_tbl_id; 82 | } 83 | 84 | public void setSdId(Long sdId_) { 85 | sd_id = sdId_; 86 | } 87 | 88 | public Long getSdId() { 89 | return sd_id; 90 | } 91 | 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/IndexParams.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class IndexParams { 6 | 7 | private Long index_id; 8 | private String param_key; 9 | private String param_value; 10 | 11 | public void IndexParams() { 12 | } 13 | 14 | public void setIndexId(Long indexId_) { 15 | index_id = indexId_; 16 | } 17 | 18 | public Long getIndexId() { 19 | return index_id; 20 | } 21 | 22 | public void setParamKey(String paramKey_) { 23 | param_key = paramKey_; 24 | } 25 | 26 | public String getParamKey() { 27 | return param_key; 28 | } 29 | 30 | public void setParamValue(String paramValue_) { 31 | param_value = paramValue_; 32 | } 33 | 34 | public String getParamValue() { 35 | return param_value; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- 
/src/main/java/com/netease/hivetools/meta/MasterKeys.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class MasterKeys { 6 | 7 | private Long key_id; 8 | private String master_key; 9 | 10 | public void MasterKeys() { 11 | } 12 | 13 | public void setKeyId(Long keyId_) { 14 | key_id = keyId_; 15 | } 16 | 17 | public Long getKeyId() { 18 | return key_id; 19 | } 20 | 21 | public void setMasterKey(String masterKey_) { 22 | master_key = masterKey_; 23 | } 24 | 25 | public String getMasterKey() { 26 | return master_key; 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/NextCompactionQueueId.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class NextCompactionQueueId { 6 | 7 | private Long ncq_next; 8 | 9 | public void NextCompactionQueueId() { 10 | } 11 | 12 | public void setNcqNext(Long ncqNext_) { 13 | ncq_next = ncqNext_; 14 | } 15 | 16 | public Long getNcqNext() { 17 | return ncq_next; 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/NextLockId.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class NextLockId { 6 | 7 | private Long nl_next; 8 | 9 | public void NextLockId() { 10 | } 11 | 12 | public void setNlNext(Long nlNext_) { 13 | nl_next = nlNext_; 14 | } 15 | 16 | public Long getNlNext() { 17 | return nl_next; 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/NextTxnId.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class NextTxnId { 6 | 7 | private Long ntxn_next; 8 | 9 | public void NextTxnId() { 10 | } 11 | 12 | public void setNtxnNext(Long ntxnNext_) { 13 | ntxn_next = ntxnNext_; 14 | } 15 | 16 | public Long getNtxnNext() { 17 | return ntxn_next; 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/NotificationLog.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class NotificationLog { 6 | 7 | private Long nl_id; 8 | private Long event_id; 9 | private Long event_time; 10 | private String event_type; 11 | private String db_name; 12 | private String tbl_name; 13 | private String message; 14 | 15 | public void NotificationLog() { 16 | } 17 | 18 | public void setNlId(Long nlId_) { 19 | nl_id = nlId_; 20 | } 21 | 22 | public Long getNlId() { 23 | return nl_id; 24 | } 25 | 26 | public void setEventId(Long eventId_) { 27 | event_id = eventId_; 28 | } 29 | 30 | public Long getEventId() { 31 | return event_id; 32 | } 33 | 34 | public void setEventTime(Long eventTime_) { 35 | event_time = eventTime_; 36 | } 37 | 38 | public Long getEventTime() { 39 | return event_time; 40 | } 41 | 42 | public void setEventType(String eventType_) { 43 | event_type = eventType_; 44 | } 45 | 46 | public String getEventType() { 47 | return event_type; 48 | } 49 | 50 | public void setDbName(String dbName_) { 51 | db_name = dbName_; 52 | } 
53 | 54 | public String getDbName() { 55 | return db_name; 56 | } 57 | 58 | public void setTblName(String tblName_) { 59 | tbl_name = tblName_; 60 | } 61 | 62 | public String getTblName() { 63 | return tbl_name; 64 | } 65 | 66 | public void setMessage(String message_) { 67 | message = message_; 68 | } 69 | 70 | public String getMessage() { 71 | return message; 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/NotificationSequence.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class NotificationSequence { 6 | 7 | private Long nni_id; 8 | private Long next_event_id; 9 | 10 | public void NotificationSequence() { 11 | } 12 | 13 | public void setNniId(Long nniId_) { 14 | nni_id = nniId_; 15 | } 16 | 17 | public Long getNniId() { 18 | return nni_id; 19 | } 20 | 21 | public void setNextEventId(Long nextEventId_) { 22 | next_event_id = nextEventId_; 23 | } 24 | 25 | public Long getNextEventId() { 26 | return next_event_id; 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/NucleusTables.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class NucleusTables { 6 | 7 | private String class_name; 8 | private String table_name; 9 | private String type; 10 | private String owner; 11 | private String version; 12 | private String interface_name; 13 | 14 | public void NucleusTables() { 15 | } 16 | 17 | public void setClassName(String className_) { 18 | class_name = className_; 19 | } 20 | 21 | public String getClassName() { 22 | return class_name; 23 | } 24 | 25 | public void setTableName(String tableName_) { 26 | table_name = tableName_; 27 | } 28 | 29 | public String getTableName() { 30 | return table_name; 31 | } 32 | 33 | public void setType(String type_) { 34 | type = type_; 35 | } 36 | 37 | public String getType() { 38 | return type; 39 | } 40 | 41 | public void setOwner(String owner_) { 42 | owner = owner_; 43 | } 44 | 45 | public String getOwner() { 46 | return owner; 47 | } 48 | 49 | public void setVersion(String version_) { 50 | version = version_; 51 | } 52 | 53 | public String getVersion() { 54 | return version; 55 | } 56 | 57 | public void setInterfaceName(String interfaceName_) { 58 | interface_name = interfaceName_; 59 | } 60 | 61 | public String getInterfaceName() { 62 | return interface_name; 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/PartColPrivs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class PartColPrivs { 6 | 7 | private Long part_column_grant_id; 8 | private String column_name; 9 | private Long create_time; 10 | private Integer grant_option; 11 | private String grantor; 12 | private String grantor_type; 13 | private Long part_id; 14 | private String principal_name; 15 | private String principal_type; 16 | private String part_col_priv; 17 | 18 | public void PartColPrivs() { 19 | } 20 | 21 | public void setPartColumnGrantId(Long partColumnGrantId_) { 22 | part_column_grant_id = partColumnGrantId_; 23 | } 24 | 25 | public Long getPartColumnGrantId() { 26 | return part_column_grant_id; 27 | } 
28 | 29 | public void setColumnName(String columnName_) { 30 | column_name = columnName_; 31 | } 32 | 33 | public String getColumnName() { 34 | return column_name; 35 | } 36 | 37 | public void setCreateTime(Long createTime_) { 38 | create_time = createTime_; 39 | } 40 | 41 | public Long getCreateTime() { 42 | return create_time; 43 | } 44 | 45 | public void setGrantOption(Integer grantOption_) { 46 | grant_option = grantOption_; 47 | } 48 | 49 | public Integer getGrantOption() { 50 | return grant_option; 51 | } 52 | 53 | public void setGrantor(String grantor_) { 54 | grantor = grantor_; 55 | } 56 | 57 | public String getGrantor() { 58 | return grantor; 59 | } 60 | 61 | public void setGrantorType(String grantorType_) { 62 | grantor_type = grantorType_; 63 | } 64 | 65 | public String getGrantorType() { 66 | return grantor_type; 67 | } 68 | 69 | public void setPartId(Long partId_) { 70 | part_id = partId_; 71 | } 72 | 73 | public Long getPartId() { 74 | return part_id; 75 | } 76 | 77 | public void setPrincipalName(String principalName_) { 78 | principal_name = principalName_; 79 | } 80 | 81 | public String getPrincipalName() { 82 | return principal_name; 83 | } 84 | 85 | public void setPrincipalType(String principalType_) { 86 | principal_type = principalType_; 87 | } 88 | 89 | public String getPrincipalType() { 90 | return principal_type; 91 | } 92 | 93 | public void setPartColPriv(String partColPriv_) { 94 | part_col_priv = partColPriv_; 95 | } 96 | 97 | public String getPartColPriv() { 98 | return part_col_priv; 99 | } 100 | 101 | } 102 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/PartColStats.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class PartColStats { 6 | 7 | private Long cs_id; 8 | private String db_name; 9 | private String table_name; 10 | private String partition_name; 11 | private String column_name; 12 | private String column_type; 13 | private Long part_id; 14 | private Long long_low_value; 15 | private Long long_high_value; 16 | private Double double_high_value; 17 | private Double double_low_value; 18 | private String big_decimal_low_value; 19 | private String big_decimal_high_value; 20 | private Long num_nulls; 21 | private Long num_distincts; 22 | private Double avg_col_len; 23 | private Long max_col_len; 24 | private Long num_trues; 25 | private Long num_falses; 26 | private Long last_analyzed; 27 | 28 | public void PartColStats() { 29 | } 30 | 31 | public void setCsId(Long csId_) { 32 | cs_id = csId_; 33 | } 34 | 35 | public Long getCsId() { 36 | return cs_id; 37 | } 38 | 39 | public void setDbName(String dbName_) { 40 | db_name = dbName_; 41 | } 42 | 43 | public String getDbName() { 44 | return db_name; 45 | } 46 | 47 | public void setTableName(String tableName_) { 48 | table_name = tableName_; 49 | } 50 | 51 | public String getTableName() { 52 | return table_name; 53 | } 54 | 55 | public void setPartitionName(String partitionName_) { 56 | partition_name = partitionName_; 57 | } 58 | 59 | public String getPartitionName() { 60 | return partition_name; 61 | } 62 | 63 | public void setColumnName(String columnName_) { 64 | column_name = columnName_; 65 | } 66 | 67 | public String getColumnName() { 68 | return column_name; 69 | } 70 | 71 | public void setColumnType(String columnType_) { 72 | column_type = columnType_; 73 | } 74 | 75 | public String getColumnType() { 76 | return column_type; 77 
| } 78 | 79 | public void setPartId(Long partId_) { 80 | part_id = partId_; 81 | } 82 | 83 | public Long getPartId() { 84 | return part_id; 85 | } 86 | 87 | public void setLongLowValue(Long longLowValue_) { 88 | long_low_value = longLowValue_; 89 | } 90 | 91 | public Long getLongLowValue() { 92 | return long_low_value; 93 | } 94 | 95 | public void setLongHighValue(Long longHighValue_) { 96 | long_high_value = longHighValue_; 97 | } 98 | 99 | public Long getLongHighValue() { 100 | return long_high_value; 101 | } 102 | 103 | public void setDoubleHighValue(Double doubleHighValue_) { 104 | double_high_value = doubleHighValue_; 105 | } 106 | 107 | public Double getDoubleHighValue() { 108 | return double_high_value; 109 | } 110 | 111 | public void setDoubleLowValue(Double doubleLowValue_) { 112 | double_low_value = doubleLowValue_; 113 | } 114 | 115 | public Double getDoubleLowValue() { 116 | return double_low_value; 117 | } 118 | 119 | public void setBigDecimalLowValue(String bigDecimalLowValue_) { 120 | big_decimal_low_value = bigDecimalLowValue_; 121 | } 122 | 123 | public String getBigDecimalLowValue() { 124 | return big_decimal_low_value; 125 | } 126 | 127 | public void setBigDecimalHighValue(String bigDecimalHighValue_) { 128 | big_decimal_high_value = bigDecimalHighValue_; 129 | } 130 | 131 | public String getBigDecimalHighValue() { 132 | return big_decimal_high_value; 133 | } 134 | 135 | public void setNumNulls(Long numNulls_) { 136 | num_nulls = numNulls_; 137 | } 138 | 139 | public Long getNumNulls() { 140 | return num_nulls; 141 | } 142 | 143 | public void setNumDistincts(Long numDistincts_) { 144 | num_distincts = numDistincts_; 145 | } 146 | 147 | public Long getNumDistincts() { 148 | return num_distincts; 149 | } 150 | 151 | public void setAvgColLen(Double avgColLen_) { 152 | avg_col_len = avgColLen_; 153 | } 154 | 155 | public Double getAvgColLen() { 156 | return avg_col_len; 157 | } 158 | 159 | public void setMaxColLen(Long maxColLen_) { 160 | max_col_len = maxColLen_; 161 | } 162 | 163 | public Long getMaxColLen() { 164 | return max_col_len; 165 | } 166 | 167 | public void setNumTrues(Long numTrues_) { 168 | num_trues = numTrues_; 169 | } 170 | 171 | public Long getNumTrues() { 172 | return num_trues; 173 | } 174 | 175 | public void setNumFalses(Long numFalses_) { 176 | num_falses = numFalses_; 177 | } 178 | 179 | public Long getNumFalses() { 180 | return num_falses; 181 | } 182 | 183 | public void setLastAnalyzed(Long lastAnalyzed_) { 184 | last_analyzed = lastAnalyzed_; 185 | } 186 | 187 | public Long getLastAnalyzed() { 188 | return last_analyzed; 189 | } 190 | 191 | } 192 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/PartPrivs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class PartPrivs { 6 | 7 | private Long part_grant_id; 8 | private Long create_time; 9 | private Integer grant_option; 10 | private String grantor; 11 | private String grantor_type; 12 | private Long part_id; 13 | private String principal_name; 14 | private String principal_type; 15 | private String part_priv; 16 | 17 | public void PartPrivs() { 18 | } 19 | 20 | public void setPartGrantId(Long partGrantId_) { 21 | part_grant_id = partGrantId_; 22 | } 23 | 24 | public Long getPartGrantId() { 25 | return part_grant_id; 26 | } 27 | 28 | public void setCreateTime(Long createTime_) { 29 | create_time = createTime_; 30 | } 31 | 32 | 
public Long getCreateTime() { 33 | return create_time; 34 | } 35 | 36 | public void setGrantOption(Integer grantOption_) { 37 | grant_option = grantOption_; 38 | } 39 | 40 | public Integer getGrantOption() { 41 | return grant_option; 42 | } 43 | 44 | public void setGrantor(String grantor_) { 45 | grantor = grantor_; 46 | } 47 | 48 | public String getGrantor() { 49 | return grantor; 50 | } 51 | 52 | public void setGrantorType(String grantorType_) { 53 | grantor_type = grantorType_; 54 | } 55 | 56 | public String getGrantorType() { 57 | return grantor_type; 58 | } 59 | 60 | public void setPartId(Long partId_) { 61 | part_id = partId_; 62 | } 63 | 64 | public Long getPartId() { 65 | return part_id; 66 | } 67 | 68 | public void setPrincipalName(String principalName_) { 69 | principal_name = principalName_; 70 | } 71 | 72 | public String getPrincipalName() { 73 | return principal_name; 74 | } 75 | 76 | public void setPrincipalType(String principalType_) { 77 | principal_type = principalType_; 78 | } 79 | 80 | public String getPrincipalType() { 81 | return principal_type; 82 | } 83 | 84 | public void setPartPriv(String partPriv_) { 85 | part_priv = partPriv_; 86 | } 87 | 88 | public String getPartPriv() { 89 | return part_priv; 90 | } 91 | 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/PartitionEvents.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class PartitionEvents { 6 | 7 | private Long part_name_id; 8 | private String db_name; 9 | private Long event_time; 10 | private Long event_type; 11 | private String partition_name; 12 | private String tbl_name; 13 | 14 | public void PartitionEvents() { 15 | } 16 | 17 | public void setPartNameId(Long partNameId_) { 18 | part_name_id = partNameId_; 19 | } 20 | 21 | public Long getPartNameId() { 22 | return part_name_id; 23 | } 24 | 25 | public void setDbName(String dbName_) { 26 | db_name = dbName_; 27 | } 28 | 29 | public String getDbName() { 30 | return db_name; 31 | } 32 | 33 | public void setEventTime(Long eventTime_) { 34 | event_time = eventTime_; 35 | } 36 | 37 | public Long getEventTime() { 38 | return event_time; 39 | } 40 | 41 | public void setEventType(Long eventType_) { 42 | event_type = eventType_; 43 | } 44 | 45 | public Long getEventType() { 46 | return event_type; 47 | } 48 | 49 | public void setPartitionName(String partitionName_) { 50 | partition_name = partitionName_; 51 | } 52 | 53 | public String getPartitionName() { 54 | return partition_name; 55 | } 56 | 57 | public void setTblName(String tblName_) { 58 | tbl_name = tblName_; 59 | } 60 | 61 | public String getTblName() { 62 | return tbl_name; 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/PartitionKeyVals.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class PartitionKeyVals { 6 | 7 | private Long part_id; 8 | private String part_key_val; 9 | private Long integer_idx; 10 | 11 | public void PartitionKeyVals() { 12 | } 13 | 14 | public void setPartId(Long partId_) { 15 | part_id = partId_; 16 | } 17 | 18 | public Long getPartId() { 19 | return part_id; 20 | } 21 | 22 | public void setPartKeyVal(String partKeyVal_) { 23 | part_key_val = partKeyVal_; 24 | } 25 | 26 | public String getPartKeyVal() { 
27 | return part_key_val; 28 | } 29 | 30 | public void setIntegerIdx(Long integerIdx_) { 31 | integer_idx = integerIdx_; 32 | } 33 | 34 | public Long getIntegerIdx() { 35 | return integer_idx; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/PartitionKeys.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class PartitionKeys { 6 | 7 | private Long tbl_id; 8 | private String pkey_comment; 9 | private String pkey_name; 10 | private String pkey_type; 11 | private Long integer_idx; 12 | 13 | public void PartitionKeys() { 14 | } 15 | 16 | public void setTblId(Long tblId_) { 17 | tbl_id = tblId_; 18 | } 19 | 20 | public Long getTblId() { 21 | return tbl_id; 22 | } 23 | 24 | public void setPkeyComment(String pkeyComment_) { 25 | pkey_comment = pkeyComment_; 26 | } 27 | 28 | public String getPkeyComment() { 29 | return pkey_comment; 30 | } 31 | 32 | public void setPkeyName(String pkeyName_) { 33 | pkey_name = pkeyName_; 34 | } 35 | 36 | public String getPkeyName() { 37 | return pkey_name; 38 | } 39 | 40 | public void setPkeyType(String pkeyType_) { 41 | pkey_type = pkeyType_; 42 | } 43 | 44 | public String getPkeyType() { 45 | return pkey_type; 46 | } 47 | 48 | public void setIntegerIdx(Long integerIdx_) { 49 | integer_idx = integerIdx_; 50 | } 51 | 52 | public Long getIntegerIdx() { 53 | return integer_idx; 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/PartitionParams.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class PartitionParams { 6 | 7 | private Long part_id; 8 | private String param_key; 9 | private String param_value; 10 | 11 | public void PartitionParams() { 12 | } 13 | 14 | public void setPartId(Long partId_) { 15 | part_id = partId_; 16 | } 17 | 18 | public Long getPartId() { 19 | return part_id; 20 | } 21 | 22 | public void setParamKey(String paramKey_) { 23 | param_key = paramKey_; 24 | } 25 | 26 | public String getParamKey() { 27 | return param_key; 28 | } 29 | 30 | public void setParamValue(String paramValue_) { 31 | param_value = paramValue_; 32 | } 33 | 34 | public String getParamValue() { 35 | return param_value; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Partitions.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Partitions { 6 | 7 | private Long part_id; 8 | private Long create_time; 9 | private Long last_access_time; 10 | private String part_name; 11 | private Long sd_id; 12 | private Long tbl_id; 13 | private Long link_target_id; 14 | 15 | public void Partitions() { 16 | } 17 | 18 | public void setPartId(Long partId_) { 19 | part_id = partId_; 20 | } 21 | 22 | public Long getPartId() { 23 | return part_id; 24 | } 25 | 26 | public void setCreateTime(Long createTime_) { 27 | create_time = createTime_; 28 | } 29 | 30 | public Long getCreateTime() { 31 | return create_time; 32 | } 33 | 34 | public void setLastAccessTime(Long lastAccessTime_) { 35 | last_access_time = lastAccessTime_; 36 | } 37 | 38 | public Long getLastAccessTime() { 39 | return last_access_time; 40 | } 41 | 
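// Illustrative sketch only (not part of the original source): merging rows from
// one metastore into another means shifting every auto-increment key by a
// per-table offset so it cannot collide with ids already present in the
// destination. The helper below shows the idea for a PARTITIONS row; the
// *Delta parameters are hypothetical, and the project's real merge logic lives
// in MetaDataMerge.
public static Partitions shiftIds(Partitions p, long partDelta, long sdDelta, long tblDelta) {
    // assumes the key fields are non-null, as they are for persisted rows
    p.setPartId(p.getPartId() + partDelta); // PART_ID primary key
    p.setSdId(p.getSdId() + sdDelta);       // foreign key into SDS
    p.setTblId(p.getTblId() + tblDelta);    // foreign key into TBLS
    return p;
}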
42 | public void setPartName(String partName_) { 43 | part_name = partName_; 44 | } 45 | 46 | public String getPartName() { 47 | return part_name; 48 | } 49 | 50 | public void setSdId(Long sdId_) { 51 | sd_id = sdId_; 52 | } 53 | 54 | public Long getSdId() { 55 | return sd_id; 56 | } 57 | 58 | public void setTblId(Long tblId_) { 59 | tbl_id = tblId_; 60 | } 61 | 62 | public Long getTblId() { 63 | return tbl_id; 64 | } 65 | 66 | public void setLinkTargetId(Long linkTargetId_) { 67 | link_target_id = linkTargetId_; 68 | } 69 | 70 | public Long getLinkTargetId() { 71 | return link_target_id; 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/RoleMap.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class RoleMap { 6 | 7 | private Long role_grant_id; 8 | private Long add_time; 9 | private Integer grant_option; 10 | private String grantor; 11 | private String grantor_type; 12 | private String principal_name; 13 | private String principal_type; 14 | private Long role_id; 15 | 16 | public void RoleMap() { 17 | } 18 | 19 | public void setRoleGrantId(Long roleGrantId_) { 20 | role_grant_id = roleGrantId_; 21 | } 22 | 23 | public Long getRoleGrantId() { 24 | return role_grant_id; 25 | } 26 | 27 | public void setAddTime(Long addTime_) { 28 | add_time = addTime_; 29 | } 30 | 31 | public Long getAddTime() { 32 | return add_time; 33 | } 34 | 35 | public void setGrantOption(Integer grantOption_) { 36 | grant_option = grantOption_; 37 | } 38 | 39 | public Integer getGrantOption() { 40 | return grant_option; 41 | } 42 | 43 | public void setGrantor(String grantor_) { 44 | grantor = grantor_; 45 | } 46 | 47 | public String getGrantor() { 48 | return grantor; 49 | } 50 | 51 | public void setGrantorType(String grantorType_) { 52 | grantor_type = grantorType_; 53 | } 54 | 55 | public String getGrantorType() { 56 | return grantor_type; 57 | } 58 | 59 | public void setPrincipalName(String principalName_) { 60 | principal_name = principalName_; 61 | } 62 | 63 | public String getPrincipalName() { 64 | return principal_name; 65 | } 66 | 67 | public void setPrincipalType(String principalType_) { 68 | principal_type = principalType_; 69 | } 70 | 71 | public String getPrincipalType() { 72 | return principal_type; 73 | } 74 | 75 | public void setRoleId(Long roleId_) { 76 | role_id = roleId_; 77 | } 78 | 79 | public Long getRoleId() { 80 | return role_id; 81 | } 82 | 83 | } 84 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Roles.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Roles { 6 | 7 | private Long role_id; 8 | private Long create_time; 9 | private String owner_name; 10 | private String role_name; 11 | 12 | public void Roles() { 13 | } 14 | 15 | public void setRoleId(Long roleId_) { 16 | role_id = roleId_; 17 | } 18 | 19 | public Long getRoleId() { 20 | return role_id; 21 | } 22 | 23 | public void setCreateTime(Long createTime_) { 24 | create_time = createTime_; 25 | } 26 | 27 | public Long getCreateTime() { 28 | return create_time; 29 | } 30 | 31 | public void setOwnerName(String ownerName_) { 32 | owner_name = ownerName_; 33 | } 34 | 35 | public String getOwnerName() { 36 | return owner_name; 37 | } 38 | 39 | public void 
setRoleName(String roleName_) { 40 | role_name = roleName_; 41 | } 42 | 43 | public String getRoleName() { 44 | return role_name; 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SdParams.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SdParams { 6 | 7 | private Long sd_id; 8 | private String param_key; 9 | private String param_value; 10 | 11 | public void SdParams() { 12 | } 13 | 14 | public void setSdId(Long sdId_) { 15 | sd_id = sdId_; 16 | } 17 | 18 | public Long getSdId() { 19 | return sd_id; 20 | } 21 | 22 | public void setParamKey(String paramKey_) { 23 | param_key = paramKey_; 24 | } 25 | 26 | public String getParamKey() { 27 | return param_key; 28 | } 29 | 30 | public void setParamValue(String paramValue_) { 31 | param_value = paramValue_; 32 | } 33 | 34 | public String getParamValue() { 35 | return param_value; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Sds.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Sds { 6 | 7 | private Long sd_id; 8 | private Long cd_id; 9 | private String input_format; 10 | private Boolean is_compressed; 11 | private Boolean is_storedassubdirectories; 12 | private String location; 13 | private Long num_buckets; 14 | private String output_format; 15 | private Long serde_id; 16 | 17 | public void Sds() { 18 | } 19 | 20 | public void setSdId(Long sdId_) { 21 | sd_id = sdId_; 22 | } 23 | 24 | public Long getSdId() { 25 | return sd_id; 26 | } 27 | 28 | public void setCdId(Long cdId_) { 29 | cd_id = cdId_; 30 | } 31 | 32 | public Long getCdId() { 33 | return cd_id; 34 | } 35 | 36 | public void setInputFormat(String inputFormat_) { 37 | input_format = inputFormat_; 38 | } 39 | 40 | public String getInputFormat() { 41 | return input_format; 42 | } 43 | 44 | public void setIsCompressed(Boolean isCompressed_) { 45 | is_compressed = isCompressed_; 46 | } 47 | 48 | public Boolean getIsCompressed() { 49 | return is_compressed; 50 | } 51 | 52 | public void setIsStoredassubdirectories(Boolean isStoredassubdirectories_) { 53 | is_storedassubdirectories = isStoredassubdirectories_; 54 | } 55 | 56 | public Boolean getIsStoredassubdirectories() { 57 | return is_storedassubdirectories; 58 | } 59 | 60 | public void setLocation(String location_) { 61 | location = location_; 62 | } 63 | 64 | public String getLocation() { 65 | return location; 66 | } 67 | 68 | public void setNumBuckets(Long numBuckets_) { 69 | num_buckets = numBuckets_; 70 | } 71 | 72 | public Long getNumBuckets() { 73 | return num_buckets; 74 | } 75 | 76 | public void setOutputFormat(String outputFormat_) { 77 | output_format = outputFormat_; 78 | } 79 | 80 | public String getOutputFormat() { 81 | return output_format; 82 | } 83 | 84 | public void setSerdeId(Long serdeId_) { 85 | serde_id = serdeId_; 86 | } 87 | 88 | public Long getSerdeId() { 89 | return serde_id; 90 | } 91 | 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SequenceTable.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SequenceTable { 6 | 7 | 
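// SEQUENCE_TABLE is where the metastore's DataNucleus layer remembers the next
// id to hand out for each model class, keyed by names such as
// "org.apache.hadoop.hive.metastore.model.MTable". After copying rows with
// shifted ids, a merge also has to raise NEXT_VAL past the largest id it
// wrote, along the lines of (values hypothetical):
//
//     UPDATE SEQUENCE_TABLE
//        SET NEXT_VAL = 100001
//      WHERE SEQUENCE_NAME = 'org.apache.hadoop.hive.metastore.model.MTable';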
private String sequence_name; 8 | private Long next_val; 9 | 10 | public void SequenceTable() { 11 | } 12 | 13 | public void setSequenceName(String sequenceName_) { 14 | sequence_name = sequenceName_; 15 | } 16 | 17 | public String getSequenceName() { 18 | return sequence_name; 19 | } 20 | 21 | public void setNextVal(Long nextVal_) { 22 | next_val = nextVal_; 23 | } 24 | 25 | public Long getNextVal() { 26 | return next_val; 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SerdeParams.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SerdeParams { 6 | 7 | private Long serde_id; 8 | private String param_key; 9 | private String param_value; 10 | 11 | public void SerdeParams() { 12 | } 13 | 14 | public void setSerdeId(Long serdeId_) { 15 | serde_id = serdeId_; 16 | } 17 | 18 | public Long getSerdeId() { 19 | return serde_id; 20 | } 21 | 22 | public void setParamKey(String paramKey_) { 23 | param_key = paramKey_; 24 | } 25 | 26 | public String getParamKey() { 27 | return param_key; 28 | } 29 | 30 | public void setParamValue(String paramValue_) { 31 | param_value = paramValue_; 32 | } 33 | 34 | public String getParamValue() { 35 | return param_value; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Serdes.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Serdes { 6 | 7 | private Long serde_id; 8 | private String name; 9 | private String slib; 10 | 11 | public void Serdes() { 12 | } 13 | 14 | public void setSerdeId(Long serdeId_) { 15 | serde_id = serdeId_; 16 | } 17 | 18 | public Long getSerdeId() { 19 | return serde_id; 20 | } 21 | 22 | public void setName(String name_) { 23 | name = name_; 24 | } 25 | 26 | public String getName() { 27 | return name; 28 | } 29 | 30 | public void setSlib(String slib_) { 31 | slib = slib_; 32 | } 33 | 34 | public String getSlib() { 35 | return slib; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SkewedColNames.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SkewedColNames { 6 | 7 | private Long sd_id; 8 | private String skewed_col_name; 9 | private Long integer_idx; 10 | 11 | public void SkewedColNames() { 12 | } 13 | 14 | public void setSdId(Long sdId_) { 15 | sd_id = sdId_; 16 | } 17 | 18 | public Long getSdId() { 19 | return sd_id; 20 | } 21 | 22 | public void setSkewedColName(String skewedColName_) { 23 | skewed_col_name = skewedColName_; 24 | } 25 | 26 | public String getSkewedColName() { 27 | return skewed_col_name; 28 | } 29 | 30 | public void setIntegerIdx(Long integerIdx_) { 31 | integer_idx = integerIdx_; 32 | } 33 | 34 | public Long getIntegerIdx() { 35 | return integer_idx; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SkewedColValueLocMap.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SkewedColValueLocMap { 6 | 7 | private Long sd_id; 8 | 
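// Mirrors the metastore's SKEWED_COL_VALUE_LOC_MAP table (list-bucketing
// support): string_list_id_kid below points at one skewed value tuple in
// SKEWED_STRING_LIST, and location records the directory holding exactly the
// rows with that value.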
private Long string_list_id_kid; 9 | private String location; 10 | 11 | public void SkewedColValueLocMap() { 12 | } 13 | 14 | public void setSdId(Long sdId_) { 15 | sd_id = sdId_; 16 | } 17 | 18 | public Long getSdId() { 19 | return sd_id; 20 | } 21 | 22 | public void setStringListIdKid(Long stringListIdKid_) { 23 | string_list_id_kid = stringListIdKid_; 24 | } 25 | 26 | public Long getStringListIdKid() { 27 | return string_list_id_kid; 28 | } 29 | 30 | public void setLocation(String location_) { 31 | location = location_; 32 | } 33 | 34 | public String getLocation() { 35 | return location; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SkewedStringList.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SkewedStringList { 6 | 7 | private Long string_list_id; 8 | 9 | public void SkewedStringList() { 10 | } 11 | 12 | public void setStringListId(Long stringListId_) { 13 | string_list_id = stringListId_; 14 | } 15 | 16 | public Long getStringListId() { 17 | return string_list_id; 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SkewedStringListValues.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SkewedStringListValues { 6 | 7 | private Long string_list_id; 8 | private String string_list_value; 9 | private Long integer_idx; 10 | 11 | public void SkewedStringListValues() { 12 | } 13 | 14 | public void setStringListId(Long stringListId_) { 15 | string_list_id = stringListId_; 16 | } 17 | 18 | public Long getStringListId() { 19 | return string_list_id; 20 | } 21 | 22 | public void setStringListValue(String stringListValue_) { 23 | string_list_value = stringListValue_; 24 | } 25 | 26 | public String getStringListValue() { 27 | return string_list_value; 28 | } 29 | 30 | public void setIntegerIdx(Long integerIdx_) { 31 | integer_idx = integerIdx_; 32 | } 33 | 34 | public Long getIntegerIdx() { 35 | return integer_idx; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SkewedValues.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class SkewedValues { 6 | 7 | private Long sd_id_oid; 8 | private Long string_list_id_eid; 9 | private Long integer_idx; 10 | 11 | public void SkewedValues() { 12 | } 13 | 14 | public void setSdIdOid(Long sdIdOid_) { 15 | sd_id_oid = sdIdOid_; 16 | } 17 | 18 | public Long getSdIdOid() { 19 | return sd_id_oid; 20 | } 21 | 22 | public void setStringListIdEid(Long stringListIdEid_) { 23 | string_list_id_eid = stringListIdEid_; 24 | } 25 | 26 | public Long getStringListIdEid() { 27 | return string_list_id_eid; 28 | } 29 | 30 | public void setIntegerIdx(Long integerIdx_) { 31 | integer_idx = integerIdx_; 32 | } 33 | 34 | public Long getIntegerIdx() { 35 | return integer_idx; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/SortCols.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class 
SortCols { 6 | 7 | private Long sd_id; 8 | private String column_name; 9 | private Long order; 10 | private Long integer_idx; 11 | 12 | public void SortCols() { 13 | } 14 | 15 | public void setSdId(Long sdId_) { 16 | sd_id = sdId_; 17 | } 18 | 19 | public Long getSdId() { 20 | return sd_id; 21 | } 22 | 23 | public void setColumnName(String columnName_) { 24 | column_name = columnName_; 25 | } 26 | 27 | public String getColumnName() { 28 | return column_name; 29 | } 30 | 31 | public void setOrder(Long order_) { 32 | order = order_; 33 | } 34 | 35 | public Long getOrder() { 36 | return order; 37 | } 38 | 39 | public void setIntegerIdx(Long integerIdx_) { 40 | integer_idx = integerIdx_; 41 | } 42 | 43 | public Long getIntegerIdx() { 44 | return integer_idx; 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/TabColStats.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class TabColStats { 6 | 7 | private Long cs_id; 8 | private String db_name; 9 | private String table_name; 10 | private String column_name; 11 | private String column_type; 12 | private Long tbl_id; 13 | private Long long_low_value; 14 | private Long long_high_value; 15 | private Double double_high_value; 16 | private Double double_low_value; 17 | private String big_decimal_low_value; 18 | private String big_decimal_high_value; 19 | private Long num_nulls; 20 | private Long num_distincts; 21 | private Double avg_col_len; 22 | private Long max_col_len; 23 | private Long num_trues; 24 | private Long num_falses; 25 | private Long last_analyzed; 26 | 27 | public void TabColStats() { 28 | } 29 | 30 | public void setCsId(Long csId_) { 31 | cs_id = csId_; 32 | } 33 | 34 | public Long getCsId() { 35 | return cs_id; 36 | } 37 | 38 | public void setDbName(String dbName_) { 39 | db_name = dbName_; 40 | } 41 | 42 | public String getDbName() { 43 | return db_name; 44 | } 45 | 46 | public void setTableName(String tableName_) { 47 | table_name = tableName_; 48 | } 49 | 50 | public String getTableName() { 51 | return table_name; 52 | } 53 | 54 | public void setColumnName(String columnName_) { 55 | column_name = columnName_; 56 | } 57 | 58 | public String getColumnName() { 59 | return column_name; 60 | } 61 | 62 | public void setColumnType(String columnType_) { 63 | column_type = columnType_; 64 | } 65 | 66 | public String getColumnType() { 67 | return column_type; 68 | } 69 | 70 | public void setTblId(Long tblId_) { 71 | tbl_id = tblId_; 72 | } 73 | 74 | public Long getTblId() { 75 | return tbl_id; 76 | } 77 | 78 | public void setLongLowValue(Long longLowValue_) { 79 | long_low_value = longLowValue_; 80 | } 81 | 82 | public Long getLongLowValue() { 83 | return long_low_value; 84 | } 85 | 86 | public void setLongHighValue(Long longHighValue_) { 87 | long_high_value = longHighValue_; 88 | } 89 | 90 | public Long getLongHighValue() { 91 | return long_high_value; 92 | } 93 | 94 | public void setDoubleHighValue(Double doubleHighValue_) { 95 | double_high_value = doubleHighValue_; 96 | } 97 | 98 | public Double getDoubleHighValue() { 99 | return double_high_value; 100 | } 101 | 102 | public void setDoubleLowValue(Double doubleLowValue_) { 103 | double_low_value = doubleLowValue_; 104 | } 105 | 106 | public Double getDoubleLowValue() { 107 | return double_low_value; 108 | } 109 | 110 | public void setBigDecimalLowValue(String bigDecimalLowValue_) { 111 | 
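// Decimal statistics bounds stay as strings end to end; the metastore schema
// keeps BIG_DECIMAL_LOW_VALUE/BIG_DECIMAL_HIGH_VALUE as character data so
// arbitrary-precision values survive unchanged.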
big_decimal_low_value = bigDecimalLowValue_; 112 | } 113 | 114 | public String getBigDecimalLowValue() { 115 | return big_decimal_low_value; 116 | } 117 | 118 | public void setBigDecimalHighValue(String bigDecimalHighValue_) { 119 | big_decimal_high_value = bigDecimalHighValue_; 120 | } 121 | 122 | public String getBigDecimalHighValue() { 123 | return big_decimal_high_value; 124 | } 125 | 126 | public void setNumNulls(Long numNulls_) { 127 | num_nulls = numNulls_; 128 | } 129 | 130 | public Long getNumNulls() { 131 | return num_nulls; 132 | } 133 | 134 | public void setNumDistincts(Long numDistincts_) { 135 | num_distincts = numDistincts_; 136 | } 137 | 138 | public Long getNumDistincts() { 139 | return num_distincts; 140 | } 141 | 142 | public void setAvgColLen(Double avgColLen_) { 143 | avg_col_len = avgColLen_; 144 | } 145 | 146 | public Double getAvgColLen() { 147 | return avg_col_len; 148 | } 149 | 150 | public void setMaxColLen(Long maxColLen_) { 151 | max_col_len = maxColLen_; 152 | } 153 | 154 | public Long getMaxColLen() { 155 | return max_col_len; 156 | } 157 | 158 | public void setNumTrues(Long numTrues_) { 159 | num_trues = numTrues_; 160 | } 161 | 162 | public Long getNumTrues() { 163 | return num_trues; 164 | } 165 | 166 | public void setNumFalses(Long numFalses_) { 167 | num_falses = numFalses_; 168 | } 169 | 170 | public Long getNumFalses() { 171 | return num_falses; 172 | } 173 | 174 | public void setLastAnalyzed(Long lastAnalyzed_) { 175 | last_analyzed = lastAnalyzed_; 176 | } 177 | 178 | public Long getLastAnalyzed() { 179 | return last_analyzed; 180 | } 181 | 182 | } 183 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/TableParams.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class TableParams { 6 | 7 | private Long tbl_id; 8 | private String param_key; 9 | private String param_value; 10 | 11 | public void TableParams() { 12 | } 13 | 14 | public void setTblId(Long tblId_) { 15 | tbl_id = tblId_; 16 | } 17 | 18 | public Long getTblId() { 19 | return tbl_id; 20 | } 21 | 22 | public void setParamKey(String paramKey_) { 23 | param_key = paramKey_; 24 | } 25 | 26 | public String getParamKey() { 27 | return param_key; 28 | } 29 | 30 | public void setParamValue(String paramValue_) { 31 | param_value = paramValue_; 32 | } 33 | 34 | public String getParamValue() { 35 | return param_value; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/TblColPrivs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class TblColPrivs { 6 | 7 | private Long tbl_column_grant_id; 8 | private String column_name; 9 | private Long create_time; 10 | private Integer grant_option; 11 | private String grantor; 12 | private String grantor_type; 13 | private String principal_name; 14 | private String principal_type; 15 | private String tbl_col_priv; 16 | private Long tbl_id; 17 | 18 | public void TblColPrivs() { 19 | } 20 | 21 | public void setTblColumnGrantId(Long tblColumnGrantId_) { 22 | tbl_column_grant_id = tblColumnGrantId_; 23 | } 24 | 25 | public Long getTblColumnGrantId() { 26 | return tbl_column_grant_id; 27 | } 28 | 29 | public void setColumnName(String columnName_) { 30 | column_name = columnName_; 31 | } 32 | 33 | public 
String getColumnName() { 34 | return column_name; 35 | } 36 | 37 | public void setCreateTime(Long createTime_) { 38 | create_time = createTime_; 39 | } 40 | 41 | public Long getCreateTime() { 42 | return create_time; 43 | } 44 | 45 | public void setGrantOption(Integer grantOption_) { 46 | grant_option = grantOption_; 47 | } 48 | 49 | public Integer getGrantOption() { 50 | return grant_option; 51 | } 52 | 53 | public void setGrantor(String grantor_) { 54 | grantor = grantor_; 55 | } 56 | 57 | public String getGrantor() { 58 | return grantor; 59 | } 60 | 61 | public void setGrantorType(String grantorType_) { 62 | grantor_type = grantorType_; 63 | } 64 | 65 | public String getGrantorType() { 66 | return grantor_type; 67 | } 68 | 69 | public void setPrincipalName(String principalName_) { 70 | principal_name = principalName_; 71 | } 72 | 73 | public String getPrincipalName() { 74 | return principal_name; 75 | } 76 | 77 | public void setPrincipalType(String principalType_) { 78 | principal_type = principalType_; 79 | } 80 | 81 | public String getPrincipalType() { 82 | return principal_type; 83 | } 84 | 85 | public void setTblColPriv(String tblColPriv_) { 86 | tbl_col_priv = tblColPriv_; 87 | } 88 | 89 | public String getTblColPriv() { 90 | return tbl_col_priv; 91 | } 92 | 93 | public void setTblId(Long tblId_) { 94 | tbl_id = tblId_; 95 | } 96 | 97 | public Long getTblId() { 98 | return tbl_id; 99 | } 100 | 101 | } 102 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/TblPrivs.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class TblPrivs { 6 | 7 | private Long tbl_grant_id; 8 | private Long create_time; 9 | private Integer grant_option; 10 | private String grantor; 11 | private String grantor_type; 12 | private String principal_name; 13 | private String principal_type; 14 | private String tbl_priv; 15 | private Long tbl_id; 16 | 17 | public void TblPrivs() { 18 | } 19 | 20 | public void setTblGrantId(Long tblGrantId_) { 21 | tbl_grant_id = tblGrantId_; 22 | } 23 | 24 | public Long getTblGrantId() { 25 | return tbl_grant_id; 26 | } 27 | 28 | public void setCreateTime(Long createTime_) { 29 | create_time = createTime_; 30 | } 31 | 32 | public Long getCreateTime() { 33 | return create_time; 34 | } 35 | 36 | public void setGrantOption(Integer grantOption_) { 37 | grant_option = grantOption_; 38 | } 39 | 40 | public Integer getGrantOption() { 41 | return grant_option; 42 | } 43 | 44 | public void setGrantor(String grantor_) { 45 | grantor = grantor_; 46 | } 47 | 48 | public String getGrantor() { 49 | return grantor; 50 | } 51 | 52 | public void setGrantorType(String grantorType_) { 53 | grantor_type = grantorType_; 54 | } 55 | 56 | public String getGrantorType() { 57 | return grantor_type; 58 | } 59 | 60 | public void setPrincipalName(String principalName_) { 61 | principal_name = principalName_; 62 | } 63 | 64 | public String getPrincipalName() { 65 | return principal_name; 66 | } 67 | 68 | public void setPrincipalType(String principalType_) { 69 | principal_type = principalType_; 70 | } 71 | 72 | public String getPrincipalType() { 73 | return principal_type; 74 | } 75 | 76 | public void setTblPriv(String tblPriv_) { 77 | tbl_priv = tblPriv_; 78 | } 79 | 80 | public String getTblPriv() { 81 | return tbl_priv; 82 | } 83 | 84 | public void setTblId(Long tblId_) { 85 | tbl_id = tblId_; 86 | } 87 | 88 | public Long getTblId() { 
89 | return tbl_id; 90 | } 91 | 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Tbls.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Tbls { 6 | 7 | private Long tbl_id; 8 | private Long create_time; 9 | private Long db_id; 10 | private Long last_access_time; 11 | private String owner; 12 | private Long retention; 13 | private Long sd_id; 14 | private String tbl_name; 15 | private String tbl_type; 16 | private String view_expanded_text; 17 | private String view_original_text; 18 | private Long link_target_id; 19 | 20 | public void Tbls() { 21 | } 22 | 23 | public void setTblId(Long tblId_) { 24 | tbl_id = tblId_; 25 | } 26 | 27 | public Long getTblId() { 28 | return tbl_id; 29 | } 30 | 31 | public void setCreateTime(Long createTime_) { 32 | create_time = createTime_; 33 | } 34 | 35 | public Long getCreateTime() { 36 | return create_time; 37 | } 38 | 39 | public void setDbId(Long dbId_) { 40 | db_id = dbId_; 41 | } 42 | 43 | public Long getDbId() { 44 | return db_id; 45 | } 46 | 47 | public void setLastAccessTime(Long lastAccessTime_) { 48 | last_access_time = lastAccessTime_; 49 | } 50 | 51 | public Long getLastAccessTime() { 52 | return last_access_time; 53 | } 54 | 55 | public void setOwner(String owner_) { 56 | owner = owner_; 57 | } 58 | 59 | public String getOwner() { 60 | return owner; 61 | } 62 | 63 | public void setRetention(Long retention_) { 64 | retention = retention_; 65 | } 66 | 67 | public Long getRetention() { 68 | return retention; 69 | } 70 | 71 | public void setSdId(Long sdId_) { 72 | sd_id = sdId_; 73 | } 74 | 75 | public Long getSdId() { 76 | return sd_id; 77 | } 78 | 79 | public void setTblName(String tblName_) { 80 | tbl_name = tblName_; 81 | } 82 | 83 | public String getTblName() { 84 | return tbl_name; 85 | } 86 | 87 | public void setTblType(String tblType_) { 88 | tbl_type = tblType_; 89 | } 90 | 91 | public String getTblType() { 92 | return tbl_type; 93 | } 94 | 95 | public void setViewExpandedText(String viewExpandedText_) { 96 | view_expanded_text = viewExpandedText_; 97 | } 98 | 99 | public String getViewExpandedText() { 100 | return view_expanded_text; 101 | } 102 | 103 | public void setViewOriginalText(String viewOriginalText_) { 104 | view_original_text = viewOriginalText_; 105 | } 106 | 107 | public String getViewOriginalText() { 108 | return view_original_text; 109 | } 110 | 111 | public void setLinkTargetId(Long linkTargetId_) { 112 | link_target_id = linkTargetId_; 113 | } 114 | 115 | public Long getLinkTargetId() { 116 | return link_target_id; 117 | } 118 | 119 | } 120 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/TxnComponents.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class TxnComponents { 6 | 7 | private Long tc_txnid; 8 | private String tc_database; 9 | private String tc_table; 10 | private String tc_partition; 11 | 12 | public void TxnComponents() { 13 | } 14 | 15 | public void setTcTxnid(Long tcTxnid_) { 16 | tc_txnid = tcTxnid_; 17 | } 18 | 19 | public Long getTcTxnid() { 20 | return tc_txnid; 21 | } 22 | 23 | public void setTcDatabase(String tcDatabase_) { 24 | tc_database = tcDatabase_; 25 | } 26 | 27 | public String getTcDatabase() { 28 | return tc_database; 29 | } 30 | 
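// TXN_COMPONENTS, together with TXNS, COMPLETED_TXN_COMPONENTS and the
// lock/compaction tables modeled elsewhere in this package, is live ACID
// bookkeeping: one row per (transaction, database, table[, partition]) touched
// by an open transaction. That state only means something inside its own
// metastore, so a pre-merge sanity check could look like this (hypothetical
// helper, not part of the project):
public static boolean safeToMerge(java.util.List<Txns> openTxns) {
    // refuse to merge while transactions are in flight; their ids would dangle
    return openTxns.isEmpty();
}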
31 | public void setTcTable(String tcTable_) { 32 | tc_table = tcTable_; 33 | } 34 | 35 | public String getTcTable() { 36 | return tc_table; 37 | } 38 | 39 | public void setTcPartition(String tcPartition_) { 40 | tc_partition = tcPartition_; 41 | } 42 | 43 | public String getTcPartition() { 44 | return tc_partition; 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Txns.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Txns { 6 | 7 | private Long txn_id; 8 | private String txn_state; 9 | private Long txn_started; 10 | private Long txn_last_heartbeat; 11 | private String txn_user; 12 | private String txn_host; 13 | 14 | public void Txns() { 15 | } 16 | 17 | public void setTxnId(Long txnId_) { 18 | txn_id = txnId_; 19 | } 20 | 21 | public Long getTxnId() { 22 | return txn_id; 23 | } 24 | 25 | public void setTxnState(String txnState_) { 26 | txn_state = txnState_; 27 | } 28 | 29 | public String getTxnState() { 30 | return txn_state; 31 | } 32 | 33 | public void setTxnStarted(Long txnStarted_) { 34 | txn_started = txnStarted_; 35 | } 36 | 37 | public Long getTxnStarted() { 38 | return txn_started; 39 | } 40 | 41 | public void setTxnLastHeartbeat(Long txnLastHeartbeat_) { 42 | txn_last_heartbeat = txnLastHeartbeat_; 43 | } 44 | 45 | public Long getTxnLastHeartbeat() { 46 | return txn_last_heartbeat; 47 | } 48 | 49 | public void setTxnUser(String txnUser_) { 50 | txn_user = txnUser_; 51 | } 52 | 53 | public String getTxnUser() { 54 | return txn_user; 55 | } 56 | 57 | public void setTxnHost(String txnHost_) { 58 | txn_host = txnHost_; 59 | } 60 | 61 | public String getTxnHost() { 62 | return txn_host; 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/TypeFields.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class TypeFields { 6 | 7 | private Long type_name; 8 | private String comment; 9 | private String field_name; 10 | private String field_type; 11 | private Long integer_idx; 12 | 13 | public void TypeFields() { 14 | } 15 | 16 | public void setTypeName(Long typeName_) { 17 | type_name = typeName_; 18 | } 19 | 20 | public Long getTypeName() { 21 | return type_name; 22 | } 23 | 24 | public void setComment(String comment_) { 25 | comment = comment_; 26 | } 27 | 28 | public String getComment() { 29 | return comment; 30 | } 31 | 32 | public void setFieldName(String fieldName_) { 33 | field_name = fieldName_; 34 | } 35 | 36 | public String getFieldName() { 37 | return field_name; 38 | } 39 | 40 | public void setFieldType(String fieldType_) { 41 | field_type = fieldType_; 42 | } 43 | 44 | public String getFieldType() { 45 | return field_type; 46 | } 47 | 48 | public void setIntegerIdx(Long integerIdx_) { 49 | integer_idx = integerIdx_; 50 | } 51 | 52 | public Long getIntegerIdx() { 53 | return integer_idx; 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Types.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Types { 6 | 7 | private Long types_id; 8 | private String type_name; 9 | private String type1; 10 | 
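// TYPES and TYPE_FIELDS back the metastore's rarely exercised composite-type
// bookkeeping; the beans are carried along so a merged schema stays complete
// even when these tables are empty.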
private String type2; 11 | 12 | public Types() { 13 | } 14 | 15 | public void setTypesId(Long typesId_) { 16 | types_id = typesId_; 17 | } 18 | 19 | public Long getTypesId() { 20 | return types_id; 21 | } 22 | 23 | public void setTypeName(String typeName_) { 24 | type_name = typeName_; 25 | } 26 | 27 | public String getTypeName() { 28 | return type_name; 29 | } 30 | 31 | public void setType1(String type1_) { 32 | type1 = type1_; 33 | } 34 | 35 | public String getType1() { 36 | return type1; 37 | } 38 | 39 | public void setType2(String type2_) { 40 | type2 = type2_; 41 | } 42 | 43 | public String getType2() { 44 | return type2; 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/meta/Version.java: -------------------------------------------------------------------------------- 1 | 2 | package com.netease.hivetools.meta; 3 | 4 | 5 | public class Version { 6 | 7 | private Long ver_id; 8 | private String schema_version; 9 | private String version_comment; 10 | 11 | public Version() { 12 | } 13 | 14 | public void setVerId(Long verId_) { 15 | ver_id = verId_; 16 | } 17 | 18 | public Long getVerId() { 19 | return ver_id; 20 | } 21 | 22 | public void setSchemaVersion(String schemaVersion_) { 23 | schema_version = schemaVersion_; 24 | } 25 | 26 | public String getSchemaVersion() { 27 | return schema_version; 28 | } 29 | 30 | public void setVersionComment(String versionComment_) { 31 | version_comment = versionComment_; 32 | } 33 | 34 | public String getVersionComment() { 35 | return version_comment; 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/netease/hivetools/service/MyBatisUtil.java: -------------------------------------------------------------------------------- 1 | package com.netease.hivetools.service; 2 | 3 | import org.apache.ibatis.io.Resources; 4 | import org.apache.ibatis.session.SqlSessionFactory; 5 | import org.apache.ibatis.session.SqlSessionFactoryBuilder; 6 | import org.apache.log4j.Logger; 7 | 8 | import java.io.File; 9 | import java.io.FileInputStream; 10 | import java.io.FileNotFoundException; 11 | import java.io.IOException; 12 | import java.io.Reader; 13 | import java.util.Properties; 14 | 15 | public class MyBatisUtil { 16 | 17 | private static final Logger logger = Logger.getLogger(MyBatisUtil.class.getName()); 18 | 19 | private static SqlSessionFactory sourceFactory = null; 20 | private static SqlSessionFactory destFactory = null; 21 | private static SqlSessionFactory onlineFactory = null; 22 | public static String sourceName; 23 | public static String destName; 24 | public static String onlineName; 25 | 26 | private static void initSqlSessionFactory(String sourceName) { // builds the session factory for the data source named sourceName 27 | Reader reader = null; 28 | try { 29 | reader = Resources.getResourceAsReader("mybatis-config.xml"); 30 | } catch (IOException e) { 31 | throw new RuntimeException(e.getMessage()); 32 | } 33 | 34 | Properties allProps = new Properties(); 35 | File file = new File("hive-tools.properties"); 36 | FileInputStream fis = null; 37 | try { 38 | fis = new FileInputStream(file); 39 | allProps.load(fis); 40 | } catch (FileNotFoundException e) { 41 | e.printStackTrace(); 42 | } catch (IOException e) { 43 | e.printStackTrace(); 44 | } finally { 45 | if (fis != null) { 46 | try { 47 | fis.close(); 48 | } catch (IOException e) { // best-effort close; nothing more to do here 49 | } 50 | } 51 | } 52 | 53 | Properties props = new Properties(); // connection settings are keyed "<name>.jdbc.*" in hive-tools.properties, e.g. (name hypothetical) hive1.jdbc.url=jdbc:mysql://host:3306/hivemeta 54 | props.setProperty("jdbc.driverClassName", allProps.getProperty(sourceName+".jdbc.driverClassName")); 55 | props.setProperty("jdbc.url", allProps.getProperty(sourceName+".jdbc.url")); 56 | props.setProperty("jdbc.username", allProps.getProperty(sourceName+".jdbc.username")); 57 | props.setProperty("jdbc.password", allProps.getProperty(sourceName+".jdbc.password")); 58 | 59 | if (sourceName.equals(MyBatisUtil.sourceName)) { 60 | sourceFactory = new SqlSessionFactoryBuilder().build(reader, props); 61 | } else if (sourceName.equals(MyBatisUtil.destName)) { 62 | destFactory = new SqlSessionFactoryBuilder().build(reader, props); 63 | } else if (sourceName.equals(MyBatisUtil.onlineName)) { 64 | onlineFactory = new SqlSessionFactoryBuilder().build(reader, props); 65 | } else { 66 | logger.error("not found source : " + sourceName); 67 | } 68 | } 69 | 70 | public static SqlSessionFactory getSqlSessionFactory(String sourceName) { // lazily initializes and caches one factory per configured source 71 | if (sourceName.equals(MyBatisUtil.sourceName)) { 72 | if (null == sourceFactory) 73 | initSqlSessionFactory(sourceName); 74 | 75 | return sourceFactory; 76 | } else if (sourceName.equals(MyBatisUtil.destName)) { 77 | if (null == destFactory) 78 | initSqlSessionFactory(sourceName); 79 | 80 | return destFactory; 81 | } else if (sourceName.equals(MyBatisUtil.onlineName)) { 82 | if (null == onlineFactory) 83 | initSqlSessionFactory(sourceName); 84 | 85 | return onlineFactory; 86 | } else { 87 | logger.error("not found source : " + sourceName); 88 | return null; 89 | } 90 | } 91 | } -------------------------------------------------------------------------------- /src/main/java/org/apache/ranger/binding/metastore/thrift/MetaStoreUpdateServiceVersion.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum MetaStoreUpdateServiceVersion implements TEnum { 15 | V1(0); 16 | 17 | private final int value; 18 | 19 | private MetaStoreUpdateServiceVersion(int value) { 20 | this.value = value; 21 | } 22 | 23 | /** 24 | * Get the integer value of this enum value, as defined in the Thrift IDL. 25 | */ 26 | public int getValue() { 27 | return value; 28 | } 29 | 30 | /** 31 | * Find the enum type by its integer value, as defined in the Thrift IDL. 32 | * @return null if the value is not found. 
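 *         (Thrift transports enums as plain i32 values, so deserializers
 *         recover the constant with this lookup; the TStatus scheme later in
 *         this package does exactly
 *         {@code TErrorCode.findByValue(iprot.readI32())}.)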
33 | */ 34 | public static MetaStoreUpdateServiceVersion findByValue(int value) { 35 | switch (value) { 36 | case 0: 37 | return V1; 38 | default: 39 | return null; 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/main/java/org/apache/ranger/binding/metastore/thrift/TErrorCode.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum TErrorCode implements TEnum { 15 | OK(0), 16 | ERROR(1), 17 | INVALID(2); 18 | 19 | private final int value; 20 | 21 | private TErrorCode(int value) { 22 | this.value = value; 23 | } 24 | 25 | /** 26 | * Get the integer value of this enum value, as defined in the Thrift IDL. 27 | */ 28 | public int getValue() { 29 | return value; 30 | } 31 | 32 | /** 33 | * Find a the enum type by its integer value, as defined in the Thrift IDL. 34 | * @return null if the value is not found. 35 | */ 36 | public static TErrorCode findByValue(int value) { 37 | switch (value) { 38 | case 0: 39 | return OK; 40 | case 1: 41 | return ERROR; 42 | case 2: 43 | return INVALID; 44 | default: 45 | return null; 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/main/java/org/apache/ranger/binding/metastore/thrift/TOperation.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum TOperation implements TEnum { 15 | CREATE_DATABASE(0), 16 | DROP_DATABASE(1), 17 | CREATE_TABLE(2), 18 | DROP_TABLE(3), 19 | ALTER_TABLE(4), 20 | REMAME_TABLE(5), 21 | ADD_PARTITION(6), 22 | DROP_PARTITION(7), 23 | ALTER_PARTITION(8), 24 | RENAME_PARTITION(9), 25 | INIT_ID(10); 26 | 27 | private final int value; 28 | 29 | private TOperation(int value) { 30 | this.value = value; 31 | } 32 | 33 | /** 34 | * Get the integer value of this enum value, as defined in the Thrift IDL. 35 | */ 36 | public int getValue() { 37 | return value; 38 | } 39 | 40 | /** 41 | * Find a the enum type by its integer value, as defined in the Thrift IDL. 42 | * @return null if the value is not found. 
43 | */ 44 | public static TOperation findByValue(int value) { 45 | switch (value) { 46 | case 0: 47 | return CREATE_DATABASE; 48 | case 1: 49 | return DROP_DATABASE; 50 | case 2: 51 | return CREATE_TABLE; 52 | case 3: 53 | return DROP_TABLE; 54 | case 4: 55 | return ALTER_TABLE; 56 | case 5: 57 | return REMAME_TABLE; 58 | case 6: 59 | return ADD_PARTITION; 60 | case 7: 61 | return DROP_PARTITION; 62 | case 8: 63 | return ALTER_PARTITION; 64 | case 9: 65 | return RENAME_PARTITION; 66 | case 10: 67 | return INIT_ID; 68 | default: 69 | return null; 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/main/java/org/apache/ranger/binding/metastore/thrift/TStatus.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | import org.apache.thrift.scheme.IScheme; 10 | import org.apache.thrift.scheme.SchemeFactory; 11 | import org.apache.thrift.scheme.StandardScheme; 12 | 13 | import org.apache.thrift.scheme.TupleScheme; 14 | import org.apache.thrift.protocol.TTupleProtocol; 15 | import org.apache.thrift.protocol.TProtocolException; 16 | import org.apache.thrift.EncodingUtils; 17 | import org.apache.thrift.TException; 18 | import org.apache.thrift.async.AsyncMethodCallback; 19 | import org.apache.thrift.server.AbstractNonblockingServer.*; 20 | import java.util.List; 21 | import java.util.ArrayList; 22 | import java.util.Map; 23 | import java.util.HashMap; 24 | import java.util.EnumMap; 25 | import java.util.Set; 26 | import java.util.HashSet; 27 | import java.util.EnumSet; 28 | import java.util.Collections; 29 | import java.util.BitSet; 30 | import java.nio.ByteBuffer; 31 | import java.util.Arrays; 32 | import javax.annotation.Generated; 33 | import org.slf4j.Logger; 34 | import org.slf4j.LoggerFactory; 35 | 36 | @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) 37 | @Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2017-04-01") 38 | public class TStatus implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { 39 | private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TStatus"); 40 | 41 | private static final org.apache.thrift.protocol.TField STATUS_FIELD_DESC = new org.apache.thrift.protocol.TField("status", org.apache.thrift.protocol.TType.I32, (short)1); 42 | private static final org.apache.thrift.protocol.TField ERROR_MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("error_msg", org.apache.thrift.protocol.TType.STRING, (short)2); 43 | 44 | private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); 45 | static { 46 | schemes.put(StandardScheme.class, new TStatusStandardSchemeFactory()); 47 | schemes.put(TupleScheme.class, new TStatusTupleSchemeFactory()); 48 | } 49 | 50 | /** 51 | * 52 | * @see TErrorCode 53 | */ 54 | public TErrorCode status; // required 55 | public String error_msg; // optional 56 | 57 | /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
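 * Each constant pairs the numeric field id from the Thrift IDL (1 for STATUS,
 * 2 for ERROR_MSG) with its field name, so callers can address fields
 * uniformly through findByThriftId, findByName, or fieldForId below.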
*/ 58 | public enum _Fields implements org.apache.thrift.TFieldIdEnum { 59 | /** 60 | * 61 | * @see TErrorCode 62 | */ 63 | STATUS((short)1, "status"), 64 | ERROR_MSG((short)2, "error_msg"); 65 | 66 | private static final Map byName = new HashMap(); 67 | 68 | static { 69 | for (_Fields field : EnumSet.allOf(_Fields.class)) { 70 | byName.put(field.getFieldName(), field); 71 | } 72 | } 73 | 74 | /** 75 | * Find the _Fields constant that matches fieldId, or null if its not found. 76 | */ 77 | public static _Fields findByThriftId(int fieldId) { 78 | switch(fieldId) { 79 | case 1: // STATUS 80 | return STATUS; 81 | case 2: // ERROR_MSG 82 | return ERROR_MSG; 83 | default: 84 | return null; 85 | } 86 | } 87 | 88 | /** 89 | * Find the _Fields constant that matches fieldId, throwing an exception 90 | * if it is not found. 91 | */ 92 | public static _Fields findByThriftIdOrThrow(int fieldId) { 93 | _Fields fields = findByThriftId(fieldId); 94 | if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); 95 | return fields; 96 | } 97 | 98 | /** 99 | * Find the _Fields constant that matches name, or null if its not found. 100 | */ 101 | public static _Fields findByName(String name) { 102 | return byName.get(name); 103 | } 104 | 105 | private final short _thriftId; 106 | private final String _fieldName; 107 | 108 | _Fields(short thriftId, String fieldName) { 109 | _thriftId = thriftId; 110 | _fieldName = fieldName; 111 | } 112 | 113 | public short getThriftFieldId() { 114 | return _thriftId; 115 | } 116 | 117 | public String getFieldName() { 118 | return _fieldName; 119 | } 120 | } 121 | 122 | // isset id assignments 123 | private static final _Fields optionals[] = {_Fields.ERROR_MSG}; 124 | public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; 125 | static { 126 | Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); 127 | tmpMap.put(_Fields.STATUS, new org.apache.thrift.meta_data.FieldMetaData("status", org.apache.thrift.TFieldRequirementType.REQUIRED, 128 | new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TErrorCode.class))); 129 | tmpMap.put(_Fields.ERROR_MSG, new org.apache.thrift.meta_data.FieldMetaData("error_msg", org.apache.thrift.TFieldRequirementType.OPTIONAL, 130 | new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); 131 | metaDataMap = Collections.unmodifiableMap(tmpMap); 132 | org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TStatus.class, metaDataMap); 133 | } 134 | 135 | public TStatus() { 136 | } 137 | 138 | public TStatus( 139 | TErrorCode status) 140 | { 141 | this(); 142 | this.status = status; 143 | } 144 | 145 | /** 146 | * Performs a deep copy on other. 
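 * Copying the references field by field already yields an independent TStatus
 * here, because status is an enum constant and error_msg an immutable String.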
147 | */ 148 | public TStatus(TStatus other) { 149 | if (other.isSetStatus()) { 150 | this.status = other.status; 151 | } 152 | if (other.isSetError_msg()) { 153 | this.error_msg = other.error_msg; 154 | } 155 | } 156 | 157 | public TStatus deepCopy() { 158 | return new TStatus(this); 159 | } 160 | 161 | @Override 162 | public void clear() { 163 | this.status = null; 164 | this.error_msg = null; 165 | } 166 | 167 | /** 168 | * 169 | * @see TErrorCode 170 | */ 171 | public TErrorCode getStatus() { 172 | return this.status; 173 | } 174 | 175 | /** 176 | * 177 | * @see TErrorCode 178 | */ 179 | public TStatus setStatus(TErrorCode status) { 180 | this.status = status; 181 | return this; 182 | } 183 | 184 | public void unsetStatus() { 185 | this.status = null; 186 | } 187 | 188 | /** Returns true if field status is set (has been assigned a value) and false otherwise */ 189 | public boolean isSetStatus() { 190 | return this.status != null; 191 | } 192 | 193 | public void setStatusIsSet(boolean value) { 194 | if (!value) { 195 | this.status = null; 196 | } 197 | } 198 | 199 | public String getError_msg() { 200 | return this.error_msg; 201 | } 202 | 203 | public TStatus setError_msg(String error_msg) { 204 | this.error_msg = error_msg; 205 | return this; 206 | } 207 | 208 | public void unsetError_msg() { 209 | this.error_msg = null; 210 | } 211 | 212 | /** Returns true if field error_msg is set (has been assigned a value) and false otherwise */ 213 | public boolean isSetError_msg() { 214 | return this.error_msg != null; 215 | } 216 | 217 | public void setError_msgIsSet(boolean value) { 218 | if (!value) { 219 | this.error_msg = null; 220 | } 221 | } 222 | 223 | public void setFieldValue(_Fields field, Object value) { 224 | switch (field) { 225 | case STATUS: 226 | if (value == null) { 227 | unsetStatus(); 228 | } else { 229 | setStatus((TErrorCode)value); 230 | } 231 | break; 232 | 233 | case ERROR_MSG: 234 | if (value == null) { 235 | unsetError_msg(); 236 | } else { 237 | setError_msg((String)value); 238 | } 239 | break; 240 | 241 | } 242 | } 243 | 244 | public Object getFieldValue(_Fields field) { 245 | switch (field) { 246 | case STATUS: 247 | return getStatus(); 248 | 249 | case ERROR_MSG: 250 | return getError_msg(); 251 | 252 | } 253 | throw new IllegalStateException(); 254 | } 255 | 256 | /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ 257 | public boolean isSet(_Fields field) { 258 | if (field == null) { 259 | throw new IllegalArgumentException(); 260 | } 261 | 262 | switch (field) { 263 | case STATUS: 264 | return isSetStatus(); 265 | case ERROR_MSG: 266 | return isSetError_msg(); 267 | } 268 | throw new IllegalStateException(); 269 | } 270 | 271 | @Override 272 | public boolean equals(Object that) { 273 | if (that == null) 274 | return false; 275 | if (that instanceof TStatus) 276 | return this.equals((TStatus)that); 277 | return false; 278 | } 279 | 280 | public boolean equals(TStatus that) { 281 | if (that == null) 282 | return false; 283 | 284 | boolean this_present_status = true && this.isSetStatus(); 285 | boolean that_present_status = true && that.isSetStatus(); 286 | if (this_present_status || that_present_status) { 287 | if (!(this_present_status && that_present_status)) 288 | return false; 289 | if (!this.status.equals(that.status)) 290 | return false; 291 | } 292 | 293 | boolean this_present_error_msg = true && this.isSetError_msg(); 294 | boolean that_present_error_msg = true && that.isSetError_msg(); 295 | if 
(this_present_error_msg || that_present_error_msg) { 296 | if (!(this_present_error_msg && that_present_error_msg)) 297 | return false; 298 | if (!this.error_msg.equals(that.error_msg)) 299 | return false; 300 | } 301 | 302 | return true; 303 | } 304 | 305 | @Override 306 | public int hashCode() { 307 | List<Object> list = new ArrayList<Object>(); 308 | 309 | boolean present_status = true && (isSetStatus()); 310 | list.add(present_status); 311 | if (present_status) 312 | list.add(status.getValue()); 313 | 314 | boolean present_error_msg = true && (isSetError_msg()); 315 | list.add(present_error_msg); 316 | if (present_error_msg) 317 | list.add(error_msg); 318 | 319 | return list.hashCode(); 320 | } 321 | 322 | @Override 323 | public int compareTo(TStatus other) { 324 | if (!getClass().equals(other.getClass())) { 325 | return getClass().getName().compareTo(other.getClass().getName()); 326 | } 327 | 328 | int lastComparison = 0; 329 | 330 | lastComparison = Boolean.valueOf(isSetStatus()).compareTo(other.isSetStatus()); 331 | if (lastComparison != 0) { 332 | return lastComparison; 333 | } 334 | if (isSetStatus()) { 335 | lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.status, other.status); 336 | if (lastComparison != 0) { 337 | return lastComparison; 338 | } 339 | } 340 | lastComparison = Boolean.valueOf(isSetError_msg()).compareTo(other.isSetError_msg()); 341 | if (lastComparison != 0) { 342 | return lastComparison; 343 | } 344 | if (isSetError_msg()) { 345 | lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.error_msg, other.error_msg); 346 | if (lastComparison != 0) { 347 | return lastComparison; 348 | } 349 | } 350 | return 0; 351 | } 352 | 353 | public _Fields fieldForId(int fieldId) { 354 | return _Fields.findByThriftId(fieldId); 355 | } 356 | 357 | public void read(org.apache.thrift.protocol.TProtocol iprot) throws TException { 358 | schemes.get(iprot.getScheme()).getScheme().read(iprot, this); 359 | } 360 | 361 | public void write(org.apache.thrift.protocol.TProtocol oprot) throws TException { 362 | schemes.get(oprot.getScheme()).getScheme().write(oprot, this); 363 | } 364 | 365 | @Override 366 | public String toString() { 367 | StringBuilder sb = new StringBuilder("TStatus("); 368 | boolean first = true; 369 | 370 | sb.append("status:"); 371 | if (this.status == null) { 372 | sb.append("null"); 373 | } else { 374 | sb.append(this.status); 375 | } 376 | first = false; 377 | if (isSetError_msg()) { 378 | if (!first) sb.append(", "); 379 | sb.append("error_msg:"); 380 | if (this.error_msg == null) { 381 | sb.append("null"); 382 | } else { 383 | sb.append(this.error_msg); 384 | } 385 | first = false; 386 | } 387 | sb.append(")"); 388 | return sb.toString(); 389 | } 390 | 391 | public void validate() throws TException { 392 | // check for required fields 393 | if (status == null) { 394 | throw new TProtocolException("Required field 'status' was not present!
Struct: " + toString()); 395 | } 396 | // check for sub-struct validity 397 | } 398 | 399 | private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { 400 | try { 401 | write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); 402 | } catch (TException te) { 403 | throw new java.io.IOException(te); 404 | } 405 | } 406 | 407 | private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { 408 | try { 409 | read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); 410 | } catch (TException te) { 411 | throw new java.io.IOException(te); 412 | } 413 | } 414 | 415 | private static class TStatusStandardSchemeFactory implements SchemeFactory { 416 | public TStatusStandardScheme getScheme() { 417 | return new TStatusStandardScheme(); 418 | } 419 | } 420 | 421 | private static class TStatusStandardScheme extends StandardScheme { 422 | 423 | public void read(org.apache.thrift.protocol.TProtocol iprot, TStatus struct) throws TException { 424 | org.apache.thrift.protocol.TField schemeField; 425 | iprot.readStructBegin(); 426 | while (true) 427 | { 428 | schemeField = iprot.readFieldBegin(); 429 | if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 430 | break; 431 | } 432 | switch (schemeField.id) { 433 | case 1: // STATUS 434 | if (schemeField.type == org.apache.thrift.protocol.TType.I32) { 435 | struct.status = TErrorCode.findByValue(iprot.readI32()); 436 | struct.setStatusIsSet(true); 437 | } else { 438 | org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); 439 | } 440 | break; 441 | case 2: // ERROR_MSG 442 | if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { 443 | struct.error_msg = iprot.readString(); 444 | struct.setError_msgIsSet(true); 445 | } else { 446 | org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); 447 | } 448 | break; 449 | default: 450 | org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); 451 | } 452 | iprot.readFieldEnd(); 453 | } 454 | iprot.readStructEnd(); 455 | 456 | // check for required fields of primitive type, which can't be checked in the validate method 457 | struct.validate(); 458 | } 459 | 460 | public void write(org.apache.thrift.protocol.TProtocol oprot, TStatus struct) throws TException { 461 | struct.validate(); 462 | 463 | oprot.writeStructBegin(STRUCT_DESC); 464 | if (struct.status != null) { 465 | oprot.writeFieldBegin(STATUS_FIELD_DESC); 466 | oprot.writeI32(struct.status.getValue()); 467 | oprot.writeFieldEnd(); 468 | } 469 | if (struct.error_msg != null) { 470 | if (struct.isSetError_msg()) { 471 | oprot.writeFieldBegin(ERROR_MSG_FIELD_DESC); 472 | oprot.writeString(struct.error_msg); 473 | oprot.writeFieldEnd(); 474 | } 475 | } 476 | oprot.writeFieldStop(); 477 | oprot.writeStructEnd(); 478 | } 479 | 480 | } 481 | 482 | private static class TStatusTupleSchemeFactory implements SchemeFactory { 483 | public TStatusTupleScheme getScheme() { 484 | return new TStatusTupleScheme(); 485 | } 486 | } 487 | 488 | private static class TStatusTupleScheme extends TupleScheme { 489 | 490 | @Override 491 | public void write(org.apache.thrift.protocol.TProtocol prot, TStatus struct) throws TException { 492 | TTupleProtocol oprot = (TTupleProtocol) prot; 493 | oprot.writeI32(struct.status.getValue()); 494 | BitSet optionals = new BitSet(); 495 | if (struct.isSetError_msg()) { 496 | optionals.set(0); 497 | } 498 | 
oprot.writeBitSet(optionals, 1); 499 | if (struct.isSetError_msg()) { 500 | oprot.writeString(struct.error_msg); 501 | } 502 | } 503 | 504 | @Override 505 | public void read(org.apache.thrift.protocol.TProtocol prot, TStatus struct) throws TException { 506 | TTupleProtocol iprot = (TTupleProtocol) prot; 507 | struct.status = TErrorCode.findByValue(iprot.readI32()); 508 | struct.setStatusIsSet(true); 509 | BitSet incoming = iprot.readBitSet(1); 510 | if (incoming.get(0)) { 511 | struct.error_msg = iprot.readString(); 512 | struct.setError_msgIsSet(true); 513 | } 514 | } 515 | } 516 | 517 | } 518 | 519 | -------------------------------------------------------------------------------- /src/main/java/org/apache/ranger/binding/metastore/thrift/TTableType.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Autogenerated by Thrift Compiler (0.9.3) 3 | * 4 | * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | * @generated 6 | */ 7 | package org.apache.ranger.binding.metastore.thrift; 8 | 9 | 10 | import java.util.Map; 11 | import java.util.HashMap; 12 | import org.apache.thrift.TEnum; 13 | 14 | public enum TTableType implements TEnum { 15 | TABLE(0), 16 | VIEW(1); 17 | 18 | private final int value; 19 | 20 | private TTableType(int value) { 21 | this.value = value; 22 | } 23 | 24 | /** 25 | * Get the integer value of this enum value, as defined in the Thrift IDL. 26 | */ 27 | public int getValue() { 28 | return value; 29 | } 30 | 31 | /** 32 | * Find the enum type by its integer value, as defined in the Thrift IDL. 33 | * @return null if the value is not found. 34 | */ 35 | public static TTableType findByValue(int value) { 36 | switch (value) { 37 | case 0: 38 | return TABLE; 39 | case 1: 40 | return VIEW; 41 | default: 42 | return null; 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/main/resources/MammutMapper.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8" ?> 2 | <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd"> 4 | <mapper namespace="com.netease.hivetools.mappers.MammutMapper"> 5 | <!-- SQL statement elements were stripped when this dump was rendered as HTML and are not recoverable --> 10 | </mapper> -------------------------------------------------------------------------------- /src/main/resources/hive-tools.properties: -------------------------------------------------------------------------------- 1 | # hive_merge.jdbc.url=jdbc:mysql://ip:port/database?useUnicode=true&characterEncoding=UTF-8 2 | # CHANGE_ON_MERGED 3 | # cd /Users/apple/Google 云端硬盘/Notes/统一HIVE集群/ 4 | # scp -i ~/.ssh/id_rsa -P 1046 hive-tools.properties hzliuxun@10.164.96.25:/home/hzliuxun/hive-tools/ 5 | 6 | ########################################### mammut ################################################################ 7 | # mammut 8 | mammut.jdbc.driverClassName=com.mysql.jdbc.Driver 9 | mammut.jdbc.url=jdbc:mysql://10.120.232.16:3306/mengma_clone?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 10 | mammut.jdbc.username=mengma_clone 11 | mammut.jdbc.password=mengma_clone 12 | 13 | ########################################### test hive cluster mysql ################################################################ 14 | # hadoop357 15 | hadoop357.jdbc.driverClassName=com.mysql.jdbc.Driver 16 | hadoop357.jdbc.url=jdbc:mysql://10.120.232.16:3306/hive_haitao?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 17 | hadoop357.jdbc.username=hive_haitao 18 | hadoop357.jdbc.password=hive_haitao 19 | 20 | # cluster1_test 21 | cluster1_test.jdbc.driverClassName=com.mysql.jdbc.Driver 22 |
cluster1_test.jdbc.url=jdbc:mysql://10.120.232.16:3306/cluster1_test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 23 | cluster1_test.jdbc.username=cluster1_test 24 | cluster1_test.jdbc.password=cluster1_test 25 | 26 | # cluster2_test 27 | cluster2_test.jdbc.driverClassName=com.mysql.jdbc.Driver 28 | cluster2_test.jdbc.url=jdbc:mysql://10.120.232.16:3306/cluster2_test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 29 | cluster2_test.jdbc.username=cluster2_test 30 | cluster2_test.jdbc.password=cluster2_test 31 | 32 | # cluster3_test 33 | cluster3_test.jdbc.driverClassName=com.mysql.jdbc.Driver 34 | cluster3_test.jdbc.url=jdbc:mysql://10.120.232.16:3306/cluster3_test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 35 | cluster3_test.jdbc.username=cluster3_test 36 | cluster3_test.jdbc.password=cluster3_test 37 | 38 | # cluster4_test 39 | cluster4_test.jdbc.driverClassName=com.mysql.jdbc.Driver 40 | cluster4_test.jdbc.url=jdbc:mysql://10.120.232.16:3306/cluster4_test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 41 | cluster4_test.jdbc.username=cluster4_test 42 | cluster4_test.jdbc.password=cluster4_test 43 | 44 | # cluster5_test 45 | cluster5_test.jdbc.driverClassName=com.mysql.jdbc.Driver 46 | cluster5_test.jdbc.url=jdbc:mysql://10.120.232.16:3306/cluster5_test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 47 | cluster5_test.jdbc.username=cluster5_test 48 | cluster5_test.jdbc.password=cluster5_test 49 | 50 | ########################################### online hive cluster mysql ################################################################ 51 | # online_cluster1 52 | online_cluster1.jdbc.driverClassName=com.mysql.jdbc.Driver 53 | online_cluster1.jdbc.url=jdbc:mysql://10.172.121.126:3306/hivecluster1?useUnicode=true&characterEncoding=utf8&autoReconnect=true&allowMultiQueries=true 54 | online_cluster1.jdbc.username=hivecluster1 55 | online_cluster1.jdbc.password=SIpFGv2KgChQ 56 | 57 | # online_cluster2 58 | online_cluster2.jdbc.driverClassName=com.mysql.jdbc.Driver 59 | online_cluster2.jdbc.url=jdbc:mysql://10.172.121.127:3306/hive_cluster2?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 60 | online_cluster2.jdbc.username=hive_cluster2 61 | online_cluster2.jdbc.password=^wXXw@Rr_ 62 | 63 | # online_cluster3 64 | online_cluster3.jdbc.driverClassName=com.mysql.jdbc.Driver 65 | online_cluster3.jdbc.url=jdbc:mysql://10.172.121.127:3306/hive_cluster3?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 66 | online_cluster3.jdbc.username=hive_cluster3 67 | online_cluster3.jdbc.password=rAbs{{NLq 68 | 69 | # online_cluster4 70 | online_cluster4.jdbc.driverClassName=com.mysql.jdbc.Driver 71 | online_cluster4.jdbc.url=jdbc:mysql://10.172.121.131:3306/hivecluster4?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 72 | online_cluster4.jdbc.username=hivecluster4 73 | online_cluster4.jdbc.password=ydvcrn0gd9ZW 74 | 75 | # online_cluster5 76 | online_cluster5.jdbc.driverClassName=com.mysql.jdbc.Driver 77 | online_cluster5.jdbc.url=jdbc:mysql://10.172.121.132:3306/hivecluster5?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 78 | online_cluster5.jdbc.username=hivecluster5 79 | online_cluster5.jdbc.password=wHQ0eBFmJaJJ 80 | 81 | ############################################### hdfs://hz-cluster1 ################################################ 82 | # haitao 83 | HAITAO.jdbc.driverClassName=com.mysql.jdbc.Driver 84 | 
HAITAO.jdbc.url=jdbc:mysql://10.164.157.64:3306/dmp?useUnicode=true&characterEncoding=utf8&autoReconnect=true&allowMultiQueries=true 85 | HAITAO.jdbc.username=hive 86 | HAITAO.jdbc.password=)dC]jQsVI 87 | 88 | # haitao_da_hive 89 | haitao_da_hive.jdbc.driverClassName=com.mysql.jdbc.Driver 90 | haitao_da_hive.jdbc.url=jdbc:mysql://10.120.147.125:3306/hive_121?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 91 | haitao_da_hive.jdbc.username=hive 92 | haitao_da_hive.jdbc.password=hive 93 | 94 | # manmut_sandbox 95 | manmut_sandbox.jdbc.driverClassName=com.mysql.jdbc.Driver 96 | manmut_sandbox.jdbc.url=jdbc:mysql://10.172.121.121:3306/hive_sandbox?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 97 | manmut_sandbox.jdbc.username=hive 98 | manmut_sandbox.jdbc.password=loxwA5hORm4 99 | 100 | ############################################### hdfs://hz-cluster2 ############################################### 101 | # mammut_hive_xs 102 | mammut_hive_xs.jdbc.driverClassName=com.mysql.jdbc.Driver 103 | mammut_hive_xs.jdbc.url=jdbc:mysql://10.122.191.9:3306/hive_test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 104 | mammut_hive_xs.jdbc.username=hive 105 | mammut_hive_xs.jdbc.password=hive 106 | 107 | # music_hive_xs 108 | music_hive_xs.jdbc.driverClassName=com.mysql.jdbc.Driver 109 | music_hive_xs.jdbc.url=jdbc:mysql://10.120.147.125:3306/hive_music?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 110 | music_hive_xs.jdbc.username=hive_music 111 | music_hive_xs.jdbc.password=Y4/EhCZ2B 112 | 113 | ############################################### hdfs://hz-cluster3 ############################################### 114 | # da_edu 115 | da_edu.jdbc.driverClassName=com.mysql.jdbc.Driver 116 | da_edu.jdbc.url=jdbc:mysql://10.122.136.253:3306/hivemetastore?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 117 | da_edu.jdbc.username=hivemetastore 118 | da_edu.jdbc.password=M]AKd^.Bx 119 | 120 | # epay 121 | epay.jdbc.driverClassName=com.mysql.jdbc.Driver 122 | epay.jdbc.url=jdbc:mysql://10.122.191.12:3306/hive_epay?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true 123 | epay.jdbc.username=hive_epay 124 | epay.jdbc.password=CgPdFBrTB6NK 125 | 126 | ############################################### test ############################################################ 127 | 128 | # hive_merge 129 | hive_merge.jdbc.driverClassName=com.mysql.jdbc.Driver 130 | hive_merge.jdbc.url=jdbc:mysql://10.120.232.16:3306/hive_merge?useUnicode=true&characterEncoding=UTF-8 131 | hive_merge.jdbc.username=hive_merge 132 | hive_merge.jdbc.password=hive_merge 133 | 134 | # hive_music 135 | hive_music.jdbc.driverClassName=com.mysql.jdbc.Driver 136 | hive_music.jdbc.url=jdbc:mysql://10.120.232.16:3306/hive_music?useUnicode=true&characterEncoding=UTF-8 137 | hive_music.jdbc.username=hive_music 138 | hive_music.jdbc.password=hive_music 139 | 140 | # hive_haitao 141 | hive_haitao.jdbc.driverClassName=com.mysql.jdbc.Driver 142 | hive_haitao.jdbc.url=jdbc:mysql://10.120.232.16:3306/hive_haitao?useUnicode=true&characterEncoding=UTF-8 143 | hive_haitao.jdbc.username=hive_haitao 144 | hive_haitao.jdbc.password=hive_haitao 145 | 146 | # hive_edu 147 | hive_edu.jdbc.driverClassName=com.mysql.jdbc.Driver 148 | hive_edu.jdbc.url=jdbc:mysql://10.120.232.16:3306/hive_edu?useUnicode=true&characterEncoding=UTF-8 149 | hive_edu.jdbc.username=hive_edu 150 | hive_edu.jdbc.password=hive_edu 151 | 152 | # hive_epay 153 | hive_epay.jdbc.driverClassName=com.mysql.jdbc.Driver 154 
| hive_epay.jdbc.url=jdbc:mysql://10.120.232.16:3306/hive_epay?useUnicode=true&characterEncoding=UTF-8 155 | hive_epay.jdbc.username=hive_epay 156 | hive_epay.jdbc.password=hive_epay -------------------------------------------------------------------------------- /src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Define the output pattern 2 | ConversionPattern=%d %-p %l - %m%n 3 | 4 | log4j.rootLogger=INFO,Console,DailyFile 5 | log4j.logger.com.cnblogs.lzrabbit=DEBUG 6 | log4j.logger.org.springframework=ERROR 7 | log4j.logger.org.mybatis=ERROR 8 | log4j.logger.org.apache.ibatis=ERROR 9 | log4j.logger.org.quartz=ERROR 10 | log4j.logger.org.apache.axis2=ERROR 11 | log4j.logger.org.apache.axiom=ERROR 12 | log4j.logger.org.apache=ERROR 13 | log4j.logger.httpclient=ERROR 14 | #log4j.additivity.org.springframework=false 15 | #Console 16 | log4j.appender.Console=org.apache.log4j.ConsoleAppender 17 | log4j.appender.Console.Threshold=DEBUG 18 | log4j.appender.Console.Target=System.out 19 | log4j.appender.Console.layout=org.apache.log4j.PatternLayout 20 | log4j.appender.Console.layout.ConversionPattern=${ConversionPattern} 21 | #log4j.appender.Console.encoding=UTF-8 22 | 23 | #org.apache.log4j.DailyRollingFileAppender 24 | log4j.appender.DailyFile=org.apache.log4j.DailyRollingFileAppender 25 | log4j.appender.DailyFile.DatePattern=.yyyy-MM-dd 26 | log4j.appender.DailyFile.File=logs/hive-tools.log 27 | log4j.appender.DailyFile.Append=true 28 | log4j.appender.DailyFile.Threshold=DEBUG 29 | log4j.appender.DailyFile.layout=org.apache.log4j.PatternLayout 30 | log4j.appender.DailyFile.layout.ConversionPattern=${ConversionPattern} 31 | log4j.appender.DailyFile.encoding=UTF-8 32 | 33 | # %c prints the fully qualified class name of the logging caller 34 | # %d prints the date/time of the log event; the default format is ISO8601, and a pattern may be appended, e.g. %d{yyyy-MM-dd HH:mm:ss} prints something like 2002-10-18 22:10:28 35 | # %f prints the simple class name of the logging caller 36 | # %l prints the location of the log event, i.e. the line within the class at which the logging statement appears 37 | # %m prints the message supplied in the code, e.g. the message in log(message) 38 | # %n prints a line separator: "\r\n" on Windows, "\n" on Unix 39 | # %p prints the priority, i.e. DEBUG, INFO, WARN, ERROR or FATAL; output produced via debug() is tagged DEBUG, and so on 40 | # %r prints the number of milliseconds elapsed from application start to this log statement 41 | # %t prints the name of the thread that generated the log event -------------------------------------------------------------------------------- /src/main/resources/mybatis-config.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8" ?> 2 | <!DOCTYPE configuration PUBLIC "-//mybatis.org//DTD Config 3.0//EN" "http://mybatis.org/dtd/mybatis-3-config.dtd"> 5 | <configuration> 6 | <!-- settings, environments, and mapper registrations were stripped when this dump was rendered as HTML and are not recoverable --> 25 | </configuration> -------------------------------------------------------------------------------- /src/main/thrift/MetastoreUpdater.thrift: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License.
18 | */ 19 | namespace java org.apache.ranger.binding.metastore.thrift 20 | 21 | enum MetaStoreUpdateServiceVersion { 22 | V1 23 | } 24 | 25 | enum TErrorCode { 26 | OK = 0, 27 | ERROR = 1, 28 | INVALID = 2 29 | } 30 | 31 | enum TTableType { 32 | TABLE = 0, 33 | VIEW = 1 34 | } 35 | 36 | struct TStatus { 37 | // Error code 38 | 1: required TErrorCode status; 39 | 40 | // Error message 41 | 2: optional string error_msg; 42 | } 43 | 44 | enum TOperation { 45 | CREATE_DATABASE = 0, 46 | DROP_DATABASE = 1, 47 | CREATE_TABLE = 2, 48 | DROP_TABLE = 3, 49 | ALTER_TABLE = 4, 50 | REMAME_TABLE = 5, 51 | ADD_PARTITION = 6, 52 | DROP_PARTITION = 7, 53 | ALTER_PARTITION = 8, 54 | RENAME_PARTITION = 9, 55 | INIT_ID = 10 56 | } 57 | 58 | // Set of changes sent from the metastore to a subscriber. 59 | struct TUpdateDelta { 60 | // global increment id 61 | 1: required i64 id; 62 | 63 | // database name 64 | 2: required string database; 65 | 66 | // table name 67 | 3: required string table; 68 | 69 | // operation type 70 | 4: required TOperation operation; 71 | 72 | // table type, table or view 73 | 5: optional TTableType type; 74 | 75 | // partition name 76 | 6: optional string partition; 77 | 78 | // new name, for rename operations 79 | 7: optional string new_name; 80 | } 81 | 82 | struct TUpdateMetadataRequest { 83 | 1: required MetaStoreUpdateServiceVersion protocol_version = 84 | MetaStoreUpdateServiceVersion.V1; 85 | 86 | // client hostname 87 | 2: required string hostname; 88 | 89 | // List of metadata change deltas. 90 | 3: required list<TUpdateDelta> deltas; 91 | } 92 | 93 | struct TUpdateMetadataResponse { 94 | 1: required MetaStoreUpdateServiceVersion protocol_version = 95 | MetaStoreUpdateServiceVersion.V1; 96 | // Whether the call was executed correctly at the application level 97 | 2: required TStatus status; 98 | } 99 | 100 | service MetaStoreUpdateService { 101 | // Called when the metastore sends a metadata update. The request contains a list of 102 | // update objects, sent from the metastore to the subscriber. 103 | TUpdateMetadataResponse updateMetadata(1: TUpdateMetadataRequest params); 104 | } 105 | 106 | struct TFetchUpdatesRequest { 107 | 1: required MetaStoreUpdateServiceVersion protocol_version = 108 | MetaStoreUpdateServiceVersion.V1; 109 | // start version 110 | 2: required i64 start_version; 111 | // end version 112 | 3: required i64 end_version; 113 | // server name 114 | 4: required string server_name; 115 | } 116 | 117 | struct TFetchUpdatesResponse { 118 | 1: required MetaStoreUpdateServiceVersion protocol_version = 119 | MetaStoreUpdateServiceVersion.V1; 120 | 121 | // List of metadata change deltas. 122 | 2: required list<TUpdateDelta> deltas; 123 | } 124 | 125 | service MetaStoreHistoryService { 126 | // Called when a client needs to fetch update history. 127 | TFetchUpdatesResponse fetchUpdates(1: TFetchUpdatesRequest params); 128 | } --------------------------------------------------------------------------------
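
The `MetaStoreUpdateService` definition above is the push path: the metastore sends a `TUpdateMetadataRequest` carrying a list of `TUpdateDelta` entries, and the subscriber replies with a `TUpdateMetadataResponse` whose `TStatus` reports success or failure at the application level. The following is a minimal caller-side sketch using the generated Thrift 0.9.3 classes shown earlier; the endpoint `localhost:9090`, the hostname string, and the sample delta values are illustrative assumptions, not part of this repository.

```java
import java.util.Collections;

import org.apache.ranger.binding.metastore.thrift.*;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class UpdateMetadataExample {
  public static void main(String[] args) throws Exception {
    // Assumed endpoint: some subscriber implementing MetaStoreUpdateService.
    TTransport transport = new TSocket("localhost", 9090);
    transport.open();
    try {
      MetaStoreUpdateService.Client client =
          new MetaStoreUpdateService.Client(new TBinaryProtocol(transport));

      // One delta describing "CREATE TABLE db1.t1"; id is the global increment id.
      TUpdateDelta delta = new TUpdateDelta()
          .setId(1L)
          .setDatabase("db1")
          .setTable("t1")
          .setOperation(TOperation.CREATE_TABLE);

      // protocol_version defaults to MetaStoreUpdateServiceVersion.V1 in the
      // generated no-arg constructor, so only the other required fields are set.
      TUpdateMetadataRequest request = new TUpdateMetadataRequest()
          .setHostname("metastore-host")  // illustrative hostname
          .setDeltas(Collections.singletonList(delta));

      TUpdateMetadataResponse response = client.updateMetadata(request);
      TStatus status = response.getStatus();
      if (status.getStatus() != TErrorCode.OK) {
        System.err.println("updateMetadata failed: " + status.getError_msg());
      }
    } finally {
      transport.close();
    }
  }
}
```

The `MetaStoreHistoryService` pull path is symmetric: a client builds a `TFetchUpdatesRequest` with a `start_version`/`end_version` range and a `server_name`, calls `fetchUpdates`, and replays the returned deltas in increasing `id` order.

Separately, the data-source entries in `hive-tools.properties` above follow a `<name>.jdbc.*` convention (`driverClassName`, `url`, `username`, `password`), with one prefix per source or destination metastore. A small sketch of resolving one named entry is below; the class and method names are hypothetical, not taken from this repository, and it assumes the named prefix exists in the file.

```java
import java.io.InputStream;
import java.util.Properties;

public class DataSourceProps {
  // Resolve the four "<name>.jdbc.*" settings for one named metastore,
  // e.g. forName("hive_merge") or forName("online_cluster1").
  public static Properties forName(String name) throws Exception {
    Properties all = new Properties();
    try (InputStream in =
             DataSourceProps.class.getResourceAsStream("/hive-tools.properties")) {
      all.load(in);
    }
    Properties ds = new Properties();
    ds.setProperty("driverClassName", all.getProperty(name + ".jdbc.driverClassName"));
    ds.setProperty("url",             all.getProperty(name + ".jdbc.url"));
    ds.setProperty("username",        all.getProperty(name + ".jdbc.username"));
    ds.setProperty("password",        all.getProperty(name + ".jdbc.password"));
    return ds;
  }
}
```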