├── .gitignore ├── README.md ├── pom.xml ├── screenshots ├── all_secrets.png ├── config_secrets.png ├── hive_query.png ├── login.png └── user_config.png └── src ├── main ├── assembly │ ├── bin │ │ ├── restart.sh │ │ ├── startup.sh │ │ └── stop.sh │ └── conf │ │ └── application.properties ├── java │ └── com │ │ └── prophet │ │ ├── Application.java │ │ ├── common │ │ ├── Encryptor.java │ │ ├── HQLParser.java │ │ ├── QueryHistoryStatusEnum.java │ │ └── ThreadPool.java │ │ ├── config │ │ ├── HiveDataSourceConfig.java │ │ ├── HiveResultTextConfig.java │ │ └── MySQLDataSourceConfig.java │ │ ├── dao │ │ ├── AdminDao.java │ │ ├── EmailUtil.java │ │ ├── HiveMetaStoreDao.java │ │ ├── HiveSecretTableDao.java │ │ ├── HiveSecretUserPrivsDao.java │ │ ├── HiveServerDao.java │ │ ├── QueryHistoryDao.java │ │ ├── UserAuthLdapDao.java │ │ ├── UserAuthProphetDao.java │ │ └── task │ │ │ ├── HiveQueryAsyncTask.java │ │ │ └── HiveResultSendmailRunnableTask.java │ │ ├── domain │ │ ├── HiveSecretTable.java │ │ └── QueryHistory.java │ │ ├── filter │ │ └── LoginFilter.java │ │ ├── interfaces │ │ └── UserAuthDaoInterface.java │ │ ├── service │ │ ├── BaseService.java │ │ ├── HiveMetaStoreService.java │ │ ├── HiveSecretDataService.java │ │ ├── HiveServerService.java │ │ ├── QueryHistoryService.java │ │ └── UserAuthService.java │ │ ├── util │ │ └── DateTimeUtil.java │ │ └── web │ │ ├── BaseController.java │ │ ├── HiveQueryController.java │ │ ├── HiveSecretDataController.java │ │ ├── UserController.java │ │ └── postparameters │ │ └── HiveQueryCommand.java ├── resources │ ├── application.properties │ ├── application.properties.github │ ├── assembly.xml │ └── logging.xml └── webapp │ └── WEB-INF │ └── web.xml └── test └── java ├── junit └── EmailTest01.java └── prophet ├── HQLParserTest01.java └── HiveTest01.java /.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | .classpath 3 | .project 4 | .settings/ 5 | target/ 6 | .DS_Store 7 | 
**/cobertura 8 | logs/ 9 | tmp/ 10 | data/ 11 | **/.springBeans 12 | .idea 13 | *.iml 14 | *.jar 15 | /.swp 16 | .git 17 | src/main/resources/application.properties.* 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Prophet是什么? 2 | * 一个优秀的大数据查询平台,提供hive异步任务查询、LDAP用户、表级查询权限控制、历史查询任务与结果存储、邮件通知、excel下载等功能。 3 | * 具有查询性能快、查询方便的特点 4 | 5 | ## 开发环境 6 | * java 8 7 | * springboot 8 | * VUE + iview 9 | 10 | ## 准备工作 11 | * 搭建hadoop集群、hive集群(强烈推荐hive-server2-2.x版本)、metastore 12 | * 搭建prophet会用到的mysql,推荐mysql 5.6及以上版本 13 | 14 | ## 安装步骤 15 | * 1.安装jdk,强烈推荐使用jdk 1.8 16 | * 安装jdk 17 | * 修改PATH 18 | * 2.下载文件(非编译安装) 19 | * 二进制文件下载地址:https://pan.baidu.com/s/1eSgx4fo 20 | * 或者下载ZIP包并解压,解压后会看到prophet_server、prophet_fe、prophet_sql三个目录 21 | * 3.后端服务部署 22 | * prophet_sql目录:连接到prophet会用到的mysql里source prophet.sql这个文件将库表建好 23 | * prophet_server目录:后端服务,请部署在后端服务器适当目录下 24 | * 修改主配置文件:prophet_server/conf/application.properties 25 | * 启动服务:./bin/startup.sh 26 | * 检查日志:./logs/prophet.log 27 | * 4.前端服务部署 28 | * prophet_fe目录:前端页面,请部署在nginx服务器或某个web服务器目录下例如/static/prophet_fe/,并参照下一步nginx配置 29 | * 5.前端服务nginx配置 30 | ```javascript 31 | upstream prophet{ 32 | ip_hash; 33 | server 192.168.1.11:8090; 34 | #server 192.168.1.12:8090; 35 | } 36 | 37 | server { 38 | listen 80; 39 | server_name prophet.xxx.com; 40 | 41 | gzip on; 42 | gzip_min_length 1k; 43 | gzip_proxied expired no-cache no-store private auth; 44 | gzip_types text/plain text/css application/xml application/json application/javascript application/xhtml+xml; 45 | 46 | client_max_body_size 300M; 47 | index index.php index.html index.htm; 48 | 49 | access_log /log/nginx/prophet.access.log main; 50 | error_log /log/nginx/prophet.error.log; 51 | 52 | location ~ \.json$ { 53 | proxy_pass http://prophet; 54 | proxy_set_header Host $host; 55 | proxy_set_header X-Real-IP $remote_addr; 56 | proxy_set_header 
X-Forwarded-For $proxy_add_x_forwarded_for; 57 | client_max_body_size 200m; 58 | client_body_buffer_size 128k; 59 | proxy_connect_timeout 86400; 60 | #因为后端hive任务执行时间较长,因此该项应该设置无限大,单位秒 61 | proxy_read_timeout 259200; 62 | proxy_buffer_size 4k; 63 | } 64 | 65 | location / { 66 | root "/static/prophet_fe/"; 67 | } 68 | } 69 | ``` 70 | 配置完重启nginx即可 71 | * 6.配置域名解析prophet.xxx.com到该nginx所在ip 72 | * 7.打开浏览器,访问http://prophet.xxx.com/,输入用户名和密码进行登录。 73 | * 如果配置了LDAP:则填写LDAP账号,prophet内置用户系统不生效。 74 | * 如果配置了prophet内置用户系统:则默认初始化管理账号为admin1,密码为admin1 75 | * 8.开始使用吧! 76 | 77 | ## 系统截图 78 | * 1.登录页面 79 | * ![image](https://github.com/jly8866/prophet/raw/master/screenshots/login.png) 80 | * 2.主查询界面 81 | * ![image](https://github.com/jly8866/prophet/raw/master/screenshots/hive_query.png) 82 | * 3.所有机密表展示 83 | * ![image](https://github.com/jly8866/prophet/raw/master/screenshots/all_secrets.png) 84 | * 4.标记哪些表成为机密表 85 | * ![image](https://github.com/jly8866/prophet/raw/master/screenshots/config_secrets.png) 86 | * 5.内置用户系统管理 87 | * ![image](https://github.com/jly8866/prophet/raw/master/screenshots/user_config.png) 88 | 89 | ## 性能调优 90 | * prophet JVM能容纳的最大并发线程数NThreads = CPU核心数 * 总CPU利用率 * (1 + CPU等待时间/CPU处理时间) 91 | * 如果一个任务CPU处理时间为100ms,99ms是IO等待时间,系统8核心,CPU利用率50%,则NThreads = 8 * 50% * (1 + 99/100) = 7.96 ~ 8 92 | * 该指标可用于估算单进程prophet最大可运行的并发任务数 93 | * 如果指标不够则需要扩容 94 | 95 | ## 联系方式: 96 | QQ群:669833720 97 | 98 | 加群请注明来历 99 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4 | org.springframework.boot 5 | spring-boot-starter-parent 6 | 1.3.3.RELEASE 7 | 8 | 4.0.0 9 | com.prophet 10 | prophet 11 | jar 12 | 1.0 13 | prophet Maven Webapp 14 | http://maven.apache.org 15 | 16 | 1.8 17 | 18 | 19 | 20 | junit 21 | junit 22 | test 23 | 24 | 25 | org.springframework.boot 26 | spring-boot-starter-test 27 | test 28 | 29 | 30 | jdk.tools 31 | jdk.tools 32 | 1.8 33 | 
34 | 35 | org.springframework.boot 36 | spring-boot-starter-web 37 | 38 | 39 | org.springframework.boot 40 | spring-boot-starter-jdbc 41 | 42 | 43 | mysql 44 | mysql-connector-java 45 | 46 | 47 | org.apache.tomcat.embed 48 | tomcat-embed-jasper 49 | provided 50 | 51 | 52 | javax.servlet 53 | jstl 54 | 55 | 56 | net.sf.json-lib 57 | json-lib-ext-spring 58 | 1.0.2 59 | 60 | 61 | org.apache.hive 62 | hive-jdbc 63 | 2.3.2 64 | 65 | 66 | org.eclipse.jetty.aggregate 67 | jetty-all 68 | 69 | 70 | org.apache.hive 71 | hive-shims 72 | 73 | 74 | org.slf4j 75 | slf4j-log4j12 76 | 77 | 78 | 79 | 80 | org.apache.hadoop 81 | hadoop-common 82 | 2.5.1 83 | 84 | 85 | org.slf4j 86 | slf4j-log4j12 87 | 88 | 89 | 90 | 91 | commons-io 92 | commons-io 93 | 2.4 94 | 95 | 96 | org.springframework.boot 97 | spring-boot-starter-mail 98 | 99 | 100 | org.apache.hive 101 | hive-exec 102 | 2.3.2 103 | 104 | 105 | org.slf4j 106 | slf4j-log4j12 107 | 108 | 109 | 110 | 111 | org.pentaho 112 | pentaho-aggdesigner-algorithm 113 | 5.1.5 114 | 115 | 116 | 117 | 118 | 131 | 132 | maven-assembly-plugin 133 | 134 | 135 | src/main/resources/assembly.xml 136 | 137 | 138 | 139 | 140 | make-assembly 141 | package 142 | 143 | single 144 | 145 | 146 | 147 | 148 | 149 | prophet 150 | 151 | 152 | 153 | -------------------------------------------------------------------------------- /screenshots/all_secrets.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jetbinliu/prophet/b80f85ae88d93e5f931dc6d344f78d989161348b/screenshots/all_secrets.png -------------------------------------------------------------------------------- /screenshots/config_secrets.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jetbinliu/prophet/b80f85ae88d93e5f931dc6d344f78d989161348b/screenshots/config_secrets.png -------------------------------------------------------------------------------- 
/screenshots/hive_query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jetbinliu/prophet/b80f85ae88d93e5f931dc6d344f78d989161348b/screenshots/hive_query.png -------------------------------------------------------------------------------- /screenshots/login.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jetbinliu/prophet/b80f85ae88d93e5f931dc6d344f78d989161348b/screenshots/login.png -------------------------------------------------------------------------------- /screenshots/user_config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jetbinliu/prophet/b80f85ae88d93e5f931dc6d344f78d989161348b/screenshots/user_config.png -------------------------------------------------------------------------------- /src/main/assembly/bin/restart.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | PROPHET_HOME="$( dirname "$( cd "$( dirname "$0" )" && pwd ) " )" 4 | 5 | ${PROPHET_HOME}/bin/stop.sh 6 | 7 | ${PROPHET_HOME}/bin/startup.sh 8 | -------------------------------------------------------------------------------- /src/main/assembly/bin/startup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | PROPHET_HOME="$( dirname "$( cd "$( dirname "$0" )" && pwd ) " )" 4 | 5 | export JAVA_HOME=/apps/srv/jdk/bin 6 | 7 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -Xmx1500m" 8 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:+UseG1GC -verbose:gc" 9 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:+PrintGCDetails" 10 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:+PrintGCTimeStamps" 11 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:+PrintGCDateStamps" 12 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -Xloggc:${PROPHET_HOME}/logs/prophet-gc.log" 13 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:+HeapDumpOnOutOfMemoryError" 14 | 
JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:HeapDumpPath=${PROPHET_HOME}/logs/heapdump.hprof" 15 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:+UseGCLogFileRotation" 16 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:GCLogFileSize=128M -XX:NumberOfGCLogFiles=4" 17 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -XX:+HeapDumpOnOutOfMemoryError" 18 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -Dcom.sun.management.jmxremote=true" 19 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -Dcom.sun.management.jmxremote.port=30005" 20 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -Dcom.sun.management.jmxremote.ssl=false" 21 | JAVA_CMD_OPTS="${JAVA_CMD_OPTS} -Dcom.sun.management.jmxremote.authenticate=false" 22 | 23 | MAIN_CLASS="com.prophet.Application" 24 | 25 | pid=`ps -ef |grep "java"|grep "prophet" |grep -v "grep" |awk '{print $2}'` 26 | if [ -f ${PROPHET_HOME}/logs/prophet.pid ];then 27 | echo "Error! Prophet is running and pid is ${pid}, please stop it first." 28 | exit 1 29 | else 30 | #set classpath 31 | for j in ${PROPHET_HOME}/lib/*.jar;do 32 | CLASSPATH=${j}:"${CLASSPATH}" 33 | done 34 | CLASSPATH="${PROPHET_HOME}/conf:${CLASSPATH}" 35 | 36 | #nohup java -jar 37 | nohup ${JAVA_HOME}/java ${JAVA_CMD_OPTS} -classpath .:${CLASSPATH} ${MAIN_CLASS} -Dglobal.config.path=${PROPHET_HOME}/conf/ --spring.config.location=${PROPHET_HOME}/conf/application.properties &>>${PROPHET_HOME}/logs/prophet.log & 38 | 39 | sleep 2 40 | pid=`ps -ef |grep "java"|grep "prophet" |grep -v "grep" |awk '{print $2}'` 41 | if [ ${pid} ];then 42 | echo "Prophet started successfully." 43 | echo ${pid} > ${PROPHET_HOME}/logs/prophet.pid 44 | else 45 | echo "Error! Prophet failed to start... please check the logs." 
#!/bin/bash

# stop.sh -- stop the running Prophet JVM (if any) and clean up the pid file.
PROPHET_HOME="$( dirname "$( cd "$( dirname "$0" )" && pwd ) " )"

pid=`ps -ef |grep "java"|grep "prophet" |grep -v "grep" |awk '{print $2}'`
# -n with quotes: the original unquoted `[ ${pid} ]` breaks when ps returns
# more than one pid (word splitting yields a bad test expression).
if [ -n "${pid}" ];then
	# Test kill's own exit status directly: the original checked $? only
	# after an intervening `sleep 1`, so $? always reflected sleep (success)
	# and the failure branch was unreachable.
	if kill -9 ${pid}; then
		sleep 1
		echo "Prophet stopped successfully."
		rm -f ${PROPHET_HOME}/logs/prophet.pid &>/dev/null
	else
		echo "Error! Prophet failed to stop..."
	fi
else
	echo "Prophet is not running, no need to stop."
fi
exit 0
spring.ds_hive_metastore.test-while-idle=true 22 | 23 | spring.ds_hive_server.url = jdbc:hive2://192.168.1.25:10000/default 24 | spring.ds_hive_server.username = hadoop 25 | spring.ds_hive_server.password = 26 | spring.ds_hive_server.driver-class-name=org.apache.hive.jdbc.HiveDriver 27 | spring.ds_hive_server.type=org.apache.commons.dbcp2.BasicDataSource 28 | spring.ds_hive_server.min-idle=10 29 | spring.ds_hive_server.test-while-idle=true 30 | #########################################datasource########################################### 31 | 32 | #############################################jsp############################################## 33 | spring.mvc.view.prefix=/WEB-INF/jsp/ 34 | spring.mvc.view.suffix=.jsp 35 | #############################################jsp############################################## 36 | 37 | #######################################prophet server######################################### 38 | server.port=8090 39 | #session max alive seconds, default 30 mins 40 | server.session-timeout=2592000 41 | server.tomcat.uri-encoding=UTF-8 42 | server.tomcat.max-threads=800 43 | server.tomcat.basedir=logs/ 44 | server.tomcat.access-log-enabled=true 45 | server.tomcat.accesslog.directory=tomcat_access_logs/ 46 | 47 | #######################################prophet server######################################### 48 | 49 | #######################################logging################################################ 50 | #logging.config=/home/prophet_server/conf/logging.xml 51 | #######################################logging################################################ 52 | 53 | #####################################user auth################################################ 54 | #user authentication system, available values(case insensitive): 1.LDAP 2.prophet 55 | authentication.system=LDAP 56 | 57 | #if choose ldap, then following info is mandatory. Otherwise ignored. 
58 | authentication.ldap.url=ldap://xxx.prophet.com/ 59 | authentication.ldap.base-dn=CN=mycn,OU=Users,DC=prophet,DC=com 60 | authentication.ldap.user-search-dn=OU=myou,DC=prophet,DC=com 61 | authentication.ldap.user-search-column=username 62 | authentication.ldap.factory=com.sun.jndi.ldap.LdapCtxFactory 63 | authentication.ldap.security-authentication=simple 64 | authentication.ldap.security-credenticials=abc123456790 65 | #####################################user auth################################################ 66 | 67 | #####################################email#################################################### 68 | spring.mail.host=smtp.prophet.com 69 | spring.mail.port=25 70 | #spring.mail.username=123 71 | #spring.mail.password=123 72 | spring.mail.from=sender@prophet.com 73 | spring.mail.properties.mail.smtp.auth=false 74 | spring.mail.properties.mail.smtp.starttls.enable=false 75 | spring.mail.properties.mail.smtp.starttls.required=false 76 | #suffix used within your company for everyone 77 | spring.mail.company.suffix=@prophet.com 78 | #####################################email#################################################### -------------------------------------------------------------------------------- /src/main/java/com/prophet/Application.java: -------------------------------------------------------------------------------- 1 | package com.prophet; 2 | import org.springframework.boot.SpringApplication; 3 | import org.springframework.boot.autoconfigure.SpringBootApplication; 4 | import org.springframework.transaction.annotation.EnableTransactionManagement; 5 | import org.springframework.boot.context.web.SpringBootServletInitializer; 6 | import org.springframework.boot.web.servlet.ServletComponentScan; 7 | import org.springframework.boot.builder.SpringApplicationBuilder; 8 | 9 | 10 | @SpringBootApplication 11 | @EnableTransactionManagement 12 | @ServletComponentScan 13 | //@ComponentScan 14 | public class Application extends 
// Original location: src/main/java/com/prophet/common/Encryptor.java
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

/**
 * Hashing helper used for password storage.
 *
 * NOTE(review): both methods render the digest via {@link BigInteger}, which
 * drops leading zero nibbles and can prepend a '-' sign when the first digest
 * byte is &gt;= 0x80. That rendering is kept unchanged on purpose: hashes
 * already stored in the database were produced this way and must keep
 * matching. Likewise {@code String.getBytes()} still uses the platform
 * default charset -- presumably UTF-8 in deployment; TODO confirm.
 */
public abstract class Encryptor {
	public static final String KEY_SHA = "SHA";
	public static final String KEY_MD5 = "MD5";

	/**
	 * Available MAC algorithms:
	 * HmacMD5, HmacSHA1, HmacSHA256, HmacSHA384, HmacSHA512
	 */
	public static final String KEY_MAC = "HmacMD5";

	/**
	 * Hashes the given text with MD5.
	 *
	 * @param data plain text to hash
	 * @return signed radix-16 rendering of the MD5 digest
	 * @throws IllegalStateException if the JRE has no MD5 implementation
	 */
	public static String encryptMD5(String data) {
		return digestToString(data, KEY_MD5, 16);
	}

	/**
	 * Hashes the given text with SHA-1.
	 *
	 * @param data plain text to hash
	 * @return signed radix-32 rendering of the SHA-1 digest
	 * @throws IllegalStateException if the JRE has no SHA implementation
	 */
	public static String encryptSHA(String data) {
		return digestToString(data, KEY_SHA, 32);
	}

	// Shared implementation. The original duplicated this code in both public
	// methods and swallowed NoSuchAlgorithmException with printStackTrace(),
	// which then surfaced as a NullPointerException on the following line.
	// Rethrow as IllegalStateException instead (MD5/SHA are mandatory on
	// every conforming JRE, so this is effectively unreachable).
	private static String digestToString(String data, String algorithm, int radix) {
		MessageDigest digest;
		try {
			digest = MessageDigest.getInstance(algorithm);
		} catch (NoSuchAlgorithmException e) {
			throw new IllegalStateException(algorithm + " MessageDigest unavailable", e);
		}
		digest.update(data.getBytes());
		return new BigInteger(digest.digest()).toString(radix);
	}
}
-------------------------------------------------------------------------------- /src/main/java/com/prophet/common/HQLParser.java: -------------------------------------------------------------------------------- 1 | package com.prophet.common; 2 | import java.util.HashMap; 3 | import java.util.HashSet; 4 | import java.util.Map; 5 | import java.util.Set; 6 | import java.util.Stack; 7 | import java.util.TreeMap; 8 | 9 | import org.apache.hadoop.hive.ql.parse.ASTNode; 10 | import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; 11 | import org.apache.hadoop.hive.ql.parse.HiveParser; 12 | import org.apache.hadoop.hive.ql.parse.ParseDriver; 13 | import org.apache.hadoop.hive.ql.parse.ParseException; 14 | 15 | public class HQLParser { 16 | /** 17 | * 该方法供外部调用 18 | * @param sql 19 | * @throws Exception 20 | */ 21 | public void parseHQL(String sql) throws ParseException, org.antlr.runtime.NoViableAltException { 22 | ParseDriver pd = new ParseDriver(); 23 | ASTNode ast = pd.parse(sql); 24 | this.parse(ast); 25 | } 26 | 27 | private static final String UNKNOWN = "UNKNOWN"; 28 | private Map alias = new HashMap(); 29 | private Map cols = new TreeMap(); 30 | private Map colAlais = new TreeMap(); 31 | private Set tables = new HashSet(); 32 | private Stack tableNameStack = new Stack(); 33 | private Stack operStack = new Stack(); 34 | private String nowQueryTable = "";//定义及处理不清晰,修改为query或from节点对应的table集合或许好点。目前正在查询处理的表可能不止一个。 35 | private Oper oper ; 36 | private boolean joinClause = false; 37 | 38 | public Map getCols() { 39 | return cols; 40 | } 41 | public Set getTables() { 42 | Set newTables = new HashSet(); 43 | //去掉default12.`user`里的`` 44 | for (String t : this.tables) { 45 | newTables.add(t.replaceAll("`", "")); 46 | } 47 | return newTables; 48 | } 49 | public String getOper() { 50 | return oper.toString(); 51 | } 52 | 53 | private enum Oper { 54 | SELECT, INSERT, DROP, TRUNCATE, LOAD, CREATETABLE, ALTER, CREATEDATABASE, DROPDATABASE; 55 | } 56 | public Set 
parseIteral(ASTNode ast) { 57 | Set set= new HashSet();//当前查询所对应到的表集合 58 | prepareToParseCurrentNodeAndChilds(ast); 59 | set.addAll(parseChildNodes(ast)); 60 | set.addAll(parseCurrentNode(ast ,set)); 61 | endParseCurrentNode(ast); 62 | return set; 63 | } 64 | private void endParseCurrentNode(ASTNode ast){ 65 | if (ast.getToken() != null) { 66 | switch (ast.getToken().getType()) {//join 从句结束,跳出join 67 | case HiveParser.TOK_RIGHTOUTERJOIN: 68 | case HiveParser.TOK_LEFTOUTERJOIN: 69 | case HiveParser.TOK_JOIN: 70 | joinClause = false; 71 | break; 72 | case HiveParser.TOK_QUERY: 73 | break; 74 | case HiveParser.TOK_INSERT: 75 | case HiveParser.TOK_SELECT: 76 | nowQueryTable = tableNameStack.pop(); 77 | oper = operStack.pop(); 78 | break; 79 | } 80 | } 81 | } 82 | private Set parseCurrentNode(ASTNode ast, Set set){ 83 | if (ast.getToken() != null) { 84 | switch (ast.getToken().getType()) { 85 | case HiveParser.TOK_TABLE_PARTITION: 86 | if (ast.getChildCount() != 2) { 87 | String table = BaseSemanticAnalyzer 88 | .getUnescapedName((ASTNode) ast.getChild(0)); 89 | if (oper == Oper.SELECT) { 90 | nowQueryTable = table; 91 | } 92 | //tables.add(table + "\t" + oper); 93 | tables.add(table); 94 | } 95 | break; 96 | 97 | case HiveParser.TOK_TAB:// outputTable 98 | String tableTab = BaseSemanticAnalyzer 99 | .getUnescapedName((ASTNode) ast.getChild(0)); 100 | if (oper == Oper.SELECT) { 101 | nowQueryTable = tableTab; 102 | } 103 | //tables.add(tableTab + "\t" + oper); 104 | tables.add(tableTab); 105 | break; 106 | case HiveParser.TOK_TABREF:// inputTable 107 | ASTNode tabTree = (ASTNode) ast.getChild(0); 108 | String tableName = (tabTree.getChildCount() == 1) ? BaseSemanticAnalyzer 109 | .getUnescapedName((ASTNode) tabTree.getChild(0)) 110 | : BaseSemanticAnalyzer 111 | .getUnescapedName((ASTNode) tabTree.getChild(0)) 112 | + "." 
+ tabTree.getChild(1); 113 | if (oper == Oper.SELECT) { 114 | if(joinClause && !"".equals(nowQueryTable) ){ 115 | nowQueryTable += "&"+tableName;// 116 | }else{ 117 | nowQueryTable = tableName; 118 | } 119 | set.add(tableName); 120 | } 121 | //tables.add(tableName + "\t" + oper); 122 | tables.add(tableName); 123 | if (ast.getChild(1) != null) { 124 | String alia = ast.getChild(1).getText().toLowerCase(); 125 | alias.put(alia, tableName); 126 | } 127 | break; 128 | case HiveParser.TOK_TABLE_OR_COL: 129 | if (ast.getParent().getType() != HiveParser.DOT) { 130 | String col = ast.getChild(0).getText().toLowerCase(); 131 | if (alias.get(col) == null 132 | && colAlais.get(nowQueryTable + "." + col) == null) { 133 | if(nowQueryTable.indexOf("&") > 0){//sql23 134 | cols.put(UNKNOWN + "." + col, ""); 135 | }else{ 136 | cols.put(nowQueryTable + "." + col, ""); 137 | } 138 | } 139 | } 140 | break; 141 | case HiveParser.TOK_ALLCOLREF: 142 | cols.put(nowQueryTable + ".*", ""); 143 | break; 144 | case HiveParser.TOK_SUBQUERY: 145 | if (ast.getChildCount() == 2) { 146 | String tableAlias = unescapeIdentifier(ast.getChild(1) 147 | .getText()); 148 | String aliaReal = ""; 149 | for(String table : set){ 150 | aliaReal+=table+"&"; 151 | } 152 | if(aliaReal.length() !=0){ 153 | aliaReal = aliaReal.substring(0, aliaReal.length()-1); 154 | } 155 | alias.put(tableAlias, aliaReal); 156 | } 157 | break; 158 | 159 | case HiveParser.TOK_SELEXPR: 160 | if (ast.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) { 161 | String column = ast.getChild(0).getChild(0).getText() 162 | .toLowerCase(); 163 | if(nowQueryTable.indexOf("&") > 0){ 164 | cols.put(UNKNOWN + "." + column, ""); 165 | }else if (colAlais.get(nowQueryTable + "." + column) == null) { 166 | cols.put(nowQueryTable + "." 
+ column, ""); 167 | } 168 | } else if (ast.getChild(1) != null) {// TOK_SELEXPR (+ 169 | // (TOK_TABLE_OR_COL id) 170 | // 1) dd 171 | String columnAlia = ast.getChild(1).getText().toLowerCase(); 172 | colAlais.put(nowQueryTable + "." + columnAlia, ""); 173 | } 174 | break; 175 | case HiveParser.DOT: 176 | if (ast.getType() == HiveParser.DOT) { 177 | if (ast.getChildCount() == 2) { 178 | if (ast.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL 179 | && ast.getChild(0).getChildCount() == 1 180 | && ast.getChild(1).getType() == HiveParser.Identifier) { 181 | String alia = BaseSemanticAnalyzer 182 | .unescapeIdentifier(ast.getChild(0) 183 | .getChild(0).getText() 184 | .toLowerCase()); 185 | String column = BaseSemanticAnalyzer 186 | .unescapeIdentifier(ast.getChild(1) 187 | .getText().toLowerCase()); 188 | String realTable = null; 189 | if (!tables.contains(alia + "\t" + oper) 190 | && alias.get(alia) == null) {// [b SELECT, a 191 | // SELECT] 192 | alias.put(alia, nowQueryTable); 193 | } 194 | if (tables.contains(alia + "\t" + oper)) { 195 | realTable = alia; 196 | } else if (alias.get(alia) != null) { 197 | realTable = alias.get(alia); 198 | } 199 | if (realTable == null || realTable.length() == 0 || realTable.indexOf("&") > 0) { 200 | realTable = UNKNOWN; 201 | } 202 | cols.put(realTable + "." 
+ column, ""); 203 | 204 | } 205 | } 206 | } 207 | break; 208 | case HiveParser.TOK_ALTERTABLE_ADDPARTS: 209 | case HiveParser.TOK_ALTERTABLE_RENAME: 210 | case HiveParser.TOK_ALTERTABLE_ADDCOLS: 211 | ASTNode alterTableName = (ASTNode) ast.getChild(0); 212 | //tables.add(alterTableName.getText() + "\t" + oper); 213 | tables.add(alterTableName.getText()); 214 | break; 215 | } 216 | } 217 | return set; 218 | } 219 | 220 | private Set parseChildNodes(ASTNode ast){ 221 | Set set= new HashSet(); 222 | int numCh = ast.getChildCount(); 223 | if (numCh > 0) { 224 | for (int num = 0; num < numCh; num++) { 225 | ASTNode child = (ASTNode) ast.getChild(num); 226 | set.addAll(parseIteral(child)); 227 | } 228 | } 229 | return set; 230 | } 231 | 232 | private void prepareToParseCurrentNodeAndChilds(ASTNode ast){ 233 | if (ast.getToken() != null) { 234 | switch (ast.getToken().getType()) { 235 | //join 从句开始 236 | case HiveParser.TOK_RIGHTOUTERJOIN: 237 | case HiveParser.TOK_LEFTOUTERJOIN: 238 | case HiveParser.TOK_JOIN: 239 | joinClause = true; 240 | break; 241 | case HiveParser.TOK_QUERY: 242 | tableNameStack.push(nowQueryTable); 243 | operStack.push(oper); 244 | nowQueryTable = "";//sql22 245 | oper = Oper.SELECT; 246 | break; 247 | case HiveParser.TOK_INSERT: 248 | tableNameStack.push(nowQueryTable); 249 | operStack.push(oper); 250 | oper = Oper.INSERT; 251 | break; 252 | case HiveParser.TOK_SELECT: 253 | tableNameStack.push(nowQueryTable); 254 | operStack.push(oper); 255 | // nowQueryTable = "";//语法树join 256 | oper = Oper.SELECT; 257 | break; 258 | case HiveParser.TOK_DROPTABLE: 259 | oper = Oper.DROP; 260 | break; 261 | case HiveParser.TOK_TRUNCATETABLE: 262 | oper = Oper.TRUNCATE; 263 | break; 264 | case HiveParser.TOK_LOAD: 265 | oper = Oper.LOAD; 266 | break; 267 | case HiveParser.TOK_CREATETABLE: 268 | oper = Oper.CREATETABLE; 269 | break; 270 | case HiveParser.TOK_CREATEDATABASE: 271 | oper = Oper.CREATETABLE; 272 | break; 273 | case HiveParser.TOK_DROPDATABASE: 274 | 
oper = Oper.DROPDATABASE; 275 | break; 276 | } 277 | if (ast.getToken() != null 278 | && ast.getToken().getType() >= HiveParser.TOK_ALTERDATABASE_PROPERTIES 279 | && ast.getToken().getType() <= HiveParser.TOK_ALTERVIEW_RENAME) { 280 | oper = Oper.ALTER; 281 | } 282 | } 283 | } 284 | public static String unescapeIdentifier(String val) { 285 | if (val == null) { 286 | return null; 287 | } 288 | if (val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`') { 289 | val = val.substring(1, val.length() - 1); 290 | } 291 | return val; 292 | } 293 | 294 | public void parse(ASTNode ast) { 295 | parseIteral(ast); 296 | } 297 | 298 | } 299 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/common/QueryHistoryStatusEnum.java: -------------------------------------------------------------------------------- 1 | package com.prophet.common; 2 | 3 | /** 4 | * query_history表status字段的枚举值 5 | * 6 | */ 7 | public enum QueryHistoryStatusEnum { 8 | FINISHED(0, "已运行完毕"), RUNNING(1, "任务执行中"), ABORTED(2, "已经被取消"), ERROR(3, "运行出现错误"); 9 | 10 | private int index; 11 | private String name; 12 | 13 | private QueryHistoryStatusEnum(int index, String name) { 14 | this.index = index; 15 | this.name = name; 16 | } 17 | 18 | public String getName() { 19 | return this.name; 20 | } 21 | 22 | public void setName(String name) { 23 | this.name = name; 24 | } 25 | 26 | public int getIndex() { 27 | return this.index; 28 | } 29 | 30 | public void setIndex(int index) { 31 | this.index = index; 32 | } 33 | 34 | /** 35 | * 根据键值获取对应的状态名的方法 36 | * @param index 37 | * @return 38 | */ 39 | public static String getNameByIndex(int index) throws Exception { 40 | String name = ""; 41 | for (QueryHistoryStatusEnum q : QueryHistoryStatusEnum.values()) { 42 | if (q.getIndex() == index) { 43 | name = q.getName(); 44 | } 45 | } 46 | //如果遍历完了都没有找到对应的index,则抛出异常 47 | if (name.equals("")) { 48 | throw new 
Exception(String.format("根据状态值%d无法找到对应的QueryHistoryStatus枚举值描述!", index)); 49 | } 50 | return name; 51 | } 52 | 53 | /** 54 | * 根据状态值描述获取对应的状态值的方法 55 | * @param name 56 | * @return 57 | * @throws Exception 58 | */ 59 | public static int getIndexByName(String name) throws Exception { 60 | int index = -1; 61 | for (QueryHistoryStatusEnum q : QueryHistoryStatusEnum.values()) { 62 | if (q.getName().equals(name)) { 63 | index = q.getIndex(); 64 | } 65 | } 66 | //如果遍历完了都没有找到对应的index,则抛出异常 67 | if (index == -1) { 68 | throw new Exception(String.format("根据状态值描述%s无法找到对应的QueryHistoryStatus枚举值!", name)); 69 | } 70 | return index; 71 | } 72 | } -------------------------------------------------------------------------------- /src/main/java/com/prophet/common/ThreadPool.java: -------------------------------------------------------------------------------- 1 | package com.prophet.common; 2 | 3 | import java.util.Map; 4 | import java.util.concurrent.Callable; 5 | import java.util.concurrent.ExecutorService; 6 | import java.util.concurrent.Executors; 7 | import java.util.concurrent.Future; 8 | import java.util.concurrent.ThreadFactory; 9 | import java.util.concurrent.ConcurrentHashMap; 10 | 11 | import com.prophet.dao.task.HiveQueryAsyncTask; 12 | 13 | /** 14 | * 线程池基础类 15 | */ 16 | public class ThreadPool { 17 | private final static int POOL_SIZE = 64; 18 | private static ExecutorService executorService; 19 | public static Map> activeThreadsMap = new ConcurrentHashMap>(); //String为线程名,Future为线程结果 20 | 21 | public final static String HIVE_QUERY_THREAD_NAME = "HiveQueryAsyncTaskThread-"; 22 | public final static String HIVE_EMAIL_THREAD_NAME = "HiveResultMailThread-"; 23 | 24 | private static ExecutorService getExecutor() { 25 | if (executorService == null || executorService.isShutdown()) { 26 | synchronized (ThreadFactory.class) { 27 | if (executorService == null || executorService.isShutdown()) { 28 | executorService = Executors.newFixedThreadPool(POOL_SIZE); 29 | activeThreadsMap 
= new ConcurrentHashMap>(); 30 | } 31 | } 32 | } 33 | return executorService; 34 | } 35 | 36 | /** 37 | * 手动终止某个线程,并将Future从活跃列表移除 38 | * @param queryHistId 39 | */ 40 | public static void stopThread(long queryHistId) { 41 | if (activeThreadsMap != null) { 42 | Future result = activeThreadsMap.get(HIVE_QUERY_THREAD_NAME + queryHistId); 43 | if (result != null) { 44 | result.cancel(true); 45 | } 46 | activeThreadsMap.remove(HIVE_QUERY_THREAD_NAME + queryHistId); 47 | } 48 | } 49 | 50 | /** 51 | * 不返回结果的execute方法 52 | * @param thread 53 | */ 54 | public static void execute(T thread) { 55 | getExecutor().execute(thread); 56 | } 57 | 58 | /** 59 | * 提交hive query任务 60 | * @param hiveTask 61 | * @return Future 62 | */ 63 | public static void executeHiveQuery(HiveQueryAsyncTask hiveTask) { 64 | Future result = getExecutor().submit(hiveTask); 65 | 66 | //加入活跃线程列表,方便后面取消任务 67 | if (hiveTask != null) { 68 | activeThreadsMap.put(HIVE_QUERY_THREAD_NAME + hiveTask.getQueryHistId(), result); 69 | 70 | } 71 | } 72 | 73 | /** 74 | * 可以返回结果的提交方法 75 | * @param Callable task 76 | * @return 一个Future对象 77 | */ 78 | public static Future submit(Callable task) { 79 | return getExecutor().submit(task); 80 | } 81 | 82 | /** 83 | * 不再使用线程池时,调用该方法关闭线程池即可 84 | */ 85 | public static final void shutdown() { 86 | getExecutor().shutdown(); 87 | activeThreadsMap = null; 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/config/HiveDataSourceConfig.java: -------------------------------------------------------------------------------- 1 | package com.prophet.config; 2 | 3 | import javax.sql.DataSource; 4 | import org.springframework.beans.factory.annotation.Qualifier; 5 | import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder; 6 | import org.springframework.boot.context.properties.ConfigurationProperties; 7 | import org.springframework.context.annotation.Bean; 8 | import 
org.springframework.context.annotation.Configuration; 9 | //import org.springframework.context.annotation.Primary; 10 | import org.springframework.jdbc.core.JdbcTemplate; 11 | 12 | @Configuration 13 | public class HiveDataSourceConfig { 14 | @Bean(name="hiveServerDS") 15 | @ConfigurationProperties(prefix="spring.ds_hive_server") 16 | public DataSource prophetHiveServerDataSource() { 17 | return DataSourceBuilder.create().build(); 18 | } 19 | 20 | @Bean(name="hiveServerJdbcTemplate") 21 | public JdbcTemplate getHiveServerJdbcTemplate(@Qualifier("hiveServerDS") DataSource dsHiveServer) { 22 | return new JdbcTemplate(dsHiveServer); 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/config/HiveResultTextConfig.java: -------------------------------------------------------------------------------- 1 | package com.prophet.config; 2 | 3 | public class HiveResultTextConfig { 4 | 5 | private final static String HIVE_RESULT_FILE_DIR = "data/"; 6 | //public final static String HIVE_RESULT_FIELD_DELIMITER = "##@@#"; 7 | public final static String HIVE_RESULT_FIELD_DELIMITER = "\001\001"; //hive默认列分隔符^A的八进制编码,这里是两个^A 8 | 9 | /** 10 | * 获取数据文件绝对路径 11 | * @param username 12 | * @param queryHistId 13 | * @return 14 | */ 15 | public final static String getDataFileName(String username, long queryHistId) { 16 | return String.format("%s%s-%d.txt", HIVE_RESULT_FILE_DIR, username, queryHistId); 17 | } 18 | 19 | /** 20 | * 获取meta文件绝对路径 21 | * @param username 22 | * @param queryHistId 23 | * @return 24 | */ 25 | public final static String getMetaFileName(String username, long queryHistId) { 26 | return String.format("%s%s-%d.meta", HIVE_RESULT_FILE_DIR, username, queryHistId); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/config/MySQLDataSourceConfig.java: 
-------------------------------------------------------------------------------- 1 | package com.prophet.config; 2 | 3 | import javax.sql.DataSource; 4 | import org.springframework.beans.factory.annotation.Qualifier; 5 | import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder; 6 | import org.springframework.boot.context.properties.ConfigurationProperties; 7 | import org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | import org.springframework.context.annotation.Primary; 10 | import org.springframework.jdbc.core.JdbcTemplate; 11 | 12 | @Configuration 13 | public class MySQLDataSourceConfig { 14 | @Bean(name="prophetDS") 15 | @Primary 16 | @ConfigurationProperties(prefix="spring.ds_prophet") 17 | public DataSource prophetMysqlDataSource() { 18 | return DataSourceBuilder.create().build(); 19 | } 20 | 21 | @Bean(name="prophetJdbcTemplate") 22 | public JdbcTemplate getProphetJdbcTemplate(@Qualifier("prophetDS") DataSource dsProphet) { 23 | return new JdbcTemplate(dsProphet); 24 | } 25 | 26 | @Bean(name="hiveMetaStoreDS") 27 | @ConfigurationProperties(prefix="spring.ds_hive_metastore") 28 | public DataSource hiveMetaStoreMysqlDataSource() { 29 | return DataSourceBuilder.create().build(); 30 | } 31 | 32 | @Bean(name="hiveMetaStoreJdbcTemplate") 33 | public JdbcTemplate getHiveMetaStoreJdbcTemplate(@Qualifier("hiveMetaStoreDS") DataSource dsHive) { 34 | return new JdbcTemplate(dsHive); 35 | } 36 | 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/AdminDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.beans.factory.annotation.Qualifier; 8 | import org.springframework.jdbc.core.JdbcTemplate; 9 | import 
org.springframework.stereotype.Repository; 10 | 11 | @Repository 12 | public class AdminDao { 13 | @Autowired 14 | @Qualifier("prophetJdbcTemplate") 15 | private JdbcTemplate jdbcTemplate; 16 | 17 | public List> checkIsAdmin(String username){ 18 | String sql = "select id,username from admins where username=?"; 19 | Object[] args = {username}; 20 | return this.jdbcTemplate.queryForList(sql, args); 21 | } 22 | 23 | public int insertOneAdmin(String username) { 24 | String sql = "insert into admins(username) values(?)"; 25 | Object[] args = {username}; 26 | return this.jdbcTemplate.update(sql, args); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/EmailUtil.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.io.File; 4 | 5 | import javax.mail.internet.MimeMessage; 6 | 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.beans.factory.annotation.Value; 9 | import org.springframework.core.io.FileSystemResource; 10 | import org.springframework.mail.SimpleMailMessage; 11 | import org.springframework.mail.javamail.JavaMailSender; 12 | import org.springframework.mail.javamail.MimeMessageHelper; 13 | import org.springframework.stereotype.Repository; 14 | 15 | @Repository 16 | public class EmailUtil { 17 | @Autowired 18 | private JavaMailSender mailSender; //在引入spring-boot-starter-mail依赖后会根据配置文件中的内容创建JavaMailSender实例 19 | @Value("${spring.mail.from}") 20 | private String mailFrom; //发件人 21 | @Value("${spring.mail.company.suffix}") 22 | private String companyEmailSuffix; //公司的邮箱统一 23 | 24 | /** 25 | * 发送不带附件的邮件,只有标题和正文 26 | * @param mailTo 27 | * @param subject 28 | * @param text 29 | * @throws Exception 30 | */ 31 | public void sendSimpleMail(String mailTo, String subject, String text) throws Exception { 32 | SimpleMailMessage message = new SimpleMailMessage(); 33 | 
message.setFrom(this.mailFrom); 34 | message.setTo(mailTo + this.companyEmailSuffix); 35 | message.setSubject(subject); 36 | message.setText(text); 37 | 38 | mailSender.send(message); 39 | } 40 | 41 | /** 42 | * 发送可以带一个或多个附件的邮件,文件名需要是绝对路径 43 | * @param mailTo 44 | * @param subject 45 | * @param text 46 | * @param attachments 47 | * @throws Exception 48 | */ 49 | public void sendAttachmentsMail(String mailTo, String subject, String text, String ... attachments) throws Exception { 50 | MimeMessage mimeMessage = mailSender.createMimeMessage(); 51 | MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true); 52 | helper.setFrom(this.mailFrom); 53 | helper.setTo(mailTo + this.companyEmailSuffix); 54 | helper.setSubject(subject); 55 | helper.setText(text); 56 | 57 | for (int i = 0; i < attachments.length; i++) { 58 | File file = new File(attachments[i]); 59 | 60 | FileSystemResource attachment = new FileSystemResource(file); 61 | helper.addAttachment(file.getName(), attachment); 62 | } 63 | 64 | mailSender.send(mimeMessage); 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/HiveMetaStoreDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.beans.factory.annotation.Qualifier; 8 | import org.springframework.jdbc.core.JdbcTemplate; 9 | import org.springframework.stereotype.Repository; 10 | 11 | @Repository 12 | public class HiveMetaStoreDao { 13 | @Autowired 14 | @Qualifier("hiveMetaStoreJdbcTemplate") 15 | private JdbcTemplate jdbcTemplate; 16 | 17 | /** 18 | * 获取所有metastore里的hive表 19 | * @return 20 | * @throws Exception 21 | */ 22 | public List> getAllDbAndTablesInMetaStore() throws Exception{ 23 | String sql = "select DBS.NAME as 
DB_NAME,TBLS.TBL_ID,TBLS.TBL_NAME,TBLS.TBL_TYPE from TBLS,DBS where TBLS.DB_ID=DBS.DB_ID;"; 24 | return jdbcTemplate.queryForList(sql); 25 | } 26 | 27 | } 28 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/HiveSecretTableDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.sql.PreparedStatement; 4 | import java.sql.SQLException; 5 | import java.util.List; 6 | import java.util.Map; 7 | 8 | import org.springframework.beans.factory.annotation.Autowired; 9 | import org.springframework.beans.factory.annotation.Qualifier; 10 | import org.springframework.jdbc.core.BatchPreparedStatementSetter; 11 | import org.springframework.jdbc.core.JdbcTemplate; 12 | import org.springframework.stereotype.Repository; 13 | 14 | import com.prophet.domain.HiveSecretTable; 15 | 16 | @Repository 17 | public class HiveSecretTableDao { 18 | @Autowired 19 | @Qualifier("prophetJdbcTemplate") 20 | private JdbcTemplate jdbcTemplateProphet; 21 | 22 | @Autowired 23 | @Qualifier("hiveMetaStoreJdbcTemplate") 24 | private JdbcTemplate jdbcTemplateMetaStore; 25 | 26 | public List> checkIsSecretTable(String tableSchema, String tableName) { 27 | String sql = "select id, table_schema, table_name from hive_secret_tables where table_schema=? and table_name=?"; 28 | Object[] args = {tableSchema, tableName}; 29 | return jdbcTemplateProphet.queryForList(sql, args); 30 | } 31 | 32 | /** 33 | * 所有机密表面板,顺便展示哪些是当前用户有权限的 34 | * @param username 35 | * @return 36 | */ 37 | public List> getAllSecretTablesByUser(String username) { 38 | /* 39 | * 注意这里是b.username是在on里不是在where,这样才能在join时就把数据连接出来. 
40 | * 因为在使用left jion时,on和where条件的区别如下: 41 | * 1、 on条件是在生成临时表时使用的条件,它不管on中的条件是否为真,都会返回左边表中的记录。 42 | * 2、where条件是在临时表生成好后,再对临时表进行过滤的条件。这时已经没有left join的含义(必须返回左边表的记录)了,条件不为真的就全部过滤掉。 43 | * 不管on上的条件是否为真都会返回left或right表中的记录,full则具有left和right的特性的并集。 而inner jion没这个特殊性,则条件放在on中和where中,返回的结果集是相同的。 44 | * 详见 https://www.cnblogs.com/fuge/archive/2011/12/26/2342576.html 45 | */ 46 | String sql = "select a.id as table_id, a.table_schema, a.table_name, if(b.username=?,'您已具有查询权限',null) as info " 47 | + "from hive_secret_tables a left join hive_secret_user_privs b on a.id=b.hive_secret_table_id and b.username=? "; 48 | Object[] args = {username, username}; 49 | return jdbcTemplateProphet.queryForList(sql, args); 50 | } 51 | 52 | /** 53 | * 从prophet数据库和metastore里获取所有的非机密数据表,需要动态拼接sql里的in值 54 | * 用到了线程安全的StringBuffer,以及MySQL的where (a,b) not in ((1,2),(3,4),(5,6))语法 55 | * @return 56 | */ 57 | public List> getAllNonSecretTables() { 58 | String sqlSecret = "select id,table_schema,table_name from hive_secret_tables"; 59 | List> secretTables = jdbcTemplateProphet.queryForList(sqlSecret); 60 | 61 | StringBuffer sqlMetastore = new StringBuffer("select DBS.NAME as DB_NAME,TBLS.TBL_ID,TBLS.TBL_NAME,TBLS.TBL_TYPE from TBLS,DBS " 62 | + "where TBLS.DB_ID=DBS.DB_ID and (DBS.NAME,TBLS.TBL_NAME) not in (" ); 63 | 64 | if (secretTables.size() == 0) { 65 | sqlMetastore.append("('','')"); 66 | } else { 67 | for (int i = 0; i < secretTables.size(); i++) { 68 | if (i != 0) { 69 | sqlMetastore.append(","); 70 | } 71 | sqlMetastore.append(String.format("('%s','%s')", secretTables.get(i).get("table_schema").toString(), secretTables.get(i).get("table_name").toString())); 72 | } 73 | } 74 | sqlMetastore.append(") order by DBS.NAME,TBLS.TBL_NAME"); 75 | 76 | return this.jdbcTemplateMetaStore.queryForList(sqlMetastore.toString()); 77 | } 78 | 79 | /** 80 | * 通过batchUpdate批量插入机密数据表记录 81 | * @param secretTables 82 | */ 83 | public void addSecretTables(List secretTables) { 84 | String sql = "insert into 
hive_secret_tables(table_schema,table_name) values(?,?)"; 85 | this.jdbcTemplateProphet.batchUpdate(sql, new BatchPreparedStatementSetter(){ 86 | 87 | @Override 88 | public int getBatchSize() { 89 | return secretTables.size(); 90 | } 91 | 92 | @Override 93 | public void setValues(PreparedStatement ps, int index) throws SQLException { 94 | HiveSecretTable h = secretTables.get(index); 95 | ps.setString(1, h.getTableSchema()); 96 | ps.setString(2, h.getTableName()); 97 | } 98 | 99 | }); 100 | } 101 | 102 | /** 103 | * 获取所有机密表 104 | * @return 105 | */ 106 | public List> getAllSecretTables() { 107 | String sql = "select id, table_schema, table_name from hive_secret_tables order by table_schema,table_name"; 108 | return this.jdbcTemplateProphet.queryForList(sql); 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/HiveSecretUserPrivsDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.beans.factory.annotation.Qualifier; 8 | import org.springframework.jdbc.core.JdbcTemplate; 9 | import org.springframework.stereotype.Repository; 10 | 11 | @Repository 12 | public class HiveSecretUserPrivsDao { 13 | @Autowired 14 | @Qualifier("prophetJdbcTemplate") 15 | private JdbcTemplate jdbcTemplate; 16 | 17 | public List> checkSecretPrivilege(String username, String tableSchema, String tableName) { 18 | String sql = "select a.id from hive_secret_user_privs a, hive_secret_tables b" 19 | + " where a.hive_secret_table_id=b.id and a.username=? and b.table_schema=? 
and b.table_name=?"; 20 | Object[] args = {username, tableSchema, tableName}; 21 | return jdbcTemplate.queryForList(sql, args); 22 | } 23 | 24 | public int insertOneUserSecretPriv(String username, int hiveSecretTableId) { 25 | String sql = "insert into hive_secret_user_privs(hive_secret_table_id,username) values(?,?)"; 26 | Object[] args = {hiveSecretTableId, username}; 27 | return this.jdbcTemplate.update(sql, args); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/HiveServerDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.net.URLDecoder; 6 | import java.util.ArrayList; 7 | import java.util.HashMap; 8 | import java.util.List; 9 | import java.util.Map; 10 | 11 | import org.apache.commons.io.FileUtils; 12 | import org.apache.commons.io.LineIterator; 13 | import org.springframework.beans.factory.annotation.Autowired; 14 | import org.springframework.beans.factory.annotation.Qualifier; 15 | import org.springframework.jdbc.core.JdbcTemplate; 16 | import org.springframework.stereotype.Repository; 17 | 18 | import com.prophet.dao.EmailUtil; 19 | import com.prophet.common.ThreadPool; 20 | import com.prophet.dao.task.HiveQueryAsyncTask; 21 | 22 | @Repository 23 | public class HiveServerDao { 24 | @Autowired 25 | @Qualifier("hiveServerJdbcTemplate") 26 | private JdbcTemplate jdbcTemplateHiveServer; 27 | 28 | @Autowired 29 | @Qualifier("prophetJdbcTemplate") 30 | private JdbcTemplate jdbcTemplateProphet; 31 | 32 | private EmailUtil emailUtil; 33 | 34 | @Autowired 35 | public void setEmailUtil(EmailUtil emailUtil) { 36 | this.emailUtil = emailUtil; 37 | } 38 | 39 | public final static int PAGE_ROWS = 20; 40 | public final static int COL_MAX_CHARS = 100; 41 | 42 | /** 43 | * 开启线程向hive发送查询 44 | * @param sqlContent 45 | * @return 46 | */ 47 | public void 
sendHiveQuery(String queryContent, String username, long queryHistId, int emailNotify) { 48 | //开启新的线程去连接hive执行任务 49 | HiveQueryAsyncTask queryTask = new HiveQueryAsyncTask(); 50 | queryTask.setJdbcTemplateProphet(this.jdbcTemplateProphet); 51 | queryTask.setJdbcTemplateHiveServer(this.jdbcTemplateHiveServer); 52 | queryTask.setQueryContent(queryContent); 53 | queryTask.setUsername(username); 54 | queryTask.setQueryHistId(queryHistId); 55 | queryTask.setEmailNotify(emailNotify); 56 | queryTask.setEmailUtil(this.emailUtil); 57 | 58 | //Future> taskFuture = ThreadExecutor.submit(queryTask); 59 | ThreadPool.executeHiveQuery(queryTask); 60 | 61 | } 62 | 63 | /** 64 | * 查询表结构信息,同步方法 65 | * @param tableNameWithDb 66 | * @return 67 | * @throws Exception 68 | */ 69 | public List> descTableInfo(String tableNameWithDb) throws Exception { 70 | List> daoResult = new ArrayList>(); 71 | String sql = "describe `" + tableNameWithDb + "`"; 72 | List> result = jdbcTemplateHiveServer.queryForList(sql); 73 | 74 | for (Map line : result) { 75 | 76 | //处理comment 77 | if (line.containsKey("comment") && line.get("comment") != null) { 78 | String comment = line.get("comment").toString(); 79 | if (comment.contains("%")) { 80 | // 将application/x-www-from-urlencoded字符串转换成普通字符串 81 | line.put("comment", URLDecoder.decode(comment, "UTF-8")); 82 | } else { 83 | //正常字符编码不用修改 84 | } 85 | } 86 | daoResult.add(line); 87 | } 88 | return daoResult; 89 | } 90 | 91 | /** 92 | * 从磁盘上获取历史查询结果,分页获取 93 | * @param username 94 | * @param queryHistId 95 | * @return 数据结构: 96 | * { 97 | * "type":"sql_query", 98 | * "data":{ 99 | * "result_cols":[{"col_name":"col1", "col_width":40}], 100 | * "result_data":[] 101 | * } 102 | * (size会在service层加上) 103 | * @throws IOException 104 | */ 105 | public Map getResultFromDiskByIdByPage(String username, long queryHistId, int pageNo, int pageRows) throws Exception{ 106 | Map resultWithType = new HashMap(); 107 | Map result = new HashMap(); 108 | 109 | List> columns = new 
ArrayList>(); 110 | List> data = new ArrayList>(); 111 | 112 | File dataFile = new File(com.prophet.config.HiveResultTextConfig.getDataFileName(username, queryHistId)); 113 | File metaFile = new File(com.prophet.config.HiveResultTextConfig.getMetaFileName(username, queryHistId)); 114 | if (dataFile.isFile() && metaFile.isFile()) { 115 | 116 | List listColumns = FileUtils.readLines(metaFile, "UTF-8"); 117 | if (listColumns != null && listColumns.size() >= 1) { 118 | //先拼装列 119 | //理论上meta文件只有一行 120 | String[] splitCols = listColumns.get(0).split(com.prophet.config.HiveResultTextConfig.HIVE_RESULT_FIELD_DELIMITER); 121 | for (String col : splitCols) { 122 | Map colInfo = new HashMap(); 123 | colInfo.put("col_name", col); 124 | colInfo.put("col_width", -1); 125 | columns.add(colInfo); 126 | } 127 | 128 | //再拼装数据 129 | List listData = new ArrayList(); 130 | LineIterator it = null; 131 | try { 132 | it = FileUtils.lineIterator(dataFile, "UTF-8"); 133 | int startLineNo = 0; //闭区间 134 | 135 | //先移动startLineNo的指针 136 | while (it.hasNext() && startLineNo < (pageNo - 1) * pageRows) { 137 | it.nextLine(); 138 | startLineNo++; 139 | } 140 | int endLineNo = startLineNo; //闭区间 141 | //再移动endLineNo的指针 142 | while (it.hasNext() && endLineNo < (pageNo) * pageRows) { 143 | String line = it.nextLine(); 144 | listData.add(line); 145 | endLineNo++; 146 | } 147 | } catch (IOException ex) { 148 | ex.printStackTrace(); 149 | } finally { 150 | LineIterator.closeQuietly(it); 151 | } 152 | 153 | //组装成前端需要的json数据 154 | if (listData != null && listData.size() >= 1) { 155 | for (String line : listData) { 156 | String[] fields = line.split(com.prophet.config.HiveResultTextConfig.HIVE_RESULT_FIELD_DELIMITER); 157 | //遍历每一个列的数据,加入HashMap 158 | Map lineMap = new HashMap(); 159 | for (int i=0; i columns.get(i).get("col_name").toString().length() ? 
163 | fields[i].length() : columns.get(i).get("col_name").toString().length(); 164 | //还要拿每一行里该列的宽度和列头以及最大列宽对比下,取最大 165 | if (colChars > COL_MAX_CHARS) { 166 | colChars = COL_MAX_CHARS; 167 | } 168 | Map newColInfo = new HashMap(); 169 | newColInfo.put("col_name", columns.get(i).get("col_name").toString()); 170 | newColInfo.put("col_width", colChars * 8); 171 | columns.set(i, newColInfo); 172 | } 173 | 174 | data.add(lineMap); 175 | } 176 | } 177 | } else { 178 | //否则columns继续保持空 179 | } 180 | } else { 181 | throw new Exception(String.format("历史查询结果文件已经不存在,请重新发起查询!查询语句id:%d", queryHistId)); 182 | } 183 | result.put("result_cols", columns); 184 | result.put("result_data", data); 185 | resultWithType.put("type", "sql_query"); 186 | resultWithType.put("data", result); 187 | return resultWithType; 188 | } 189 | 190 | } 191 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/QueryHistoryDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.sql.Connection; 4 | import java.sql.PreparedStatement; 5 | import java.sql.ResultSet; 6 | import java.sql.SQLException; 7 | import java.sql.Statement; 8 | import java.util.ArrayList; 9 | import java.util.List; 10 | 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.beans.factory.annotation.Qualifier; 13 | import org.springframework.jdbc.core.JdbcTemplate; 14 | import org.springframework.jdbc.core.PreparedStatementCreator; 15 | import org.springframework.jdbc.core.RowCallbackHandler; 16 | import org.springframework.jdbc.support.GeneratedKeyHolder; 17 | import org.springframework.jdbc.support.KeyHolder; 18 | import org.springframework.stereotype.Repository; 19 | 20 | import com.prophet.domain.QueryHistory; 21 | import com.prophet.common.QueryHistoryStatusEnum; 22 | 23 | @Repository 24 | public class QueryHistoryDao { 25 | @Autowired 26 | 
@Qualifier("prophetJdbcTemplate") 27 | private JdbcTemplate jdbcTemplate; 28 | 29 | /** 30 | * 插入一条查询历史 31 | * @param queryTime 32 | * @param queryContent 33 | * @param status 34 | * @param username 35 | * @return 36 | * @throws Exception 37 | */ 38 | public long insertQueryHistory(String queryTime, String queryContent, int status, String username, int emailNotify) throws Exception { 39 | String sql = "insert into query_history(query_time, query_content, status, username, email_notify) " 40 | + "values(?, ?, ?, ?, ?)"; 41 | 42 | //获取数据库自动生成的主键值 43 | KeyHolder keyHolder = new GeneratedKeyHolder(); 44 | this.jdbcTemplate.update(new PreparedStatementCreator() { 45 | 46 | @Override 47 | public PreparedStatement createPreparedStatement(Connection conn) throws SQLException { 48 | PreparedStatement ps = (PreparedStatement)conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); 49 | ps.setString(1, queryTime); 50 | ps.setString(2, queryContent); 51 | ps.setInt(3, status); 52 | ps.setString(4, username); 53 | ps.setInt(5, emailNotify); 54 | return ps; 55 | } 56 | 57 | }, keyHolder); 58 | return keyHolder.getKey().longValue(); 59 | 60 | } 61 | 62 | /** 63 | * 根据用户名获取某个人的查询历史 64 | * @param username 65 | * @return 66 | */ 67 | public List getAllQueryHistory(String username) throws SQLException { 68 | List listHistory = new ArrayList(); 69 | 70 | String sql = "select id, query_time, query_content, status, username from query_history " 71 | + "where username=? 
order by query_time desc limit 20"; 72 | Object[] args = {username}; 73 | this.jdbcTemplate.query(sql, args, new RowCallbackHandler(){ 74 | 75 | public void processRow(ResultSet rs) throws SQLException { 76 | QueryHistory q = new QueryHistory(); 77 | q.setId(rs.getInt("id")); 78 | q.setQueryTime(rs.getString("query_time")); 79 | q.setQueryContent(rs.getString("query_content")); 80 | q.setStatus(rs.getInt("status")); 81 | try { 82 | q.setStrStatus(QueryHistoryStatusEnum.getNameByIndex(rs.getInt("status"))); 83 | } catch (Exception e) { 84 | // TODO Auto-generated catch block 85 | e.printStackTrace(); 86 | } 87 | q.setUsername(rs.getString("username")); 88 | listHistory.add(q); 89 | } 90 | 91 | }); 92 | 93 | return listHistory; 94 | } 95 | 96 | /** 97 | * 更改查询历史记录状态值和消息 98 | * @param id 99 | * @param status 100 | * @param message 101 | * @return 102 | */ 103 | public int updateQueryHistoryStatusAndMsg(long id, int status, String message) { 104 | String sql = "update query_history set status=?,message=? 
where id=?"; 105 | Object[] args = {status, message,id}; 106 | return this.jdbcTemplate.update(sql, args); 107 | } 108 | 109 | /** 110 | * 根据id获取QueryHistory对象,可以进一步获得状态、SQL、查询时间等信息 111 | * @param id 112 | * @return 113 | */ 114 | public QueryHistory getQueryHistoryById(long id) { 115 | String sql = "select id, query_time, query_content, status, username, email_notify, result_size, message from query_history where id=?"; 116 | Object[] args = {id}; 117 | QueryHistory q = new QueryHistory(); 118 | this.jdbcTemplate.query(sql, args, new RowCallbackHandler(){ 119 | 120 | public void processRow(ResultSet rs) throws SQLException { 121 | q.setId(rs.getLong("id")); 122 | q.setQueryTime(rs.getString("query_time")); 123 | q.setQueryContent(rs.getString("query_content")); 124 | q.setStatus(rs.getInt("status")); 125 | try { 126 | q.setStrStatus(QueryHistoryStatusEnum.getNameByIndex(rs.getInt("status"))); 127 | } catch (Exception e) { 128 | // TODO Auto-generated catch block 129 | e.printStackTrace(); 130 | } 131 | q.setUsername(rs.getString("username")); 132 | q.setEmailNotify(rs.getInt("email_notify")); 133 | q.setResultSize(rs.getInt("result_size")); 134 | q.setMessage(rs.getString("message")); 135 | } 136 | 137 | }); 138 | return q; 139 | } 140 | 141 | /** 142 | * 保存结果集行数 143 | * @param queryHistId 144 | * @param resultSize 145 | * @return 146 | */ 147 | public int saveResultSizeById(long queryHistId, int resultSize) { 148 | String sql = "update query_history set result_size=? 
where id=?"; 149 | Object[] args = new Object[]{resultSize, queryHistId}; 150 | return this.jdbcTemplate.update(sql, args); 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/UserAuthLdapDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.util.Hashtable; 4 | 5 | import javax.naming.AuthenticationException; 6 | import javax.naming.Context; 7 | import javax.naming.NamingEnumeration; 8 | import javax.naming.NamingException; 9 | import javax.naming.directory.BasicAttribute; 10 | import javax.naming.directory.BasicAttributes; 11 | import javax.naming.directory.SearchControls; 12 | import javax.naming.directory.SearchResult; 13 | import javax.naming.ldap.Control; 14 | import javax.naming.ldap.InitialLdapContext; 15 | import javax.naming.ldap.LdapContext; 16 | 17 | import com.prophet.interfaces.UserAuthDaoInterface; 18 | 19 | /** 20 | * LDAP服务连接基础类 21 | * 22 | */ 23 | public class UserAuthLdapDao implements UserAuthDaoInterface{ 24 | private String URL; 25 | private String BASE_DN; 26 | private String USER_SEARCH_DN; 27 | private String USER_SEARCH_COLUMN; 28 | private String FACTORY; 29 | private String SECURITY_AUTHENTICATION; 30 | private String SECURITY_CREDENTIALS; 31 | 32 | private LdapContext ctx = null; //LDAP连接上下文,后续都操作这个 33 | private final Control[] connCtls = null; //控制连接的一些属性 34 | 35 | public UserAuthLdapDao(){ 36 | 37 | } 38 | 39 | public UserAuthLdapDao(String URL, String BASE_DN, String USER_SEARCH_DN, String USER_SEARCH_COLUMN, String FACTORY, String SECURITY_AUTHENTICATION, String SECURITY_CREDENTIALS) { 40 | this.URL = URL; 41 | this.BASE_DN = BASE_DN; 42 | this.USER_SEARCH_DN = USER_SEARCH_DN; 43 | this.USER_SEARCH_COLUMN = USER_SEARCH_COLUMN; 44 | this.FACTORY = FACTORY; 45 | this.SECURITY_AUTHENTICATION = SECURITY_AUTHENTICATION; 46 | this.SECURITY_CREDENTIALS = SECURITY_CREDENTIALS; 
47 | } 48 | 49 | /** 50 | * 连接LDAP 51 | */ 52 | private void connect() { 53 | //设置一些env参数 54 | Hashtable env = new Hashtable(); 55 | env.put(Context.INITIAL_CONTEXT_FACTORY, this.FACTORY); 56 | env.put(Context.PROVIDER_URL, this.URL); 57 | env.put(Context.SECURITY_AUTHENTICATION, this.SECURITY_AUTHENTICATION); 58 | 59 | env.put(Context.SECURITY_PRINCIPAL, this.BASE_DN); 60 | env.put(Context.SECURITY_CREDENTIALS, this.SECURITY_CREDENTIALS); 61 | //env.put("java.naming.ldap.attributes.binary", "objectSid objectGUID"); 62 | 63 | try { 64 | ctx = new InitialLdapContext(env, connCtls); 65 | //logger.info("连接LDAP成功!"); 66 | } catch (javax.naming.AuthenticationException e) { 67 | e.printStackTrace(); 68 | } catch (Exception e) { 69 | e.printStackTrace(); 70 | } 71 | 72 | } 73 | 74 | /** 75 | * 断开连接 76 | */ 77 | private void closeContext(){ 78 | if (ctx != null) { 79 | try { 80 | ctx.close(); 81 | } 82 | catch (NamingException e) { 83 | e.printStackTrace(); 84 | } 85 | } 86 | } 87 | 88 | /** 89 | * 获取用户的完整DN 90 | * @param uid 91 | * @return 92 | */ 93 | private String getUserDN(String uid) { 94 | connect(); 95 | String userDN = ""; 96 | 97 | try { 98 | SearchControls constraints = new SearchControls(); 99 | String filter = String.format("(%s=%s)",this.USER_SEARCH_COLUMN, uid); 100 | String[] attrPersonArray = { "uid", "displayName", "cn" }; 101 | constraints.setSearchScope(SearchControls.SUBTREE_SCOPE); 102 | constraints.setReturningAttributes(attrPersonArray); 103 | 104 | NamingEnumeration en = ctx.search(this.USER_SEARCH_DN, filter, constraints); 105 | 106 | if (en == null || !en.hasMoreElements()) { 107 | //logger.info(String.format("LDAP里未找到该用户的信息! 
user:%s", uid)); 108 | return ""; 109 | } 110 | while (en != null && en.hasMoreElements()) { 111 | Object obj = en.nextElement(); 112 | if (obj instanceof SearchResult) { 113 | SearchResult si = (SearchResult) obj; 114 | userDN += si.getName(); 115 | userDN += "," + this.USER_SEARCH_DN; 116 | } else { 117 | //logger.info(obj.toString()); 118 | } 119 | } 120 | 121 | } catch (Exception e) { 122 | //logger.info(String.format("查找用户时产生异常! user:%s 报错信息如下:", uid)); 123 | //logger.error(e.getMessage()); 124 | } 125 | 126 | return userDN; 127 | } 128 | 129 | /** 130 | * 认证用户的用户名和密码 131 | */ 132 | @Override 133 | public int authenticate(String UID, String password) { 134 | int result = -1; 135 | String userDN = getUserDN(UID); 136 | 137 | //没有该用户信息 138 | if (userDN.equals("")) { 139 | result = 1; 140 | return result; 141 | } 142 | 143 | try { 144 | ctx.addToEnvironment(Context.SECURITY_PRINCIPAL, userDN); 145 | ctx.addToEnvironment(Context.SECURITY_CREDENTIALS, password); 146 | ctx.reconnect(connCtls); 147 | //logger.info(String.format("LDAP用户验证通过! user:%s", UID)); 148 | result = 0; 149 | } catch (AuthenticationException e) { 150 | //logger.info(String.format("LDAP用户验证失败! user:%s,报错信息如下:", UID)); 151 | //logger.warn(e.getMessage()); 152 | result = 1; 153 | } catch (NamingException e) { 154 | //logger.info(String.format("LDAP用户验证失败! user:%s,报错信息如下:", UID)); 155 | result = 1; 156 | } 157 | //一定要关闭LDAP连接 158 | closeContext(); 159 | return result; 160 | } 161 | 162 | @Override 163 | public boolean hasUser(String UID) { 164 | return this.getUserDN(UID).equals("") == true ? 
false : true; 165 | } 166 | 167 | /** 168 | * Add a user to LDAP; currently unused. Always releases the LDAP connection when done. 169 | */ 170 | public boolean addUser(String usr, String pwd,String uid,String description) { 171 | try { 172 | BasicAttributes attrsbu = new BasicAttributes(); 173 | BasicAttribute objclassSet = new BasicAttribute("objectclass"); 174 | objclassSet.add("inetOrgPerson"); 175 | attrsbu.put(objclassSet); 176 | attrsbu.put("sn", usr); 177 | attrsbu.put("cn", usr); 178 | attrsbu.put("uid", uid); 179 | attrsbu.put("userPassword", pwd); 180 | attrsbu.put("description", description); 181 | ctx.createSubcontext("uid="+uid, attrsbu); 182 | 183 | return true; 184 | } catch (NamingException ex) { 185 | ex.printStackTrace(); 186 | } finally { 187 | closeContext(); //must close on the success path too, otherwise the LDAP context leaks 188 | } 189 | return false; 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/UserAuthProphetDao.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.springframework.jdbc.core.JdbcTemplate; 7 | 8 | import com.prophet.interfaces.UserAuthDaoInterface; 9 | import com.prophet.common.Encryptor; 10 | 11 | /** 12 | * Prophet built-in user authentication DAO, backed by the prophet_users MySQL table. 13 | * 14 | */ 15 | public class UserAuthProphetDao implements UserAuthDaoInterface{ 16 | private JdbcTemplate jdbcTemplate; 17 | 18 | public UserAuthProphetDao(JdbcTemplate jdbcTemplate) { 19 | this.jdbcTemplate = jdbcTemplate; 20 | } 21 | 22 | @Override 23 | public int authenticate(String username, String password){ 24 | String sql = "select id,username from prophet_users where username=? and password=? and is_active=1"; 25 | Object[] args = {username, Encryptor.encryptSHA(password)}; 26 | return (this.jdbcTemplate.queryForList(sql, args).size() == 1) ? 0 : 1; 27 | } 28 | 29 | @Override 30 | public boolean hasUser(String username) { 31 | String sql = "select id,username from prophet_users where username=? 
and is_active=1"; 32 | Object[] args = {username}; 33 | return (this.jdbcTemplate.queryForList(sql, args).size() == 1) ? true : false; 34 | } 35 | 36 | /** 37 | * 获取所有prophet用户的信息 38 | * @return 39 | */ 40 | public List> getAllProphetUsers() { 41 | String sql = "select id,username,is_active,user_type,create_time from prophet_users;"; 42 | return this.jdbcTemplate.queryForList(sql); 43 | } 44 | 45 | 46 | public int addProphetUser(String username, String password, String isActive, String userType) { 47 | String sql = "insert into prophet_users (username,password,is_active,user_type)" 48 | + " values(?,?,?,?)"; 49 | int active = isActive.equals("true") ? 1 : 0; 50 | Object[] args = {username, Encryptor.encryptSHA(password), active, userType}; 51 | return this.jdbcTemplate.update(sql, args); 52 | } 53 | 54 | public void deleteUserById(int userId) { 55 | String sql = "delete from prophet_users where id=?"; 56 | Object[] args = {userId}; 57 | this.jdbcTemplate.update(sql, args); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/task/HiveQueryAsyncTask.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao.task; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.util.ArrayList; 6 | import java.util.HashSet; 7 | import java.util.Iterator; 8 | import java.util.List; 9 | import java.util.Map; 10 | import java.util.Set; 11 | import java.util.Map.Entry; 12 | 13 | import org.springframework.dao.DataAccessException; 14 | import org.springframework.jdbc.core.JdbcTemplate; 15 | 16 | import com.prophet.common.QueryHistoryStatusEnum; 17 | import com.prophet.common.ThreadPool; 18 | import com.prophet.dao.EmailUtil; 19 | 20 | import org.apache.commons.io.FileUtils; 21 | 22 | /** 23 | * 开启多线程执行hive查询任务的线程执行体 24 | * 25 | */ 26 | public class HiveQueryAsyncTask implements Runnable{ 27 | private JdbcTemplate 
jdbcTemplateProphet; 28 | private JdbcTemplate jdbcTemplateHiveServer; 29 | private String queryContent; 30 | private String username; 31 | private long queryHistId; 32 | private int emailNotify; 33 | private EmailUtil emailUtil; 34 | 35 | public void setJdbcTemplateProphet(JdbcTemplate jdbcTemplateProphet) { 36 | this.jdbcTemplateProphet = jdbcTemplateProphet; 37 | } 38 | 39 | public void setJdbcTemplateHiveServer(JdbcTemplate jdbcTemplateHiveServer) { 40 | this.jdbcTemplateHiveServer = jdbcTemplateHiveServer; 41 | } 42 | 43 | public void setQueryContent(String queryContent) { 44 | this.queryContent = queryContent; 45 | } 46 | 47 | public void setUsername(String username) { 48 | this.username = username; 49 | } 50 | 51 | public long getQueryHistId() { 52 | return queryHistId; 53 | } 54 | 55 | public void setQueryHistId(long queryHistId) { 56 | this.queryHistId = queryHistId; 57 | } 58 | 59 | public void setEmailNotify(int emailNotify) { 60 | this.emailNotify = emailNotify; 61 | } 62 | public void setEmailUtil(EmailUtil emailUtil) { 63 | this.emailUtil = emailUtil; 64 | } 65 | 66 | /** 67 | * 要做的事: 68 | * 1.查询hive 69 | * 2.结果写入磁盘,写入结果集大小统计信息 70 | * 3.写入完毕更新状态和message 71 | * 4.如果勾选了邮件发送邮件 72 | * 5.将自己从活跃线程列表剔除 73 | */ 74 | @Override 75 | public void run() { 76 | List> hiveResult = null; 77 | 78 | try { 79 | //先查询hive 80 | hiveResult = this.jdbcTemplateHiveServer.queryForList(this.queryContent); 81 | Set columnSet = new HashSet(); 82 | if (!hiveResult.isEmpty()) { 83 | columnSet = hiveResult.get(0).keySet(); 84 | } 85 | 86 | //然后将结果写入disk 87 | if (this._writeHiveResultToDisk(hiveResult, columnSet, username, this.queryHistId)) { 88 | //一旦写入成功将结果集大小记录到db,并传给前端,方便分页 89 | int resultSize = hiveResult.size(); 90 | this._saveResultSizeById(this.queryHistId, resultSize); 91 | } 92 | 93 | //执行到最后没有问题,则更新状态 94 | this._updateQueryHistoryStatusAndMsg(this.queryHistId, 95 | QueryHistoryStatusEnum.FINISHED.getIndex(), "ok"); 96 | } catch (Exception ex) { 97 | 
this._updateQueryHistoryStatusAndMsg(this.queryHistId, 98 | QueryHistoryStatusEnum.ERROR.getIndex(), ex.getMessage()); 99 | 100 | } finally { 101 | //如果用户选了邮件通知,则异步发送邮件 102 | if (this.emailNotify == 1) { 103 | HiveResultSendmailRunnableTask hiveMailTask = new HiveResultSendmailRunnableTask(); 104 | hiveMailTask.setEmailUtil(this.emailUtil); 105 | hiveMailTask.setQueryHistId(this.queryHistId); 106 | hiveMailTask.setMailToUser(this.username); 107 | 108 | ThreadPool.execute(new Thread(hiveMailTask, ThreadPool.HIVE_EMAIL_THREAD_NAME + this.queryHistId)); 109 | } 110 | 111 | //将自己从活跃线程列表剔除 112 | ThreadPool.stopThread(this.queryHistId); 113 | } 114 | } 115 | 116 | /** 117 | * 结果集写入磁盘 118 | * @param hiveData 119 | * @param hiveCols 120 | * @param username 121 | * @param queryHistId 122 | * @return 123 | */ 124 | private boolean _writeHiveResultToDisk(List> hiveData, Set hiveCols, String username, long queryHistId) { 125 | Boolean isFinished = true; 126 | 127 | final String dataFileName = com.prophet.config.HiveResultTextConfig.getDataFileName(username, queryHistId); 128 | final String metaFileName = com.prophet.config.HiveResultTextConfig.getMetaFileName(username, queryHistId); 129 | 130 | //先生成数据文件,遍历数组加工成一个分割好的list 131 | List diskResult = new ArrayList(); 132 | for (Map line : hiveData) { 133 | //StringBuffer是一个可变对象,当对他进行修改的时候不会像String那样重新建立对象。字符串连接操作效率比String高 134 | StringBuffer strLine = new StringBuffer(""); 135 | 136 | Iterator> iter = line.entrySet().iterator(); //这样遍历HashMap效率较高 137 | while (iter.hasNext()) { 138 | Map.Entry entry = iter.next(); 139 | //String key = entry.getKey(); 140 | Object value = entry.getValue(); 141 | if (value == null || value.equals("")) { 142 | value = " "; 143 | } 144 | strLine.append(value + com.prophet.config.HiveResultTextConfig.HIVE_RESULT_FIELD_DELIMITER); 145 | } 146 | //一行结束,加入diskResult 147 | diskResult.add(strLine); 148 | } 149 | 150 | //生成meta文件 151 | StringBuffer strCols = new StringBuffer(""); 152 | for (String col : 
hiveCols) { 153 | strCols.append(col + com.prophet.config.HiveResultTextConfig.HIVE_RESULT_FIELD_DELIMITER); 154 | } 155 | 156 | //然后一次性写入磁盘,避免频繁IO 157 | try { 158 | //重要:结果集为空时依然会写文件,但txt和meta文件内容都是""空字符串 159 | FileUtils.writeLines(new File(dataFileName), "UTF-8", diskResult); 160 | FileUtils.writeStringToFile(new File(metaFileName), strCols.toString(), "UTF-8"); 161 | } catch (IOException e) { 162 | e.printStackTrace(); 163 | isFinished = false; 164 | } 165 | return isFinished; 166 | } 167 | 168 | /** 169 | * 保存查询结果集行数到query_history表 170 | * @param queryHistId 171 | * @param resultSize 172 | * @return 173 | */ 174 | private int _saveResultSizeById(long queryHistId, int resultSize) { 175 | String sql = "update query_history set result_size=? where id=?"; 176 | Object[] args = new Object[]{resultSize, queryHistId}; 177 | return this.jdbcTemplateProphet.update(sql, args); 178 | } 179 | 180 | /** 181 | * 更改查询历史记录状态值和消息 182 | * @param id 183 | * @param status 184 | * @param message 185 | * @return 186 | */ 187 | public int _updateQueryHistoryStatusAndMsg(long id, int status, String message) { 188 | String sql = "update query_history set status=?,message=? 
where id=?"; 189 | Object[] args = {status, message, id}; 190 | int result = -1; 191 | try { 192 | result = this.jdbcTemplateProphet.update(sql, args); 193 | } catch (DataAccessException e) { 194 | e.printStackTrace(); 195 | } 196 | return result; 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/dao/task/HiveResultSendmailRunnableTask.java: -------------------------------------------------------------------------------- 1 | package com.prophet.dao.task; 2 | 3 | import com.prophet.dao.EmailUtil; 4 | 5 | /** 6 | * 开启线程执行hive任务完毕后发送邮件 7 | * 8 | */ 9 | public class HiveResultSendmailRunnableTask implements Runnable{ 10 | private EmailUtil emailUtil; 11 | private long queryHistId; 12 | private String mailToUser; 13 | 14 | public EmailUtil getEmailUtil() { 15 | return emailUtil; 16 | } 17 | 18 | public void setEmailUtil(EmailUtil emailUtil) { 19 | this.emailUtil = emailUtil; 20 | } 21 | 22 | public long getQueryHistId() { 23 | return queryHistId; 24 | } 25 | 26 | public void setQueryHistId(long queryHistId) { 27 | this.queryHistId = queryHistId; 28 | } 29 | 30 | public String getMailToUser() { 31 | return mailToUser; 32 | } 33 | 34 | public void setMailToUser(String mailToUser) { 35 | this.mailToUser = mailToUser; 36 | } 37 | 38 | @Override 39 | public void run() { 40 | try { 41 | this.emailUtil.sendSimpleMail(this.mailToUser, "[Prophet系统通知]Hive SQL任务执行完毕", String.format("您的Hive任务已经执行完毕,查询语句Id:%d", this.queryHistId)); 42 | } catch (Exception e) { 43 | e.printStackTrace(); 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/domain/HiveSecretTable.java: -------------------------------------------------------------------------------- 1 | package com.prophet.domain; 2 | 3 | public class HiveSecretTable { 4 | private int id; 5 | private String tableSchema; 6 | private String tableName; 7 | 8 | public int getId() { 9 
| return id; 10 | } 11 | public void setId(int id) { 12 | this.id = id; 13 | } 14 | public String getTableSchema() { 15 | return tableSchema; 16 | } 17 | public void setTableSchema(String tableSchema) { 18 | this.tableSchema = tableSchema; 19 | } 20 | public String getTableName() { 21 | return tableName; 22 | } 23 | public void setTableName(String tableName) { 24 | this.tableName = tableName; 25 | } 26 | 27 | } 28 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/domain/QueryHistory.java: -------------------------------------------------------------------------------- 1 | package com.prophet.domain; 2 | 3 | /** 4 | * hive查询语句历史模型类 5 | * table: query_history 6 | */ 7 | public class QueryHistory { 8 | private long id; 9 | private String queryTime; 10 | private String queryContent; 11 | private int status; //status和strStatus是枚举里index和name的关系,在此冗余查询方便而已 12 | private String strStatus; 13 | private String username; 14 | private int emailNotify; 15 | private int resultSize; 16 | private String message; 17 | 18 | public long getId() { 19 | return id; 20 | } 21 | public void setId(long id) { 22 | this.id = id; 23 | } 24 | public String getQueryTime() { 25 | return queryTime; 26 | } 27 | public void setQueryTime(String queryTime) { 28 | this.queryTime = queryTime; 29 | } 30 | public String getQueryContent() { 31 | return queryContent; 32 | } 33 | public void setQueryContent(String queryContent) { 34 | this.queryContent = queryContent; 35 | } 36 | public int getStatus() { 37 | return status; 38 | } 39 | public void setStatus(int status) { 40 | this.status = status; 41 | } 42 | public String getStrStatus() { 43 | return strStatus; 44 | } 45 | public void setStrStatus(String strStatus) { 46 | this.strStatus = strStatus; 47 | } 48 | public String getUsername() { 49 | return username; 50 | } 51 | public void setUsername(String username) { 52 | this.username = username; 53 | } 54 | public int getEmailNotify() { 55 | 
return emailNotify; 56 | } 57 | public void setEmailNotify(int emailNotify) { 58 | this.emailNotify = emailNotify; 59 | } 60 | public int getResultSize() { 61 | return resultSize; 62 | } 63 | public void setResultSize(int resultSize) { 64 | this.resultSize = resultSize; 65 | } 66 | public String getMessage() { 67 | return message; 68 | } 69 | public void setMessage(String message) { 70 | this.message = message; 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/filter/LoginFilter.java: -------------------------------------------------------------------------------- 1 | package com.prophet.filter; 2 | 3 | import java.io.IOException; 4 | import java.util.HashMap; 5 | import java.util.Map; 6 | 7 | import javax.servlet.Filter; 8 | import javax.servlet.FilterChain; 9 | import javax.servlet.FilterConfig; 10 | import javax.servlet.ServletException; 11 | import javax.servlet.ServletOutputStream; 12 | import javax.servlet.ServletRequest; 13 | import javax.servlet.ServletResponse; 14 | import javax.servlet.annotation.WebFilter; 15 | import javax.servlet.http.HttpServletRequest; 16 | import javax.servlet.http.HttpSession; 17 | 18 | import org.springframework.core.annotation.Order; 19 | 20 | import com.fasterxml.jackson.core.JsonProcessingException; 21 | import com.fasterxml.jackson.databind.ObjectMapper; 22 | 23 | //Order表示顺序,数字越小越优先处理 24 | @Order(1) 25 | @WebFilter(filterName = "loginFilter", urlPatterns = "/*") 26 | public class LoginFilter implements Filter{ 27 | 28 | @Override 29 | public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) 30 | throws IOException, ServletException { 31 | /** 32 | * 这里可以强制转换是因为HttpServletRequest和ServletRequest都是接口,前者继承自后者。 33 | * ServletRequest request并不是ServletRequest的实例,强制转换安全。 34 | */ 35 | HttpServletRequest request = (HttpServletRequest)servletRequest; 36 | //判断URL白名单 37 | String requestURI = 
request.getRequestURI(); 38 | if ( 39 | !requestURI.equals("/login.json") && 40 | !requestURI.equals("/logout.json") 41 | ) { 42 | HttpSession session = request.getSession(false); //do not create a session just to check login state; getSession() never returns null, so the null check below only works with getSession(false) 43 | 44 | if (session == null || session.getAttribute("loginedUser") == null) { 45 | //not logged in: answer directly from the filter, do not forward to the backend 46 | Map m = new HashMap(); 47 | m.put("status", 2); 48 | m.put("message", "unlogin"); 49 | m.put("data", null); 50 | ObjectMapper mapper = new ObjectMapper(); 51 | String jsonResult = mapper.writeValueAsString(m); 52 | servletResponse.setContentType("application/json;charset=UTF-8"); //declare charset so the JSON is not mangled by the container default encoding 53 | //emit the JSON as the HTTP response 54 | ServletOutputStream out = servletResponse.getOutputStream(); 55 | out.print(jsonResult); 56 | out.close(); 57 | } else { 58 | //already logged in: pass the request through to the backend 59 | filterChain.doFilter(servletRequest, servletResponse); 60 | } 61 | } else { 62 | filterChain.doFilter(servletRequest, servletResponse); 63 | } 64 | 65 | } 66 | 67 | @Override 68 | public void init(FilterConfig filterConfig) throws ServletException { 69 | 70 | } 71 | 72 | @Override 73 | public void destroy() { 74 | 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/interfaces/UserAuthDaoInterface.java: -------------------------------------------------------------------------------- 1 | /** 2 | * 3 | */ 4 | package com.prophet.interfaces; 5 | 6 | /** 7 | * Interface for pluggable user authentication backends (LDAP / built-in). 8 | */ 9 | public interface UserAuthDaoInterface { 10 | /** 11 | * Verify username and password: 0 = accepted, 1 = rejected. 12 | * @param username 13 | * @param password 14 | * @return 15 | */ 16 | public int authenticate(String username, String password); 17 | 18 | /** 19 | * Check whether the user exists in the backend. 20 | * @param username 21 | * @return 22 | */ 23 | public boolean hasUser(String username); 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/service/BaseService.java: -------------------------------------------------------------------------------- 1 | package com.prophet.service; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | 
/** 7 | * 所有Service类的基类 8 | * 背景:所有DAO层(除多线程包装的之外)会直接向service层返回数据和抛出异常,需要在service层进行捕获并封装成一个Map返回给controller层 9 | * Map结构:{"msg":"ok", "data":null} 10 | */ 11 | public class BaseService { 12 | /*protected ThreadLocal> serviceResult = new ThreadLocal>(){ 13 | @Override 14 | protected Map initialValue() { 15 | Map serviceResult = new HashMap(); 16 | serviceResult.put("msg", "ok"); 17 | serviceResult.put("data", null); 18 | return serviceResult; 19 | } 20 | };*/ 21 | 22 | protected Map initServiceResult() { 23 | Map serviceResult = new HashMap(); 24 | serviceResult.put("msg", "ok"); 25 | serviceResult.put("data", null); 26 | return serviceResult; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/service/HiveMetaStoreService.java: -------------------------------------------------------------------------------- 1 | package com.prophet.service; 2 | 3 | import java.util.List; 4 | import java.util.ArrayList; 5 | import java.util.Map; 6 | import java.util.HashMap; 7 | 8 | import org.springframework.beans.factory.annotation.Autowired; 9 | import org.springframework.stereotype.Service; 10 | 11 | import com.prophet.dao.HiveMetaStoreDao; 12 | 13 | @Service 14 | public class HiveMetaStoreService extends BaseService{ 15 | private HiveMetaStoreDao hiveMetaStoreDao; 16 | 17 | public HiveMetaStoreDao getHiveMetaStoreDao() { 18 | return hiveMetaStoreDao; 19 | } 20 | 21 | @Autowired 22 | public void setHiveMetaStoreDao(HiveMetaStoreDao hiveMetaStoreDao) { 23 | this.hiveMetaStoreDao = hiveMetaStoreDao; 24 | } 25 | 26 | /** 27 | * 查询metastore中所有的库名和对应包含的表名列表 28 | * 返回数据结构dbTableResult:{ 29 | * 'default':[ 30 | * {'TBL_ID':21, 'TBL_NAME':'access_path1', 'TBL_TYPE':'EXTERNAL_TABLE'}, 31 | * {'TBL_ID':22, 'TBL_NAME':'access_path2', 'TBL_TYPE':'EXTERNAL_TABLE'}, 32 | * {'TBL_ID':23, 'TBL_NAME':'access_path3', 'TBL_TYPE':'MANAGED_TABLE'} 33 | * ... 
34 | * ], 35 | * 'formatter':[ 36 | * {'TBL_ID':24, 'TBL_NAME':'access_path4', 'TBL_TYPE':'EXTERNAL_TABLE'}, 37 | * {'TBL_ID':25, 'TBL_NAME':'access_path5', 'TBL_TYPE':'EXTERNAL_TABLE'} 38 | * ] 39 | * } 40 | * */ 41 | public Map getAllDbAndTablesInMetaStore() { 42 | Map serviceResult = this.initServiceResult(); 43 | HashMap>> dbTableResult = new HashMap>>(); 44 | List> daoResult = null; 45 | try { 46 | daoResult = this.hiveMetaStoreDao.getAllDbAndTablesInMetaStore(); 47 | for (Map line : daoResult) { 48 | //如果数据里的db在当前db组内 49 | ArrayList> previousList = dbTableResult.get(line.get("DB_NAME")); 50 | if (previousList == null) { 51 | //如果结果集里没有该db,则需要初始化一个空ArrayList出来;否则直接加入即可 52 | previousList = new ArrayList>(); 53 | } 54 | HashMap currLine = new HashMap(); 55 | currLine.put("TBL_ID", line.get("TBL_ID")); 56 | currLine.put("TBL_NAME", line.get("TBL_NAME")); 57 | currLine.put("TBL_TYPE", line.get("TBL_TYPE")); 58 | previousList.add(currLine); 59 | 60 | //覆盖性写入 61 | dbTableResult.put(line.get("DB_NAME").toString(), previousList); 62 | } 63 | } catch (Exception ex) { 64 | serviceResult.put("msg", ex.getMessage()); 65 | } 66 | serviceResult.put("data", dbTableResult); 67 | return serviceResult; 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/service/HiveSecretDataService.java: -------------------------------------------------------------------------------- 1 | package com.prophet.service; 2 | import java.util.ArrayList; 3 | import java.util.HashMap; 4 | import java.util.List; 5 | import java.util.Map; 6 | 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.stereotype.Service; 9 | 10 | import com.prophet.dao.HiveSecretTableDao; 11 | import com.prophet.dao.HiveSecretUserPrivsDao; 12 | import com.prophet.domain.HiveSecretTable; 13 | import com.prophet.dao.HiveMetaStoreDao; 14 | 15 | @Service 16 | public class HiveSecretDataService extends 
BaseService{ 17 | private HiveSecretTableDao hiveSecretTableDao; 18 | private HiveSecretUserPrivsDao hiveSecretUserPrivsDao; 19 | private HiveMetaStoreDao hiveMetaStoreDao; 20 | 21 | @Autowired 22 | public void setHiveSecretTableDao(HiveSecretTableDao hiveSecretTableDao) { 23 | this.hiveSecretTableDao = hiveSecretTableDao; 24 | } 25 | 26 | @Autowired 27 | public void setHiveSecretUserPrivsDao(HiveSecretUserPrivsDao hiveSecretUserPrivsDao) { 28 | this.hiveSecretUserPrivsDao = hiveSecretUserPrivsDao; 29 | } 30 | 31 | @Autowired 32 | public void setHiveMetaStoreDao(HiveMetaStoreDao hiveMetaStoreDao) { 33 | this.hiveMetaStoreDao = hiveMetaStoreDao; 34 | } 35 | 36 | /** 37 | * 验证某个表是否为机密表 38 | * @return 39 | */ 40 | public List> checkIsSecretTable(String tableSchema, String tableName) { 41 | return this.hiveSecretTableDao.checkIsSecretTable(tableSchema, tableName); 42 | } 43 | 44 | /** 45 | * 检查某个表某个用户是否有权限 46 | * @param username 47 | * @param tableSchema 48 | * @param tableName 49 | * @return 50 | */ 51 | public boolean checkPrivilege(String username, String tableSchema, String tableName) { 52 | List> daoResult = this.hiveSecretUserPrivsDao.checkSecretPrivilege(username, tableSchema, tableName); 53 | if (daoResult.size() == 0) { 54 | return false; 55 | } else { 56 | return true; 57 | } 58 | } 59 | 60 | /** 61 | * 所有机密表面板,顺便展示哪些是当前用户有权限的 62 | * @param username 63 | * @return 64 | * 返回数据结构dbTableResult:{ 65 | * 'default':[ 66 | * {'table_id':21, 'table_name':'access_path1', 'info':'...'}, 67 | * {'table_id':22, 'table_name':'access_path2', 'info':null}, 68 | * ... 69 | * ], 70 | * 'users':[ 71 | * {'table_id':24, 'table_name':'access_path5', 'info':'...'}, 72 | * {'table_id':25, 'table_name':'access_path6', 'info':null}, 73 | * ... 
74 | * ], 75 | * } 76 | */ 77 | public Map getAllSecretTablesByUser(String username) { 78 | Map serviceResult = this.initServiceResult(); 79 | Map>> dbTableResult = new HashMap>>(); 80 | List> daoResult = null; 81 | try { 82 | daoResult = this.hiveSecretTableDao.getAllSecretTablesByUser(username); 83 | for (Map line : daoResult) { 84 | //group each row under its db (table_schema) 85 | ArrayList> previousList = dbTableResult.get(line.get("table_schema")); 86 | if (previousList == null) { 87 | //first table seen for this db: start a fresh list; otherwise append to the existing one 88 | previousList = new ArrayList>(); 89 | } 90 | HashMap currLine = new HashMap(); 91 | currLine.put("table_id", line.get("table_id")); 92 | currLine.put("table_name", line.get("table_name")); 93 | currLine.put("info", line.get("info")); 94 | previousList.add(currLine); 95 | 96 | //overwrite the entry so the map always holds the latest list 97 | dbTableResult.put(line.get("table_schema").toString(), previousList); 98 | } 99 | } catch (Exception ex) { 100 | serviceResult.put("msg", ex.getMessage()); 101 | } 102 | serviceResult.put("data", dbTableResult); 103 | return serviceResult; 104 | } 105 | 106 | public Map getAllNonSecretTables() { 107 | Map serviceResult = this.initServiceResult(); 108 | List> nonSecretTables = null; 109 | try { 110 | nonSecretTables = this.hiveSecretTableDao.getAllNonSecretTables(); 111 | 112 | } catch (Exception ex) { 113 | serviceResult.put("msg", ex.getMessage()); 114 | } 115 | serviceResult.put("data", nonSecretTables); 116 | return serviceResult; 117 | } 118 | 119 | /** 120 | * Register tables as secret (access-controlled) tables. Entries must be "db.table". 121 | * @param targetSecretTables 122 | * @return 123 | */ 124 | public Map addSecretTables(List targetSecretTables) { 125 | Map serviceResult = this.initServiceResult(); 126 | int data = 1; 127 | List secretTables = new ArrayList(); 128 | for (String dbAndTable : targetSecretTables) { 129 | String[] s = dbAndTable.split("\\."); 130 | if (s.length != 2) continue; //guard: a malformed entry without "db.table" shape would throw ArrayIndexOutOfBounds outside the try/catch below 131 | HiveSecretTable h = new HiveSecretTable(); 132 | h.setTableSchema(s[0]); h.setTableName(s[1]); 133 | secretTables.add(h); 134 | } 135 | try { 136 | 
this.hiveSecretTableDao.addSecretTables(secretTables); 137 | 138 | } catch (Exception ex) { 139 | serviceResult.put("msg", ex.getMessage()); 140 | data = -1; 141 | } 142 | serviceResult.put("data", data); 143 | return serviceResult; 144 | } 145 | 146 | /** 147 | * 获取所有机密表 148 | * @return 149 | */ 150 | public Map getAllSecretTables() { 151 | Map serviceResult = this.initServiceResult(); 152 | List> daoResult = null; 153 | 154 | try { 155 | daoResult = this.hiveSecretTableDao.getAllSecretTables(); 156 | 157 | } catch (Exception ex) { 158 | serviceResult.put("msg", ex.getMessage()); 159 | } 160 | serviceResult.put("data", daoResult); 161 | return serviceResult; 162 | } 163 | 164 | /** 165 | * 给用户批量授权机密表权限 166 | * @param username 167 | * @param secretTableIds 168 | * @return 169 | */ 170 | public Map grantSecretPrivToUser(String username, List secretTableIds) { 171 | Map serviceResult = this.initServiceResult(); 172 | try { 173 | for (Integer id : secretTableIds) { 174 | this.hiveSecretUserPrivsDao.insertOneUserSecretPriv(username, id); 175 | } 176 | 177 | } catch (Exception ex) { 178 | serviceResult.put("msg", ex.getMessage()); 179 | } 180 | serviceResult.put("data", null); 181 | return serviceResult; 182 | } 183 | 184 | } 185 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/service/HiveServerService.java: -------------------------------------------------------------------------------- 1 | package com.prophet.service; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | import java.util.Set; 6 | import java.util.HashMap; 7 | 8 | import org.springframework.beans.factory.annotation.Autowired; 9 | import org.springframework.stereotype.Service; 10 | 11 | import com.prophet.dao.HiveServerDao; 12 | import com.prophet.dao.QueryHistoryDao; 13 | import com.prophet.common.QueryHistoryStatusEnum; 14 | import com.prophet.common.ThreadPool; 15 | 16 | 17 | @Service 18 | public class HiveServerService extends 
BaseService{ 19 | private HiveServerDao hiveServerDao; 20 | private QueryHistoryDao queryHistoryDao; 21 | 22 | @Autowired 23 | public void setHiveServerDao(HiveServerDao hiveServerDao) { 24 | this.hiveServerDao = hiveServerDao; 25 | } 26 | 27 | @Autowired 28 | public void setQueryHistoryDao(QueryHistoryDao queryHistoryDao) { 29 | this.queryHistoryDao = queryHistoryDao; 30 | } 31 | 32 | /** 33 | * 向metastore数据字典查询表结构信息 34 | * @return 35 | */ 36 | public Map descTable(String tableNameWithDb) { 37 | Map serviceResult = this.initServiceResult(); 38 | Map colsAndData = new HashMap(); 39 | 40 | Map dataWithType = new HashMap(); 41 | dataWithType.put("type", "desc_table"); 42 | dataWithType.put("data", colsAndData); 43 | List> daoResult = null; 44 | try { 45 | daoResult = this.hiveServerDao.descTableInfo(tableNameWithDb); 46 | Set columnSet = null; 47 | if (!daoResult.isEmpty()) { 48 | columnSet = daoResult.get(0).keySet(); 49 | } else { 50 | serviceResult.put("msg", String.format("从hiveserver里获取的%s表结构信息错误!请检查hiveserver...", tableNameWithDb)); 51 | } 52 | colsAndData.put("result_cols", columnSet); 53 | colsAndData.put("result_data", daoResult); 54 | 55 | } catch (Exception ex) { 56 | serviceResult.put("msg", ex.getMessage()); 57 | } 58 | //更新data应放在return前一句,防止单次session里在下次更新data之前出现了异常而没来得及更新data,从而返回了之前的data. 
        serviceResult.put("data", dataWithType);
        return serviceResult;
    }


    /**
     * Submits a hive query for asynchronous execution on a thread-pool thread.
     * @param queryContent the HQL text to execute
     * @param username     the submitting user
     * @param queryHistId  id of the query-history row tracking this run
     * @param emailNotify  1 to e-mail the result on completion, 0 otherwise
     * @return serviceResult map; "msg" carries the error text if submission fails
     */
    public Map executeHiveSqlQuery(String queryContent, String username, long queryHistId, int emailNotify) {
        Map serviceResult = this.initServiceResult();
        try {
            // hand the query to hiveserver via the DAO
            this.hiveServerDao.sendHiveQuery(queryContent, username, queryHistId, emailNotify);
        } catch (Exception ex) {
            // Only thread-submission failures surface here; execution errors are
            // caught and handled inside the worker thread itself.
            serviceResult.put("msg", ex.getMessage());
        }

        return serviceResult;
    }

    /**
     * Loads one page of a historical query result from disk.
     * @param username    owner of the stored result
     * @param queryHistId id of the query-history row
     * @return serviceResult:
     *   "msg":"ok",
     *   "data":
     *   {
     *     "type":"sql_query",
     *     "data":{
     *       "result_cols":[],
     *       "result_data":[]
     *     },
     *     "size":300
     *   }
     */
    public Map getHistoryResultFromDiskById(String username, long queryHistId, int pageNo) {
        Map serviceResult = this.initServiceResult();
        Map daoResult = null;
        try {
            daoResult = this.hiveServerDao.getResultFromDiskByIdByPage(username, queryHistId, pageNo, HiveServerDao.PAGE_ROWS);
            int resultSize = this.queryHistoryDao.getQueryHistoryById(queryHistId).getResultSize();
            daoResult.put("size", resultSize);
        } catch (Exception ex) {
            // NOTE(review): on failure "data" may be null or partial; callers
            // must check "msg" first.
            serviceResult.put("msg", ex.getMessage());
        }

        serviceResult.put("data", daoResult);
        return serviceResult;
    }

    /**
     * Manually cancels a running query task.
     * @param username    requesting user; must own the task
     * @param queryHistId id of the query-history row
     * @return serviceResult map; "msg" explains a refusal or failure
     */
    public Map cancelTaskById (String username, long queryHistId) {
        Map serviceResult = this.initServiceResult();
        try {
            // server-side ownership check: a user may only cancel his own task
            if (this.queryHistoryDao.getQueryHistoryById(queryHistId).getUsername().equals(username)) {
                // mark the history row as aborted first...
                this.queryHistoryDao.updateQueryHistoryStatusAndMsg(queryHistId,
                        QueryHistoryStatusEnum.ABORTED.getIndex(), QueryHistoryStatusEnum.ABORTED.getName());

                // ...then stop the worker thread
                ThreadPool.stopThread(queryHistId);
            } else {
                serviceResult.put("msg", "该语句id不属于你,无法取消");
            }

        } catch (Exception ex) {
            serviceResult.put("msg", ex.getMessage());
        }

        serviceResult.put("data", null);
        return serviceResult;
    }
}

// ---------------------------------------------------------------------------
// src/main/java/com/prophet/service/QueryHistoryService.java
// ---------------------------------------------------------------------------
package com.prophet.service;
import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.prophet.dao.QueryHistoryDao;
import com.prophet.util.DateTimeUtil;
import com.prophet.domain.QueryHistory;
import com.prophet.common.QueryHistoryStatusEnum;

/** Service around the query-history table: insert, list, and status updates. */
@Service
public class QueryHistoryService extends BaseService{
    private QueryHistoryDao queryHistoryDao;

    public QueryHistoryDao getQueryHistoryDao() {
        return queryHistoryDao;
    }

    @Autowired
    public void setQueryHistoryDao(QueryHistoryDao queryHistoryDao) {
        this.queryHistoryDao = queryHistoryDao;
    }

    /**
     * Inserts one query-history row into prophet's database.
     * @return serviceResult map; "data" holds the new row id, or -1 on failure
     */
    public Map insertOneQueryHistory(String queryContent, String username, int emailNotify) {
        Map serviceResult = this.initServiceResult();
        long insertId = -1;
        try {
            insertId = this.queryHistoryDao.insertQueryHistory(DateTimeUtil.getNow(), queryContent, QueryHistoryStatusEnum.RUNNING.getIndex(), username, emailNotify);
        } catch (Exception ex) {
            serviceResult.put("msg", ex.getMessage());
        }
        // Put "data" immediately before return, so an exception raised earlier
        // in the session can never leave a stale "data" value behind.
        serviceResult.put("data", insertId);
        return serviceResult;
    }

    /**
     * Lists this user's query history.
     * @param username
     * @return serviceResult map; "data" holds the list (null on failure)
     */
    public Map getAllQueryHistoryByUser(String username) {
        Map serviceResult = this.initServiceResult();
        List daoResult = null;
        try {
            daoResult = this.queryHistoryDao.getAllQueryHistory(username);
        } catch (Exception ex) {
            serviceResult.put("msg", ex.getMessage());
        }
        serviceResult.put("data", daoResult);
        return serviceResult;
    }

    /** Fetches one query-history row by id; "data" is null on failure. */
    public Map getQueryHistoryById(long id) {
        Map serviceResult = this.initServiceResult();
        QueryHistory qh = null;
        try {
            qh = this.queryHistoryDao.getQueryHistoryById(id);
        } catch (Exception ex) {
            serviceResult.put("msg", ex.getMessage());
        }
        serviceResult.put("data", qh);
        return serviceResult;
    }

    /**
     * Updates the status and message of one query-history row.
     * @param queryHistId
     * @param status
     */
    public void updateQueryHistoryStatusAndMsg(long queryHistId, QueryHistoryStatusEnum status, String message) {
        this.queryHistoryDao.updateQueryHistoryStatusAndMsg(queryHistId,
                status.getIndex(), message);
    }

}

// ---------------------------------------------------------------------------
// src/main/java/com/prophet/service/UserAuthService.java
// ---------------------------------------------------------------------------
package com.prophet.service;

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

import com.prophet.dao.UserAuthLdapDao;
import com.prophet.dao.UserAuthProphetDao;
import com.prophet.dao.AdminDao;
import
com.prophet.interfaces.UserAuthDaoInterface; 16 | 17 | @Service 18 | public class UserAuthService extends BaseService{ 19 | /*用户认证系统的类型*/ 20 | @Value("${authentication.system}") 21 | private String authSystemType; 22 | 23 | /*LDAP相关配置*/ 24 | @Value("${authentication.ldap.url}") 25 | private String LDAP_URL; 26 | @Value("${authentication.ldap.base-dn}") 27 | private String LDAP_BASE_DN; 28 | @Value("${authentication.ldap.user-search-dn}") 29 | private String LDAP_USER_SEARCH_DN; 30 | @Value("${authentication.ldap.user-search-column}") 31 | private String LDAP_USER_SEARCH_COLUMN; 32 | @Value("${authentication.ldap.factory}") 33 | private String LDAP_FACTORY; 34 | @Value("${authentication.ldap.security-authentication}") 35 | private String LDAP_SECURITY_AUTHENTICATION; 36 | @Value("${authentication.ldap.security-credenticials}") 37 | private String LDAP_SECURITY_CREDENTIALS; 38 | 39 | private AdminDao adminDao; 40 | 41 | @Autowired 42 | @Qualifier("prophetJdbcTemplate") 43 | private JdbcTemplate jdbcTemplateProphet; 44 | 45 | @Autowired 46 | public void setAdminDao(AdminDao adminDao) { 47 | this.adminDao = adminDao; 48 | } 49 | 50 | public UserAuthService() { 51 | 52 | } 53 | 54 | /*单例,常驻内存,所有UserAuthService实例共享同一个userAuthDao*/ 55 | private static UserAuthDaoInterface userAuthDao; 56 | 57 | /** 58 | * 工厂模式:生产认证连接类DAO的工厂 59 | * 设计思想:UserAuthService由spring初始化后先从配置文件里读取配置的Auth开关、LDAP等信息,然后authenticate时调用该方法 60 | * 获取具体类型的dao(如果null则初始化并常驻内存)。 61 | * @return 具体类型的dao 62 | * @throws Exception 63 | */ 64 | private UserAuthDaoInterface getUserAuthDao() throws Exception { 65 | if (!this.validateConfig(this.authSystemType)) { 66 | throw new Exception(String.format("application.properties文件里%s相关参数配置不正确,请检查!", this.authSystemType)); 67 | } 68 | switch (this.authSystemType.toLowerCase()) { 69 | case "ldap": 70 | if (userAuthDao == null) { 71 | synchronized(UserAuthService.class) { 72 | if (userAuthDao == null) { 73 | userAuthDao = new UserAuthLdapDao(LDAP_URL, LDAP_BASE_DN, 
LDAP_USER_SEARCH_DN, LDAP_USER_SEARCH_COLUMN, 74 | LDAP_FACTORY, LDAP_SECURITY_AUTHENTICATION, LDAP_SECURITY_CREDENTIALS); 75 | } 76 | } 77 | } 78 | break; 79 | case "prophet": 80 | if (userAuthDao == null) { 81 | synchronized(UserAuthService.class) { 82 | if (userAuthDao == null) { 83 | userAuthDao = new UserAuthProphetDao(this.jdbcTemplateProphet); 84 | } 85 | } 86 | } 87 | break; 88 | } 89 | return userAuthDao; 90 | } 91 | 92 | /** 93 | * 验证用户名密码是否正确 94 | * @param uid 95 | * @param password 96 | * @return 97 | * @throws Exception 98 | */ 99 | public Map authenticate(String uid, String password) { 100 | Map serviceResult = this.initServiceResult(); 101 | int daoResult = -1; 102 | try { 103 | daoResult = this.getUserAuthDao().authenticate(uid, password); 104 | 105 | } catch (Exception ex) { 106 | serviceResult.put("msg", ex.getMessage()); 107 | } 108 | serviceResult.put("data", daoResult); 109 | return serviceResult; 110 | } 111 | 112 | /** 113 | * 根据auth系统类型验证对应的参数是否在application.properties文件里配置正确 114 | * @param authSystemType 115 | * @return 116 | */ 117 | private boolean validateConfig(String authSystemType) { 118 | return true; 119 | } 120 | 121 | /** 122 | * 检查某个用户是否是admin 123 | * @param username 124 | * @return 125 | */ 126 | public boolean isAdmin(String username) { 127 | return (this.adminDao.checkIsAdmin(username).size() == 0) ? 
false : true; 128 | } 129 | 130 | /** 131 | * 检查用户系统里是否存在某个用户 132 | * @param username 133 | * @return 134 | * @throws Exception 135 | */ 136 | public boolean hasUser(String username) throws Exception { 137 | return this.getUserAuthDao().hasUser(username); 138 | } 139 | 140 | /** 141 | * 获取用户认证系统的类型 142 | * @return 143 | */ 144 | public String getUserAuthSystemType() { 145 | return this.authSystemType; 146 | } 147 | 148 | /** 149 | * 在使用了prophet内置用户系统情况下,获取所有用户的信息 150 | * @return 151 | */ 152 | public Map getAllProphetUsers() { 153 | Map serviceResult = this.initServiceResult(); 154 | List> daoResult = null; 155 | try { 156 | if (this.authSystemType.toLowerCase().equals("prophet")) { 157 | daoResult = ((UserAuthProphetDao)this.getUserAuthDao()).getAllProphetUsers(); 158 | } 159 | } catch (Exception ex) { 160 | serviceResult.put("msg", ex.getMessage()); 161 | } 162 | serviceResult.put("data", daoResult); 163 | return serviceResult; 164 | } 165 | 166 | /** 167 | * 增加一个prophet user 168 | * @param username 169 | * @param password 170 | * @param isActive 171 | * @param userType 172 | * @return 173 | */ 174 | public Map addProphetUser(String username, String password, String isActive, String userType) { 175 | Map serviceResult = this.initServiceResult(); 176 | int daoResult = -1; 177 | try { 178 | //如果是admin则向admin表里插入一个 179 | if (userType.equals("admin")) { 180 | this.adminDao.insertOneAdmin(username); 181 | } 182 | 183 | if (this.authSystemType.toLowerCase().equals("prophet")) { 184 | daoResult = ((UserAuthProphetDao)this.getUserAuthDao()).addProphetUser(username, password, isActive, userType); 185 | } 186 | } catch (Exception ex) { 187 | serviceResult.put("msg", ex.getMessage()); 188 | } 189 | serviceResult.put("data", daoResult); 190 | return serviceResult; 191 | } 192 | 193 | public Map deleteUserById(int userId) { 194 | Map serviceResult = this.initServiceResult(); 195 | int daoResult = -1; 196 | try { 197 | 198 | if 
(this.authSystemType.toLowerCase().equals("prophet")) { 199 | ((UserAuthProphetDao)this.getUserAuthDao()).deleteUserById(userId); 200 | } 201 | } catch (Exception ex) { 202 | serviceResult.put("msg", ex.getMessage()); 203 | } 204 | serviceResult.put("data", daoResult); 205 | return serviceResult; 206 | } 207 | } 208 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/util/DateTimeUtil.java: -------------------------------------------------------------------------------- 1 | package com.prophet.util; 2 | import java.text.SimpleDateFormat; 3 | import java.util.Date; 4 | 5 | public class DateTimeUtil { 6 | /** 7 | * 获取当前时间,时间格式:yyyy-MM-dd HH:mm:ss 8 | * @return 9 | */ 10 | public static String getNow() { 11 | Date now = new Date(); 12 | SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); 13 | return dateFormat.format(now); 14 | } 15 | 16 | /** 17 | * 将指定时间Date对象格式化为字符串,时间格式:yyyy-MM-dd HH:mm:ss 18 | * @param time 19 | * @return 20 | */ 21 | public static String formatDatetime(Date time) { 22 | SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); 23 | return dateFormat.format(time); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/web/BaseController.java: -------------------------------------------------------------------------------- 1 | package com.prophet.web; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | import javax.servlet.http.HttpServletRequest; 7 | import javax.servlet.http.HttpSession; 8 | 9 | public class BaseController { 10 | 11 | /** 12 | * 将数据封装成json格式作为RESTFUL接口返回给前端 13 | * @param data 14 | * @return Map restfulResult 15 | */ 16 | protected Map encodeToJsonResult(Map serviceResult) { 17 | Map restfulResult = new HashMap(); 18 | int status = 0; 19 | if (!serviceResult.get("msg").equals("ok")) { 20 | status = 1; 21 | } 22 | restfulResult.put("status", 
status); 23 | restfulResult.put("message", serviceResult.get("msg")); 24 | restfulResult.put("data", serviceResult.get("data")); 25 | return restfulResult; 26 | } 27 | 28 | /** 29 | * 方法重载1:将数据封装成json格式作为RESTFUL接口返回给前端 30 | * @param data 31 | * @return Map restfulResult 32 | */ 33 | protected Map encodeToJsonResult(int status, String msg, Object data) { 34 | Map restfulResult = new HashMap(); 35 | restfulResult.put("status", status); 36 | restfulResult.put("message", msg); 37 | restfulResult.put("data", data); 38 | return restfulResult; 39 | } 40 | 41 | /** 42 | * 获取当前session里登录的用户信息 43 | * @param request 44 | * @return Map 45 | */ 46 | protected Map getLoginUserInfo(HttpServletRequest request) { 47 | Map result = new HashMap(); 48 | HttpSession session = request.getSession(); 49 | String username = ""; 50 | String isAdmin = ""; 51 | if ( 52 | session == null || 53 | session.getAttribute("loginedUser") == null || 54 | session.getAttribute("isAdmin") == null 55 | ) { 56 | username = "匿名用户"; 57 | isAdmin = "0"; 58 | } else { 59 | username = session.getAttribute("loginedUser").toString(); 60 | isAdmin = session.getAttribute("isAdmin").toString(); 61 | } 62 | result.put("loginedUser", username); 63 | result.put("isAdmin", isAdmin); 64 | result.put("userAuthSystemType", session.getAttribute("userAuthSystemType")); 65 | return result; 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/main/java/com/prophet/web/HiveQueryController.java: -------------------------------------------------------------------------------- 1 | package com.prophet.web; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.io.PrintWriter; 6 | import java.util.ArrayList; 7 | import java.util.HashMap; 8 | import java.util.List; 9 | import java.util.Map; 10 | import java.util.Set; 11 | 12 | import javax.servlet.http.HttpServletRequest; 13 | import javax.servlet.http.HttpServletResponse; 14 | 15 | import 
org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;

import com.prophet.service.HiveMetaStoreService;
import com.prophet.service.HiveServerService;
import com.prophet.service.QueryHistoryService;
import com.prophet.service.HiveSecretDataService;
import com.prophet.web.postparameters.HiveQueryCommand;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.prophet.common.HQLParser;
import com.prophet.common.QueryHistoryStatusEnum;

/**
 * REST endpoints for hive querying: metastore browsing, query submission with
 * dangerous-statement and secret-table checks, history/status polling, result
 * paging, CSV download and task cancellation.
 */
@RestController
public class HiveQueryController extends BaseController{
    private HiveMetaStoreService hiveMetaStoreService;
    private HiveServerService hiveServerService;
    private QueryHistoryService queryHistoryService;
    private HiveSecretDataService hiveSecretDataService;


    @Autowired
    public void setHiveMetaStoreService(HiveMetaStoreService hiveMetaStoreService) {
        this.hiveMetaStoreService = hiveMetaStoreService;
    }

    @Autowired
    public void setHiveServerService(HiveServerService hiveServerService) {
        this.hiveServerService = hiveServerService;
    }

    @Autowired
    public void setQueryHistoryService(QueryHistoryService queryHistoryService) {
        this.queryHistoryService = queryHistoryService;
    }

    @Autowired
    public void setHiveSecretDataService(HiveSecretDataService hiveSecretDataService) {
        this.hiveSecretDataService = hiveSecretDataService;
    }

    /**
     * Returns all databases and tables known to the metastore.
     * @param request
     */
    @RequestMapping(value = "/hive_query/all_metastore_db_tables.json", method = RequestMethod.GET)
    public Map allDbAndTablesInMetaStoreController(HttpServletRequest request) throws Exception{
        Map data =
                this.hiveMetaStoreService.getAllDbAndTablesInMetaStore();
        return this.encodeToJsonResult(data);
    }

    /**
     * Describes one table's schema.
     * @param request
     */
    @RequestMapping(value = "/hive_query/desc_table.json", method = RequestMethod.GET)
    public Map descTableController(HttpServletRequest request) {
        String tableNameWithDb = request.getParameter("tableNameWithDb");
        Map serviceResult = this.hiveServerService.descTable(tableNameWithDb);

        return this.encodeToJsonResult(serviceResult);
    }

    /**
     * Sends an SQL query to hiveserver after dangerous-statement filtering and
     * secret-table permission checks; the result is written to disk and status
     * and message are stored in the DB. The frontend polls asynchronously for
     * status and results.
     * @param request
     * @param hiveQueryCommand
     */
    @RequestMapping(value = "/hive_query/send_query.json", method = RequestMethod.POST)
    public Map sendHiveSqlQueryController(HttpServletRequest request, HiveQueryCommand hiveQueryCommand) {
        String queryContent = hiveQueryCommand.getQueryContent().trim();
        long queryHistId = hiveQueryCommand.getQueryHistId();
        String strEmailNotify = hiveQueryCommand.getEmailNotify();
        int emailNotify = strEmailNotify.equals("true") ? 1 : 0;
        // strip a trailing semicolon
        if (queryContent.endsWith(";")) {
            queryContent = queryContent.substring(0, queryContent.length() - 1);
        }

        Map restfulResult = new HashMap();
        // Parse the HQL first so dangerous statements never reach the backend.
        HQLParser hqlParser = new HQLParser();
        try {
            hqlParser.parseHQL(queryContent);
        } catch (Exception ex) {
            // record the parse failure in the history row
            String message = ex.getMessage() + "\tCaused by: " + ex.getCause();
            this.queryHistoryService.updateQueryHistoryStatusAndMsg(queryHistId, QueryHistoryStatusEnum.ERROR, message);

            restfulResult.put("status", 1);
            restfulResult.put("message", message);
            restfulResult.put("data", null);
            return restfulResult;
        }

        String oper = "";
        Set queriedTables = null;
        try {
            oper = hqlParser.getOper();
            queriedTables = hqlParser.getTables();
        } catch (Exception ex) {
            // unsupported statement type
            String message = "该SQL语句类型不支持!";
            restfulResult.put("status", 1);
            restfulResult.put("message", message);
            restfulResult.put("data", null);
            this.queryHistoryService.updateQueryHistoryStatusAndMsg(queryHistId, QueryHistoryStatusEnum.ERROR, message);
            return restfulResult;
        }

        if ( oper.equals("INSERT") || oper.equals("DROP") || oper.equals("TRUNCATE") ||
                oper.equals("LOAD") || oper.equals("CREATETABLE") || oper.equals("ALTER") ||
                oper.equals("CREATEDATABASE") || oper.equals("DROPDATABASE")
        ) {

            // Reject dangerous DML/DDL outright.
            // NOTE(review): "不运行执行" in the user-facing message below is likely a
            // typo for "不允许执行" — runtime string left unchanged here.
            String message = "INSERT、DROP、TRUNCATE、LOAD、CREATETABLE、ALTER、CREATEDATABASE、DROPDATABASE等高危语句不运行执行!";
            restfulResult.put("status", 1);
            restfulResult.put("message", message);
            restfulResult.put("data", null);
            this.queryHistoryService.updateQueryHistoryStatusAndMsg(queryHistId, QueryHistoryStatusEnum.ERROR, message);
            return restfulResult;
        }

        // After the danger check, verify none of the referenced tables is
        // secret data the user lacks privileges for.
        List<Map<String, Object>> noPrivResult = new ArrayList<Map<String, Object>>();
        for (String queriedTable : queriedTables) {
            String queriedDb = "";
            if (queriedTable.contains(".")) {
                String[] dbAndTable = queriedTable.split("\\."); // the dot must be escaped as \\. in a Java regex
                if (dbAndTable.length != 2) {
                    String message = String.format("SQL语句%s表解析出来的db和table不对,请检查!", queriedTable);
                    restfulResult.put("status", 1);
                    restfulResult.put("message", message);
                    restfulResult.put("data", null);
                    this.queryHistoryService.updateQueryHistoryStatusAndMsg(queryHistId, QueryHistoryStatusEnum.ERROR, message);
                    return restfulResult;
                }
                queriedDb = dbAndTable[0];
                queriedTable = dbAndTable[1];
            } else {
                // no dot means the default db
                queriedDb = "default";
            }

            List<Map<String, Object>> daoSecretResult = this.hiveSecretDataService.checkIsSecretTable(queriedDb, queriedTable);
            if (daoSecretResult.size() != 0) {
                // secret table: check whether this user has been granted access
                if (this.hiveSecretDataService.checkPrivilege(this.getLoginUserInfo(request).get("loginedUser").toString(), queriedDb, queriedTable)) {
                    continue;
                } else {
                    // no privilege: remember the table for the error response
                    Map noPrivTable = new HashMap();
                    noPrivTable.put("table_id", daoSecretResult.get(0).get("id"));
                    noPrivTable.put("table_schema", queriedDb);
                    noPrivTable.put("table_name", queriedTable);
                    noPrivResult.add(noPrivTable);
                }
            } else {
                // not secret data at all: skip
                continue;
            }
        }

        // report all missing-privilege tables to the frontend at once
        if (noPrivResult.size() >= 1) {
            String jsonResult = "";
            ObjectMapper mapper = new ObjectMapper();
            try {
                jsonResult = mapper.writeValueAsString(noPrivResult);
            } catch (JsonProcessingException e) {
                e.printStackTrace();
            }
            String message = "用户SQL中查询到的以下数据表为机密表,而且您没有权限查询,需要联系管理员申请权限!";

            restfulResult.put("status", 3);
            restfulResult.put("message", message);
            restfulResult.put("data", noPrivResult);
            this.queryHistoryService.updateQueryHistoryStatusAndMsg(queryHistId,
                    QueryHistoryStatusEnum.ERROR, message + jsonResult);
            return restfulResult;
        }

        // every check passed: submit to hive server for execution
        Map serviceResult = this.hiveServerService.executeHiveSqlQuery(queryContent, this.getLoginUserInfo(request).get("loginedUser").toString(), queryHistId, emailNotify);
        // release the parser object
        hqlParser = null;
        return this.encodeToJsonResult(serviceResult);
    }

    /**
     * Stores one query-history record, regardless of execution outcome.
     * @param request
     * @param hiveQueryCommand
     */
    @RequestMapping(value = "/hive_query/save_query_history.json", method = RequestMethod.POST)
    public Map saveQueryHistoryController(HttpServletRequest request, HiveQueryCommand hiveQueryCommand) {
        String queryContent = hiveQueryCommand.getQueryContent();
        String strEmailNotify = hiveQueryCommand.getEmailNotify();
        int emailNotify = strEmailNotify.equals("true") ? 1 : 0;
        Map serviceResult = this.queryHistoryService.insertOneQueryHistory(queryContent, this.getLoginUserInfo(request).get("loginedUser").toString(), emailNotify);
        return this.encodeToJsonResult(serviceResult);
    }

    /**
     * Returns this user's recent query history.
     * @param request
     */
    @RequestMapping(value = "/hive_query/get_query_history.json", method = RequestMethod.GET)
    public Map getAllQueryHistoryController(HttpServletRequest request) {
        Map serviceResult = this.queryHistoryService.getAllQueryHistoryByUser(this.getLoginUserInfo(request).get("loginedUser").toString());
        return this.encodeToJsonResult(serviceResult);
    }

    /**
     * Returns the status of one query task.
     * @param queryHistId
     */
    @RequestMapping(value = "/hive_query/get_query_status.json", method = RequestMethod.GET)
    public Map getQueryHistoryStatusController(HttpServletRequest request, @RequestParam("queryHistId") long queryHistId) {
        Map serviceResult = this.queryHistoryService.getQueryHistoryById(queryHistId);
        return this.encodeToJsonResult(serviceResult);
    }

    /**
     * Reads one page of a historical result text from disk.
     * @param request
     * @param queryHistId
     */
    @RequestMapping(value = "/hive_query/get_history_result.json", method = RequestMethod.GET)
    public Map getHistoryResultController(HttpServletRequest request,
            @RequestParam("queryHistId") long queryHistId,
            @RequestParam("pageNo") int pageNo
    ) {
        Map serviceResult = this.hiveServerService.getHistoryResultFromDiskById(this.getLoginUserInfo(request).get("loginedUser").toString(), queryHistId, pageNo);
        return this.encodeToJsonResult(serviceResult);
    }

    /**
     * Downloads the stored result set as a CSV attachment.
     * @param request
     * @param response
     */
    @RequestMapping(value = "/hive_query/get_csv.json", method = RequestMethod.GET)
    public void getDownload(HttpServletRequest request, HttpServletResponse response, @RequestParam("queryHistId") long queryHistId) {
        // locate the result file on disk
        String filename = com.prophet.config.HiveResultTextConfig.getDataFileName(this.getLoginUserInfo(request).get("loginedUser").toString(), queryHistId);
        File file = new File(filename);
        if (!file.exists()) {
            //logger.error(String.format("数据文件%s不存在!", filename));
        } else {
            // convert each stored line to a CSV row
            LineIterator iter = null;
            PrintWriter writer = null;
            try {
                iter = FileUtils.lineIterator(file, "UTF-8");

                response.setContentType("application/octet-stream;charset=gbk");

                String headerValue = String.format("attachment; filename=\"%s\"", String.format("data-%s.csv", com.prophet.util.DateTimeUtil.getNow()));
                response.setHeader("Content-Disposition", headerValue);
                response.setCharacterEncoding("GBK");

                // PrintWriter instead of ServletOutputStream because the latter
                // cannot handle Chinese/unicode characters
                writer = response.getWriter();

                while (iter.hasNext()) {
                    StringBuffer newLine = new StringBuffer("");
                    String line = iter.nextLine();
                    String[] fields = line.split(com.prophet.config.HiveResultTextConfig.HIVE_RESULT_FIELD_DELIMITER);
                    for (int i = 0 ; i < fields.length ; i ++) {
                        newLine.append(fields[i]);
                        if (i != fields.length-1) {
                            newLine.append(",");
                        }
                    }
                    writer.println(newLine.toString());
                }
                writer.flush();

            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // NOTE(review): if getWriter() threw above, writer is still null
                // here and writer.close() raises an NPE — consider a null check.
                LineIterator.closeQuietly(iter);
                writer.close();
                try {
                    response.flushBuffer();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Manually cancels one task.
     * @param request
     * @param queryHistId
     */
    @RequestMapping(value = "/hive_query/cancel_task.json", method = RequestMethod.GET)
    public Map cancelTaskController(HttpServletRequest request,
            @RequestParam("queryHistId") long queryHistId
    ) {
        Map serviceResult = this.hiveServerService.cancelTaskById(this.getLoginUserInfo(request).get("loginedUser").toString(), queryHistId);

        return this.encodeToJsonResult(serviceResult);
    }
}

// ---------------------------------------------------------------------------
// src/main/java/com/prophet/web/HiveSecretDataController.java
// ---------------------------------------------------------------------------
package com.prophet.web;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import
com.prophet.service.HiveSecretDataService;
import com.prophet.service.UserAuthService;

/** REST endpoints for managing secret (restricted) hive tables and user grants. */
@RestController
public class HiveSecretDataController extends BaseController{
    private HiveSecretDataService hiveSecretDataService;
    private UserAuthService userAuthService;

    @Autowired
    public void setHiveSecretDataService(HiveSecretDataService hiveSecretDataService) {
        this.hiveSecretDataService = hiveSecretDataService;
    }

    @Autowired
    public void setUserAuthService(UserAuthService userAuthService) {
        this.userAuthService = userAuthService;
    }



    /** Lists the secret tables the current user can access. */
    @RequestMapping(value = "/hive_secret/get_all_secrets.json", method = RequestMethod.GET)
    public Map getAllSecretsByUser(HttpServletRequest request) {
        Map serviceResult = this.hiveSecretDataService.getAllSecretTablesByUser(this.getLoginUserInfo(request).get("loginedUser").toString());
        return this.encodeToJsonResult(serviceResult);
    }

    /** Lists all tables that are not marked secret. */
    @RequestMapping(value = "/hive_secret/get_all_non_secrets.json", method = RequestMethod.GET)
    public Map getAllNonSecrets(HttpServletRequest request) {
        Map serviceResult = this.hiveSecretDataService.getAllNonSecretTables();
        return this.encodeToJsonResult(serviceResult);
    }

    /** Marks the given tables as secret. */
    @RequestMapping(value = "/hive_secret/add_secret_tables.json", method = RequestMethod.POST)
    public Map addSecretTables(HttpServletRequest request, @RequestParam("targetSecretTables") List targetSecretTables) {
        Map serviceResult = this.hiveSecretDataService.addSecretTables(targetSecretTables);
        return this.encodeToJsonResult(serviceResult);
    }

    /** Lists every table currently marked secret. */
    @RequestMapping(value = "/hive_secret/get_all_secret_tables.json", method = RequestMethod.GET)
    public Map getAllSecretTables(HttpServletRequest request) {
        Map serviceResult = this.hiveSecretDataService.getAllSecretTables();
        return this.encodeToJsonResult(serviceResult);
    }

    /** Grants a user access to the given secret tables. */
    @RequestMapping(value = "/hive_secret/grant_user_priv.json", method = RequestMethod.POST)
    public Map grantSecretPriv(HttpServletRequest request, @RequestParam("targetSecretTables") List targetSecretTables,
            @RequestParam("username") String username) {
        // make sure the user exists first
        boolean hasUser = false;
        try {
            hasUser = this.userAuthService.hasUser(username);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        if (hasUser == false) {
            Map restfulResult = new HashMap();
            restfulResult.put("status", 1);
            restfulResult.put("message", String.format("%s用户在系统里不存在, 请重新输入!", username));
            restfulResult.put("data", null);
            return restfulResult;
        } else {
            // user exists: perform the grant
            return this.encodeToJsonResult(this.hiveSecretDataService.grantSecretPrivToUser(username, targetSecretTables));
        }

    }
}

// ---------------------------------------------------------------------------
// src/main/java/com/prophet/web/UserController.java
// ---------------------------------------------------------------------------
package com.prophet.web;

import java.util.HashMap;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.prophet.service.UserAuthService;


/** REST endpoints for login/logout and management of built-in prophet users. */
@RestController
public class UserController extends BaseController{
    private UserAuthService userAuthService;

    @Autowired
    public void setUserAuthService(UserAuthService userAuthService) {
        this.userAuthService = userAuthService;
    }

    /**
     * Login endpoint.
     * @param request
     * @param username
     * @param
password
     * @return
     */
    @RequestMapping(value = "/login.json", method = RequestMethod.POST)
    public Map loginController(HttpServletRequest request, @RequestParam("username") String username, @RequestParam("password") String password) {
        Map serviceResult = this.userAuthService.authenticate(username, password);

        if ((int)(serviceResult.get("data")) == 0) {
            // authentication succeeded: establish the session
            HttpSession session = request.getSession();
            // determine the admin flag and store it in the session
            int isAdmin = -1;
            if (this.userAuthService.isAdmin(username) == true) {
                isAdmin = 1;
            } else {
                isAdmin = 0;
            }
            session.setAttribute("userAuthSystemType", this.userAuthService.getUserAuthSystemType());
            session.setAttribute("isAdmin", isAdmin);
            session.setAttribute("loginedUser", username);

        }

        return this.encodeToJsonResult(serviceResult);
    }

    /**
     * Logout endpoint.
     * @param request
     * @return
     */
    @RequestMapping(value = "/logout.json", method = RequestMethod.POST)
    public Map logoutController(HttpServletRequest request) {
        Map controllerResult = new HashMap();

        HttpSession session = request.getSession();
        if (session.getAttribute("loginedUser") != null) {
            session.removeAttribute("loginedUser");
        }
        if (session.getAttribute("isAdmin") != null) {
            session.removeAttribute("isAdmin");
        }
        session.invalidate();

        controllerResult.put("status", 0);
        controllerResult.put("message", "ok");
        controllerResult.put("data", null);

        return controllerResult;
    }

    /**
     * Returns the currently logged-in user taken from the session.
     * @param request
     * @return
     */
    @RequestMapping(value = "/get_login_user.json", method = RequestMethod.GET)
    public Map currLoginUserController(HttpServletRequest request) {
        Map controllerResult = new HashMap();
        Map data = new HashMap();
        controllerResult.put("status", 0);
        controllerResult.put("message", "ok");
        if (this.getLoginUserInfo(request) != null) {
            data.put("isAdmin", this.getLoginUserInfo(request).get("isAdmin"));
            data.put("loginedUser", this.getLoginUserInfo(request).get("loginedUser"));
            data.put("userAuthSystemType", this.getLoginUserInfo(request).get("userAuthSystemType"));
            controllerResult.put("data", this.getLoginUserInfo(request));
        }
        return controllerResult;
    }

    /** Admin-only: lists all users (prophet auth backend only). */
    @RequestMapping(value = "/get_all_prophet_users.json", method = RequestMethod.GET)
    public Map getAllProphetUsersController(HttpServletRequest request) {
        Map loginUserInfo = this.getLoginUserInfo(request);
        if (
            loginUserInfo.get("isAdmin").toString().equals("0") ||
            !loginUserInfo.get("userAuthSystemType").toString().equals("prophet")
        ) {
            Map restfulResult = new HashMap();
            restfulResult.put("status", 1);
            restfulResult.put("message", "用户非管理员,或userAuthSystemType不是prophet,请求出错!");
            restfulResult.put("data", null);
            return restfulResult;
        }
        return this.encodeToJsonResult(this.userAuthService.getAllProphetUsers());
    }

    /** Admin-only: adds a prophet user. */
    @RequestMapping(value = "/add_prophet_user.json", method = RequestMethod.POST)
    public Map addProphetUserController(HttpServletRequest request,
            @RequestParam("username") String username,
            @RequestParam("password") String password,
            @RequestParam("isActive") String isActive,
            @RequestParam("userType") String userType
    ) {
        Map loginUserInfo = this.getLoginUserInfo(request);
        if (
            loginUserInfo.get("isAdmin").toString().equals("0") ||
            !loginUserInfo.get("userAuthSystemType").toString().equals("prophet")
        ) {
            Map restfulResult = new HashMap();
            restfulResult.put("status", 1);
            restfulResult.put("message", "用户非管理员,或userAuthSystemType不是prophet,请求出错!");
            restfulResult.put("data", null);
            return restfulResult;
        }
        return this.encodeToJsonResult(this.userAuthService.addProphetUser(username, password, isActive, userType));
    }

    /** Admin-only: deletes a prophet user by id. */
    @RequestMapping(value = "/delete_user_by_id.json", method = RequestMethod.POST)
    public Map deleteUserByIdController(HttpServletRequest request, @RequestParam("userId") int userId) {
        Map loginUserInfo = this.getLoginUserInfo(request);
        if (
            loginUserInfo.get("isAdmin").toString().equals("0") ||
            !loginUserInfo.get("userAuthSystemType").toString().equals("prophet")
        ) {
            Map restfulResult = new HashMap();
            restfulResult.put("status", 1);
            restfulResult.put("message", "用户非管理员,或userAuthSystemType不是prophet,请求出错!");
            restfulResult.put("data", null);
            return restfulResult;
        }
        return this.encodeToJsonResult(this.userAuthService.deleteUserById(userId));
    }
}

// ---------------------------------------------------------------------------
// src/main/java/com/prophet/web/postparameters/HiveQueryCommand.java
// ---------------------------------------------------------------------------
package com.prophet.web.postparameters;

/** Form-backing bean for the hive-query POST endpoints. */
public class HiveQueryCommand {
    // the HQL text submitted by the user
    private String queryContent;
    // id of the query-history row for this run
    private long queryHistId;
    // "true" when the user wants an e-mail on completion
    private String emailNotify;

    public String getQueryContent() {
        return queryContent;
    }

    public void setQueryContent(String queryContent) {
        this.queryContent = queryContent;
    }

    public long getQueryHistId() {
        return queryHistId;
    }

    public void setQueryHistId(long queryHistId) {
        this.queryHistId = queryHistId;
    }

    public String getEmailNotify() {
        return emailNotify;
    }

    public void setEmailNotify(String emailNotify) {
        this.emailNotify = emailNotify;
    }

}

src/main/resources/application.properties:
-------------------------------------------------------------------------------- 1 | #########################################datasource########################################### 2 | spring.ds_prophet.url=jdbc:mysql://192.168.5.10:5621/prophet 3 | spring.ds_prophet.username=prophet 4 | spring.ds_prophet.password=prophet 5 | spring.ds_prophet.driver-class-name=com.mysql.jdbc.Driver 6 | spring.ds_prophet.type=org.apache.commons.dbcp2.BasicDataSource 7 | #spring.ds_prophet.max-wait=300000 8 | #spring.ds_prophet.max-active=50 9 | #spring.ds_prophet.max-idle=10 10 | spring.ds_prophet.min-idle=4 11 | spring.ds_prophet.validation-query=select 123 12 | spring.ds_prophet.test-while-idle=true 13 | 14 | spring.ds_hive_metastore.url = jdbc:mysql://192.168.5.7:3306/hive_metadata 15 | spring.ds_hive_metastore.username = hiveuser 16 | spring.ds_hive_metastore.password = hivepass 17 | spring.ds_hive_metastore.driver-class-name=com.mysql.jdbc.Driver 18 | spring.ds_hive_metastore.type=org.apache.commons.dbcp2.BasicDataSource 19 | spring.ds_hive_metastore.min-idle=4 20 | spring.ds_hive_metastore.validation-query=select 123 21 | spring.ds_hive_metastore.test-while-idle=true 22 | 23 | spring.ds_hive_server.url = jdbc:hive2://192.168.1.25:10000/default 24 | spring.ds_hive_server.username = hadoop 25 | spring.ds_hive_server.password = 26 | spring.ds_hive_server.driver-class-name=org.apache.hive.jdbc.HiveDriver 27 | spring.ds_hive_server.type=org.apache.commons.dbcp2.BasicDataSource 28 | spring.ds_hive_server.min-idle=10 29 | spring.ds_hive_server.test-while-idle=true 30 | #########################################datasource########################################### 31 | 32 | #############################################jsp############################################## 33 | spring.mvc.view.prefix=/WEB-INF/jsp/ 34 | spring.mvc.view.suffix=.jsp 35 | #############################################jsp############################################## 36 | 37 | 
#######################################prophet server######################################### 38 | server.port=8090 39 | #session max alive seconds (2592000 = 30 days; Tomcat's default would be 30 minutes) 40 | server.session-timeout=2592000 41 | server.tomcat.uri-encoding=UTF-8 42 | server.tomcat.max-threads=800 43 | server.tomcat.basedir=logs/ 44 | server.tomcat.access-log-enabled=true 45 | server.tomcat.accesslog.directory=tomcat_access_logs/ 46 | 47 | #######################################prophet server######################################### 48 | 49 | #######################################logging################################################ 50 | #logging.config=/home/prophet_server/conf/logging.xml 51 | #######################################logging################################################ 52 | 53 | #####################################user auth################################################ 54 | #user authentication system, available values (case insensitive): 1.LDAP 2.prophet 55 | authentication.system=LDAP 56 | 57 | #if LDAP is chosen, the following info is mandatory; otherwise it is ignored.
58 | authentication.ldap.url=ldap://xxx.prophet.com/ 59 | authentication.ldap.base-dn=CN=mycn,OU=Users,DC=prophet,DC=com 60 | authentication.ldap.user-search-dn=OU=myou,DC=prophet,DC=com 61 | authentication.ldap.user-search-column=username 62 | authentication.ldap.factory=com.sun.jndi.ldap.LdapCtxFactory 63 | authentication.ldap.security-authentication=simple 64 | authentication.ldap.security-credenticials=abc123456790 65 | #####################################user auth################################################ 66 | 67 | #####################################email#################################################### 68 | spring.mail.host=smtp.prophet.com 69 | spring.mail.port=25 70 | #spring.mail.username=123 71 | #spring.mail.password=123 72 | spring.mail.from=sender@prophet.com 73 | spring.mail.properties.mail.smtp.auth=false 74 | spring.mail.properties.mail.smtp.starttls.enable=false 75 | spring.mail.properties.mail.smtp.starttls.required=false 76 | #suffix used within your company for everyone 77 | spring.mail.company.suffix=@prophet.com 78 | #####################################email#################################################### -------------------------------------------------------------------------------- /src/main/resources/application.properties.github: -------------------------------------------------------------------------------- 1 | #########################################datasource########################################### 2 | spring.ds_prophet.url=jdbc:mysql://192.168.5.10:5621/prophet 3 | spring.ds_prophet.username=prophet 4 | spring.ds_prophet.password=prophet 5 | spring.ds_prophet.driver-class-name=com.mysql.jdbc.Driver 6 | spring.ds_prophet.type=org.apache.commons.dbcp2.BasicDataSource 7 | #spring.ds_prophet.max-wait=300000 8 | #spring.ds_prophet.max-active=50 9 | #spring.ds_prophet.max-idle=10 10 | spring.ds_prophet.min-idle=4 11 | spring.ds_prophet.validation-query=select 123 12 | spring.ds_prophet.test-while-idle=true 13 | 14 | 
spring.ds_hive_metastore.url = jdbc:mysql://192.168.5.7:3306/hive_metadata 15 | spring.ds_hive_metastore.username = hiveuser 16 | spring.ds_hive_metastore.password = hivepass 17 | spring.ds_hive_metastore.driver-class-name=com.mysql.jdbc.Driver 18 | spring.ds_hive_metastore.type=org.apache.commons.dbcp2.BasicDataSource 19 | spring.ds_hive_metastore.min-idle=4 20 | spring.ds_hive_metastore.validation-query=select 123 21 | spring.ds_hive_metastore.test-while-idle=true 22 | 23 | spring.ds_hive_server.url = jdbc:hive2://192.168.1.25:10000/default 24 | spring.ds_hive_server.username = hadoop 25 | spring.ds_hive_server.password = 26 | spring.ds_hive_server.driver-class-name=org.apache.hive.jdbc.HiveDriver 27 | spring.ds_hive_server.type=org.apache.commons.dbcp2.BasicDataSource 28 | spring.ds_hive_server.min-idle=10 29 | spring.ds_hive_server.test-while-idle=true 30 | #########################################datasource########################################### 31 | 32 | #############################################jsp############################################## 33 | spring.mvc.view.prefix=/WEB-INF/jsp/ 34 | spring.mvc.view.suffix=.jsp 35 | #############################################jsp############################################## 36 | 37 | #######################################prophet server######################################### 38 | server.port=8090 39 | #session max alive seconds, default 30 mins 40 | server.session-timeout=2592000 41 | server.tomcat.uri-encoding=UTF-8 42 | server.tomcat.max-threads=800 43 | server.tomcat.basedir=logs/ 44 | server.tomcat.access-log-enabled=true 45 | server.tomcat.accesslog.directory=tomcat_access_logs/ 46 | 47 | #######################################prophet server######################################### 48 | 49 | #######################################logging################################################ 50 | #logging.config=/home/prophet_server/conf/logging.xml 51 | 
#######################################logging################################################ 52 | 53 | #####################################user auth################################################ 54 | #user authentication system, available values(case insensitive): 1.LDAP 2.prophet 55 | authentication.system=LDAP 56 | 57 | #if choose ldap, then following info is mandatory. Otherwise ignored. 58 | authentication.ldap.url=ldap://xxx.prophet.com/ 59 | authentication.ldap.base-dn=CN=s-ldap,OU=Users,DC=prophet,DC=com 60 | authentication.ldap.user-search-dn=OU=myou,DC=prophet,DC=com 61 | authentication.ldap.user-search-column=username 62 | authentication.ldap.factory=com.sun.jndi.ldap.LdapCtxFactory 63 | authentication.ldap.security-authentication=simple 64 | authentication.ldap.security-credenticials=abc123456790 65 | #####################################user auth################################################ 66 | 67 | #####################################email#################################################### 68 | spring.mail.host=smtp.prophet.com 69 | spring.mail.port=25 70 | #spring.mail.username=123 71 | #spring.mail.password=123 72 | spring.mail.from=sender@prophet.com 73 | spring.mail.properties.mail.smtp.auth=false 74 | spring.mail.properties.mail.smtp.starttls.enable=false 75 | spring.mail.properties.mail.smtp.starttls.required=false 76 | #suffix used within your company for everyone 77 | spring.mail.company.suffix=@prophet.com 78 | #####################################email#################################################### -------------------------------------------------------------------------------- /src/main/resources/assembly.xml: -------------------------------------------------------------------------------- 1 | 2 | assembly 3 | 4 | 5 | tar.gz 6 | 7 | true 8 | 9 | 10 | 11 | src/main/assembly/bin 12 | bin 13 | 0755 14 | 15 | 16 | src/main/assembly/conf 17 | conf 18 | 0644 19 | 20 | 21 | src/main/assembly/logs 22 | logs 23 | 0644 24 | 25 | 26 | 
src/main/assembly/data 27 | data 28 | 0644 29 | 30 | 31 | 32 | 33 | 34 | lib 35 | true 36 | runtime 37 | 38 | 39 | -------------------------------------------------------------------------------- /src/main/resources/logging.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 18 | 19 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /src/main/webapp/WEB-INF/web.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 | Archetype Created Web Application 7 | 8 | -------------------------------------------------------------------------------- /src/test/java/junit/EmailTest01.java: -------------------------------------------------------------------------------- 1 | package junit; 2 | 3 | import java.io.File; 4 | 5 | import javax.mail.internet.MimeMessage; 6 | 7 | import org.junit.Test; 8 | import org.junit.runner.RunWith; 9 | import org.springframework.beans.factory.annotation.Autowired; 10 | import org.springframework.boot.test.SpringApplicationConfiguration; 11 | import org.springframework.core.io.FileSystemResource; 12 | import org.springframework.mail.SimpleMailMessage; 13 | import org.springframework.mail.javamail.JavaMailSender; 14 | import org.springframework.mail.javamail.MimeMessageHelper; 15 | import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; 16 | import com.prophet.Application; 17 | 18 | @RunWith(SpringJUnit4ClassRunner.class) 19 | @SpringApplicationConfiguration(classes = Application.class) 20 | public class EmailTest01 { 21 | @Autowired 22 | private JavaMailSender mailSender; 23 | 24 | @Test 25 | public void sendSimpleMail() throws Exception { 26 | SimpleMailMessage message = new SimpleMailMessage(); 27 | message.setFrom("123@qq.com"); 28 | message.setTo("aa@cc.com"); 29 | 
message.setSubject("主题:简单邮件"); 30 | message.setText("测试邮件内容"); 31 | 32 | mailSender.send(message); 33 | } 34 | 35 | @Test 36 | public void sendAttachmentsMail() throws Exception { 37 | 38 | MimeMessage mimeMessage = mailSender.createMimeMessage(); 39 | 40 | MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true); 41 | helper.setFrom("123@qq.com"); 42 | helper.setTo("aa@cc.com"); 43 | helper.setSubject("主题:有附件"); 44 | helper.setText("有附件的邮件"); 45 | 46 | FileSystemResource file = new FileSystemResource(new File("bb.txt")); 47 | helper.addAttachment("附件-1.txt", file); 48 | helper.addAttachment("附件-2.txt", file); 49 | 50 | mailSender.send(mimeMessage); 51 | 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/test/java/prophet/HQLParserTest01.java: -------------------------------------------------------------------------------- 1 | package prophet; 2 | 3 | import org.apache.hadoop.hive.ql.parse.ParseException; 4 | 5 | import com.prophet.common.HQLParser; 6 | 7 | public class HQLParserTest01 { 8 | public static void main(String[] args) { 9 | String parsesql = "select a from t1"; 10 | 11 | HQLParser hp= new HQLParser(); 12 | try { 13 | hp.parseHQL(parsesql); 14 | } catch (ParseException | org.antlr.runtime.NoViableAltException e) { 15 | System.out.println("catch"); 16 | } 17 | System.out.println(hp.getOper()+"=================="); 18 | for (String table : hp.getTables()) { 19 | System.out.println(table); 20 | } 21 | 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/test/java/prophet/HiveTest01.java: -------------------------------------------------------------------------------- 1 | package prophet; 2 | import java.sql.SQLException; 3 | import java.sql.Connection; 4 | import java.sql.ResultSet; 5 | import java.sql.Statement; 6 | import java.sql.DriverManager; 7 | 8 | public class HiveTest01 { 9 | private static String driverName = 
"org.apache.hive.jdbc.HiveDriver"; 10 | 11 | public static void main(String[] args) { 12 | try { 13 | Class.forName(driverName); 14 | Connection con = DriverManager.getConnection("jdbc:hive2://XX.XX.XX.XX:10000/default", "root", ""); 15 | Statement stmt = con.createStatement(); 16 | String sql = "SHOW TABLES"; 17 | System.out.println("Running: " + sql); 18 | ResultSet res = stmt.executeQuery(sql); 19 | while (res.next()) { 20 | System.out.println(res.getString(1)); 21 | } 22 | } catch (SQLException | ClassNotFoundException e) { 23 | e.printStackTrace(); 24 | } 25 | 26 | } 27 | 28 | } 29 | --------------------------------------------------------------------------------