├── .gitignore ├── CollectTomcatLogs ├── pom.xml └── src │ ├── main │ ├── java │ │ └── software │ │ │ └── analysis │ │ │ └── nju │ │ │ ├── CaptureLog │ │ │ ├── CaptureLog.java │ │ │ └── main.java │ │ │ ├── ConfigureManager │ │ │ └── ConfigurationManager.java │ │ │ ├── Util │ │ │ └── Util.java │ │ │ └── service │ │ │ ├── HDFSService.java │ │ │ └── impl │ │ │ └── HDFSServiceImpl.java │ └── resources │ │ └── my.properties │ └── test │ └── java │ └── Test.java ├── README.md ├── RestoreData ├── package-lock.json ├── package.json ├── pom.xml └── src │ ├── main │ ├── java │ │ └── software │ │ │ └── hbase │ │ │ ├── Entity │ │ │ ├── ConditionQueryData.java │ │ │ └── QueryData.java │ │ │ ├── controller │ │ │ ├── QueryData.java │ │ │ └── getTomcatLogsController.java │ │ │ ├── hbase │ │ │ ├── HbaseColumn.java │ │ │ ├── HbaseOneToMany.java │ │ │ ├── HbaseOneToOne.java │ │ │ ├── HbaseTable.java │ │ │ ├── QueryDAO.java │ │ │ ├── dao │ │ │ │ ├── BaseHbaseDao.java │ │ │ │ ├── HbaseDaoFactory.java │ │ │ │ ├── LogAnaDao.java │ │ │ │ └── LogDataDao.java │ │ │ └── dataObject │ │ │ │ ├── LogAna.java │ │ │ │ └── LogData.java │ │ │ ├── service │ │ │ ├── HBaseService.java │ │ │ ├── LogDataService.java │ │ │ ├── QueryService.java │ │ │ ├── impl │ │ │ │ ├── HbaseServiceImpl.java │ │ │ │ ├── LogDataServiceImpl.java │ │ │ │ └── QueryServiceImpl.java │ │ │ └── model │ │ │ │ ├── HbaseCell.java │ │ │ │ ├── HbaseColCouple.java │ │ │ │ ├── HbaseConditonModel.java │ │ │ │ ├── HbaseRow.java │ │ │ │ └── HbaseServiceConditonModel.java │ │ │ └── util │ │ │ ├── ArrayUtil.java │ │ │ ├── ClassUtil.java │ │ │ ├── DateUtil.java │ │ │ ├── ObjectUtil.java │ │ │ ├── ParseLogsUtil.java │ │ │ ├── PropertiesUtil.java │ │ │ └── StringUtil.java │ ├── resources │ │ ├── 200_localhost_access_log.2017-08-23.txt │ │ ├── 2200_localhost_access_log.2017-08-08.txt │ │ ├── applicationContext-service.xml │ │ ├── applicationContext.xml │ │ ├── applicationContextDataSource.xml │ │ └── dsr.properties │ └── webapp │ │ ├── WEB-INF 
│ │ ├── views │ │ │ ├── dataGrid.jsp │ │ │ ├── index.jsp │ │ │ ├── queryData.jsp │ │ │ └── test.html │ │ └── web.xml │ │ └── assets │ │ ├── css │ │ ├── bootstrap-select.min.css │ │ ├── bootstrapDatepickr-1.0.0.css │ │ ├── bootstrapDatepickr-1.0.0.min.css │ │ └── myStyle.css │ │ ├── fonts │ │ ├── FontAwesome.otf │ │ ├── fontawesome-webfont.eot │ │ ├── fontawesome-webfont.svg │ │ ├── fontawesome-webfont.ttf │ │ ├── fontawesome-webfont.woff │ │ ├── fontawesome-webfont.woff2 │ │ ├── glyphicons-halflings-regular.eot │ │ ├── glyphicons-halflings-regular.ttf │ │ ├── glyphicons-halflings-regular.woff │ │ └── glyphicons-halflings-regular.woff2 │ │ └── js │ │ ├── bootstrap-select.js │ │ ├── bootstrapDatepickr-1.0.0.min.js │ │ ├── index.js │ │ ├── inputCheck.js │ │ ├── myCharts.js │ │ └── queryData.js │ └── test │ ├── java │ └── HbaseTest │ │ ├── HbaseBatchInsertTest.java │ │ ├── HbaseConnectionTest.java │ │ ├── HbaseInsertTest.java │ │ ├── ListBean.java │ │ └── ParseLogTest.java │ └── resources │ └── applicationContext.xml ├── ScalaReadAndWrite ├── pom.xml └── src │ ├── main │ ├── java │ │ └── software │ │ │ └── analysis │ │ │ └── nju │ │ │ └── constant │ │ │ ├── ConfigurationManager.java │ │ │ └── SparkProperties.java │ ├── resources │ │ └── db.properties.properties │ └── scala │ │ ├── ScalaReadAndWrite │ │ └── App.scala │ │ └── software │ │ └── analysis │ │ └── nju │ │ ├── Accumulator │ │ ├── AllCourtAccumulator │ │ │ ├── AddIPRankItem.scala │ │ │ └── AddReqHourItem.scala │ │ ├── AllDataAccumulator.scala │ │ ├── ByteHourAccumulator.scala │ │ ├── ByteSecAccumulator.scala │ │ ├── DateResultAccumulator.scala │ │ ├── IPMapAccumulator.scala │ │ ├── MethodAccumulator.scala │ │ ├── RequestHourAccumulator.scala │ │ ├── RequestSecAccumulator.scala │ │ ├── StateAccumulator.scala │ │ └── URLAccumulator.scala │ │ ├── DAO │ │ └── ParseObjectToPut.scala │ │ ├── Entity │ │ └── Entity.scala │ │ ├── analysis │ │ ├── Analysis.scala │ │ ├── AnalysisAllCourt.scala │ │ └── 
AnalysisByCourt.scala │ │ └── util │ │ ├── CourtInfo.scala │ │ ├── DoBytesAnalysis.scala │ │ ├── DoIPAnalysis.scala │ │ ├── DoMethodAnalysis.scala │ │ ├── DoRequestAnalysis.scala │ │ ├── DoStateAnalysis.scala │ │ ├── DoURLAnalysis.scala │ │ ├── GetDate.scala │ │ ├── MapUtil.scala │ │ └── ParseMapToJson.scala │ └── test │ └── scala │ └── ScalaReadAndWrite │ ├── AppTest.scala │ └── MySpec.scala └── image ├── p1.png ├── p2.png └── p3.png /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.ear 17 | *.zip 18 | *.tar.gz 19 | *.rar 20 | 21 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 22 | hs_err_pid* 23 | !/image/p2.png 24 | !/image/p1.png 25 | !/image/p3.png 26 | -------------------------------------------------------------------------------- /CollectTomcatLogs/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | CollectTomcatLogs 8 | CollectTomcatLogs 9 | 1.0-SNAPSHOT 10 | 11 | 12 | 1.2.17 13 | 14 | 15 | 16 | 17 | 18 | commons-net 19 | commons-net 20 | 3.3 21 | 22 | 23 | 24 | 25 | log4j 26 | log4j 27 | ${log4j.version} 28 | 29 | 30 | 31 | org.apache.hadoop 32 | hadoop-common 33 | 2.6.4 34 | 35 | 36 | org.apache.hadoop 37 | hadoop-hdfs 38 | 2.6.4 39 | 40 | 41 | org.apache.hadoop 42 | hadoop-client 43 | 2.6.4 44 | 45 | 46 | 47 | org.springframework 48 | spring-core 49 | 3.1.0.RELEASE 50 | 51 | 52 | org.springframework 53 | spring-expression 54 | 3.1.0.RELEASE 55 | 56 | 57 | org.springframework 58 | spring-beans 59 | 3.1.0.RELEASE 60 | 61 | 62 | org.springframework 63 | spring-context 64 | 3.1.0.RELEASE 65 | 66 | 67 | org.springframework 68 | spring-context-support 69 | 3.1.0.RELEASE 70 | 71 | 72 | 
org.springframework 73 | spring-orm 74 | 3.1.0.RELEASE 75 | 76 | 77 | org.springframework 78 | spring-oxm 79 | 3.1.0.RELEASE 80 | 81 | 82 | org.springframework 83 | spring-aop 84 | 3.1.0.RELEASE 85 | 86 | 87 | org.springframework 88 | spring-webmvc 89 | 3.1.0.RELEASE 90 | 91 | 92 | org.springframework 93 | spring-web 94 | 3.1.0.RELEASE 95 | 96 | 97 | 98 | -------------------------------------------------------------------------------- /CollectTomcatLogs/src/main/java/software/analysis/nju/CaptureLog/CaptureLog.java: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.CaptureLog; 2 | 3 | import org.apache.commons.net.ftp.FTPClient; 4 | import org.apache.commons.net.ftp.FTPFile; 5 | import org.apache.commons.net.ftp.FTPReply; 6 | import software.analysis.nju.Util.Util; 7 | 8 | import java.io.*; 9 | 10 | public class CaptureLog { 11 | 12 | /** 13 | * 上传文件(可供Action/Controller层使用) 14 | * @param hostname FTP服务器地址 15 | * @param port FTP服务器端口号 16 | * @param username FTP登录帐号 17 | * @param password FTP登录密码 18 | * @param pathname FTP服务器保存目录 19 | * @param fileName 上传到FTP服务器后的文件名称 20 | * @param inputStream 输入文件流 21 | * @return 22 | */ 23 | public static boolean uploadFile(String hostname, int port, String username, String password, String pathname, String fileName, InputStream inputStream){ 24 | boolean flag = false; 25 | FTPClient ftpClient = new FTPClient(); 26 | ftpClient.setControlEncoding("UTF-8"); 27 | try { 28 | //连接FTP服务器 29 | ftpClient.connect(hostname,21); 30 | //登录FTP服务器 31 | ftpClient.login(username, password); 32 | //是否成功登录FTP服务器 33 | int replyCode = ftpClient.getReplyCode(); 34 | if(!FTPReply.isPositiveCompletion(replyCode)){ 35 | //如果出错,保存状态码到文件 36 | Util.writeLog(String.valueOf(replyCode)); 37 | return flag; 38 | } 39 | 40 | ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE); 41 | ftpClient.makeDirectory(pathname); 42 | ftpClient.changeWorkingDirectory(pathname); 43 | ftpClient.storeFile(fileName, 
inputStream); 44 | inputStream.close(); 45 | ftpClient.logout(); 46 | flag = true; 47 | } catch (Exception e) { 48 | e.printStackTrace(); 49 | } finally{ 50 | if(ftpClient.isConnected()){ 51 | try { 52 | ftpClient.disconnect(); 53 | } catch (IOException e) { 54 | e.printStackTrace(); 55 | } 56 | } 57 | } 58 | return flag; 59 | } 60 | 61 | 62 | /** 63 | * 上传文件(可对文件进行重命名) 64 | * @param hostname FTP服务器地址 65 | * @param port FTP服务器端口号 66 | * @param username FTP登录帐号 67 | * @param password FTP登录密码 68 | * @param pathname FTP服务器保存目录 69 | * @param filename 上传到FTP服务器后的文件名称 70 | * @param originfilename 待上传文件的名称(绝对地址) 71 | * @return 72 | */ 73 | public static boolean uploadFileFromProduction(String hostname, int port, String username, String password, String pathname, String filename, String originfilename){ 74 | boolean flag = false; 75 | try { 76 | InputStream inputStream = new FileInputStream(new File(originfilename)); 77 | flag = uploadFile(hostname, port, username, password, pathname, filename, inputStream); 78 | } catch (Exception e) { 79 | e.printStackTrace(); 80 | } 81 | return flag; 82 | } 83 | 84 | /** 85 | * 上传文件(不可以进行文件的重命名操作) 86 | * @param hostname FTP服务器地址 87 | * @param port FTP服务器端口号 88 | * @param username FTP登录帐号 89 | * @param password FTP登录密码 90 | * @param pathname FTP服务器保存目录 91 | * @param originfilename 待上传文件的名称(绝对地址) 92 | * @return 93 | */ 94 | public static boolean uploadFileFromProduction(String hostname, int port, String username, String password, String pathname, String originfilename){ 95 | boolean flag = false; 96 | try { 97 | String fileName = new File(originfilename).getName(); 98 | InputStream inputStream = new FileInputStream(new File(originfilename)); 99 | flag = uploadFile(hostname, port, username, password, pathname, fileName, inputStream); 100 | } catch (Exception e) { 101 | e.printStackTrace(); 102 | } 103 | return flag; 104 | } 105 | 106 | 107 | /** 108 | * 删除文件 109 | * @param hostname FTP服务器地址 110 | * @param port FTP服务器端口号 111 | * @param 
username FTP登录帐号 112 | * @param password FTP登录密码 113 | * @param pathname FTP服务器保存目录 114 | * @param filename 要删除的文件名称 115 | * @return 116 | */ 117 | public static boolean deleteFile(String hostname, int port, String username, String password, String pathname, String filename){ 118 | boolean flag = false; 119 | FTPClient ftpClient = new FTPClient(); 120 | try { 121 | //连接FTP服务器 122 | ftpClient.connect(hostname, port); 123 | //登录FTP服务器 124 | ftpClient.login(username, password); 125 | //验证FTP服务器是否登录成功 126 | int replyCode = ftpClient.getReplyCode(); 127 | if(!FTPReply.isPositiveCompletion(replyCode)){ 128 | return flag; 129 | } 130 | //切换FTP目录 131 | ftpClient.changeWorkingDirectory(pathname); 132 | ftpClient.dele(filename); 133 | ftpClient.logout(); 134 | flag = true; 135 | } catch (Exception e) { 136 | e.printStackTrace(); 137 | } finally{ 138 | if(ftpClient.isConnected()){ 139 | try { 140 | ftpClient.logout(); 141 | } catch (IOException e) { 142 | 143 | } 144 | } 145 | } 146 | return flag; 147 | } 148 | 149 | /** 150 | * 下载文件 151 | * @param hostname FTP服务器地址 152 | * @param port FTP服务器端口号 153 | * @param username FTP登录帐号 154 | * @param password FTP登录密码 155 | * @param pathname FTP服务器文件目录 156 | * @param filename 文件名称 157 | * @param localpath 下载后的文件路径 158 | * @return 159 | */ 160 | public static boolean downloadFile(String hostname, int port, String username, String password, String pathname, String filename, String localpath){ 161 | boolean flag = false; 162 | FTPClient ftpClient = new FTPClient(); 163 | try { 164 | //连接FTP服务器 165 | ftpClient.connect(hostname, port); 166 | //登录FTP服务器 167 | ftpClient.login(username, password); 168 | //验证FTP服务器是否登录成功 169 | int replyCode = ftpClient.getReplyCode(); 170 | if(!FTPReply.isPositiveCompletion(replyCode)){ 171 | return flag; 172 | } 173 | //切换FTP目录 174 | ftpClient.changeWorkingDirectory(pathname); 175 | FTPFile[] ftpFiles = ftpClient.listFiles(); 176 | for(FTPFile file : ftpFiles){ 177 | 
if(filename.equalsIgnoreCase(file.getName())){ 178 | File localFile = new File(localpath + "/" + file.getName()); 179 | OutputStream os = new FileOutputStream(localFile); 180 | ftpClient.retrieveFile(file.getName(), os); 181 | os.close(); 182 | } 183 | } 184 | ftpClient.logout(); 185 | flag = true; 186 | } catch (Exception e) { 187 | e.printStackTrace(); 188 | } finally{ 189 | if(ftpClient.isConnected()){ 190 | try { 191 | ftpClient.logout(); 192 | } catch (IOException e) { 193 | e.printStackTrace(); 194 | } 195 | } 196 | } 197 | return flag; 198 | } 199 | 200 | } 201 | -------------------------------------------------------------------------------- /CollectTomcatLogs/src/main/java/software/analysis/nju/CaptureLog/main.java: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.CaptureLog; 2 | 3 | import software.analysis.nju.ConfigureManager.ConfigurationManager; 4 | import software.analysis.nju.Util.Util; 5 | import software.analysis.nju.service.impl.HDFSServiceImpl; 6 | 7 | import java.io.File; 8 | 9 | public class main { 10 | public static void main(String args[]) throws Exception{ 11 | upLoadThroughHDFS(); 12 | 13 | } 14 | public static void upLoadThroughFTP() throws Exception{ 15 | String hostName = ConfigurationManager.getString("FTP.HostName"); 16 | int port = ConfigurationManager.getInteger("FTP.Port"); 17 | String userName = ConfigurationManager.getString("FTP.UserName"); 18 | String password = ConfigurationManager.getString("FTP.Password"); 19 | 20 | String originFileName = Util.getOriginFileName(); 21 | String fileName = ConfigurationManager.getString("FYDM")+ "_" + Util.getFileNamWithoutPath(originFileName); 22 | System.out.println(fileName); 23 | String pathName = ConfigurationManager.getString("PATH.RemoteFilePath"); 24 | //上传到FTP服务器 25 | boolean flag = CaptureLog.uploadFileFromProduction(hostName, port, userName, password, pathName, fileName, originFileName); 26 | 27 | if(flag){ 28 | 
Util.writeLog(" : success\n"); 29 | }else{ 30 | Util.writeLog(" : failed\n"); 31 | } 32 | } 33 | 34 | public static void upLoadThroughHDFS() throws Exception{ 35 | //原始路径 36 | String Path = ConfigurationManager.getString("PATH.OriginFilePath"); 37 | //原始文件名(包含路径) 38 | String originFileName = Util.getOriginFileName(); 39 | //要更改的文件名 40 | String fileName = ConfigurationManager.getString("FYDM")+ "_" + Util.getFileNamWithoutPath(originFileName); 41 | File file = new File(originFileName); 42 | String newFileName = Path + fileName; 43 | System.out.println("File origin name:" + originFileName); 44 | String dstName = ConfigurationManager.getString("hadoopIp") + "/tomcatLog/" + fileName; 45 | try { 46 | if (file.exists()) { 47 | boolean changeName = file.renameTo(new File(newFileName)); 48 | //改名成功再上传文件 49 | if (changeName) { 50 | System.out.println("开始上传到HDFS"); 51 | HDFSServiceImpl hdfsService = new HDFSServiceImpl(); 52 | hdfsService.copyFromLocalFile(newFileName, dstName); 53 | Util.writeLog(" : " + newFileName + " success\n"); 54 | } 55 | }else{ 56 | System.out.println("File not exists"); 57 | } 58 | }catch (Exception e){ 59 | e.printStackTrace(); 60 | Util.writeLog(" : " + newFileName + " Failec\n"); 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /CollectTomcatLogs/src/main/java/software/analysis/nju/ConfigureManager/ConfigurationManager.java: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.ConfigureManager; 2 | import org.apache.log4j.Logger; 3 | 4 | 5 | import java.io.IOException; 6 | import java.io.InputStream; 7 | import java.util.Properties; 8 | 9 | /** 10 | * 配置加载管理类 11 | */ 12 | public class ConfigurationManager { 13 | // 配置属性 14 | private static Properties properties = new Properties(); 15 | private static final Logger logger = Logger.getLogger(ConfigurationManager.class); 16 | 17 | static { 18 | InputStream in = 
ConfigurationManager.class.getClassLoader().getResourceAsStream("my.properties"); 19 | try { 20 | properties.load(in); 21 | } catch (IOException e) { 22 | logger.error(e.getStackTrace()); 23 | e.printStackTrace(); 24 | } 25 | } 26 | 27 | /** 28 | * 获取关键字对应的配置项 29 | * 30 | * @param key 31 | * @return 32 | */ 33 | private static synchronized String getProperty(String key) { 34 | try { 35 | return properties.getProperty(key); 36 | } catch (Exception e) { 37 | logger.error(e.getMessage()); 38 | e.printStackTrace(); 39 | } 40 | 41 | return null; 42 | } 43 | 44 | 45 | /** 46 | * 获取String配置项 47 | * 48 | * @param key 49 | * @return 50 | */ 51 | public static synchronized String getString(String key) { 52 | return getProperty(key); 53 | } 54 | 55 | /** 56 | * 获取Integer型配置项 57 | * 58 | * @param key 59 | * @return 60 | */ 61 | public static synchronized Integer getInteger(String key) { 62 | String value = getProperty(key); 63 | try { 64 | return Integer.valueOf(value); 65 | } catch (Exception e) { 66 | logger.error(e.getStackTrace()); 67 | e.printStackTrace(); 68 | } 69 | 70 | return 0; 71 | } 72 | 73 | /** 74 | * 获取Boolean型配置项 75 | * 76 | * @param key 77 | * @return 78 | */ 79 | public static synchronized Boolean getBoolean(String key) { 80 | String value = getProperty(key); 81 | try { 82 | return Boolean.valueOf(value); 83 | } catch (Exception e) { 84 | logger.error(e.getStackTrace()); 85 | e.printStackTrace(); 86 | } 87 | 88 | return false; 89 | } 90 | 91 | /** 92 | * 获取Long型配置项 93 | * 94 | * @param key 95 | * @return 96 | */ 97 | public static synchronized Long getLong(String key) { 98 | String value = getProperty(key); 99 | try { 100 | return Long.valueOf(value); 101 | } catch (Exception e) { 102 | logger.error(e.getStackTrace()); 103 | e.printStackTrace(); 104 | } 105 | 106 | return 0L; 107 | } 108 | 109 | /** 110 | * 获取Double型配置项 111 | * 112 | * @param key 113 | * @return 114 | */ 115 | public static synchronized Double getDouble(String key) { 116 | String value = 
getProperty(key); 117 | try { 118 | return Double.valueOf(value); 119 | } catch (Exception e) { 120 | logger.error(e.getStackTrace()); 121 | e.printStackTrace(); 122 | } 123 | 124 | return 0.0D; 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /CollectTomcatLogs/src/main/java/software/analysis/nju/Util/Util.java: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Util; 2 | 3 | import software.analysis.nju.ConfigureManager.ConfigurationManager; 4 | 5 | import java.io.*; 6 | import java.text.SimpleDateFormat; 7 | import java.util.Calendar; 8 | import java.util.Date; 9 | import java.util.StringTokenizer; 10 | 11 | public class Util { 12 | public static String getYesterday(){ 13 | Calendar cal=Calendar.getInstance(); 14 | cal.add(Calendar.DATE,-1); 15 | Date time=cal.getTime(); 16 | return (new SimpleDateFormat("yyyy-MM-dd").format(time)); 17 | } 18 | 19 | public static String getFileNamWithoutPath(String fileName){ 20 | StringTokenizer st = new StringTokenizer(fileName, "/"); 21 | String realFileName = ""; 22 | while(st.hasMoreTokens()){ 23 | realFileName = st.nextToken(); 24 | } 25 | return realFileName; 26 | } 27 | 28 | /** 29 | * 获取当前文件夹下tomcat日志文件名 30 | */ 31 | public static String getOriginFileName(){ 32 | String originPath = ConfigurationManager.getString("PATH.OriginFilePath"); 33 | String originFileName = originPath + "localhost_access_log."+getYesterday()+".txt"; 34 | System.out.println(originFileName); 35 | return originFileName; 36 | } 37 | 38 | //每次上传之后保存日志,日志信息为:日期 + success/failed 39 | //如果上传失败,则还有保存失败到返回码,比如:530 ->用户名错误 40 | public static void writeLog(String content) throws Exception { 41 | String rawContent= new String(); //原有txt内容 42 | String s1 = new String();//内容更新 43 | content = getYesterday() + content; 44 | String fileName = ConfigurationManager.getString("LogFileName"); 45 | 46 | File f = new File(fileName); 47 | boolean newfile 
= false; 48 | if (f.exists()) { 49 | System.out.println("文件存在"); 50 | newfile = true; 51 | } else { 52 | System.out.println("文件不存在"); 53 | newfile = f.createNewFile();// 不存在则创建 54 | } 55 | BufferedReader input = new BufferedReader(new FileReader(f)); 56 | 57 | while ((rawContent = input.readLine()) != null) { 58 | s1 += rawContent + "\n"; 59 | } 60 | input.close(); 61 | s1 += content; 62 | if(newfile) { 63 | BufferedWriter output = new BufferedWriter(new FileWriter(f)); 64 | output.write(s1); 65 | output.close(); 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /CollectTomcatLogs/src/main/java/software/analysis/nju/service/HDFSService.java: -------------------------------------------------------------------------------- 1 | /** 2 | * 3 | */ 4 | package software.analysis.nju.service; 5 | 6 | import java.io.IOException; 7 | 8 | 9 | public interface HDFSService { 10 | public void copyFromLocalFile(String src,String dst) throws Exception; 11 | 12 | public byte[] downloadFromHadoop(String src) throws IOException; 13 | } 14 | -------------------------------------------------------------------------------- /CollectTomcatLogs/src/main/java/software/analysis/nju/service/impl/HDFSServiceImpl.java: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.service.impl; 2 | 3 | import org.apache.hadoop.conf.Configuration; 4 | import org.apache.hadoop.fs.FSDataInputStream; 5 | import org.apache.hadoop.fs.FileStatus; 6 | import org.apache.hadoop.fs.FileSystem; 7 | import org.apache.hadoop.fs.Path; 8 | import org.springframework.beans.factory.DisposableBean; 9 | import org.springframework.beans.factory.InitializingBean; 10 | import software.analysis.nju.ConfigureManager.ConfigurationManager; 11 | import software.analysis.nju.service.HDFSService; 12 | 13 | import java.io.IOException; 14 | public class HDFSServiceImpl implements 
HDFSService,InitializingBean,DisposableBean{ 15 | private FileSystem fs; 16 | 17 | public void afterPropertiesSet() throws Exception { 18 | Configuration conf = new Configuration(); 19 | conf.set("mapred.job.tracker", ConfigurationManager.getString("jobTracker")); 20 | conf.set("fs.default.name", ConfigurationManager.getString("hadoopIp")); 21 | System.out.println("初始化成功"); 22 | fs = FileSystem.get(conf); 23 | } 24 | 25 | public void destroy() throws Exception { 26 | fs.close(); 27 | } 28 | 29 | public void copyFromLocalFile(String src,String dst) throws Exception{ 30 | afterPropertiesSet(); 31 | Path srcPath = new Path(src); 32 | Path dstPath = new Path(dst); 33 | if(fs.exists(dstPath)){ 34 | return; 35 | } 36 | fs.copyFromLocalFile(srcPath, dstPath); 37 | } 38 | 39 | public byte[] downloadFromHadoop(String src) throws IOException { 40 | Path srcPath = new Path(src); 41 | if(fs.exists(srcPath)){ 42 | FSDataInputStream in = fs.open(srcPath); 43 | FileStatus stat = fs.getFileStatus(srcPath); 44 | //创建缓冲 45 | byte[] buffer = new byte[Integer.parseInt(String.valueOf(stat.getLen()))]; 46 | in.readFully(0,buffer); 47 | in.close(); 48 | return buffer; 49 | }else{ 50 | throw new IOException("文件不存在!"); 51 | } 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /CollectTomcatLogs/src/main/resources/my.properties: -------------------------------------------------------------------------------- 1 | FTP.HostName=130.1.198.201 2 | FTP.Port=21 3 | FTP.UserName=spgl 4 | FTP.Password=spgl 5 | # 路径格式如下,务必使用 / 代替 \\ 6 | PATH.OriginFilePath=/Users/maicius/RestoreData/src/main/resources/ 7 | 8 | PATH.RemoteFilePath=test 9 | # 法院代码 10 | #"市高级法院" -> "200", "一中院" -> "210", "二中院" ->"220", "海事法院" -> "230", 11 | #"和平区法院" -> "211", "南开区法院" -> "212", "河西区法院" -> "222", 12 | #"河东区法院" -> "221", "河北区法院" -> "213", "红桥区法院" -> "214", 13 | #"滨海新区法院" -> "22A", "塘沽审判区" -> "223", "汉沽审判区" -> "224", 14 | #"大港审判区" -> "225", "功能区审判区" -> "229", "东丽区法院" -> 
"226", 15 | #"津南区法院" -> "227", "西青区法院" -> "215", "北辰区法院" -> "216", 16 | #"武清区法院" -> "217", "宝坻区法院" -> "219", "静海区法院" -> "218", 17 | #"宁河县法院" -> "228", "蓟县法院" -> "21A", "铁路法院" -> "132" 18 | FYDM= 210 19 | 20 | #日志文件路径 21 | LogFileName=/Users/maicius/TomcatLogUploadLog.txt 22 | 23 | jobTracker=hdfs://130.1.6.2:50030 24 | 25 | hadoopIp=hdfs://130.1.6.2:9000 -------------------------------------------------------------------------------- /CollectTomcatLogs/src/test/java/Test.java: -------------------------------------------------------------------------------- 1 | import software.analysis.nju.Util.Util; 2 | 3 | public class Test { 4 | public static void main(String args[]){ 5 | testGetYesterDay(); 6 | testGetFileName(); 7 | Util.getOriginFileName(); 8 | } 9 | private static void testGetYesterDay(){ 10 | System.out.println(Util.getYesterday()); 11 | } 12 | 13 | private static void testGetFileName(){ 14 | System.out.println(Util.getFileNamWithoutPath("/Users/maicius/RestoreData/src/main/resources/200_localhost_access_log.2017-08-08.txt")); 15 | } 16 | 17 | } 18 | 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 基于HBase的网站日志数据分析系统 2 | 3 | ## 系统说明 4 | 5 | ### 1. 
数据库设计 6 | 7 | #### LogData 8 | 9 | - 该表用于存储经数据清洗、转化后的数据 10 | - 数据库类型: HBase 11 | - 表结构 12 | 13 | Rowkey| prop | 14 | ----|:---------------------------:| 15 | | rowkey | IP / BYTES / URL / DATES / METHOD / FYDM / BYTES| 16 | - RowKey 结构设计说明 17 | > RowKey 分为 日期 + 网站代码后三位 + 六位数ID 18 | > 各部分说明如下: 19 | 20 | 字段 | 解释 | 例子 21 | ----| ----- |---- 22 | 日期 |日志文件的产生日期(纯数字,不含空格和-) | 20170808 23 | 公司代码| 公司代码后三位 |200 24 | ID | 从100000开始的六位数字,用于唯一的标明数据并对齐 | 100001 25 | > 完整例子 26 | > 201708082001000000 表示代号为200点公司在2017-08-08产生的一次请求 27 | 28 | - 建表语句 29 | > create "LogData", "prop" 30 | 31 | - 32 | 33 | #### LogAna 34 | - 该表用于存储分析之后的数据 35 | - 数据库类型: HBase 36 | - 表结构 37 | 38 | RowKey | IP | URL | BYTES | MTHOD_STATE |REQ 39 | -------|----|-----|-------|-------------|--- 40 | rowkey |IPSumVal IPTotalNum IPList |URLList MaxURL | BytesSecList BytesHourList / TotalBytes | MethodList StateList | ReqHourList ReqSecList ReqSum 41 | - 字段说明 42 | 43 | 字段 | 解释 | 例子 44 | ----| ----- |---- 45 | IPTotalNum| IP总数,不包含重复的 | 100 表示当天有100个IP访问网站 46 | IPSumVal | IP总数,包含重复 | 100表示有100个IP访问网站,IP可重复 47 | IPList | IP和对应访问量的排行,结构为 由mutable.Map[String, Int]转变来的JSON文件| {"190.1.1.1": 1000} 表示190.1.1.1的IP共在网站产生1000次请求) 48 | URLList | 被请求次数最多的10个URL,结构为Json | {"test.aj":100, "test2.aj":90, ...} 49 | MaxURL | 请求次数最多的URL(现在前端已经放弃使用这字段) |{"test.aj": 100} 50 | BytesSecList | 统计每秒内产生的流量,单位为Byte,但是前端展示时转化为MB | {"2017-08-08 01:00:00":9000, "2017-08-08 01:00:00": 500, ...} 51 | BytesHourList | 统计一天内每小时内产生的流量,单位为Byte,但是前端展示时转化为MB | {"08":9000, "09": 500, ...}, 08 表示 8点到9点内产生的流量 52 | TotalBytes | 一天内产生的总流量大小,单位为Byte,但是前端展示时转化为MB | 3000, 表示当天产生 3000b bytes的流量 53 | MethodList | 出现过的请求方法统计 | {"POST":3446,"OPTIONS":5,"HEAD":4} 54 | StateList | 出现过的请求状态中级 | {"501":8,"302":801,"404":1,"200":14738,"400":2,"405":4} 55 | ReqHourList | 按小时统计请求次数 | {"15":2350,"09":3503,"00":690,"11":1903} 56 | ReqSecList | 按秒统计请求次数 | {"2017-08-08 10:44:08":1,"2017-08-08 09:45:05":4,"2017-08-08 10:06:58":4} 57 | ReqSum | 一天内总请求次数 | 1000,表示当天内共有1000次请求 
58 | 59 | - RowKey 结构设计说明 60 | > RowKey 分为 日期 + 公司代码后三位 61 | > 各部分说明如下: 62 | 63 | 字段 | 解释 | 例子 64 | ----| ----- |---- 65 | 日期 |日志文件的产生日期(纯数字,不含空格和-) | 20170808 66 | 公司代码| 公司代码后三位 |200, 需要注意的是000表示当天所有网站数据 67 | 68 | > example: 69 | 20170808200 表示天津高院在2017-08-08的所有数据 70 | 20170808000 表示所有法院在2017-08-08点所有数据 71 | 72 | - 建表语句 73 | > create "LogAna", "IP", "URL", "BYTES", "METHOD_STATE", "REQ" 74 | 75 | 76 | ### 2. 项目代码描述 77 | - 本项目分为三个子项目, 包括数据采集、数据存储和展示、数据离线分析 78 | 79 | #### 数据采集 80 | 81 | - 工程名:CollectTomcatLogs 82 | - 功能说明: 83 | 84 | > 收集指定路径下的tomcat日志 85 | > 重命名文件之后上传到HDFS或FTP服务器 86 | > 保存日志,记录是否上传成功 87 | 88 | - 部署说明: 部署在各个需要采集日志的服务器上,在 my.properties 里指定公司代码和日志路径 89 | - 配置管理: maven 90 | - 主要技术: Java FTPClient, HDFS 91 | - 测试用例说明: 主要用于测试重命名后的文件是否正常 92 | - 文件重命名: 在localhost_XXXXX.txt文件前加上法院代码,以此来区分各公司数据 93 | 94 | #### 数据存储和展示 95 | - 工程名: RestoreData 96 | - 功能说明: 97 | 98 | > 数据预处理: 包括数据解析、清洗和转化 99 | > 数据存储: 将转化后的数据保存在一个List中,批量插入HBase数据库 100 | > 前端展示:展示分析得到的数据 101 | > 数据查询: 根据各种输入条件查询对应的数据 102 | - 开发环境: 103 | > JDK 8 104 | > Hadoop 2.7 105 | > Hbase 1.2 106 | > tomcat 8 107 | - 部署说明:在 my.properties 里配置好各项数据,注意JDK、Hadoop的版本兼容 108 | - 配置管理: maven 109 | - 主要技术:Spring MVC / Hadoop / JSP 110 | - 测试用例说明: 111 | > HbaseBatchInsertTest.java: 用于测试批量插入 112 | > HbaseConnectionTest.java: 用于测试Hbase连接是否正常 113 | > ParseLogTest.java: 用于测试日志解析 114 | > ListBean.java: 打印所有的bean,用于应付@Autowried失败的情况 115 | - 前端部分: 116 | >####代码部分 117 | > index.jsp: 默认加载页面,加载完成后会请求数据,展示前一天所有网站数据 118 | > index.js: 用于处理index.jsp中的各种请求和数据解析 119 | 120 | > ---------- 121 | > queryData.jsp: 用于查询各家网站数据,输入为日期 + 网站,支持多选 122 | > queryData.js: 用于处理queryData.jsp中的各种请求和数据解析(待完成) 123 | 124 | > --------- 125 | > dataGrid.jsp: 以表格的形式展示数据(待完成) 126 | 127 | > -------- 128 | > myCharts.js: 使用echarts绘制各种图表(注意dom的初始化在外部完成) 129 | > inputCheck.js: 检查输入是否合法 130 | 131 | >--------- 132 | > mystyle.css: 定制各类样式 133 | >####第三方库 134 | > Bootstrap: 主要是用其格栅系统 135 | > Bootstrap-select: 多选框的实现 136 | > BootstrapDatepickr: 日期输入 137 | > echarts: 
绘制各类图表 138 | > jQuery: 框架 139 | > font-awesome: 各类小图标 140 | 141 | 142 | #### 数据离线分析 143 | 144 | - 工程名: ScalaReadAndWrite 145 | - 功能说明: 146 | 147 | > 离线分析各类数据,共13个指标,详情见数据库表 LogAna 设计 148 | 149 | - 开发环境: 150 | > Scala 2.11 151 | > Spark 1.52 152 | > Hadoop 2.7 153 | - 特别说明: 154 | > spark 中全局变量只有两种实现方式,广播变量或累加器,本项目使用累加器 155 | > 自定义的累加器的时候非常一定要注意输入输出类型一定要正确 156 | > 一定要实现全部六个重载函数 157 | > 一个累加器只能传递一种变量,这个变量可以是复杂的对象 158 | > 不这样做的话累加器会失效! 159 | - 部署说明:暂无 160 | - 配置管理:maven 161 | - 主要技术:Spark 162 | - 项目结构说明: 163 | > Accumulator:累加器,包含各种自定义的累加器 164 | > analysis: 主要分析代码 165 | > DAO: 解析实体类,并存入到HBase 166 | > Entity: 两个实体类 167 | > util: 各种工具类 168 | 169 | #### 3. 项目截图: 170 | 171 | - Hbase数据库截图 172 | ![image](https://github.com/Maicius/WebLogsAnalysisSystem/blob/master/image/p2.png) 173 | 174 | - 数据展示界面 175 | ![image](https://github.com/Maicius/WebLogsAnalysisSystem/blob/master/image/p1.png) 176 | 177 | - 数据展示界面 178 | ![image](https://github.com/Maicius/WebLogsAnalysisSystem/blob/master/image/p3.png) 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | -------------------------------------------------------------------------------- /RestoreData/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "restoredata", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "bootstrap": { 8 | "version": "3.3.7", 9 | "resolved": "http://registry.npm.taobao.org/bootstrap/download/bootstrap-3.3.7.tgz", 10 | "integrity": "sha1-WjiTlFSfIzMIdaOxUGVldPip63E=" 11 | }, 12 | "echarts": { 13 | "version": "3.6.2", 14 | "resolved": "http://registry.npm.taobao.org/echarts/download/echarts-3.6.2.tgz", 15 | "integrity": "sha1-hilUyLWBC/+HpIsN4EFu2MS7HDY=", 16 | "requires": { 17 | "zrender": "3.5.2" 18 | } 19 | }, 20 | "jquery": { 21 | "version": "3.2.1", 22 | "resolved": "http://registry.npm.taobao.org/jquery/download/jquery-3.2.1.tgz", 23 | "integrity": 
"sha1-XE2d5lKvbNCncBVKYxu6ErAVx4c=" 24 | }, 25 | "zrender": { 26 | "version": "3.5.2", 27 | "resolved": "http://registry.npm.taobao.org/zrender/download/zrender-3.5.2.tgz", 28 | "integrity": "sha1-53DL6Xi19JgcG5PZuEFHo0dPIdI=" 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /RestoreData/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "restoredata", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "bootstrap": "^3.3.7", 13 | "echarts": "^3.6.2", 14 | "jquery": "^3.2.1" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /RestoreData/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | war 7 | 8 | TomcatLogsAnalysis 9 | software.hbase 10 | RestoreData 11 | 1.0-SNAPSHOT 12 | 13 | 14 | 2.0 15 | 3.1.0.RELEASE 16 | 17 | 18 | 19 | 20 | default-profile 21 | 22 | true 23 | 24 | ${java.home}/../lib/tools.jar 25 | 26 | 27 | 28 | ${java.home}/../lib/tools.jar 29 | 30 | 31 | 32 | mac-profile 33 | 34 | false 35 | 36 | ${java.home}/../Classes/classes.jar 37 | 38 | 39 | 40 | ${java.home}/../Classes/classes.jar 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | com.google.code.gson 49 | gson 50 | ${google-gson.version} 51 | 52 | 53 | 54 | 55 | org.springframework 56 | spring-core 57 | ${spring.core.version} 58 | 59 | 60 | org.springframework 61 | spring-expression 62 | ${spring.core.version} 63 | 64 | 65 | org.springframework 66 | spring-beans 67 | ${spring.core.version} 68 | 69 | 70 | org.springframework 71 | spring-context 72 | ${spring.core.version} 73 | 74 | 75 | org.springframework 76 | spring-context-support 77 | ${spring.core.version} 78 | 79 | 80 | org.springframework 81 | spring-orm 82 | 
${spring.core.version} 83 | 84 | 85 | org.springframework 86 | spring-oxm 87 | ${spring.core.version} 88 | 89 | 90 | org.springframework 91 | spring-aop 92 | ${spring.core.version} 93 | 94 | 95 | org.springframework 96 | spring-webmvc 97 | ${spring.core.version} 98 | 99 | 100 | org.springframework 101 | spring-web 102 | ${spring.core.version} 103 | 104 | 105 | org.springframework 106 | spring-test 107 | ${spring.core.version} 108 | 109 | 110 | 111 | org.apache.hadoop 112 | hadoop-common 113 | 2.6.4 114 | 115 | 116 | tomcat 117 | jasper-compiler 118 | 119 | 120 | tomcat 121 | jasper-runtime 122 | 123 | 124 | log4j 125 | log4j 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | org.apache.hadoop 135 | hadoop-hdfs 136 | 2.6.4 137 | 138 | 139 | org.apache.hadoop 140 | hadoop-client 141 | 2.6.4 142 | 143 | 144 | 145 | org.apache.hbase 146 | hbase-client 147 | 1.2.4 148 | 149 | 150 | 151 | org.apache.hbase 152 | hbase-common 153 | 1.2.4 154 | 155 | 156 | junit 157 | junit 158 | 4.12 159 | 160 | 161 | 162 | tools 163 | jdk.tools 164 | 1.7 165 | system 166 | ${toolsjar} 167 | 168 | 169 | org.springframework 170 | spring-web 171 | ${spring.core.version} 172 | 173 | 174 | org.springframework 175 | spring-web 176 | ${spring.core.version} 177 | 178 | 179 | 180 | 181 | com.fasterxml.jackson.core 182 | jackson-databind 183 | 2.5.3 184 | 185 | 186 | com.alibaba 187 | fastjson 188 | 1.2.30 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | hbaseInterface 198 | 199 | 200 | org.apache.maven.plugins 201 | maven-compiler-plugin 202 | 203 | 1.8 204 | 1.8 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | org.codehaus.mojo 224 | build-helper-maven-plugin 225 | 1.1 226 | 227 | 228 | add-source 229 | generate-sources 230 | 231 | add-source 232 | 233 | 234 | 235 | src/main/java 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | 255 | 256 | 257 | 258 | 
-------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/Entity/ConditionQueryData.java: -------------------------------------------------------------------------------- 1 | package software.hbase.Entity; 2 | 3 | import java.util.ArrayList; 4 | import java.util.List; 5 | 6 | public class ConditionQueryData { 7 | private String dateBegin; 8 | private List courtList = new ArrayList<>(); 9 | 10 | public String getDateBegin() { 11 | return dateBegin; 12 | } 13 | 14 | public void setDateBegin(String dateBegin) { 15 | this.dateBegin = dateBegin; 16 | } 17 | 18 | public List getCourtList() { 19 | return courtList; 20 | } 21 | 22 | public void setCourtList(List courtList) { 23 | this.courtList = courtList; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/Entity/QueryData.java: -------------------------------------------------------------------------------- 1 | package software.hbase.Entity; 2 | 3 | public class QueryData { 4 | private Long reqSum; 5 | private String IPList; 6 | private String IPSumVa; 7 | private String IPTotalNum; 8 | private String TotoBytes; 9 | private String ReqHourList; 10 | private String ReqSecList; 11 | private String MaxURL; 12 | private String Method; 13 | private String State; 14 | private String URLList; 15 | private String rowK; 16 | private String BytesSecList; 17 | private String BytesHourList; 18 | 19 | public Long getReqSum() { 20 | return reqSum; 21 | } 22 | 23 | public void setReqSum(Long reqSum) { 24 | this.reqSum = reqSum; 25 | } 26 | 27 | public String getIPList() { 28 | return IPList; 29 | } 30 | 31 | public void setIPList(String IPList) { 32 | this.IPList = IPList; 33 | } 34 | 35 | public String getIPSumVa() { 36 | return IPSumVa; 37 | } 38 | 39 | public void setIPSumVa(String IPSumVa) { 40 | this.IPSumVa = IPSumVa; 41 | } 42 | 43 | public String getIPTotalNum() { 44 | return 
IPTotalNum; 45 | } 46 | 47 | public void setIPTotalNum(String IPTotalNum) { 48 | this.IPTotalNum = IPTotalNum; 49 | } 50 | 51 | public String getTotoBytes() { 52 | return TotoBytes; 53 | } 54 | 55 | public void setTotoBytes(String totoBytes) { 56 | TotoBytes = totoBytes; 57 | } 58 | 59 | public String getReqHourList() { 60 | return ReqHourList; 61 | } 62 | 63 | public void setReqHourList(String reqHourList) { 64 | ReqHourList = reqHourList; 65 | } 66 | 67 | public String getReqSecList() { 68 | return ReqSecList; 69 | } 70 | 71 | public void setReqSecList(String reqSecList) { 72 | ReqSecList = reqSecList; 73 | } 74 | 75 | public String getMaxURL() { 76 | return MaxURL; 77 | } 78 | 79 | public void setMaxURL(String maxURL) { 80 | MaxURL = maxURL; 81 | } 82 | 83 | public String getMethod() { 84 | return Method; 85 | } 86 | 87 | public void setMethod(String method) { 88 | Method = method; 89 | } 90 | 91 | public String getState() { 92 | return State; 93 | } 94 | 95 | public void setState(String state) { 96 | State = state; 97 | } 98 | 99 | public String getURLList() { 100 | return URLList; 101 | } 102 | 103 | public void setURLList(String URLList) { 104 | this.URLList = URLList; 105 | } 106 | 107 | public String getRowK() { 108 | return rowK; 109 | } 110 | 111 | public void setRowK(String rowK) { 112 | this.rowK = rowK; 113 | } 114 | 115 | public String getBytesSecList() { 116 | return BytesSecList; 117 | } 118 | 119 | public void setBytesSecList(String bytesSecList) { 120 | BytesSecList = bytesSecList; 121 | } 122 | 123 | public String getBytesHourList() { 124 | return BytesHourList; 125 | } 126 | 127 | public void setBytesHourList(String bytesHourList) { 128 | BytesHourList = bytesHourList; 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/controller/QueryData.java: -------------------------------------------------------------------------------- 1 | package 
software.hbase.controller; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import org.springframework.beans.factory.annotation.Autowired; 5 | import org.springframework.stereotype.Controller; 6 | import org.springframework.web.bind.annotation.RequestBody; 7 | import org.springframework.web.bind.annotation.RequestMapping; 8 | import org.springframework.web.bind.annotation.RequestMethod; 9 | import org.springframework.web.bind.annotation.ResponseBody; 10 | import org.springframework.web.servlet.ModelAndView; 11 | import software.hbase.Entity.ConditionQueryData; 12 | import software.hbase.hbase.dataObject.LogAna; 13 | import software.hbase.service.QueryService; 14 | 15 | import java.io.UnsupportedEncodingException; 16 | import java.net.URLDecoder; 17 | import java.util.List; 18 | 19 | @Controller 20 | public class QueryData { 21 | @Autowired 22 | private QueryService queryService; 23 | @ResponseBody 24 | @RequestMapping(value="/queryAllData", method = RequestMethod.GET) 25 | public String queryAllData() throws Exception{ 26 | String result = queryService.queryAllData(); 27 | System.out.println(result); 28 | return result; 29 | } 30 | @RequestMapping(value="/conditionQuery", method = RequestMethod.POST) 31 | public String conditionQuery(@RequestBody String json) throws Exception{ 32 | String ujson = ""; 33 | String res = ""; 34 | ConditionQueryData conditionQueryData = new ConditionQueryData(); 35 | try { 36 | ujson = new String(json.getBytes("ISO-8859-1"),"utf-8"); 37 | res = URLDecoder.decode(ujson,"UTF-8"); 38 | } catch (UnsupportedEncodingException e) { 39 | e.printStackTrace(); 40 | return "未知错误"; 41 | } 42 | ObjectMapper mapper = new ObjectMapper(); 43 | conditionQueryData = mapper.readValue(res, ConditionQueryData.class); 44 | System.out.println("请求数据:" + res); 45 | List list = queryService.conditionQuery(conditionQueryData); 46 | String jsonLog = mapper.writeValueAsString(list); 47 | System.out.println("返回数据:" + jsonLog); 48 | return jsonLog; 49 
| } 50 | 51 | 52 | @RequestMapping(value="/queryData", method = RequestMethod.GET) 53 | public ModelAndView queryData() throws Exception{ 54 | ModelAndView mv = new ModelAndView(); 55 | mv.setViewName("index"); 56 | return mv; 57 | } 58 | 59 | @RequestMapping(value="/queryCourtData", method = RequestMethod.GET) 60 | public ModelAndView queryCourtData() throws Exception{ 61 | ModelAndView mv = new ModelAndView(); 62 | mv.setViewName("queryData"); 63 | return mv; 64 | } 65 | 66 | @RequestMapping(value="/queryDataGrid", method = RequestMethod.GET) 67 | public ModelAndView queryDataGrid() throws Exception{ 68 | ModelAndView mv = new ModelAndView(); 69 | mv.setViewName("dataGrid"); 70 | return mv; 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/controller/getTomcatLogsController.java: -------------------------------------------------------------------------------- 1 | package software.hbase.controller; 2 | 3 | import org.springframework.stereotype.Controller; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RequestMapping; 6 | import org.springframework.web.bind.annotation.RequestMethod; 7 | 8 | @Controller 9 | public class getTomcatLogsController { 10 | 11 | @RequestMapping(value="getTomcatLogs", method = RequestMethod.GET, produces="text/html;charset=UTF-8") 12 | public void getTomcatLogs(@RequestBody String json) throws Exception { 13 | 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/HbaseColumn.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase; 2 | 3 | import java.lang.annotation.Retention; 4 | import java.lang.annotation.Target; 5 | 6 | import static java.lang.annotation.ElementType.FIELD; 7 | import static 
java.lang.annotation.RetentionPolicy.RUNTIME; 8 | 9 | @Target(FIELD) 10 | @Retention(RUNTIME) 11 | public @interface HbaseColumn { 12 | String family(); 13 | String qualifier() default ""; 14 | } 15 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/HbaseOneToMany.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase; 2 | 3 | import java.lang.annotation.Retention; 4 | import java.lang.annotation.Target; 5 | 6 | import static java.lang.annotation.ElementType.FIELD; 7 | import static java.lang.annotation.RetentionPolicy.RUNTIME; 8 | 9 | //对应的field一般是TreeMap 10 | @Target(FIELD) 11 | @Retention(RUNTIME) 12 | public @interface HbaseOneToMany { 13 | String splitSign(); //分隔符 14 | String joinField(); //关联依据的列 15 | Class joinTableDao(); //连接表的Dao 16 | boolean lazy() default true; //是否主动加载 17 | } 18 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/HbaseOneToOne.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase; 2 | 3 | import java.lang.annotation.Retention; 4 | import java.lang.annotation.Target; 5 | 6 | import static java.lang.annotation.ElementType.FIELD; 7 | import static java.lang.annotation.RetentionPolicy.RUNTIME; 8 | 9 | //对应的field一般是TreeMap 10 | @Target(FIELD) 11 | @Retention(RUNTIME) 12 | public @interface HbaseOneToOne { 13 | boolean changeKey() default false; //是否更改原来的key为orderKey 14 | String orderKey() default ""; //根据关联对象的哪个列排序 15 | String joinField(); //关联依据的列 16 | Class joinTableDao(); //连接表的Dao 17 | boolean lazy() default true; //是否主动加载 18 | } 19 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/HbaseTable.java: 
-------------------------------------------------------------------------------- 1 | package software.hbase.hbase; 2 | 3 | import java.lang.annotation.Retention; 4 | import java.lang.annotation.Target; 5 | 6 | import static java.lang.annotation.ElementType.TYPE; 7 | import static java.lang.annotation.RetentionPolicy.RUNTIME; 8 | 9 | @Target(TYPE) 10 | @Retention(RUNTIME) 11 | public @interface HbaseTable { 12 | String name(); 13 | String rowKey(); 14 | } 15 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/QueryDAO.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase; 2 | 3 | public interface QueryDAO { 4 | } 5 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/dao/HbaseDaoFactory.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase.dao; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.stereotype.Component; 5 | 6 | @Component 7 | public class HbaseDaoFactory { 8 | @Autowired 9 | private LogDataDao logDataDao; 10 | 11 | public BaseHbaseDao getDao(Class daoType){ 12 | if(daoType.equals(LogDataDao.class)){ 13 | return logDataDao; 14 | } 15 | return null; 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/dao/LogAnaDao.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase.dao; 2 | 3 | import org.springframework.stereotype.Repository; 4 | import software.hbase.hbase.dataObject.LogAna; 5 | 6 | @Repository 7 | public class LogAnaDao extends BaseHbaseDao { 8 | 9 | public LogAnaDao() throws NoSuchFieldException, SecurityException { 10 | super(LogAna.class); 11 | } 12 
| } 13 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/dao/LogDataDao.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase.dao; 2 | 3 | import org.springframework.stereotype.Repository; 4 | import software.hbase.hbase.dataObject.LogData; 5 | 6 | @Repository 7 | public class LogDataDao extends BaseHbaseDao { 8 | public LogDataDao() throws NoSuchFieldException, SecurityException{ 9 | super(LogData.class); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/dataObject/LogAna.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase.dataObject; 2 | 3 | import software.hbase.hbase.HbaseColumn; 4 | import software.hbase.hbase.HbaseTable; 5 | 6 | /** 7 | * 保存分析结果的实体类 8 | */ 9 | @HbaseTable(name="LogAna", rowKey = "dateCourt") 10 | public class LogAna implements java.io.Serializable { 11 | 12 | public String dateCourt; 13 | 14 | @HbaseColumn(family = "REQ", qualifier = "ReqSum") 15 | public String ReqSum; 16 | 17 | @HbaseColumn(family = "IP", qualifier = "IPList") 18 | public String IPList; 19 | 20 | @HbaseColumn(family = "IP", qualifier = "IPSumVal") 21 | public String IPSumVal; 22 | @HbaseColumn(family = "IP", qualifier = "IPTotalNum") 23 | public String IPTotalNum; 24 | 25 | @HbaseColumn(family = "BYTES", qualifier = "TotalBytes") 26 | public String TotalBytes; 27 | 28 | @HbaseColumn(family = "BYTES", qualifier = "BytesHourList") 29 | public String BytesHourList; 30 | 31 | @HbaseColumn(family = "BYTES", qualifier = "BytesSecList") 32 | public String BytesSecList; 33 | 34 | @HbaseColumn(family = "URL", qualifier = "MaxURL") 35 | public String MaxURL; 36 | 37 | @HbaseColumn(family = "URL", qualifier = "URLList") 38 | public String URLList; 39 | 40 | @HbaseColumn(family 
= "METHOD_STATE", qualifier = "MethodList") 41 | public String MethodList; 42 | 43 | @HbaseColumn(family = "METHOD_STATE", qualifier = "StateList") 44 | public String StateList; 45 | 46 | @HbaseColumn(family = "REQ", qualifier = "ReqHourList") 47 | public String ReqHourList; 48 | 49 | @HbaseColumn(family = "REQ", qualifier = "ReqSecList") 50 | public String ReqSecList; 51 | 52 | public LogAna(){} 53 | 54 | public String getDateCourt() { 55 | return dateCourt; 56 | } 57 | 58 | public void setDateCourt(String dateCourt) { 59 | this.dateCourt = dateCourt; 60 | } 61 | 62 | public String getReqSum() { 63 | return ReqSum; 64 | } 65 | 66 | public void setReqSum(String reqSum) { 67 | ReqSum = reqSum; 68 | } 69 | 70 | public String getIPList() { 71 | return IPList; 72 | } 73 | 74 | public void setIPList(String IPList) { 75 | this.IPList = IPList; 76 | } 77 | 78 | public String getIPSumVal() { 79 | return IPSumVal; 80 | } 81 | 82 | public void setIPSumVal(String IPSumVal) { 83 | this.IPSumVal = IPSumVal; 84 | } 85 | 86 | public String getIPTotalNum() { 87 | return IPTotalNum; 88 | } 89 | 90 | public void setIPTotalNum(String IPTotalNum) { 91 | this.IPTotalNum = IPTotalNum; 92 | } 93 | 94 | public String getToTalBytes() { 95 | return TotalBytes; 96 | } 97 | 98 | public void setToTalBytes(String toTalBytes) { 99 | TotalBytes = toTalBytes; 100 | } 101 | 102 | public String getBytesHourList() { 103 | return BytesHourList; 104 | } 105 | 106 | public void setBytesHourList(String bytesHourList) { 107 | BytesHourList = bytesHourList; 108 | } 109 | 110 | public String getBytesSecList() { 111 | return BytesSecList; 112 | } 113 | 114 | public void setBytesSecList(String bytesSecList) { 115 | BytesSecList = bytesSecList; 116 | } 117 | 118 | public String getMaxURL() { 119 | return MaxURL; 120 | } 121 | 122 | public void setMaxURL(String maxURL) { 123 | MaxURL = maxURL; 124 | } 125 | 126 | public String getURLList() { 127 | return URLList; 128 | } 129 | 130 | public void 
setURLList(String URLList) { 131 | this.URLList = URLList; 132 | } 133 | 134 | public String getMethodList() { 135 | return MethodList; 136 | } 137 | 138 | public void setMethodList(String methodList) { 139 | MethodList = methodList; 140 | } 141 | 142 | public String getStateList() { 143 | return StateList; 144 | } 145 | 146 | public void setStateList(String stateList) { 147 | StateList = stateList; 148 | } 149 | 150 | public String getReqHourList() { 151 | return ReqHourList; 152 | } 153 | 154 | public void setReqHourList(String reqHourList) { 155 | ReqHourList = reqHourList; 156 | } 157 | 158 | public String getReqSecList() { 159 | return ReqSecList; 160 | } 161 | 162 | public void setReqSecList(String reqSecList) { 163 | ReqSecList = reqSecList; 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/hbase/dataObject/LogData.java: -------------------------------------------------------------------------------- 1 | package software.hbase.hbase.dataObject; 2 | 3 | import software.hbase.hbase.HbaseColumn; 4 | import software.hbase.hbase.HbaseTable; 5 | 6 | /** 7 | * 保存原始数据 8 | */ 9 | @HbaseTable(name="LogData", rowKey = "LogId") 10 | public class LogData implements java.io.Serializable{ 11 | public String LogId; 12 | 13 | @HbaseColumn(family = "prop", qualifier = "FYDM") 14 | public String fydm; 15 | 16 | @HbaseColumn(family = "prop", qualifier = "DATES") 17 | public String dates; 18 | 19 | @HbaseColumn(family = "prop", qualifier = "IP") 20 | public String ip; 21 | 22 | @HbaseColumn(family = "prop", qualifier = "URL") 23 | public String URL; 24 | 25 | @HbaseColumn(family = "prop", qualifier = "STATE") 26 | public String state; 27 | 28 | @HbaseColumn(family = "prop", qualifier = "METHOD") 29 | public String methods; 30 | 31 | @HbaseColumn(family = "prop", qualifier = "BYTES") 32 | public String bytes; 33 | 34 | // public TomcatLogs(){} 35 | // public TomcatLogs(String fydm, 
String dates, String LogId, String ip, String method, 36 | // String URL, String state, String bytes){ 37 | // this.fydm = fydm; 38 | // this.dates = dates; 39 | // this.LogId = LogId; 40 | // this. ip =ip; 41 | // this.methods = method; 42 | // this.URL = URL; 43 | // this.state = state; 44 | // this.bytes = bytes; 45 | // } 46 | public LogData(){} 47 | public LogData(String LogId, String fydm, String dates, String ip, String URL, String state, String methods, String bytes){ 48 | this.LogId = LogId; 49 | this.fydm = fydm; 50 | this.dates = dates; 51 | this.ip = ip; 52 | this.URL = URL; 53 | this.state = state; 54 | this.methods = methods; 55 | this.bytes = bytes; 56 | } 57 | public String getFydm() { 58 | return fydm; 59 | } 60 | 61 | public void setFydm(String fydm) { 62 | this.fydm = fydm; 63 | } 64 | 65 | public String getDates() { 66 | return dates; 67 | } 68 | 69 | public void setDates(String dates) { 70 | this.dates = dates; 71 | } 72 | 73 | public String getLogId() { 74 | return LogId; 75 | } 76 | 77 | public void setLogId(String id) { 78 | this.LogId = id; 79 | } 80 | 81 | public String getIp() { 82 | return ip; 83 | } 84 | 85 | public void setIp(String ip) { 86 | this.ip = ip; 87 | } 88 | 89 | public String getURL() { 90 | return URL; 91 | } 92 | 93 | public void setURL(String URL) { 94 | this.URL = URL; 95 | } 96 | 97 | public String getState() { 98 | return state; 99 | } 100 | 101 | public void setState(String state) { 102 | this.state = state; 103 | } 104 | 105 | public String getBytes() { 106 | return bytes; 107 | } 108 | 109 | public void setBytes(String bytes) { 110 | this.bytes = bytes; 111 | } 112 | 113 | public String getMethods() { 114 | return methods; 115 | } 116 | 117 | public void setMethods(String methods) { 118 | this.methods = methods; 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/HBaseService.java: 
-------------------------------------------------------------------------------- 1 | package software.hbase.service; 2 | 3 | import org.apache.hadoop.hbase.filter.FilterList.Operator; 4 | import software.hbase.service.model.HbaseRow; 5 | import software.hbase.service.model.HbaseServiceConditonModel; 6 | 7 | import java.io.IOException; 8 | import java.util.Collection; 9 | import java.util.List; 10 | 11 | 12 | public interface HBaseService { 13 | 14 | void showTables() throws Exception; 15 | 16 | /** 17 | * 插入单行 18 | * @param tableName 19 | * @param row 20 | * @throws IOException 21 | */ 22 | void saveOrUpdate(String tableName, HbaseRow row) throws IOException; 23 | /** 24 | * 插入多行 25 | * @param tableName 26 | * @param row 27 | * @throws IOException 28 | */ 29 | void saveOrUpdates(String tableName, Collection row) throws IOException; 30 | /** 31 | * 根据行键删除 32 | * @param tableName 33 | * @param rowKey 34 | * @param colFamily 35 | * @param col 36 | * @throws IOException 37 | */ 38 | void delete(String tableName, String rowKey, String colFamily, String col) throws IOException; 39 | /** 40 | * 根据行键列表批量删除 41 | * @param tableName 42 | * @param rowKeys 43 | * @param colFamily 44 | * @param col 45 | * @throws IOException 46 | */ 47 | void deletes(String tableName, Collection rowKeys, String colFamily, String col) throws IOException; 48 | /** 49 | * 根据行键获取数据 50 | * @param tableName 51 | * @param rowKey 52 | * @param colFamily 53 | * @param col 54 | * @return 55 | */ 56 | HbaseRow findById(String tableName, String rowKey, String colFamily, String col) throws IOException; 57 | /** 58 | * 根据行键列表获取数据 59 | * 60 | * @param tableName 表名称 61 | * @param rowKeys RowKey列表 62 | * @param colFamily 列族名称 63 | * @param col 列名称 64 | * @throws IOException 65 | */ 66 | List findByIds(String tableName, Collection rowKeys, String colFamily, String col) throws IOException; 67 | /** 68 | * 根据行键正则或值条件获取数据 69 | * @param tableName 70 | * @param rowRegexps 71 | * @param colFamily 72 | * @param col 73 | 
* @param conditions 74 | * @param op 75 | * @return 76 | * @throws IOException 77 | */ 78 | List findByIdRegexpAndCondition(String tableName, Collection rowRegexps,String colFamily, String col,Collection conditions,Operator op) throws IOException; 79 | 80 | } 81 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/LogDataService.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service; 2 | 3 | public interface LogDataService { 4 | void insertLogs() throws Exception; 5 | } 6 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/QueryService.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service; 2 | 3 | import software.hbase.Entity.ConditionQueryData; 4 | import software.hbase.hbase.dataObject.LogAna; 5 | 6 | import java.util.List; 7 | 8 | public interface QueryService { 9 | String queryAllData() throws Exception; 10 | List conditionQuery(ConditionQueryData data) throws Exception; 11 | } 12 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/impl/HbaseServiceImpl.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service.impl; 2 | 3 | import org.apache.hadoop.conf.Configuration; 4 | import org.apache.hadoop.hbase.*; 5 | import org.apache.hadoop.hbase.client.*; 6 | import org.apache.hadoop.hbase.filter.*; 7 | import org.apache.hadoop.hbase.util.Bytes; 8 | import org.springframework.beans.factory.DisposableBean; 9 | import org.springframework.beans.factory.InitializingBean; 10 | import org.springframework.stereotype.Service; 11 | import software.hbase.service.HBaseService; 12 | import software.hbase.service.model.HbaseCell; 13 | import 
software.hbase.service.model.HbaseRow; 14 | import software.hbase.service.model.HbaseServiceConditonModel; 15 | import software.hbase.util.PropertiesUtil; 16 | 17 | import java.io.IOException; 18 | import java.util.ArrayList; 19 | import java.util.Collection; 20 | import java.util.List; 21 | 22 | @Service("HBaseService") 23 | public class HbaseServiceImpl implements HBaseService, InitializingBean,DisposableBean { 24 | private static Configuration configuration; 25 | private Connection connection; 26 | private Admin admin; 27 | 28 | static{ 29 | configuration = HBaseConfiguration.create(); 30 | configuration.set("hbase.zookeeper.property.clientPort", PropertiesUtil.getZookeeperPort()); 31 | configuration.set("hbase.zookeeper.quorum", PropertiesUtil.getZookeeperIp()); 32 | configuration.set("hbase.master", PropertiesUtil.getHbaseIp()); 33 | configuration.set("hbase.root.dir",PropertiesUtil.getHbaseDir()); 34 | System.out.println("Hbase initialized"); 35 | } 36 | public void afterPropertiesSet() throws Exception { 37 | try { 38 | System.out.println("begin to create connect"); 39 | connection = ConnectionFactory.createConnection(configuration); 40 | admin = connection.getAdmin(); 41 | System.out.println("Connect to Hbase Successful"); 42 | } catch (IOException e) { 43 | e.printStackTrace(); 44 | } 45 | } 46 | 47 | public void destroy() throws Exception { 48 | try { 49 | if (null != admin) { 50 | admin.close(); 51 | } 52 | if (null != connection) { 53 | connection.close(); 54 | } 55 | } catch (IOException e) { 56 | e.printStackTrace(); 57 | } 58 | } 59 | 60 | public void showTables() throws Exception{ 61 | // Instantiating table descriptor class 62 | HTableDescriptor[] tableDescriptor =admin.listTables(); 63 | System.out.println("success to create descriptor"); 64 | // printing all the table names. 
65 | for (int i=0; i rows) throws IOException { 89 | Table table = connection.getTable(TableName.valueOf(tableName)); 90 | List putList = new ArrayList(); 91 | for(HbaseRow row:rows){ 92 | Put put = new Put(Bytes.toBytes(row.getRowKey())); 93 | for(HbaseCell cell:row.getCells()){ 94 | put.addColumn(Bytes.toBytes(cell.getFamily()), 95 | Bytes.toBytes(cell.getQualifier()), Bytes.toBytes(cell.getValue())); 96 | } 97 | putList.add(put); 98 | } 99 | table.put(putList); 100 | table.close(); 101 | } 102 | 103 | /** 104 | * 根据RowKey获取数据 105 | * 106 | * @param tableName 表名称 107 | * @param rowKey RowKey名称 108 | * @param colFamily 列族名称 109 | * @param col 列名称 110 | * @throws IOException 111 | */ 112 | public HbaseRow findById(String tableName, String rowKey, String colFamily, String col) throws IOException { 113 | Table table = connection.getTable(TableName.valueOf(tableName)); 114 | Get get = new Get(Bytes.toBytes(rowKey)); 115 | if (colFamily != null) { 116 | get.addFamily(Bytes.toBytes(colFamily)); 117 | } 118 | if (colFamily != null && col != null) { 119 | get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col)); 120 | } 121 | //设置只读取最新版本 122 | get.setMaxVersions(1); 123 | Result result = table.get(get); 124 | HbaseRow row = praseOneRow(result); 125 | table.close(); 126 | return row; 127 | } 128 | 129 | /** 130 | * 根据RowKey列表获取数据 131 | * 132 | * @param tableName 表名称 133 | * @param rowKeys RowKey列表 134 | * @param colFamily 列族名称 135 | * @param col 列名称 136 | * @throws IOException 137 | */ 138 | public List findByIds(String tableName, Collection rowKeys, String colFamily, String col) throws IOException { 139 | Table table = connection.getTable(TableName.valueOf(tableName)); 140 | List getList = new ArrayList(); 141 | for(String rowKey:rowKeys){ 142 | Get get = new Get(Bytes.toBytes(rowKey)); 143 | if (colFamily != null) { 144 | get.addFamily(Bytes.toBytes(colFamily)); 145 | } 146 | if (colFamily != null && col != null) { 147 | get.addColumn(Bytes.toBytes(colFamily), 
Bytes.toBytes(col)); 148 | } 149 | //设置只读取最新版本 150 | get.setMaxVersions(1); 151 | getList.add(get); 152 | } 153 | Result[] results = table.get(getList); 154 | List rows = new ArrayList(); 155 | for(Result result:results){ 156 | HbaseRow row = praseOneRow(result); 157 | if(row!=null){ 158 | rows.add(row); 159 | } 160 | } 161 | table.close(); 162 | return rows; 163 | } 164 | 165 | /** 166 | * 根据行键正则和值条件获取数据 167 | * @param tableName 168 | * @param rowRegexps 169 | * @param colFamily 170 | * @param col 171 | * @param conditions 172 | * @param op 173 | * @return 174 | * @throws IOException 175 | */ 176 | public List findByIdRegexpAndCondition(String tableName, Collection rowRegexps,String colFamily, String col,Collection conditions,FilterList.Operator op) throws IOException { 177 | Table table = connection.getTable(TableName.valueOf(tableName)); 178 | Scan scan = new Scan(); 179 | if (colFamily != null) { 180 | scan.addFamily(Bytes.toBytes(colFamily)); 181 | } 182 | if (colFamily != null && col != null) { 183 | scan.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col)); 184 | } 185 | //设置只读取最新版本 186 | scan.setMaxVersions(1); 187 | //设置条件 188 | List filters = new ArrayList(); 189 | if(rowRegexps != null){ 190 | for(String rowRegexp:rowRegexps){ 191 | RowFilter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(rowRegexp)); 192 | filters.add(filter); 193 | } 194 | } 195 | if(conditions!=null){ 196 | for(HbaseServiceConditonModel c:conditions){ 197 | SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes(c.getFamily()), 198 | Bytes.toBytes(c.getCol()),c.getOp(),Bytes.toBytes(c.getValue())); 199 | filters.add(filter); 200 | } 201 | } 202 | if(op==null){ 203 | op= FilterList.Operator.MUST_PASS_ONE; 204 | } 205 | FilterList filterList = new FilterList(op,filters); 206 | scan.setFilter(filterList); 207 | ResultScanner scanner = table.getScanner(scan); 208 | List rows = new ArrayList(); 209 | for(Result result = 
scanner.next();result!=null;result = scanner.next()){ 210 | HbaseRow row = praseOneRow(result); 211 | if(row!=null){ 212 | rows.add(row); 213 | } 214 | } 215 | scanner.close(); 216 | table.close(); 217 | return rows; 218 | } 219 | 220 | /** 221 | * 根据行键删除 222 | * @param tableName 223 | * @param rowKey 224 | * @param colFamily 225 | * @param col 226 | * @throws IOException 227 | */ 228 | public void delete(String tableName, String rowKey, String colFamily, String col) throws IOException { 229 | if (admin.tableExists(TableName.valueOf(tableName))) { 230 | Table table = connection.getTable(TableName.valueOf(tableName)); 231 | Delete del = new Delete(Bytes.toBytes(rowKey)); 232 | if (colFamily != null) { 233 | //删掉所有列的所有时间戳版本 234 | del.addFamily(Bytes.toBytes(colFamily)); 235 | } 236 | if (colFamily != null && col != null) { 237 | //删掉时间戳最新的值 238 | //del.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col)); 239 | //删掉所有版本 240 | del.addColumns(Bytes.toBytes(colFamily), Bytes.toBytes(col)); 241 | } 242 | table.delete(del); 243 | table.close(); 244 | } 245 | } 246 | 247 | /** 248 | * 根据行键列表批量删除 249 | * @param tableName 250 | * @param rowKeys 251 | * @param colFamily 252 | * @param col 253 | * @throws IOException 254 | */ 255 | public void deletes(String tableName, Collection rowKeys, String colFamily, String col) throws IOException { 256 | if (admin.tableExists(TableName.valueOf(tableName))) { 257 | Table table = connection.getTable(TableName.valueOf(tableName)); 258 | List deleteList = new ArrayList(); 259 | for(String rowKey:rowKeys){ 260 | Delete del = new Delete(Bytes.toBytes(rowKey)); 261 | if (colFamily != null) { 262 | //删掉所有列的所有时间戳版本 263 | del.addFamily(Bytes.toBytes(colFamily)); 264 | } 265 | if (colFamily != null && col != null) { 266 | //删掉时间戳最新的值 267 | //del.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col)); 268 | //删掉所有版本 269 | del.addColumns(Bytes.toBytes(colFamily), Bytes.toBytes(col)); 270 | } 271 | deleteList.add(del); 272 | } 273 | 
table.delete(deleteList); 274 | table.close(); 275 | } 276 | } 277 | 278 | private HbaseRow praseOneRow(Result result){ 279 | if(result.getRow()!=null){ 280 | HbaseRow row = new HbaseRow(Bytes.toString(result.getRow())); 281 | for(Cell cell:result.rawCells()){ 282 | HbaseCell hcell = new HbaseCell(Bytes.toString(CellUtil.cloneFamily(cell)), 283 | Bytes.toString(CellUtil.cloneQualifier(cell)), 284 | Bytes.toString(CellUtil.cloneValue(cell))); 285 | row.addCell(hcell); 286 | } 287 | return row; 288 | } 289 | return null; 290 | } 291 | } 292 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/impl/LogDataServiceImpl.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service.impl; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.stereotype.Service; 5 | import software.hbase.hbase.dao.LogDataDao; 6 | import software.hbase.hbase.dataObject.LogData; 7 | import software.hbase.service.LogDataService; 8 | import software.hbase.util.ParseLogsUtil; 9 | 10 | import java.util.Date; 11 | import java.util.List; 12 | 13 | @Service("LogDataService") 14 | public class LogDataServiceImpl implements LogDataService { 15 | @Autowired 16 | private LogDataDao logDataDao; 17 | @Override 18 | public void insertLogs()throws Exception { 19 | List list = ParseLogsUtil.parseLogsUtil("/Users/maicius/RestoreData/src/main/resources/200_localhost_access_log.2017-08-08.txt"); 20 | System.out.println("parse end...\nbegin to insert"); 21 | Date a = new Date(); 22 | logDataDao.saveBatch(list); 23 | Date b = new Date(); 24 | System.out.println("插入历时:" + (b.getTime() - a.getTime()) + "毫秒"); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/impl/QueryServiceImpl.java: 
-------------------------------------------------------------------------------- 1 | package software.hbase.service.impl; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import org.springframework.beans.factory.annotation.Autowired; 5 | import org.springframework.stereotype.Service; 6 | import software.hbase.Entity.ConditionQueryData; 7 | import software.hbase.hbase.dao.LogAnaDao; 8 | import software.hbase.hbase.dataObject.LogAna; 9 | import software.hbase.service.QueryService; 10 | import software.hbase.util.DateUtil; 11 | 12 | import java.util.ArrayList; 13 | import java.util.List; 14 | 15 | @Service 16 | public class QueryServiceImpl implements QueryService{ 17 | 18 | @Autowired 19 | private LogAnaDao logAnaDao; 20 | 21 | @Override 22 | public String queryAllData() throws Exception { 23 | String yesterday = DateUtil.getYesterday(); 24 | //suffix 000 is the code meaning "all courts' data for one day" 25 | // String rowKey = yesterday + "000"; 26 | String rowKey = "20170823" + "000"; 27 | LogAna dataSets = logAnaDao.findById(rowKey); 28 | //convert bytes to MB 29 | int totalBytes = Integer.parseInt(dataSets.getToTalBytes()); 30 | totalBytes = totalBytes / 1024 / 1024; 31 | dataSets.setToTalBytes(String.valueOf(totalBytes)); 32 | ObjectMapper mapper = new ObjectMapper(); 33 | String json = mapper.writeValueAsString(dataSets); 34 | return json; 35 | } 36 | 37 | @Override 38 | public List conditionQuery(ConditionQueryData data) throws Exception { 39 | String yesterday = DateUtil.getYesterday(); 40 | // String rowKey = yesterday + "000"; 41 | String rowKey = ""; 42 | List rowKeys = new ArrayList<>(); 43 | for(String court: data.getCourtList()){ 44 | // rowKey = data.getDateBegin() + court; 45 | rowKey = "20170808" + court; 46 | rowKeys.add(rowKey); 47 | } 48 | List list = logAnaDao.findByIds(rowKeys); 49 | 50 | return list; 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/model/HbaseCell.java: 
-------------------------------------------------------------------------------- 1 | package software.hbase.service.model; 2 | 3 | public class HbaseCell { 4 | 5 | private HbaseColCouple colCouple; 6 | private String value; 7 | 8 | public HbaseCell(String family, String qualifier, String value) { 9 | super(); 10 | colCouple = new HbaseColCouple(family, qualifier); 11 | this.value = value; 12 | } 13 | public String getFamily() { 14 | return colCouple.getFamily(); 15 | } 16 | public void setFamily(String family) { 17 | colCouple.setFamily(family); 18 | } 19 | public String getQualifier() { 20 | return colCouple.getQualifier(); 21 | } 22 | public void setQualifier(String qualifier) { 23 | colCouple.setQualifier(qualifier); 24 | } 25 | public String getValue() { 26 | return value; 27 | } 28 | public void setValue(String value) { 29 | this.value = value; 30 | } 31 | public HbaseColCouple getColCouple() { 32 | return colCouple; 33 | } 34 | public void setColCouple(HbaseColCouple colCouple) { 35 | this.colCouple = colCouple; 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/model/HbaseColCouple.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service.model; 2 | 3 | public class HbaseColCouple { 4 | private String family; 5 | private String qualifier; 6 | 7 | public HbaseColCouple(String family) { 8 | super(); 9 | this.family = family; 10 | this.qualifier = ""; 11 | } 12 | public HbaseColCouple(String family, String qualifier) { 13 | super(); 14 | this.family = family; 15 | this.qualifier = qualifier; 16 | } 17 | public String getFamily() { 18 | return family; 19 | } 20 | public void setFamily(String family) { 21 | this.family = family; 22 | } 23 | public String getQualifier() { 24 | return qualifier; 25 | } 26 | public void setQualifier(String qualifier) { 27 | this.qualifier = qualifier; 28 | } 29 | @Override 
30 | public int hashCode() { 31 | final int prime = 31; 32 | int result = 1; 33 | result = prime * result + ((family == null) ? 0 : family.hashCode()); 34 | result = prime * result 35 | + ((qualifier == null) ? 0 : qualifier.hashCode()); 36 | return result; 37 | } 38 | @Override 39 | public boolean equals(Object obj) { 40 | if (this == obj) 41 | return true; 42 | if (obj == null) 43 | return false; 44 | if (getClass() != obj.getClass()) 45 | return false; 46 | HbaseColCouple other = (HbaseColCouple) obj; 47 | if (family == null) { 48 | if (other.family != null) 49 | return false; 50 | } else if (!family.equals(other.family)) 51 | return false; 52 | if (qualifier == null) { 53 | if (other.qualifier != null) 54 | return false; 55 | } else if (!qualifier.equals(other.qualifier)) 56 | return false; 57 | return true; 58 | } 59 | 60 | 61 | } 62 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/model/HbaseConditonModel.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service.model; 2 | 3 | import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; 4 | 5 | public class HbaseConditonModel { 6 | private String field; 7 | private String value; 8 | private CompareOp op; 9 | 10 | public HbaseConditonModel(String field, String value, CompareOp op) { 11 | super(); 12 | this.field = field; 13 | this.value = value; 14 | this.op = op; 15 | } 16 | public String getField() { 17 | return field; 18 | } 19 | public void setField(String field) { 20 | this.field = field; 21 | } 22 | public String getValue() { 23 | return value; 24 | } 25 | public void setValue(String value) { 26 | this.value = value; 27 | } 28 | public CompareOp getOp() { 29 | return op; 30 | } 31 | public void setOp(CompareOp op) { 32 | this.op = op; 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- 
/RestoreData/src/main/java/software/hbase/service/model/HbaseRow.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service.model; 2 | 3 | import software.hbase.util.StringUtil; 4 | 5 | import java.util.ArrayList; 6 | import java.util.List; 7 | 8 | public class HbaseRow { 9 | private String rowKey; 10 | private List cells; 11 | 12 | public HbaseRow(String rowKey) { 13 | super(); 14 | this.rowKey = rowKey; 15 | } 16 | public String getRowKey() { 17 | return rowKey; 18 | } 19 | public void setRowKey(String rowKey) { 20 | this.rowKey = rowKey; 21 | } 22 | public List getCells() { 23 | return cells==null?new ArrayList():cells; 24 | } 25 | public void setCells(List cells) { 26 | this.cells = cells; 27 | } 28 | public void addCell(HbaseCell cell) { 29 | if(cells==null){ 30 | cells = new ArrayList(); 31 | } 32 | //cell不为null,cell值不为null 33 | if(cell!=null&&StringUtil.isNotBlank(cell.getValue())){ 34 | cells.add(cell); 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/service/model/HbaseServiceConditonModel.java: -------------------------------------------------------------------------------- 1 | package software.hbase.service.model; 2 | 3 | import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; 4 | 5 | public class HbaseServiceConditonModel { 6 | private String family; 7 | private String col; 8 | private String value; 9 | private CompareOp op; 10 | 11 | public HbaseServiceConditonModel(String family, String col, String value, 12 | CompareOp op) { 13 | super(); 14 | this.family = family; 15 | this.col = col; 16 | this.value = value; 17 | this.op = op; 18 | } 19 | public String getFamily() { 20 | return family; 21 | } 22 | public void setFamily(String family) { 23 | this.family = family; 24 | } 25 | public String getCol() { 26 | return col; 27 | } 28 | public void setCol(String col) { 29 | this.col = col; 
30 | } 31 | public String getValue() { 32 | return value; 33 | } 34 | public void setValue(String value) { 35 | this.value = value; 36 | } 37 | public CompareOp getOp() { 38 | return op; 39 | } 40 | public void setOp(CompareOp op) { 41 | this.op = op; 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/util/DateUtil.java: -------------------------------------------------------------------------------- 1 | /** 2 | * created by 2010-7-2 3 | */ 4 | package software.hbase.util; 5 | 6 | import java.text.SimpleDateFormat; 7 | import java.util.Calendar; 8 | import java.util.Date; 9 | import java.util.StringTokenizer; 10 | 11 | /** 12 | * Date utility class 13 | * 14 | */ 15 | public class DateUtil { 16 | /** 17 | * Get yesterday's date, formatted yyyyMMdd 18 | * @return 19 | */ 20 | public static String getYesterday(){ 21 | Calendar cal=Calendar.getInstance(); 22 | cal.add(Calendar.DATE,-1); 23 | Date time=cal.getTime(); 24 | return (new SimpleDateFormat("yyyyMMdd").format(time)); 25 | } 26 | public static String getShortDate(String date){ 27 | StringTokenizer st = new StringTokenizer(date, "-"); 28 | String shortDate = ""; 29 | while(st.hasMoreTokens()){ 30 | shortDate +=st.nextToken(); 31 | } 32 | return shortDate; 33 | } 34 | 35 | 36 | } -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/util/ParseLogsUtil.java: -------------------------------------------------------------------------------- 1 | package software.hbase.util; 2 | 3 | import software.hbase.hbase.dataObject.LogData; 4 | 5 | import java.io.BufferedReader; 6 | import java.io.FileReader; 7 | import java.text.SimpleDateFormat; 8 | import java.util.*; 9 | 10 | public class ParseLogsUtil { 11 | 12 | public static List parseLogsUtil(String fileName) throws Exception { 13 | List logList = new ArrayList<>(); 14 | 15 | FileReader reader = new FileReader(fileName); 16 | BufferedReader br = new 
BufferedReader(reader); 17 | String str = null; 18 | int iplength = 0; 19 | String ip = null; 20 | int timebegin = 0; 21 | int timeend = 0; 22 | String date = null; 23 | Date dates; 24 | String method = null; 25 | int methodend = 0; 26 | String url = null; 27 | int urlend = 0; 28 | int statebegin = 0; 29 | String state = null; 30 | int stateend = 0; 31 | String bytes = null; 32 | String sql = null; 33 | String fydm = "120100 210"; 34 | int id = 100000; 35 | Date d = new Date(); 36 | System.out.println(d); 37 | while ((str = br.readLine()) != null) { 38 | // 每行一条记录 分析记录写入数据库 39 | //ip 40 | // str = br.readLine(); 41 | try { 42 | iplength = str.indexOf("-"); 43 | ip = str.substring(0, iplength - 1); 44 | // System.out.println("ip:"+ip); 45 | //time 46 | timebegin = str.indexOf("["); 47 | timeend = str.indexOf("+0800]"); 48 | date = str.substring(timebegin + 1, timeend - 1); 49 | SimpleDateFormat sf = new SimpleDateFormat("dd/MMM/yyyy:hh:mm:ss", Locale.ENGLISH); 50 | dates = sf.parse(date); 51 | // System.out.println("date:"+dates); 52 | //method 53 | str = str.substring(timeend + 8); 54 | System.out.println(str); 55 | methodend = str.indexOf("/"); 56 | method = str.substring(0, methodend - 1); 57 | System.out.println(method); 58 | urlend = str.indexOf("HTTP/1.1"); 59 | int urlend2 = str.indexOf("HTTP/1.0"); 60 | int urlend1 = str.indexOf("HTTP/1.1"); 61 | urlend = urlend2 > urlend1 ? 
urlend2 : urlend1; 62 | url = str.substring(methodend + 1, urlend); 63 | url = url.trim(); 64 | System.out.println(url); 65 | if (url.contains("tjspxt")) { 66 | if (url.length() == 6) { 67 | continue; 68 | } 69 | url = url.substring(7); 70 | } 71 | if (url.contains("jsessionid=")) { 72 | url = url.substring(0, url.indexOf("jsessionid=") - 1); 73 | } 74 | if (url.contains("resources")) { 75 | //包含资源请求的过滤掉 76 | continue; 77 | } else if (url.equals("") || url == "") { 78 | //过滤空白url 79 | url = "login.do"; 80 | // //continue; 81 | } else if (url.indexOf("?") > 0) { 82 | url = url.substring(0, url.indexOf("?")); 83 | } 84 | //过滤部分url 85 | if (url.equals("gjShow.aj") || url.equals("getQxtxCount.aj") || url.equals("qnsjajCount.aj") 86 | || url.equals("wjajCount.aj") || url.equals("csxajCount.aj") || url.equals("getLctxCountWithoutIgnored.aj") 87 | || url.equals("favicon.ico") || url.equals("connectDetection.aj")) { 88 | continue; 89 | } 90 | // System.out.println(url); 91 | str = str.substring(urlend + 1); 92 | statebegin = str.indexOf("HTTP/1.1"); 93 | str = str.substring(statebegin + 10); 94 | // System.out.println(str); 95 | stateend = str.indexOf(" "); 96 | state = str.substring(0, stateend); 97 | // System.out.println(state); 98 | bytes = str.substring(stateend + 1); 99 | // System.out.println(bytes); 100 | 101 | //再转换为sql.Date对象 102 | java.sql.Timestamp d2 = new java.sql.Timestamp(dates.getTime()); 103 | 104 | //赋值给logData 105 | LogData logData = new LogData(); 106 | logData.setLogId(getRowKeyPart1(fileName) + id); 107 | logData.setFydm(fydm); 108 | logData.setState(state); 109 | logData.setIp(ip); 110 | logData.setURL(url); 111 | logData.setBytes(bytes); 112 | logData.setMethods(method); 113 | logData.setDates(d2.toString()); 114 | logList.add(logData); 115 | //sql = "insert into ACCESSLOG values('"+fydm+"','"+d2+"',"+id+",'"+ip+"','"+method+"','"+url+"','"+state+"','"+bytes+"')"; 116 | id++; 117 | }catch(Exception e){ 118 | e.printStackTrace(); 119 | } 120 | } 121 
| Date d1 = new Date(); 122 | System.out.println(d1); 123 | br.close(); 124 | reader.close(); 125 | return logList; 126 | } 127 | 128 | public static String getRowKeyPart1(String fileName){ 129 | StringTokenizer st = new StringTokenizer(fileName, "/"); 130 | String realFileName = ""; 131 | while(st.hasMoreTokens()){ 132 | realFileName = st.nextToken(); 133 | } 134 | String fydm = realFileName.substring(0, 3); 135 | return (getShortDate(realFileName.substring(25, 35)) + fydm); 136 | } 137 | 138 | public static String getShortDate(String date){ 139 | 140 | StringTokenizer st = new StringTokenizer(date, "-"); 141 | String shortDate = ""; 142 | while(st.hasMoreTokens()){ 143 | shortDate +=st.nextToken(); 144 | } 145 | return shortDate; 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /RestoreData/src/main/java/software/hbase/util/PropertiesUtil.java: -------------------------------------------------------------------------------- 1 | package software.hbase.util; 2 | import org.springframework.core.io.ClassPathResource; 3 | 4 | import java.io.IOException; 5 | import java.util.Properties; 6 | 7 | /** 8 | * 配置文件(*.properties)工具类 9 | * 10 | */ 11 | public class PropertiesUtil { 12 | 13 | public static boolean setProperty(String key, String value) { 14 | ClassPathResource resource = new ClassPathResource("dsr.properties"); 15 | Properties pros = new Properties(); 16 | try { 17 | pros.load(resource.getInputStream()); 18 | } catch (IOException e) { 19 | e.printStackTrace(); 20 | String errorMessage = "dsr.properties文件不存在!"; 21 | System.out.println(errorMessage); 22 | return false; 23 | } 24 | pros.setProperty(key, value); 25 | String tempValue = pros.getProperty(key); 26 | if(value.equals(tempValue)) 27 | return true; 28 | else 29 | return false; 30 | } 31 | 32 | /** 33 | * 获取值 34 | * @return 35 | */ 36 | public static String getProperty(String propertyName){ 37 | ClassPathResource resource = new 
ClassPathResource("dsr.properties"); 38 | Properties pros = new Properties(); 39 | try { 40 | pros.load(resource.getInputStream()); 41 | } catch (IOException e) { 42 | e.printStackTrace(); 43 | String errorMessage = "dsr.properties文件不存在!"; 44 | System.out.println(errorMessage); 45 | } 46 | String propertyValue = pros.getProperty(propertyName); 47 | return propertyValue; 48 | } 49 | 50 | /** 51 | * 获取Hbase地址 52 | * @return 53 | */ 54 | public static String getHbaseIp(){ 55 | return getProperty("hbaseIp"); 56 | } 57 | 58 | /** 59 | * 获取Hbase文件夹 60 | * @return 61 | */ 62 | public static String getHbaseDir(){ 63 | return getProperty("hbaseDir"); 64 | } 65 | 66 | /** 67 | * 获取Zookeeper地址 68 | * @return 69 | */ 70 | public static String getZookeeperIp(){ 71 | return getProperty("zookeeperIp"); 72 | } 73 | 74 | /** 75 | * 获取Zookeeper端口 76 | * @return 77 | */ 78 | public static String getZookeeperPort(){ 79 | return getProperty("zookeeperPort"); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /RestoreData/src/main/resources/2200_localhost_access_log.2017-08-08.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/resources/2200_localhost_access_log.2017-08-08.txt -------------------------------------------------------------------------------- /RestoreData/src/main/resources/applicationContext-service.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | application/json;charset=UTF-8 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /RestoreData/src/main/resources/applicationContext.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 13 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 28 | 29 | 30 | 31 | 34 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /RestoreData/src/main/resources/applicationContextDataSource.xml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/resources/applicationContextDataSource.xml -------------------------------------------------------------------------------- /RestoreData/src/main/resources/dsr.properties: -------------------------------------------------------------------------------- 1 | hbaseIp=hdfs://localhost:60000 2 | hbaseDir=hdfs://localhost:9000//hbase 3 | zookeeperIp=localhost 4 | zookeeperPort=2181 5 | 6 | #hbaseIp=hdfs://120.24.238.195:60000 7 | #hbaseDir=hdfs://120.24.238.195:9000 8 | #zookeeperIp=120.24.238.195 9 | #zookeeperPort=2181 -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/WEB-INF/views/dataGrid.jsp: -------------------------------------------------------------------------------- 1 | <%@ page import="com.sun.org.apache.xpath.internal.operations.Div" %> 2 | <%@ page language="java" contentType="text/html; charset=UTF-8" 3 | pageEncoding="UTF-8"%> 4 | 5 | 6 | 7 | 网站服务器日志分析 8 | 9 | 13 | 14 | 15 |
16 | 17 | 36 |
37 |
38 |
39 | 42 |
43 |
44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/WEB-INF/views/index.jsp: -------------------------------------------------------------------------------- 1 | <%@ page import="com.sun.org.apache.xpath.internal.operations.Div" %> 2 | <%@ page language="java" contentType="text/html; charset=UTF-8" 3 | pageEncoding="UTF-8"%> 4 | 5 | 6 | 7 | 网站服务器日志分析 8 | 9 | 13 | 14 | 15 |
16 | 17 | 36 |
37 |

网站服务器日志数据统计分析

38 | 39 |
40 | 41 |
42 |
43 |
44 | 45 | 46 | 47 |
48 |
49 | 50 |
51 |
52 |
53 | 54 | 55 | 56 |
57 |
58 | 59 |
60 |
61 |
62 | 63 | 64 | 65 |
66 |
67 | 68 |
69 |
70 |
71 | 72 | 73 | 74 |
75 |
76 | 77 |
78 |
79 |
80 | 81 | 82 | 83 |
84 |
85 | 86 |
87 |
88 |
89 | 90 | 91 | 92 |
93 |
94 | 95 |
96 |
97 |
98 | 99 | 100 | 101 |
102 |
103 | 104 |
105 |
106 |
107 | 108 | 109 | 110 |
111 |
112 |
113 |
114 |
115 | 118 |
119 |
120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/WEB-INF/views/queryData.jsp: -------------------------------------------------------------------------------- 1 | <%@ page import="com.sun.org.apache.xpath.internal.operations.Div" %> 2 | <%@ page language="java" contentType="text/html; charset=UTF-8" 3 | pageEncoding="UTF-8"%> 4 | 5 | 6 | 7 | 天津法院网站服务器日志分析 8 | 9 | 13 | 14 | 15 |
16 | 17 | 36 |
37 |

网站服务器日志数据统计分析

38 | 39 |
40 |
41 |
42 |
43 | 44 | 45 |
46 |

起始日期

47 |
48 | 49 |
50 |
51 | 79 |
80 |

法院

81 |
82 |
83 | 84 |
85 |
86 |
87 |
88 | 89 | 90 |
91 | 92 |
93 |
94 |
95 | 96 | 97 | 98 |
99 |
100 | 101 |
102 |
103 |
104 | 105 | 106 | 107 |
108 |
109 | 110 |
111 |
112 |
113 | 114 | 115 | 116 |
117 |
118 | 119 |
120 |
121 |
122 | 123 | 124 | 125 |
126 |
127 | 128 |
129 |
130 |
131 | 132 | 133 | 134 |
135 |
136 | 137 |
138 |
139 |
140 | 141 | 142 | 143 |
144 |
145 | 146 |
147 |
148 |
149 | 150 | 151 | 152 |
153 |
154 | 155 |
156 |
157 |
158 | 159 | 160 | 161 |
162 |
163 |
164 |
165 |
166 | 169 |
170 |
171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/WEB-INF/views/test.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Title 6 | 7 | 8 | 17 | 18 | -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/WEB-INF/web.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 24 | 28 | 29 | 30 | /WEB-INF/views/index.jsp 31 | 32 | 33 | 34 | contextConfigLocation 35 | classpath:applicationContext-*.xml 36 | 37 | 38 | org.springframework.web.context.ContextLoaderListener 39 | 40 | 41 | 42 | spring_mvc 43 | org.springframework.web.servlet.DispatcherServlet 44 | 45 | contextConfigLocation 46 | classpath:applicationContext.xml 47 | 48 | 49 | 50 | spring_mvc 51 | *.action 52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/css/bootstrap-select.min.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Bootstrap-select v1.10.0 (http://silviomoreto.github.io/bootstrap-select) 3 | * 4 | * Copyright 2013-2016 bootstrap-select 5 | * Licensed under MIT (https://github.com/silviomoreto/bootstrap-select/blob/master/LICENSE) 6 | */select.bs-select-hidden,select.selectpicker{display:none!important}.bootstrap-select{width:220px\9}.bootstrap-select>.dropdown-toggle{width:100%;padding-right:25px;z-index:1}.bootstrap-select>select{position:absolute!important;bottom:0;left:50%;display:block!important;width:.5px!important;height:100%!important;padding:0!important;opacity:0!important;border:none}.bootstrap-select>select.mobile-device{top:0;left:0;display:block!important;width:100%!important;z-index:2}.error .bootstrap-select .dropdown-toggle,.has-error .bootstrap-select .dropdown-toggle{border-color:#b94a48}.bootstrap-select.fit-width{width:auto!important}.bootstrap-select:not([class*=col-]):not([class*=form-control]):not(.input-group-btn){width:220px}.bootstrap-select .dropdown-toggle:focus{outline:thin dotted #333!important;outline:5px auto -webkit-focus-ring-color!important;outline-offset:-2px}.bootstrap-select.form-control{margin-bottom:0;padding:0;border:none}.bootstrap-select.form-control:not([class*=col-]){width:100%}.bootstrap-select.form-control.input-group-btn{z-index:auto}.bootstrap-select.btn-group:not(.input-group-btn),.bootstrap-select.btn-group[class*=col-]{float:none;display:inline-block;margin-left:0}.bootstrap-select.btn-group.dropdown-menu-right,.bootstrap-select.btn-group[class*=col-].dropdown-menu-right,.row .bootstrap-select.btn-group[class*=col-].dropdown-menu-right{float:right}.form-group .bootstrap-select.btn-group,.form-horizontal .bootstrap-select.btn-group,.form-inline .bootstrap-select.btn-group{margin-bottom:0}.form-group-lg .bootstrap-select.btn-group.form-control,.form-group-sm .bootstrap-select.btn-group.form-control{padding:0}.form-inline .bootstrap-select.btn-group 
.form-control{width:100%}.bootstrap-select.btn-group.disabled,.bootstrap-select.btn-group>.disabled{cursor:not-allowed}.bootstrap-select.btn-group.disabled:focus,.bootstrap-select.btn-group>.disabled:focus{outline:0!important}.bootstrap-select.btn-group.bs-container{position:absolute}.bootstrap-select.btn-group.bs-container .dropdown-menu{z-index:1060}.bootstrap-select.btn-group .dropdown-toggle .filter-option{display:inline-block;overflow:hidden;width:100%;text-align:left}.bootstrap-select.btn-group .dropdown-toggle .caret{position:absolute;top:50%;right:12px;margin-top:-2px;vertical-align:middle}.bootstrap-select.btn-group[class*=col-] .dropdown-toggle{width:100%}.bootstrap-select.btn-group .dropdown-menu{min-width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bootstrap-select.btn-group .dropdown-menu.inner{position:static;float:none;border:0;padding:0;margin:0;border-radius:0;-webkit-box-shadow:none;box-shadow:none}.bootstrap-select.btn-group .dropdown-menu li{position:relative}.bootstrap-select.btn-group .dropdown-menu li.active small{color:#fff}.bootstrap-select.btn-group .dropdown-menu li.disabled a{cursor:not-allowed}.bootstrap-select.btn-group .dropdown-menu li a{cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.bootstrap-select.btn-group .dropdown-menu li a.opt{position:relative;padding-left:2.25em}.bootstrap-select.btn-group .dropdown-menu li a span.check-mark{display:none}.bootstrap-select.btn-group .dropdown-menu li a span.text{display:inline-block}.bootstrap-select.btn-group .dropdown-menu li small{padding-left:.5em}.bootstrap-select.btn-group .dropdown-menu .notify{position:absolute;bottom:5px;width:96%;margin:0 2%;min-height:26px;padding:3px 5px;background:#f5f5f5;border:1px solid #e3e3e3;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px 
rgba(0,0,0,.05);pointer-events:none;opacity:.9;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bootstrap-select.btn-group .no-results{padding:3px;background:#f5f5f5;margin:0 5px;white-space:nowrap}.bootstrap-select.btn-group.fit-width .dropdown-toggle .filter-option{position:static}.bootstrap-select.btn-group.fit-width .dropdown-toggle .caret{position:static;top:auto;margin-top:-1px}.bootstrap-select.btn-group.show-tick .dropdown-menu li.selected a span.check-mark{position:absolute;display:inline-block;right:15px;margin-top:5px}.bootstrap-select.btn-group.show-tick .dropdown-menu li a span.text{margin-right:34px}.bootstrap-select.show-menu-arrow.open>.dropdown-toggle{z-index:1061}.bootstrap-select.show-menu-arrow .dropdown-toggle:before{content:'';border-left:7px solid transparent;border-right:7px solid transparent;border-bottom:7px solid rgba(204,204,204,.2);position:absolute;bottom:-4px;left:9px;display:none}.bootstrap-select.show-menu-arrow .dropdown-toggle:after{content:'';border-left:6px solid transparent;border-right:6px solid transparent;border-bottom:6px solid #fff;position:absolute;bottom:-4px;left:10px;display:none}.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle:before{bottom:auto;top:-3px;border-top:7px solid rgba(204,204,204,.2);border-bottom:0}.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle:after{bottom:auto;top:-3px;border-top:6px solid #fff;border-bottom:0}.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle:before{right:12px;left:auto}.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle:after{right:13px;left:auto}.bootstrap-select.show-menu-arrow.open>.dropdown-toggle:after,.bootstrap-select.show-menu-arrow.open>.dropdown-toggle:before{display:block}.bs-actionsbox,.bs-donebutton,.bs-searchbox{padding:4px 8px}.bs-actionsbox{width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bs-actionsbox .btn-group 
button{width:50%}.bs-donebutton{float:left;width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bs-donebutton .btn-group button{width:100%}.bs-searchbox+.bs-actionsbox{padding:0 8px 4px}.bs-searchbox .form-control{margin-bottom:0;width:100%;float:none} -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/css/bootstrapDatepickr-1.0.0.css: -------------------------------------------------------------------------------- 1 | .bootstrapDatepickr-cal{ 2 | z-index: 1000; 3 | position:absolute; 4 | width: 100%; 5 | } 6 | 7 | .bootstrapDatepickr-today{ 8 | background-color: #f0ad4e; 9 | border-color: #eea236; 10 | color: #fff; 11 | cursor: pointer; 12 | } 13 | 14 | .bootstrapDatepickr-day{ 15 | cursor: pointer; 16 | } 17 | 18 | .bootstrapDatepickr-days{ 19 | font-weight: 700; 20 | } 21 | 22 | .bootstrapDatepickr-next-month, .bootstrapDatepickr-prev-month{ 23 | cursor: pointer; 24 | } 25 | 26 | .bootstrapDatepickr-selected_date{ 27 | color: #fff; 28 | background-color: #337ab7; 29 | border-color: #2e6da4; 30 | } 31 | 32 | .bootstrapDatepickr-next-month:hover, .bootstrapDatepickr-prev-month:hover, .bootstrapDatepickr-day:hover{ 33 | background-color: #f5f5f5; 34 | } 35 | 36 | .bootstrapDatepickr-hidden{ 37 | display: none; 38 | } 39 | 40 | #bootstrapDatepickr-table{ 41 | background-color: #fff; 42 | text-align: center; 43 | } 44 | 45 | .bootstrapDatepickr-header th{ 46 | text-align: center; 47 | font-weight: 700; 48 | } -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/css/bootstrapDatepickr-1.0.0.min.css: -------------------------------------------------------------------------------- 1 | 
.bootstrapDatepickr-day,.bootstrapDatepickr-next-month,.bootstrapDatepickr-prev-month{cursor:pointer}.bootstrapDatepickr-cal{z-index:1000;position:absolute;width:100%}.bootstrapDatepickr-today{background-color:#f0ad4e;border-color:#eea236;color:#fff;cursor:pointer}.bootstrapDatepickr-days{font-weight:700}.bootstrapDatepickr-selected_date{color:#fff;background-color:#337ab7;border-color:#2e6da4}.bootstrapDatepickr-day:hover,.bootstrapDatepickr-next-month:hover,.bootstrapDatepickr-prev-month:hover{background-color:#f5f5f5}.bootstrapDatepickr-hidden{display:none}#bootstrapDatepickr-table{background-color:#fff;text-align:center}.bootstrapDatepickr-header th{text-align:center;font-weight:700} -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/css/myStyle.css: -------------------------------------------------------------------------------- 1 | .mg-top-20{ 2 | margin-top: 20px; 3 | } 4 | .mg-top-30{ 5 | margin-top: 30px; 6 | } 7 | .wrapper{ 8 | background-color: #404a59; 9 | margin: 50px 0px; 10 | } 11 | .accssTime{ 12 | height: 300px; 13 | } 14 | .seqChart{ 15 | height: 600px; 16 | } 17 | .barChart{ 18 | height: 500px; 19 | } 20 | .h-title{ 21 | margin-top: 10px; 22 | text-align: center; 23 | color: #fff; 24 | } 25 | .p-footer{ 26 | width: 100%; 27 | height: 50px; 28 | padding: 15px; 29 | text-align: center; 30 | color: #999; 31 | background-color: #fff; 32 | font-size: 16px; 33 | } 34 | .dateContainer{ 35 | margin: 10px auto; 36 | background-color: #404a59; 37 | } 38 | 39 | .date-warning{ 40 | color: red; 41 | } -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/FontAwesome.otf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/FontAwesome.otf 
-------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.eot: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.eot -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.ttf -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.woff -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/RestoreData/src/main/webapp/assets/fonts/glyphicons-halflings-regular.woff2 -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/js/bootstrapDatepickr-1.0.0.min.js: -------------------------------------------------------------------------------- 1 | !function(t,e,r,a){"use strict";function n(e,r){this.element=e,this.options=t.extend({},o,r),this._defaults=o,this._name=i,this.settings={date_object:new Date,selected_day:"",current_month:(new Date).getMonth()+1,current_year:(new 
Date).getFullYear(),selected_date:"",allowed_formats:["d","do","D","j","l","w","F","m","M","n","U","y","Y"],month_name:["January","February","March","April","May","June","July","August","September","October","November","December"],date_formatting:{weekdays:{shorthand:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],longhand:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"]},months:{shorthand:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],longhand:["January","February","March","April","May","June","July","August","September","October","November","December"]}}},this.init()}var i="bootstrapDatepickr",o={date_format:"d/m/Y"};t.extend(n.prototype,{init:function(){var e=this,r=e.element;t(r).parent().hasClass("input-group")&&t(r).prev().addClass("addonclick_id-"+t(r).attr("id")),this.bindEvents(),this.init_cal(e,r,e.settings.current_month,e.settings.current_year)},bindEvents:function(){var e=this,r=e.element;t(r).on("click",e,this.wrapper_clicked),t(".addonclick_id-"+t(r).attr("id")).on("click",e,this.addon_clicked)},init_cal:function(e,r,a,n){var i=e.settings,o='
';o+='',o+='',o+='',o+='",o+='',o+='',o+="
«'+i.month_name[a-1]+" "+n+"»
",t(r).after(o),this.generate_calendar(e,i.current_month,i.current_year)},addon_clicked:function(e){var r=e.data;t(".cal_id-"+t(r.element).attr("id")).toggleClass("bootstrapDatepickr-hidden")},wrapper_clicked:function(e){function r(){var e=new Date(i.settings.current_year,i.settings.current_month-1,i.settings.selected_day);i.settings.date_object=e;var r=i.options.date_format,o=new Date(e.getTime()),d={d:function(){var t=d.j();return 10>t?"0"+t:t},"do":function(){var t=d.j();return a(t)},D:function(){return i.settings.date_formatting.weekdays.shorthand[d.w()]},j:function(){return o.getDate()},l:function(){return i.settings.date_formatting.weekdays.longhand[d.w()]},w:function(){return o.getDay()},F:function(){return n(d.n()-1,!1)},m:function(){var t=d.n();return 10>t?"0"+t:t},M:function(){return n(d.n()-1,!0)},n:function(){return o.getMonth()+1},U:function(){return o.getTime()/1e3},y:function(){return String(d.Y()).substring(2)},Y:function(){return o.getFullYear()}},s="",c="";return r.indexOf("/")>-1?(s=r.split("/"),c="/"):r.indexOf("-")>-1?(s=r.split("-"),c="-"):(s=r.split(" "),c=" "),t.each(s,function(t,e){e=e.replace(/[\W_]+/g,"").trim(),i.settings.allowed_formats.indexOf(e)>-1&&(r=r.replace(e,d[e]))}),r}function a(t){var e=["th","st","nd","rd"],r=t%100;return t+(e[(r-20)%10]||e[r]||e[0])}function n(t,e){return e===!0?i.settings.date_formatting.months.shorthand[t]:i.settings.date_formatting.months.longhand[t]}var i=e.data,o=i.settings;t("body").unbind().on("click",".day_id-"+t(this).attr("id"),function(){if(" "!=t(this).html()){o.selected_day=t(this).html();var 
e=r();o.selected_date=e,t("#"+t(i.element).attr("id")).val(e),t(".cal_id-"+t(i.element).attr("id")).addClass("bootstrapDatepickr-hidden")}}),t("body").on("click",".next_id-"+t(this).attr("id"),function(){12===o.current_month?(o.current_year++,o.current_month=1):o.current_month++,i.generate_calendar(i,o.current_month,o.current_year),t(".month_id-"+t(i.element).attr("id")).html(o.month_name[o.current_month-1]+" - "+o.current_year)}),t("body").on("click",".prev_id-"+t(this).attr("id"),function(){1==o.current_month?(o.current_year=Number(o.current_year)-1,o.current_month=12):o.current_month=Number(o.current_month)-1,i.generate_calendar(i,o.current_month,o.current_year),t(".month_id-"+t(i.element).attr("id")).html(o.month_name[o.current_month-1]+" - "+o.current_year)}),t(".cal_id-"+t(this).attr("id")).toggleClass("bootstrapDatepickr-hidden"),i.highlight_selected_day(i)},highlight_selected_day:function(e){""!=t(e.element).val()&&""!=e.settings.selected_date&&e.settings.current_year==e.settings.date_object.getFullYear()&&e.settings.current_month==e.settings.date_object.getMonth()+1&&t(".day_id-"+t(e.element).attr("id")).each(function(r){e.settings.selected_day==t(this).text()?t(this).addClass("bootstrapDatepickr-selected_date"):t(this).removeClass("bootstrapDatepickr-selected_date")})},generate_calendar:function(e,r,a){var n=e.element,i=[31,0,31,30,31,30,31,31,30,31,30,31],o=new Date,d=new Date(a,r-1,1);d.od=d.getDay()+1;var s=a==o.getFullYear()&&r==o.getMonth()+1?o.getDate():0;i[1]=d.getFullYear()%100!=0&&d.getFullYear()%4==0||d.getFullYear()%400==0?29:28,t(".row_id-"+t(n).attr("id")).remove();for(var c='',l=0;7>l;l++)c+=''+"SMTWTFS".substr(l,1)+"";c+='';for(var u=1;42>=u;u++){var 
h="",_=u-d.od>=0&&u-d.od'+_+"",u%7==0&&36>u&&(c+='')}c+="",t("#bootstrapDatepickr-body-"+t(n).attr("id")).append(c),t(".row_id-"+t(n).attr("id")).each(function(){""===t(this).text().trim()&&t(this).remove()}),t(n).parent().hasClass("input-group")?(t(".cal_id-"+t(n).attr("id")).detach().insertAfter(t(n).parent()),t(".cal_id-"+t(n).attr("id")).css("left",t(n).parent().position().left),t(".cal_id-"+t(n).attr("id")).css("width",t(n).parent().outerWidth())):(t(".cal_id-"+t(n).attr("id")).css("left",t(n).position().left),t(".cal_id-"+t(n).attr("id")).css("width",t(n).outerWidth())),t(".cal_id-"+t(n).attr("id")).css("top",t(n).position().top+t(n).outerHeight()),this.highlight_selected_day(e)}}),t.fn[i]=function(e){return this.each(function(){t.data(this,"plugin_"+i)||t.data(this,"plugin_"+i,new n(this,e))})}}(jQuery,window,document); -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/js/index.js: -------------------------------------------------------------------------------- 1 | "use strict" 2 | const backColor = '#404a59'; 3 | $(document).ready(function () { 4 | let accessChart = echarts.init(document.getElementById("accessChart")); 5 | let totalFlow = echarts.init(document.getElementById("totalFlow")); 6 | let totalIP = echarts.init(document.getElementById("totalIP")); 7 | let IPRank= echarts.init(document.getElementById("IPRank")); 8 | let URLRank= echarts.init(document.getElementById("URLRank")); 9 | let PieChart = echarts.init(document.getElementById("PieChart")); 10 | let SeqChart = echarts.init(document.getElementById("flowSeq")); 11 | let state = []; 12 | let method = []; 13 | // $.ajax({ 14 | // url: "/queryAllData.action", 15 | // type:"get", 16 | // dataType: "json", 17 | // success: function(data){ 18 | // console.log("success"); 19 | // drawNumChart(accessChart, data.ReqSum, "总访问次数(/次)"); 20 | // drawNumChart(totalFlow, data.TotalBytes, "总流量(/MB)"); 21 | // drawNumChart(totalIP, 
data.iptotalNum, "IP总数"); 22 | // let IPList = convertJsonToArray(data.IPList); 23 | // let URLList = convertJsonToArray(data.URLList); 24 | // state= convertJsonToArray(data.stateList); 25 | // method = convertJsonToArray(data.methodList); 26 | // console.log("IPlist:" + IPList.length); 27 | // drawRankChart(IPRank, IPList, "IP访问排行", color1); 28 | // drawRankChart(URLRank, URLList, "URL访问排行", color2); 29 | // let methodName = method.map(function (methodItem) { 30 | // return methodItem[0]; 31 | // }); 32 | // let methodData = method.map(function (methodDataItem) { 33 | // return {name: methodDataItem[0], value: methodDataItem[1]} 34 | // }); 35 | // let stateName = state.map(function (methodItem) { 36 | // return methodItem[0]; 37 | // }); 38 | // let stateData = state.map(function (methodDataItem) { 39 | // return {name: methodDataItem[0], value: methodDataItem[1]} 40 | // }); 41 | // drawPieChart(PieChart,methodName, methodData, stateName, stateData); 42 | // 43 | // let BytesSec = convertJsonToArray(data.BytesSecList); 44 | // let ReqSec = convertJsonToArray(data.ReqSecList); 45 | // drawSeqChart(SeqChart, BytesSec, ReqSec); 46 | // } 47 | // }); 48 | let timeData = [ '2009/6/12 2:00', '2009/6/12 3:00']; 49 | timeData = timeData.map(function (str) { 50 | return str.replace('2009/', ''); 51 | }); 52 | let BarData = [220, 182, 191, 234, 290, 330, 310, 123, 442, 321, 90, 149, 210, 122, 133, 334, 198, 123, 125, 220]; 53 | let rankData = []; 54 | let color1 = ['red', 'orange', 'yellow', 'green', 'cyan', 'yellow', 'green', 'blue', 'purple', 'red', 'orange', 'cyan', 'blue', 'purple']; 55 | let color2 = ['green', 'blue', 'purple', ' red', 'orange', 'green', 'red', 'cyan', 'yellow', 'green', 'blue', 'purple', 'red', 'purple']; 56 | let data = 0; 57 | 58 | drawNumChart(accessChart, '43101', "总次数(/次)"); 59 | drawNumChart(totalFlow, '563', "总流量( /MB)"); 60 | drawNumChart(totalIP, '241', "IP总数"); 61 | drawSeqChart(SeqChart, [], []); 62 | drawRankChart(IPRank, rankData, 
"URL访问排行(TOP 10)", color1); 63 | drawRankChart(IPRank, rankData, "IP访问排行(TOP 10)", color2); 64 | drawCourtRank("CourtRank", data); 65 | drawPieChart("PieChart","", "", "", ""); 66 | }); 67 | -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/js/inputCheck.js: -------------------------------------------------------------------------------- 1 | "use strict" 2 | $(document).ready(function () { 3 | $("#calendarBegin").blur(function () { 4 | checkData(); 5 | }) 6 | $("#courtSelect").change(function () { 7 | //alert("court:" + $("#courtSelect").val()); 8 | checkData(); 9 | }) 10 | }); 11 | function checkData(){ 12 | let courtData = $("#courtSelect").val(); 13 | let dateBegin = $("#calendarBegin").val(); 14 | // alert("data:" + courtData); 15 | if(dateBegin !== "" && courtData != ""){ 16 | //alert("court:" + $("#courtSelect").val()); 17 | $("#conditionQuery").removeAttr("disabled"); 18 | } 19 | else{ 20 | $("#conditionQuery").attr("disabled", true); 21 | } 22 | } -------------------------------------------------------------------------------- /RestoreData/src/main/webapp/assets/js/queryData.js: -------------------------------------------------------------------------------- 1 | "use strict" 2 | const backColor = '#404a59'; 3 | $(document).ready(function () { 4 | 5 | //初始化 echarts 6 | let accessChart = echarts.init(document.getElementById("accessChartQuery")); 7 | let totalFlow = echarts.init(document.getElementById("totalFlowQuery")); 8 | let totalIP = echarts.init(document.getElementById("totalIPQuery")); 9 | let IPRank= echarts.init(document.getElementById("IPRankQuery")); 10 | let URLRank= echarts.init(document.getElementById("URLRankQuery")); 11 | let PieChart = echarts.init(document.getElementById("PieChartQuery")); 12 | let SeqChart = echarts.init(document.getElementById("flowSeqQuery")); 13 | let BarChart = echarts.init(document.getElementById("flowBarQuery")); 14 | let state = []; 15 | let method = 
[]; 16 | $("#conditionQuery").click(function () { 17 | let courtData = $("#courtSelect").val(); 18 | let dateBegin = $("#calendarBegin").val(); 19 | //构造Json格式的数据 20 | let jsonData = "{\"dateBegin\":" + "\""+dateBegin +"\"" + ",\"courtList\":" + JSON.stringify(courtData) + "}"; 21 | // $.ajax({ 22 | // url: "/conditionQuery.action", 23 | // type:"post", 24 | // dataType: "json", 25 | // data: jsonData, 26 | // success: function(data){ 27 | // console.log("success"); 28 | // drawNumChart(accessChart, data.ReqSum, "总访问次数(/次)"); 29 | // drawNumChart(totalFlow, data.TotalBytes, "总流量(/MB)"); 30 | // drawNumChart(totalIP, data.iptotalNum, "IP总数"); 31 | // let IPList = convertJsonToArray(data.IPList); 32 | // let URLList = convertJsonToArray(data.URLList); 33 | // state= convertJsonToArray(data.stateList); 34 | // method = convertJsonToArray(data.methodList); 35 | // console.log("IPlist:" + IPList.length); 36 | // drawRankChart(IPRank, IPList, "IP访问排行", color1); 37 | // drawRankChart(URLRank, URLList, "URL访问排行", color2); 38 | // let methodName = method.map(function (methodItem) { 39 | // return methodItem[0]; 40 | // }); 41 | // let methodData = method.map(function (methodDataItem) { 42 | // return {name: methodDataItem[0], value: methodDataItem[1]} 43 | // }); 44 | // let stateName = state.map(function (methodItem) { 45 | // return methodItem[0]; 46 | // }); 47 | // let stateData = state.map(function (methodDataItem) { 48 | // return {name: methodDataItem[0], value: methodDataItem[1]} 49 | // }); 50 | // drawPieChart(PieChart,methodName, methodData, stateName, stateData); 51 | // 52 | // let BytesSec = convertJsonToArray(data.BytesSecList); 53 | // let ReqSec = convertJsonToArray(data.ReqSecList); 54 | // drawSeqChart(SeqChart, BytesSec, ReqSec); 55 | // } 56 | // }); 57 | }); 58 | let timeData = [ '2009/6/12 2:00', '2009/6/12 3:00']; 59 | timeData = timeData.map(function (str) { 60 | return str.replace('2009/', ''); 61 | }); 62 | let BarData = [220, 182, 191, 234, 290, 
330, 310, 123, 442, 321, 90, 149, 210, 122, 133, 334, 198, 123, 125, 220]; 63 | let rankData = []; 64 | let color1 = ['red', 'orange', 'yellow', 'green', 'cyan', 'yellow', 'green', 'blue', 'purple', 'red', 'orange', 'cyan', 'blue', 'purple']; 65 | let color2 = ['green', 'blue', 'purple', ' red', 'orange', 'green', 'red', 'cyan', 'yellow', 'green', 'blue', 'purple', 'red', 'purple']; 66 | let data = 0; 67 | 68 | //画默认状态图 69 | drawNumChart(accessChart, data, "总次数(/次)"); 70 | drawNumChart(totalFlow, data, "总流量( /MB)"); 71 | drawNumChart(totalIP, data, "IP总数"); 72 | drawSeqChart(SeqChart, [], []); 73 | drawRankChart(URLRank, rankData, "URL访问排行(TOP 10)", color1); 74 | drawRankChart(IPRank, rankData, "IP访问排行(TOP 10)", color2); 75 | drawPieChart(PieChart,"", "", "", ""); 76 | drawBarChart(BarChart, BarData); 77 | }); -------------------------------------------------------------------------------- /RestoreData/src/test/java/HbaseTest/HbaseBatchInsertTest.java: -------------------------------------------------------------------------------- 1 | package HbaseTest; 2 | 3 | import org.springframework.context.ApplicationContext; 4 | import org.springframework.context.support.ClassPathXmlApplicationContext; 5 | import software.hbase.hbase.dao.LogDataDao; 6 | import software.hbase.hbase.dataObject.LogData; 7 | import software.hbase.util.ParseLogsUtil; 8 | 9 | import java.util.Date; 10 | import java.util.List; 11 | 12 | public class HbaseBatchInsertTest { 13 | public static void main(String args[]) throws Exception{ 14 | ApplicationContext ac = new ClassPathXmlApplicationContext("classpath:applicationContext.xml"); 15 | LogDataDao logDataDao = (LogDataDao) ac.getBean("logDataDao"); 16 | System.out.println("begin to parse...."); 17 | List list = ParseLogsUtil.parseLogsUtil("/Users/maicius/code/WebLogsAnalysisSystem/RestoreData/src/main/resources/200_localhost_access_log.2017-08-23.txt"); 18 | System.out.println("parse end...\nbegin to insert"); 19 | Date a = new Date(); 20 | 
logDataDao.saveBatch(list); 21 | Date b = new Date(); 22 | System.out.println("插入历时:" + (b.getTime() - a.getTime()) + "毫秒"); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /RestoreData/src/test/java/HbaseTest/HbaseConnectionTest.java: -------------------------------------------------------------------------------- 1 | package HbaseTest; 2 | 3 | import org.springframework.context.ApplicationContext; 4 | import org.springframework.context.support.ClassPathXmlApplicationContext; 5 | import software.hbase.service.HBaseService; 6 | 7 | public class HbaseConnectionTest { 8 | 9 | public static void main(String args[]) { 10 | ApplicationContext ac = new ClassPathXmlApplicationContext("classpath:applicationContext.xml"); 11 | HBaseService hbaseService = (HBaseService) ac.getBean("HBaseService"); 12 | try { 13 | System.out.println("show tables"); 14 | hbaseService.showTables(); 15 | }catch(Exception e){ 16 | e.printStackTrace(); 17 | } 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /RestoreData/src/test/java/HbaseTest/HbaseInsertTest.java: -------------------------------------------------------------------------------- 1 | package HbaseTest; 2 | 3 | import org.springframework.context.ApplicationContext; 4 | import org.springframework.context.support.ClassPathXmlApplicationContext; 5 | import software.hbase.hbase.dao.LogDataDao; 6 | import software.hbase.hbase.dataObject.LogData; 7 | import software.hbase.service.HBaseService; 8 | 9 | public class HbaseInsertTest { 10 | public static void main(String args[]) { 11 | ApplicationContext ac = new ClassPathXmlApplicationContext("classpath:applicationContext.xml"); 12 | HBaseService hbaseService = (HBaseService) ac.getBean("HBaseService"); 13 | LogDataDao logDataDao = (LogDataDao) ac.getBean("logDataDao"); 14 | try { 15 | LogData logData = new LogData(); 16 | logData.setLogId("TJGYTEST"); 17 | logData.setBytes("8555"); 18 
| logData.setDates("2017-08-09"); 19 | logData.setFydm("TEST"); 20 | logData.setIp("192.168.1.1"); 21 | logData.setMethods("GET"); 22 | logData.setURL("/login.action"); 23 | logData.setState("200"); 24 | logDataDao.save(logData); 25 | System.out.println("插入成功"); 26 | }catch(Exception e){ 27 | e.printStackTrace(); 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /RestoreData/src/test/java/HbaseTest/ListBean.java: -------------------------------------------------------------------------------- 1 | package HbaseTest; 2 | 3 | import org.springframework.context.ApplicationContext; 4 | import org.springframework.context.support.ClassPathXmlApplicationContext; 5 | 6 | public class ListBean { 7 | public static void main(String args[]){ 8 | ApplicationContext ctx = new ClassPathXmlApplicationContext("classpath:applicationContext.xml"); 9 | String[] beanNames = ctx.getBeanDefinitionNames(); 10 | int allBeansCount = ctx.getBeanDefinitionCount(); 11 | System.out.println("所有beans的数量是:" + allBeansCount); 12 | for (String beanName : beanNames) { 13 | Class beanType = ctx.getType(beanName); 14 | Package beanPackage = beanType.getPackage(); 15 | //Object bean = ctx.getBean(beanName); 16 | System.out.println("BeanName:" + beanName); 17 | System.out.println("Bean的类型:" + beanType); 18 | System.out.println("Bean所在的包:" + beanPackage); 19 | System.out.println("\r\n"); 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /RestoreData/src/test/java/HbaseTest/ParseLogTest.java: -------------------------------------------------------------------------------- 1 | package HbaseTest; 2 | 3 | import software.hbase.hbase.dataObject.LogData; 4 | import software.hbase.util.ParseLogsUtil; 5 | 6 | import java.util.Date; 7 | import java.util.List; 8 | 9 | public class ParseLogTest { 10 | public static void main(String args[]) throws Exception{ 11 | testGetRowKeyPart1(); 12 | testParseLog(); 
13 | } 14 | 15 | public static void testGetRowKeyPart1() throws Exception{ 16 | System.out.println(ParseLogsUtil.getRowKeyPart1("/Users/maicius/RestoreData/src/main/resources/200_localhost_access_log.2017-08-23.txt")); 17 | } 18 | public static void testParseLog() throws Exception{ 19 | Date begin = new Date(); 20 | List list = ParseLogsUtil.parseLogsUtil("/Users/maicius/RestoreData/src/main/resources/200_localhost_access_log.2017-08-23.txt"); 21 | for(LogData logData: list){ 22 | System.out.println("fydm: " + logData.getFydm() + "\t" +"date:"+logData.getDates()+"\t" + 23 | "id:" + logData.getLogId() + "\t" + "ip" + logData.getIp() + "\t" + 24 | "URL:" + logData.getURL() + "\t" + "method" + logData.getMethods() + "\t" + 25 | "bytes:" + logData.getBytes() + "\t" + "" + "state:" + logData.getState()); 26 | } 27 | Date end = new Date(); 28 | System.out.println("Total Parse Lines:" + list.size()); 29 | System.out.println("Total Time:" + (end.getTime() - begin.getTime()) + "mill sec"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /RestoreData/src/test/resources/applicationContext.xml: -------------------------------------------------------------------------------- 1 | 2 | 13 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 28 | 29 | 30 | 31 | 34 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | ScalaReadAndWrite 4 | ScalaReadAndWrite 5 | 1.0-SNAPSHOT 6 | 2008 7 | 8 | 2.11.11 9 | 2.11 10 | 2.1.1 11 | 1.3.0 12 | 13 | 14 | 15 | 16 | 17 | scala-tools.org 18 | Scala-Tools Maven2 Repository 19 | http://scala-tools.org/repo-releases 20 | 21 | 22 | 23 | 24 | 25 | scala-tools.org 26 | Scala-Tools Maven2 Repository 27 | http://scala-tools.org/repo-releases 28 | 29 | 30 | 31 | 32 | 33 | org.scala-lang 34 | scala-library 35 | ${scala.version} 
36 | 37 | 38 | junit 39 | junit 40 | 4.4 41 | test 42 | 43 | 44 | org.specs 45 | specs 46 | 1.2.5 47 | test 48 | 49 | 50 | 51 | 52 | org.apache.spark 53 | spark-sql_2.11 54 | ${spark.version} 55 | 56 | 57 | 58 | 59 | org.apache.spark 60 | spark-streaming_2.11 61 | ${spark.version} 62 | provided 63 | 64 | 65 | 66 | org.apache.hbase 67 | hbase-client 68 | ${hbase.version} 69 | 70 | 71 | 72 | org.apache.hbase 73 | hbase-server 74 | ${hbase.version} 75 | 76 | 77 | 78 | org.apache.hbase 79 | hbase-common 80 | ${hbase.version} 81 | 82 | 83 | 84 | 85 | org.apache.spark 86 | spark-core_2.11 87 | ${spark.version} 88 | 89 | 90 | 91 | 92 | net.minidev 93 | json-smart 94 | 1.3.1 95 | 96 | 97 | 98 | 99 | 100 | src/main/scala 101 | src/test/scala 102 | 103 | 104 | org.scala-tools 105 | maven-scala-plugin 106 | 107 | 108 | 109 | compile 110 | testCompile 111 | 112 | 113 | 114 | 115 | ${scala.version} 116 | 117 | -target:jvm-1.5 118 | 119 | 120 | 121 | 122 | org.apache.maven.plugins 123 | maven-eclipse-plugin 124 | 125 | true 126 | 127 | ch.epfl.lamp.sdt.core.scalabuilder 128 | 129 | 130 | ch.epfl.lamp.sdt.core.scalanature 131 | 132 | 133 | org.eclipse.jdt.launching.JRE_CONTAINER 134 | ch.epfl.lamp.sdt.launching.SCALA_CONTAINER 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | org.scala-tools 144 | maven-scala-plugin 145 | 146 | ${scala.version} 147 | 148 | 149 | 150 | 151 | 152 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/java/software/analysis/nju/constant/ConfigurationManager.java: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.constant; 2 | 3 | import org.apache.log4j.Logger; 4 | 5 | import java.io.IOException; 6 | import java.io.InputStream; 7 | import java.util.Properties; 8 | 9 | public class ConfigurationManager { 10 | // 配置属性 11 | private static Properties properties = new Properties(); 12 | private static final Logger logger = 
Logger.getLogger(ConfigurationManager.class); 13 | 14 | static { 15 | InputStream in = ConfigurationManager.class.getClassLoader().getResourceAsStream("my.properties"); 16 | try { 17 | properties.load(in); 18 | } catch (IOException e) { 19 | logger.error(e.getStackTrace()); 20 | e.printStackTrace(); 21 | } 22 | } 23 | 24 | /** 25 | * 获取关键字对应的配置项 26 | * 27 | * @param key 28 | * @return 29 | */ 30 | private static synchronized String getProperty(String key) { 31 | try { 32 | return properties.getProperty(key); 33 | } catch (Exception e) { 34 | logger.error(e.getMessage()); 35 | e.printStackTrace(); 36 | } 37 | 38 | return null; 39 | } 40 | 41 | 42 | /** 43 | * 获取String配置项 44 | * 45 | * @param key 46 | * @return 47 | */ 48 | public static synchronized String getString(String key) { 49 | return getProperty(key); 50 | } 51 | 52 | /** 53 | * 获取Integer型配置项 54 | * 55 | * @param key 56 | * @return 57 | */ 58 | public static synchronized Integer getInteger(String key) { 59 | String value = getProperty(key); 60 | try { 61 | return Integer.valueOf(value); 62 | } catch (Exception e) { 63 | logger.error(e.getStackTrace()); 64 | e.printStackTrace(); 65 | } 66 | 67 | return 0; 68 | } 69 | 70 | /** 71 | * 获取Boolean型配置项 72 | * 73 | * @param key 74 | * @return 75 | */ 76 | public static synchronized Boolean getBoolean(String key) { 77 | String value = getProperty(key); 78 | try { 79 | return Boolean.valueOf(value); 80 | } catch (Exception e) { 81 | logger.error(e.getStackTrace()); 82 | e.printStackTrace(); 83 | } 84 | 85 | return false; 86 | } 87 | 88 | /** 89 | * 获取Long型配置项 90 | * 91 | * @param key 92 | * @return 93 | */ 94 | public static synchronized Long getLong(String key) { 95 | String value = getProperty(key); 96 | try { 97 | return Long.valueOf(value); 98 | } catch (Exception e) { 99 | logger.error(e.getStackTrace()); 100 | e.printStackTrace(); 101 | } 102 | 103 | return 0L; 104 | } 105 | 106 | /** 107 | * 获取Double型配置项 108 | * 109 | * @param key 110 | * @return 111 | */ 112 | 
public static synchronized Double getDouble(String key) { 113 | String value = getProperty(key); 114 | try { 115 | return Double.valueOf(value); 116 | } catch (Exception e) { 117 | logger.error(e.getStackTrace()); 118 | e.printStackTrace(); 119 | } 120 | 121 | return 0.0D; 122 | } 123 | } 124 | 125 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/java/software/analysis/nju/constant/SparkProperties.java: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.constant; 2 | 3 | public interface SparkProperties { 4 | String SPARK_MASTER = "local[2]"; 5 | String SPARK_APP_NAME = "TomcatLogAnalysis"; 6 | 7 | //Table Log Data 8 | byte[] LOG_CF = "prop".getBytes(); 9 | String LOG_TABLE_NAME = "LogData"; 10 | byte[] DATE= "DATES".getBytes(); 11 | byte[] STATE = "STATE".getBytes(); 12 | byte[] METHOD = "METHOD".getBytes(); 13 | byte[] BYTES = "BYTES".getBytes(); 14 | byte[] IP = "IP".getBytes(); 15 | 16 | //Table Log Analysis 17 | String ANA_TABLE_NAME = "LogAna"; 18 | byte[] IP_CF = "IP".getBytes(); 19 | byte[] BYTES_CF = "BYTES".getBytes(); 20 | byte[] URL_CF = "URL".getBytes(); 21 | byte[] REQ_CF = "REQ".getBytes(); 22 | byte[] ME_STATE_CF = "METHOD_STATE".getBytes(); 23 | byte[] ReqSum = "ReqSum".getBytes(); 24 | byte[] IPList = "IPList".getBytes(); 25 | byte[] IPSumVal = "IPSumVal".getBytes(); 26 | byte[] IPTotalNum = "IPTotalNum".getBytes(); 27 | byte[] TotalBytes = "TotalBytes".getBytes(); 28 | byte[] BytesHourList = "BytesHourList".getBytes(); 29 | byte[] BytesSecList = "BytesSecList".getBytes(); 30 | byte[] MaxURL = "MaxURL".getBytes(); 31 | byte[] URLList = "URLList".getBytes(); 32 | byte[] MethodList = "MethodList".getBytes(); 33 | byte[] StateList = "StateList".getBytes(); 34 | byte[] ReqHourList = "ReqHourList".getBytes(); 35 | byte[] RegSecList = "ReqSecList".getBytes(); 36 | 37 | } 38 | 
-------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/resources/db.properties.properties: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Maicius/WebLogsAnalysisSystem/18a7bada53784b5f09dfa8b278795b3f8dbc63e7/ScalaReadAndWrite/src/main/resources/db.properties.properties -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/ScalaReadAndWrite/App.scala: -------------------------------------------------------------------------------- 1 | //package ScalaReadAndWrite 2 | // 3 | ///** 4 | // * Hello world! 5 | // * 6 | // */ 7 | //object App extends Application { 8 | // println( "Hello World!" ) 9 | //} 10 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/AllCourtAccumulator/AddIPRankItem.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator.AllCourtAccumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | 5 | import scala.collection.mutable 6 | 7 | class AddIPRankItem extends AccumulatorV2[List[(String, Int)], mutable.Map[String, Int]] { 8 | private var iPRankIMap: mutable.Map[String, Int] = mutable.Map() 9 | override def isZero: Boolean = iPRankIMap.isEmpty 10 | 11 | override def copy(): AccumulatorV2[List[(String, Int)], mutable.Map[String, Int]] = AddIPRankItem.this 12 | 13 | override def reset(): Unit = iPRankIMap.clear() 14 | 15 | override def add(v: List[(String, Int)]): Unit = { 16 | for(item <- v){ 17 | if(iPRankIMap.contains(item._1)){ 18 | iPRankIMap.update(item._1, iPRankIMap(item._1) + item._2) 19 | }else{ 20 | iPRankIMap. 
+=(item._1 ->item._2) 21 | } 22 | } 23 | } 24 | 25 | override def merge(other: AccumulatorV2[List[(String, Int)], mutable.Map[String, Int]]): Unit = { 26 | this.value ++=other.value 27 | } 28 | 29 | override def value: mutable.Map[String, Int] = iPRankIMap 30 | } 31 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/AllCourtAccumulator/AddReqHourItem.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator.AllCourtAccumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | 5 | import scala.collection.mutable 6 | 7 | class AddReqHourItem extends AccumulatorV2[mutable.Map[String, Int], mutable.Map[String, Int]]{ 8 | private var map: mutable.Map[String, Int] =mutable.Map() 9 | override def isZero: Boolean = map.isEmpty 10 | 11 | override def copy(): AccumulatorV2[mutable.Map[String, Int], mutable.Map[String, Int]] = AddReqHourItem.this 12 | 13 | override def reset(): Unit = map.clear() 14 | 15 | override def add(v: mutable.Map[String, Int]): Unit = { 16 | 17 | } 18 | 19 | override def merge(other: AccumulatorV2[mutable.Map[String, Int], mutable.Map[String, Int]]): Unit = { 20 | this.map ++=map 21 | } 22 | 23 | override def value: mutable.Map[String, Int] = map 24 | } 25 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/AllDataAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.Entity.Entity.{DateResult, CourtResult} 5 | 6 | import scala.collection.mutable 7 | 8 | class AllDataAccumulator extends AccumulatorV2[CourtResult, mutable.Map[String, CourtResult]]{ 9 | val resultMap: mutable.Map[String, CourtResult] = 
mutable.Map() 10 | override def isZero: Boolean = resultMap.isEmpty 11 | 12 | override def copy(): AccumulatorV2[CourtResult, mutable.Map[String, CourtResult]] = AllDataAccumulator.this 13 | 14 | override def reset(): Unit = resultMap.clear() 15 | 16 | override def add(v: CourtResult): Unit = { 17 | resultMap +=(v.rowK -> v) 18 | } 19 | 20 | override def merge(other: AccumulatorV2[CourtResult, mutable.Map[String, CourtResult]]): Unit = { 21 | this.value ++=other.value 22 | } 23 | 24 | override def value: mutable.Map[String, CourtResult] = resultMap 25 | } 26 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/ByteHourAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | class ByteHourAccumulator extends AccumulatorV2[(String, Long), mutable.Map[String, Long]]{ 9 | private var ByteHourMap:mutable.Map[String, Long] = mutable.Map() 10 | 11 | override def isZero: Boolean = { 12 | ByteHourMap.isEmpty 13 | } 14 | 15 | override def copy(): AccumulatorV2[(String, Long), mutable.Map[String, Long]] = { 16 | ByteHourAccumulator.this 17 | } 18 | override def reset(): Unit = { 19 | ByteHourMap.clear() 20 | } 21 | //以时间为单位计算bytes 22 | override def add(v: (String, Long)): Unit = { 23 | val hour = v._1.slice(11, 13) 24 | //println(hour) 25 | if(ByteHourMap.contains(hour)){ 26 | ByteHourMap.update(hour, ByteHourMap(hour) + v._2) 27 | }else{ 28 | ByteHourMap +=(hour -> v._2) 29 | } 30 | } 31 | 32 | override def merge(other: AccumulatorV2[(String, Long), mutable.Map[String, Long]]): Unit = { 33 | this.value ++=other.value 34 | } 35 | override def value: mutable.Map[String, Long] = { 36 | ByteHourMap 37 | } 38 | 39 | override def toString(): String 
= { 40 | ParseMapToJson.map2Json2(ByteHourMap) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/ByteSecAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | /** 9 | * 以秒为单位统计流量 10 | * 因为一个Accumulator只能传出一个值,所以这个累加器不能与ByteAccumulator合并 11 | * 12 | */ 13 | class ByteSecAccumulator extends AccumulatorV2[(String, Long), mutable.Map[String, Long]]{ 14 | private var BytesSecMap: mutable.Map[String, Long] = mutable.Map() 15 | override def isZero: Boolean = { 16 | BytesSecMap.isEmpty 17 | } 18 | 19 | override def copy(): AccumulatorV2[(String, Long), mutable.Map[String, Long]] = { 20 | ByteSecAccumulator.this 21 | } 22 | 23 | override def reset(): Unit = { 24 | BytesSecMap.clear() 25 | } 26 | 27 | override def add(v: (String, Long)): Unit = { 28 | print("byteSec" + v) 29 | val sec = v._1.slice(0, 19) 30 | if(BytesSecMap.contains(sec)){ 31 | BytesSecMap.update(sec, BytesSecMap(sec) + v._2) 32 | }else{ 33 | BytesSecMap +=(sec -> v._2) 34 | } 35 | } 36 | 37 | override def merge(other: AccumulatorV2[(String, Long), mutable.Map[String, Long]]): Unit = { 38 | this.value ++= other.value 39 | } 40 | 41 | override def value: mutable.Map[String, Long] = { 42 | BytesSecMap 43 | } 44 | override def toString(): String = { 45 | ParseMapToJson.map2Json2(BytesSecMap) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/DateResultAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import 
org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.Entity.Entity.DateResult 5 | 6 | import scala.collection.mutable 7 | 8 | class DateResultAccumulator extends AccumulatorV2[DateResult, mutable.Map[String, DateResult]]{ 9 | val resultMap: mutable.Map[String, DateResult] = mutable.Map() 10 | override def isZero: Boolean = resultMap.isEmpty 11 | 12 | override def copy(): AccumulatorV2[DateResult, mutable.Map[String, DateResult]] = DateResultAccumulator.this 13 | 14 | override def reset(): Unit = resultMap.clear() 15 | 16 | override def add(v: DateResult): Unit = { 17 | resultMap +=(v.rowK -> v) 18 | } 19 | 20 | override def merge(other: AccumulatorV2[DateResult, mutable.Map[String, DateResult]]): Unit = { 21 | this.value ++=other.value 22 | } 23 | 24 | override def value: mutable.Map[String, DateResult] = resultMap 25 | } 26 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/IPMapAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.{AccumulatorContext, AccumulatorMetadata, AccumulatorV2} 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | class IPMapAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]]{ 9 | 10 | private var IPMap: mutable.Map[String, Int] = mutable.Map() 11 | 12 | override def isZero: Boolean = { 13 | IPMap.isEmpty 14 | } 15 | 16 | override def copy(): AccumulatorV2[String, mutable.Map[String ,Int]] = { 17 | val accumulatorV2 = new IPMapAccumulator() 18 | var iPMap: mutable.Map[String, Int] = mutable.Map() 19 | for((x, y) <- this.IPMap){ 20 | iPMap +=(x -> y) 21 | } 22 | accumulatorV2.IPMap = iPMap 23 | accumulatorV2 24 | } 25 | 26 | override def reset(): Unit = { 27 | IPMap.clear() 28 | } 29 | 30 | override def value: mutable.Map[String, Int] = 
{ 31 | IPMap 32 | } 33 | 34 | override def add(v: String): Unit = { 35 | if (IPMap.contains(v)){ 36 | IPMap.update(v,IPMap(v)+1) 37 | }else{ 38 | IPMap +=(v->1) 39 | } 40 | } 41 | 42 | override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit = { 43 | this.value ++= other.value 44 | } 45 | override def copyAndReset(): AccumulatorV2[String, mutable.Map[String, Int]] = super.copyAndReset() 46 | 47 | override def toString(): String = { 48 | ParseMapToJson.map2Json(IPMap) 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/MethodAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | class MethodAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]]{ 9 | private var methodMap: mutable.Map[String, Int] = mutable.Map() 10 | 11 | override def isZero: Boolean = methodMap.isEmpty 12 | 13 | override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = MethodAccumulator.this 14 | 15 | override def reset(): Unit = methodMap.clear() 16 | 17 | override def add(v: String): Unit = { 18 | if(methodMap.contains(v)){ 19 | methodMap.update(v, methodMap(v) + 1) 20 | }else{ 21 | methodMap += (v -> 1) 22 | } 23 | } 24 | 25 | override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit ={ 26 | this.value ++= other.value 27 | } 28 | 29 | override def value: mutable.Map[String, Int] = methodMap 30 | 31 | override def toString(): String = { 32 | ParseMapToJson.map2Json(methodMap) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- 
/ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/RequestHourAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | class RequestHourAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]]{ 9 | private val reHourMap: mutable.Map[String, Int] = mutable.Map() 10 | override def isZero: Boolean = reHourMap.isEmpty 11 | 12 | override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = RequestHourAccumulator.this 13 | 14 | override def reset(): Unit = reHourMap.clear() 15 | 16 | override def add(v: String): Unit = { 17 | val hour = v.slice(11, 13) 18 | if(reHourMap.contains(hour)){ 19 | reHourMap.update(hour, reHourMap(hour) + 1) 20 | }else{ 21 | reHourMap +=(hour -> 1) 22 | } 23 | } 24 | 25 | override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit = { 26 | this.value ++=other.value 27 | } 28 | 29 | override def value: mutable.Map[String, Int] = reHourMap 30 | 31 | override def toString(): String = { 32 | ParseMapToJson.map2Json(reHourMap); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/RequestSecAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | class RequestSecAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]]{ 9 | private var RequestSecMap: mutable.Map[String, Int] = mutable.Map() 10 | override def isZero: Boolean = { 11 | RequestSecMap.isEmpty 12 | } 13 | 14 | override def copy(): 
AccumulatorV2[String, mutable.Map[String, Int]] = { 15 | RequestSecAccumulator.this 16 | } 17 | 18 | override def reset(): Unit = { 19 | RequestSecMap.clear() 20 | } 21 | 22 | override def add(v: String): Unit = { 23 | val sec = v.slice(0, 19) 24 | if(RequestSecMap.contains(sec)){ 25 | RequestSecMap.update(sec, RequestSecMap(sec) + 1) 26 | }else{ 27 | RequestSecMap +=(sec -> 1) 28 | } 29 | } 30 | 31 | override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit = { 32 | this.value ++=other.value 33 | } 34 | 35 | override def value: mutable.Map[String, Int] = { 36 | RequestSecMap 37 | } 38 | 39 | override def toString(): String = { 40 | ParseMapToJson.map2Json(RequestSecMap) 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/StateAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | class StateAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]]{ 9 | private val stateMap: mutable.Map[String, Int] = mutable.Map() 10 | override def isZero: Boolean = stateMap.isEmpty 11 | 12 | override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = StateAccumulator.this 13 | 14 | override def reset(): Unit = stateMap.clear() 15 | 16 | override def add(v: String): Unit = { 17 | if(stateMap.contains(v)){ 18 | stateMap.update(v, stateMap(v) + 1) 19 | }else{ 20 | stateMap +=(v ->1) 21 | } 22 | } 23 | 24 | override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit = { 25 | this.value ++=other.value 26 | } 27 | 28 | override def value: mutable.Map[String, Int] = { 29 | stateMap 30 | } 31 | 32 | override def toString(): String = { 33 | 
ParseMapToJson.map2Json(stateMap) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Accumulator/URLAccumulator.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Accumulator 2 | 3 | import org.apache.spark.util.AccumulatorV2 4 | import software.analysis.nju.util.ParseMapToJson 5 | 6 | import scala.collection.mutable 7 | 8 | class URLAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]]{ 9 | private var URLMap: mutable.Map[String, Int] = mutable.Map() 10 | override def isZero: Boolean = { 11 | URLMap.isEmpty 12 | } 13 | 14 | override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = { 15 | URLAccumulator.this 16 | } 17 | 18 | override def reset(): Unit = { 19 | URLMap.clear() 20 | } 21 | 22 | override def add(v: String): Unit = { 23 | if(URLMap.contains(v)){ 24 | URLMap.update(v, URLMap(v) + 1) 25 | } 26 | else{ 27 | URLMap +=(v -> 1) 28 | } 29 | } 30 | 31 | override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit = { 32 | this.value ++=other.value 33 | } 34 | 35 | override def value: mutable.Map[String, Int] = { 36 | URLMap 37 | } 38 | 39 | override def toString(): String = { 40 | ParseMapToJson.map2Json(URLMap) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/DAO/ParseObjectToPut.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.DAO 2 | 3 | import org.apache.hadoop.hbase.client.Put 4 | import org.apache.hadoop.hbase.io.ImmutableBytesWritable 5 | import org.apache.hadoop.hbase.util.Bytes 6 | import software.analysis.nju.Entity.Entity.DateResult 7 | import software.analysis.nju.constant.SparkProperties 8 | 9 | object ParseObjectToPut { 10 | def 
putDateResult(dateResult: DateResult): (ImmutableBytesWritable, Put) = { 11 | val put = new Put(Bytes.toBytes(dateResult.rowK)) 12 | // add to column family data, column data values to put object 13 | put.add(SparkProperties.BYTES_CF, SparkProperties.BytesHourList, Bytes.toBytes(dateResult.BytesHourList)) 14 | put.add(SparkProperties.BYTES_CF, SparkProperties.BytesSecList, Bytes.toBytes(dateResult.BytesSecList)) 15 | put.add(SparkProperties.BYTES_CF, SparkProperties.TotalBytes, Bytes.toBytes(dateResult.TotalBytes)) 16 | put.add(SparkProperties.IP_CF, SparkProperties.IPList, Bytes.toBytes(dateResult.IPList)) 17 | put.add(SparkProperties.IP_CF, SparkProperties.IPSumVal, Bytes.toBytes(dateResult.IPSumVal)) 18 | put.add(SparkProperties.IP_CF, SparkProperties.IPTotalNum, Bytes.toBytes(dateResult.IPTotalNum)) 19 | put.add(SparkProperties.REQ_CF, SparkProperties.ReqSum, Bytes.toBytes(dateResult.reqSum)) 20 | put.add(SparkProperties.REQ_CF, SparkProperties.ReqHourList, Bytes.toBytes(dateResult.ReqHourList)) 21 | put.add(SparkProperties.REQ_CF, SparkProperties.RegSecList, Bytes.toBytes(dateResult.ReqSecList)) 22 | put.add(SparkProperties.URL_CF, SparkProperties.MaxURL, Bytes.toBytes(dateResult.MaxURL)) 23 | put.add(SparkProperties.URL_CF, SparkProperties.URLList, Bytes.toBytes(dateResult.URLList)) 24 | put.add(SparkProperties.ME_STATE_CF, SparkProperties.StateList, Bytes.toBytes(dateResult.State)) 25 | put.add(SparkProperties.ME_STATE_CF, SparkProperties.MethodList, Bytes.toBytes(dateResult.Method)) 26 | (new ImmutableBytesWritable(Bytes.toBytes(dateResult.rowK)), put) 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /ScalaReadAndWrite/src/main/scala/software/analysis/nju/Entity/Entity.scala: -------------------------------------------------------------------------------- 1 | package software.analysis.nju.Entity 2 | 3 | import scala.collection.mutable 4 | 5 | object Entity{ 6 | 7 | //用String 代替所有的特殊类型,如map, list 8 | 
object Entity{

  // String stands in for all the richer types (maps, lists): every aggregated
  // value is stored as a JSON string so it fits in an HBase cell.

  // DateResult stores one day's fully aggregated processing result.
  // Row key = date + court code,
  // e.g. "20170808GY" is the Tianjin High Court's data for 2017-08-08.
  case class DateResult(rowK:String, reqSum: String,
                        IPList: String, IPSumVal: String, IPTotalNum: String,
                        ReqHourList: String, ReqSecList: String,
                        TotalBytes: String, BytesHourList: String, BytesSecList:String,
                        MaxURL: String, URLList: String, Method: String, State: String)

  // CourtResult temporarily stores a single court's data for one day; the
  // per-court results are summed and then converted into a DateResult.
  // NOTE: List[(String, Int)] is used instead of Map because, for an unknown
  // reason, values stored in a Map could not be read back here.
  case class CourtResult(rowK:String, reqSum: Long,
                         IPSumVal: Int, IPTotalNum: Int, IPRank: List[(String, Int)],
                         ReqHourMap: List[(String, Int)], ReqSecMap: List[(String, Int)],
                         TotalBytes: Long, BytesHourMap: List[(String, Long)],
                         BytesSecMap: List[(String, Long)],
                         MaxURL: List[(String, Int)], URLRank: List[(String, Int)],
                         MethodMap: List[(String, Int)], StateMap: List[(String, Int)])

}
object Analysis {

  // Entry point: for each court, scan yesterday's raw logs from HBase,
  // aggregate them, then compute and persist the all-courts aggregate.
  def main(args: Array[String]): Unit = {

    // Yesterday's date and each court code form the row-key regex below.
    // NOTE(review): the date is hard-coded — presumably a test stub;
    // GetDate.getYesterday looks like the production source. Confirm.
    val yesterday: String = "20170808"
    val courtList: List[String] = CourtInfo.getCourtMap.values.toList
    val sparkConf = new SparkConf().setAppName(SparkProperties.SPARK_APP_NAME).setMaster(SparkProperties.SPARK_MASTER)
    val scan: Scan= new Scan()
    val sc = new SparkContext(sparkConf)
    val conf = HBaseConfiguration.create()
    // Read raw logs from LOG_TABLE_NAME; write aggregates to ANA_TABLE_NAME.
    conf.set(TableInputFormat.INPUT_TABLE, SparkProperties.LOG_TABLE_NAME)
    conf.set(TableOutputFormat.OUTPUT_TABLE, SparkProperties.ANA_TABLE_NAME)
    val jobConfig: JobConf = new JobConf(conf, this.getClass)
    jobConfig.setOutputFormat(classOf[TableOutputFormat])
    jobConfig.set(TableOutputFormat.OUTPUT_TABLE, SparkProperties.ANA_TABLE_NAME)

    // finalMap collects per-day DateResults; resultMap collects one
    // CourtResult per court for the all-courts aggregation below.
    val finalMap = new DateResultAccumulator()
    val resultMap = new AllDataAccumulator()
    sc.register(finalMap, "finalMap")
    sc.register(resultMap, "ResultMap")
    for (court <- courtList) {
      // Row keys have the form <date><court><6 digits>; match them by regex.
      // The same Scan/conf objects are re-filtered on every iteration, so the
      // order (setFilter -> convert -> conf.set -> run) must be preserved.
      val rowRegexp = yesterday + court + "+[0-9]{6}"
      val filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(rowRegexp))
      scan.setFilter(filter)
      val scan_str= TableMapReduceUtil.convertScanToString(scan)
      conf.set(TableInputFormat.SCAN,scan_str)
      val eachResult: CourtResult = AnalysisByCourt.AnalysisByCourtAndDay(sc, conf, yesterday + court, jobConfig, finalMap)
      resultMap.add(eachResult)
    }

    // Court code "000" denotes the aggregate over all courts.
    val rowKey = yesterday + "000"
    // Aggregate all courts' data; the result is stored into finalMap.
    AnalysisAllCourt.getAllCourtData(resultMap, rowKey, finalMap)
    // Build an RDD from the collected DateResults...
    val initRdd = sc.makeRDD(finalMap.value.values.toArray)
    // ...and persist each of them to HBase.
    initRdd.map(dateResult => ParseObjectToPut.putDateResult(dateResult)).saveAsHadoopDataset(jobConfig)

    resultMap.reset()
    finalMap.reset()
    sc.stop()
  }

}
object AnalysisAllCourt {
  /**
   * Aggregates every court's per-day results into the all-courts total for
   * the day. The result is also stored in LogAna, with row key: date + "000".
   *
   * @param resultMap accumulator holding one CourtResult per court
   * @param rowKey    row key for the aggregate row (date + "000")
   * @param finalMap  accumulator the aggregate DateResult is added to
   * @return finalMap, with the aggregate DateResult added
   */
  def getAllCourtData(resultMap: AllDataAccumulator, rowKey: String, finalMap: DateResultAccumulator): DateResultAccumulator = {
    // Running totals accumulated over every court's CourtResult.
    var tmpReqSum: Long = 0
    var tmpIPList: mutable.Map[String, Int] = mutable.Map()
    var tmpIPSumVal: Int = 0
    var tmpIPTotalNum: Int = 0
    var tmpReqHourList: mutable.Map[String, Int] = mutable.Map()
    var tmpReqSecList: mutable.Map[String, Int] = mutable.Map()
    var tmpTotalBytes: Long= 0L
    var tmpBytesHourList: mutable.Map[String, Long] = mutable.Map()
    var tmpBytesSecList: mutable.Map[String, Long] = mutable.Map()
    var tmpMaxURL: mutable.Map[String, Int] = mutable.Map()
    var tmpURLList: mutable.Map[String, Int] = mutable.Map()
    var tmpStateMap: mutable.Map[String, Int] = mutable.Map()
    var tmpMethodMap: mutable.Map[String, Int] = mutable.Map()

    for(result <- resultMap.value.values){
      // IP statistics: sums and a merged per-IP count map.
      tmpIPSumVal +=result.IPSumVal
      tmpIPTotalNum +=result.IPTotalNum
      tmpIPList = MapUtil.addMapListItem(tmpIPList, result.IPRank)

      // Request counts, per hour and per second.
      tmpReqSum +=result.reqSum
      println("rowKey:" + result.rowK + " HourList:" + result.ReqHourMap.toString())
      tmpReqHourList = MapUtil.addMapListItem(tmpReqHourList, result.ReqHourMap)
      println("rowKey:" + result.rowK + " SecList:" + result.ReqSecMap.toString())
      tmpReqSecList = MapUtil.addMapListItem(tmpReqSecList, result.ReqSecMap)

      // Traffic (bytes), per hour and per second.
      tmpTotalBytes +=result.TotalBytes
      tmpBytesHourList = MapUtil.addMapListItemLong(tmpBytesHourList, result.BytesHourMap)
      tmpBytesSecList =MapUtil.addMapListItemLong(tmpBytesSecList, result.BytesSecMap)

      // URL statistics.
      tmpURLList = MapUtil.addMapListItem(tmpURLList, result.URLRank)
      tmpMaxURL = MapUtil.addMapListItem(tmpMaxURL, result.MaxURL)

      // HTTP status and method distributions.
      tmpStateMap = MapUtil.addMapListItem(tmpStateMap, result.StateMap)
      tmpMethodMap = MapUtil.addMapListItem(tmpMethodMap, result.MethodMap)
    }
    // Reduce the merged maps to their top entries and serialize as JSON.
    val IPRank10 = ParseMapToJson.map2JsonList(MapUtil.getMax10(tmpIPList))
    val maxURL = ParseMapToJson.map2JsonList(MapUtil.getMax(tmpMaxURL))
    val URLRank10 = ParseMapToJson.map2JsonList(MapUtil.getMax10(tmpURLList))
    val dateResult: DateResult = DateResult(rowKey, tmpReqSum.toString,IPRank10, tmpIPSumVal.toString, tmpIPTotalNum.toString,
      ParseMapToJson.map2Json(tmpReqHourList), ParseMapToJson.map2Json(tmpReqSecList),
      tmpTotalBytes.toString, ParseMapToJson.map2Json2(tmpBytesHourList), ParseMapToJson.map2Json2(tmpBytesSecList),
      maxURL, URLRank10, ParseMapToJson.map2Json(tmpMethodMap), ParseMapToJson.map2Json(tmpStateMap))
    finalMap.add(dateResult)
    finalMap
  }
}
object AnalysisByCourt extends Serializable {
  /**
   * Computes one court's statistics for one day from the raw log table.
   *
   * Accumulator lifecycle (important for correctness):
   * declare --> register --> compute --> reset.
   *
   * @param sc       shared SparkContext
   * @param conf     HBase configuration; TableInputFormat.SCAN must already be
   *                 set to a scan filtered to this court/day's row keys
   * @param rowKey   date + court code, e.g. "20170808200"
   * @param jobConf  output job configuration (not referenced in this body;
   *                 kept for interface compatibility with the caller)
   * @param finalMap accumulator that receives this court's DateResult
   * @return the CourtResult later used for the all-courts aggregation
   */
  def AnalysisByCourtAndDay(sc: SparkContext, conf: Configuration, rowKey:String,
                            jobConf: JobConf, finalMap: DateResultAccumulator):CourtResult = {

    // One accumulator per statistic gathered during the distributed scan.
    val IpMap = new IPMapAccumulator()
    val StateMap = new StateAccumulator()
    val URLMap = new URLAccumulator()
    val ByteMap = new ByteHourAccumulator()
    val ByteSecMap = new ByteSecAccumulator()
    val methodMap = new MethodAccumulator()
    val requestHourMap = new RequestHourAccumulator()
    val requestSecMap = new RequestSecAccumulator()

    // Accumulators must be registered before use.
    sc.register(IpMap, "IPMap")
    sc.register(StateMap, "StateMap")
    sc.register(URLMap, "URLMap")
    sc.register(ByteMap, "ByteMap")
    sc.register(ByteSecMap, "BYTESSecMap")
    sc.register(methodMap, "MethodMap")
    sc.register(requestHourMap, "RequestHourMap")
    sc.register(requestSecMap, "RequestSecMap")

    // Build the RDD over the pre-filtered HBase scan.
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])
    // The row count is the total number of requests for this court/day.
    val count = hBaseRDD.count()
    hBaseRDD.foreach { case (_, result) => {
      //println(result)
      // Pull the raw log fields out of the row's cells.
      val state = Bytes.toString(result.getValue(SparkProperties.LOG_CF, SparkProperties.STATE))
      val dates = Bytes.toString(result.getValue(SparkProperties.LOG_CF, SparkProperties.DATE))
      val method = Bytes.toString(result.getValue(SparkProperties.LOG_CF, SparkProperties.METHOD))
      val URL = Bytes.toString(result.getValue(SparkProperties.LOG_CF, SparkProperties.URL_CF))
      val bytes = Bytes.toString(result.getValue(SparkProperties.LOG_CF, SparkProperties.BYTES))
      val ip = Bytes.toString(result.getValue(SparkProperties.LOG_CF, SparkProperties.IP))

      // Request method distribution, e.g. GET, POST.
      DoMethodAnalysis.getMethodMap(method, methodMap)
      // Traffic per second.
      DoBytesAnalysis.getSecBytesData(dates,bytes, ByteSecMap)
      // Traffic per hour.
      DoBytesAnalysis.getHourBytesData(dates, bytes, ByteMap)
      // URL hit counts.
      DoURLAnalysis.getURLMap(URL,URLMap)
      // Response status distribution, e.g. 200, 404.
      DoStateAnalysis.getStateMap(state, StateMap)
      // Per-IP request counts.
      DoIPAnalysis.getIpMap(ip, IpMap)
      // Requests per hour.
      DoRequestAnalysis.getRequestHourMap(dates, requestHourMap)
      // Requests per second.
      DoRequestAnalysis.getRequestSecMap(dates, requestSecMap)
    }}
    // Derive the summary figures from the accumulated maps.
    val ipSumVal = DoIPAnalysis.getIpSumValue(IpMap)
    val ipTotalNum = DoIPAnalysis.getIpTotalNum(IpMap)
    val ipRankList = DoIPAnalysis.getIpRank10(IpMap)
    val totalBytes = DoBytesAnalysis.getByteSum(ByteMap)
    val maxURL = DoURLAnalysis.getMaxURL(URLMap)
    val URLRankList = DoURLAnalysis.getURLRank10(URLMap)
    // Build the final per-day record; every collection is stored as JSON
    // (the accumulators' toString() serializes their map as JSON).
    val dateResult = DateResult(rowKey, count.toString, ParseMapToJson.map2JsonList(ipRankList), ipSumVal.toString, ipTotalNum.toString,
      requestHourMap.toString(), requestSecMap.toString(),
      totalBytes.toString, ByteMap.toString(), ByteSecMap.toString(),
      ParseMapToJson.map2JsonList(maxURL), ParseMapToJson.map2JsonList(URLRankList),
      methodMap.toString(), StateMap.toString())

    // This object feeds the all-courts aggregation.
    // Open question (original author): when mutable.Map was used instead of
    // List, the property values could not be read back outside this method.
    val dateResult2 = CourtResult(rowKey, count, ipSumVal, ipTotalNum, ipRankList,
      requestHourMap.value.toList, requestSecMap.value.toList.take(200), totalBytes,
      ByteMap.value.toList, ByteSecMap.value.toList.take(200), maxURL,
      URLRankList, methodMap.value.toList, StateMap.value.toList)

    finalMap.add(dateResult)
    println("IPSumVal:" +rowKey+ " :"+ ipSumVal)
    println("reqSum:" + rowKey + ":" + dateResult2.reqSum)
    println("Out foreach:" + ByteMap.value)
    println("Hour Request:" + requestHourMap.value)
    println("Sec Request:" + requestSecMap.value)
    println("Out foreach:" + Bytes.toBytes(ByteSecMap.value.toString()))
    println("SecMap:" + Bytes.toString(Bytes.toBytes(ByteSecMap.value.toString())))
    println("Out foreach:" + DoBytesAnalysis.getByteSum(ByteMap))
    println("MapRank:" + DoIPAnalysis.getIpRank10(IpMap))
    println("IpSum:" + DoIPAnalysis.getIpSumValue(IpMap))
    println("IpSum:" + DoIPAnalysis.getIpTotalNum(IpMap))

    // Reset all accumulators after each analysis round so the shared
    // SparkContext can run the next court with clean state.
    IpMap.reset()
    URLMap.reset()
    ByteMap.reset()
    ByteSecMap.reset()
    methodMap.reset()
    StateMap.reset()
    requestHourMap.reset()
    requestSecMap.reset()

    // Return value.
    dateResult2
  }


}
/**
 * Traffic (bytes) analysis helpers. "Traffic" is the response body size
 * reported by each access-log record.
 */
object DoBytesAnalysis {

  /** Adds one request's byte count into the per-hour traffic accumulator. */
  def getHourBytesData(date: String, bytes: String, ByteMap: ByteHourAccumulator): ByteHourAccumulator = {
    val byte = changeToLong(bytes)
    ByteMap.add((date, byte))
    ByteMap
  }

  /** Adds one request's byte count into the per-second traffic accumulator. */
  def getSecBytesData(date: String, bytes:String, ByteSecMap: ByteSecAccumulator): ByteSecAccumulator = {
    val byte = changeToLong(bytes)
    ByteSecMap.add((date, byte))
    ByteSecMap
  }

  /** Total traffic: the sum of all per-hour byte counts. */
  def getByteSum(ByteMap: ByteHourAccumulator): Long = {
    ByteMap.value.values.sum
  }

  /**
   * Parses an access-log byte-count field.
   *
   * Tomcat writes "-" when no response body was sent; treat that — and null —
   * as 0. BUG FIX: the original called `x.toLong` directly, so any other
   * malformed value threw NumberFormatException and killed the whole Spark
   * job; it also println'ed every single record (debug leftover, removed).
   */
  def changeToLong(x: String): Long = {
    if (x == null || x == "-") 0L
    else {
      try x.trim.toLong
      catch { case _: NumberFormatException => 0L }
    }
  }

  /** Merges `map` into `tmp`, summing values for keys present in both. */
  def addBytesMap(tmp: mutable.Map[String, Long], map:mutable.Map[String, Long]): mutable.Map[String, Long] = {
    for(item <- map){
      if(tmp.contains(item._1)){
        tmp.update(item._1, tmp(item._1) + item._2)
      }
      else{
        tmp +=(item._1 -> item._2)
      }
    }
    tmp
  }
}
/** Client-IP analysis helpers over the per-IP count accumulator. */
object DoIPAnalysis {

  /** Records one request's client IP in the accumulator (count += 1). */
  def getIpMap(ip: String, IpMap: IPMapAccumulator): IPMapAccumulator = {
    IpMap.add(ip)
    IpMap
  }

  /** Top-10 IPs by request count, descending. */
  def getIpRank10(IpMap: IPMapAccumulator): List[(String, Int)]= {
    IpMap.value.toList.sortWith(_._2 > _._2).take(10)
  }

  /**
   * Total number of requests, i.e. the sum of all per-IP counts.
   * (Idiom: `.values.sum` replaces the original var + foreach loop.)
   */
  def getIpSumValue(IpMap: IPMapAccumulator): Int = {
    IpMap.value.values.sum
  }

  /**
   * Number of distinct client IPs.
   * (Fix: `.size` on the map instead of building a throwaway List
   * just to take its length.)
   */
  def getIpTotalNum(IpMap: IPMapAccumulator): Int = {
    IpMap.value.size
  }
}
/** HTTP response-status analysis helpers. */
object DoStateAnalysis {

  /**
   * Increments the per-method counter in `methodMap`
   * (key = HTTP method name, value = occurrence count) and returns the map.
   */
  def getMethod(method: String, methodMap: mutable.Map[String, Int]): mutable.Map[String, Int] = {
    methodMap.update(method, methodMap.getOrElse(method, 0) + 1)
    methodMap
  }

  /** Feeds one response status code (e.g. "200", "404") into the accumulator. */
  def getStateMap(state: String, stateAccumulator: StateAccumulator): StateAccumulator = {
    stateAccumulator.add(state)
    stateAccumulator
  }

}
/** Date helpers for building row-key prefixes (format: yyyyMMdd). */
object GetDate {

  /**
   * Yesterday's date formatted as yyyyMMdd, e.g. "20170807".
   *
   * Uses java.time instead of the legacy SimpleDateFormat/Calendar pair:
   * java.time types are immutable and thread-safe, whereas SimpleDateFormat
   * is not safe to share across threads (relevant in a Spark job).
   */
  def getYesterday:String = {
    import java.time.LocalDate
    import java.time.format.DateTimeFormatter
    LocalDate.now().minusDays(1).format(DateTimeFormatter.ofPattern("yyyyMMdd"))
  }

  /** Fixed date used by the tests and the hard-coded analysis run. */
  def getDateTest: String = "20170808"


}
/**
 * Serializes Scala maps to JSON strings (via json-smart's JSONObject).
 *
 * BUG FIX: the object previously `extends App` — App is a delayed-init
 * program entry point and must not be mixed into a plain utility object.
 */
object ParseMapToJson {

  /** Serializes a [String, Int] map as a JSON object string. */
  def map2Json(map : mutable.Map[String,Int]) : String = {
    JSONObject.toJSONString(map)
  }

  /** Serializes a [String, Long] map as a JSON object string. */
  def map2Json2(map : mutable.Map[String,Long]) : String = {
    JSONObject.toJSONString(map)
  }

  /** Serializes a list of (key, count) pairs as a JSON object string. */
  def map2JsonList(list: List[(String, Int)]): String = {
    // mutable.Map(pairs: _*) replaces the original element-by-element loop.
    map2Json(mutable.Map(list: _*))
  }
}
@Test
class AppTest {

  // Sanity check: prints yesterday's date string (yyyyMMdd).
  @Test
  def testDate() = {
    println(GetDate.getYesterday)
  }

  @Test
  def testSlice() = {
    val str = "20170808-10:10:10"
    // slice(1, 9) keeps indices 1..8, i.e. "0170808-" — exploratory check only.
    println(str.slice(1, 9))
  }

  // End-to-end run of the per-court analysis against a live HBase/Spark
  // setup for court 200 on 2017-08-08; prints the elapsed time in ms.
  // NOTE(review): integration test — requires running cluster services.
  @Test
  def testAnalysis() = {
    val sparkConf = new SparkConf().setAppName(SparkProperties.SPARK_APP_NAME).setMaster(SparkProperties.SPARK_MASTER)
    val scan: Scan= new Scan()
    // Row keys of the form <date><court><6 digits>.
    val rowRegexp = "20170808200+[0-9]{6}"
    val begin: Date = new Date()
    val sc = new SparkContext(sparkConf)
    val conf = HBaseConfiguration.create()
    val finalMap = new DateResultAccumulator()
    sc.register(finalMap, "FINALMAP")
    conf.set(TableInputFormat.INPUT_TABLE, SparkProperties.LOG_TABLE_NAME)
    val filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(rowRegexp))
    scan.setFilter(filter)
    val scan_str= TableMapReduceUtil.convertScanToString(scan)
    val jobConfig: JobConf = new JobConf(conf, this.getClass)
    // jobConfig.set("mapreduce.output.fileoutputformat.outputdir", "/")
    jobConfig.setOutputFormat(classOf[TableOutputFormat])
    jobConfig.set(TableOutputFormat.OUTPUT_TABLE, SparkProperties.ANA_TABLE_NAME)
    conf.set(TableInputFormat.SCAN,scan_str)
    AnalysisByCourt.AnalysisByCourtAndDay(sc, conf, "20170808200", jobConfig, finalMap)
    val end: Date = new Date()
    // Elapsed analysis time in milliseconds.
    println("分析耗时:" + (end.getTime - begin.getTime))
  }

  // @Test
  // def testKO() = assertTrue(false)

}