├── README.md ├── bin ├── alert.sh ├── check.sh ├── manual_job.sh ├── start.sh └── stop.sh ├── conf ├── config.properties ├── config.properties.bak ├── log4j.properties ├── package.xml ├── package.xml.bak └── test.properties ├── ddl └── datacheck.sql ├── import ├── check.xlsx ├── covert_xls.py └── mysql_import.sh ├── logs ├── jobstream.log ├── jobstream_error.log └── log4jtest.log ├── pom.xml ├── quartz.properties ├── src └── main │ └── java │ └── org │ └── datacheck │ ├── CheckScheduler.java │ ├── CommonUtil.java │ ├── DataCheck.java │ ├── DbCoonect.java │ ├── JobInfo.java │ ├── JobRunner.java │ ├── PropHelper.java │ ├── SchedJobExec.java │ └── Test.java └── target ├── classes ├── META-INF │ ├── MANIFEST.MF │ └── maven │ │ └── datacheck │ │ └── datacheck │ │ ├── pom.properties │ │ └── pom.xml ├── alert.sh ├── check.sh ├── manual_job.sh ├── org │ └── datacheck │ │ ├── CheckScheduler.class │ │ ├── CommonUtil.class │ │ ├── DataCheck$1.class │ │ ├── DataCheck.class │ │ ├── DbCoonect.class │ │ ├── JobInfo.class │ │ ├── JobRunner.class │ │ ├── PropHelper.class │ │ ├── SchedJobExec.class │ │ └── Test.class ├── start.sh └── stop.sh ├── datacheck-0.0.1-SNAPSHOT.jar ├── datacheck-0.0.1-SNAPSHOT.tar.gz ├── maven-archiver └── pom.properties └── maven-status └── maven-compiler-plugin └── compile └── default-compile ├── createdFiles.lst └── inputFiles.lst /README.md: -------------------------------------------------------------------------------- 1 | datacheck 2 | =================================== 3 | Data quality check tools by execute sql 4 | 5 | 6 | Requires 7 | -------- 8 | Java version>=1.6x 9 | Maven 3.x 10 | mysql 5.x 11 | 12 | Introduce 13 | -------- 14 | 1、support data sources mysql and greenplum 15 | 2、config sql to execute,if result product data,alert 16 | 4、sql Priority control 17 | 5、The number of concurrent control 18 | 19 | Installation 20 | -------- 21 | ``` 22 | $git clone https://github.com/xmingyang/datacheck.git 23 | $cd datacheck 24 | $mvn package 
-Pdist,native -DskipTests –Dtar 25 | $cd target 26 | $tar -zxvf datacheck-0.0.1-SNAPSHOT.tar.gz 27 | $cp ../quartz.properties jobStream-0.0.1-SNAPSHOT/ 28 | $cd datacheck-0.0.1-SNAPSHOT 29 | $cp datacheck-0.0.1-SNAPSHOT/lib/datacheck-0.0.1-SNAPSHOT.jar ../ 30 | ``` 31 | You can move datacheck-0.0.1-SNAPSHOT dir to your setup path,and set $DATACHECK_HOME environment variable 32 | Prepare a mysql db,create datacheck database and initialize datacheck table by ddl/datacheck.sql 33 | modify conf/config.properties jdbc.url jdbc.username jdbc.password 34 | modify quartz.properties org.quartz.dataSource.myDS.URL org.quartz.dataSource.myDS.user org.quartz.dataSource.myDS.password 35 | 36 | **start datacheck service:** 37 | ``` 38 | cd $DATACHECK_HOME/bin 39 | sh start.sh 40 | ``` 41 | 42 | 43 | **stop datacheck service:** 44 | ``` 45 | cd $DATACHECK_HOME/bin 46 | sh stop.sh 47 | ``` 48 | 49 | Example: 50 | -------- 51 | ``` 52 | add a project: 53 | insert into project (project_en,project_cn,max) values('proj_test','proj_test',2); 54 | add your datasoure: 55 | 56 | config your sql and import mysql: 57 | download import/check.xlsx module file to your pc,then write your sql and datasource .. 
58 | setup xlrd to convert xls to csv then load to mysql 59 | pip install xlrd 60 | cd import 61 | sh mysql_import.sh check.xlsx 62 | 63 | config schedule time: 64 | insert into proj_crontab(project_id,cronexpression,is_enable) values(1,'0 30 3 * * ?',1); 65 | ``` 66 | 67 | So do it,we add a project named "proj_test" ,the project run max 2 jobs and the project contain two parameter: 68 | 69 | **E-Mail:**louiscool@126.com 70 | -------------------------------------------------------------------------------- /bin/alert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo "$1" | /usr/local/bin/email -s $2 $3 3 | -------------------------------------------------------------------------------- /bin/check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | basedir=$(cd `dirname $0`; pwd) 3 | cd $basedir 4 | source /home/hdp-ads-audit/.bash_profile 5 | pidcnt=`ps -ef | grep org.datacheck.CheckScheduler | grep -v grep | wc -l` 6 | echo $pidcnt 7 | if [ $pidcnt -eq 0 ] 8 | then 9 | echo "start alert" 10 | curl -d "group_name=360fenxi_jssetup&subject=datacheck-alert&content=datacheck-alert-error-plase-check" http://alarm.mis.corp.qihoo.net:8360/alarm 11 | sleep 10s 12 | sh start.sh 13 | fi 14 | -------------------------------------------------------------------------------- /bin/manual_job.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | #project_en=fenxi_main_day|fenxi_main_hour 3 | #指定项目手工跑批 4 | #usage 1: sh manual_job.sh project_en=xx 5 | #指定项目手工跑批,并传参 6 | #usage 1: sh manual_job.sh project_en=xx '${cdate}'=20160804 7 | #usage 1: sh manual_job.sh project_en=xx '${chour}'=2016080400 8 | 9 | basepath=$(cd `dirname $0`; pwd) 10 | cd $basepath/../ 11 | java -cp datacheck-0.0.1-SNAPSHOT.jar org.datacheck.DataCheck $* 12 | -------------------------------------------------------------------------------- /bin/start.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | basedir=$(cd `dirname $0`; pwd) 3 | cd $basedir/../ 4 | pid=`ps -ef|grep org.datacheck.CheckScheduler|grep -v grep|grep -v PPID|awk '{ print $2}'` 5 | if [[ $pid -gt 0 ]] 6 | then 7 | echo "CheckScheduler pid" $pid" exist ,please stop it first" 8 | exit 9 | fi 10 | echo "CheckScheduler Starting..." 11 | nohup java -Xms256m -Xmx2048m -cp datacheck-0.0.1-SNAPSHOT.jar org.datacheck.CheckScheduler >/dev/null 2>&1 & 12 | echo "CheckScheduler Started" 13 | -------------------------------------------------------------------------------- /bin/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | pid=`ps -ef|grep org.datacheck.CheckScheduler|grep -v grep|grep -v PPID|awk '{ print $2}'` 3 | if [[ $pid -gt 0 ]] 4 | then 5 | echo "CheckScheduler Stopping..." 6 | kill -9 $pid 7 | echo "CheckScheduler Stopped" 8 | else 9 | echo "CheckScheduler Not Exist" 10 | fi 11 | -------------------------------------------------------------------------------- /conf/config.properties: -------------------------------------------------------------------------------- 1 | #mysql config 2 | jdbc.driverClassName=com.mysql.jdbc.Driver 3 | jdbc.url=jdbc:mysql://localhost:3306/datacheck?autoReconnect=true 4 | jdbc.username=test 5 | jdbc.password=test123 6 | #max 7 | retry.count=0 8 | cmd=bin/alert.sh 9 | #email address list 10 | 11 | email=xxx@xxx.com\,yyy@yyy.com 12 | -------------------------------------------------------------------------------- /conf/config.properties.bak: -------------------------------------------------------------------------------- 1 | #mysql config 2 | jdbc.driverClassName=com.mysql.jdbc.Driver 3 | jdbc.url=jdbc:mysql://localhost:3306/jobstreamdb?autoReconnect=true 4 | jdbc.username=jobuser 5 | jdbc.password=jobuser123 6 | #max 7 | retry.count=0 8 | cmd=bin/alert.sh 9 | sshmodule=bin/sshmodule.sh 10 | 
pythonmodule=bin/pythonmodule.sh 11 | javamodule=bin/javamodule.sh 12 | mapreducemodule=bin/mapreducemodule.sh 13 | #email address list 14 | email=xxx@xxx.com\,yyy@yyy.com 15 | -------------------------------------------------------------------------------- /conf/log4j.properties: -------------------------------------------------------------------------------- 1 | ### set log levels ### 2 | log4j.rootLogger = INF,ERR,CONSOLE,FILE 3 | #log4j.rootLogger = DEBUG,CONSOLE,INF,ERR 4 | #log4j.rootLogger = CONSOLE,INF,ERR 5 | 6 | ### \u8f93\u51fa\u5230\u63a7\u5236\u53f0 ### 7 | log4j.appender.CONSOLE = org.apache.log4j.ConsoleAppender 8 | log4j.appender.CONSOLE.Target = System.out 9 | log4j.appender.CONSOLE.layout = org.apache.log4j.PatternLayout 10 | log4j.appender.CONSOLE.layout.ConversionPattern = %d{ABSOLUTE} %5p %c{1}:%L - %m%n 11 | 12 | ### \u8f93\u51fa\u5230\u65e5\u5fd7\u6587\u4ef6 ### 13 | log4j.appender.INF = org.apache.log4j.DailyRollingFileAppender 14 | log4j.appender.INF.File =./logs/datacheck.log 15 | log4j.appender.INF.Append = true 16 | ## \u8f93\u51faDEBUG\u7ea7\u522b\u4ee5\u4e0a\u7684\u65e5\u5fd7 17 | log4j.appender.INF.Threshold = INFO 18 | log4j.appender.INF.layout = org.apache.log4j.PatternLayout 19 | log4j.appender.INF.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 20 | 21 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 22 | log4j.appender.FILE.Append=true 23 | log4j.appender.FILE.File=./logs/datacheck.log 24 | log4j.appender.FILE.Threshold=INFO 25 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 26 | log4j.appender.FILE.layout.ConversionPattern= %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 27 | 28 | 29 | ### \u4fdd\u5b58\u5f02\u5e38\u4fe1\u606f\u5230\u5355\u72ec\u6587\u4ef6 ### 30 | log4j.appender.ERR = org.apache.log4j.DailyRollingFileAppender 31 | ## \u5f02\u5e38\u65e5\u5fd7\u6587\u4ef6\u540d 32 | log4j.appender.ERR.File =./logs/datacheck_error.log 33 | log4j.appender.ERR.Append = true 34 | ## 
\u53ea\u8f93\u51faERROR\u7ea7\u522b\u4ee5\u4e0a\u7684\u65e5\u5fd7!!! 35 | log4j.appender.ERR.Threshold = ERROR 36 | log4j.appender.ERR.layout = org.apache.log4j.PatternLayout 37 | log4j.appender.ERR.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n -------------------------------------------------------------------------------- /conf/package.xml: -------------------------------------------------------------------------------- 1 | 3 | package 4 | 5 | tar.gz 6 | 7 | true 8 | 9 | 10 | src/main/bin 11 | bin 12 | 13 | 14 | conf 15 | conf 16 | 17 | 18 | logs 19 | logs 20 | 21 | 22 | bin 23 | bin 24 | 25 | 26 | 27 | 28 | lib 29 | runtime 30 | 31 | 32 | -------------------------------------------------------------------------------- /conf/package.xml.bak: -------------------------------------------------------------------------------- 1 | 3 | package 4 | 5 | tar.gz 6 | 7 | true 8 | 9 | 10 | src/main/bin 11 | bin 12 | 13 | 14 | conf 15 | conf 16 | 17 | 18 | logs 19 | logs 20 | 21 | 22 | 23 | 24 | 25 | lib 26 | runtime 27 | 28 | 29 | -------------------------------------------------------------------------------- /conf/test.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=DEBUG,console,FILE 2 | 3 | log4j.appender.console=org.apache.log4j.ConsoleAppender 4 | log4j.appender.console.threshold=INFO 5 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.console.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} [%5p] - %c -%F(%L) -%m%n 7 | 8 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 9 | log4j.appender.FILE.Append=true 10 | log4j.appender.FILE.File=./logs/log4jtest.log 11 | log4j.appender.FILE.Threshold=INFO 12 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 13 | log4j.appender.FILE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} [%5p] - %c -%F(%L) -%m%n 14 | log4j.appender.FILE.MaxFileSize=10MB 
-------------------------------------------------------------------------------- /ddl/datacheck.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/ddl/datacheck.sql -------------------------------------------------------------------------------- /import/check.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/import/check.xlsx -------------------------------------------------------------------------------- /import/covert_xls.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | import traceback 4 | import xlrd 5 | import csv 6 | import sys 7 | import re 8 | 9 | logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s') 10 | 11 | 12 | def csv_from_excel(): 13 | xls = sys.argv[1] 14 | target = sys.argv[2] 15 | 16 | logging.info("Start converting: From '" + xls + "' to '" + target + "'. 
") 17 | 18 | try: 19 | start_time = time.time() 20 | wb = xlrd.open_workbook(xls) 21 | sh = wb.sheet_by_index(0) 22 | 23 | csvFile = open(target, 'wb') 24 | wr = csv.writer(csvFile, quoting=csv.QUOTE_ALL) 25 | 26 | for row in xrange(sh.nrows): 27 | rowValues = sh.row_values(row) 28 | 29 | newValues = [] 30 | for s in rowValues: 31 | if isinstance(s, unicode): 32 | strValue = (str(s.encode("utf-8"))) 33 | else: 34 | strValue = (str(s)) 35 | 36 | isInt = bool(re.match("^([0-9]+)\.0$", strValue)) 37 | 38 | if isInt: 39 | strValue = int(float(strValue)) 40 | else: 41 | isFloat = bool(re.match("^([0-9]+)\.([0-9]+)$", strValue)) 42 | isLong = bool(re.match("^([0-9]+)\.([0-9]+)e\+([0-9]+)$", strValue)) 43 | 44 | if isFloat: 45 | strValue = float(strValue) 46 | 47 | if isLong: 48 | strValue = int(float(strValue)) 49 | 50 | newValues.append(strValue) 51 | 52 | wr.writerow(newValues) 53 | 54 | csvFile.close() 55 | 56 | logging.info("Finished in %s seconds", time.time() - start_time) 57 | 58 | except Exception as e: 59 | print (str(e) + " " + traceback.format_exc()) 60 | 61 | 62 | csv_from_excel() 63 | -------------------------------------------------------------------------------- /import/mysql_import.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | basepath=$(cd `dirname $0`; pwd) 3 | if [ $# -eq 0 ] 4 | then 5 | echo "please set parameter:filename" 6 | exit 1 7 | fi 8 | filename=$1 9 | host=x.x.x.x 10 | user=x 11 | passwd=x 12 | db=datacheck 13 | port=3306 14 | #sed '1d' $filename >$filename.temp 15 | #iconv -f gbk -t utf-8 $filename.temp >$filename.new 16 | python covert_xls.py $filename $filename.tmp 17 | sed '1d' $filename.tmp >$filename.new 18 | dos2unix ./$filename.new 19 | echo `date` "truncate import" 20 | mysql -h $host -u$user -p$passwd -P $port -D $db < 2 | 4.0.0 3 | datacheck 4 | datacheck 5 | 0.0.1-SNAPSHOT 6 | datacheck 7 | datacheck 8 | 9 | UTF-8 10 | 11 | 12 | 13 | org.jsoup 14 | jsoup 15 | 1.6.3 16 | 17 
| 18 | commons-configuration 19 | commons-configuration 20 | 1.6 21 | 22 | 23 | 24 | org.slf4j 25 | slf4j-api 26 | 1.6.6 27 | 28 | 29 | org.slf4j 30 | slf4j-log4j12 31 | 1.6.6 32 | 33 | 34 | commons-codec 35 | commons-codec 36 | 1.3 37 | 38 | 39 | 40 | log4j 41 | log4j 42 | 1.2.15 43 | 44 | 45 | com.sun.jmx 46 | jmxri 47 | 48 | 49 | com.sun.jdmk 50 | jmxtools 51 | 52 | 53 | javax.jms 54 | jms 55 | 56 | 57 | 58 | 59 | org.quartz-scheduler 60 | quartz 61 | 2.2.1 62 | 63 | 64 | mysql 65 | mysql-connector-java 66 | 5.1.33 67 | 68 | 69 | org.postgresql 70 | postgresql 71 | 9.4.1212.jre7 72 | 73 | 82 | 83 | 84 | 85 | 86 | compile 87 | 88 | 89 | 90 | 91 | 92 | 93 | org.eclipse.m2e 94 | 95 | lifecycle-mapping 96 | 97 | 1.0.0 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | org.apache.maven.plugins 110 | 111 | maven-resources-plugin 112 | 113 | [2.0,) 114 | 115 | 116 | 117 | resources 118 | 119 | testResources 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | org.apache.maven.plugins 151 | maven-compiler-plugin 152 | 3.1 153 | 154 | ${jdk.version} 155 | ${jdk.version} 156 | true 157 | 158 | 159 | 160 | 161 | 162 | 163 | org.apache.maven.plugins 164 | maven-resources-plugin 165 | 2.6 166 | 167 | UTF-8 168 | 169 | 170 | 171 | 172 | 173 | org.apache.maven.plugins 174 | maven-install-plugin 175 | 2.4 176 | 177 | 178 | 179 | 180 | org.apache.maven.plugins 181 | maven-clean-plugin 182 | 2.5 183 | 184 | 185 | 186 | 187 | org.apache.maven.plugins 188 | maven-antrun-plugin 189 | 1.7 190 | 191 | 192 | 193 | org.apache.maven.plugins 194 | maven-jar-plugin 195 | 2.4 196 | 197 | 198 | 199 | true 200 | lib/ 201 | org.jobstream.JobStream 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | maven-assembly-plugin 210 | 211 | 212 | false 213 | 214 | conf/package.xml 215 | 216 | 217 | 218 | 219 | 220 | make-assembly 221 | package 222 | 223 | 
single 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | -------------------------------------------------------------------------------- /quartz.properties: -------------------------------------------------------------------------------- 1 | 2 | #============================================================================ 3 | # Configure Main Scheduler Properties 4 | #============================================================================ 5 | 6 | org.quartz.scheduler.instanceName: JobStreamScheduler 7 | org.quartz.scheduler.instanceId: AUTO 8 | 9 | org.quartz.scheduler.skipUpdateCheck: true 10 | 11 | #============================================================================ 12 | # Configure ThreadPool 13 | #============================================================================ 14 | 15 | org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool 16 | org.quartz.threadPool.threadCount: 12 17 | org.quartz.threadPool.threadPriority: 5 18 | 19 | #============================================================================ 20 | # Configure JobStore 21 | #============================================================================ 22 | 23 | org.quartz.jobStore.misfireThreshold: 60000 24 | 25 | #org.quartz.jobStore.class: org.quartz.simpl.RAMJobStore 26 | 27 | #org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX 28 | #org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate 29 | #org.quartz.jobStore.useProperties: false 30 | #org.quartz.jobStore.dataSource: myDS 31 | #org.quartz.jobStore.tablePrefix: QRTZ_ 32 | #org.quartz.jobStore.isClustered: false 33 | 34 | #============================================================================ 35 | # Configure Datasources 36 | #============================================================================ 37 | 38 | #org.quartz.dataSource.myDS.driver: org.postgresql.Driver 39 | #org.quartz.dataSource.myDS.URL: jdbc:postgresql://localhost/dev 40 | 
#org.quartz.dataSource.myDS.user: jhouse 41 | #org.quartz.dataSource.myDS.password: 42 | #org.quartz.dataSource.myDS.maxConnections: 5 43 | org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX 44 | org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.StdJDBCDelegate 45 | org.quartz.jobStore.DataSource = myDS 46 | org.quartz.jobStore.tablePrefix: QRTZ_ 47 | org.quartz.jobStore.useProperties: false 48 | org.quartz.dataSource.myDS.driver = com.mysql.jdbc.Driver 49 | org.quartz.dataSource.myDS.URL = jdbc:mysql://localhost:3306/datacheck?autoReconnect=true 50 | org.quartz.dataSource.myDS.user = test 51 | org.quartz.dataSource.myDS.password = test123 52 | org.quartz.dataSource.myDS.maxConnections = 10 53 | org.quartz.jobStore.isClustered = true 54 | org.quartz.jobStore.clusterCheckinInterval = 20000 55 | -------------------------------------------------------------------------------- /src/main/java/org/datacheck/CheckScheduler.java: -------------------------------------------------------------------------------- 1 | 2 | package org.datacheck; 3 | 4 | import static org.quartz.CronScheduleBuilder.cronSchedule; 5 | import static org.quartz.JobBuilder.newJob; 6 | import static org.quartz.TriggerBuilder.newTrigger; 7 | 8 | import java.sql.Connection; 9 | import java.sql.ResultSet; 10 | import java.sql.SQLException; 11 | import java.sql.Statement; 12 | import java.util.Date; 13 | import java.util.HashMap; 14 | import java.util.List; 15 | 16 | import org.apache.log4j.Logger; 17 | import org.apache.log4j.PropertyConfigurator; 18 | import org.quartz.CronTrigger; 19 | import org.quartz.JobDetail; 20 | import org.quartz.JobKey; 21 | import org.quartz.Scheduler; 22 | import org.quartz.SchedulerException; 23 | import org.quartz.SchedulerFactory; 24 | import org.quartz.Trigger; 25 | import org.quartz.TriggerKey; 26 | import org.quartz.impl.StdSchedulerFactory; 27 | import org.quartz.impl.matchers.GroupMatcher; 28 | 29 | public class CheckScheduler { 30 
| public static void scheduler() { 31 | /* 32 | * throws SchedulerException 33 | */ 34 | PropertyConfigurator.configure("conf/log4j.properties"); 35 | Logger logger = Logger.getLogger(CheckScheduler.class.getName()); 36 | logger.info("scheduler init.."); 37 | SchedulerFactory sf = new StdSchedulerFactory(); 38 | Scheduler sched = null; 39 | 40 | try { 41 | sched = sf.getScheduler(); 42 | sched.clear(); 43 | sched.start(); 44 | } catch (SchedulerException e1) { 45 | // TODO Auto-generated catch block 46 | e1.printStackTrace(); 47 | } 48 | 49 | Connection con = null; 50 | Statement sql = null; 51 | ResultSet rs = null; 52 | JobDetail job = null; 53 | CronTrigger trigger = null; 54 | HashMap enable_crontab = new HashMap(); 55 | HashMap enable_crontab0 = null; 56 | 57 | while (true) { 58 | enable_crontab0 = new HashMap(); 59 | try { 60 | 61 | con = DbCoonect.getConnectionMySql(); 62 | if (con == null) { 63 | 64 | System.exit(0); 65 | } 66 | 67 | sql = con.createStatement(); 68 | 69 | String strSql = "select a.id,b.project_en,a.cronexpression from proj_crontab a,project b where" 70 | + " a.project_id=b.id and a.is_enable=1 "; 71 | // System.out.println("3333333333:" + strSql); 72 | rs = sql.executeQuery(strSql); 73 | while (rs.next()) { 74 | int crontab_id = rs.getInt(1); 75 | String project_en = rs.getString(2); 76 | String cronexpression = rs.getString(3); 77 | // int max=rs.getInt(5); 78 | /* 79 | * if (sched.getJobDetail(new 80 | * JobKey(project_en+String.valueOf(crontab_id)),project_en+ 81 | * String.valueOf(crontab_id))==null)) { 82 | * 83 | * } 84 | */ 85 | String jobid = project_en + ";" + String.valueOf(crontab_id); 86 | // String jobid=String.valueOf(crontab_id); 87 | JobDetail job1 = sched.getJobDetail(new JobKey(jobid, jobid)); 88 | enable_crontab0.put(jobid, cronexpression); 89 | // 新加入的scheduler 90 | if (job1 == null) { 91 | // System.out.println("scheduler job:"+jobid+" 92 | // cronexpression:"+cronexpression+" max:"+max); 93 | logger.info("scheduler add 
project:" + jobid + " cronexpression:" + cronexpression); 94 | job = newJob(SchedJobExec.class).withIdentity(jobid, jobid).build(); 95 | trigger = newTrigger().withIdentity(jobid, jobid).withSchedule(cronSchedule(cronexpression)) 96 | .build(); 97 | // System.out.println(trigger.getCronExpression()); 98 | job.getJobDataMap().put("project_en", project_en); 99 | job.getJobDataMap().put("crontab_id", crontab_id); 100 | // job.getJobDataMap().put("max", max); 101 | 102 | sched.scheduleJob(job, trigger); 103 | 104 | } 105 | 106 | } 107 | enable_crontab = enable_crontab0; 108 | } catch (Exception e) { 109 | // e.printStackTrace(); 110 | logger.error(e.getMessage()); 111 | 112 | } finally { 113 | try { 114 | sql.close(); 115 | con.close(); 116 | } catch (SQLException e) { 117 | // e.printStackTrace(); 118 | logger.error(e.getMessage()); 119 | } 120 | } 121 | // 删除过期的crontab,或者修改变更过crontabexpression max的 122 | try { 123 | for (String groupName : sched.getJobGroupNames()) { 124 | for (JobKey jobKey : sched.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) { 125 | String jobName = jobKey.getName(); 126 | String jobGroup = jobKey.getGroup(); 127 | List triggers = (List) sched.getTriggersOfJob(jobKey); 128 | // Date nextFireTime = 129 | // triggers.get(0).getNextFireTime(); 130 | // System.out.println("[jobName] : " + jobName + " 131 | // [groupName] : " + jobGroup + " - " + nextFireTime); 132 | CronTrigger trig = (CronTrigger) triggers.get(0); 133 | // 获取当前正在调度的表达式 134 | String cronexpression = trig.getCronExpression(); 135 | // 获取当前正在调度的并发数 136 | // int 137 | // max=sched.getJobDetail(jobKey).getJobDataMap().getInt("max"); 138 | 139 | if (!enable_crontab.containsKey(jobName)) { 140 | // System.out.println("delete job scheduler"); 141 | logger.info("scheduler delete project:" + jobName + " cronexpression:" + cronexpression); 142 | // removes the given trigger 143 | sched.unscheduleJob(new TriggerKey(jobName, jobGroup)); 144 | // removes all triggers to the given job 145 | 
sched.deleteJob(new JobKey(jobName, jobGroup)); 146 | } else { 147 | String enable_cronexpression = enable_crontab.get(jobName); 148 | // int 149 | // enable_max=Integer.parseInt(enable_crontab.get(jobName).split(";")[1]); 150 | if (!enable_cronexpression.equals(cronexpression)) {// 删除当前调度,重建 151 | 152 | // System.out.println("update job scheduler"); 153 | logger.info("scheduler update project:" + jobName + " cronexpression:" 154 | + enable_cronexpression); 155 | // removes the given trigger 156 | sched.unscheduleJob(new TriggerKey(jobName, jobGroup)); 157 | // removes all triggers to the given job 158 | sched.deleteJob(new JobKey(jobName, jobGroup)); 159 | job = newJob(SchedJobExec.class).withIdentity(jobName, jobName).build(); 160 | trigger = newTrigger().withIdentity(jobName, jobName) 161 | .withSchedule(cronSchedule(enable_cronexpression)).build(); 162 | 163 | // System.out.println(trigger.getCronExpression()); 164 | job.getJobDataMap().put("project_en", jobName.split(";")[0]); 165 | job.getJobDataMap().put("crontab_id", jobName.split(";")[1]); 166 | // job.getJobDataMap().put("max", enable_max); 167 | 168 | sched.scheduleJob(job, trigger); 169 | 170 | } 171 | } 172 | } 173 | } 174 | } catch (SchedulerException exs) { 175 | // exs.printStackTrace(); 176 | logger.error(exs.getMessage()); 177 | } 178 | 179 | try { 180 | // Thread.sleep(200000); 181 | Thread.sleep(60000); 182 | } catch (InterruptedException e) { 183 | // TODO Auto-generated catch block 184 | // e.printStackTrace(); 185 | logger.error(e.getMessage()); 186 | } 187 | 188 | } 189 | 190 | } 191 | 192 | public static void main(String[] args) throws SchedulerException { 193 | 194 | scheduler(); 195 | 196 | } 197 | 198 | } 199 | -------------------------------------------------------------------------------- /src/main/java/org/datacheck/CommonUtil.java: -------------------------------------------------------------------------------- 1 | package org.datacheck; 2 | 3 | import java.util.regex.Matcher; 4 | 
import java.util.regex.Pattern;

import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

/**
 * Shared helpers: mail alerting via a local shell script, expr_date(...)
 * relative-date expansion, and Base64 encode/decode for stored passwords.
 */
public class CommonUtil {

	/**
	 * Sends an alert mail by invoking the shell command configured under the
	 * "cmd" property with the message, title and the configured "email"
	 * recipient list as separate arguments. Failures are logged, never thrown.
	 */
	public static void sendmail(String msg, String title) {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger logger = Logger.getLogger(CommonUtil.class.getName());
		logger.info("调用本地shell发邮件,邮件内容为:" + msg);

		try {
			String cmd = PropHelper.getStringValue("cmd");
			String emaillist = PropHelper.getStringValue("email");

			// argument vector: <cmd> <message> <title> <recipients>;
			// passing an array avoids shell re-tokenization of the message
			String[] cmds = new String[4];
			cmds[0] = cmd;
			cmds[1] = msg;
			cmds[2] = title;
			cmds[3] = emaillist;

			Process process = Runtime.getRuntime().exec(cmds);
			int exitValue = process.waitFor();

			if (0 != exitValue) {
				logger.error("邮件发送失败. error code is :" + exitValue);
			} else {
				logger.info("邮件发送成功.emailist:" + emaillist + " title:" + title);
			}
		} catch (Exception e) {
			logger.error("邮件发送失败. " + e);
		}
	}

	/**
	 * Expands an expr_date(...) placeholder into a formatted date string.
	 * Supported forms: expr_date(date-N,fmt), expr_date(date+N,fmt),
	 * expr_date(hour-N,fmt), expr_date(hour+N,fmt), and expr_date(date,fmt) /
	 * expr_date(hour,fmt) meaning "now". The input is returned unchanged when
	 * it does not match the pattern.
	 */
	public static String expr_date(String expr_date) {
		// FIX: the original pattern used the character class [date|hour] (any
		// single one of those letters) instead of the intended alternation
		// (?:date|hour); also [a-zA-z] accidentally spanned [\]^_` — narrowed
		// to [a-zA-Z].
		Pattern pat = Pattern.compile("expr_date\\(((?:date|hour).*),([a-zA-Z0-9-/ ]+)\\)");
		Matcher mat = pat.matcher(expr_date);
		if (mat.find()) {
			String dateval = mat.group(1);
			String dateformat = mat.group(2);

			Calendar thiscal = Calendar.getInstance();
			thiscal.setTime(new Date());

			SimpleDateFormat sdf = new SimpleDateFormat(dateformat);

			if (dateval.contains("-")) {
				String[] datevals = dateval.split("-");

				if (datevals[0].trim().equals("date")) {
					thiscal.add(Calendar.DAY_OF_MONTH, -Integer.parseInt(datevals[1].trim()));
					return sdf.format(thiscal.getTime());
				} else if (datevals[0].trim().equals("hour")) {
					thiscal.add(Calendar.HOUR_OF_DAY, -Integer.parseInt(datevals[1].trim()));
					return sdf.format(thiscal.getTime());
				}
			} else if (dateval.contains("+")) {
				// FIX: String.split takes a regex; a bare "+" is an invalid
				// pattern (dangling quantifier) and threw
				// PatternSyntaxException at runtime, so the date+N / hour+N
				// forms never worked. Escape it.
				String[] datevals = dateval.split("\\+");

				if (datevals[0].trim().equals("date")) {
					// FIX: Integer.parseInt("+" + n) rejects the leading '+'
					// on Java 6 (the README targets >=1.6); parse the plain
					// number instead — same value on every Java version.
					thiscal.add(Calendar.DAY_OF_MONTH, Integer.parseInt(datevals[1].trim()));
					return sdf.format(thiscal.getTime());
				} else if (datevals[0].trim().equals("hour")) {
					thiscal.add(Calendar.HOUR_OF_DAY, Integer.parseInt(datevals[1].trim()));
					return sdf.format(thiscal.getTime());
				}
			} else {
				// bare expr_date(date,fmt) / expr_date(hour,fmt): format "now"
				return sdf.format(thiscal.getTime());
			}

		}
		return expr_date;
	}

	// True when the string is a one/two digit number in 0..23.
	public static boolean is_hour(String hour) {
		if (hour != null && !hour.equals("")) {
			if (is2Numberic(hour)) {
				if (Integer.parseInt(hour) >= 0 && Integer.parseInt(hour) <= 23) {
					return true;
				}
			}
		}
		return false;
	}

	// True when the string is a one/two digit number in 0..59.
	public static boolean is_min(String min) {
		if (min != null && !min.equals("")) {
			if (is2Numberic(min)) {
				if (Integer.parseInt(min) >= 0 && Integer.parseInt(min) <= 59) {
					return true;
				}
			}
		}
		return false;
	}

	// True when the whole string is one or two decimal digits.
	public static boolean is2Numberic(String str) {
		Pattern pattern = Pattern.compile("[0-9]{1,2}");
		Matcher isNum = pattern.matcher(str);
		return isNum.matches();
	}

	/**
	 * Decodes BASE64 bytes back into binary data.
	 *
	 * @param bytes BASE64-encoded bytes
	 * @return decoded binary data
	 */
	public static byte[] decode(final byte[] bytes) {
		return Base64.decodeBase64(bytes);
	}

	/**
	 * Encodes binary data as a BASE64 string.
	 *
	 * @param bytes raw binary data
	 * @return BASE64 string representation
	 */
	public static String encode(final byte[] bytes) {
		return new String(Base64.encodeBase64(bytes));
	}

	// Manual smoke test: java ... CommonUtil <message> <title>
	public static void main(String[] args) {
		if (args.length == 2) {
			System.out.println("args[0]:" + args[0]);
			System.out.println("args[1]:" + args[1]);
			sendmail(args[0], args[1]);
		}
	}

}
--------------------------------------------------------------------------------
/src/main/java/org/datacheck/DataCheck.java:
--------------------------------------------------------------------------------
package org.datacheck;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import
java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

/**
 * Drives one check run for a project: loads the project's SQL jobs from the
 * metadata database, sorts them by priority, and executes them on a bounded
 * thread pool (at most {@code max} concurrently active jobs).
 */
public class DataCheck {

	// project code / identifier
	private String project_en;
	// crontab id; set only when run by the scheduler (0 for manual runs), carried into logs
	private int crontab_id = 0;
	// run-wide sequence number derived from the current time, used as a version tag
	private String scheduler_seq;
	// parameters configured on the crontab entry
	private String crontab_param = "";
	// concurrency limit, default 2 (overridden per-project from the project table)
	private int max = 2;
	// manually supplied parameters (names start with '$')
	private HashMap<String, String> proj_param_manual = new HashMap<String, String>();

	// pending jobs, highest priority first (see resortJobqueue)
	ArrayList<JobInfo> jobqueue;
	// job run status keyed by sql_id ("S"/"C"/"F")
	Map<Integer, String> stautsmap;
	// project-level parameters (name -> value)
	Map<String, String> project_param;
	// jobs currently running, keyed by sql_id
	Map<Integer, String> runningmap;
	// jobs queued but not yet started, keyed by sql_id
	Map<Integer, String> pre_runningmap;
	// jobs with an "after time" that are occupying a slot but still waiting
	Map<Integer, String> runningmap_aftertime;

	public String getProject_en() {
		return project_en;
	}

	public void setProject_en(String project_en) {
		this.project_en = project_en;
	}

	public int getCrontab_id() {
		return crontab_id;
	}

	public void setCrontab_id(int crontab_id) {
		this.crontab_id = crontab_id;
	}

	public String getScheduler_seq() {
		return scheduler_seq;
	}

	public void setScheduler_seq(String scheduler_seq) {
		this.scheduler_seq = scheduler_seq;
	}

	public String getCrontab_param() {
		return crontab_param;
	}

	public void setCrontab_param(String crontab_param) {
		this.crontab_param = crontab_param;
	}

	public HashMap<String, String> getProj_param_manual() {
		return proj_param_manual;
	}

	public void setProj_param_manual(HashMap<String, String> proj_param_manual) {
		this.proj_param_manual = proj_param_manual;
	}

	public int getMax() {
		return max;
	}

	public void setMax(int max) {
		this.max = max;
	}

	public DataCheck(ArrayList<JobInfo> jobqueue, Map<Integer, String> stautsmap, Map<String, String> project_param,
			Map<Integer, String> runningmap, Map<Integer, String> pre_runningmap,
			Map<Integer, String> runningmap_aftertime) {
		this.jobqueue = jobqueue;
		this.stautsmap = stautsmap;
		this.project_param = project_param;
		this.runningmap = runningmap;
		this.pre_runningmap = pre_runningmap;
		this.runningmap_aftertime = runningmap_aftertime;
	}

	/**
	 * Loads the project's concurrency limit and its SQL jobs from the metadata
	 * database into the job queue, then sorts the queue by priority.
	 * Exits the JVM when no metadata connection can be obtained (original behavior).
	 */
	public void init() {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger logger = Logger.getLogger(DataCheck.class.getName());
		logger.info("init start!");
		Connection con = null;
		PreparedStatement stmt = null;
		Statement session = null;
		ResultSet rs = null;
		try {
			con = DbCoonect.getConnectionMySql();
			if (con == null) {
				logger.error("connect is null");
				System.exit(0);
			}
			// FIX: both queries are parameterized instead of concatenating
			// project_en into the SQL text (PreparedStatement was imported but
			// never used; concatenation invited SQL injection / quoting bugs).
			String strSql = "select trim(project_en),trim(project_cn),a.id as sql_id,trim(sql_text),trim(sql_cn),priority,level,trim(a.owner),after_hour,after_min,a.datasource_id,c.dbtype,c.ip,c.port,c.dbname,c.username,c.passwd,a.module,a.check_type from proj_sql a,project b,datasource c where a.project_id=b.id and a.datasource_id=c.id and trim(b.project_en)= ?";
			String projectSql = "select max from project where trim(project_en)= ?";
			stmt = con.prepareStatement(projectSql);
			stmt.setString(1, project_en);
			rs = stmt.executeQuery();
			// project-level parameters
			if (rs.next()) {
				// project concurrency limit; 0 / NULL keeps the default
				int maxval = rs.getInt(1);
				if (maxval > 0) {
					setMax(maxval);
					logger.info("setMax:" + maxval);
				}
			}
			rs.close();
			stmt.close();
			// FIX: "set names utf8" produces no result set; run it with
			// execute() instead of executeQuery().
			session = con.createStatement();
			session.execute("set names utf8");
			stmt = con.prepareStatement(strSql);
			stmt.setString(1, project_en);
			rs = stmt.executeQuery();
			while (rs.next()) {
				JobInfo jobinfo = new JobInfo();
				jobinfo.setSql_id(rs.getInt(3));
				jobinfo.setSql_text(rs.getString(4));
				jobinfo.setSql_cn(rs.getString(5));
				jobinfo.setPriority(rs.getInt(6));
				jobinfo.setLevel(rs.getInt(7));
				jobinfo.setOwner(rs.getString(8));
				jobinfo.setHour(rs.getString(9));
				jobinfo.setMin(rs.getString(10));
				jobinfo.setDatasource_id(rs.getInt(11));
				jobinfo.setDbtype(rs.getString(12));
				jobinfo.setIp(rs.getString(13));
				jobinfo.setPort(rs.getInt(14));
				jobinfo.setDbname(rs.getString(15));
				jobinfo.setUsername(rs.getString(16));
				jobinfo.setPasswd(rs.getString(17));
				jobinfo.setModule(rs.getString(18));
				jobinfo.setCheck_type(rs.getString(19));
				// enqueue timestamp; tie-breaker within the same priority
				jobinfo.setStarttime(new Date().getTime());
				jobqueue.add(jobinfo);
				pre_runningmap.put(jobinfo.getSql_id(), "");
			}
			logger.info("init jobqueue size:" + jobqueue.size());
		} catch (Exception e) {
			logger.error(e.getMessage());

		} finally {
			// FIX: guard each close independently so a null handle (after an
			// early exception) or a failing close cannot leak the others.
			if (rs != null) {
				try {
					rs.close();
				} catch (SQLException e) {
					logger.error(e.getMessage());
				}
			}
			if (session != null) {
				try {
					session.close();
				} catch (SQLException e) {
					logger.error(e.getMessage());
				}
			}
			if (stmt != null) {
				try {
					stmt.close();
				} catch (SQLException e) {
					logger.error(e.getMessage());
				}
			}
			if (con != null) {
				try {
					con.close();
				} catch (SQLException e) {
					logger.error(e.getMessage());
				}
			}
		}
		resortJobqueue();
	}

	/**
	 * Sorts the queue so the highest priority runs first; within equal
	 * priority, the job enqueued earliest runs first.
	 */
	public void resortJobqueue() {
		Comparator<JobInfo> comp = new Comparator<JobInfo>() {
			public int compare(JobInfo o1, JobInfo o2) {
				int res = o1.getPriority() - o2.getPriority();
				if (res == 0) {
					if (o1.getStarttime() < o2.getStarttime())
						res = 1;
					else
						res = (o1.getStarttime() == o2.getStarttime() ? 0 : -1);
				}
				// negated: descending priority, ascending start time
				return -res;
			}
		};
		synchronized (jobqueue) {
			Collections.sort(jobqueue, comp);
		}
	}

	/**
	 * Dispatch loop: feeds queued jobs to the pool while fewer than {@code max}
	 * jobs are actively running (jobs parked on their "after time" do not count),
	 * and exits after several consecutive idle polls.
	 */
	public void start() {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger logger = Logger.getLogger(DataCheck.class.getName());
		int x = 0; // consecutive observations of "queue empty and nothing running"
		int c = 0; // jobs actively occupying a worker
		// NOTE(review): capacity-5 queue with DiscardOldestPolicy can silently
		// drop submitted jobs if submissions outpace the pool; kept as-is to
		// preserve existing behavior.
		ThreadPoolExecutor threadPool = new ThreadPoolExecutor(getMax(), 20, 5, TimeUnit.SECONDS,
				new ArrayBlockingQueue<Runnable>(5), new ThreadPoolExecutor.DiscardOldestPolicy());
		while (true) {
			if (jobqueue.isEmpty()) {
				if (runningmap.size() == 0) {
					x++;
					logger.info("project_en:" + this.getProject_en() + " crontab_id:" + this.getCrontab_id()
							+ " jobqueue is empty and running jobs:0:seq:" + x);
				}
			}

			c = runningmap.size() - runningmap_aftertime.size();
			if (c < getMax()) {
				JobInfo jobinfo = null;
				if (!jobqueue.isEmpty()) {
					jobinfo = jobqueue.remove(0);
				}
				if (jobinfo != null) {
					pre_runningmap.remove(jobinfo.getSql_id());
					x = 0;
					threadPool.execute(new JobRunner(jobinfo, jobqueue, stautsmap, project_en, crontab_id,
							scheduler_seq, project_param, runningmap, pre_runningmap, runningmap_aftertime));
					try {
						Thread.sleep(600);
					} catch (InterruptedException e) {
						e.printStackTrace();
					}
				}
			} else {
				x = 0;
				try {
					Thread.sleep(3000);
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
			}
			// after >5 consecutive idle polls the run is considered finished
			if (x > 5) {
				threadPool.shutdown();
				logger.info("project_en:" + this.getProject_en() + " crontab_id:" + this.getCrontab_id()
						+ " the whole JobStream job exit!");
				break;
			}

		}
	}

	/**
	 * Manual entry point. Arguments are name=value pairs; project_en is
	 * required, and names starting with '$' become project-internal parameters.
	 */
	public static void main(String[] args) {
		if (args.length < 1) {
			System.out.println("必须传入参数");
			System.exit(1);
		}
		HashMap<String, String> param_value = new HashMap<String, String>();
		// manually supplied project-internal parameters
		HashMap<String, String> proj_param_manual = new HashMap<String, String>();
		for (String para : args) {
			if (!para.contains("=")) {
				System.out.println("传入参数格式:param_name=param_value");
				System.exit(1);
			}
			// FIX: split with a limit so values containing '=' survive intact,
			// and reject a missing value ("name="), which previously raised
			// ArrayIndexOutOfBoundsException.
			String[] s = para.split("=", 2);
			if (s.length < 2 || s[1].length() == 0) {
				System.out.println("传入参数格式:param_name=param_value");
				System.exit(1);
			}

			System.out.println("传入参数:" + para);
			if (para.startsWith("$")) {
				System.out.println("传入项目内部参数" + s[0] + ":" + s[1]);
				proj_param_manual.put(s[0], s[1]);
			} else {
				param_value.put(s[0], s[1]);
			}

		}
		if (!param_value.containsKey("project_en")) {
			System.out.println("必须传入参数:project_en");
			System.exit(1);
		}
		ArrayList<JobInfo> jobqueue = new ArrayList<JobInfo>();
		Map<Integer, String> stautsmap = new ConcurrentHashMap<Integer, String>(); // job run status
		Map<String, String> project_param = new HashMap<String, String>();
		// currently running jobs
		Map<Integer, String> runningmap = new ConcurrentHashMap<Integer, String>();
		// jobs queued but not yet started
		Map<Integer, String> pre_runningmap = new HashMap<Integer, String>();
		// running jobs still waiting for their "after time"
		Map<Integer, String> runningmap_aftertime = new ConcurrentHashMap<Integer, String>();
		DataCheck jobmain = new DataCheck(jobqueue, stautsmap, project_param, runningmap, pre_runningmap,
				runningmap_aftertime);
		jobmain.setProject_en(param_value.get("project_en"));
		jobmain.setScheduler_seq(new java.text.SimpleDateFormat("yyyyMMddHHmmss").format(new java.util.Date()));
		jobmain.setProj_param_manual(proj_param_manual);
		jobmain.init();
		jobmain.start();
	}

}
--------------------------------------------------------------------------------
/src/main/java/org/datacheck/DbCoonect.java:
--------------------------------------------------------------------------------
package org.datacheck;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;


/**
 * JDBC connection factory for the metadata store and the checked data sources.
 * All methods return null on failure; callers are expected to null-check.
 */
public class DbCoonect {

	/**
	 * Opens a connection to the metadata MySQL database configured in
	 * conf/config.properties (jdbc.driverClassName / jdbc.url /
	 * jdbc.username / jdbc.password).
	 *
	 * @return the connection, or null when it could not be established
	 */
	public static Connection getConnectionMySql() throws Exception {
		Connection conn = null;
		try {
			Class.forName(PropHelper.getStringValue("jdbc.driverClassName"));
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
		}
		try {
			String url = PropHelper.getStringValue("jdbc.url");
			String user = PropHelper.getStringValue("jdbc.username");
			String password = PropHelper.getStringValue("jdbc.password");
			conn = DriverManager.getConnection(url, user, password);
		} catch (SQLException e) {
			e.printStackTrace();
		}
		return conn;
	}

	// example url: jdbc:mysql://localhost:3306/jobstreamdb?autoReconnect=true
	/**
	 * Opens a connection to a checked data source. Supported dbtype values are
	 * "mysql" and "greenplum"; any other value yields null, as does a failed
	 * driver load or connect.
	 */
	public static Connection getConnection(String dbtype, String ip, int port, String dbname, String username, String passwd) throws Exception {
		String driverClass;
		String url;
		if (dbtype.equals("mysql")) {
			driverClass = "com.mysql.jdbc.Driver";
			url = "jdbc:mysql://" + ip + ":" + String.valueOf(port) + "/" + dbname;
		} else if (dbtype.equals("greenplum")) {
			// greenplum speaks the postgres wire protocol
			driverClass = "org.postgresql.Driver";
			url = "jdbc:postgresql://" + ip + ":" + String.valueOf(port) + "/" + dbname;
		} else {
			// unknown database type: same null result as the failure paths
			return null;
		}
		Connection conn = null;
		try {
			Class.forName(driverClass);
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
		}
		try {
			conn = DriverManager.getConnection(url, username, passwd);
		} catch (SQLException e) {
			e.printStackTrace();
		}
		return conn;
	}

}
--------------------------------------------------------------------------------
/src/main/java/org/datacheck/JobInfo.java:
--------------------------------------------------------------------------------
package org.datacheck;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Map;

/**
 * Value object describing one check job: the SQL to execute, its scheduling
 * attributes (priority, earliest allowed start time) and the target data source.
 */
public class JobInfo {

	// --- scheduling attributes ---
	private int priority = 0;     // higher priority is dispatched first
	private long starttime;       // enqueue timestamp (millis); tie-breaker within a priority
	private String hour;          // earliest hour of day the job may start (optional)
	private String min;           // earliest minute the job may start (optional)

	// --- check definition ---
	private int sql_id = 0;       // primary key of the proj_sql row
	private String sql_text;      // the SQL to run against the data source
	private String sql_cn;        // human-readable description of the check
	private int level = 0;        // severity level of the check
	private String owner;         // person alerted when the check produces data
	private String module = "";
	private String check_type = "";

	// --- target data source ---
	private int datasource_id = 0;
	private String dbtype = "";   // "mysql" or "greenplum"
	private String ip = "";
	private int port = 0;
	private String dbname = "";
	private String username = "";
	private String passwd = "";   // Base64-encoded; decoded via CommonUtil.decode before use

	public int getPriority() { return priority; }
	public void setPriority(int priority) { this.priority = priority; }

	public long getStarttime() { return starttime; }
	public void setStarttime(long starttime) { this.starttime = starttime; }

	public String getSql_text() { return sql_text; }
	public void setSql_text(String sql_text) { this.sql_text = sql_text; }

	public String getSql_cn() { return sql_cn; }
	public void setSql_cn(String sql_cn) { this.sql_cn = sql_cn; }

	public String getOwner() { return owner; }
	public void setOwner(String owner) { this.owner = owner; }

	public String getHour() { return hour; }
	public void setHour(String hour) { this.hour = hour; }

	public String getMin() { return min; }
	public void setMin(String min) { this.min = min; }

	public int getLevel() { return level; }
	public void setLevel(int level) { this.level = level; }

	public int getSql_id() { return sql_id; }
	public void setSql_id(int sql_id) { this.sql_id = sql_id; }

	public int getDatasource_id() { return datasource_id; }
	public void setDatasource_id(int datasource_id) { this.datasource_id = datasource_id; }

	public String getDbtype() { return dbtype; }
	public void setDbtype(String dbtype) { this.dbtype = dbtype; }

	public String getIp() { return ip; }
	public void setIp(String ip) { this.ip = ip; }

	public int getPort() { return port; }
	public void setPort(int port) { this.port = port; }

	public String getDbname() { return dbname; }
	public void setDbname(String dbname) { this.dbname = dbname; }

	public String getUsername() { return username; }
	public void setUsername(String username) { this.username = username; }

	public String getPasswd() { return passwd; }
	public void setPasswd(String passwd) { this.passwd = passwd; }

	public String getModule() { return module; }
	public void setModule(String module) { this.module = module; }

	public String getCheck_type() { return check_type; }
	public void setCheck_type(String check_type) { this.check_type = check_type; }

}
--------------------------------------------------------------------------------
/src/main/java/org/datacheck/JobRunner.java:
--------------------------------------------------------------------------------
package org.datacheck;

import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Map;

import org.apache.log4j.Logger;
import
org.apache.log4j.PropertyConfigurator;
import java.util.Date;

// Executes one check job on a pool thread: optionally waits for the job's
// configured "after time", runs the SQL, and records start/finish rows in the
// proj_log table of the metadata database.
public class JobRunner implements Runnable, Serializable {

	JobInfo jobinfo;                 // the job to execute
	ArrayList jobqueue;              // shared pending-job queue (owned by DataCheck)
	Map stautsmap;                   // job run status keyed by sql_id ("S"/"C"/"F")
	String project_en;               // project code, carried into log rows
	int crontab_id;                  // scheduler crontab id (0 for manual runs)
	String scheduler_seq;            // run-wide sequence number / version tag
	Map project_param;               // project-level parameters
	Map runningmap;                  // currently running jobs, keyed by sql_id
	Map pre_runningmap;              // queued-but-not-started jobs, keyed by sql_id
	Map runningmap_aftertime;        // running jobs still waiting for their "after time"

	public JobRunner(JobInfo jobinfo, ArrayList jobqueue, Map stautsmap, String project_en,
			int crontab_id, String scheduler_seq, Map project_param, Map runningmap,
			Map pre_runningmap, Map runningmap_aftertime) {
		this.jobinfo = jobinfo;
		this.jobqueue = jobqueue;
		this.stautsmap = stautsmap;
		this.project_en = project_en;
		this.crontab_id = crontab_id;
		this.scheduler_seq = scheduler_seq;
		this.project_param = project_param;
		this.runningmap = runningmap;
		this.pre_runningmap = pre_runningmap;
		this.runningmap_aftertime = runningmap_aftertime;
	}

	// Marks the job as running, honors the optional after_hour/after_min gate,
	// executes the SQL, then clears the running mark.
	public void run() {
		Logger logger = Logger.getLogger(JobRunner.class.getName());

		if (!runningmap.containsKey(jobinfo.getSql_id())) {
			runningmap.put(jobinfo.getSql_id(), "");
		}
		// time-dependency gate: the job may not start before the configured
		// after_hour / after_min of the current day
		String after_hour = jobinfo.getHour();
		String after_min = jobinfo.getMin();
		if (CommonUtil.is_hour(after_hour) || CommonUtil.is_min(after_min)) {
			logger.info("sql_id:" + jobinfo.getSql_id() + " start time ref" + " after_hour:" + after_hour + " after_min:" + after_min);
			int cnt = 0;
			while (true) {
				// while parked here the job counts as "waiting", so DataCheck
				// does not treat it as occupying a concurrency slot
				if (!runningmap_aftertime.containsKey(jobinfo.getSql_id())) {
					runningmap_aftertime.put(jobinfo.getSql_id(), "");
				}

				// NOTE(review): Date.getHours()/getMinutes() are deprecated and
				// use the JVM default timezone — kept byte-identical here.
				Date currentdate = new Date();
				int hour = currentdate.getHours();
				int min = currentdate.getMinutes();
				// both hour and minute configured: wait until hh:mm is reached
				if (CommonUtil.is_hour(after_hour) && CommonUtil.is_min(after_min)) {
					if (hour > Integer.parseInt(after_hour)
							|| (hour == Integer.parseInt(after_hour) && min >= Integer.parseInt(after_min))) {
						break;
					}

				} // only minute configured: run once the minute is reached
				else if (!CommonUtil.is_hour(after_hour) && CommonUtil.is_min(after_min)) {
					if (min >= Integer.parseInt(after_min)) {
						break;
					}

				} else if (CommonUtil.is_hour(after_hour) && !CommonUtil.is_min(after_min)) {
					// only hour configured: run once the hour is reached
					if (hour >= Integer.parseInt(after_hour)) {
						break;
					}
				}
				try {
					Thread.sleep(20000);
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
				cnt++;
				// give up after 6 hours (1080 polls x 20s)
				if (cnt > 1080) {
					break;
				}

			}
			if (runningmap_aftertime.containsKey(jobinfo.getSql_id())) {
				runningmap_aftertime.remove(jobinfo.getSql_id());
			}
			logger.info("sql_id:" + jobinfo.getSql_id() + " end time ref" + " after_hour:" + after_hour + " after_min:" + after_min);
		}

		execSql();

		if (runningmap.containsKey(jobinfo.getSql_id())) {
			runningmap.remove(jobinfo.getSql_id());
		}

	}

	public int getLog_id() {
		return log_id;
	}

	public void setLog_id(int log_id) {
		this.log_id = log_id;
	}

	// proj_log primary key of this run's log row, set by init_log()
	private int log_id = 0;

	// Inserts the "started" row into proj_log (status 'S') and remembers its
	// auto-increment id for finish_log().
	public void init_log() {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger logger = Logger.getLogger(JobRunner.class.getName());
		Connection con = null;
		Statement sql = null;
		ResultSet rs = null;

		try {
			con = DbCoonect.getConnectionMySql();
			if (con == null) {
				logger.error("connect is null");
				System.exit(0);
			}
			sql = con.createStatement();

			String start_date = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new java.util.Date());
			String datekey = new java.text.SimpleDateFormat("yyyyMMdd").format(new java.util.Date());
			// NOTE(review): values are concatenated into the SQL text; a sql_cn
			// containing a quote would break this statement — consider
			// PreparedStatement.
			String strSql = " insert into proj_log(project_en,proj_crontab_id,proj_scheduler_seq,sql_id,sql_cn,start_date,datekey,program_status,owner,module) values('"
					+ project_en + "'," + crontab_id + ",'" + scheduler_seq + "'," + jobinfo.getSql_id() +",'" + jobinfo.getSql_cn() + "','" + start_date + "','" + datekey + "','"
					+ "S" + "','" + jobinfo.getOwner() + "','" + jobinfo.getModule() + "')";
			sql.executeQuery("set names utf8");
			sql.executeUpdate(strSql);
			// fetch the auto-increment id of the row just inserted
			rs = sql.executeQuery("select last_insert_id()");
			if (rs.next()) {
				setLog_id(rs.getInt(1));
			}

		} catch (Exception e) {
			logger.error(e.getMessage());

		} finally {
			try {
				// closing the connection also releases sql and rs
				con.close();
			} catch (SQLException e) {
				logger.error(e.getMessage());
			}
		}

	}

	// Updates this run's proj_log row with the end time, final status
	// ('C' completed / 'F' failed), diagnostic text and whether the check
	// produced data.
	public void finish_log(String status, String loginfo, int is_hasdata) {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger logger = Logger.getLogger(JobRunner.class.getName());

		Connection con = null;
		Statement sql = null;
		logger.info("sql_id:" + jobinfo.getSql_id() + " status:" + status + " finish_log db connecting ");

		try {
			con = DbCoonect.getConnectionMySql();
			if (con == null) {
				logger.error("connect is null");
				System.exit(0);
			}
			logger.info("sql_id:" + jobinfo.getSql_id() + " status:" + status + " finish_log db connected ");
			sql = con.createStatement();

			String end_date = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new java.util.Date());

			int log_id = getLog_id();
			String strSql = "update proj_log set end_date='" + end_date + "',program_status='" + status + "',loginfo='"
					+ loginfo + "',is_hasdata=" + is_hasdata + " where id =" + log_id;
			logger.info("sql_id:" + jobinfo.getSql_id() + " status:" + status + " finish_log db executeUpdating ");
			sql.executeUpdate(strSql);
			logger.info("sql_id:" + jobinfo.getSql_id()
+ " status:" + status + " finish_log db executeUpdated "); 187 | } catch (Exception e) { 188 | // e.printStackTrace(); 189 | logger.error(e.getMessage()); 190 | 191 | } finally { 192 | try { 193 | con.close(); 194 | } catch (SQLException e) { 195 | logger.error(e.getMessage()); 196 | } 197 | } 198 | } 199 | 200 | public void execSql() { 201 | PropertyConfigurator.configure("conf/log4j.properties"); 202 | Logger logger = Logger.getLogger(JobRunner.class.getName()); 203 | logger.info("sql_id:" + jobinfo.getSql_id() + " sql_cn:" + jobinfo.getSql_cn() + " begin exec"); 204 | // logger.info("sql:" + jobinfo.getIp() + jobinfo.getPort() + jobinfo.getDbname() + jobinfo.getPasswd()); 205 | // logger.info(jobinfo.getSql_text()); 206 | stautsmap.put(jobinfo.getSql_id(), "S"); 207 | init_log(); 208 | Connection con = null; 209 | Statement stmt = null; 210 | ResultSet rs = null; 211 | String status = ""; 212 | int is_hasdata = 0; 213 | try { 214 | 215 | con = DbCoonect.getConnection(jobinfo.getDbtype(), jobinfo.getIp(), jobinfo.getPort(), jobinfo.getDbname(), jobinfo.getUsername(), new String(CommonUtil.decode(jobinfo.getPasswd().getBytes()))); 216 | if (con == null) { 217 | logger.error("connect is null "+jobinfo.getDbtype()+" "+jobinfo.getIp()+" "+jobinfo.getPort()); 218 | //System.exit(0); 219 | status = "F"; 220 | stautsmap.put(jobinfo.getSql_id(), "F"); 221 | logger.info("sql_id: " + jobinfo.getSql_id() + " updated status F in memory"); 222 | finish_log(status, "connect is null "+jobinfo.getDbtype()+" "+jobinfo.getIp()+" "+jobinfo.getPort(), is_hasdata); 223 | logger.info("sql_id: " + jobinfo.getSql_id() + " updated status F in db"); 224 | logger.info("sql_id: " + jobinfo.getSql_id() + ": fail exec " + "connect is null "+jobinfo.getDbtype()+" "+jobinfo.getIp()+" "+jobinfo.getPort()); 225 | CommonUtil.sendmail( 226 | new StringBuilder("Hi," + jobinfo.getOwner()).append("\n").append("sql:" + jobinfo.getSql_text()) 227 | .append("\n").append("执行出错,请尽快修复!").append("错误信息:" + 
"connect is null "+jobinfo.getDbtype()+" "+jobinfo.getIp()+" "+jobinfo.getPort()).toString(), 228 | "数据质量稽核SQL执行出错" + "报警[" + jobinfo.getOwner() + "]"); 229 | return; 230 | } 231 | Date d = new Date(); 232 | String datastr = scheduler_seq; 233 | stmt = con.createStatement(); 234 | rs = stmt.executeQuery(jobinfo.getSql_text()); 235 | ResultSetMetaData rsmd = rs.getMetaData(); 236 | int colCount = rsmd.getColumnCount(); 237 | int datacnt = 0; 238 | 239 | // 读取数据构建邮件内容 240 | logger.info("begin to construct mail..."); 241 | StringBuilder tableBuidler = new StringBuilder(); 242 | String level = jobinfo.getLevel() == 0 ? "普通" : (jobinfo.getLevel() == 1) ? "中" : "高"; 243 | 244 | // 245 | String sqlHtmlStr = jobinfo.getSql_text().replace("<", "<").replace(">", ">"); 246 | tableBuidler.append(getMailCSSHead()); 247 | tableBuidler.append("

").append("任务详情").append("
").append("

"); 248 | tableBuidler.append(""); 249 | tableBuidler.append("").append("").append("").append(""); 250 | tableBuidler.append("").append("").append("").append(""); 251 | tableBuidler.append("").append("").append("").append(""); 252 | tableBuidler.append("").append("").append("").append(""); 253 | tableBuidler.append("").append("").append("").append(""); 254 | tableBuidler.append("
").append("SQL id").append("").append(jobinfo.getSql_id()).append("
").append("重要性").append("").append(level).append("
").append("模块").append("").append(jobinfo.getModule()).append("
").append("检查类型").append("").append(jobinfo.getCheck_type()).append("
").append("SQL语句").append("").append(sqlHtmlStr).append("

"); 255 | // 256 | tableBuidler.append("

").append("任务执行结果").append("
").append("

"); 257 | tableBuidler.append(""); 258 | // 构建表头 259 | for (int i = 1; i <= colCount; i++) { 260 | tableBuidler.append(""); 263 | } 264 | tableBuidler.append(""); 265 | //填充内容 266 | while (rs.next()) { 267 | if (++datacnt > 10) { 268 | break; 269 | } 270 | tableBuidler.append(""); 271 | for (int i = 1; i <= colCount; i++) { 272 | tableBuidler.append(""); 275 | } 276 | tableBuidler.append(""); 277 | } 278 | tableBuidler.append("
"); 261 | tableBuidler.append(rsmd.getColumnName(i)); 262 | tableBuidler.append("
"); 273 | tableBuidler.append(rs.getString(i)); 274 | tableBuidler.append("

"); 279 | tableBuidler.append(getMailCSSTail()); 280 | jobinfo.setSql_text(status); 281 | if (datacnt > 0) { 282 | is_hasdata = 1; 283 | // logger.info("DATA IS:" + tableBuidler.toString()); 284 | // 发邮件 285 | 286 | String title = "[" + jobinfo.getOwner() + "]"+"数据稽核"+" - "+jobinfo.getModule() + " - " + jobinfo.getCheck_type() + " - " + jobinfo.getSql_cn(); 287 | String content = new String(tableBuidler.toString().getBytes(), "utf-8"); 288 | CommonUtil.sendmail(content, title); 289 | logger.info("send mail successfully, title is " + title); 290 | } 291 | status = "C"; 292 | stautsmap.put(jobinfo.getSql_id(), "C"); 293 | logger.info("sql_id: " + jobinfo.getSql_id() + " updated status C in memory"); 294 | logger.info("sql_id: " + jobinfo.getSql_id() + " updating status C in db"); 295 | finish_log(status, "", is_hasdata); 296 | logger.info("sql_id: " + jobinfo.getSql_id() + " updated status C in db"); 297 | logger.info("sql_id: " + jobinfo.getSql_id() + ": success exec"); 298 | 299 | } catch (Exception e) { 300 | // e.printStackTrace(); 301 | logger.error(e.getMessage()); 302 | status = "F"; 303 | stautsmap.put(jobinfo.getSql_id(), "F"); 304 | logger.info("sql_id: " + jobinfo.getSql_id() + " updated status F in memory"); 305 | finish_log(status, e.getMessage(), is_hasdata); 306 | logger.info("sql_id: " + jobinfo.getSql_id() + " updated status F in db"); 307 | logger.info("sql_id: " + jobinfo.getSql_id() + ": fail exec " + e.getMessage()); 308 | CommonUtil.sendmail( 309 | new StringBuilder("Hi," + jobinfo.getOwner()).append("\n").append("sql:" + jobinfo.getSql_text()) 310 | .append("\n").append("执行出错,请尽快修复!").append("错误信息:" + e.getMessage()).toString(), 311 | "数据质量稽核SQL执行出错" + "报警[" + jobinfo.getOwner() + "]"); 312 | 313 | } finally { 314 | try { 315 | con.close(); 316 | } catch (SQLException e) { 317 | logger.error(e.getMessage()); 318 | } 319 | } 320 | 321 | } 322 | 323 | public static String getMailCSSHead() { 324 | return "\n" 325 | + " "; 349 | } 350 | 351 | 
public static String getMailCSSTail() { 352 | return ""; 353 | } 354 | } 355 | -------------------------------------------------------------------------------- /src/main/java/org/datacheck/PropHelper.java: -------------------------------------------------------------------------------- 1 | package org.datacheck; 2 | 3 | 4 | 5 | import org.apache.commons.configuration.AbstractFileConfiguration; 6 | import org.apache.commons.configuration.Configuration; 7 | import org.apache.commons.configuration.ConfigurationException; 8 | import org.apache.commons.configuration.PropertiesConfiguration; 9 | import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy; 10 | 11 | 12 | public class PropHelper { 13 | 14 | private static Configuration config; 15 | 16 | static { 17 | try { 18 | config = new PropertiesConfiguration("conf/config.properties"); 19 | 20 | // ((AbstractFileConfiguration) config).setReloadingStrategy(new FileChangedReloadingStrategy()); 21 | } catch (ConfigurationException e) { 22 | e.printStackTrace(); 23 | } 24 | } 25 | 26 | public static String getStringValue(String key){ 27 | return config.getString(key); 28 | } 29 | 30 | public static String[] getStringsValue(String key){ 31 | return config.getStringArray(key); 32 | } 33 | 34 | public static int getIntegerValue(String key){ 35 | return config.getInt(key); 36 | } 37 | 38 | 39 | 40 | 41 | public static void main(String[] args) { 42 | 43 | System.out.println(PropHelper.getStringValue("cmd")); 44 | 45 | 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/main/java/org/datacheck/SchedJobExec.java: -------------------------------------------------------------------------------- 1 | package org.datacheck; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Date; 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | import java.util.concurrent.ConcurrentHashMap; 8 | 9 | import org.apache.log4j.Logger; 10 | import 
org.apache.log4j.PropertyConfigurator; 11 | import org.quartz.Job; 12 | import org.quartz.JobDataMap; 13 | import org.quartz.JobExecutionContext; 14 | import org.quartz.JobExecutionException; 15 | 16 | public class SchedJobExec implements Job { 17 | public void execute(JobExecutionContext jobCtx)throws JobExecutionException { 18 | PropertyConfigurator.configure("conf/log4j.properties"); 19 | Logger logger = Logger.getLogger(SchedJobExec.class.getName()); 20 | // System.out.println( " triggered. time is:" + (new Date())); 21 | JobDataMap data = jobCtx.getJobDetail().getJobDataMap(); 22 | String project_en=data.getString("project_en"); 23 | int crontab_id=data.getInt("crontab_id"); 24 | // int max=data.getInt("max"); 25 | logger.info("project exec:"+"project_en:"+project_en+" crontab_id:"+crontab_id); 26 | ArrayList jobqueue =new ArrayList(); 27 | Map stautsmap=new ConcurrentHashMap();//job运行状态 28 | Map project_param=new HashMap(); 29 | Map runningmap =new ConcurrentHashMap(); 30 | Map pre_runningmap =new HashMap(); 31 | //设置了after time的作业,正运行还未到达after time时间的作业map列表 32 | Map runningmap_aftertime =new ConcurrentHashMap(); 33 | DataCheck jobmain=new DataCheck(jobqueue, stautsmap, project_param, 34 | runningmap, pre_runningmap, runningmap_aftertime); 35 | jobmain.setProject_en(project_en); 36 | jobmain.setScheduler_seq(new java.text.SimpleDateFormat("yyyyMMddHHmmss").format(new java.util.Date())); 37 | jobmain.setCrontab_id(crontab_id); 38 | // jobmain.setMax(max); 39 | jobmain.init(); 40 | jobmain.start(); 41 | 42 | 43 | } 44 | 45 | 46 | } 47 | -------------------------------------------------------------------------------- /src/main/java/org/datacheck/Test.java: -------------------------------------------------------------------------------- 1 | package org.datacheck; 2 | 3 | import java.sql.Connection; 4 | import java.util.HashMap; 5 | import org.datacheck.CommonUtil; 6 | 7 | public class Test { 8 | public static void main(String[] args) throws Exception 9 | { 10 | 
String ip="10.160.133.79"; 11 | int port=5432; 12 | String db="adfenxi"; 13 | String username="adfenxi"; 14 | String passwd="MjFmZGNlYmQ3MTU2ZDFiOQ=="; 15 | //DbCoonect.getConnection(jobinfo.getDbtype(), jobinfo.getIp(), jobinfo.getPort(), jobinfo.getDbname(), jobinfo.getUsername(), new String(CommonUtil.decode(jobinfo.getPasswd().getBytes()))); 16 | Connection con = DbCoonect.getConnection("greenplum", ip, port, db,username, new String(CommonUtil.decode(passwd.getBytes()))); 17 | if (con == null) { 18 | System.out.println("x"); 19 | 20 | } 21 | else 22 | System.out.println("y"); 23 | 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /target/classes/META-INF/MANIFEST.MF: -------------------------------------------------------------------------------- 1 | Manifest-Version: 1.0 2 | Class-Path: lib/jsoup-1.6.3.jar lib/commons-configuration-1.6.jar lib/ 3 | commons-collections-3.2.1.jar lib/commons-lang-2.4.jar lib/commons-lo 4 | gging-1.1.1.jar lib/commons-digester-1.8.jar lib/commons-beanutils-1. 
5 | 7.0.jar lib/commons-beanutils-core-1.8.0.jar lib/slf4j-api-1.6.6.jar 6 | lib/slf4j-log4j12-1.6.6.jar lib/commons-codec-1.3.jar lib/log4j-1.2.1 7 | 5.jar lib/mail-1.4.jar lib/activation-1.1.jar lib/quartz-2.2.1.jar li 8 | b/c3p0-0.9.1.1.jar lib/mysql-connector-java-5.1.33.jar lib/postgresql 9 | -9.4.1212.jre7.jar 10 | Build-Jdk: 1.7.0_80 11 | Built-By: xumingyang 12 | Created-By: Maven Integration for Eclipse 13 | Main-Class: org.jobstream.JobStream 14 | 15 | -------------------------------------------------------------------------------- /target/classes/META-INF/maven/datacheck/datacheck/pom.properties: -------------------------------------------------------------------------------- 1 | #Generated by Maven Integration for Eclipse 2 | #Mon Jun 26 11:26:33 GMT+08:00 2017 3 | version=0.0.1-SNAPSHOT 4 | groupId=datacheck 5 | m2e.projectName=datacheck 6 | m2e.projectLocation=E\:\\\u4E34\u65F6\\workspace\\datacheck 7 | artifactId=datacheck 8 | -------------------------------------------------------------------------------- /target/classes/META-INF/maven/datacheck/datacheck/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | datacheck 4 | datacheck 5 | 0.0.1-SNAPSHOT 6 | datacheck 7 | datacheck 8 | 9 | UTF-8 10 | 11 | 12 | 13 | org.jsoup 14 | jsoup 15 | 1.6.3 16 | 17 | 18 | commons-configuration 19 | commons-configuration 20 | 1.6 21 | 22 | 23 | 24 | org.slf4j 25 | slf4j-api 26 | 1.6.6 27 | 28 | 29 | org.slf4j 30 | slf4j-log4j12 31 | 1.6.6 32 | 33 | 34 | commons-codec 35 | commons-codec 36 | 1.3 37 | 38 | 39 | 40 | log4j 41 | log4j 42 | 1.2.15 43 | 44 | 45 | com.sun.jmx 46 | jmxri 47 | 48 | 49 | com.sun.jdmk 50 | jmxtools 51 | 52 | 53 | javax.jms 54 | jms 55 | 56 | 57 | 58 | 59 | org.quartz-scheduler 60 | quartz 61 | 2.2.1 62 | 63 | 64 | mysql 65 | mysql-connector-java 66 | 5.1.33 67 | 68 | 69 | org.postgresql 70 | postgresql 71 | 9.4.1212.jre7 72 | 73 | 82 | 83 | 84 | 85 | 86 | compile 87 | 88 | 89 | 90 | 91 | 
92 | 93 | org.eclipse.m2e 94 | 95 | lifecycle-mapping 96 | 97 | 1.0.0 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | org.apache.maven.plugins 110 | 111 | maven-resources-plugin 112 | 113 | [2.0,) 114 | 115 | 116 | 117 | resources 118 | 119 | testResources 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | org.apache.maven.plugins 151 | maven-compiler-plugin 152 | 3.1 153 | 154 | ${jdk.version} 155 | ${jdk.version} 156 | true 157 | 158 | 159 | 160 | 161 | 162 | 163 | org.apache.maven.plugins 164 | maven-resources-plugin 165 | 2.6 166 | 167 | UTF-8 168 | 169 | 170 | 171 | 172 | 173 | org.apache.maven.plugins 174 | maven-install-plugin 175 | 2.4 176 | 177 | 178 | 179 | 180 | org.apache.maven.plugins 181 | maven-clean-plugin 182 | 2.5 183 | 184 | 185 | 186 | 187 | org.apache.maven.plugins 188 | maven-antrun-plugin 189 | 1.7 190 | 191 | 192 | 193 | org.apache.maven.plugins 194 | maven-jar-plugin 195 | 2.4 196 | 197 | 198 | 199 | true 200 | lib/ 201 | org.jobstream.JobStream 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | maven-assembly-plugin 210 | 211 | 212 | false 213 | 214 | conf/package.xml 215 | 216 | 217 | 218 | 219 | 220 | make-assembly 221 | package 222 | 223 | single 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | -------------------------------------------------------------------------------- /target/classes/alert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo "$1" | /usr/local/bin/email -s $2 $3 3 | -------------------------------------------------------------------------------- /target/classes/check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | basedir=$(cd `dirname $0`; pwd) 3 | cd $basedir 4 | source /home/hdp-ads-audit/.bash_profile 5 | pidcnt=`ps -ef | grep 
org.datacheck.CheckScheduler | grep -v grep | wc -l` 6 | echo $pidcnt 7 | if [ $pidcnt -eq 0 ] 8 | then 9 | echo "start alert" 10 | curl -d "group_name=360fenxi_jssetup&subject=datacheck-alert&content=datacheck-alert-error-plase-check" http://alarm.mis.corp.qihoo.net:8360/alarm 11 | sleep 10s 12 | sh start.sh 13 | fi 14 | -------------------------------------------------------------------------------- /target/classes/manual_job.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | #project_en=fenxi_main_day|fenxi_main_hour 3 | #指定项目手工跑批 4 | #usage 1: sh manual_job.sh project_en=xx 5 | #指定项目手工跑批,并传参 6 | #usage 1: sh manual_job.sh project_en=xx '${cdate}'=20160804 7 | #usage 1: sh manual_job.sh project_en=xx '${chour}'=2016080400 8 | 9 | basepath=$(cd `dirname $0`; pwd) 10 | cd $basepath/../ 11 | java -cp datacheck-0.0.1-SNAPSHOT.jar org.datacheck.DataCheck $* 12 | -------------------------------------------------------------------------------- /target/classes/org/datacheck/CheckScheduler.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/CheckScheduler.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/CommonUtil.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/CommonUtil.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/DataCheck$1.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/DataCheck$1.class 
-------------------------------------------------------------------------------- /target/classes/org/datacheck/DataCheck.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/DataCheck.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/DbCoonect.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/DbCoonect.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/JobInfo.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/JobInfo.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/JobRunner.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/JobRunner.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/PropHelper.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/PropHelper.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/SchedJobExec.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/SchedJobExec.class -------------------------------------------------------------------------------- /target/classes/org/datacheck/Test.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/classes/org/datacheck/Test.class -------------------------------------------------------------------------------- /target/classes/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | basedir=$(cd `dirname $0`; pwd) 3 | cd $basedir/../ 4 | pid=`ps -ef|grep org.datacheck.CheckScheduler|grep -v grep|grep -v PPID|awk '{ print $2}'` 5 | if [[ $pid -gt 0 ]] 6 | then 7 | echo "CheckScheduler pid" $pid" exist ,please stop it first" 8 | exit 9 | fi 10 | echo "CheckScheduler Starting..." 11 | nohup java -Xms256m -Xmx2048m -cp datacheck-0.0.1-SNAPSHOT.jar org.datacheck.CheckScheduler >/dev/null 2>&1 & 12 | echo "CheckScheduler Started" 13 | -------------------------------------------------------------------------------- /target/classes/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | pid=`ps -ef|grep org.datacheck.CheckScheduler|grep -v grep|grep -v PPID|awk '{ print $2}'` 3 | if [[ $pid -gt 0 ]] 4 | then 5 | echo "CheckScheduler Stopping..." 
6 | kill -9 $pid 7 | echo "CheckScheduler Stopped" 8 | else 9 | echo "CheckScheduler Not Exist" 10 | fi 11 | -------------------------------------------------------------------------------- /target/datacheck-0.0.1-SNAPSHOT.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/datacheck-0.0.1-SNAPSHOT.jar -------------------------------------------------------------------------------- /target/datacheck-0.0.1-SNAPSHOT.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xmingyang/datacheck/a5a8ed6c420490dc180dbc3965ffe925812834eb/target/datacheck-0.0.1-SNAPSHOT.tar.gz -------------------------------------------------------------------------------- /target/maven-archiver/pom.properties: -------------------------------------------------------------------------------- 1 | #Generated by Maven 2 | #Thu Mar 16 18:51:43 GMT+08:00 2017 3 | version=0.0.1-SNAPSHOT 4 | groupId=datacheck 5 | artifactId=datacheck 6 | -------------------------------------------------------------------------------- /target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst: -------------------------------------------------------------------------------- 1 | org\datacheck\Test.class 2 | org\datacheck\JobInfo.class 3 | org\datacheck\CommonUtil.class 4 | org\datacheck\PropHelper.class 5 | org\datacheck\DbCoonect.class 6 | org\datacheck\DataCheck$1.class 7 | org\datacheck\SchedJobExec.class 8 | org\datacheck\DataCheck.class 9 | org\datacheck\JobRunner.class 10 | org\datacheck\CheckScheduler.class 11 | -------------------------------------------------------------------------------- /target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst: -------------------------------------------------------------------------------- 1 | 
E:\临时\workspace\datacheck\src\main\java\org\datacheck\CheckScheduler.java 2 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\CommonUtil.java 3 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\Test.java 4 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\JobInfo.java 5 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\JobRunner.java 6 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\DataCheck.java 7 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\SchedJobExec.java 8 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\DbCoonect.java 9 | E:\临时\workspace\datacheck\src\main\java\org\datacheck\PropHelper.java 10 | --------------------------------------------------------------------------------