├── demo ├── sql │ ├── init │ │ ├── 9.revision.sql │ │ ├── 3.index.sql │ │ ├── 4.trigger.sql │ │ ├── 1.table.sql │ │ └── 2.data.sql │ ├── diff │ │ ├── reset.sql │ │ └── alter.sql │ ├── tree │ │ ├── renv.sql │ │ ├── fork.sql │ │ ├── revi.sql │ │ ├── json.sql │ │ ├── stbl.sql │ │ ├── test.sql │ │ └── tree.sql │ └── revi │ │ ├── 2019-01-11.sql │ │ ├── 2018-11-18.sql │ │ ├── 2018-11-20.sql │ │ ├── 2018-11-11.sql │ │ └── 2018-11-17.sql └── chk │ ├── sql │ ├── 01.sql │ └── 03.sql │ ├── txt │ ├── 01.txt │ ├── 06.txt │ ├── 10.txt │ ├── 04.txt │ ├── 02.txt │ ├── 05.txt │ ├── 08.txt │ ├── 07.txt │ └── 09.txt │ └── manual.sh ├── art ├── diff_test.go ├── sync_test.go ├── exec_test.go ├── util_test.go ├── revi_test.go ├── tree_test.go ├── sqls_test.go ├── test_base.go ├── init.go ├── sqlx_test.go ├── show_test.go ├── conn.go ├── util.go ├── ctrl_test.go ├── exec.go ├── other_test.go ├── mysql_test.go ├── sync.go ├── conf.go ├── sqls.go ├── show.go ├── help.go ├── revi.go ├── mysql.go └── diff.go ├── go.mod ├── .gitignore ├── changelog.txt ├── godbart.toml ├── LICENSE └── main.go /demo/sql/init/9.revision.sql: -------------------------------------------------------------------------------- 1 | REPLACE INTO sys_schema_version (version, created) VALUES( 2018112001, NOW()); 2 | -------------------------------------------------------------------------------- /demo/chk/sql/01.sql: -------------------------------------------------------------------------------- 1 | -- STR VAL[\n] SCHEMA_CHECK 2 | SHOW DATABASES LIKE 'godbart_%'; 3 | 4 | -- OUT FOR SCHEMA_CHECK 5 | CHK=SCHEMA_CHECK -------------------------------------------------------------------------------- /art/diff_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import "testing" 4 | 5 | func Test_Diff(t *testing.T) { 6 | Diff(dsrc, dstt, map[string]bool{DiffSum: true, DiffTbl: true, DiffTrg: true}, nil) 7 | } 8 | -------------------------------------------------------------------------------- /art/sync_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import "testing" 4 | 5 | func Test_Sync(t *testing.T) { 6 | Sync(dsrc, dstt, map[string]bool{SyncTbl: true, SyncTrg: true, SyncRow: true}, nil) 7 | } 8 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/trydofor/godbart 2 | 3 | go 1.16 4 | 5 | require ( 6 | github.com/go-sql-driver/mysql v1.6.0 7 | github.com/pelletier/go-toml v1.9.3 8 | github.com/urfave/cli v1.22.5 9 | ) 10 | -------------------------------------------------------------------------------- /art/exec_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import "testing" 4 | 5 | func Test_Exec(t *testing.T) { 6 | //MsgLevel = LvlTrace 7 | file := makeFileEntity("../demo/sql/init/1.table.sql", "../demo/sql/init/2.data.sql") 8 | Exec(pref, dsts, file, false) 9 | } 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Binaries for programs and plugins 2 | *.exe 3 | *.exe~ 4 | *.dll 5 | *.so 6 | *.dylib 7 | 8 | # Test binary, build with `go test -c` 9 | *.test 10 | 11 | # Output of the go coverage tool, specifically when used with LiteIDE 12 | *.out 13 | 14 | #ide 15 | .idea/ 16 | vender/ 
17 | release/ -------------------------------------------------------------------------------- /art/util_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "fmt" 5 | "regexp" 6 | "testing" 7 | ) 8 | 9 | func Test_Reg(t *testing.T) { 10 | 11 | fmt.Printf("%t\n", matchEntire(regexp.MustCompile("tx_parcle"), "tx_parcle_01")) 12 | fmt.Printf("%t\n", matchEntire(regexp.MustCompile("tx_parcle.*"), "tx_parcle_01")) 13 | } 14 | -------------------------------------------------------------------------------- /art/revi_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import "testing" 4 | 5 | func Test_Revi(t *testing.T) { 6 | 7 | //file := makeFileEntity("../demo/sql/revi/2018-11-18.sql", "../demo/sql/revi/2018-11-20.sql") 8 | file := makeFileEntity("../demo/sql/revi/2019-01-11.sql") 9 | Revi(pref, dsts, file, "2019030601", mask, "",false) 10 | } 11 | -------------------------------------------------------------------------------- /demo/chk/txt/01.txt: -------------------------------------------------------------------------------- 1 | -- -- SRC ID=2, LINE=2:2 2 | SHOW DATABASES LIKE 'godbart_%'; 3 | 4 | -- -- OUT ID=5, LINE=5:5, FOR 5 | -- CHK='godbart_dev_main'; 6 | 7 | -- -- OUT ID=5, LINE=5:5, FOR 8 | -- CHK='godbart_lcl_main'; 9 | 10 | -- -- OUT ID=5, LINE=5:5, FOR 11 | -- CHK='godbart_prd_2018'; 12 | 13 | -- -- OUT ID=5, LINE=5:5, FOR 14 | -- CHK='godbart_prd_main'; -------------------------------------------------------------------------------- /demo/chk/txt/06.txt: -------------------------------------------------------------------------------- 1 | ==== tree=demo/sql/tree/tree.sql ==== 2 | 3 | id=9 4 | |--id=12 5 | |--id=17 6 | | |--id=20 7 | | |--id=23 8 | | |--id=54 9 | |--id=28 10 | | |--id=31 11 | | |--id=35 12 | | |--id=51 13 | |--id=41 14 | | |--id=44 15 | | |--id=47 16 | |--id=57 17 | |--id=60 18 | |--id=63 19 | ==== debug to see more ==== 20 | -------------------------------------------------------------------------------- /demo/chk/txt/10.txt: -------------------------------------------------------------------------------- 1 | 2 | #DETAIL TABLE=tx_parcel, LEFT(>)=godbart_prd_main, RIGHT(<)=godbart_dev_main 3 | =Col Only Name | No. | Type | Nullable | Default | Comment | Extra 4 | >shelf_time | 15 | datetime | true | | 最新上架时间 | 5 | #DETAIL TABLE=tx_parcel$log, LEFT(>)=godbart_prd_main, RIGHT(<)=godbart_dev_main 6 | =Col Only Name | No. 
| Type | Nullable | Default | Comment | Extra 7 | >shelf_time | 15 | datetime | true | | 最新上架时间 | 8 | -------------------------------------------------------------------------------- /demo/sql/diff/reset.sql: -------------------------------------------------------------------------------- 1 | DROP DATABASE IF EXISTS `godbart_prd_main`; 2 | CREATE DATABASE `godbart_prd_main` DEFAULT CHARACTER SET utf8mb4; 3 | 4 | DROP DATABASE IF EXISTS `godbart_prd_2018`; 5 | CREATE DATABASE `godbart_prd_2018` DEFAULT CHARACTER SET utf8mb4; 6 | 7 | DROP DATABASE IF EXISTS `godbart_dev_main`; 8 | CREATE DATABASE `godbart_dev_main` DEFAULT CHARACTER SET utf8mb4; 9 | 10 | DROP DATABASE IF EXISTS `godbart_lcl_main`; 11 | CREATE DATABASE `godbart_lcl_main` DEFAULT CHARACTER SET utf8mb4; 12 | 13 | -------------------------------------------------------------------------------- /art/tree_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | func Test_Tree(t *testing.T) { 8 | 9 | //MsgLevel = LvlTrace 10 | envs := make(map[string]string) 11 | envs["DATE_FROM"] = "2018-01-01 01:01:01" 12 | envs[EnvRule] = EnvRuleEmpty 13 | BuiltinEnvs(envs) 14 | //file := makeFileEntity("../demo/sql/tree/tree.sql") 15 | //file := makeFileEntity("../demo/sql/tree/renv.sql") 16 | file := makeFileEntity("../demo/sql/tree/stbl.sql") 17 | Tree(pref, envs, dsrc, dstt, file, true) 18 | } 19 | -------------------------------------------------------------------------------- /demo/sql/tree/renv.sql: -------------------------------------------------------------------------------- 1 | -- STR SRC-DB SRC 2 | -- STR OUT-DB OUT 3 | 4 | -- OUT FOR ITSELF 5 | insert into OUT.A select SRC.A where id > 0 6 | 7 | -- ENV SRC-DB 'src_db' 8 | INSERT IGNORE SYS_HOT_SEPARATION SELECT 9 | TABLE_NAME,0,NOW() 10 | FROM 11 | INFORMATION_SCHEMA.TABLES 12 | WHERE 13 | TABLE_SCHEMA = 'src_db'; 14 | 15 | -- ENV OUT-DB 'out_db' 16 | -- OUT FOR ITSELF 17 | INSERT IGNORE SYS_HOT_SEPARATION SELECT 18 | TABLE_NAME,0,NOW() 19 | FROM 20 | INFORMATION_SCHEMA.TABLES 21 | WHERE 22 | TABLE_SCHEMA = 'out_db'; -------------------------------------------------------------------------------- /demo/sql/tree/fork.sql: -------------------------------------------------------------------------------- 1 | -- VAR now 'TIME-NOW' 2 | SELECT NOW() as now; 3 | 4 | -- REF id 'A.id' 5 | SELECT id FROM A; 6 | 7 | -- REF ib 'B.id' 8 | SELECT id FROM B where aid = 'A.id' and upd < 'TIME-NOW' 9 | 10 | -- REF ib 'C.id' 11 | SELECT id FROM C where bid = 'B.id' and upd < 'TIME-NOW' 12 | 13 | -- RUN FOR 'A.id' 调整分叉 14 | SELECT id FROM D where bid = 'B.id' and aid = 'A.id' 15 | 16 | 17 | -- REF eib 'E.id' 18 | -- REF fib 'F.id' 联合多个REF,避免多分叉 19 | SELECT E.id as eid, F.id as fid FROM E,F limit 3 20 | 21 | SELECT id FROM G where eid = 'E.id' and fid='F.id' 22 | -------------------------------------------------------------------------------- /demo/sql/tree/revi.sql: -------------------------------------------------------------------------------- 1 | -- VAR VER v2019010302 2 | SELECT MAX(version) as VER FROM sys_schema_version WHERE version = 2019010302; 3 | -- RUN NOT v2019010302 4 | -- STR tbl `tx_parcel_#` 为分表更新 5 | SELECT tbl FROM ( 6 | SELECT 'tx_parcel_0' AS tbl UNION ALL 7 | SELECT 'tx_parcel_1' UNION ALL 8 | SELECT 'tx_parcel_2' UNION ALL 9 | SELECT 'tx_parcel_3') TMP; 10 | 11 | -- RUN NOT v2019010302 12 | ALTER TABLE `tx_parcel_#` ADD CONSTRAINT uk_track_num UNIQUE (is_deleted, track_num); 13 | -- RUN 
NOT v2019010302 14 | REPLACE INTO sys_schema_version (version, created) VALUES(2019010302, NOW()); -------------------------------------------------------------------------------- /art/sqls_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "io/ioutil" 5 | "testing" 6 | ) 7 | 8 | func Test_ParseSql(t *testing.T) { 9 | 10 | //file := "../demo/sql/tree/test.sql" 11 | //file := "../demo/sql/tree/tree.sql" 12 | file := "../demo/sql/init/2.data.sql" 13 | 14 | bytes, err := ioutil.ReadFile(file) 15 | panicIfErr(err) 16 | 17 | sqls := ParseSqls(pref, &FileEntity{file, string(bytes)}) 18 | 19 | OutTrace("segs------") 20 | for _, x := range sqls { 21 | OutTrace("%#v", x) 22 | } 23 | } 24 | 25 | func Test_DepairQuote(t *testing.T) { 26 | 27 | q2 := "`'12345'`" 28 | OutTrace("%s", q2) 29 | 30 | cnt := countQuotePair(q2) 31 | OutTrace("%d", cnt) 32 | } 33 | -------------------------------------------------------------------------------- /demo/sql/init/3.index.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE `tx_parcel` 2 | ADD INDEX `ix_user_id` (`user_id` ASC), 3 | ADD INDEX `ix_recver_id` (`recver_id`), 4 | ADD UNIQUE `uq_trknum` (`track_num` ASC); 5 | 6 | ALTER TABLE `tx_track` 7 | ADD INDEX `ix_user_id` (`user_id` ASC), 8 | ADD INDEX `ix_parcel_id` (`parcel_id` ASC), 9 | ADD UNIQUE `uq_trknum` (`track_num` ASC); 10 | 11 | ALTER TABLE `tx_parcel_event` 12 | ADD INDEX `ix_user_id` (`user_id` ASC), 13 | ADD INDEX `ix_parcel_id` (`parcel_id` ASC); 14 | 15 | ALTER TABLE `tx_receiver` 16 | ADD INDEX `ix_user_id` (`user_id` ASC), 17 | ADD INDEX `ix_name` (`name` ASC), 18 | ADD INDEX `ix_addr` (`province`,`city`,`district`,`address1` ASC), 19 | ADD INDEX `ix_hash` (`hash` ASC), 20 | ADD FULLTEXT `ft_phone` (`phone` ASC); -------------------------------------------------------------------------------- /demo/sql/tree/json.sql: -------------------------------------------------------------------------------- 1 | -- ENV DATE_FROM '2018-11-23 12:34:56' 2 | 3 | -- 生成 TSV 4 | -- STR COL[\t] $TSV_HEAD 5 | -- STR VAL[\t] $TSV_DATA 6 | 7 | SELECT * FROM tx_parcel WHERE create_time > '2018-11-23 12:34:56' LIMIT 2; 8 | 9 | -- OUT ONE $TSV_HEAD 10 | $TSV_HEAD ; 11 | -- OUT FOR $TSV_HEAD 12 | $TSV_DATA ; 13 | 14 | -- 生成CSV 15 | -- STR "COL[]" $CSV_HEAD 16 | -- STR "VAL[]" $CSV_DATA 17 | SELECT * FROM tx_parcel WHERE create_time > '2018-11-23 12:34:56' LIMIT 3; 18 | 19 | -- OUT ONE $CSV_HEAD 20 | $CSV_HEAD ; 21 | -- OUT FOR $CSV_DATA 22 | $CSV_DATA ; 23 | 24 | 25 | -- 生成JSON,此时有 脱引号,模式的组合 26 | -- STR `"COL[,\n]" = VAL[]` `$JSON_FIELD` 27 | 28 | -- REF ID 990003 29 | SELECT * FROM tx_parcel WHERE create_time > '2018-11-23 12:34:56' LIMIT 4; 30 | 31 | -- OUT FOR 990003 32 | { 33 | $JSON_FIELD 34 | } ; 35 | -------------------------------------------------------------------------------- /demo/sql/revi/2019-01-11.sql: -------------------------------------------------------------------------------- 1 | -- ------------------------------------------- 2 | SELECT max(version) FROM sys_schema_version; 3 | 4 | -- TBL tx_parcel(\$log)? 
`tx_parcel#` 5 | ALTER TABLE `tx_parcel#` DROP COLUMN `shelf_time`; 6 | 7 | REPLACE INTO sys_schema_version (version, created) VALUES( 2019011101, NOW()); 8 | 9 | -- ------------------------------------------- 10 | SELECT max(version) FROM sys_schema_version; 11 | 12 | update sys_schema_version set created = now(); 13 | 14 | REPLACE INTO sys_schema_version (version, created) VALUES( 2019030601, NOW()); 15 | 16 | -- ------------------------------------------- 17 | SELECT max(version) FROM sys_schema_version; 18 | 19 | update sys_schema_version set created = now(); 20 | 21 | REPLACE INTO sys_schema_version (version, created) VALUES( 2019030701, NOW()); 22 | 23 | -------------------------------------------------------------------------------- /art/test_base.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "io/ioutil" 5 | ) 6 | 7 | var ( 8 | pref = &Preference{"mysql", ";", "DELIMITER", "--", []string{"/*", "*/"}, "2006-01-02 15:04:05.000", 59062, 10, 2} 9 | dsrc = &DataSource{"prd_main", "trydofor:moilioncircle@tcp(127.0.0.1:3306)/godbart_prd_main"} 10 | ddst = &DataSource{"prd_2018", "trydofor:moilioncircle@tcp(127.0.0.1:3306)/godbart_prd_2018"} 11 | dsts = []*DataSource{dsrc, ddst} 12 | dstt = []*DataSource{ddst} 13 | mask = "[0-9]{10,}" 14 | ) 15 | 16 | func makeFileEntity(file ...string) []FileEntity { 17 | rst := make([]FileEntity, len(file)) 18 | for i, f := range file { 19 | data, err := ioutil.ReadFile(f) 20 | panicIfErr(err) 21 | rst[i] = FileEntity{f, string(data)} 22 | } 23 | return rst 24 | } 25 | 26 | func panicIfErr(err error) { 27 | if err != nil { 28 | panic(err) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /art/init.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | const ( 4 | LvlDebug = 300 5 | LvlTrace = 200 6 | LvlError = 100 7 | 8 | EnvSrcDb = "SRC-DB" 9 | EnvOutDb = "OUT-DB" 10 | EnvUser = "USER" 11 | EnvHost = "HOST" 12 | EnvDate = "DATE" 13 | EnvRule = "ENV-CHECK-RULE" 14 | EnvRuleEmpty = "EMPTY" 15 | 16 | SqlNull = "NULL" 17 | SqlTrue = "TRUE" 18 | SqlFalse = "FALSE" 19 | 20 | DiffSum = "sum" // 分别对比`-s`和多个`-d` 间的表名差异 21 | DiffTrg = "trg" // 比较 trigger 22 | DiffTbl = "tbl" // 比较 column, index 23 | 24 | Joiner = "\n" 25 | 26 | SyncTbl = "tbl" // 同步表和索引 27 | SyncTrg = "trg" // 同步trigger 28 | SyncRow = "row" // 同步数据 29 | ) 30 | 31 | var ( 32 | MsgLevel = LvlDebug 33 | DiffType = map[string]bool{DiffSum: true, DiffTrg: true, DiffTbl: true} 34 | SyncType = map[string]bool{SyncTbl: true, SyncTrg: true, SyncRow: true} 35 | EmptyArr = make([]interface{}, 0) 36 | CtrlRoom = &Room{} 37 | ) 38 | -------------------------------------------------------------------------------- /demo/sql/init/4.trigger.sql: -------------------------------------------------------------------------------- 1 | 2 | DROP TABLE IF EXISTS `tx_parcel$log`; 3 | CREATE TABLE `tx_parcel$log` AS SELECT * FROM `tx_parcel` WHERE 1=0; 4 | ALTER TABLE `tx_parcel$log` ADD COLUMN `_id` INT(11) NOT NULL AUTO_INCREMENT, ADD PRIMARY KEY (`_id`); 5 | ALTER TABLE `tx_parcel$log` ADD COLUMN `_du` INT(11) NULL ; 6 | ALTER TABLE `tx_parcel$log` ADD COLUMN `_dt` DATETIME NULL ; 7 | 8 | DROP TRIGGER IF EXISTS `tx_parcel$log$bu`; 9 | DELIMITER $$ 10 | CREATE TRIGGER `tx_parcel$log$bu` BEFORE UPDATE ON `tx_parcel` 11 | FOR EACH ROW BEGIN 12 | insert into `tx_parcel$log` select *, null, 1, now() from `tx_parcel` where id= OLD.id; 13 | END $$ 14 
| DELIMITER ; 15 | 16 | DROP TRIGGER IF EXISTS `tx_parcel$log$bd`; 17 | DELIMITER $$ 18 | CREATE TRIGGER `tx_parcel$log$bd` BEFORE DELETE ON `tx_parcel` 19 | FOR EACH ROW BEGIN 20 | insert into `tx_parcel$log` select *, null, 2, now() from `tx_parcel` where id= OLD.id; 21 | END $$ 22 | DELIMITER ; -------------------------------------------------------------------------------- /demo/sql/revi/2018-11-18.sql: -------------------------------------------------------------------------------- 1 | -- ------------------------------------------- 2 | SELECT max(version) FROM sys_schema_version; 3 | 4 | ALTER TABLE `tx_parcel` 5 | ADD INDEX `ix_user_id` (`user_id` ASC), 6 | ADD INDEX `ix_recver_id` (`recver_id`), 7 | ADD UNIQUE `uq_trknum` (`track_num` ASC); 8 | 9 | ALTER TABLE `tx_track` 10 | ADD INDEX `ix_user_id` (`user_id` ASC), 11 | ADD INDEX `ix_parcel_id` (`parcel_id` ASC), 12 | ADD UNIQUE `uq_trknum` (`track_num` ASC); 13 | 14 | ALTER TABLE `tx_parcel_event` 15 | ADD INDEX `ix_user_id` (`user_id` ASC), 16 | ADD INDEX `ix_parcel_id` (`parcel_id` ASC); 17 | 18 | ALTER TABLE `tx_receiver` 19 | ADD INDEX `ix_user_id` (`user_id` ASC), 20 | ADD INDEX `ix_name` (`name` ASC), 21 | ADD INDEX `ix_addr` (`province`,`city`,`district`,`address1` ASC), 22 | ADD INDEX `ix_hash` (`hash` ASC), 23 | ADD FULLTEXT `ft_phone` (`phone` ASC); 24 | 25 | REPLACE INTO sys_schema_version (version, created) VALUES( 2018111801, NOW()); 26 | -------------------------------------------------------------------------------- /demo/chk/sql/03.sql: -------------------------------------------------------------------------------- 1 | -- STR COL[\n]=VAL[] sys_hot_separation.VAL 2 | SELECT table_name, checked_id FROM sys_hot_separation; 3 | 4 | -- OUT FOR sys_hot_separation.VAL 5 | sys_hot_separation.VAL 6 | 7 | -- STR COL[\n]=VAL[] sys_schema_version.VAL 8 | SELECT version FROM sys_schema_version; 9 | 10 | -- OUT FOR sys_schema_version.VAL 11 | sys_schema_version.VAL 12 | 13 | -- STR COL[\n]=VAL[] tx_parcel.VAL 14 | SELECT * FROM tx_parcel; 15 | 16 | -- OUT FOR tx_parcel.VAL 17 | tx_parcel.VAL 18 | 19 | -- STR COL[\n]=VAL[] tx_parcel_event.VAL 20 | SELECT * FROM tx_parcel_event; 21 | 22 | -- OUT FOR tx_parcel_event.VAL 23 | tx_parcel_event.VAL 24 | 25 | -- STR COL[\n]=VAL[] tx_parcel$log.VAL 26 | SELECT * FROM tx_parcel$log; 27 | 28 | -- OUT FOR tx_parcel$log.VAL 29 | tx_parcel$log.VAL 30 | 31 | -- STR COL[\n]=VAL[] tx_receiver.VAL 32 | SELECT * FROM tx_receiver; 33 | 34 | -- OUT FOR tx_receiver.VAL 35 | tx_receiver.VAL 36 | 37 | -- STR COL[\n]=VAL[] tx_track.VAL 38 | SELECT * FROM tx_track; 39 | 40 | -- OUT FOR tx_track.VAL 41 | tx_track.VAL -------------------------------------------------------------------------------- /art/sqlx_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "io/ioutil" 5 | "testing" 6 | ) 7 | 8 | 9 | 10 | func Test_ParseSqlx(t *testing.T) { 11 | 12 | //MsgLevel=LvlTrace 13 | //file := "../demo/sql/tree/test.sql" 14 | //file := "../demo/sql/tree/tree.sql" 15 | file := "../demo/sql/tree/stbl.sql" 16 | //file := "../demo/sql/init/1.table.sql" 17 | bytes, err := ioutil.ReadFile(file) 18 | panicIfErr(err) 19 | 20 | sqls := ParseSqls(pref, &FileEntity{file, string(bytes)}) 21 | 22 | envs := make(map[string]string) 23 | envs["DATE_FROM"] = "2018-11-30 10:31:20" 24 | envs["带空格的 时间"] = "2018-11-30 10:31:20" 25 | BuiltinEnvs(envs) 26 | 27 | sqlx, err := ParseSqlx(sqls, envs) 28 | panicIfErr(err) 29 | 30 | OutTrace("==== envx ====") 31 | for 
k, v := range sqlx.Envs { 32 | OutTrace("%s=%s", k, v) 33 | } 34 | 35 | OutTrace("==== exes ====") 36 | for _, x := range sqlx.Exes { 37 | OutTrace("%v", x) 38 | } 39 | 40 | OutTrace("==== summary ====") 41 | for _, x := range sqlx.Exes { 42 | OutTrace("%v", x.Tree()) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /demo/sql/tree/stbl.sql: -------------------------------------------------------------------------------- 1 | -- 先创建01-10,共10个表 2 | -- SEQ tx_parcel_%02d[1,10] tx_parcel_##create 3 | CREATE TABLE IF NOT EXISTS `tx_parcel_##create` like `tx_parcel`; 4 | -- RUN FOR tx_parcel_##create 5 | INSERT IGNORE `tx_parcel_##create` SELECT * FROM `tx_parcel` limit 1; 6 | -- OUT FOR tx_parcel_##create 7 | CREATE TABLE IF NOT EXISTS `tx_parcel_##create` like `tx_parcel`; 8 | 9 | 10 | -- TBL tx_parcel_\d+ tx_parcel_##select 11 | -- REF id 'tx_parcel.id' #提取 id,作为'tx_parcel.id'节点 12 | -- STR VAL[] 'tx_parcel.VALS' 13 | SELECT * FROM `tx_parcel_##select` limit 1; 14 | 15 | -- OUT FOR 'tx_parcel.VALS' 16 | REPLACE INTO `tx_parcel_##select` VALUES ('tx_parcel.VALS'); 17 | 18 | -- RUN FOR 'tx_parcel.id' # 需要使用 RUN FOR,否则会按顺序立即执行。 19 | DELETE FROM `tx_parcel_##select` where id = 'tx_parcel.id'; 20 | 21 | -- TBL .*\$log any$log 22 | DELETE FROM `any$log` where create_time < now() - interval 1 year ; 23 | 24 | -- TBL tx_parcel_\d+ parcel_split 25 | DROP TABLE IF EXISTS `parcel_split`; 26 | -- OUT FOR parcel_split 27 | DROP TABLE IF EXISTS `parcel_split`; 28 | -------------------------------------------------------------------------------- /changelog.txt: -------------------------------------------------------------------------------- 1 | v0.9.9 2 | fix 分析revi的bug 3 | pro 在命令行增加help和example 4 | mod go升级到1.16,依赖包更新,macOs编译 5 | 6 | v0.9.8 7 | new 增加`show`命令,提供ddl功能 8 | new 在配置中增加sql模板 9 | mod 把`ddl`功能,从`diff`挪到`show` 10 | mod 把 `-t`参数,为逗号分割的多值 11 | pro `revi`不执行版本号查询sql 12 | pro manual.sh脚本增加日志检查 13 | pro 为COL模板增加缩进 14 | fix Conn接口增加TableNotFound方法,处理tree中不存在的表 15 | fix `revi`不识别format后换行的版本号脚本 16 | 17 | v0.9.7 18 | 增加`SEQ|TBL`指令,以支持分表的操作。 19 | 增加sqlx解析的后置检查。 20 | `revi`和`exec`支持`SEQ和TBL` 21 | `tree` 全面支持所有指令 22 | `diff`先排序,再差分,有序输出。 23 | 增加测试手册脚本,可做回归测试。 24 | 25 | v0.9.6 26 | 增加`VAR`指令,相当于`REF`,但不`挂树`。 27 | 调整`RUN|OUT`执行权重,只按行号排序。 28 | 调整`FOR|ONE|END`,引用的占位为NULL时,不执行。 29 | `tree`增加统计信息输出。 30 | 控制端口,增加stat命令。 31 | 调整日志级别和退出 32 | 修复控制端口缺陷和bug 33 | `sync`增加row选项,同步小表的数据。 34 | 增加`diff`的`col`用来比较除trigger外 35 | 36 | v0.9.5 37 | 因kill有点吓人,参数`-k`变为`-t` 38 | 精简输出,增加输出级别 `-l` 39 | `ENV`中可执行SQL初始变量 40 | `ENV`中增加内置变量 SRC-DB和OUT-DB 41 | `RUN|OUT` 增加内置`ITSELF`表示独立执行。 42 | 少量bug修复,代码调整 43 | 修改README 44 | 45 | v0.9.4 46 | #3 从A复制schema到B 47 | 48 | v0.9.3 49 | #1 通过控制端口(高位TCP)检查单例执行。 50 | #2 对Tree支持控制端口执行简单的控制命令。 51 | -------------------------------------------------------------------------------- /demo/sql/revi/2018-11-20.sql: -------------------------------------------------------------------------------- 1 | -- ------------------------------------------- 2 | SELECT max(version) FROM sys_schema_version; 3 | 4 | DROP TABLE IF EXISTS `tx_parcel$log`; 5 | CREATE TABLE `tx_parcel$log` AS SELECT * FROM `tx_parcel` WHERE 1=0; 6 | ALTER TABLE `tx_parcel$log` ADD COLUMN `_id` INT(11) NOT NULL AUTO_INCREMENT, ADD PRIMARY KEY (`_id`); 7 | ALTER TABLE `tx_parcel$log` ADD COLUMN `_du` INT(11) NULL ; 8 | ALTER TABLE `tx_parcel$log` ADD COLUMN `_dt` DATETIME NULL ; 9 | 10 | DROP TRIGGER IF EXISTS `tx_parcel$log$bu`; 11 | DELIMITER $$ 12 | CREATE TRIGGER `tx_parcel$log$bu` BEFORE 
UPDATE ON `tx_parcel` 13 | FOR EACH ROW BEGIN 14 | insert into `tx_parcel$log` select *, null, 1, now() from `tx_parcel` where id= OLD.id; 15 | END $$ 16 | DELIMITER ; 17 | 18 | DROP TRIGGER IF EXISTS `tx_parcel$log$bd`; 19 | DELIMITER $$ 20 | CREATE TRIGGER `tx_parcel$log$bd` BEFORE DELETE ON `tx_parcel` 21 | FOR EACH ROW BEGIN 22 | insert into `tx_parcel$log` select *, null, 2, now() from `tx_parcel` where id= OLD.id; 23 | END $$ 24 | DELIMITER ; 25 | 26 | REPLACE INTO sys_schema_version (version, created) VALUES( 2018112001, NOW()); 27 | -------------------------------------------------------------------------------- /demo/sql/tree/test.sql: -------------------------------------------------------------------------------- 1 | -- ENV "带空格的 时间" '2018-00-00 00:00:00' 2 | -- ENV DATE_FROM '2018-11-23 12:34:56' 3 | -- ENV ``SELECT YEAR(NOW())`` '2018-00-01 00:00:00' 4 | 5 | -- STR USER built_env_user # 直接定义 6 | -- STR HOST built_env_host # 直接定义 7 | -- STR DATE built_env_date # 直接定义 8 | 9 | -- STR '2018-00-01 00:00:00' $y4_table #重新定义,以使SQL语法正确。非加引号规则 10 | DROP TABLE IF EXISTS `tx_parcel_$y4_table`; 11 | 12 | CREATE TABLE `tx_parcel_$y4_table` LIKE tx_parcel; 13 | -- 替换后 14 | -- CREATE TABLE tx_parcel_2018 LIKE tx_parcel; 15 | 16 | -- STR VAL[1] 990001 #直接定义。 17 | -- STR "`COL[]` = VAL[]" "logno = -99009" #直接定义,脱壳,加引号,模式展开。 18 | -- REF VAL[,\t] '多值占位值' 19 | -- STR `COL[]` $COLX 20 | SELECT * FROM tx_parcel WHERE create_time > '2018-11-23 12:34:56' LIMIT 2; 21 | 22 | -- OUT FOR 990001 23 | REPLACE INTO tx_parcel ($COLX) VALUES ('多值占位值'); 24 | 25 | -- 替换后 26 | -- REPLACE INTO tx_parcel (`id`) VALUES ('多值占位值'); 27 | UPDATE tx_parcel SET logno = -99009 WHERE id = 990001; 28 | -- 替换后 29 | -- UPDATE tx_parcel SET `id` = VAL[1] ,`create_time` = VAL[2] /*循环加下去,逗号分割*/ WHERE id=990001; 30 | 31 | -- RUN END 990001 # 在src上执行 32 | -- OUT END 990001 # 也在 dst上执行 33 | INSERT IGNORE INTO sys_hot_separation VALUES ('tx_parcel', 990001, NOW()); 34 | 35 | 36 | -- REF max_id 'tx_item_no.max_id' 37 | select null as max_id; 38 | 39 | -- RUN FOR 'tx_item_no.max_id' 40 | replace into sys_hot_separation values ('tx_item_no', 'tx_item_no.max_id', now()); -------------------------------------------------------------------------------- /art/show_test.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "regexp" 5 | "testing" 6 | ) 7 | 8 | func Test_Show(t *testing.T) { 9 | ktpl := []string{ 10 | `tbl`, ` 11 | DROP TABLE IF EXISTS ${TABLE_NAME}; 12 | ${TABLE_DDL}; 13 | `, 14 | 15 | `trg`, ` 16 | DROP TRIGGER IF EXISTS ${TRIGGER_NAME}; 17 | DELIMITER $$ 18 | ${TRIGGER_DDL} $$ 19 | DELIMITER ; 20 | `, 21 | 22 | `log`, ` 23 | DROP TABLE IF EXISTS ${TABLE_NAME}$log ; 24 | -- CREATE TABLE ${TABLE_NAME}$log AS SELECT * FROM ${TABLE_NAME} WHERE 1=0; 25 | 26 | CREATE TABLE ${TABLE_NAME}$log ( 27 | ${COLUMNS_FULL}, 28 | _id int(11) NOT NULL AUTO_INCREMENT, 29 | _du int(11) DEFAULT NULL, 30 | _dt datetime DEFAULT NULL, 31 | PRIMARY KEY (_id) 32 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; 33 | 34 | DROP TRIGGER IF EXISTS ${TABLE_NAME}$log$bu; 35 | DELIMITER $$ 36 | CREATE TRIGGER ${TABLE_NAME}$log$bu BEFORE UPDATE ON ${TABLE_NAME} 37 | FOR EACH ROW BEGIN 38 | insert into ${TABLE_NAME}$log select *, null, 1, now() from ${TABLE_NAME} 39 | where id= OLD.id ; 40 | END $$ 41 | DELIMITER ; 42 | 43 | DROP TRIGGER IF EXISTS ${TABLE_NAME}$log$bd; 44 | DELIMITER $$ 45 | CREATE TRIGGER ${TABLE_NAME}$log$bd BEFORE DELETE ON ${TABLE_NAME} 46 | FOR EACH ROW BEGIN 47 | insert into 
${TABLE_NAME}$log select *, null, 2, now() from ${TABLE_NAME} 48 | where id= OLD.id ; 49 | END $$ 50 | DELIMITER ;`, 51 | } 52 | rgx := []*regexp.Regexp{regexp.MustCompile("tx_parcel")} 53 | Show(dsrc, ktpl, rgx) 54 | } 55 | -------------------------------------------------------------------------------- /art/conn.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import "database/sql" 4 | 5 | type Trg struct { 6 | Name string 7 | Timing string 8 | Event string 9 | Statement string 10 | } 11 | 12 | type Idx struct { 13 | Name string 14 | Uniq bool 15 | Cols string 16 | Type string 17 | } 18 | 19 | type Col struct { 20 | Name string 21 | Seq int 22 | Deft sql.NullString 23 | Null bool 24 | Type string 25 | Key string 26 | Cmnt string 27 | Extr string 28 | } 29 | 30 | type Conn interface { 31 | // Open 打开链接 32 | Open(p *Preference, d *DataSource) (err error) 33 | // DbConn 获得链接 34 | DbConn() (db *sql.DB) 35 | // DbName 数据库名 36 | DbName() string 37 | 38 | // Exec 执行脚本 39 | Exec(qr string, args ...interface{}) (cnt int64, err error) 40 | // Query 执行查询 41 | Query(fn func(*sql.Rows) error, qr string, args ...interface{}) (err error) 42 | 43 | // Tables 获得所有表名 44 | Tables() (tbls []string, err error) 45 | // Columns 获得表的所有字段 46 | Columns(table string) (cls map[string]Col, err error) 47 | // Indexes 获得表的所有索引 48 | Indexes(table string) (ixs map[string]Idx, err error) 49 | // Triggers 获得表的所有触发器 50 | Triggers(table string) (tgs map[string]Trg, err error) 51 | 52 | // DdlTable 生产建表SQL(含索引),格式化的 53 | DdlTable(table string) (ddl string, err error) 54 | // DdlTrigger 生产建触发器SQL,格式化的 55 | DdlTrigger(trigger string) (ddl string, err error) 56 | 57 | // Literal 转成SQL字面量,set x=val的 val部分字面量,是否需要引号扩上 58 | // databaseTypeName sql.ColumnType.DatabaseTypeName 59 | Literal(val interface{}, databaseTypeName string) (string, bool) 60 | // Nothing 数值<=0|布尔false|NULL|字符串""|其他字面量为"" 61 | Nothing(val interface{}) bool 62 | // Quotesc 转义的字符串 63 | Quotesc(str, qto string) string 64 | 65 | TableNotFound(err error) bool 66 | } 67 | -------------------------------------------------------------------------------- /demo/sql/diff/alter.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS `tx_parcel`; 2 | CREATE TABLE `tx_parcel` ( 3 | `id` bigint(20) NOT NULL COMMENT 'ID', 4 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 5 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 6 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 7 | `logno` bigint(19) DEFAULT NULL COMMENT '日志编号', 8 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 9 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 10 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 11 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 12 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 13 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 14 | `weight_dim` decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 15 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 16 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 17 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 18 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 19 | PRIMARY KEY (`id`) 20 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹'; 21 | 22 | ALTER TABLE `tx_parcel` 23 | ADD INDEX `ix_user_id` (`sender_id` ASC), 24 | ADD INDEX `ix_sender_id` 
(`sender_id` ASC), 25 | ADD INDEX `ix_recver_idx` (`recver_id`), 26 | ADD UNIQUE `uq_trknum` (`track_num` ASC); 27 | 28 | DROP TRIGGER IF EXISTS `tx_parcel$log$bu`; 29 | DELIMITER $$ 30 | CREATE TRIGGER `tx_parcel$log$bu` BEFORE UPDATE ON `tx_parcel` 31 | FOR EACH ROW BEGIN 32 | insert into `tx_parcel$log` select *, null, 3, now() from `tx_parcel` where id= OLD.id; 33 | END $$ 34 | DELIMITER ; 35 | 36 | DROP TRIGGER IF EXISTS `tx_parcel$log$bd`; 37 | DROP TABLE IF EXISTS `tx_track`; 38 | -------------------------------------------------------------------------------- /art/util.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "regexp" 5 | "strings" 6 | "time" 7 | ) 8 | 9 | var blankRegexp = regexp.MustCompile("[ \t]+") 10 | 11 | func squashBlank(str string) string { 12 | return blankRegexp.ReplaceAllString(str, " ") 13 | } 14 | 15 | var trimxRegexp = regexp.MustCompile("[ \t]*[\r\n]+[ \t]*") 16 | 17 | func squashTrimx(str string) string { 18 | return trimxRegexp.ReplaceAllString(str, "\n") 19 | } 20 | 21 | var crlfReg = regexp.MustCompile("[ \t]*(\r\n|\r|\n)[ \t]*") // 换行分割并去掉左右空白 22 | func splitLinex(str string) []string { 23 | return crlfReg.Split(str, -1) 24 | } 25 | 26 | func countQuotePair(str string) (cnt int) { 27 | l := len(str) 28 | if l < 2 { 29 | return 0 30 | } 31 | 32 | cnt = 0 33 | for { 34 | i := len(str) - 1 35 | c, e := str[0], str[i] 36 | if c == e && (c == '"' || c == '\'' || c == '`') { 37 | cnt++ 38 | str = str[1:i] 39 | } else { 40 | break 41 | } 42 | } 43 | return 44 | } 45 | 46 | func matchEntire(reg *regexp.Regexp, str string) bool { 47 | ps := reg.FindStringIndex(str) 48 | if len(ps) != 2 { 49 | return false 50 | } 51 | return ps[0] == 0 && ps[1] == len(str) 52 | } 53 | 54 | func fmtTime(t time.Time, f string) string { 55 | if len(f) == 0 { 56 | return t.Format("2006-01-02 15:04:05.000") 57 | } else { 58 | return t.Format(f) 59 | } 60 | } 61 | 62 | func signifySql(str ... string) string { 63 | var sb strings.Builder 64 | // 只保留字母,数字,符号,除引号 65 | for _, s := range str { 66 | for _, c := range s { 67 | if c >= 'A' && c <= 'Z' { 68 | sb.WriteRune(c + 32) // a-A 69 | continue 70 | } 71 | if c == '\'' || c == '"' || c == '`' { 72 | continue // skip 73 | } 74 | if c >= '!' 
&& c <= '~' { 75 | sb.WriteRune(c) 76 | } 77 | } 78 | } 79 | return sb.String() 80 | } 81 | 82 | func isCommaWhite(c rune) bool { 83 | return c == ',' || c == ' ' || c == '\t' || c == '\r' || c == '\n' 84 | } 85 | -------------------------------------------------------------------------------- /art/ctrl_test.go: -------------------------------------------------------------------------------- 1 | // +build manual 2 | 3 | package art 4 | 5 | import ( 6 | "io/ioutil" 7 | "strings" 8 | "sync" 9 | "testing" 10 | "time" 11 | ) 12 | 13 | func Test_MakePass(t *testing.T) { 14 | OutTrace(makePass()) 15 | } 16 | 17 | func Test_Ctrl_Sync(t *testing.T) { 18 | CtrlRoom.Open(59062, CtrlRoomTree, nil) 19 | } 20 | 21 | func testJob(h, v int, s string) { 22 | idt := strings.Repeat("| ", v) 23 | OutTrace("%s<==%d, lvl=%d, at=%s", idt, h, v, s) 24 | CtrlRoom.dealJobx(nil, h) 25 | } 26 | 27 | func mockExe(exe *Exe, lvl int) { 28 | 29 | head := exe.Seg.Head 30 | jobx := true 31 | defer func() { 32 | if jobx { 33 | testJob(head, lvl, "deref") 34 | } 35 | }() 36 | 37 | time.Sleep(time.Second * 3) 38 | idt := strings.Repeat("| ", lvl) 39 | if len(exe.Sons) > 0 { 40 | for i := 0; i < 2; i++ { 41 | jobx = true 42 | OutTrace("%sid=%d, lvl=%d, select=%d", idt, head, lvl, i+1) 43 | for _, v := range exe.Sons { 44 | mockExe(v, lvl+1) 45 | } 46 | jobx = false 47 | testJob(head, lvl, "for") 48 | } 49 | } else { 50 | OutTrace("%sid=%d, lvl=%d, update", idt, head, lvl) 51 | } 52 | } 53 | 54 | func Test_Ctrl_Mock(t *testing.T) { 55 | wg := &sync.WaitGroup{} 56 | wg.Add(1) 57 | go CtrlRoom.Open(59062, CtrlRoomTree, wg) 58 | wg.Wait() 59 | file := "../demo/sql/tree/tree.sql" 60 | //file := "../demo/sql/init/1.table.sql" 61 | bytes, err := ioutil.ReadFile(file) 62 | panicIfErr(err) 63 | 64 | sqls := ParseSqls(pref, &FileEntity{file, string(bytes)}) 65 | 66 | envs := make(map[string]string) 67 | envs["DATE_FROM"] = "2018-11-30 10:31:20" 68 | envs["带空格的 时间"] = "2018-11-30 10:31:20" 69 | BuiltinEnvs(envs) 70 | 71 | sqlx, err := ParseSqlx(sqls, envs) 72 | panicIfErr(err) 73 | 74 | CtrlRoom.putEnv(roomTreeEnvSqlx, sqlx) 75 | for _, e := range sqlx.Exes { 76 | OutTrace(e.Tree()) 77 | } 78 | for { 79 | for _, v := range sqlx.Exes { 80 | mockExe(v, 1) 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /art/exec.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "strings" 5 | "sync" 6 | ) 7 | 8 | func Exec(pref *Preference, dest []*DataSource, file []FileEntity, risk bool) error { 9 | 10 | cnte, cntf, cntd := 0, len(file), len(dest) 11 | var exes []*Exe 12 | 13 | // 解析和计算执行语句 14 | envs := make(map[string]string) 15 | for _, f := range file { 16 | sqls := ParseSqls(pref, &f) 17 | sqlx, er := ParseSqlx(sqls, envs) 18 | if er != nil { 19 | return er 20 | } 21 | exes = append(exes, sqlx.Exes...) 
22 | } 23 | 24 | walkExes(exes, func(exe *Exe) error { 25 | cnte++ 26 | return nil 27 | }) 28 | 29 | LogTrace("exec statements, sql-count=%d, file-count=%d", cnte, cntf) 30 | 31 | // 打开链接 32 | wg := &sync.WaitGroup{} 33 | conn := make([]*MyConn, cntd) 34 | for i, v := range dest { 35 | con, er := openDbAndLog(v) 36 | if er != nil { 37 | return errorAndLog("failed to open db=%s, err=%v", v.Code, er) 38 | } 39 | conn[i] = con 40 | wg.Add(1) 41 | } 42 | 43 | // 多库并发,单库有序 44 | cnt := 0 45 | walkExes(exes, func(exe *Exe) error { 46 | cnt ++ 47 | return nil 48 | }) 49 | 50 | cmn, dlt := pref.LineComment, pref.DelimiterRaw 51 | for _, con := range conn { 52 | cur, ddn := 0, con.DbName() 53 | ctx := make(map[string]interface{}) 54 | gogo := func() { 55 | defer wg.Done() 56 | pureRunExes(exes, ctx, con, func(exe *Exe, stm string) error { 57 | cur++ 58 | sql := exe.Seg 59 | if risk { 60 | a, err := con.Exec(stm) 61 | if err != nil { 62 | LogError("db=%s, %3d/%d, failed to exec sql, id=%3d, line=%s, file=%s, err=%v", ddn, cur, cnt, sql.Head, sql.Line, sql.File, err) 63 | return err 64 | } else { 65 | LogTrace("db=%s, %d/%d, %d affects. id=%3d, line=%s, file=%s", ddn, cur, cnt, a, sql.Head, sql.Line, sql.File) 66 | } 67 | } else { 68 | // 不处理 trigger 新结束符问题。 69 | if strings.Contains(stm, dlt) { 70 | OutTrace("%s find '%s', May Need '%s' to avoid", cmn, dlt, pref.DelimiterCmd) 71 | } 72 | OutTrace("%s db=%s, %3d/%d, id=%3d, line=%s, file=%s", cmn, ddn, cur, cnt, sql.Head, sql.Line, sql.File) 73 | OutDebug("%s%s", sql.Text, dlt) 74 | } 75 | 76 | return nil 77 | }) 78 | } 79 | 80 | if risk { 81 | go gogo() 82 | } else { 83 | gogo() 84 | } 85 | } 86 | 87 | wg.Wait() 88 | return nil 89 | } 90 | -------------------------------------------------------------------------------- /demo/sql/tree/tree.sql: -------------------------------------------------------------------------------- 1 | -- ENV DATE_FROM 'ENV_DATE_FROM' #定义环境变量 2 | -- STR 'ENV_DATE_FROM' $DATE_FROM #重定义,静态替换 3 | 4 | -- REF id 'tx_parcel.id' #提取 id,作为'tx_parcel.id'节点 5 | -- REF recver_id 'tx_parcel.recver_id' # 999777前缀,0001第一个SELECT,002,第二个REF 6 | -- REF track_num 'tx_parcel.track_num' #提取 id,作为'tx_parcel.track_num'节点 7 | -- REF `中文字段` 'tx_parcel.chinese-404' #假设存在,不存在且没引用不报错。 8 | -- STR VAL[] 'tx_parcel.VALS' 9 | SELECT * FROM tx_parcel WHERE create_time <= 'ENV_DATE_FROM'; 10 | 11 | -- OUT FOR 'tx_parcel.id' 12 | REPLACE INTO tx_parcel VALUES ('tx_parcel.VALS'); 13 | 14 | -- REF id 'tx_track.id' #提取id,作为'tx_track.id'节点,父节点为'tx_parcel.track_num' 15 | -- STR 'tx_parcel.track_num' $TRK 16 | -- STR VAL[] 'tx_track.VALS' 17 | SELECT * FROM tx_track WHERE track_num = 'tx_parcel.track_num'; 18 | 19 | -- OUT FOR 'tx_track.id' 20 | REPLACE INTO tx_track VALUES ('tx_track.VALS'); 21 | 22 | -- RUN FOR 'tx_track.id' 23 | DELETE FROM tx_track where id = 'tx_track.id'; 24 | 25 | 26 | -- REF id 'tx_parcel_event.id' #提取id,作为'tx_parcel_event.id'节点,父节点为'tx_parcel.id' 27 | -- STR VAL[] 'tx_parcel_event.VALS' 28 | SELECT * FROM tx_parcel_event WHERE parcel_id = 'tx_parcel.id'; 29 | 30 | -- OUT FOR 'tx_parcel_event.id' 31 | INSERT INTO tx_parcel_event VALUES ('tx_parcel_event.VALS') 32 | ON DUPLICATE KEY UPDATE modify_time = 'ENV_DATE_FROM'; 33 | 34 | -- RUN END 'tx_parcel_event.id' 35 | DELETE FROM tx_parcel_event where parcel_id = 'tx_parcel.id'; 36 | 37 | 38 | -- REF id 'tx_receiver.id' 39 | -- STR `COL[]` `$COLX_9997770004002` # 加引号规则,建议使用SQL合规字符 40 | -- STR VAL[,] 'tx_receiver.VALS' 41 | SELECT * FROM tx_receiver WHERE id = 'tx_parcel.recver_id'; 42 | 43 | -- 
OUT FOR 'tx_receiver.id' 44 | REPLACE INTO tx_receiver ($COLX_9997770004002) VALUES ('tx_receiver.VALS'); 45 | 46 | -- RUN FOR 'tx_receiver.id' 47 | DELETE FROM tx_receiver where id = 'tx_receiver.id'; 48 | 49 | 50 | -- RUN END 'tx_parcel_event.id' 51 | REPLACE INTO sys_hot_separation VALUES ('tx_parcel_event', 'tx_parcel_event.id', now()); -- 单行注释 52 | 53 | -- RUN END 'tx_track.id' 54 | REPLACE INTO sys_hot_separation VALUES ('tx_track', /*内嵌多行注释*/ 'tx_track.id', now()); 55 | 56 | -- RUN END 'tx_parcel.id' 57 | REPLACE INTO sys_hot_separation VALUES ('tx_parcel', 'tx_parcel.id', now()); 58 | 59 | -- RUN FOR 'tx_parcel.id' 60 | DELETE FROM tx_parcel where id = 'tx_parcel.id'; 61 | 62 | -- RUN END 'tx_parcel.id' #存在'tx_parcel.id'节点时执行,即'tx_parcel.id'不为空 63 | DELETE FROM tx_parcel$log WHERE create_time <= 'ENV_DATE_FROM'; -------------------------------------------------------------------------------- /godbart.toml: -------------------------------------------------------------------------------- 1 | [preference] 2 | # 数据库类型,目前只支持mysql 3 | databasetype = 'mysql' 4 | # 默认分割法 5 | delimiterraw = ';' 6 | # 重定义分隔符 7 | delimitercmd = 'DELIMITER' 8 | 9 | # 注释部分,只做字符串查找,不做语法分析,所以不能有效处理字符串转义。 10 | # 单多行注释 11 | linecomment = '--' 12 | # 多行注释,必须成对出现 13 | multcomment = ['/*','*/'] 14 | 15 | # 日期格式化 16 | fmtdatetime = '2006-01-02 15:04:05.000' 17 | 18 | # 控制端口号。负数和零关闭此功能,端口号建议(1024-65535) 19 | # 注意程序启动时,会输出远程连接的控制密码。 20 | # 当通过 127.0.0.* 连接时,可以不输入密码 21 | controlport = 59062 22 | 23 | # 连接池有关的,sql.DB一致 24 | connmaxopen = 10 25 | connmaxidel = 2 26 | 27 | [sqltemplet] 28 | # 模板在`godbart.toml`中的`sqltemplet`里配置,`key`就是`-t` 参数,多个时用`,`分割。 29 | # 模板使用的`变量`全都存在时,输出模板,全都不存在时不输出,其他则报错。 30 | # 31 | # 系统内置了以下`变量`,不想使用`${}`不可以省略。 32 | # 33 | # * ${TABLE_NAME} string, 当前table名 34 | # * ${TABLE_DDL} string, 当前table的DDL 35 | # * ${TRIGGER_NAME} []string, 当前table的trigger名 36 | # * ${TRIGGER_DDL} []string, 当前table的trigger的DDL 37 | # * ${COLUMNS_BASE} string, 当前table的所有列的基本信息(名字和类型)。 38 | # * ${COLUMNS_FULL} string, 当前table的所有列的全部信息(同创建时,创建DDL必须一行一列,否则解析可能错误)。 39 | 40 | tbl=''' 41 | DROP TABLE IF EXISTS `${TABLE_NAME}`; 42 | ${TABLE_DDL}; 43 | ''' 44 | 45 | trg=''' 46 | DROP TRIGGER IF EXISTS `${TRIGGER_NAME}`; 47 | DELIMITER $$ 48 | ${TRIGGER_DDL} $$ 49 | DELIMITER ; 50 | ''' 51 | 52 | log=''' 53 | DROP TABLE IF EXISTS `${TABLE_NAME}$log` ; 54 | -- CREATE TABLE `${TABLE_NAME}$log` AS SELECT * FROM `${TABLE_NAME}` WHERE 1=0; 55 | 56 | CREATE TABLE `${TABLE_NAME}$log` ( 57 | ${COLUMNS_FULL}, 58 | `_id` int(11) NOT NULL AUTO_INCREMENT, 59 | `_du` int(11) DEFAULT NULL, 60 | `_dt` datetime DEFAULT NULL, 61 | PRIMARY KEY (`_id`) 62 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; 63 | 64 | DROP TRIGGER IF EXISTS `${TABLE_NAME}$log$bu`; 65 | DELIMITER $$ 66 | CREATE TRIGGER `${TABLE_NAME}$log$bu` BEFORE UPDATE ON `${TABLE_NAME}` 67 | FOR EACH ROW BEGIN 68 | insert into `${TABLE_NAME}$log` select *, null, 1, now() from `${TABLE_NAME}` 69 | where id= OLD.id ; 70 | END $$ 71 | DELIMITER ; 72 | 73 | DROP TRIGGER IF EXISTS `${TABLE_NAME}$log$bd`; 74 | DELIMITER $$ 75 | CREATE TRIGGER `${TABLE_NAME}$log$bd` BEFORE DELETE ON `${TABLE_NAME}` 76 | FOR EACH ROW BEGIN 77 | insert into `${TABLE_NAME}$log` select *, null, 2, now() from `${TABLE_NAME}` 78 | where id= OLD.id ; 79 | END $$ 80 | DELIMITER ; 81 | ''' 82 | 83 | [datasource] 84 | # 数据源名字和信息(和驱动有关) 85 | prd_main = 'trydofor:moilioncircle@tcp(127.0.0.1:3306)/godbart_prd_main' 86 | prd_2018 = 'trydofor:moilioncircle@tcp(127.0.0.1:3306)/godbart_prd_2018' 87 | dev_main = 
'trydofor:moilioncircle@tcp(127.0.0.1:3306)/godbart_dev_main' 88 | lcl_main = 'trydofor:moilioncircle@tcp(127.0.0.1:3306)/godbart_lcl_main' 89 | lcl_test = 'trydofor:moilioncircle@tcp(127.0.0.1:3306)/test' 90 | -------------------------------------------------------------------------------- /art/other_test.go: -------------------------------------------------------------------------------- 1 | // +build manual 2 | 3 | package art 4 | 5 | import ( 6 | "fmt" 7 | "io/ioutil" 8 | "net/http" 9 | "os" 10 | "os/user" 11 | "strings" 12 | "testing" 13 | "time" 14 | ) 15 | 16 | func Test_Env(t *testing.T) { 17 | name, _ := os.Hostname() 18 | OutTrace(name) 19 | current, _ := user.Current() 20 | OutTrace(current.Username) 21 | } 22 | 23 | func Test_Append(t *testing.T) { 24 | arr := []string{"0"} 25 | OutTrace("arr p=%p, l=%d, arr[-1]=%q", &arr, len(arr), arr[len(arr)-1]) 26 | appval(arr) 27 | OutTrace("arr p=%p, l=%d, arr[-1]=%q", &arr, len(arr), arr[len(arr)-1]) 28 | 29 | arr = []string{"0"} 30 | OutTrace("arr p=%p, l=%d, arr[-1]=%q", &arr, len(arr), arr[len(arr)-1]) 31 | appptr(&arr) 32 | OutTrace("arr p=%p, l=%d, arr[-1]=%q", &arr, len(arr), arr[len(arr)-1]) 33 | } 34 | 35 | func appval(arr []string) { 36 | for i := 0; i < 20; i++ { 37 | arr = append(arr, "val") 38 | } 39 | } 40 | 41 | func appptr(arr *[]string) { 42 | for i := 0; i < 20; i++ { 43 | *arr = append(*arr, "ptr") 44 | } 45 | } 46 | 47 | func Test_Point(t *testing.T) { 48 | 49 | arr := []string{"1", "2", "3"} 50 | OutTrace("----") 51 | OutTrace("arr p=%p", &arr) 52 | OutTrace("arr[0] p=%p", &arr[0]) 53 | OutTrace("arr[0] v=%q", arr[0]) 54 | 55 | var infun = func(arr []string) { 56 | OutTrace("--infun--") 57 | OutTrace("arr p=%p", &arr) 58 | OutTrace("arr[0] p=%p", &arr[0]) 59 | OutTrace("arr[0] v=%q", arr[0]) 60 | arr[0] = "infun" 61 | } 62 | 63 | prtval(arr) 64 | prtptr(&arr) 65 | infun(arr) 66 | 67 | OutTrace("----") 68 | OutTrace("arr p=%p", &arr) 69 | OutTrace("arr[0] p=%p", &arr[0]) 70 | OutTrace("arr[0] v=%q", arr[0]) 71 | } 72 | 73 | func prtval(arr []string) { 74 | OutTrace("--prtval--") 75 | OutTrace("arr p=%p", &arr) 76 | OutTrace("arr[0] p=%p", &arr[0]) 77 | OutTrace("arr[0] v=%q", arr[0]) 78 | arr[0] = "prtval" 79 | } 80 | 81 | func prtptr(arr *[]string) { 82 | OutTrace("--prtptr--") 83 | OutTrace("arr p=%p", arr) 84 | OutTrace("arr[0] p=%p", &(*arr)[0]) 85 | OutTrace("arr[0] v=%q", (*arr)[0]) 86 | (*arr)[0] = "prtptr" 87 | } 88 | 89 | func Test_HttpPost(t *testing.T) { 90 | client := &http.Client{} 91 | url := "" 92 | for i := 0; i < 10; i++ { 93 | for a := 'a'; a <= 'z'; a++ { 94 | code := fmt.Sprintf("%d---%c", i, a) 95 | payload := strings.NewReader("reginvcode=" + code) 96 | req, _ := http.NewRequest("POST", url, payload) 97 | //设置header 98 | req.Header.Add("Connection", "keep-alive") 99 | req.Header.Add("Pragma", "no-cache") 100 | req.Header.Add("Cache-Control", "no-cache") 101 | 102 | res, _ := client.Do(req) 103 | body, _ := ioutil.ReadAll(res.Body) 104 | OutTrace("%s,%s", code, body) 105 | res.Body.Close() 106 | time.Sleep(3 * time.Second) 107 | } 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /art/mysql_test.go: -------------------------------------------------------------------------------- 1 | // +build database 2 | 3 | package art 4 | 5 | import ( 6 | "database/sql" 7 | "fmt" 8 | "github.com/go-sql-driver/mysql" 9 | "reflect" 10 | "strings" 11 | "testing" 12 | ) 13 | 14 | func Test_MyConn(t *testing.T) { 15 | 16 | conn := MyConn{} 17 | 
panicIfErr(conn.Open(pref, dsrc)) 18 | 19 | OutTrace("database: " + conn.DbName()) 20 | tables, err := conn.Tables() 21 | panicIfErr(err) 22 | 23 | OutTrace("tables: " + strings.Join(tables, "\n\t")) 24 | 25 | OutTrace("columns ------") 26 | cols, err := conn.Columns("tx_parcel") 27 | panicIfErr(err) 28 | 29 | for _, v := range cols { 30 | OutTrace("\t%v", v) 31 | } 32 | 33 | OutTrace("indexes ------") 34 | idxs, err := conn.Indexes("tx_receiver") 35 | panicIfErr(err) 36 | 37 | for _, v := range idxs { 38 | OutTrace("\t%v", v) 39 | } 40 | 41 | OutTrace("trigger ------") 42 | trgs, err := conn.Triggers("tx_parcel") 43 | panicIfErr(err) 44 | 45 | for _, v := range trgs { 46 | OutTrace("\t%v", v) 47 | } 48 | 49 | OutTrace("create table ------") 50 | ctb, err := conn.DdlTable("tx_parcel$log") 51 | panicIfErr(err) 52 | 53 | OutTrace(ctb) 54 | 55 | OutTrace("create trigger ------") 56 | ctg, err := conn.DdlTrigger("tx_parcel$log$bu") 57 | panicIfErr(err) 58 | 59 | OutTrace(ctg) 60 | 61 | OutTrace("select args ------") 62 | 63 | var qf = func(rw *sql.Rows) error { 64 | for rw.Next() { 65 | var id int64 66 | rw.Scan(&id) 67 | OutTrace("%d", id) 68 | } 69 | return nil 70 | } 71 | e := conn.Query(qf, "SELECT id FROM tx_parcel WHERE id <= ? and track_num != '??????'", "1163922") 72 | OutTrace("%v", e) 73 | } 74 | 75 | func Test_Query(t *testing.T) { 76 | conn := MyConn{} 77 | panicIfErr(conn.Open(pref, dsrc)) 78 | 79 | conn.Query(func(row *sql.Rows) error { 80 | if row.Next() { 81 | types, e := row.ColumnTypes() 82 | if e != nil { 83 | return e 84 | } 85 | ln := len(types) 86 | vals := make([]interface{}, ln) 87 | ptrs := make([]interface{}, ln) 88 | for i := 0; i < ln; i++ { 89 | ptrs[i] = &vals[i] 90 | } 91 | 92 | row.Scan(ptrs...) 93 | for i := 0; i < ln; i++ { 94 | s, b := conn.Literal(vals[i], types[i].DatabaseTypeName()) 95 | OutTrace("type=%v, val=%v,sql-type=%#v, literal=%s, quote=%t", reflect.TypeOf(vals[i]), vals[i], types[i].DatabaseTypeName(), s, b) 96 | } 97 | 98 | } 99 | return nil 100 | }, "select * from tx_parcel") 101 | 102 | conn.Query(func(row *sql.Rows) error { 103 | if row.Next() { 104 | var ct mysql.NullTime 105 | row.Scan(&ct) 106 | s, b := conn.Literal(ct, "") 107 | OutTrace("type=%v, val=%v, literal=%s, quote=%t", reflect.TypeOf(ct), ct, s, b) 108 | } 109 | return nil 110 | }, "select create_time from tx_parcel") 111 | } 112 | 113 | func Test_Mdb(t *testing.T) { 114 | conn := MyConn{} 115 | panicIfErr(conn.Open(pref, dsrc)) 116 | 117 | i, e := conn.Exec(`replace into godbart_prd_2018.sys_schema_version select * from sys_schema_version`) 118 | fmt.Printf("%d, %#v", i, e) 119 | 120 | } 121 | -------------------------------------------------------------------------------- /art/sync.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "database/sql" 5 | "fmt" 6 | "regexp" 7 | "strings" 8 | "sync" 9 | ) 10 | 11 | func Sync(srce *DataSource, dest []*DataSource, kind map[string]bool, rgx []*regexp.Regexp) error { 12 | 13 | if srce == nil { 14 | return errorAndLog("need source db to diff, type=%#v", kind) 15 | } 16 | 17 | scon, err := openDbAndLog(srce) 18 | if err != nil { 19 | return err 20 | } 21 | 22 | // 要执行的 ddl 23 | var name, ddls []string 24 | 25 | // 获得所有表 26 | tbls, err := listTable(scon, rgx) 27 | if err != nil { 28 | return err 29 | } 30 | 31 | if kind[SyncTbl] { 32 | for _, v := range tbls { 33 | ddl, er := scon.DdlTable(v) 34 | if er != nil { 35 | return er 36 | } 37 | name = append(name, "table="+v) 
38 | ddls = append(ddls, ddl) 39 | LogTrace("%4d ddl table=%s", len(ddls), v) 40 | } 41 | } 42 | 43 | if kind[SyncTrg] { 44 | for _, v := range tbls { 45 | tgs, er := scon.Triggers(v) 46 | if er != nil { 47 | return er 48 | } 49 | for k := range tgs { 50 | ddl, er := scon.DdlTrigger(k) 51 | if er != nil { 52 | return er 53 | } 54 | name = append(name, "trigger="+k) 55 | ddls = append(ddls, ddl) 56 | LogTrace("%4d ddl trigger=%s", len(ddls), k) 57 | } 58 | } 59 | } 60 | 61 | cnt := len(ddls) 62 | for _, db := range dest { 63 | conn, er := openDbAndLog(db) 64 | if er != nil { 65 | return er 66 | } 67 | 68 | for i, v := range ddls { 69 | _, e2 := conn.Exec(v) 70 | if e2 != nil { 71 | LogError("%4d/%d failed on db=%s, name=%s, err=%v", i+1, cnt, db.Code, name[i], e2) 72 | } else { 73 | LogTrace("%4d/%d done db=%s, name=%s", i+1, cnt, db.Code, name[i]) 74 | } 75 | } 76 | } 77 | 78 | if kind[SyncRow] { 79 | type udp struct { 80 | tbln string 81 | stms string 82 | vals []interface{} 83 | } 84 | 85 | chns := make([]chan *udp, len(dest)) 86 | 87 | wg := &sync.WaitGroup{} 88 | for i, db := range dest { 89 | conn, er := openDbAndLog(db) 90 | if er != nil { 91 | return er 92 | } 93 | 94 | chns[i] = make(chan *udp, 5) 95 | idb, icn := db.Code, chns[i] 96 | wg.Add(1) 97 | go func() { 98 | for i := 1; ; i++ { 99 | u := <-icn 100 | if len(u.stms) == 0 { 101 | LogTrace("end %d rows database=%s", i, idb) 102 | wg.Done() 103 | return 104 | } 105 | a, e := conn.Exec(u.stms, u.vals...) 106 | if e != nil { 107 | LogError("failed to sync %d-th row on db=%s, table=%s, err=%v", i, idb, u.tbln, e) 108 | } else { 109 | LogDebug("inserted %d-th row affects %d, db=%s, table=%s", i, a, idb, u.tbln) 110 | } 111 | } 112 | }() 113 | } 114 | 115 | tbln := len(tbls) 116 | for i, v := range tbls { 117 | LogTrace("%d/%d tables", i+1, tbln) 118 | var ff = func(row *sql.Rows) error { 119 | cols, er := row.Columns() 120 | if er != nil { 121 | return er 122 | } 123 | 124 | for ln, cnt := len(cols), 1; row.Next(); cnt++ { 125 | vals := make([]interface{}, ln) 126 | ptrs := make([]interface{}, ln) 127 | for i := 0; i < ln; i++ { 128 | ptrs[i] = &vals[i] 129 | } 130 | 131 | row.Scan(ptrs...) 
132 | u := &udp{ 133 | v, 134 | fmt.Sprintf("insert into %s values(%s)", v, strings.Repeat(",?", ln)[1:]), 135 | vals, 136 | } 137 | LogDebug("sync %d row of table=%s", cnt, v) 138 | for _, c := range chns { 139 | c <- u 140 | } 141 | } 142 | return nil 143 | } 144 | er := scon.Query(ff, "select * from "+v) 145 | if er != nil { 146 | LogError("sync data failed, table=%s, err=%v", v, er) 147 | return er 148 | } 149 | } 150 | // END 151 | u := &udp{} 152 | for _, c := range chns { 153 | c <- u 154 | } 155 | LogTrace("waiting for sync done") 156 | wg.Wait() 157 | } 158 | 159 | return nil 160 | } 161 | -------------------------------------------------------------------------------- /art/conf.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "errors" 5 | "github.com/pelletier/go-toml" 6 | "strings" 7 | ) 8 | 9 | type Config struct { 10 | Preference Preference 11 | SqlTemplet map[string]string 12 | DataSource map[string]DataSource 13 | StartupEnv map[string]string 14 | } 15 | 16 | type Preference struct { 17 | DatabaseType string 18 | DelimiterRaw string 19 | DelimiterCmd string 20 | LineComment string 21 | MultComment []string 22 | FmtDateTime string 23 | ControlPort int 24 | ConnMaxOpen int 25 | ConnMaxIdel int 26 | } 27 | 28 | type FileEntity struct { 29 | Path string 30 | Text string 31 | } 32 | 33 | type DataSource struct { 34 | Code string 35 | Conn string 36 | } 37 | 38 | // 39 | 40 | func ParseToml(text string) (config *Config, err error) { 41 | 42 | conf, err := toml.Load(text) 43 | if err != nil { 44 | return 45 | } 46 | 47 | preference, err := parsePreference(conf) 48 | if err != nil { 49 | return 50 | } 51 | sqltemplet, err := parseSqlTemplet(conf) 52 | if err != nil { 53 | return 54 | } 55 | datasource, err := parseDataSource(conf) 56 | if err != nil { 57 | return 58 | } 59 | 60 | config = &Config{ 61 | preference, 62 | sqltemplet, 63 | datasource, 64 | make(map[string]string), 65 | } 66 | 67 | return 68 | } 69 | 70 | func parseSqlTemplet(conf *toml.Tree) (rst map[string]string, err error) { 71 | if tree, ok := conf.Get("sqltemplet").(*toml.Tree); ok { 72 | rst = make(map[string]string) 73 | for k, v := range tree.ToMap() { 74 | switch v.(type) { 75 | case string: 76 | rst[k] = v.(string) 77 | default: 78 | err = errors.New("unsupported value, sqltemplet." + k) 79 | return 80 | } 81 | } 82 | } else { 83 | err = errorAndLog("failed to parse sqltemplet") 84 | } 85 | return 86 | } 87 | 88 | func parseDataSource(conf *toml.Tree) (rst map[string]DataSource, err error) { 89 | if tree, ok := conf.Get("datasource").(*toml.Tree); ok { 90 | rst = make(map[string]DataSource) 91 | for k, v := range tree.ToMap() { 92 | switch v.(type) { 93 | case string: 94 | rst[k] = DataSource{k, v.(string)} 95 | default: 96 | err = errors.New("unsupported value, sqltemplet." 
+ k) 97 | return 98 | } 99 | } 100 | } else { 101 | err = errorAndLog("failed to parse datasource") 102 | } 103 | return 104 | } 105 | 106 | func parsePreference(conf *toml.Tree) (rst Preference, err error) { 107 | if tree, ok := conf.Get("preference").(*toml.Tree); ok { 108 | rst = Preference{ 109 | toString(tree, "databasetype"), 110 | toString(tree, "delimiterraw"), 111 | toString(tree, "delimitercmd"), 112 | toString(tree, "linecomment"), 113 | toArrString(tree, "multcomment"), 114 | toString(tree, "fmtdatetime"), 115 | toInt(tree, "controlport"), 116 | toInt(tree, "connmaxopen"), 117 | toInt(tree, "connmaxidel"), 118 | } 119 | } else { 120 | err = errorAndLog("failed to parse preference") 121 | } 122 | return 123 | } 124 | 125 | func toInt(tree *toml.Tree, key string) (rst int) { 126 | if num, ok := tree.Get(key).(int64); ok { 127 | rst = int(num) 128 | } else { 129 | LogError("failed to get int, key=%s", key) 130 | } 131 | return 132 | } 133 | 134 | func toString(tree *toml.Tree, key string) (rst string) { 135 | if str, ok := tree.Get(key).(string); ok { 136 | rst = str 137 | } else { 138 | LogError("failed to get string, key=%s", key) 139 | } 140 | return 141 | } 142 | 143 | func toArrString(tree *toml.Tree, key string) (rst []string) { 144 | if arr, ok := tree.Get(key).([]interface{}); ok { 145 | rst = make([]string, len(arr)) 146 | for i, j := 0, 0; i < len(arr); i++ { 147 | switch arr[i].(type) { 148 | case string: 149 | s := strings.TrimSpace(arr[i].(string)) 150 | if len(s) > 0 { 151 | rst[j] = s 152 | j++ 153 | } 154 | default: 155 | LogError("get unsupported type while parsing key=%s", key) 156 | } 157 | } 158 | } else { 159 | LogError("failed to get array, key=%s", key) 160 | } 161 | return 162 | } 163 | -------------------------------------------------------------------------------- /demo/sql/init/1.table.sql: -------------------------------------------------------------------------------- 1 | -- mysql 5.6+ 2 | -- 时区一律 UTC+0,慎用now和CURRENT_TIMESTAMP 3 | -- default-time-zone = '+00:00' 4 | -- 字符一律 UTF-8 5 | -- character-set-server=utf8mb4 6 | 7 | -- 添加表,按字母顺序排列 -- 8 | 9 | DROP TABLE IF EXISTS `sys_schema_version`; 10 | CREATE TABLE `sys_schema_version` ( 11 | `version` BIGINT NOT NULL COMMENT '版本号', 12 | `created` DATETIME NOT NULL COMMENT '创建时间', 13 | PRIMARY KEY (`version`) 14 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 15 | 16 | DROP TABLE IF EXISTS `sys_hot_separation`; 17 | CREATE TABLE `sys_hot_separation` ( 18 | `table_name` VARCHAR(100) NOT NULL COMMENT '表名', 19 | `checked_id` BIGINT(20) NOT NULL COMMENT '检查过的最大ID', 20 | `checked_tm` DATETIME NOT NULL COMMENT '上次检查的时间', 21 | PRIMARY KEY (`table_name`) 22 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 23 | 24 | DROP TABLE IF EXISTS `tx_parcel`; 25 | CREATE TABLE `tx_parcel` ( 26 | `id` bigint(20) NOT NULL COMMENT 'ID', 27 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 28 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 29 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 30 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 31 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 32 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 33 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 34 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 35 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 36 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 37 | `weight_dim` 
decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 38 | `input_time` datetime DEFAULT NULL COMMENT '录单时间', 39 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 40 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 41 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 42 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 43 | PRIMARY KEY (`id`) 44 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹'; 45 | 46 | DROP TABLE IF EXISTS `tx_track`; 47 | CREATE TABLE `tx_track` ( 48 | `id` bigint(20) NOT NULL COMMENT 'ID', 49 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 50 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 51 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 52 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 53 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 54 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 55 | `company` int(11) NOT NULL COMMENT '物流公司', 56 | `track_num` varchar(50) NOT NULL COMMENT '跟踪单号', 57 | `events` text NOT NULL COMMENT '事件json:[{date:xx,info:xx},]', 58 | `status` int(11) NOT NULL COMMENT '物流状态', 59 | `dest_city` varchar(50) DEFAULT NULL COMMENT '包裹目的地城市', 60 | PRIMARY KEY (`id`) 61 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='物流跟踪'; 62 | 63 | DROP TABLE IF EXISTS `tx_parcel_event`; 64 | CREATE TABLE `tx_parcel_event` ( 65 | `id` bigint(20) NOT NULL COMMENT 'ID', 66 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 67 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 68 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 69 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 70 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 71 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 72 | `type` int(11) NOT NULL COMMENT '事件类型:取件|入库|出库', 73 | `source` varchar(100) DEFAULT NULL COMMENT '发生地', 74 | `operator_id` bigint(20) DEFAULT NULL COMMENT '操作员ID', 75 | `is_closed` tinyint(1) DEFAULT NULL COMMENT '是否关闭:true=1,false=0', 76 | PRIMARY KEY (`id`) 77 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹事件'; 78 | 79 | DROP TABLE IF EXISTS `tx_receiver`; 80 | CREATE TABLE `tx_receiver` ( 81 | `id` bigint(20) NOT NULL COMMENT 'ID', 82 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 83 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 84 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 85 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 86 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 87 | `name` varchar(20) NOT NULL COMMENT '姓名', 88 | `phone` varchar(60) NOT NULL COMMENT '电话', 89 | `postcode` varchar(20) NOT NULL COMMENT '邮编', 90 | `country` int(11) NOT NULL COMMENT '国家(代码)', 91 | `province` char(3) NOT NULL COMMENT '州/省/直辖市(简写)', 92 | `city` varchar(20) NOT NULL COMMENT '城市', 93 | `district` varchar(45) DEFAULT NULL COMMENT '县区', 94 | `address1` varchar(100) NOT NULL COMMENT '区/路/街', 95 | `address2` varchar(100) DEFAULT NULL COMMENT '楼/室', 96 | `hash` varchar(40) NOT NULL COMMENT '姓名,电话,省市区地址1,2的hash', 97 | PRIMARY KEY (`id`) 98 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='收件人'; -------------------------------------------------------------------------------- /art/sqls.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "fmt" 5 
| "strings" 6 | ) 7 | 8 | type Sql struct { 9 | Line string // 开始和结束行,全闭区间 10 | Head int // 首行 11 | Exeb bool // 语句,注释 12 | File string // 文件名或名字 13 | Text string // 正文部分 14 | } 15 | 16 | func ParseSqls(pref *Preference, file *FileEntity) []Sql { 17 | LogTrace("parse Sqls, file=%s", file.Path) 18 | 19 | lines := splitLinex(file.Text) 20 | sbgn, mbgn, tbgn := -1, -1, -1 21 | 22 | llen := len(lines) 23 | sqls := make([]Sql, 0, 32) 24 | dt, dc := pref.DelimiterRaw, pref.DelimiterCmd 25 | 26 | for i, line := range lines { 27 | 28 | //多行注释开始 29 | if isCmntMBgn(pref, line) { 30 | parseStatement(&sqls, lines, file.Path, &tbgn, i-1, &dt, dc) 31 | mbgn = i 32 | continue 33 | } 34 | 35 | // 多行注释结束 36 | if mbgn >= 0 { 37 | if isCmntMEnd(pref, line) { 38 | parseComment(&sqls, lines, file.Path, &mbgn, i) 39 | } 40 | continue 41 | } 42 | 43 | // 单行注释开始 44 | if isCmntLine(pref, line) { 45 | parseStatement(&sqls, lines, file.Path, &tbgn, i-1, &dt, dc) 46 | if sbgn < 0 { 47 | sbgn = i 48 | } 49 | continue 50 | } 51 | 52 | // 单行注释结束 53 | if sbgn >= 0 { 54 | parseComment(&sqls, lines, file.Path, &sbgn, i-1) 55 | } 56 | 57 | e := len(line) == 0 58 | 59 | // SQL正文 60 | if tbgn < 0 && !e { 61 | tbgn = i 62 | } 63 | 64 | // 空行分组 65 | if e { 66 | parseStatement(&sqls, lines, file.Path, &tbgn, i-1, &dt, dc) 67 | } 68 | } 69 | 70 | l := llen - 1 71 | if sbgn > 0 { 72 | parseComment(&sqls, lines, file.Path, &sbgn, l) 73 | } 74 | if mbgn > 0 { 75 | parseComment(&sqls, lines, file.Path, &mbgn, l) 76 | } 77 | if tbgn > 0 { 78 | parseStatement(&sqls, lines, file.Path, &tbgn, l, &dt, dc) 79 | } 80 | 81 | return sqls 82 | } 83 | 84 | func parseComment(segs *[]Sql, lines []string, name string, b *int, e int) { 85 | if *b < 0 || *b > e { 86 | return 87 | } 88 | 89 | i := e + 1 90 | text := strings.Join(lines[*b:i], Joiner) 91 | head := *b + 1 92 | line := fmt.Sprintf("%d:%d", head, i) 93 | *segs = append(*segs, Sql{ 94 | line, head, false, name, text, 95 | }) 96 | LogDebug("%3d, parsed Comment, line=%s", len(*segs), line) 97 | *b = -1 98 | } 99 | 100 | func parseStatement(segs *[]Sql, lines []string, name string, b *int, e int, dt *string, dc string) { 101 | if *b < 0 || *b > e { 102 | return 103 | } 104 | 105 | lns, lne := *b, e+1 106 | dtl, dcl := len(*dt), len(dc) 107 | 108 | for i := lns; i < lne; i++ { 109 | lll := len(lines[i]) 110 | n := i + 1 111 | if dcl > 0 && lll > dcl && strings.EqualFold(dc, lines[i][0:dcl]) { // 变更结束符 112 | c := lines[i][dcl] 113 | if c == ' ' || c == '\t' { 114 | *dt = strings.TrimSpace(lines[i][dcl+1:]) 115 | dtl = len(*dt) 116 | if lns < i { // 结束上一段 117 | head := lns + 1 118 | line := fmt.Sprintf("%d:%d", head, i) 119 | *segs = append(*segs, Sql{ 120 | line, 121 | head, 122 | true, 123 | name, 124 | strings.Join(lines[lns:i], Joiner), 125 | }) 126 | LogDebug("%3d, parsed Statement, line=%s", len(*segs), line) 127 | } 128 | lns = n 129 | // fmt.Printf("\t\tget new delimitor [%s] at line %d\n", *dt, n) 130 | continue 131 | } 132 | } 133 | 134 | dtp := lll - dtl 135 | if dtl > 0 && lll >= dtl && strings.EqualFold(*dt, lines[i][dtp:]) { // 结束符 136 | lines[i] = lines[i][0:dtp] // 必须去掉结束符,要不重新定义结束符不识别 137 | head := lns + 1 138 | line := fmt.Sprintf("%d:%d", head, n) 139 | *segs = append(*segs, Sql{ 140 | line, 141 | head, 142 | true, 143 | name, 144 | strings.Join(lines[lns:n], Joiner), 145 | }) 146 | LogDebug("%3d, parsed Statement, line=%s", len(*segs), line) 147 | lns = n 148 | // fmt.Printf("\t\tget the delimitor at line %d\n", n) 149 | } 150 | } 151 | 152 | if lns < lne { 153 | head := lns 
+ 1 154 | line := fmt.Sprintf("%d:%d", head, lne) 155 | *segs = append(*segs, Sql{ 156 | line, 157 | head, 158 | true, 159 | name, 160 | strings.Join(lines[lns:lne], Joiner), 161 | }) 162 | LogDebug("%3d, parsed Statement, line=%s", len(*segs), line) 163 | } 164 | 165 | *b = -1 166 | } 167 | 168 | // helper 169 | 170 | func isCmntLine(pref *Preference, str string) bool { 171 | if pref.LineComment == "" { 172 | return false 173 | } 174 | return strings.HasPrefix(str, pref.LineComment) 175 | } 176 | 177 | func isCmntMBgn(pref *Preference, str string) bool { 178 | l := len(pref.MultComment) 179 | if l < 2 { 180 | return false 181 | } 182 | 183 | for i := 0; i < l; i += 2 { 184 | if strings.HasPrefix(str, pref.MultComment[i]) { 185 | return true 186 | } 187 | } 188 | return false 189 | } 190 | 191 | func isCmntMEnd(pref *Preference, str string) bool { 192 | l := len(pref.MultComment) 193 | if l < 2 { 194 | return false 195 | } 196 | 197 | for i := 1; i < l; i += 2 { 198 | if strings.HasSuffix(str, pref.MultComment[i]) { 199 | return true 200 | } 201 | } 202 | 203 | return false 204 | } 205 | -------------------------------------------------------------------------------- /demo/chk/txt/04.txt: -------------------------------------------------------------------------------- 1 | -- db=godbart_prd_2018, 1/6, table=sys_hot_separation 2 | DROP TABLE IF EXISTS `sys_hot_separation`; 3 | CREATE TABLE `sys_hot_separation` ( 4 | `table_name` varchar(100) COLLATE utf8mb4_bin NOT NULL COMMENT '表名', 5 | `checked_id` bigint(20) NOT NULL COMMENT '检查过的最大ID', 6 | `checked_tm` datetime NOT NULL COMMENT '上次检查的时间', 7 | PRIMARY KEY (`table_name`) 8 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 9 | 10 | -- db=godbart_prd_2018, 2/6, table=sys_schema_version 11 | DROP TABLE IF EXISTS `sys_schema_version`; 12 | CREATE TABLE `sys_schema_version` ( 13 | `version` bigint(20) NOT NULL COMMENT '版本号', 14 | `created` datetime NOT NULL COMMENT '创建时间', 15 | PRIMARY KEY (`version`) 16 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 17 | 18 | -- db=godbart_prd_2018, 3/6, table=tx_parcel 19 | DROP TABLE IF EXISTS `tx_parcel`; 20 | CREATE TABLE `tx_parcel` ( 21 | `id` bigint(20) NOT NULL COMMENT 'ID', 22 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 23 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 24 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 25 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 26 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 27 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 28 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 29 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 30 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 31 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 32 | `weight_dim` decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 33 | `input_time` datetime DEFAULT NULL COMMENT '录单时间', 34 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 35 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 36 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 37 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 38 | PRIMARY KEY (`id`) 39 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹'; 40 | 41 | -- db=godbart_prd_2018, 4/6, table=tx_parcel_event 42 | DROP TABLE IF EXISTS `tx_parcel_event`; 43 | CREATE TABLE `tx_parcel_event` ( 44 | `id` bigint(20) NOT NULL COMMENT 'ID', 45 | `create_time` 
datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 46 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 47 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 48 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 49 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 50 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 51 | `type` int(11) NOT NULL COMMENT '事件类型:取件|入库|出库', 52 | `source` varchar(100) DEFAULT NULL COMMENT '发生地', 53 | `operator_id` bigint(20) DEFAULT NULL COMMENT '操作员ID', 54 | `is_closed` tinyint(1) DEFAULT NULL COMMENT '是否关闭:true=1,false=0', 55 | PRIMARY KEY (`id`) 56 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹事件'; 57 | 58 | -- db=godbart_prd_2018, 5/6, table=tx_receiver 59 | DROP TABLE IF EXISTS `tx_receiver`; 60 | CREATE TABLE `tx_receiver` ( 61 | `id` bigint(20) NOT NULL COMMENT 'ID', 62 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 63 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 64 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 65 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 66 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 67 | `name` varchar(20) NOT NULL COMMENT '姓名', 68 | `phone` varchar(60) NOT NULL COMMENT '电话', 69 | `postcode` varchar(20) NOT NULL COMMENT '邮编', 70 | `country` int(11) NOT NULL COMMENT '国家(代码)', 71 | `province` char(3) NOT NULL COMMENT '州/省/直辖市(简写)', 72 | `city` varchar(20) NOT NULL COMMENT '城市', 73 | `district` varchar(45) DEFAULT NULL COMMENT '县区', 74 | `address1` varchar(100) NOT NULL COMMENT '区/路/街', 75 | `address2` varchar(100) DEFAULT NULL COMMENT '楼/室', 76 | `hash` varchar(40) NOT NULL COMMENT '姓名,电话,省市区地址1,2的hash', 77 | PRIMARY KEY (`id`) 78 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='收件人'; 79 | 80 | -- db=godbart_prd_2018, 6/6, table=tx_track 81 | DROP TABLE IF EXISTS `tx_track`; 82 | CREATE TABLE `tx_track` ( 83 | `id` bigint(20) NOT NULL COMMENT 'ID', 84 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 85 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 86 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 87 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 88 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 89 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 90 | `company` int(11) NOT NULL COMMENT '物流公司', 91 | `track_num` varchar(50) NOT NULL COMMENT '跟踪单号', 92 | `events` text NOT NULL COMMENT '事件json:[{date:xx,info:xx},]', 93 | `status` int(11) NOT NULL COMMENT '物流状态', 94 | `dest_city` varchar(50) DEFAULT NULL COMMENT '包裹目的地城市', 95 | PRIMARY KEY (`id`) 96 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='物流跟踪'; 97 | 98 | -------------------------------------------------------------------------------- /demo/sql/revi/2018-11-11.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS `sys_schema_version`; 2 | CREATE TABLE `sys_schema_version` ( 3 | `version` BIGINT NOT NULL COMMENT '版本号', 4 | `created` DATETIME NOT NULL COMMENT '创建时间', 5 | PRIMARY KEY (`version`) 6 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 7 | 8 | REPLACE INTO sys_schema_version (version, created) VALUES( 2018111101, NOW()); 9 | 10 | -- ------------------------------------------- 11 | SELECT max(version) FROM sys_schema_version; 12 | 13 | DROP TABLE IF EXISTS 
`sys_hot_separation`; 14 | CREATE TABLE `sys_hot_separation` ( 15 | `table_name` VARCHAR(100) NOT NULL COMMENT '表名', 16 | `checked_id` BIGINT(20) NOT NULL COMMENT '检查过的最大ID', 17 | `checked_tm` DATETIME NOT NULL COMMENT '上次检查的时间', 18 | PRIMARY KEY (`table_name`) 19 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 20 | 21 | REPLACE INTO sys_schema_version (version, created) VALUES( 2018111102, NOW()); 22 | 23 | -- ------------------------------------------- 24 | SELECT max(version) FROM sys_schema_version; 25 | 26 | DROP TABLE IF EXISTS `tx_parcel`; 27 | CREATE TABLE `tx_parcel` ( 28 | `id` bigint(20) NOT NULL COMMENT 'ID', 29 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 30 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 31 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 32 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 33 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 34 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 35 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 36 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 37 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 38 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 39 | `weight_dim` decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 40 | `input_time` datetime DEFAULT NULL COMMENT '录单时间', 41 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 42 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 43 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 44 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 45 | PRIMARY KEY (`id`) 46 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹'; 47 | 48 | DROP TABLE IF EXISTS `tx_track`; 49 | CREATE TABLE `tx_track` ( 50 | `id` bigint(20) NOT NULL COMMENT 'ID', 51 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 52 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 53 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 54 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 55 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 56 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 57 | `company` int(11) NOT NULL COMMENT '物流公司', 58 | `track_num` varchar(50) NOT NULL COMMENT '跟踪单号', 59 | `events` text NOT NULL COMMENT '事件json:[{date:xx,info:xx},]', 60 | `status` int(11) NOT NULL COMMENT '物流状态', 61 | `dest_city` varchar(50) DEFAULT NULL COMMENT '包裹目的地城市', 62 | PRIMARY KEY (`id`) 63 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='物流跟踪'; 64 | 65 | DROP TABLE IF EXISTS `tx_parcel_event`; 66 | CREATE TABLE `tx_parcel_event` ( 67 | `id` bigint(20) NOT NULL COMMENT 'ID', 68 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 69 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 70 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 71 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 72 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 73 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 74 | `type` int(11) NOT NULL COMMENT '事件类型:取件|入库|出库', 75 | `source` varchar(100) DEFAULT NULL COMMENT '发生地', 76 | `operator_id` bigint(20) DEFAULT NULL COMMENT '操作员ID', 77 | `is_closed` tinyint(1) DEFAULT NULL COMMENT '是否关闭:true=1,false=0', 78 | PRIMARY KEY (`id`) 79 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹事件'; 80 | 81 | 
DROP TABLE IF EXISTS `tx_receiver`; 82 | CREATE TABLE `tx_receiver` ( 83 | `id` bigint(20) NOT NULL COMMENT 'ID', 84 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 85 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 86 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 87 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 88 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 89 | `name` varchar(20) NOT NULL COMMENT '姓名', 90 | `phone` varchar(60) NOT NULL COMMENT '电话', 91 | `postcode` varchar(20) NOT NULL COMMENT '邮编', 92 | `country` int(11) NOT NULL COMMENT '国家(代码)', 93 | `province` char(3) NOT NULL COMMENT '州/省/直辖市(简写)', 94 | `city` varchar(20) NOT NULL COMMENT '城市', 95 | `district` varchar(45) DEFAULT NULL COMMENT '县区', 96 | `address1` varchar(100) NOT NULL COMMENT '区/路/街', 97 | `address2` varchar(100) DEFAULT NULL COMMENT '楼/室', 98 | `hash` varchar(40) NOT NULL COMMENT '姓名,电话,省市区地址1,2的hash', 99 | PRIMARY KEY (`id`) 100 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='收件人'; 101 | 102 | REPLACE INTO sys_schema_version (version, created) VALUES( 2018111103, NOW()); 103 | -------------------------------------------------------------------------------- /demo/chk/manual.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | out_dir="/tmp/godbart" # 工作目录 4 | gui_dff="meld" # 比较文件的gui工具 5 | 6 | ### ######### 7 | 8 | if [[ ! -x godbart ]]; then 9 | echo "### 重新编译 godbart" 10 | go build 11 | fi 12 | 13 | echo -e "\e[0;34m 14 | ## 进行演示之前,一定要设置好mysql的链接(用户,密码)。 15 | ## 会用到兼容linux的以下命令: tee, diff, grep 16 | ## 留意控制台输出,没问题的话,按ENTER继续。\e[m 17 | " 18 | rm -rf "$out_dir" 19 | mkdir -p "$out_dir" 20 | 21 | function wait_txt(){ 22 | echo -e "\e[0;34m### $1\e[m" 23 | read -p "### press ENTER to continue, Ctrl-C to break." 24 | } 25 | 26 | function echo_txt(){ 27 | echo -e "\e[0;34m### $1\e[m" 28 | } 29 | 30 | function diff_txt(){ 31 | 32 | if [[ ! -f $1 || ! -f $2 ]]; then 33 | echo -e "\e[0;31m### $3 结果文件不存在\e[m" 34 | exit 35 | fi 36 | 37 | if [[ ! -z `grep -E 'ERROR|FATAL' $2` ]]; then 38 | echo -e "\e[0;31m### $3 执行日志中有错误\e[m" 39 | exit 40 | fi 41 | 42 | # 分离结果 43 | out=$2.out 44 | grep -vE '^[0-9]{4}[^0-9][0-9]{2}' $2 >$out 45 | 46 | if [[ -z `diff -wBZ $1 $out` ]]; then 47 | echo -e "\e[0;32m### $3 结果正确\e[m" 48 | rm -rf "$out" 49 | else 50 | echo -e "\e[0;31m!!! 
$3 结果对比不一致\e[m" 51 | echo "diff -wBZ $1 $out" 52 | $gui_dff $1 $out 53 | exit 54 | fi 55 | } 56 | 57 | wait_txt "初始化数据库" 58 | ./godbart exec \ 59 | -c godbart.toml \ 60 | -d lcl_test \ 61 | --agree \ 62 | demo/sql/diff/reset.sql \ 63 | 2>&1| tee $out_dir/01.txt \ 64 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 65 | wait_txt "查询数据库" 66 | ./godbart tree \ 67 | -c godbart.toml \ 68 | -s lcl_test \ 69 | demo/chk/sql/01.sql \ 70 | 2>&1| tee -a $out_dir/01.txt \ 71 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 72 | diff_txt demo/chk/txt/01.txt $out_dir/01.txt "数据库创建:prd_main" 73 | 74 | 75 | wait_txt "初始化数据:prd_main" 76 | ./godbart exec \ 77 | -c godbart.toml \ 78 | -d prd_main \ 79 | --agree \ 80 | demo/sql/init/ \ 81 | 2>&1| tee $out_dir/02.txt \ 82 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 83 | wait_txt "查询表结构" 84 | ./godbart show \ 85 | -c godbart.toml \ 86 | -s prd_main \ 87 | -t tbl,trg \ 88 | 2>&1| tee -a $out_dir/02.txt \ 89 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 90 | diff_txt demo/chk/txt/02.txt $out_dir/02.txt "表结构:prd_main" 91 | 92 | wait_txt "查询表记录" 93 | ./godbart tree \ 94 | -c godbart.toml \ 95 | -s prd_main \ 96 | demo/chk/sql/03.sql \ 97 | 2>&1| tee $out_dir/03.txt \ 98 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 99 | diff_txt demo/chk/txt/03.txt $out_dir/03.txt "表记录:prd_main" 100 | 101 | 102 | wait_txt "执行版本控制:prd_main,prd_2018" 103 | ./godbart revi \ 104 | -c godbart.toml \ 105 | -d prd_main \ 106 | -d prd_2018 \ 107 | -r 2018111103 \ 108 | --agree \ 109 | demo/sql/revi/ \ 110 | 2>&1| tee $out_dir/04.txt \ 111 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 112 | wait_txt "查询版本表结构:prd_2018" 113 | ./godbart show \ 114 | -c godbart.toml \ 115 | -s prd_2018 \ 116 | -t tbl,trg \ 117 | 2>&1| tee -a $out_dir/04.txt \ 118 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 119 | diff_txt demo/chk/txt/04.txt $out_dir/04.txt "数据库表结构:prd_2018" 120 | 121 | wait_txt "同步表结构:prd_main,dev_main" 122 | ./godbart sync \ 123 | -c godbart.toml \ 124 | -s prd_main \ 125 | -d dev_main \ 126 | -t tbl,trg \ 127 | --agree 128 | wait_txt "同步版本号:dev_main" 129 | ./godbart sync \ 130 | -c godbart.toml \ 131 | -s prd_main \ 132 | -d dev_main \ 133 | -t row \ 134 | --agree \ 135 | sys_schema_version 136 | wait_txt "查询表结构:dev_main" 137 | ./godbart show \ 138 | -c godbart.toml \ 139 | -s dev_main \ 140 | -t tbl,trg \ 141 | 2>&1| tee $out_dir/05.txt \ 142 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 143 | diff_txt demo/chk/txt/05.txt $out_dir/05.txt "数据库表结构:dev_main" 144 | 145 | wait_txt "静态分析 sqlx-tree.log" 146 | ./godbart sqlx \ 147 | -c godbart.toml \ 148 | -e "DATE_FROM=2018-01-01 00:00:00" \ 149 | -l trace \ 150 | demo/sql/tree/tree.sql \ 151 | 2>&1| tee $out_dir/06.txt 152 | diff_txt demo/chk/txt/06.txt $out_dir/06.txt "数据树结构:tree.sql" 153 | 154 | 155 | wait_txt "迁移数据 prd_main:prd_2018" 156 | ./godbart tree \ 157 | -c godbart.toml \ 158 | -s prd_main \ 159 | -d prd_2018 \ 160 | -e "DATE_FROM=2018-01-01 00:00:00" \ 161 | --agree \ 162 | demo/sql/tree/tree.sql \ 163 | 2>&1| tee $out_dir/07.txt \ 164 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 165 | diff_txt demo/chk/txt/07.txt $out_dir/07.txt "迁移数据过程:tree.sql" 166 | 167 | wait_txt "对比迁移数据结果:prd_main" 168 | ./godbart tree \ 169 | -c godbart.toml \ 170 | -s prd_main \ 171 | demo/chk/sql/03.sql \ 172 | 2>&1| tee $out_dir/08.txt \ 173 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 174 | diff_txt demo/chk/txt/08.txt $out_dir/08.txt "对比迁移数据结果:prd_main" 175 | 176 | 177 | wait_txt "对比迁移数据结果:prd_2018" 178 | ./godbart tree \ 179 | -c godbart.toml \ 180 | -s prd_2018 \ 181 | demo/chk/sql/03.sql \ 182 | 2>&1| tee $out_dir/09.txt \ 183 | |grep -E 
'^[0-9]{4}[^0-9][0-9]{2}' 184 | diff_txt demo/chk/txt/09.txt $out_dir/09.txt "对比迁移数据结果:prd_2018" 185 | 186 | wait_txt "高级版本管理:dev_main" 187 | ./godbart revi \ 188 | -c godbart.toml \ 189 | -d dev_main \ 190 | -r 2019011101 \ 191 | --agree \ 192 | demo/sql/revi/2019-01-11.sql \ 193 | 2>&1| tee $out_dir/10.txt \ 194 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 195 | wait_txt "查询表结构:dev_main" 196 | ./godbart diff \ 197 | -c godbart.toml \ 198 | -s prd_main \ 199 | -d dev_main \ 200 | -t tbl,trg \ 201 | 2>&1| tee -a $out_dir/10.txt \ 202 | |grep -E '^[0-9]{4}[^0-9][0-9]{2}' 203 | diff_txt demo/chk/txt/10.txt $out_dir/10.txt "数据库表结构:dev_main" 204 | 205 | echo_txt "====所有测试结束===" -------------------------------------------------------------------------------- /demo/chk/txt/02.txt: -------------------------------------------------------------------------------- 1 | -- db=godbart_prd_main, 1/7, table=sys_hot_separation 2 | DROP TABLE IF EXISTS `sys_hot_separation`; 3 | CREATE TABLE `sys_hot_separation` ( 4 | `table_name` varchar(100) COLLATE utf8mb4_bin NOT NULL COMMENT '表名', 5 | `checked_id` bigint(20) NOT NULL COMMENT '检查过的最大ID', 6 | `checked_tm` datetime NOT NULL COMMENT '上次检查的时间', 7 | PRIMARY KEY (`table_name`) 8 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 9 | 10 | -- db=godbart_prd_main, 2/7, table=sys_schema_version 11 | DROP TABLE IF EXISTS `sys_schema_version`; 12 | CREATE TABLE `sys_schema_version` ( 13 | `version` bigint(20) NOT NULL COMMENT '版本号', 14 | `created` datetime NOT NULL COMMENT '创建时间', 15 | PRIMARY KEY (`version`) 16 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 17 | 18 | -- db=godbart_prd_main, 3/7, table=tx_parcel 19 | DROP TABLE IF EXISTS `tx_parcel`; 20 | CREATE TABLE `tx_parcel` ( 21 | `id` bigint(20) NOT NULL COMMENT 'ID', 22 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 23 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 24 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 25 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 26 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 27 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 28 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 29 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 30 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 31 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 32 | `weight_dim` decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 33 | `input_time` datetime DEFAULT NULL COMMENT '录单时间', 34 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 35 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 36 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 37 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 38 | PRIMARY KEY (`id`), 39 | UNIQUE KEY `uq_trknum` (`track_num`), 40 | KEY `ix_user_id` (`user_id`), 41 | KEY `ix_recver_id` (`recver_id`) 42 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹'; 43 | 44 | DROP TRIGGER IF EXISTS `tx_parcel$log$bd`; 45 | DELIMITER $$ 46 | CREATE TRIGGER `tx_parcel$log$bd` BEFORE DELETE ON `tx_parcel` 47 | FOR EACH ROW BEGIN 48 | insert into `tx_parcel$log` select *, null, 2, now() from `tx_parcel` where id= OLD.id; 49 | END $$ 50 | DELIMITER ; 51 | 52 | DROP TRIGGER IF EXISTS `tx_parcel$log$bu`; 53 | DELIMITER $$ 54 | CREATE TRIGGER `tx_parcel$log$bu` BEFORE UPDATE ON `tx_parcel` 55 | FOR EACH ROW BEGIN 56 | insert into `tx_parcel$log` select *, null, 1, now() from `tx_parcel` 
where id= OLD.id; 57 | END $$ 58 | DELIMITER ; 59 | 60 | -- db=godbart_prd_main, 4/7, table=tx_parcel$log 61 | DROP TABLE IF EXISTS `tx_parcel$log`; 62 | CREATE TABLE `tx_parcel$log` ( 63 | `id` bigint(20) NOT NULL COMMENT 'ID', 64 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 65 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 66 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 67 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 68 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 69 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 70 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 71 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 72 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 73 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 74 | `weight_dim` decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 75 | `input_time` datetime DEFAULT NULL COMMENT '录单时间', 76 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 77 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 78 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 79 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 80 | `_id` int(11) NOT NULL AUTO_INCREMENT, 81 | `_du` int(11) DEFAULT NULL, 82 | `_dt` datetime DEFAULT NULL, 83 | PRIMARY KEY (`_id`) 84 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; 85 | 86 | -- db=godbart_prd_main, 5/7, table=tx_parcel_event 87 | DROP TABLE IF EXISTS `tx_parcel_event`; 88 | CREATE TABLE `tx_parcel_event` ( 89 | `id` bigint(20) NOT NULL COMMENT 'ID', 90 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 91 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 92 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 93 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 94 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 95 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 96 | `type` int(11) NOT NULL COMMENT '事件类型:取件|入库|出库', 97 | `source` varchar(100) DEFAULT NULL COMMENT '发生地', 98 | `operator_id` bigint(20) DEFAULT NULL COMMENT '操作员ID', 99 | `is_closed` tinyint(1) DEFAULT NULL COMMENT '是否关闭:true=1,false=0', 100 | PRIMARY KEY (`id`), 101 | KEY `ix_user_id` (`user_id`), 102 | KEY `ix_parcel_id` (`parcel_id`) 103 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹事件'; 104 | 105 | -- db=godbart_prd_main, 6/7, table=tx_receiver 106 | DROP TABLE IF EXISTS `tx_receiver`; 107 | CREATE TABLE `tx_receiver` ( 108 | `id` bigint(20) NOT NULL COMMENT 'ID', 109 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 110 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 111 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 112 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 113 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 114 | `name` varchar(20) NOT NULL COMMENT '姓名', 115 | `phone` varchar(60) NOT NULL COMMENT '电话', 116 | `postcode` varchar(20) NOT NULL COMMENT '邮编', 117 | `country` int(11) NOT NULL COMMENT '国家(代码)', 118 | `province` char(3) NOT NULL COMMENT '州/省/直辖市(简写)', 119 | `city` varchar(20) NOT NULL COMMENT '城市', 120 | `district` varchar(45) DEFAULT NULL COMMENT '县区', 121 | `address1` varchar(100) NOT NULL COMMENT '区/路/街', 122 | `address2` varchar(100) DEFAULT NULL COMMENT '楼/室', 123 | `hash` varchar(40) NOT NULL COMMENT '姓名,电话,省市区地址1,2的hash', 124 | PRIMARY KEY (`id`), 125 | KEY 
`ix_user_id` (`user_id`), 126 | KEY `ix_name` (`name`), 127 | KEY `ix_addr` (`province`,`city`,`district`,`address1`), 128 | KEY `ix_hash` (`hash`), 129 | FULLTEXT KEY `ft_phone` (`phone`) 130 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='收件人'; 131 | 132 | -- db=godbart_prd_main, 7/7, table=tx_track 133 | DROP TABLE IF EXISTS `tx_track`; 134 | CREATE TABLE `tx_track` ( 135 | `id` bigint(20) NOT NULL COMMENT 'ID', 136 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 137 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 138 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 139 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 140 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 141 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 142 | `company` int(11) NOT NULL COMMENT '物流公司', 143 | `track_num` varchar(50) NOT NULL COMMENT '跟踪单号', 144 | `events` text NOT NULL COMMENT '事件json:[{date:xx,info:xx},]', 145 | `status` int(11) NOT NULL COMMENT '物流状态', 146 | `dest_city` varchar(50) DEFAULT NULL COMMENT '包裹目的地城市', 147 | PRIMARY KEY (`id`), 148 | UNIQUE KEY `uq_trknum` (`track_num`), 149 | KEY `ix_user_id` (`user_id`), 150 | KEY `ix_parcel_id` (`parcel_id`) 151 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='物流跟踪'; 152 | -------------------------------------------------------------------------------- /demo/chk/txt/05.txt: -------------------------------------------------------------------------------- 1 | -- db=godbart_dev_main, 1/7, table=sys_hot_separation 2 | DROP TABLE IF EXISTS `sys_hot_separation`; 3 | CREATE TABLE `sys_hot_separation` ( 4 | `table_name` varchar(100) COLLATE utf8mb4_bin NOT NULL COMMENT '表名', 5 | `checked_id` bigint(20) NOT NULL COMMENT '检查过的最大ID', 6 | `checked_tm` datetime NOT NULL COMMENT '上次检查的时间', 7 | PRIMARY KEY (`table_name`) 8 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 9 | 10 | -- db=godbart_dev_main, 2/7, table=sys_schema_version 11 | DROP TABLE IF EXISTS `sys_schema_version`; 12 | CREATE TABLE `sys_schema_version` ( 13 | `version` bigint(20) NOT NULL COMMENT '版本号', 14 | `created` datetime NOT NULL COMMENT '创建时间', 15 | PRIMARY KEY (`version`) 16 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 17 | 18 | -- db=godbart_dev_main, 3/7, table=tx_parcel 19 | DROP TABLE IF EXISTS `tx_parcel`; 20 | CREATE TABLE `tx_parcel` ( 21 | `id` bigint(20) NOT NULL COMMENT 'ID', 22 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 23 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 24 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 25 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 26 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 27 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 28 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 29 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 30 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 31 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 32 | `weight_dim` decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 33 | `input_time` datetime DEFAULT NULL COMMENT '录单时间', 34 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 35 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 36 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 37 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 38 | PRIMARY KEY (`id`), 39 | UNIQUE KEY 
`uq_trknum` (`track_num`), 40 | KEY `ix_user_id` (`user_id`), 41 | KEY `ix_recver_id` (`recver_id`) 42 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹'; 43 | 44 | DROP TRIGGER IF EXISTS `tx_parcel$log$bd` ; 45 | DELIMITER $$ 46 | CREATE TRIGGER `tx_parcel$log$bd` BEFORE DELETE ON `tx_parcel` 47 | FOR EACH ROW BEGIN 48 | insert into `tx_parcel$log` select *, null, 2, now() from `tx_parcel` where id= OLD.id; 49 | END $$ 50 | DELIMITER ; 51 | 52 | DROP TRIGGER IF EXISTS `tx_parcel$log$bu` ; 53 | DELIMITER $$ 54 | CREATE TRIGGER `tx_parcel$log$bu` BEFORE UPDATE ON `tx_parcel` 55 | FOR EACH ROW BEGIN 56 | insert into `tx_parcel$log` select *, null, 1, now() from `tx_parcel` where id= OLD.id; 57 | END $$ 58 | DELIMITER ; 59 | 60 | -- db=godbart_dev_main, 4/7, table=tx_parcel$log 61 | DROP TABLE IF EXISTS `tx_parcel$log`; 62 | CREATE TABLE `tx_parcel$log` ( 63 | `id` bigint(20) NOT NULL COMMENT 'ID', 64 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 65 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 66 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 67 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 68 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 69 | `warehouse` int(11) DEFAULT NULL COMMENT '数据所在仓库 WareHouse', 70 | `sender_id` bigint(20) DEFAULT NULL COMMENT '发件人ID', 71 | `recver_id` bigint(20) DEFAULT NULL COMMENT '收件人ID', 72 | `track_num` varchar(50) NOT NULL COMMENT '运单号', 73 | `weight_pkg` decimal(10,2) DEFAULT NULL COMMENT '商家包裹总重量(a+b+m)', 74 | `weight_dim` decimal(10,2) DEFAULT NULL COMMENT '商家体积重', 75 | `input_time` datetime DEFAULT NULL COMMENT '录单时间', 76 | `store_time` datetime DEFAULT NULL COMMENT '最新入库时间', 77 | `shelf_time` datetime DEFAULT NULL COMMENT '最新上架时间', 78 | `leave_time` datetime DEFAULT NULL COMMENT '最新出库时间', 79 | `track_time` datetime DEFAULT NULL COMMENT '首个国内物流时间', 80 | `_id` int(11) NOT NULL AUTO_INCREMENT, 81 | `_du` int(11) DEFAULT NULL, 82 | `_dt` datetime DEFAULT NULL, 83 | PRIMARY KEY (`_id`) 84 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; 85 | 86 | -- db=godbart_dev_main, 5/7, table=tx_parcel_event 87 | DROP TABLE IF EXISTS `tx_parcel_event`; 88 | CREATE TABLE `tx_parcel_event` ( 89 | `id` bigint(20) NOT NULL COMMENT 'ID', 90 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 91 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 92 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 93 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 94 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 95 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 96 | `type` int(11) NOT NULL COMMENT '事件类型:取件|入库|出库', 97 | `source` varchar(100) DEFAULT NULL COMMENT '发生地', 98 | `operator_id` bigint(20) DEFAULT NULL COMMENT '操作员ID', 99 | `is_closed` tinyint(1) DEFAULT NULL COMMENT '是否关闭:true=1,false=0', 100 | PRIMARY KEY (`id`), 101 | KEY `ix_user_id` (`user_id`), 102 | KEY `ix_parcel_id` (`parcel_id`) 103 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='包裹事件'; 104 | 105 | -- db=godbart_dev_main, 6/7, table=tx_receiver 106 | DROP TABLE IF EXISTS `tx_receiver`; 107 | CREATE TABLE `tx_receiver` ( 108 | `id` bigint(20) NOT NULL COMMENT 'ID', 109 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 110 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 111 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 
'逻辑删除:true=1,false=0', 112 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 113 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 114 | `name` varchar(20) NOT NULL COMMENT '姓名', 115 | `phone` varchar(60) NOT NULL COMMENT '电话', 116 | `postcode` varchar(20) NOT NULL COMMENT '邮编', 117 | `country` int(11) NOT NULL COMMENT '国家(代码)', 118 | `province` char(3) NOT NULL COMMENT '州/省/直辖市(简写)', 119 | `city` varchar(20) NOT NULL COMMENT '城市', 120 | `district` varchar(45) DEFAULT NULL COMMENT '县区', 121 | `address1` varchar(100) NOT NULL COMMENT '区/路/街', 122 | `address2` varchar(100) DEFAULT NULL COMMENT '楼/室', 123 | `hash` varchar(40) NOT NULL COMMENT '姓名,电话,省市区地址1,2的hash', 124 | PRIMARY KEY (`id`), 125 | KEY `ix_user_id` (`user_id`), 126 | KEY `ix_name` (`name`), 127 | KEY `ix_addr` (`province`,`city`,`district`,`address1`), 128 | KEY `ix_hash` (`hash`), 129 | FULLTEXT KEY `ft_phone` (`phone`) 130 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='收件人'; 131 | 132 | -- db=godbart_dev_main, 7/7, table=tx_track 133 | DROP TABLE IF EXISTS `tx_track`; 134 | CREATE TABLE `tx_track` ( 135 | `id` bigint(20) NOT NULL COMMENT 'ID', 136 | `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', 137 | `modify_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', 138 | `is_deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '逻辑删除:true=1,false=0', 139 | `logno` bigint(20) DEFAULT NULL COMMENT '日志编号', 140 | `user_id` bigint(20) DEFAULT NULL COMMENT '用户ID', 141 | `parcel_id` bigint(20) NOT NULL COMMENT '包裹ID', 142 | `company` int(11) NOT NULL COMMENT '物流公司', 143 | `track_num` varchar(50) NOT NULL COMMENT '跟踪单号', 144 | `events` text NOT NULL COMMENT '事件json:[{date:xx,info:xx},]', 145 | `status` int(11) NOT NULL COMMENT '物流状态', 146 | `dest_city` varchar(50) DEFAULT NULL COMMENT '包裹目的地城市', 147 | PRIMARY KEY (`id`), 148 | UNIQUE KEY `uq_trknum` (`track_num`), 149 | KEY `ix_user_id` (`user_id`), 150 | KEY `ix_parcel_id` (`parcel_id`) 151 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT COMMENT='物流跟踪'; 152 | -------------------------------------------------------------------------------- /art/show.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "regexp" 5 | "sort" 6 | "strings" 7 | ) 8 | 9 | const ( 10 | ShowTblName = "${TABLE_NAME}" 11 | ShowTblDdl = "${TABLE_DDL}" 12 | ShowTgrName = "${TRIGGER_NAME}" 13 | ShowTgrDdl = "${TRIGGER_DDL}" 14 | ShowColBase = "${COLUMNS_BASE}" 15 | ShowColFull = "${COLUMNS_FULL}" 16 | ) 17 | 18 | var ShowParaRgx = regexp.MustCompile(`\$\{(TABLE_NAME|TABLE_DDL|TRIGGER_NAME|TRIGGER_DDL|COLUMNS_BASE|COLUMNS_FULL)\}`) 19 | var SqlSplitRgx = regexp.MustCompile(`[\r\n]+`) 20 | 21 | type ShowTmpl struct { 22 | Key string // 模板名 23 | Tpl string // 原始模板 24 | Arg map[string]bool // 模板中的参数 25 | Idx []int // 参数索引,[参数开始,参数结束,...] 
26 | } 27 | 28 | func Show(srce *DataSource, ktpl []string, rgx []*regexp.Regexp) error { 29 | 30 | if srce == nil { 31 | return errorAndLog("need source db to show") 32 | } 33 | 34 | conn, err := openDbAndLog(srce) 35 | if err != nil { 36 | return err 37 | } 38 | 39 | tbls, err := listTable(conn, rgx) 40 | if err != nil { 41 | return err 42 | } 43 | if len(tbls) == 0 { 44 | LogTrace("no tables on db=%s", conn.DbName()) 45 | return nil 46 | } 47 | 48 | sort.Strings(tbls) 49 | 50 | lns := len(ktpl) 51 | tpl := make([]ShowTmpl, 0, lns/2) 52 | for i := 0; i < lns; i += 2 { 53 | k, t := ktpl[i], ktpl[i+1] 54 | LogTrace("parse templet for key=%s", k) 55 | tpl = append(tpl, ParseTmpl(k, t)) 56 | } 57 | 58 | dbn := conn.DbName() 59 | cnt := len(tbls) 60 | for j, tbl := range tbls { 61 | OutDebug("-- db=%s, %d/%d, table=%s", dbn, j+1, cnt, tbl) 62 | env := make(map[string]interface{}) 63 | for i, p := range tpl { 64 | key := ktpl[i*2] 65 | LogTrace("merge templet for key=%s, table=%s", key, tbl) 66 | out, er := MergeTmpl(p, env, tbl, conn) 67 | if er != nil { 68 | LogError("failed to merge templet. key=%s, table=%s, err=%v", key, tbl, er) 69 | return er 70 | } 71 | OutTrace(out) 72 | } 73 | } 74 | 75 | return nil 76 | } 77 | 78 | func ParseTmpl(key, tpl string) ShowTmpl { 79 | mtc := ShowParaRgx.FindAllStringSubmatchIndex(tpl, -1) 80 | lns := len(mtc) 81 | arg := make(map[string]bool) 82 | idx := make([]int, 0, lns*2) 83 | for _, v := range mtc { 84 | idx = append(idx, v[0], v[1]) 85 | arg[tpl[v[0]:v[1]]] = true 86 | } 87 | return ShowTmpl{key, tpl, arg, idx} 88 | } 89 | 90 | func MergeTmpl(tpl ShowTmpl, env map[string]interface{}, tbl string, con *MyConn) (string, error) { 91 | 92 | tm, zr := 1, 0 93 | for arg := range tpl.Arg { 94 | val, err := makeParaVal(arg, env, tbl, con) 95 | if err != nil { 96 | return "", err 97 | } 98 | switch arg { 99 | case ShowTgrName, ShowTgrDdl: 100 | if ln := len(val.([]string)); ln > 0 { 101 | tm = ln 102 | } else { 103 | LogTrace("empty templet val, arg=%s", arg) 104 | zr++ 105 | } 106 | default: 107 | if len(val.(string)) == 0 { 108 | LogTrace("empty templet val, arg=%s", arg) 109 | zr++ 110 | } 111 | } 112 | } 113 | 114 | kln := len(tpl.Arg) 115 | if zr == kln { 116 | tm = 0 117 | LogDebug("skip all empty para templet, arg=%s", tpl.Key) 118 | } else if zr > 0 && zr < kln { 119 | return "", errorAndLog("partly empty templat val, arg=%s", tpl.Key) 120 | } 121 | 122 | var sb strings.Builder 123 | pln := len(tpl.Idx) 124 | tmp := tpl.Tpl 125 | for i := 0; i < tm; i++ { 126 | off := 0 127 | for j := 0; j < pln; j += 2 { 128 | b, e := tpl.Idx[j], tpl.Idx[j+1] 129 | if b > off { 130 | sb.WriteString(tmp[off:b]) 131 | } 132 | key := tmp[b:e] 133 | off = e 134 | switch val := env[key]; val.(type) { 135 | case []string: 136 | sb.WriteString(val.([]string)[i]) 137 | case string: 138 | if key == ShowColBase || key == ShowColFull { 139 | // indent 140 | n := 0 141 | for m := b-1; m >= 0; m-- { 142 | c := tmp[m] 143 | if c == '\t' || c == ' ' { 144 | n ++ 145 | } else { 146 | break 147 | } 148 | } 149 | if n > 0 { 150 | nd := "\n" + strings.Repeat(" ", n) 151 | sb.WriteString(strings.Replace(val.(string), "\n", nd, -1)) 152 | }else{ 153 | sb.WriteString(val.(string)) 154 | } 155 | } else { 156 | sb.WriteString(val.(string)) 157 | } 158 | } 159 | } 160 | if off < len(tmp) { 161 | sb.WriteString(tmp[off:]) 162 | } 163 | } 164 | 165 | return sb.String(), nil 166 | } 167 | 168 | func makeParaVal(key string, env map[string]interface{}, tbl string, con *MyConn) (interface{}, 
error) { 169 | if v, ok := env[key]; ok { 170 | return v, nil 171 | } 172 | 173 | switch key { 174 | case ShowTblName: 175 | env[key] = tbl 176 | return tbl, nil 177 | case ShowTblDdl: 178 | if ddl, err := con.DdlTable(tbl); err == nil { 179 | env[key] = ddl 180 | return ddl, nil 181 | } else { 182 | return nil, err 183 | } 184 | case ShowTgrName: 185 | if trg, err := makeTrgList(tbl, con); err == nil { 186 | env[key] = trg 187 | return trg, nil 188 | } else { 189 | return nil, err 190 | } 191 | case ShowTgrDdl: 192 | var trg []string 193 | if val, ok := env[ShowTgrName]; ok { 194 | trg = val.([]string) 195 | } else { 196 | ntg, err := makeTrgList(tbl, con) 197 | if err == nil { 198 | trg = ntg 199 | env[ShowTgrName] = ntg 200 | } else { 201 | return nil, err 202 | } 203 | } 204 | 205 | ddl := make([]string, len(trg)) 206 | for i, v := range trg { 207 | dl, err := con.DdlTrigger(v) 208 | if err != nil { 209 | return nil, err 210 | } 211 | ddl[i] = dl 212 | } 213 | env[key] = ddl 214 | return ddl, nil 215 | case ShowColBase: 216 | col, err := makeColList(tbl, con) 217 | if err != nil { 218 | return nil, err 219 | } 220 | 221 | fld := make([]string, len(col)) 222 | for i, v := range col { 223 | fld[i] = v.Name + " " + v.Type 224 | } 225 | val := strings.Join(fld, ",\n") 226 | env[key] = val 227 | return val, nil 228 | case ShowColFull: 229 | var tmp string 230 | if val, ok := env[ShowTblDdl]; ok { 231 | tmp = val.(string) 232 | } else { 233 | if ddl, err := con.DdlTable(tbl); err == nil { 234 | tmp = ddl 235 | env[ShowTblDdl] = ddl 236 | } else { 237 | return nil, err 238 | } 239 | } 240 | 241 | col, err := makeColList(tbl, con) 242 | if err != nil { 243 | return nil, err 244 | } 245 | 246 | ddl := SqlSplitRgx.Split(tmp, -1) 247 | lnc, lnd := len(col), len(ddl) 248 | tkc, tkd := make([]string, lnc), make([]string, lnd) 249 | 250 | for i, c := range col { 251 | tkc[i] = signifySql(c.Name, c.Type) 252 | } 253 | for i, d := range ddl { 254 | tkd[i] = signifySql(d) 255 | } 256 | 257 | b := -1 258 | for c, d := 0, 0; c < lnc && d < lnd; d++ { 259 | if strings.HasPrefix(tkd[d], tkc[c]) { 260 | if b < 0 { 261 | b = d 262 | } 263 | c++ 264 | } else { 265 | if c > 0 { 266 | return nil, errorAndLog("columns seq not matched") 267 | } 268 | } 269 | } 270 | 271 | fld := make([]string, lnc) 272 | for i := range fld { 273 | fld[i] = strings.TrimFunc(ddl[b+i], isCommaWhite) 274 | } 275 | 276 | val := strings.Join(fld, ",\n") 277 | env[key] = val 278 | return val, nil 279 | } 280 | 281 | return nil, errorAndLog("unsupported show para=%s", key) 282 | } 283 | 284 | func makeColList(tbl string, con *MyConn) ([]Col, error) { 285 | tmp, err := con.Columns(tbl) 286 | if err != nil { 287 | return nil, err 288 | } 289 | col := make([]Col, 0, len(tmp)) 290 | for _, v := range tmp { 291 | col = append(col, v) 292 | } 293 | 294 | sort.Slice(col, func(i, j int) bool { 295 | return col[i].Seq < col[j].Seq 296 | }) 297 | return col, err 298 | } 299 | 300 | func makeTrgList(tbl string, con *MyConn) ([]string, error) { 301 | trg, err := con.Triggers(tbl) 302 | if err != nil { 303 | return nil, err 304 | } 305 | 306 | rst := make([]string, 0, len(trg)) 307 | for k := range trg { 308 | rst = append(rst, k) 309 | } 310 | sort.Strings(rst) 311 | return rst, nil 312 | } 313 | -------------------------------------------------------------------------------- /demo/chk/txt/08.txt: -------------------------------------------------------------------------------- 1 | 2 | -- -- SRC ID=2, LINE=2:2 3 | SELECT table_name, checked_id FROM 
sys_hot_separation; 4 | 5 | -- -- OUT ID=5, LINE=5:5, FOR 6 | -- table_name='tx_parcel' 7 | -- checked_id=278581; 8 | 9 | -- -- OUT ID=5, LINE=5:5, FOR 10 | -- table_name='tx_parcel_event' 11 | -- checked_id=1049551; 12 | 13 | -- -- OUT ID=5, LINE=5:5, FOR 14 | -- table_name='tx_track' 15 | -- checked_id=369644; 16 | 17 | -- -- SRC ID=8, LINE=8:8 18 | SELECT version FROM sys_schema_version; 19 | 20 | -- -- OUT ID=11, LINE=11:11, FOR 21 | -- version=2018112001; 22 | 23 | -- -- SRC ID=14, LINE=14:14 24 | SELECT * FROM tx_parcel; 25 | 26 | -- -- OUT ID=17, LINE=17:17, FOR 27 | -- id=1163922 28 | -- create_time='2018-01-02 17:39:50' 29 | -- modify_time='2018-01-17 06:49:00' 30 | -- is_deleted=0 31 | -- logno=3600736 32 | -- user_id=96 33 | -- warehouse=2 34 | -- sender_id=467122 35 | -- recver_id=761994 36 | -- track_num='NY180100000201' 37 | -- weight_pkg=0.30 38 | -- weight_dim=NULL 39 | -- input_time=NULL 40 | -- store_time='2018-01-03 09:41:27' 41 | -- shelf_time=NULL 42 | -- leave_time='2018-01-03 11:37:48' 43 | -- track_time=NULL; 44 | 45 | -- -- OUT ID=17, LINE=17:17, FOR 46 | -- id=1163923 47 | -- create_time='2018-01-02 17:39:51' 48 | -- modify_time='2018-01-17 06:49:00' 49 | -- is_deleted=0 50 | -- logno=3600736 51 | -- user_id=96 52 | -- warehouse=2 53 | -- sender_id=467123 54 | -- recver_id=761995 55 | -- track_num='NY180100000901' 56 | -- weight_pkg=1.10 57 | -- weight_dim=NULL 58 | -- input_time=NULL 59 | -- store_time='2018-01-03 09:42:11' 60 | -- shelf_time=NULL 61 | -- leave_time='2018-01-03 11:37:48' 62 | -- track_time=NULL; 63 | 64 | -- -- SRC ID=20, LINE=20:20 65 | SELECT * FROM tx_parcel_event; 66 | 67 | -- -- OUT ID=23, LINE=23:23, FOR 68 | -- id=3012022 69 | -- create_time='2018-01-02 17:39:51' 70 | -- modify_time=NULL 71 | -- is_deleted=0 72 | -- logno=3517088 73 | -- user_id=NULL 74 | -- parcel_id=1163922 75 | -- type=7 76 | -- source=NULL 77 | -- operator_id=NULL 78 | -- is_closed=NULL; 79 | 80 | -- -- OUT ID=23, LINE=23:23, FOR 81 | -- id=3012023 82 | -- create_time='2018-01-02 17:39:52' 83 | -- modify_time=NULL 84 | -- is_deleted=0 85 | -- logno=3517089 86 | -- user_id=NULL 87 | -- parcel_id=1163923 88 | -- type=7 89 | -- source=NULL 90 | -- operator_id=NULL 91 | -- is_closed=NULL; 92 | 93 | -- -- OUT ID=23, LINE=23:23, FOR 94 | -- id=3029856 95 | -- create_time='2018-01-03 00:58:31' 96 | -- modify_time=NULL 97 | -- is_deleted=0 98 | -- logno=3529662 99 | -- user_id=96 100 | -- parcel_id=1163922 101 | -- type=100 102 | -- source=NULL 103 | -- operator_id=NULL 104 | -- is_closed=NULL; 105 | 106 | -- -- OUT ID=23, LINE=23:23, FOR 107 | -- id=3029979 108 | -- create_time='2018-01-03 01:07:44' 109 | -- modify_time=NULL 110 | -- is_deleted=0 111 | -- logno=3529885 112 | -- user_id=96 113 | -- parcel_id=1163923 114 | -- type=100 115 | -- source=NULL 116 | -- operator_id=NULL 117 | -- is_closed=NULL; 118 | 119 | -- -- OUT ID=23, LINE=23:23, FOR 120 | -- id=3033050 121 | -- create_time='2018-01-03 09:41:27' 122 | -- modify_time=NULL 123 | -- is_deleted=0 124 | -- logno=3532704 125 | -- user_id=NULL 126 | -- parcel_id=1163922 127 | -- type=100 128 | -- source='125.119.237.185' 129 | -- operator_id=87 130 | -- is_closed=NULL; 131 | 132 | -- -- OUT ID=23, LINE=23:23, FOR 133 | -- id=3033333 134 | -- create_time='2018-01-03 09:42:11' 135 | -- modify_time=NULL 136 | -- is_deleted=0 137 | -- logno=3532954 138 | -- user_id=NULL 139 | -- parcel_id=1163923 140 | -- type=100 141 | -- source='125.119.237.185' 142 | -- operator_id=87 143 | -- is_closed=NULL; 144 | 145 | -- -- OUT 
ID=23, LINE=23:23, FOR 146 | -- id=3036336 147 | -- create_time='2018-01-03 11:54:01' 148 | -- modify_time=NULL 149 | -- is_deleted=0 150 | -- logno=3534378 151 | -- user_id=NULL 152 | -- parcel_id=1163922 153 | -- type=190 154 | -- source='125.119.237.185' 155 | -- operator_id=87 156 | -- is_closed=NULL; 157 | 158 | -- -- OUT ID=23, LINE=23:23, FOR 159 | -- id=3036337 160 | -- create_time='2018-01-03 11:54:04' 161 | -- modify_time=NULL 162 | -- is_deleted=0 163 | -- logno=3534378 164 | -- user_id=NULL 165 | -- parcel_id=1163923 166 | -- type=190 167 | -- source='125.119.237.185' 168 | -- operator_id=87 169 | -- is_closed=NULL; 170 | 171 | -- -- OUT ID=23, LINE=23:23, FOR 172 | -- id=3038150 173 | -- create_time='2018-01-03 23:20:40' 174 | -- modify_time=NULL 175 | -- is_deleted=0 176 | -- logno=3534716 177 | -- user_id=NULL 178 | -- parcel_id=1163922 179 | -- type=300 180 | -- source='72.227.141.116' 181 | -- operator_id=57 182 | -- is_closed=NULL; 183 | 184 | -- -- OUT ID=23, LINE=23:23, FOR 185 | -- id=3038151 186 | -- create_time='2018-01-03 23:20:40' 187 | -- modify_time=NULL 188 | -- is_deleted=0 189 | -- logno=3534716 190 | -- user_id=NULL 191 | -- parcel_id=1163923 192 | -- type=300 193 | -- source='72.227.141.116' 194 | -- operator_id=57 195 | -- is_closed=NULL; 196 | 197 | -- -- OUT ID=23, LINE=23:23, FOR 198 | -- id=3106745 199 | -- create_time='2018-01-17 06:50:48' 200 | -- modify_time=NULL 201 | -- is_deleted=0 202 | -- logno=3600736 203 | -- user_id=NULL 204 | -- parcel_id=1163922 205 | -- type=501 206 | -- source=NULL 207 | -- operator_id=NULL 208 | -- is_closed=NULL; 209 | 210 | -- -- OUT ID=23, LINE=23:23, FOR 211 | -- id=3106746 212 | -- create_time='2018-01-17 06:50:50' 213 | -- modify_time=NULL 214 | -- is_deleted=0 215 | -- logno=3600736 216 | -- user_id=NULL 217 | -- parcel_id=1163923 218 | -- type=501 219 | -- source=NULL 220 | -- operator_id=NULL 221 | -- is_closed=NULL; 222 | 223 | -- -- SRC ID=26, LINE=26:26 224 | SELECT * FROM tx_parcel$log; 225 | 226 | -- -- SRC ID=32, LINE=32:32 227 | SELECT * FROM tx_receiver; 228 | 229 | -- -- OUT ID=35, LINE=35:35, FOR 230 | -- id=761994 231 | -- create_time='2018-01-02 17:39:50' 232 | -- modify_time='2018-01-02 17:41:04' 233 | -- is_deleted=0 234 | -- logno=3528135 235 | -- user_id=NULL 236 | -- name='王五' 237 | -- phone='18000000005' 238 | -- postcode='301700' 239 | -- country=2 240 | -- province='TJ' 241 | -- city='天津' 242 | -- district='武清区' 243 | -- address1='杨村街和平里小区' 244 | -- address2='' 245 | -- hash='ca9a4340416208d05df21319095c332d'; 246 | 247 | -- -- OUT ID=35, LINE=35:35, FOR 248 | -- id=761995 249 | -- create_time='2018-01-02 17:39:52' 250 | -- modify_time='2018-01-02 17:41:03' 251 | -- is_deleted=0 252 | -- logno=3530808 253 | -- user_id=NULL 254 | -- name='王武' 255 | -- phone='18000000006' 256 | -- postcode='110100' 257 | -- country=2 258 | -- province='LN' 259 | -- city='沈阳市' 260 | -- district='浑南区' 261 | -- address1='辽宁省沈阳市浑南区金阳街' 262 | -- address2='' 263 | -- hash='b8516c744f0bfeebde027233bf29b546'; 264 | 265 | -- -- SRC ID=38, LINE=38:38 266 | SELECT * FROM tx_track; 267 | 268 | -- -- OUT ID=41, LINE=41:41, FOR 269 | -- id=988322 270 | -- create_time='2018-01-02 17:39:50' 271 | -- modify_time='2018-01-17 06:49:00' 272 | -- is_deleted=0 273 | -- logno=3600736 274 | -- user_id=NULL 275 | -- parcel_id=1163922 276 | -- company=200 277 | -- track_num='NY180100000201' 278 | -- events='[{\"date\":\"2018-01-02 17:39:51\",\"info\":\"运单已创建\",\"status\":0,\"template\":\"TX000\"},{\"date\":\"2018-01-03 
04:41:27\",\"info\":\"【纽约】包裹已在纽约分拣中心入库\",\"status\":100,\"template\":\"TX100\"},{\"date\":\"2018-01-03 06:37:48\",\"info\":\"【纽约】包裹已从纽约分拣中心离开\",\"status\":200,\"template\":\"TX200\"}]' 279 | -- status=200 280 | -- dest_city=NULL; 281 | 282 | -- -- OUT ID=41, LINE=41:41, FOR 283 | -- id=988323 284 | -- create_time='2018-01-02 17:39:51' 285 | -- modify_time='2018-01-17 06:49:00' 286 | -- is_deleted=0 287 | -- logno=3600736 288 | -- user_id=NULL 289 | -- parcel_id=1163923 290 | -- company=200 291 | -- track_num='NY180100000901' 292 | -- events='[{\"date\":\"2018-01-02 17:39:52\",\"info\":\"运单已创建\",\"status\":0,\"template\":\"TX000\"},{\"date\":\"2018-01-03 04:42:11\",\"info\":\"【纽约】包裹已在纽约分拣中心入库\",\"status\":100,\"template\":\"TX100\"},{\"date\":\"2018-01-03 06:37:48\",\"info\":\"【纽约】包裹已从纽约分拣中心离开\",\"status\":200,\"template\":\"TX200\"},{\"date\":\"2018-01-04 08:00:33\",\"id\":1627,\"info\":\"【纽约】包裹已在【纽约肯尼迪国际机场】等待安排航空\",\"status\":300,\"template\":\"TX250\"}]' 293 | -- status=300 294 | -- dest_city=NULL; 295 | -------------------------------------------------------------------------------- /art/help.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "io/ioutil" 7 | "log" 8 | "os" 9 | "os/user" 10 | "path/filepath" 11 | "regexp" 12 | "strings" 13 | "time" 14 | ) 15 | 16 | // public 17 | 18 | func LogDebug(m string, a ...interface{}) { 19 | if MsgLevel >= LvlDebug { 20 | log.Printf("[DEBUG] "+m+"\n", a...) 21 | } 22 | } 23 | 24 | func LogTrace(m string, a ...interface{}) { 25 | if MsgLevel >= LvlTrace { 26 | log.Printf("[TRACE] "+m+"\n", a...) 27 | } 28 | 29 | } 30 | 31 | func LogError(m string, a ...interface{}) { 32 | if MsgLevel >= LvlError { 33 | log.Printf("[ERROR] "+m+"\n", a...) 34 | } 35 | } 36 | 37 | func LogFatal(m string, a ...interface{}) { 38 | if MsgLevel >= LvlError { 39 | log.Fatalf("[FATAL] "+m+"\n", a...) 40 | } 41 | } 42 | 43 | func OutDebug(m string, a ...interface{}) { 44 | if MsgLevel >= LvlDebug { 45 | fmt.Printf(m+"\n", a...) 46 | } 47 | } 48 | 49 | func OutTrace(m string, a ...interface{}) { 50 | fmt.Printf(m+"\n", a...) 51 | } 52 | 53 | func ExitIfError(err error, code int, format string, args ...interface{}) { 54 | if err != nil { 55 | args = append(args, err) 56 | LogError(""+format+", err=%v", args...) 57 | os.Exit(code) 58 | } 59 | } 60 | 61 | func ExitIfTrue(tru bool, code int, format string, args ...interface{}) { 62 | if tru { 63 | LogError(""+format+"", args...) 
64 | os.Exit(code) 65 | } 66 | } 67 | 68 | func BuiltinEnvs(envs map[string]string) { 69 | 70 | if _, ok := envs[EnvUser]; !ok { 71 | cu, err := user.Current() 72 | if err == nil { 73 | envs[EnvUser] = cu.Username 74 | LogTrace("put builtin env, k=%s, v=%q", EnvUser, cu.Username) 75 | } else { 76 | envs[EnvUser] = "" 77 | LogFatal("put builtin env empty, k=%s, err=%v", EnvUser, err) 78 | } 79 | } 80 | 81 | if _, ok := envs[EnvHost]; !ok { 82 | ht, err := os.Hostname() 83 | if err == nil { 84 | envs[EnvHost] = ht 85 | LogTrace("put builtin env, k=%s, v=%q", EnvHost, ht) 86 | } else { 87 | envs[EnvHost] = "localhost" 88 | LogFatal("put builtin 'localhost', k=%s, err=%v", EnvHost, err) 89 | } 90 | } 91 | 92 | if _, ok := envs[EnvDate]; !ok { 93 | dt := fmtTime(time.Now(), "2006-01-02 15:04:05") // :-P 94 | envs[EnvDate] = dt 95 | LogTrace("put builtin env, k=%s, v=%q", EnvDate, dt) 96 | } 97 | 98 | if rl, ok := envs[EnvRule]; !ok { 99 | LogTrace("builtin env, k=%s not found", EnvRule) 100 | } else { 101 | LogTrace("use builtin env, k=%s, v=%q", EnvRule, rl) 102 | } 103 | 104 | envs[EnvSrcDb] = "UN-SET" 105 | envs[EnvOutDb] = "UN-SET" 106 | } 107 | 108 | func FileWalker(path []string, flag []string) ([]FileEntity, error) { 109 | 110 | sufx := make([]string, 0, len(flag)) 111 | for _, v := range flag { 112 | if len(v) > 0 { 113 | sufx = append(sufx, strings.ToLower(v)) 114 | } 115 | } 116 | 117 | var files []FileEntity 118 | var ff = func(p string, f os.FileInfo, e error) error { 119 | 120 | if e != nil { 121 | LogError("error=%v at path=%q", e, p) 122 | return e 123 | } 124 | 125 | if f.IsDir() { 126 | return nil 127 | } 128 | 129 | h := false 130 | if len(sufx) > 0 { 131 | l := strings.ToLower(p) 132 | for _, v := range sufx { 133 | if strings.HasSuffix(l, v) { 134 | h = true 135 | break 136 | } 137 | } 138 | } else { 139 | h = true 140 | } 141 | 142 | if h { 143 | data, err := ioutil.ReadFile(p) 144 | if err != nil { 145 | LogError("cannot read file=%s, err=%v", p, err) 146 | return err 147 | } 148 | LogTrace("got file=%s", p) 149 | files = append(files, FileEntity{p, string(data)}) 150 | } 151 | 152 | return nil 153 | } 154 | 155 | for _, p := range path { 156 | err := filepath.Walk(p, ff) 157 | if err != nil { 158 | return nil, err 159 | } 160 | } 161 | 162 | return files, nil 163 | } 164 | 165 | // private 166 | func errorAndLog(m string, a ...interface{}) error { 167 | s := fmt.Sprintf(m, a...)
168 | LogError("%s", s) 169 | return errors.New(s) 170 | } 171 | 172 | func openDbAndLog(db *DataSource) (conn *MyConn, err error) { 173 | LogDebug("try to open db=%s", db.Code) 174 | conn = &MyConn{} 175 | err = conn.Open(pref, db) 176 | 177 | if err == nil { 178 | LogTrace("successfully opened db=%s", db.Code) 179 | } else { 180 | LogError("failed to open db=%s, err=%v", db.Code, err) 181 | } 182 | 183 | return 184 | } 185 | 186 | func listTable(conn *MyConn, rgx []*regexp.Regexp) (rst []string, err error) { 187 | 188 | var tbs []string 189 | tbs, err = conn.Tables() 190 | if err != nil { 191 | LogError("failed to show tables db=%s, err=%v", conn.DbName(), err) 192 | return 193 | } 194 | 195 | if len(tbs) == 0 || len(rgx) == 0 { 196 | return tbs, nil 197 | } 198 | 199 | for _, r := range rgx { 200 | for _, t := range tbs { 201 | if matchEntire(r, t) { 202 | rst = append(rst, t) 203 | } 204 | } 205 | } 206 | return 207 | } 208 | 209 | func walkExes(exes []*Exe, fn func(exe *Exe) error) error { 210 | for _, exe := range exes { 211 | er := fn(exe) 212 | if er != nil { 213 | return er 214 | } 215 | er = walkExes(exe.Sons, fn) 216 | if er != nil { 217 | return er 218 | } 219 | } 220 | return nil 221 | } 222 | 223 | // 只支持 SEQ|TBL 224 | func pureRunExes(exes []*Exe, ctx map[string]interface{}, db *MyConn, fn func(exe *Exe, stm string) error) (err error) { 225 | for _, exe := range exes { 226 | if len(exe.Fors) == 0 { 227 | err = pureOneExes(exe, ctx, db, fn) 228 | } else { 229 | for i, arg := range exe.Fors { 230 | LogDebug("FOR exe [%d] on Arg=%s, exe=%d", i+1, arg, exe.Seg.Head) 231 | var vals []string 232 | switch arg.Type { 233 | case CmndSeq: 234 | gift := arg.Gift.(GiftSeq) 235 | for j := gift.Bgn; j <= gift.End; j = j + gift.Inc { 236 | v := fmt.Sprintf(gift.Fmt, j) 237 | vals = append(vals, v) 238 | LogDebug("FOR SEQ on Arg=%d, exe=%d, seq=%s", arg.Head, exe.Seg.Head, v) 239 | } 240 | case CmndTbl: 241 | tblKey := arg.Hold + magicDatabaseSrcTable 242 | tbls, ok := ctx[tblKey] 243 | if !ok { 244 | tbls, err = db.Tables() 245 | if err != nil { 246 | return err 247 | } 248 | ctx[tblKey] = tbls 249 | } 250 | 251 | reg := arg.Gift.(*regexp.Regexp) 252 | for _, v := range tbls.([]string) { 253 | if matchEntire(reg, v) { 254 | vals = append(vals, v) 255 | LogDebug("FOR TBL on Arg=%d, exe=%d, table=%s", arg.Head, exe.Seg.Head, v) 256 | } 257 | } 258 | default: 259 | return errorAndLog("unsupported FOR arg=%s", arg) 260 | } 261 | 262 | for _, v := range vals { 263 | LogTrace("FOR %s on Arg=%d, exe=%d, value=%s", arg.Type, arg.Head, exe.Seg.Head, v) 264 | ctx[arg.Hold] = v 265 | err = pureOneExes(exe, ctx, db, fn) 266 | if err != nil { 267 | return 268 | } 269 | } 270 | } 271 | } 272 | } 273 | return 274 | } 275 | 276 | func pureOneExes(exe *Exe, ctx map[string]interface{}, db *MyConn, fn func(exe *Exe, stm string) error) error { 277 | stm := exe.Seg.Text 278 | if len(exe.Deps) > 0 { 279 | // build stmt 280 | var sb strings.Builder // return,stdout 281 | off := 0 282 | for _, dep := range exe.Deps { 283 | LogDebug("parsing dep=%s", dep) 284 | 285 | if dep.Off > off { 286 | tmp := stm[off:dep.Off] 287 | sb.WriteString(tmp) 288 | } 289 | 290 | off = dep.End 291 | hld := dep.Str 292 | if ev, ok := ctx[hld]; ok && !dep.Dyn { 293 | v := ev.(string) 294 | sb.WriteString(v) 295 | LogDebug("static simple replace hold=%s, with value=%s", hld, v) 296 | } else { 297 | return errorAndLog("unsupported hold=%s in pure type, exe.head=%d, file=%s", hld, exe.Seg.Head, exe.Seg.File) 298 | } 299 | } 300 | 301 | if 
off > 0 && off < len(stm) { 302 | sb.WriteString(stm[off:]) 303 | } 304 | if off > 0 { 305 | stm = sb.String() 306 | } 307 | } 308 | 309 | err := fn(exe, stm) 310 | if err != nil { 311 | return err 312 | } 313 | 314 | pureRunExes(exe.Sons, ctx, db, fn) 315 | 316 | return err 317 | } 318 | -------------------------------------------------------------------------------- /demo/sql/init/2.data.sql: -------------------------------------------------------------------------------- 1 | /* 2 | -- Query: select * from tx_parcel 3 | where create_time > '2015-01-01' 4 | and leave_time is not null 5 | limit 2 6 | -- Date: 2018-11-16 11:25 7 | */ 8 | 9 | INSERT INTO `tx_parcel` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`warehouse`,`sender_id`,`recver_id`,`track_num`,`weight_pkg`,`weight_dim`,`input_time`,`store_time`,`shelf_time`,`leave_time`,`track_time`) VALUES 10 | (93152,'2016-08-11 21:17:43','2017-11-01 10:17:50',0,226608,1,NULL,49799,80385,'SU160816',2.75,NULL,NULL,NULL,NULL,'2016-08-12 22:12:10','2016-08-20 10:32:18') 11 | ,(93163,'2016-08-11 22:49:34','2017-11-01 10:17:50',0,226608,1,NULL,14245,80649,'100354269562',3.00,NULL,'2016-08-12 06:32:13','2016-08-12 15:01:30',NULL,'2016-08-12 22:12:07','2016-08-20 10:23:57') 12 | ,(278580,'2017-01-02 16:59:08','2017-11-01 10:17:50',0,2481043,78,NULL,93201,311828,'303354496818',4.10,NULL,NULL,'2017-01-04 16:58:03',NULL,'2017-01-04 20:15:56','2017-01-19 02:42:13') 13 | ,(278581,'2017-01-02 17:00:51','2017-11-01 10:17:50',0,2481043,78,NULL,93201,311829,'303762397428',4.10,NULL,NULL,'2017-01-04 16:57:03',NULL,'2017-01-06 17:33:45','2017-01-19 02:42:13') 14 | ,(1163922,'2018-01-02 17:39:50','2018-01-17 06:49:00',0,3600736,96,2,467122,761994,'NY180100000201',0.30,NULL,NULL,'2018-01-03 09:41:27',NULL,'2018-01-03 11:37:48',NULL) 15 | ,(1163923,'2018-01-02 17:39:51','2018-01-17 06:49:00',0,3600736,96,2,467123,761995,'NY180100000901',1.10,NULL,NULL,'2018-01-03 09:42:11',NULL,'2018-01-03 11:37:48',NULL) 16 | ; 17 | 18 | INSERT INTO `tx_track` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`parcel_id`,`company`,`track_num`,`events`,`status`,`dest_city`) VALUES 19 | (172215,'2016-08-11 22:49:34','2016-12-09 16:28:24',0,222515,1,93163,200, '100354269562','[{\"date\":\"2016-08-11 22:49:34\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:07\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200}]',200,NULL) 20 | ,(369647,'2017-01-04 16:58:03','2017-03-03 02:36:59',0,1025160,78,278580,200, '303354496818','[{\"date\":\"2017-01-04 16:58:03\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300}]',300,NULL) 21 | ,(369644,'2017-01-04 16:57:03','2017-03-03 02:37:12',0,1024502,78,278581,200, '303762397428','[{\"date\":\"2017-01-04 16:57:03\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300},{\"date\":\"2017-01-14 09:03:0\",\"info\":\"【广州】干线航班已到达【广州白云国际机场】\",\"status\":300},{\"date\":\"2017-01-15 12:42:11\",\"info\":\"【广州】干线航班已从【广州白云国际机场】起飞,航班号:CA3615\",\"status\":300},{\"date\":\"2017-01-15 13:37:42\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2017-01-15 13:40:35\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2017-01-18 
00:18:20\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL) 22 | ,(988322,'2018-01-02 17:39:50','2018-01-17 06:49:00',0,3600736,NULL,1163922,200,'NY180100000201','[{\"date\":\"2018-01-02 17:39:51\",\"info\":\"运单已创建\",\"status\":0,\"template\":\"TX000\"},{\"date\":\"2018-01-03 04:41:27\",\"info\":\"【纽约】包裹已在纽约分拣中心入库\",\"status\":100,\"template\":\"TX100\"},{\"date\":\"2018-01-03 06:37:48\",\"info\":\"【纽约】包裹已从纽约分拣中心离开\",\"status\":200,\"template\":\"TX200\"}]',200,NULL) 23 | ,(988323,'2018-01-02 17:39:51','2018-01-17 06:49:00',0,3600736,NULL,1163923,200,'NY180100000901','[{\"date\":\"2018-01-02 17:39:52\",\"info\":\"运单已创建\",\"status\":0,\"template\":\"TX000\"},{\"date\":\"2018-01-03 04:42:11\",\"info\":\"【纽约】包裹已在纽约分拣中心入库\",\"status\":100,\"template\":\"TX100\"},{\"date\":\"2018-01-03 06:37:48\",\"info\":\"【纽约】包裹已从纽约分拣中心离开\",\"status\":200,\"template\":\"TX200\"},{\"date\":\"2018-01-04 08:00:33\",\"id\":1627,\"info\":\"【纽约】包裹已在【纽约肯尼迪国际机场】等待安排航空\",\"status\":300,\"template\":\"TX250\"}]',300,NULL) 24 | ,(172204,'2016-08-11 21:17:43','2016-12-09 16:28:24',0,222515,1,93152,200, 'SU160816','[{\"date\":\"2016-08-11 10:31:53\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:10\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200},{\"date\":\"2016-08-13 17:52:19\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CA982\",\"status\":300},{\"date\":\"2016-08-17 08:46:33\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2016-08-18 09:18:31\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2016-08-20 10:35:55\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL) 25 | ; 26 | 27 | INSERT INTO `tx_parcel_event` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`parcel_id`,`type`,`source`,`operator_id`,`is_closed`) VALUES 28 | (351370,'2016-08-12 22:12:07','2016-08-12 22:12:07',0,212067,1,93152,300,'airwayBill=999-45480293',57,NULL) 29 | ,(363390,'2016-08-20 10:32:18','2016-08-20 10:32:18',0,222515,1,93152,500,NULL,NULL,NULL) 30 | ,(370092,'2016-08-22 13:10:05','2016-08-22 13:10:05',0,226608,1,93152,501,NULL,NULL,NULL) 31 | ,(350213,'2016-08-12 15:01:30','2016-08-12 15:01:30',0,210992,1,93163,100,NULL,NULL,NULL) 32 | ,(350946,'2016-08-12 22:12:07','2016-08-12 22:12:07',0,212067,1,93163,300,'airwayBill=999-45480293',57,NULL) 33 | ,(363385,'2016-08-20 10:23:57','2016-08-20 10:23:57',0,222515,1,93163,500,NULL,NULL,NULL) 34 | ,(370091,'2016-08-22 13:00:32','2016-08-22 13:00:32',0,226608,1,93163,501,NULL,NULL,NULL) 35 | ,(833154,'2017-01-04 16:58:03',NULL,0,663030,78,278580,100,NULL,NULL,NULL) 36 | ,(883598,'2017-01-11 23:59:37',NULL,0,710728,78,278580,300,'airwayBill=784-29622666',56,NULL) 37 | ,(933048,'2017-01-19 02:42:13',NULL,0,756412,78,278580,500,NULL,NULL,NULL) 38 | ,(1048772,'2017-01-30 22:05:59',NULL,0,782423,78,278580,502,NULL,NULL,NULL) 39 | ,(833144,'2017-01-04 16:57:03',NULL,0,663020,78,278581,100,NULL,NULL,NULL) 40 | ,(883616,'2017-01-11 23:59:37',NULL,0,710728,78,278581,300,'airwayBill=784-29622666',56,NULL) 41 | ,(933492,'2017-01-19 08:48:11',NULL,0,752392,78,278581,500,NULL,NULL,NULL) 42 | ,(933614,'2017-01-19 02:42:13',NULL,0,756412,78,278581,500,NULL,NULL,NULL) 43 | ,(1049551,'2017-01-31 00:00:22',NULL,0,782423,78,278581,502,NULL,NULL,NULL) 44 | ,(3012022,'2018-01-02 17:39:51',NULL,0,3517088,NULL,1163922,7,NULL,NULL,NULL) 45 | ,(3029856,'2018-01-03 00:58:31',NULL,0,3529662,96,1163922,100,NULL,NULL,NULL) 46 | ,(3033050,'2018-01-03 09:41:27',NULL,0,3532704,NULL,1163922,100,'125.119.237.185',87,NULL) 47 | ,(3036336,'2018-01-03 
11:54:01',NULL,0,3534378,NULL,1163922,190,'125.119.237.185',87,NULL) 48 | ,(3038150,'2018-01-03 23:20:40',NULL,0,3534716,NULL,1163922,300,'72.227.141.116',57,NULL) 49 | ,(3106745,'2018-01-17 06:50:48',NULL,0,3600736,NULL,1163922,501,NULL,NULL,NULL) 50 | ,(3012023,'2018-01-02 17:39:52',NULL,0,3517089,NULL,1163923,7,NULL,NULL,NULL) 51 | ,(3029979,'2018-01-03 01:07:44',NULL,0,3529885,96,1163923,100,NULL,NULL,NULL) 52 | ,(3033333,'2018-01-03 09:42:11',NULL,0,3532954,NULL,1163923,100,'125.119.237.185',87,NULL) 53 | ,(3036337,'2018-01-03 11:54:04',NULL,0,3534378,NULL,1163923,190,'125.119.237.185',87,NULL) 54 | ,(3038151,'2018-01-03 23:20:40',NULL,0,3534716,NULL,1163923,300,'72.227.141.116',57,NULL) 55 | ,(3106746,'2018-01-17 06:50:50',NULL,0,3600736,NULL,1163923,501,NULL,NULL,NULL) 56 | ; 57 | 58 | INSERT INTO `tx_receiver` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`name`,`phone`,`postcode`,`country`,`province`,`city`,`district`,`address1`,`address2`,`hash`) VALUES 59 | (80385,'2016-08-11 21:17:43','2017-12-08 07:10:14',0,-2500,1,'张三','13000000001','310000',2,'ZJ','杭州市',NULL,'上城区江城路候潮公寓',NULL,'') 60 | ,(80649,'2016-08-12 06:32:13','2017-12-08 07:10:14',0,-2500,1,'张山','13000000002','311500',2,'ZJ','杭州市',NULL,'桐庐县分水镇东门雅苑','','') 61 | ,(311828,'2017-01-02 16:59:08','2017-12-08 07:10:14',0,-2500,78,'李四','13000000003','200000',2,'SH','上海','徐汇区','华泾路1425弄','','') 62 | ,(311829,'2017-01-02 17:00:51','2017-12-08 07:10:14',0,-2500,78,'李思','13000000004','200000',2,'SH','上海','徐汇区','华泾路1427弄1','','') 63 | ,(761994,'2018-01-02 17:39:50','2018-01-02 17:41:04',0,3528135,NULL,'王五','18000000005','301700',2,'TJ','天津','武清区','杨村街和平里小区','','ca9a4340416208d05df21319095c332d') 64 | ,(761995,'2018-01-02 17:39:52','2018-01-02 17:41:03',0,3530808,NULL,'王武','18000000006','110100',2,'LN','沈阳市','浑南区','辽宁省沈阳市浑南区金阳街','','b8516c744f0bfeebde027233bf29b546') 65 | ; -------------------------------------------------------------------------------- /art/revi.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "database/sql" 5 | "fmt" 6 | "regexp" 7 | "strings" 8 | "sync" 9 | ) 10 | 11 | type ReviSeg struct { 12 | revi string 13 | exes []*Exe 14 | } 15 | 16 | func Revi(pref *Preference, dest []*DataSource, file []FileEntity, revi, mask, rqry string, risk bool) error { 17 | 18 | mreg, err := regexp.Compile(mask) 19 | if err != nil { 20 | LogFatal("failed to compile mask=%s, err=%v", mask, err) 21 | return err 22 | } 23 | 24 | if len(revi) == 0 || !mreg.MatchString(revi) { 25 | LogFatal("must assign revi number and match the mast") 26 | return err 27 | } 28 | 29 | var reviSegs []ReviSeg 30 | var reviSlt string 31 | var tknQry, tknSlt, tknUdp string 32 | 33 | reviFind, reviCurr := false, "" 34 | 35 | if len(rqry) == 0 { 36 | rqry = "SELECT" 37 | } 38 | tknQry = signifySql(rqry) 39 | 40 | // 倒序分版本块 41 | envs := make(map[string]string) 42 | for k := len(file) - 1; k >= 0; k-- { 43 | f := file[k] 44 | LogTrace("revi file=%s", f.Path) 45 | sqls := ParseSqls(pref, &f) 46 | 47 | // 按版本分段 48 | idxRevi := len(sqls) - 1 49 | reviSplit := func(bgn int, sqlRevi string) error { 50 | // find and check SELECT REVI 51 | ist := bgn // select-version-sql or bgn 52 | for j := bgn; j < idxRevi; j++ { 53 | if w := sqls[j]; w.Exeb { 54 | tkn := signifySql(w.Text) 55 | 56 | if !strings.HasPrefix(tkn, tknQry) { 57 | continue 58 | } 59 | 60 | if len(reviSlt) == 0 { 61 | reviSlt = w.Text 62 | tknSlt = tkn 63 | LogTrace("find SLT-REVI-SQL, line=%s, file=%s, sql=%s", 
w.Line, w.File, w.Text) 64 | } else { 65 | if tknSlt != tkn { 66 | return errorAndLog("SLT-REVI-SQL changed, first-sql=%s, file=%s, line=%s, now-sql=%s", reviSlt, w.File, w.Line, w.Text) 67 | } 68 | } 69 | ist = j 70 | break 71 | } 72 | } 73 | 74 | v := sqls[ist] 75 | if strings.Compare(sqlRevi, revi) > 0 { 76 | LogTrace("IGNORE bigger revi=%s, line=%s, file=%s", sqlRevi, v.Line, v.File) 77 | } else { 78 | LogTrace("build revi=%s, line from=%d, to=%d, file=%s", sqlRevi, sqls[ist].Head, sqls[idxRevi].Head, v.File) 79 | exe, er := ParseSqlx(sqls[ist:idxRevi+1], envs) 80 | if er != nil { 81 | return er 82 | } 83 | reviSegs = append(reviSegs, ReviSeg{sqlRevi, exe.Exes}) 84 | LogTrace("ADD candidate revi=%s, line from=%d, to=%d, file=%s", sqlRevi, sqls[ist].Head, sqls[idxRevi].Head, v.File) 85 | } 86 | return nil 87 | } 88 | 89 | numRevi := "" 90 | for i := idxRevi; i >= 0; i-- { 91 | v := sqls[i] 92 | if v.Exeb { 93 | stm := v.Text 94 | r := findUpdRevi(stm, tknUdp, mreg) 95 | 96 | if len(tknUdp) == 0 { // first 97 | if len(r) == 0 { 98 | return errorAndLog("REVI does not match in the last sql. line=%s, file=%s, sql=%s", v.Line, v.File, stm) 99 | } 100 | LogTrace("find UPD-REVI-SQL, revi=%s, line=%s, file=%s, sql=%s", r, v.Line, v.File, stm) 101 | p := strings.Index(stm, r) 102 | tknUdp = signifySql(stm[0:p]) 103 | } 104 | 105 | if len(r) > 0 { 106 | LogTrace("find more revi=%s, line=%s, file=%s", r, v.Line, v.File) 107 | 108 | if len(reviCurr) == 0 { 109 | reviCurr = r 110 | } else { 111 | if strings.Compare(reviCurr, r) <= 0 { 112 | return errorAndLog("need unique, ascending revi, but %s <= %s. line=%s, file=%s, sql=%s", reviCurr, r, v.Line, v.File, stm) 113 | } 114 | } 115 | 116 | if revi == r { 117 | LogTrace("find DONE revi=%s, line=%s, file=%s", r, v.Line, v.File) 118 | reviFind = true 119 | } 120 | 121 | if i < idxRevi { 122 | if er := reviSplit(i, numRevi); er != nil { 123 | return er 124 | } 125 | } 126 | 127 | idxRevi = i 128 | numRevi = r 129 | } 130 | } 131 | 132 | if i == 0 { 133 | if er := reviSplit(0, numRevi); er != nil { 134 | return er 135 | } 136 | } 137 | } 138 | } 139 | 140 | if !reviFind { 141 | return errorAndLog("cannot find assigned revi=%s", revi) 142 | } 143 | 144 | lastIdx := len(reviSegs) - 1 145 | if lastIdx < 0 { 146 | return errorAndLog("no sqls to run for revi=%s", revi) 147 | } 148 | 149 | if len(reviSlt) == 0 { 150 | LogTrace("without SLT-REVI-SQL, will run all revi segments") 151 | } 152 | 153 | // reverse 154 | for i, j := 0, lastIdx; i < j; i, j = i+1, j-1 { 155 | reviSegs[i], reviSegs[j] = reviSegs[j], reviSegs[i] 156 | } 157 | 158 | // run 159 | // open connections 160 | wg := &sync.WaitGroup{} 161 | cnln := len(dest) 162 | conn := make([]*MyConn, cnln) 163 | for i, v := range dest { 164 | con, er := openDbAndLog(v) 165 | if er != nil { 166 | return errorAndLog("failed to open db=%s, err=%v", v.Code, er) 167 | } 168 | conn[i] = con 169 | wg.Add(1) 170 | } 171 | 172 | // concurrent across databases, sequential within each database 173 | for i := 0; i < cnln; i++ { 174 | con := conn[i] 175 | var gogo = func() { 176 | defer wg.Done() 177 | ReviEach(pref, reviSegs, con, reviSlt, mreg, risk) 178 | } 179 | if risk { 180 | go gogo() 181 | } else { 182 | gogo() 183 | } 184 | } 185 | 186 | wg.Wait() 187 | return nil 188 | } 189 | 190 | func findUpdRevi(updSeg string, tknUdp string, mask *regexp.Regexp) (revi string) { 191 | if len(tknUdp) > 0 && !strings.HasPrefix(signifySql(updSeg), tknUdp) { // check similarity 192 | return 193 | } 194 | 195 | // check against the mask rule 196 | return mask.FindString(updSeg) 197 | } 198 | 199 | func ReviEach(pref *Preference, revs
[]ReviSeg, conn *MyConn, slt string, mask *regexp.Regexp, risk bool) { 200 | 201 | var revi string 202 | dbn := conn.DbName() 203 | var slv = func(rs *sql.Rows) (err error) { 204 | var cols []string 205 | cols, err = rs.Columns() 206 | if err != nil || len(cols) != 1 { 207 | return 208 | } 209 | r1 := sql.NullString{} 210 | if rs.Next() { 211 | err = rs.Scan(&r1) 212 | } 213 | 214 | if r1.Valid { 215 | revi = r1.String 216 | if !mask.MatchString(revi) { 217 | return errorAndLog("revi not matched. revi=%s on db=%s use sql=%s", revi, dbn, slt) 218 | } 219 | } else { 220 | LogTrace("get NULL revi on db=%s use sql=%s", dbn, slt) 221 | } 222 | 223 | return 224 | } 225 | 226 | err := conn.Query(slv, slt) 227 | if err != nil { 228 | if conn.TableNotFound(err) { 229 | LogTrace("Table does not exist, db=%s use sql=%s", dbn, slt) 230 | } else { 231 | LogError("failed to select revision on db=%s use sql=%s, err=%v", dbn, slt, err) 232 | return 233 | } 234 | } 235 | 236 | if len(revi) == 0 { 237 | LogTrace("empty revi means always run. db=%s use sql=%s", dbn, slt) 238 | } else { 239 | LogTrace("get revi=%s on db=%s use sql=%s", revi, dbn, slt) 240 | } 241 | 242 | // run 243 | sts := make(map[string]bool) 244 | ctx := make(map[string]interface{}) 245 | for _, s := range revs { 246 | walkExes(s.exes, func(exe *Exe) error { 247 | sts[fmt.Sprintf("%s:%d", exe.Seg.File, exe.Seg.Head)] = true 248 | return nil 249 | }) 250 | } 251 | cnt := len(sts) 252 | lft := cnt 253 | 254 | cmn, dlt := pref.LineComment, pref.DelimiterRaw 255 | tkn := signifySql(slt) 256 | for _, s := range revs { 257 | 258 | pnt := 0 259 | if len(revi) > 0 && strings.Compare(s.revi, revi) <= 0 { 260 | walkExes(s.exes, func(exe *Exe) error { 261 | delete(sts, fmt.Sprintf("%s:%d", exe.Seg.File, exe.Seg.Head)) 262 | pnt++ 263 | return nil 264 | }) 265 | 266 | LogTrace("ignore smaller. db=%s, revi=%s, db-revi=%s, sqls=[%d,%d]/%d", dbn, s.revi, revi, cnt-lft+1, cnt-lft+pnt, cnt) 267 | lft = lft - pnt 268 | continue 269 | } else { 270 | walkExes(s.exes, func(exe *Exe) error { 271 | pnt++ 272 | return nil 273 | }) 274 | } 275 | 276 | LogTrace("db=%s, revi=%s, sqls count=%d", dbn, s.revi, pnt) 277 | pureRunExes(s.exes, ctx, conn, func(exe *Exe, stm string) error { 278 | v := exe.Seg 279 | delete(sts, fmt.Sprintf("%s:%d", v.File, v.Head)) 280 | lft = len(sts) 281 | if signifySql(stm) == tkn { 282 | LogTrace("db=%s, %d/%d. skip revi-slt. revi=%s, file=%s, line=%s", dbn, cnt-lft, cnt, s.revi, v.File, v.Line) 283 | return nil 284 | } 285 | if risk { 286 | a, err := conn.Exec(stm) 287 | if err != nil { 288 | LogError("db=%s, %d/%d, failed to revi sql, revi=%s, file=%s, line=%s, err=%v", dbn, cnt-lft, cnt, s.revi, v.File, v.Line, err) 289 | return err 290 | } else { 291 | LogTrace("db=%s, %d/%d, %d affects. 
revi=%s, file=%s, line=%s", dbn, cnt-lft, cnt, a, s.revi, v.File, v.Line) 292 | } 293 | } else { 294 | // 不处理 trigger 新结束符问题。 295 | if strings.Contains(stm, dlt) { 296 | OutTrace("%s find '%s', May Need '%s' to avoid", cmn, dlt, pref.DelimiterCmd) 297 | } 298 | OutTrace("%s db=%s, %d/%d, revi=%s, file=%s ,line=%s\n%s%s", cmn, dbn, cnt-lft, cnt, s.revi, v.File, v.Line, stm, dlt) 299 | } 300 | return nil 301 | }) 302 | 303 | } 304 | 305 | if lft == 0 { 306 | LogTrace("db=%s, exes=%d, all done", dbn, cnt) 307 | } else { 308 | LogTrace("db=%s, %d/%d, in progress", dbn, cnt-lft, cnt) 309 | } 310 | } 311 | -------------------------------------------------------------------------------- /art/mysql.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "database/sql" 5 | "errors" 6 | "fmt" 7 | _ "github.com/go-sql-driver/mysql" 8 | "strings" 9 | "time" 10 | ) 11 | 12 | type MyConn struct { 13 | Pref *Preference 14 | Conn *sql.DB 15 | Name string 16 | } 17 | 18 | func (m *MyConn) Open(p *Preference, d *DataSource) (err error) { 19 | if p.DatabaseType != "mysql" { 20 | return errors.New("unsupported DatabaseType, need mysql, but " + p.DatabaseType) 21 | } 22 | 23 | db, err := sql.Open("mysql", d.Conn) 24 | if err != nil { 25 | return 26 | } 27 | 28 | db.SetMaxOpenConns(p.ConnMaxOpen) 29 | db.SetMaxIdleConns(p.ConnMaxIdel) 30 | 31 | rs, err := db.Query(`SELECT DATABASE()`) 32 | if err != nil { 33 | return 34 | } 35 | defer rs.Close() 36 | 37 | var n string 38 | if rs.Next() { 39 | err = rs.Scan(&n) 40 | } 41 | 42 | m.Pref = p 43 | m.Conn = db 44 | m.Name = n 45 | 46 | return 47 | } 48 | 49 | func (m *MyConn) DbConn() (db *sql.DB) { 50 | return m.Conn 51 | } 52 | func (m *MyConn) DbName() string { 53 | return m.Name 54 | } 55 | 56 | func (m *MyConn) Exec(qr string, args ...interface{}) (int64, error) { 57 | if rs, err := m.Conn.Exec(qr, args...); err != nil { 58 | return 0, err 59 | } else { 60 | return rs.RowsAffected() 61 | } 62 | } 63 | 64 | func (m *MyConn) Query(fn func(*sql.Rows) error, qr string, args ...interface{}) error { 65 | if rs, er := m.Conn.Query(qr, args...); er != nil { 66 | return er 67 | } else { 68 | defer rs.Close() 69 | return fn(rs) 70 | } 71 | } 72 | 73 | func (m *MyConn) Tables() (tbs []string, err error) { 74 | fn := func(rs *sql.Rows) (er error) { 75 | for rs.Next() { 76 | var val string 77 | if er = rs.Scan(&val); er != nil { 78 | return 79 | } 80 | tbs = append(tbs, val) 81 | } 82 | return 83 | } 84 | 85 | err = m.Query(fn, `SHOW TABLES`) 86 | return 87 | } 88 | 89 | func (m *MyConn) Columns(table string) (map[string]Col, error) { 90 | cls := make(map[string]Col) 91 | fn := func(rs *sql.Rows) (er error) { 92 | for rs.Next() { 93 | cl, nl := Col{}, "" 94 | if er = rs.Scan(&cl.Name, &cl.Seq, &cl.Deft, &nl, &cl.Type, &cl.Key, &cl.Cmnt, &cl.Extr); er != nil { 95 | return 96 | } 97 | cl.Null = strings.EqualFold(nl, "YES") 98 | cls[cl.Name] = cl 99 | } 100 | return 101 | } 102 | 103 | err := m.Query(fn, ` 104 | SELECT 105 | COLUMN_NAME, 106 | ORDINAL_POSITION, 107 | COLUMN_DEFAULT, 108 | IS_NULLABLE, 109 | COLUMN_TYPE, 110 | COLUMN_KEY, 111 | COLUMN_COMMENT, 112 | EXTRA 113 | FROM 114 | INFORMATION_SCHEMA.COLUMNS 115 | WHERE 116 | TABLE_SCHEMA = ? 117 | AND TABLE_NAME = ? 
118 | `, m.Name, table) 119 | return cls, err 120 | } 121 | 122 | func (m *MyConn) Indexes(table string) (map[string]Idx, error) { 123 | ixs := make(map[string]Idx) 124 | fn := func(rs *sql.Rows) (er error) { 125 | for rs.Next() { 126 | ix, nq := Idx{}, 0 127 | if er = rs.Scan(&ix.Name, &nq, &ix.Cols, &ix.Type); er != nil { 128 | return 129 | } 130 | ix.Uniq = nq == 0 131 | ixs[ix.Name] = ix 132 | } 133 | return 134 | } 135 | 136 | err := m.Query(fn, ` 137 | SELECT 138 | INDEX_NAME, 139 | GROUP_CONCAT(DISTINCT NON_UNIQUE) AS UNIQ, 140 | GROUP_CONCAT(DISTINCT COLUMN_NAME ORDER BY SEQ_IN_INDEX) AS COLUMN_NAME, 141 | GROUP_CONCAT(DISTINCT INDEX_TYPE) AS INDEX_TYPE 142 | FROM 143 | INFORMATION_SCHEMA.STATISTICS 144 | WHERE 145 | TABLE_SCHEMA = ? 146 | AND TABLE_NAME = ? 147 | GROUP BY INDEX_NAME; 148 | `, m.Name, table) 149 | return ixs, err 150 | } 151 | 152 | func (m *MyConn) Triggers(table string) (map[string]Trg, error) { 153 | tgs := make(map[string]Trg) 154 | fn := func(rs *sql.Rows) (er error) { 155 | for rs.Next() { 156 | tg := Trg{} 157 | if er = rs.Scan(&tg.Name, &tg.Timing, &tg.Event, &tg.Statement); er != nil { 158 | return 159 | } 160 | tgs[tg.Name] = tg 161 | } 162 | return 163 | } 164 | 165 | err := m.Query(fn, ` 166 | SELECT 167 | TRIGGER_NAME, 168 | ACTION_TIMING, 169 | EVENT_MANIPULATION, 170 | ACTION_STATEMENT 171 | FROM 172 | INFORMATION_SCHEMA.TRIGGERS 173 | WHERE 174 | EVENT_OBJECT_SCHEMA=? 175 | AND EVENT_OBJECT_TABLE=? 176 | `, m.Name, table) 177 | return tgs, err 178 | } 179 | 180 | func (m *MyConn) DdlTable(table string) (ddl string, err error) { 181 | fn := func(rs *sql.Rows) (er error) { 182 | var nm string 183 | if rs.Next() { 184 | if er = rs.Scan(&nm, &ddl); er != nil { 185 | return 186 | } 187 | } 188 | return 189 | } 190 | 191 | err = m.Query(fn, `SHOW CREATE TABLE `+table) 192 | return 193 | } 194 | 195 | func (m *MyConn) DdlTrigger(trigger string) (ddl string, err error) { 196 | fn := func(rs *sql.Rows) (er error) { 197 | cnt := 7 198 | var col = make([]string, cnt) 199 | var ptr = make([]interface{}, cnt) 200 | for i := range col { 201 | ptr[i] = &col[i] 202 | } 203 | if rs.Next() { 204 | er = rs.Scan(ptr...) 
205 | if er != nil { 206 | return 207 | } 208 | } 209 | i1 := strings.Index(col[2], "DEFINER") 210 | i2 := strings.Index(col[2], "TRIGGER") 211 | if i1 > 0 && i1 < i2 { 212 | ddl = col[2][:i1] + col[2][i2:] 213 | } else { 214 | ddl = col[2] 215 | } 216 | return 217 | } 218 | 219 | err = m.Query(fn, `SHOW CREATE TRIGGER `+trigger) 220 | return 221 | } 222 | 223 | // Literal 字面量 224 | func (m *MyConn) Literal(val interface{}, col string) (string, bool) { 225 | 226 | if val == nil { 227 | return SqlNull, false 228 | } 229 | 230 | qto, tmf := true, m.Pref.FmtDateTime 231 | 232 | if len(col) > 0 { 233 | // https://dev.mysql.com/doc/refman/5.7/en/data-types.html 234 | switch strings.ToUpper(col) { 235 | case "INTEGER", "INT", "SMALLINT", "TINYINT", "MEDIUMINT", "BIGINT", "DECIMAL", "NUMERIC", "FLOAT", "DOUBLE": 236 | qto = false 237 | case "DATE": 238 | tmf = "2006-01-02" 239 | qto = true 240 | case "DATETIME": 241 | qto = true 242 | case "TIMESTAMP": 243 | qto = true 244 | case "TIME": 245 | tmf = "15:04:05" 246 | qto = true 247 | case "YEAR": 248 | tmf = "2006" 249 | qto = true 250 | case "CHAR", "VARCHAR", "BINARY", "VARBINARY", "BLOB", "TEXT", "ENUM", "SET": 251 | qto = true 252 | case "JSON": 253 | qto = true 254 | } 255 | } 256 | 257 | switch v := val.(type) { 258 | case string: 259 | return v, qto 260 | case []byte: 261 | return string(v), qto 262 | case sql.NullString: 263 | if v.Valid { 264 | return v.String, qto 265 | } else { 266 | return SqlNull, false 267 | } 268 | case uint, uint8, uint16, uint32, uint64: 269 | return fmt.Sprintf("%d", v), false 270 | case int, int8, int16, int32, int64: 271 | return fmt.Sprintf("%d", v), false 272 | case float32, float64: 273 | return fmt.Sprintf("%f", v), false 274 | case sql.NullBool: 275 | if v.Valid { 276 | if v.Bool { 277 | return SqlTrue, false 278 | } else { 279 | return SqlFalse, false 280 | } 281 | } else { 282 | return SqlNull, false 283 | } 284 | case sql.NullFloat64: 285 | if v.Valid { 286 | return fmt.Sprintf("%f", v.Float64), false 287 | } else { 288 | return SqlNull, false 289 | } 290 | case sql.NullInt64: 291 | if v.Valid { 292 | return fmt.Sprintf("%d", v.Int64), false 293 | } else { 294 | return SqlNull, false 295 | } 296 | case sql.NullTime: 297 | if v.Valid { 298 | return fmtTime(v.Time, tmf), true 299 | } else { 300 | return SqlNull, false 301 | } 302 | case time.Time: 303 | return fmtTime(v, tmf), true 304 | default: 305 | return fmt.Sprintf("%v", v), qto 306 | } 307 | } 308 | 309 | func (m *MyConn) Nothing(val interface{}) bool { 310 | if val == nil { 311 | return true 312 | } 313 | 314 | switch v := val.(type) { 315 | case uint: 316 | return v <= 0 317 | case uint8: 318 | return v <= 0 319 | case uint16: 320 | return v <= 0 321 | case uint32: 322 | return v <= 0 323 | case uint64: 324 | return v <= 0 325 | case int: 326 | return v <= 0 327 | case int8: 328 | return v <= 0 329 | case int16: 330 | return v <= 0 331 | case int32: 332 | return v <= 0 333 | case int64: 334 | case float32: 335 | return v <= 0 336 | case float64: 337 | return v <= 0 338 | case string: 339 | return len(v) == 0 340 | case []uint8: 341 | return len(string(v)) == 0 342 | case sql.NullBool: 343 | if v.Valid { 344 | return v.Bool == false 345 | } else { 346 | return true 347 | } 348 | case sql.NullString: 349 | if v.Valid { 350 | return len(v.String) == 0 351 | } else { 352 | return true 353 | } 354 | case sql.NullFloat64: 355 | if v.Valid { 356 | return v.Float64 <= 0 357 | } else { 358 | return true 359 | } 360 | case sql.NullInt64: 361 | if v.Valid { 
362 | return v.Int64 <= 0 363 | } else { 364 | return true 365 | } 366 | case sql.NullTime: 367 | if v.Valid { 368 | return false 369 | } else { 370 | return true 371 | } 372 | case time.Time: 373 | return false 374 | default: 375 | return len(fmt.Sprintf("%v", v)) == 0 376 | } 377 | return false 378 | } 379 | 380 | // Quotesc https://github.com/mysql/mysql-server/blob/mysql-5.7.5/mysys/charset.c#L823-L932 381 | // https://github.com/mysql/mysql-server/blob/mysql-5.7.5/mysys/charset.c#L963-L1038 382 | func (m *MyConn) Quotesc(str, qto string) string { 383 | 384 | ln := len(str) 385 | var buf strings.Builder 386 | buf.Grow(ln + ln/20 + 10) 387 | 388 | buf.WriteString(qto) 389 | for i := 0; i < ln; i++ { 390 | c := str[i] 391 | switch c { 392 | case '\x00': 393 | buf.WriteByte('\\') 394 | buf.WriteByte('0') 395 | case '\n': 396 | buf.WriteByte('\\') 397 | buf.WriteByte('n') 398 | case '\r': 399 | buf.WriteByte('\\') 400 | buf.WriteByte('r') 401 | //case '\x1a': 402 | // buf.WriteByte('\\') 403 | // buf.WriteByte('Z') 404 | case '\'': 405 | buf.WriteByte('\\') 406 | buf.WriteByte('\'') 407 | case '"': 408 | buf.WriteByte('\\') 409 | buf.WriteByte('"') 410 | case '\\': 411 | buf.WriteByte('\\') 412 | buf.WriteByte('\\') 413 | default: 414 | buf.WriteByte(c) 415 | } 416 | } 417 | buf.WriteString(qto) 418 | return buf.String() 419 | } 420 | 421 | func (m *MyConn) TableNotFound(err error) bool { 422 | msg := err.Error() 423 | if strings.Contains(msg, "1146") || strings.Contains(msg, "doesn't exist") { 424 | return true 425 | } 426 | return false 427 | } 428 | -------------------------------------------------------------------------------- /demo/sql/revi/2018-11-17.sql: -------------------------------------------------------------------------------- 1 | -- ------------------------------------------- 2 | SELECT max(version) FROM sys_schema_version; 3 | 4 | INSERT INTO `tx_parcel` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`warehouse`,`sender_id`,`recver_id`,`track_num`,`weight_pkg`,`weight_dim`,`input_time`,`store_time`,`shelf_time`,`leave_time`,`track_time`) VALUES 5 | (93152,'2016-08-11 21:17:43','2017-11-01 10:17:50',0,226608,1,NULL,49799,80385,'SU160816',2.75,NULL,NULL,NULL,NULL,'2016-08-12 22:12:10','2016-08-20 10:32:18') 6 | ,(93163,'2016-08-11 22:49:34','2017-11-01 10:17:50',0,226608,1,NULL,14245,80649,'100354269562',3.00,NULL,'2016-08-12 06:32:13','2016-08-12 15:01:30',NULL,'2016-08-12 22:12:07','2016-08-20 10:23:57') 7 | ,(278580,'2017-01-02 16:59:08','2017-11-01 10:17:50',0,2481043,78,NULL,93201,311828,'303354496818',4.10,NULL,NULL,'2017-01-04 16:58:03',NULL,'2017-01-04 20:15:56','2017-01-19 02:42:13') 8 | ,(278581,'2017-01-02 17:00:51','2017-11-01 10:17:50',0,2481043,78,NULL,93201,311829,'303762397428',4.10,NULL,NULL,'2017-01-04 16:57:03',NULL,'2017-01-06 17:33:45','2017-01-19 02:42:13') 9 | ,(1163922,'2018-01-02 17:39:50','2018-01-17 06:49:00',0,3600736,96,2,467122,761994,'NY180100000201',0.30,NULL,NULL,'2018-01-03 09:41:27',NULL,'2018-01-03 11:37:48',NULL) 10 | ,(1163923,'2018-01-02 17:39:51','2018-01-17 06:49:00',0,3600736,96,2,467123,761995,'NY180100000901',1.10,NULL,NULL,'2018-01-03 09:42:11',NULL,'2018-01-03 11:37:48',NULL) 11 | ; 12 | 13 | INSERT INTO `tx_track` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`parcel_id`,`company`,`track_num`,`events`,`status`,`dest_city`) VALUES 14 | (172215,'2016-08-11 22:49:34','2016-12-09 16:28:24',0,222515,1,93163,200,'100354269562','[{\"date\":\"2016-08-11 
22:49:34\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:07\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200},{\"date\":\"2016-08-13 17:52:19\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CA982\",\"status\":300},{\"date\":\"2016-08-17 08:46:33\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2016-08-18 09:18:31\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2016-08-20 10:31:58\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL) 15 | ,(369647,'2017-01-04 16:58:03','2017-03-03 02:36:59',0,1025160,78,278580,200,'303354496818','[{\"date\":\"2017-01-04 16:58:03\",\"info\":\"【美国】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300},{\"date\":\"2017-01-14 09:03:0\",\"info\":\"【广州】干线航班已到达【广州白云国际机场】\",\"status\":300},{\"date\":\"2017-01-15 12:42:11\",\"info\":\"【广州】干线航班已从【广州白云国际机场】起飞,航班号:CA3615\",\"status\":300},{\"date\":\"2017-01-15 13:37:42\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2017-01-15 13:40:35\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2017-01-18 08:18:15\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL) 16 | ,(369644,'2017-01-04 16:57:03','2017-03-03 02:37:12',0,1024502,78,278581,200,'303762397428','[{\"date\":\"2017-01-04 16:57:03\",\"info\":\"【美国】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300},{\"date\":\"2017-01-14 09:03:0\",\"info\":\"【广州】干线航班已到达【广州白云国际机场】\",\"status\":300},{\"date\":\"2017-01-15 12:42:11\",\"info\":\"【广州】干线航班已从【广州白云国际机场】起飞,航班号:CA3615\",\"status\":300},{\"date\":\"2017-01-15 13:37:42\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2017-01-15 13:40:35\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2017-01-18 00:18:20\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL) 17 | ,(988322,'2018-01-02 17:39:50','2018-01-17 06:49:00',0,3600736,NULL,1163922,200,'NY180100000201','[{\"date\":\"2018-01-02 17:39:51\",\"info\":\"运单已创建\",\"status\":0,\"template\":\"TX000\"},{\"date\":\"2018-01-03 04:41:27\",\"info\":\"【纽约】包裹已在纽约分拣中心入库\",\"status\":100,\"template\":\"TX100\"},{\"date\":\"2018-01-03 06:37:48\",\"info\":\"【纽约】包裹已从纽约分拣中心离开\",\"status\":200,\"template\":\"TX200\"},{\"date\":\"2018-01-04 08:00:33\",\"id\":1627,\"info\":\"【纽约】包裹已在【纽约肯尼迪国际机场】等待安排航空\",\"status\":300,\"template\":\"TX250\"},{\"date\":\"2018-01-10 01:59:21\",\"id\":2702,\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300,\"template\":\"TX300\"},{\"date\":\"2018-01-11 08:12:03\",\"id\":2725,\"info\":\"【广州】干线航班已到达【广州白云机场】\",\"status\":300,\"template\":\"TX360\"},{\"date\":\"2018-01-12 09:00:12\",\"id\":2802,\"info\":\"【广州】包裹清关中\",\"status\":400,\"template\":\"TX400\"},{\"date\":\"2018-01-14 13:15:08\",\"info\":\"韵达跨境物流公司 进行揽件扫描\",\"status\":500},{\"date\":\"2018-01-14 14:28:34\",\"info\":\"韵达跨境物流公司 进行下级地点扫描,将发往:天津分拨中心\",\"status\":500},{\"date\":\"2018-01-14 14:59:18\",\"info\":\"韵达跨境物流公司 进行揽件扫描\",\"status\":500},{\"date\":\"2018-01-14 21:29:29\",\"info\":\"广东东莞分拨中心 在分拨中心进行称重扫描\",\"status\":500},{\"date\":\"2018-01-14 21:30:53\",\"info\":\"广东东莞分拨中心 进行装车扫描,即将发往:天津分拨中心\",\"status\":500},{\"date\":\"2018-01-16 14:23:38\",\"info\":\"天津分拨中心 在分拨中心进行卸车扫描\",\"status\":500},{\"date\":\"2018-01-16 15:00:28\",\"info\":\"天津分拨中心 从站点发出,本次转运目的地:天津武清区公司\",\"status\":500},{\"date\":\"2018-01-17 
07:30:47\",\"info\":\"天津武清区公司 进行派件扫描;派送业务员:张瑶;联系电话:15722099883\",\"status\":500},{\"date\":\"2018-01-17 12:31:00\",\"info\":\"天津武清区公司 快件已被 入快递柜 签收\",\"status\":500},{\"date\":\"2018-01-17 14:04:08\",\"info\":\"天津武清区公司 快件已被 已签收 签收\",\"status\":500}]',600,NULL) 18 | ,(988323,'2018-01-02 17:39:51','2018-01-17 06:49:00',0,3600736,NULL,1163923,200,'NY180100000901','[{\"date\":\"2018-01-02 17:39:52\",\"info\":\"运单已创建\",\"status\":0,\"template\":\"TX000\"},{\"date\":\"2018-01-03 04:42:11\",\"info\":\"【纽约】包裹已在纽约分拣中心入库\",\"status\":100,\"template\":\"TX100\"},{\"date\":\"2018-01-03 06:37:48\",\"info\":\"【纽约】包裹已从纽约分拣中心离开\",\"status\":200,\"template\":\"TX200\"},{\"date\":\"2018-01-04 08:00:33\",\"id\":1627,\"info\":\"【纽约】包裹已在【纽约肯尼迪国际机场】等待安排航空\",\"status\":300,\"template\":\"TX250\"},{\"date\":\"2018-01-10 01:59:21\",\"id\":2702,\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300,\"template\":\"TX300\"},{\"date\":\"2018-01-11 08:12:03\",\"id\":2725,\"info\":\"【广州】干线航班已到达【广州白云机场】\",\"status\":300,\"template\":\"TX360\"},{\"date\":\"2018-01-12 09:00:12\",\"id\":2802,\"info\":\"【广州】包裹清关中\",\"status\":400,\"template\":\"TX400\"},{\"date\":\"2018-01-14 13:26:58\",\"info\":\"韵达跨境物流公司 进行揽件扫描\",\"status\":500},{\"date\":\"2018-01-14 14:06:11\",\"info\":\"韵达跨境物流公司 进行下级地点扫描,将发往:辽宁沈阳网点包\",\"status\":500},{\"date\":\"2018-01-14 14:51:10\",\"info\":\"韵达跨境物流公司 进行揽件扫描\",\"status\":500},{\"date\":\"2018-01-14 21:34:31\",\"info\":\"广东东莞分拨中心 在分拨中心进行称重扫描\",\"status\":500},{\"date\":\"2018-01-14 21:37:10\",\"info\":\"广东东莞分拨中心 进行装车扫描,即将发往:辽宁沈阳分拨中心\",\"status\":500},{\"date\":\"2018-01-16 23:45:26\",\"info\":\"辽宁沈阳分拨中心 在分拨中心进行卸车扫描\",\"status\":500},{\"date\":\"2018-01-17 00:05:45\",\"info\":\"辽宁沈阳分拨中心 从站点发出,本次转运目的地:辽宁沈阳浑南开发区二公司\",\"status\":500},{\"date\":\"2018-01-17 07:24:19\",\"info\":\"辽宁沈阳浑南开发区二公司 到达目的地网点,快件将很快进行派送\",\"status\":500},{\"date\":\"2018-01-17 07:56:48\",\"info\":\"辽宁沈阳浑南开发区二公司 进行派件扫描;派送业务员:王凤祥;联系电话:13166748696\",\"status\":500},{\"date\":\"2018-01-17 13:03:25\",\"info\":\"辽宁沈阳浑南开发区二公司 快件已被 已签收 签收\",\"status\":500}]',600,NULL) 19 | ,(172204,'2016-08-11 21:17:43','2016-12-09 16:28:24',0,222515,1,93152,200,'SU160816','[{\"date\":\"2016-08-11 10:31:53\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:10\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200},{\"date\":\"2016-08-13 17:52:19\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CA982\",\"status\":300},{\"date\":\"2016-08-17 08:46:33\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2016-08-18 09:18:31\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2016-08-20 10:35:55\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL) 20 | ; 21 | 22 | INSERT INTO `tx_parcel_event` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`parcel_id`,`type`,`source`,`operator_id`,`is_closed`) VALUES 23 | (351370,'2016-08-12 22:12:07','2016-08-12 22:12:07',0,212067,1,93152,300,'airwayBill=999-45480293',57,NULL) 24 | ,(363390,'2016-08-20 10:32:18','2016-08-20 10:32:18',0,222515,1,93152,500,NULL,NULL,NULL) 25 | ,(370092,'2016-08-22 13:10:05','2016-08-22 13:10:05',0,226608,1,93152,501,NULL,NULL,NULL) 26 | ,(350213,'2016-08-12 15:01:30','2016-08-12 15:01:30',0,210992,1,93163,100,NULL,NULL,NULL) 27 | ,(350946,'2016-08-12 22:12:07','2016-08-12 22:12:07',0,212067,1,93163,300,'airwayBill=999-45480293',57,NULL) 28 | ,(363385,'2016-08-20 10:23:57','2016-08-20 10:23:57',0,222515,1,93163,500,NULL,NULL,NULL) 29 | ,(370091,'2016-08-22 13:00:32','2016-08-22 13:00:32',0,226608,1,93163,501,NULL,NULL,NULL) 30 | ,(833154,'2017-01-04 
16:58:03',NULL,0,663030,78,278580,100,NULL,NULL,NULL) 31 | ,(883598,'2017-01-11 23:59:37',NULL,0,710728,78,278580,300,'airwayBill=784-29622666',56,NULL) 32 | ,(933048,'2017-01-19 02:42:13',NULL,0,756412,78,278580,500,NULL,NULL,NULL) 33 | ,(1048772,'2017-01-30 22:05:59',NULL,0,782423,78,278580,502,NULL,NULL,NULL) 34 | ,(833144,'2017-01-04 16:57:03',NULL,0,663020,78,278581,100,NULL,NULL,NULL) 35 | ,(883616,'2017-01-11 23:59:37',NULL,0,710728,78,278581,300,'airwayBill=784-29622666',56,NULL) 36 | ,(933492,'2017-01-19 08:48:11',NULL,0,752392,78,278581,500,NULL,NULL,NULL) 37 | ,(933614,'2017-01-19 02:42:13',NULL,0,756412,78,278581,500,NULL,NULL,NULL) 38 | ,(1049551,'2017-01-31 00:00:22',NULL,0,782423,78,278581,502,NULL,NULL,NULL) 39 | ,(3012022,'2018-01-02 17:39:51',NULL,0,3517088,NULL,1163922,7,NULL,NULL,NULL) 40 | ,(3029856,'2018-01-03 00:58:31',NULL,0,3529662,96,1163922,100,NULL,NULL,NULL) 41 | ,(3033050,'2018-01-03 09:41:27',NULL,0,3532704,NULL,1163922,100,'125.119.237.185',87,NULL) 42 | ,(3036336,'2018-01-03 11:54:01',NULL,0,3534378,NULL,1163922,190,'125.119.237.185',87,NULL) 43 | ,(3038150,'2018-01-03 23:20:40',NULL,0,3534716,NULL,1163922,300,'72.227.141.116',57,NULL) 44 | ,(3106745,'2018-01-17 06:50:48',NULL,0,3600736,NULL,1163922,501,NULL,NULL,NULL) 45 | ,(3012023,'2018-01-02 17:39:52',NULL,0,3517089,NULL,1163923,7,NULL,NULL,NULL) 46 | ,(3029979,'2018-01-03 01:07:44',NULL,0,3529885,96,1163923,100,NULL,NULL,NULL) 47 | ,(3033333,'2018-01-03 09:42:11',NULL,0,3532954,NULL,1163923,100,'125.119.237.185',87,NULL) 48 | ,(3036337,'2018-01-03 11:54:04',NULL,0,3534378,NULL,1163923,190,'125.119.237.185',87,NULL) 49 | ,(3038151,'2018-01-03 23:20:40',NULL,0,3534716,NULL,1163923,300,'72.227.141.116',57,NULL) 50 | ,(3106746,'2018-01-17 06:50:50',NULL,0,3600736,NULL,1163923,501,NULL,NULL,NULL) 51 | ; 52 | 53 | INSERT INTO `tx_receiver` (`id`,`create_time`,`modify_time`,`is_deleted`,`logno`,`user_id`,`name`,`phone`,`postcode`,`country`,`province`,`city`,`district`,`address1`,`address2`,`hash`) VALUES 54 | (80385,'2016-08-11 21:17:43','2017-12-08 07:10:14',0,-2500,1,'张三','13000000001','310000',2,'ZJ','杭州市',NULL,'上城区江城路候潮公寓',NULL,'') 55 | ,(80649,'2016-08-12 06:32:13','2017-12-08 07:10:14',0,-2500,1,'张山','13000000002','311500',2,'ZJ','杭州市',NULL,'桐庐县分水镇东门雅苑','','') 56 | ,(311828,'2017-01-02 16:59:08','2017-12-08 07:10:14',0,-2500,78,'李四','13000000003','200000',2,'SH','上海','徐汇区','华泾路1425弄','','') 57 | ,(311829,'2017-01-02 17:00:51','2017-12-08 07:10:14',0,-2500,78,'李思','13000000004','200000',2,'SH','上海','徐汇区','华泾路1427弄1','','') 58 | ,(761994,'2018-01-02 17:39:50','2018-01-02 17:41:04',0,3528135,NULL,'王五','18000000005','301700',2,'TJ','天津','武清区','杨村街和平里小区','','ca9a4340416208d05df21319095c332d') 59 | ,(761995,'2018-01-02 17:39:52','2018-01-02 17:41:03',0,3530808,NULL,'王武','18000000006','110100',2,'LN','沈阳市','浑南区','辽宁省沈阳市浑南区金阳街','','b8516c744f0bfeebde027233bf29b546') 60 | ; 61 | 62 | REPLACE INTO sys_schema_version (version, created) VALUES( 2018111701, NOW()); 63 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /demo/chk/txt/07.txt: -------------------------------------------------------------------------------- 1 | 2 | -- -- SRC ID=9, LINE=9:9, DEP 3 | SELECT * FROM tx_parcel WHERE create_time <= '2018-01-01 00:00:00'; 4 | 5 | -- -- OUT ID=12, LINE=12:12, FOR 6 | -- REPLACE INTO tx_parcel VALUES (93152,'2016-08-11 21:17:43','2017-11-01 10:17:50',0,226608,1,NULL,49799,80385,'SU160816',2.75,NULL,NULL,NULL,NULL,'2016-08-12 22:12:10','2016-08-20 10:32:18'); 7 | 8 | -- -- SRC ID=17, LINE=17:17, DEP 9 | SELECT * FROM tx_track WHERE track_num = 'SU160816'; 10 | 11 | -- -- OUT ID=20, LINE=20:20, FOR 12 | -- REPLACE INTO tx_track VALUES (172204,'2016-08-11 21:17:43','2016-12-09 16:28:24',0,222515,1,93152,200,'SU160816','[{\"date\":\"2016-08-11 10:31:53\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:10\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200},{\"date\":\"2016-08-13 17:52:19\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CA982\",\"status\":300},{\"date\":\"2016-08-17 08:46:33\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2016-08-18 09:18:31\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2016-08-20 10:35:55\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL); 13 | 14 | -- -- SRC ID=23, LINE=23:23, FOR 15 | DELETE FROM tx_track where id = 172204; 16 | 17 | -- -- SRC ID=54, LINE=54:54, END 18 | REPLACE INTO sys_hot_separation VALUES ('tx_track', /*内嵌多行注释*/ 172204, now()); 19 | 20 | -- -- SRC ID=28, LINE=28:28, DEP 21 | SELECT * FROM tx_parcel_event WHERE parcel_id = 93152; 22 | 23 | -- -- OUT ID=31, LINE=31:32, FOR 24 | -- INSERT INTO tx_parcel_event VALUES (351370,'2016-08-12 22:12:07','2016-08-12 22:12:07',0,212067,1,93152,300,'airwayBill=999-45480293',57,NULL) 25 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 26 | 27 | -- -- OUT ID=31, LINE=31:32, FOR 28 | -- INSERT INTO tx_parcel_event VALUES (363390,'2016-08-20 10:32:18','2016-08-20 10:32:18',0,222515,1,93152,500,NULL,NULL,NULL) 29 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 30 | 31 | -- -- OUT ID=31, LINE=31:32, FOR 32 | -- INSERT INTO tx_parcel_event VALUES (370092,'2016-08-22 13:10:05','2016-08-22 13:10:05',0,226608,1,93152,501,NULL,NULL,NULL) 33 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 34 | 35 | -- -- SRC ID=35, LINE=35:35, END 36 | DELETE FROM tx_parcel_event where parcel_id = 93152; 37 | 38 | -- -- SRC ID=51, LINE=51:51, END 39 | REPLACE INTO sys_hot_separation VALUES ('tx_parcel_event', 370092, now()); -- 单行注释; 40 | 41 | -- -- SRC ID=41, LINE=41:41, DEP 42 | SELECT * FROM tx_receiver WHERE id = 80385; 43 | 44 | -- -- OUT ID=44, LINE=44:44, FOR 45 | -- REPLACE INTO tx_receiver (id,create_time,modify_time,is_deleted,logno,user_id,name,phone,postcode,country,province,city,district,address1,address2,hash) VALUES (80385,'2016-08-11 21:17:43','2017-12-08 07:10:14',0,-2500,1,'张三','13000000001','310000',2,'ZJ','杭州市',NULL,'上城区江城路候潮公寓',NULL,''); 46 | 47 | -- -- SRC ID=47, LINE=47:47, FOR 48 | DELETE FROM tx_receiver 
where id = 80385; 49 | 50 | -- -- SRC ID=60, LINE=60:60, FOR 51 | DELETE FROM tx_parcel where id = 93152; 52 | 53 | -- -- OUT ID=12, LINE=12:12, FOR 54 | -- REPLACE INTO tx_parcel VALUES (93163,'2016-08-11 22:49:34','2017-11-01 10:17:50',0,226608,1,NULL,14245,80649,'100354269562',3.00,NULL,'2016-08-12 06:32:13','2016-08-12 15:01:30',NULL,'2016-08-12 22:12:07','2016-08-20 10:23:57'); 55 | 56 | -- -- SRC ID=17, LINE=17:17, DEP 57 | SELECT * FROM tx_track WHERE track_num = '100354269562'; 58 | 59 | -- -- OUT ID=20, LINE=20:20, FOR 60 | -- REPLACE INTO tx_track VALUES (172215,'2016-08-11 22:49:34','2016-12-09 16:28:24',0,222515,1,93163,200,'100354269562','[{\"date\":\"2016-08-11 22:49:34\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:07\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200}]',200,NULL); 61 | 62 | -- -- SRC ID=23, LINE=23:23, FOR 63 | DELETE FROM tx_track where id = 172215; 64 | 65 | -- -- SRC ID=54, LINE=54:54, END 66 | REPLACE INTO sys_hot_separation VALUES ('tx_track', /*内嵌多行注释*/ 172215, now()); 67 | 68 | -- -- SRC ID=28, LINE=28:28, DEP 69 | SELECT * FROM tx_parcel_event WHERE parcel_id = 93163; 70 | 71 | -- -- OUT ID=31, LINE=31:32, FOR 72 | -- INSERT INTO tx_parcel_event VALUES (350213,'2016-08-12 15:01:30','2016-08-12 15:01:30',0,210992,1,93163,100,NULL,NULL,NULL) 73 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 74 | 75 | -- -- OUT ID=31, LINE=31:32, FOR 76 | -- INSERT INTO tx_parcel_event VALUES (350946,'2016-08-12 22:12:07','2016-08-12 22:12:07',0,212067,1,93163,300,'airwayBill=999-45480293',57,NULL) 77 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 78 | 79 | -- -- OUT ID=31, LINE=31:32, FOR 80 | -- INSERT INTO tx_parcel_event VALUES (363385,'2016-08-20 10:23:57','2016-08-20 10:23:57',0,222515,1,93163,500,NULL,NULL,NULL) 81 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 82 | 83 | -- -- OUT ID=31, LINE=31:32, FOR 84 | -- INSERT INTO tx_parcel_event VALUES (370091,'2016-08-22 13:00:32','2016-08-22 13:00:32',0,226608,1,93163,501,NULL,NULL,NULL) 85 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 86 | 87 | -- -- SRC ID=35, LINE=35:35, END 88 | DELETE FROM tx_parcel_event where parcel_id = 93163; 89 | 90 | -- -- SRC ID=51, LINE=51:51, END 91 | REPLACE INTO sys_hot_separation VALUES ('tx_parcel_event', 370091, now()); -- 单行注释; 92 | 93 | -- -- SRC ID=41, LINE=41:41, DEP 94 | SELECT * FROM tx_receiver WHERE id = 80649; 95 | 96 | -- -- OUT ID=44, LINE=44:44, FOR 97 | -- REPLACE INTO tx_receiver (id,create_time,modify_time,is_deleted,logno,user_id,name,phone,postcode,country,province,city,district,address1,address2,hash) VALUES (80649,'2016-08-12 06:32:13','2017-12-08 07:10:14',0,-2500,1,'张山','13000000002','311500',2,'ZJ','杭州市',NULL,'桐庐县分水镇东门雅苑','',''); 98 | 99 | -- -- SRC ID=47, LINE=47:47, FOR 100 | DELETE FROM tx_receiver where id = 80649; 101 | 102 | -- -- SRC ID=60, LINE=60:60, FOR 103 | DELETE FROM tx_parcel where id = 93163; 104 | 105 | -- -- OUT ID=12, LINE=12:12, FOR 106 | -- REPLACE INTO tx_parcel VALUES (278580,'2017-01-02 16:59:08','2017-11-01 10:17:50',0,2481043,78,NULL,93201,311828,'303354496818',4.10,NULL,NULL,'2017-01-04 16:58:03',NULL,'2017-01-04 20:15:56','2017-01-19 02:42:13'); 107 | 108 | -- -- SRC ID=17, LINE=17:17, DEP 109 | SELECT * FROM tx_track WHERE track_num = '303354496818'; 110 | 111 | -- -- OUT ID=20, LINE=20:20, FOR 112 | -- REPLACE INTO tx_track VALUES (369647,'2017-01-04 16:58:03','2017-03-03 
02:36:59',0,1025160,78,278580,200,'303354496818','[{\"date\":\"2017-01-04 16:58:03\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300}]',300,NULL); 113 | 114 | -- -- SRC ID=23, LINE=23:23, FOR 115 | DELETE FROM tx_track where id = 369647; 116 | 117 | -- -- SRC ID=54, LINE=54:54, END 118 | REPLACE INTO sys_hot_separation VALUES ('tx_track', /*内嵌多行注释*/ 369647, now()); 119 | 120 | -- -- SRC ID=28, LINE=28:28, DEP 121 | SELECT * FROM tx_parcel_event WHERE parcel_id = 278580; 122 | 123 | -- -- OUT ID=31, LINE=31:32, FOR 124 | -- INSERT INTO tx_parcel_event VALUES (833154,'2017-01-04 16:58:03',NULL,0,663030,78,278580,100,NULL,NULL,NULL) 125 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 126 | 127 | -- -- OUT ID=31, LINE=31:32, FOR 128 | -- INSERT INTO tx_parcel_event VALUES (883598,'2017-01-11 23:59:37',NULL,0,710728,78,278580,300,'airwayBill=784-29622666',56,NULL) 129 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 130 | 131 | -- -- OUT ID=31, LINE=31:32, FOR 132 | -- INSERT INTO tx_parcel_event VALUES (933048,'2017-01-19 02:42:13',NULL,0,756412,78,278580,500,NULL,NULL,NULL) 133 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 134 | 135 | -- -- OUT ID=31, LINE=31:32, FOR 136 | -- INSERT INTO tx_parcel_event VALUES (1048772,'2017-01-30 22:05:59',NULL,0,782423,78,278580,502,NULL,NULL,NULL) 137 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 138 | 139 | -- -- SRC ID=35, LINE=35:35, END 140 | DELETE FROM tx_parcel_event where parcel_id = 278580; 141 | 142 | -- -- SRC ID=51, LINE=51:51, END 143 | REPLACE INTO sys_hot_separation VALUES ('tx_parcel_event', 1048772, now()); -- 单行注释; 144 | 145 | -- -- SRC ID=41, LINE=41:41, DEP 146 | SELECT * FROM tx_receiver WHERE id = 311828; 147 | 148 | -- -- OUT ID=44, LINE=44:44, FOR 149 | -- REPLACE INTO tx_receiver (id,create_time,modify_time,is_deleted,logno,user_id,name,phone,postcode,country,province,city,district,address1,address2,hash) VALUES (311828,'2017-01-02 16:59:08','2017-12-08 07:10:14',0,-2500,78,'李四','13000000003','200000',2,'SH','上海','徐汇区','华泾路1425弄','',''); 150 | 151 | -- -- SRC ID=47, LINE=47:47, FOR 152 | DELETE FROM tx_receiver where id = 311828; 153 | 154 | -- -- SRC ID=60, LINE=60:60, FOR 155 | DELETE FROM tx_parcel where id = 278580; 156 | 157 | -- -- OUT ID=12, LINE=12:12, FOR 158 | -- REPLACE INTO tx_parcel VALUES (278581,'2017-01-02 17:00:51','2017-11-01 10:17:50',0,2481043,78,NULL,93201,311829,'303762397428',4.10,NULL,NULL,'2017-01-04 16:57:03',NULL,'2017-01-06 17:33:45','2017-01-19 02:42:13'); 159 | 160 | -- -- SRC ID=17, LINE=17:17, DEP 161 | SELECT * FROM tx_track WHERE track_num = '303762397428'; 162 | 163 | -- -- OUT ID=20, LINE=20:20, FOR 164 | -- REPLACE INTO tx_track VALUES (369644,'2017-01-04 16:57:03','2017-03-03 02:37:12',0,1024502,78,278581,200,'303762397428','[{\"date\":\"2017-01-04 16:57:03\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300},{\"date\":\"2017-01-14 09:03:0\",\"info\":\"【广州】干线航班已到达【广州白云国际机场】\",\"status\":300},{\"date\":\"2017-01-15 12:42:11\",\"info\":\"【广州】干线航班已从【广州白云国际机场】起飞,航班号:CA3615\",\"status\":300},{\"date\":\"2017-01-15 
13:37:42\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2017-01-15 13:40:35\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2017-01-18 00:18:20\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]',500,NULL); 165 | 166 | -- -- SRC ID=23, LINE=23:23, FOR 167 | DELETE FROM tx_track where id = 369644; 168 | 169 | -- -- SRC ID=54, LINE=54:54, END 170 | REPLACE INTO sys_hot_separation VALUES ('tx_track', /*内嵌多行注释*/ 369644, now()); 171 | 172 | -- -- SRC ID=28, LINE=28:28, DEP 173 | SELECT * FROM tx_parcel_event WHERE parcel_id = 278581; 174 | 175 | -- -- OUT ID=31, LINE=31:32, FOR 176 | -- INSERT INTO tx_parcel_event VALUES (833144,'2017-01-04 16:57:03',NULL,0,663020,78,278581,100,NULL,NULL,NULL) 177 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 178 | 179 | -- -- OUT ID=31, LINE=31:32, FOR 180 | -- INSERT INTO tx_parcel_event VALUES (883616,'2017-01-11 23:59:37',NULL,0,710728,78,278581,300,'airwayBill=784-29622666',56,NULL) 181 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 182 | 183 | -- -- OUT ID=31, LINE=31:32, FOR 184 | -- INSERT INTO tx_parcel_event VALUES (933492,'2017-01-19 08:48:11',NULL,0,752392,78,278581,500,NULL,NULL,NULL) 185 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 186 | 187 | -- -- OUT ID=31, LINE=31:32, FOR 188 | -- INSERT INTO tx_parcel_event VALUES (933614,'2017-01-19 02:42:13',NULL,0,756412,78,278581,500,NULL,NULL,NULL) 189 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 190 | 191 | -- -- OUT ID=31, LINE=31:32, FOR 192 | -- INSERT INTO tx_parcel_event VALUES (1049551,'2017-01-31 00:00:22',NULL,0,782423,78,278581,502,NULL,NULL,NULL) 193 | -- ON DUPLICATE KEY UPDATE modify_time = '2018-01-01 00:00:00'; 194 | 195 | -- -- SRC ID=35, LINE=35:35, END 196 | DELETE FROM tx_parcel_event where parcel_id = 278581; 197 | 198 | -- -- SRC ID=51, LINE=51:51, END 199 | REPLACE INTO sys_hot_separation VALUES ('tx_parcel_event', 1049551, now()); -- 单行注释; 200 | 201 | -- -- SRC ID=41, LINE=41:41, DEP 202 | SELECT * FROM tx_receiver WHERE id = 311829; 203 | 204 | -- -- OUT ID=44, LINE=44:44, FOR 205 | -- REPLACE INTO tx_receiver (id,create_time,modify_time,is_deleted,logno,user_id,name,phone,postcode,country,province,city,district,address1,address2,hash) VALUES (311829,'2017-01-02 17:00:51','2017-12-08 07:10:14',0,-2500,78,'李思','13000000004','200000',2,'SH','上海','徐汇区','华泾路1427弄1','',''); 206 | 207 | -- -- SRC ID=47, LINE=47:47, FOR 208 | DELETE FROM tx_receiver where id = 311829; 209 | 210 | -- -- SRC ID=60, LINE=60:60, FOR 211 | DELETE FROM tx_parcel where id = 278581; 212 | 213 | -- -- SRC ID=57, LINE=57:57, END 214 | REPLACE INTO sys_hot_separation VALUES ('tx_parcel', 278581, now()); 215 | 216 | -- -- SRC ID=63, LINE=63:63, END 217 | DELETE FROM tx_parcel$log WHERE create_time <= '2018-01-01 00:00:00'; 218 | -------------------------------------------------------------------------------- /demo/chk/txt/09.txt: -------------------------------------------------------------------------------- 1 | 2 | -- -- SRC ID=2, LINE=2:2 3 | SELECT table_name, checked_id FROM sys_hot_separation; 4 | 5 | -- -- SRC ID=8, LINE=8:8 6 | SELECT version FROM sys_schema_version; 7 | 8 | -- -- OUT ID=11, LINE=11:11, FOR 9 | -- version=2018111101; 10 | 11 | -- -- OUT ID=11, LINE=11:11, FOR 12 | -- version=2018111102; 13 | 14 | -- -- OUT ID=11, LINE=11:11, FOR 15 | -- version=2018111103; 16 | 17 | -- -- SRC ID=14, LINE=14:14 18 | SELECT * FROM tx_parcel; 19 | 20 | -- -- OUT ID=17, LINE=17:17, FOR 21 | -- id=93152 22 | -- 
create_time='2016-08-11 21:17:43' 23 | -- modify_time='2017-11-01 10:17:50' 24 | -- is_deleted=0 25 | -- logno=226608 26 | -- user_id=1 27 | -- warehouse=NULL 28 | -- sender_id=49799 29 | -- recver_id=80385 30 | -- track_num='SU160816' 31 | -- weight_pkg=2.75 32 | -- weight_dim=NULL 33 | -- input_time=NULL 34 | -- store_time=NULL 35 | -- shelf_time=NULL 36 | -- leave_time='2016-08-12 22:12:10' 37 | -- track_time='2016-08-20 10:32:18'; 38 | 39 | -- -- OUT ID=17, LINE=17:17, FOR 40 | -- id=93163 41 | -- create_time='2016-08-11 22:49:34' 42 | -- modify_time='2017-11-01 10:17:50' 43 | -- is_deleted=0 44 | -- logno=226608 45 | -- user_id=1 46 | -- warehouse=NULL 47 | -- sender_id=14245 48 | -- recver_id=80649 49 | -- track_num='100354269562' 50 | -- weight_pkg=3.00 51 | -- weight_dim=NULL 52 | -- input_time='2016-08-12 06:32:13' 53 | -- store_time='2016-08-12 15:01:30' 54 | -- shelf_time=NULL 55 | -- leave_time='2016-08-12 22:12:07' 56 | -- track_time='2016-08-20 10:23:57'; 57 | 58 | -- -- OUT ID=17, LINE=17:17, FOR 59 | -- id=278580 60 | -- create_time='2017-01-02 16:59:08' 61 | -- modify_time='2017-11-01 10:17:50' 62 | -- is_deleted=0 63 | -- logno=2481043 64 | -- user_id=78 65 | -- warehouse=NULL 66 | -- sender_id=93201 67 | -- recver_id=311828 68 | -- track_num='303354496818' 69 | -- weight_pkg=4.10 70 | -- weight_dim=NULL 71 | -- input_time=NULL 72 | -- store_time='2017-01-04 16:58:03' 73 | -- shelf_time=NULL 74 | -- leave_time='2017-01-04 20:15:56' 75 | -- track_time='2017-01-19 02:42:13'; 76 | 77 | -- -- OUT ID=17, LINE=17:17, FOR 78 | -- id=278581 79 | -- create_time='2017-01-02 17:00:51' 80 | -- modify_time='2017-11-01 10:17:50' 81 | -- is_deleted=0 82 | -- logno=2481043 83 | -- user_id=78 84 | -- warehouse=NULL 85 | -- sender_id=93201 86 | -- recver_id=311829 87 | -- track_num='303762397428' 88 | -- weight_pkg=4.10 89 | -- weight_dim=NULL 90 | -- input_time=NULL 91 | -- store_time='2017-01-04 16:57:03' 92 | -- shelf_time=NULL 93 | -- leave_time='2017-01-06 17:33:45' 94 | -- track_time='2017-01-19 02:42:13'; 95 | 96 | -- -- SRC ID=20, LINE=20:20 97 | SELECT * FROM tx_parcel_event; 98 | 99 | -- -- OUT ID=23, LINE=23:23, FOR 100 | -- id=350213 101 | -- create_time='2016-08-12 15:01:30' 102 | -- modify_time='2016-08-12 15:01:30' 103 | -- is_deleted=0 104 | -- logno=210992 105 | -- user_id=1 106 | -- parcel_id=93163 107 | -- type=100 108 | -- source=NULL 109 | -- operator_id=NULL 110 | -- is_closed=NULL; 111 | 112 | -- -- OUT ID=23, LINE=23:23, FOR 113 | -- id=350946 114 | -- create_time='2016-08-12 22:12:07' 115 | -- modify_time='2016-08-12 22:12:07' 116 | -- is_deleted=0 117 | -- logno=212067 118 | -- user_id=1 119 | -- parcel_id=93163 120 | -- type=300 121 | -- source='airwayBill=999-45480293' 122 | -- operator_id=57 123 | -- is_closed=NULL; 124 | 125 | -- -- OUT ID=23, LINE=23:23, FOR 126 | -- id=351370 127 | -- create_time='2016-08-12 22:12:07' 128 | -- modify_time='2016-08-12 22:12:07' 129 | -- is_deleted=0 130 | -- logno=212067 131 | -- user_id=1 132 | -- parcel_id=93152 133 | -- type=300 134 | -- source='airwayBill=999-45480293' 135 | -- operator_id=57 136 | -- is_closed=NULL; 137 | 138 | -- -- OUT ID=23, LINE=23:23, FOR 139 | -- id=363385 140 | -- create_time='2016-08-20 10:23:57' 141 | -- modify_time='2016-08-20 10:23:57' 142 | -- is_deleted=0 143 | -- logno=222515 144 | -- user_id=1 145 | -- parcel_id=93163 146 | -- type=500 147 | -- source=NULL 148 | -- operator_id=NULL 149 | -- is_closed=NULL; 150 | 151 | -- -- OUT ID=23, LINE=23:23, FOR 152 | -- id=363390 153 | -- 
create_time='2016-08-20 10:32:18' 154 | -- modify_time='2016-08-20 10:32:18' 155 | -- is_deleted=0 156 | -- logno=222515 157 | -- user_id=1 158 | -- parcel_id=93152 159 | -- type=500 160 | -- source=NULL 161 | -- operator_id=NULL 162 | -- is_closed=NULL; 163 | 164 | -- -- OUT ID=23, LINE=23:23, FOR 165 | -- id=370091 166 | -- create_time='2016-08-22 13:00:32' 167 | -- modify_time='2016-08-22 13:00:32' 168 | -- is_deleted=0 169 | -- logno=226608 170 | -- user_id=1 171 | -- parcel_id=93163 172 | -- type=501 173 | -- source=NULL 174 | -- operator_id=NULL 175 | -- is_closed=NULL; 176 | 177 | -- -- OUT ID=23, LINE=23:23, FOR 178 | -- id=370092 179 | -- create_time='2016-08-22 13:10:05' 180 | -- modify_time='2016-08-22 13:10:05' 181 | -- is_deleted=0 182 | -- logno=226608 183 | -- user_id=1 184 | -- parcel_id=93152 185 | -- type=501 186 | -- source=NULL 187 | -- operator_id=NULL 188 | -- is_closed=NULL; 189 | 190 | -- -- OUT ID=23, LINE=23:23, FOR 191 | -- id=833144 192 | -- create_time='2017-01-04 16:57:03' 193 | -- modify_time=NULL 194 | -- is_deleted=0 195 | -- logno=663020 196 | -- user_id=78 197 | -- parcel_id=278581 198 | -- type=100 199 | -- source=NULL 200 | -- operator_id=NULL 201 | -- is_closed=NULL; 202 | 203 | -- -- OUT ID=23, LINE=23:23, FOR 204 | -- id=833154 205 | -- create_time='2017-01-04 16:58:03' 206 | -- modify_time=NULL 207 | -- is_deleted=0 208 | -- logno=663030 209 | -- user_id=78 210 | -- parcel_id=278580 211 | -- type=100 212 | -- source=NULL 213 | -- operator_id=NULL 214 | -- is_closed=NULL; 215 | 216 | -- -- OUT ID=23, LINE=23:23, FOR 217 | -- id=883598 218 | -- create_time='2017-01-11 23:59:37' 219 | -- modify_time=NULL 220 | -- is_deleted=0 221 | -- logno=710728 222 | -- user_id=78 223 | -- parcel_id=278580 224 | -- type=300 225 | -- source='airwayBill=784-29622666' 226 | -- operator_id=56 227 | -- is_closed=NULL; 228 | 229 | -- -- OUT ID=23, LINE=23:23, FOR 230 | -- id=883616 231 | -- create_time='2017-01-11 23:59:37' 232 | -- modify_time=NULL 233 | -- is_deleted=0 234 | -- logno=710728 235 | -- user_id=78 236 | -- parcel_id=278581 237 | -- type=300 238 | -- source='airwayBill=784-29622666' 239 | -- operator_id=56 240 | -- is_closed=NULL; 241 | 242 | -- -- OUT ID=23, LINE=23:23, FOR 243 | -- id=933048 244 | -- create_time='2017-01-19 02:42:13' 245 | -- modify_time=NULL 246 | -- is_deleted=0 247 | -- logno=756412 248 | -- user_id=78 249 | -- parcel_id=278580 250 | -- type=500 251 | -- source=NULL 252 | -- operator_id=NULL 253 | -- is_closed=NULL; 254 | 255 | -- -- OUT ID=23, LINE=23:23, FOR 256 | -- id=933492 257 | -- create_time='2017-01-19 08:48:11' 258 | -- modify_time=NULL 259 | -- is_deleted=0 260 | -- logno=752392 261 | -- user_id=78 262 | -- parcel_id=278581 263 | -- type=500 264 | -- source=NULL 265 | -- operator_id=NULL 266 | -- is_closed=NULL; 267 | 268 | -- -- OUT ID=23, LINE=23:23, FOR 269 | -- id=933614 270 | -- create_time='2017-01-19 02:42:13' 271 | -- modify_time=NULL 272 | -- is_deleted=0 273 | -- logno=756412 274 | -- user_id=78 275 | -- parcel_id=278581 276 | -- type=500 277 | -- source=NULL 278 | -- operator_id=NULL 279 | -- is_closed=NULL; 280 | 281 | -- -- OUT ID=23, LINE=23:23, FOR 282 | -- id=1048772 283 | -- create_time='2017-01-30 22:05:59' 284 | -- modify_time=NULL 285 | -- is_deleted=0 286 | -- logno=782423 287 | -- user_id=78 288 | -- parcel_id=278580 289 | -- type=502 290 | -- source=NULL 291 | -- operator_id=NULL 292 | -- is_closed=NULL; 293 | 294 | -- -- OUT ID=23, LINE=23:23, FOR 295 | -- id=1049551 296 | -- create_time='2017-01-31 
00:00:22' 297 | -- modify_time=NULL 298 | -- is_deleted=0 299 | -- logno=782423 300 | -- user_id=78 301 | -- parcel_id=278581 302 | -- type=502 303 | -- source=NULL 304 | -- operator_id=NULL 305 | -- is_closed=NULL; 306 | 307 | -- -- SRC ID=26, LINE=26:26 308 | SELECT * FROM tx_parcel$log; 309 | 310 | -- -- SRC ID=32, LINE=32:32 311 | SELECT * FROM tx_receiver; 312 | 313 | -- -- OUT ID=35, LINE=35:35, FOR 314 | -- id=80385 315 | -- create_time='2016-08-11 21:17:43' 316 | -- modify_time='2017-12-08 07:10:14' 317 | -- is_deleted=0 318 | -- logno=-2500 319 | -- user_id=1 320 | -- name='张三' 321 | -- phone='13000000001' 322 | -- postcode='310000' 323 | -- country=2 324 | -- province='ZJ' 325 | -- city='杭州市' 326 | -- district=NULL 327 | -- address1='上城区江城路候潮公寓' 328 | -- address2=NULL 329 | -- hash=''; 330 | 331 | -- -- OUT ID=35, LINE=35:35, FOR 332 | -- id=80649 333 | -- create_time='2016-08-12 06:32:13' 334 | -- modify_time='2017-12-08 07:10:14' 335 | -- is_deleted=0 336 | -- logno=-2500 337 | -- user_id=1 338 | -- name='张山' 339 | -- phone='13000000002' 340 | -- postcode='311500' 341 | -- country=2 342 | -- province='ZJ' 343 | -- city='杭州市' 344 | -- district=NULL 345 | -- address1='桐庐县分水镇东门雅苑' 346 | -- address2='' 347 | -- hash=''; 348 | 349 | -- -- OUT ID=35, LINE=35:35, FOR 350 | -- id=311828 351 | -- create_time='2017-01-02 16:59:08' 352 | -- modify_time='2017-12-08 07:10:14' 353 | -- is_deleted=0 354 | -- logno=-2500 355 | -- user_id=78 356 | -- name='李四' 357 | -- phone='13000000003' 358 | -- postcode='200000' 359 | -- country=2 360 | -- province='SH' 361 | -- city='上海' 362 | -- district='徐汇区' 363 | -- address1='华泾路1425弄' 364 | -- address2='' 365 | -- hash=''; 366 | 367 | -- -- OUT ID=35, LINE=35:35, FOR 368 | -- id=311829 369 | -- create_time='2017-01-02 17:00:51' 370 | -- modify_time='2017-12-08 07:10:14' 371 | -- is_deleted=0 372 | -- logno=-2500 373 | -- user_id=78 374 | -- name='李思' 375 | -- phone='13000000004' 376 | -- postcode='200000' 377 | -- country=2 378 | -- province='SH' 379 | -- city='上海' 380 | -- district='徐汇区' 381 | -- address1='华泾路1427弄1' 382 | -- address2='' 383 | -- hash=''; 384 | 385 | -- -- SRC ID=38, LINE=38:38 386 | SELECT * FROM tx_track; 387 | 388 | -- -- OUT ID=41, LINE=41:41, FOR 389 | -- id=172204 390 | -- create_time='2016-08-11 21:17:43' 391 | -- modify_time='2016-12-09 16:28:24' 392 | -- is_deleted=0 393 | -- logno=222515 394 | -- user_id=1 395 | -- parcel_id=93152 396 | -- company=200 397 | -- track_num='SU160816' 398 | -- events='[{\"date\":\"2016-08-11 10:31:53\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:10\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200},{\"date\":\"2016-08-13 17:52:19\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CA982\",\"status\":300},{\"date\":\"2016-08-17 08:46:33\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2016-08-18 09:18:31\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2016-08-20 10:35:55\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]' 399 | -- status=500 400 | -- dest_city=NULL; 401 | 402 | -- -- OUT ID=41, LINE=41:41, FOR 403 | -- id=172215 404 | -- create_time='2016-08-11 22:49:34' 405 | -- modify_time='2016-12-09 16:28:24' 406 | -- is_deleted=0 407 | -- logno=222515 408 | -- user_id=1 409 | -- parcel_id=93163 410 | -- company=200 411 | -- track_num='100354269562' 412 | -- events='[{\"date\":\"2016-08-11 22:49:34\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2016-08-12 22:12:07\",\"info\":\"【纽约】包裹已从中转仓离开\",\"status\":200}]' 413 | -- status=200 414 | -- dest_city=NULL; 415 
| 416 | -- -- OUT ID=41, LINE=41:41, FOR 417 | -- id=369644 418 | -- create_time='2017-01-04 16:57:03' 419 | -- modify_time='2017-03-03 02:37:12' 420 | -- is_deleted=0 421 | -- logno=1024502 422 | -- user_id=78 423 | -- parcel_id=278581 424 | -- company=200 425 | -- track_num='303762397428' 426 | -- events='[{\"date\":\"2017-01-04 16:57:03\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300},{\"date\":\"2017-01-14 09:03:0\",\"info\":\"【广州】干线航班已到达【广州白云国际机场】\",\"status\":300},{\"date\":\"2017-01-15 12:42:11\",\"info\":\"【广州】干线航班已从【广州白云国际机场】起飞,航班号:CA3615\",\"status\":300},{\"date\":\"2017-01-15 13:37:42\",\"info\":\"【哈尔滨】干线航班已到达【哈尔滨太平国际机场】\",\"status\":300},{\"date\":\"2017-01-15 13:40:35\",\"info\":\"【哈尔滨】包裹清关中\",\"status\":400},{\"date\":\"2017-01-18 00:18:20\",\"info\":\"清关完成,正在国内配送\",\"status\":500}]' 427 | -- status=500 428 | -- dest_city=NULL; 429 | 430 | -- -- OUT ID=41, LINE=41:41, FOR 431 | -- id=369647 432 | -- create_time='2017-01-04 16:58:03' 433 | -- modify_time='2017-03-03 02:36:59' 434 | -- is_deleted=0 435 | -- logno=1025160 436 | -- user_id=78 437 | -- parcel_id=278580 438 | -- company=200 439 | -- track_num='303354496818' 440 | -- events='[{\"date\":\"2017-01-04 16:58:03\",\"info\":\"【纽约】美东仓库已揽件\",\"status\":100},{\"date\":\"2017-01-13 09:37:21\",\"info\":\"【纽约】包裹已从纽约肯尼迪国际机场发出\",\"status\":200},{\"date\":\"2017-01-13 13:37:42\",\"info\":\"【纽约】干线航班已从【纽约肯尼迪国际机场】起飞,航班号:CZ600\",\"status\":300}]' 441 | -- status=300 442 | -- dest_city=NULL; 443 | -------------------------------------------------------------------------------- /art/diff.go: -------------------------------------------------------------------------------- 1 | package art 2 | 3 | import ( 4 | "fmt" 5 | "regexp" 6 | "sort" 7 | "strings" 8 | ) 9 | 10 | type DiffItem struct { 11 | ColArr []string 12 | ColMap map[string]Col 13 | IdxArr []string 14 | IdxMap map[string]Idx 15 | TrgArr []string 16 | TrgMap map[string]Trg 17 | } 18 | 19 | func Diff(srce *DataSource, dest []*DataSource, kind map[string]bool, rgx []*regexp.Regexp) error { 20 | 21 | if srce == nil { 22 | return errorAndLog("need source db to diff, type=%#v", kind) 23 | } 24 | 25 | if len(dest) == 0 { 26 | return errorAndLog("need dest db to diff, type=%#v", kind) 27 | } 28 | 29 | scon, err := openDbAndLog(srce) 30 | if err != nil { 31 | return err 32 | } 33 | 34 | dcon := make([]*MyConn, len(dest)) 35 | for i, db := range dest { 36 | conn, er := openDbAndLog(db) 37 | if er != nil { 38 | return er 39 | } 40 | dcon[i] = conn 41 | } 42 | 43 | stbl, sset, err := makeDiffTbl(scon, rgx) 44 | sdnm := scon.DbName() 45 | if err != nil { 46 | LogFatal("failed to list tables, db=%s, err=%v", sdnm, err) 47 | return err 48 | } 49 | 50 | detail := kind[DiffTbl] 51 | hastrg := kind[DiffTrg] 52 | sdtl := make(map[string]DiffItem) 53 | 54 | for _, con := range dcon { 55 | dtbl, dset, er := makeDiffTbl(con, rgx) 56 | ddnm := con.DbName() 57 | if er != nil { 58 | LogFatal("failed to list tables, db=%s, err=%v", ddnm, er) 59 | return er 60 | } 61 | LogTrace("=== diff tbname ===, left=%s, right=%s", sdnm, ddnm) 62 | 63 | rep, ch := strings.Builder{}, true 64 | var ih []string // 两库都有,交集 65 | head := fmt.Sprintf("\n#TBNAME LEFT(>)=%s, RIGHT(<)=%s", sdnm, ddnm) 66 | for _, tbl := range stbl { 67 | if dset[tbl] { 68 | ih = append(ih, tbl) 69 | } else { 70 | if ch { 71 | ch = false 72 | rep.WriteString(head) 73 | } 74 
| rep.WriteString("\n>") 75 | rep.WriteString(tbl) 76 | } 77 | } 78 | 79 | for _, tbl := range dtbl { 80 | if !sset[tbl] { 81 | if ch { 82 | ch = false 83 | rep.WriteString(head) 84 | } 85 | 86 | rep.WriteString("\n<") 87 | rep.WriteString(tbl) 88 | } 89 | } 90 | 91 | sort.Strings(ih) // 排序 92 | 93 | if detail { 94 | LogTrace("=== diff detail ===, left=%s, right=%s", sdnm, ddnm) 95 | 96 | e1 := makeDiffAll(scon, ih, sdtl, hastrg) // 比较多库,逐步添加表 97 | if e1 != nil { 98 | return e1 99 | } 100 | 101 | ddtl := make(map[string]DiffItem) // 当前比较项 102 | e2 := makeDiffAll(con, ih, ddtl, hastrg) 103 | if e2 != nil { 104 | return e2 105 | } 106 | 107 | diffAll(ih, sdtl, ddtl, &rep, sdnm, ddnm) 108 | } 109 | 110 | if rep.Len() > 0 { 111 | LogTrace("== HAS SOME DIFF ==. LEFT=%s, RIGHT=%s", sdnm, ddnm) 112 | OutTrace(rep.String()) 113 | } else { 114 | LogTrace("== ALL THE SAME ==. LEFT=%s, RIGHT=%s", sdnm, ddnm) 115 | } 116 | } 117 | 118 | return nil 119 | } 120 | 121 | func diffCol(ld, rd DiffItem, rep *strings.Builder) { 122 | if len(ld.ColArr) == 0 && len(rd.ColArr) == 0 { 123 | return 124 | } 125 | 126 | la, ra := ld.ColArr, rd.ColArr 127 | lm, rm := ld.ColMap, rd.ColMap 128 | 129 | tit := "=Col Only Name" 130 | fc := len(tit) 131 | for k := range lm { 132 | i := len(k) 133 | if i > fc { 134 | fc = i 135 | } 136 | } 137 | 138 | off := len(lm) - len(rm) 139 | 140 | pad := fmt.Sprintf("%d", fc) 141 | head := "\n" + tit + strings.Repeat(" ", fc-len(tit)) + " | No. | Type | Nullable | Default | Comment | Extra" 142 | null := "" 143 | fmto := "\n%-" + pad + "s | %3d | %s | %t | %s | %s | %s" 144 | fmtb := "\n%-" + pad + "s | %s | %s | %s | %s | %s | %s" 145 | 146 | fmth := func(c *Col, tok string) { 147 | dvl := null 148 | if c.Deft.Valid { 149 | dvl = c.Deft.String 150 | } 151 | rep.WriteString(fmt.Sprintf(fmto, tok+c.Name, c.Seq, c.Type, c.Null, dvl, c.Cmnt, c.Extr)) 152 | } 153 | 154 | var ic []Col 155 | ch := true 156 | for _, c := range la { 157 | li := lm[c] 158 | ri, ok := rm[c] 159 | if ok { 160 | ic = append(ic, li, ri) 161 | } else { 162 | if ch { 163 | rep.WriteString(head) 164 | ch = false 165 | } 166 | fmth(&li, ">") 167 | } 168 | } 169 | 170 | // 右侧有,左侧没有 171 | for _, c := range ra { 172 | ri := rm[c] 173 | _, ok := lm[c] 174 | if !ok { 175 | if ch { 176 | rep.WriteString(head) 177 | ch = false 178 | } 179 | fmth(&ri, "<") 180 | } 181 | } 182 | 183 | // 比较两者都有的 184 | ih := true 185 | for i := 0; i < len(ic); i = i + 2 { 186 | li, ri := ic[i], ic[i+1] 187 | var seq, typ, nul, dft, cmt, ext string 188 | cnt := 0 189 | 190 | if li.Seq == ri.Seq || li.Seq-ri.Seq == off { 191 | seq = fmt.Sprintf("%3d", li.Seq) 192 | } else { 193 | seq = fmt.Sprintf("%d:%d", li.Seq, ri.Seq) 194 | cnt++ 195 | } 196 | 197 | if li.Type != ri.Type { 198 | typ = fmt.Sprintf("%s:%s", li.Type, ri.Type) 199 | cnt++ 200 | } 201 | 202 | if li.Null != ri.Null { 203 | nul = fmt.Sprintf("%t:%t", li.Null, ri.Null) 204 | cnt++ 205 | } 206 | 207 | if (!li.Deft.Valid && !ri.Deft.Valid) || (li.Deft.Valid && ri.Deft.Valid && li.Deft.String == ri.Deft.String) { 208 | // equals 209 | } else { 210 | ln, rn := null, null 211 | if li.Deft.Valid { 212 | ln = li.Deft.String 213 | } 214 | if ri.Deft.Valid { 215 | rn = ri.Deft.String 216 | } 217 | dft = fmt.Sprintf("%s:%s", ln, rn) 218 | cnt++ 219 | } 220 | 221 | if li.Cmnt != ri.Cmnt { 222 | cmt = fmt.Sprintf("%s:%s", li.Cmnt, ri.Cmnt) 223 | cnt++ 224 | } 225 | 226 | if li.Extr != ri.Extr { 227 | ext = fmt.Sprintf("%s:%s", li.Extr, ri.Extr) 228 | cnt++ 229 | } 230 | 231 | if cnt > 0 { 232 | 
if ih { 233 | ih = false 234 | rep.WriteString(strings.Replace(head, "Only", "Diff", 1)) 235 | } 236 | rep.WriteString(fmt.Sprintf(fmtb, "!"+li.Name, seq, typ, nul, dft, cmt, ext)) 237 | } 238 | } 239 | } 240 | 241 | func diffIdx(ld, rd DiffItem, rep *strings.Builder) { 242 | if len(ld.IdxArr) == 0 && len(rd.IdxArr) == 0 { 243 | return 244 | } 245 | 246 | la, ra := ld.IdxArr, rd.IdxArr 247 | lm, rm := ld.IdxMap, rd.IdxMap 248 | 249 | ch, ih := true, true 250 | 251 | tit := "=Idx Only Name" 252 | fc := len(tit) 253 | for k := range lm { 254 | i := len(k) 255 | if i > fc { 256 | fc = i 257 | } 258 | } 259 | 260 | pad := fmt.Sprintf("%d", fc) 261 | head := "\n" + tit + strings.Repeat(" ", fc-len(tit)) + " | Uniq | Type | Cols" 262 | fmto := "\n%-" + pad + "s | %t | %s | %s" 263 | fmtb := "\n%-" + pad + "s | %s | %s | %s" 264 | 265 | var ic []Idx 266 | for _, c := range la { 267 | li := lm[c] 268 | ri, ok := lm[c] 269 | if ok { 270 | ic = append(ic, li, ri) 271 | } else { 272 | if ch { 273 | ch = false 274 | rep.WriteString(head) 275 | } 276 | rep.WriteString(fmt.Sprintf(fmto, ">"+li.Name, li.Uniq, li.Type, li.Cols)) 277 | } 278 | } 279 | 280 | // 右侧有,左侧没有 281 | for _, c := range ra { 282 | ri := rm[c] 283 | _, ok := lm[c] 284 | if !ok { 285 | if ch { 286 | ch = false 287 | rep.WriteString(head) 288 | } 289 | rep.WriteString(fmt.Sprintf(fmto, "<"+ri.Name, ri.Uniq, ri.Type, ri.Cols)) 290 | } 291 | } 292 | 293 | // 比较两者都有的 294 | for i := 0; i < len(ic); i = i + 2 { 295 | li, ri := ic[i], ic[i+1] 296 | var typ, unq, cols string 297 | cnt := 0 298 | 299 | if li.Type != ri.Type { 300 | typ = fmt.Sprintf("%s:%s", li.Type, ri.Type) 301 | cnt++ 302 | } 303 | 304 | if li.Uniq != ri.Uniq { 305 | unq = fmt.Sprintf("%t:%t", li.Uniq, ri.Uniq) 306 | cnt++ 307 | } 308 | 309 | if li.Cols != ri.Cols { 310 | cols = fmt.Sprintf("%s:%s", li.Cols, ri.Cols) 311 | cnt++ 312 | } 313 | 314 | if cnt > 0 { 315 | if ih { 316 | ih = false 317 | rep.WriteString(strings.Replace(head, "Only", "Diff", 1)) 318 | } 319 | rep.WriteString(fmt.Sprintf(fmtb, "!"+li.Name, typ, unq, cols)) 320 | } 321 | } 322 | } 323 | 324 | func diffTrg(ld, rd DiffItem, rep *strings.Builder) { 325 | if len(ld.TrgArr) == 0 && len(rd.TrgArr) == 0 { 326 | return 327 | } 328 | 329 | la, ra := ld.TrgArr, rd.TrgArr 330 | lm, rm := ld.TrgMap, rd.TrgMap 331 | 332 | 333 | ch, ih := true, true 334 | 335 | tit := "=Trg Only Name" 336 | fc := len(tit) 337 | for k := range lm { 338 | i := len(k) 339 | if i > fc { 340 | fc = i 341 | } 342 | } 343 | 344 | pad := fmt.Sprintf("%d", fc) 345 | head := "\n" + tit + strings.Repeat(" ", fc-len(tit)) + " | Timing | Event | Statement" 346 | fmto := "\n%-" + pad + "s | %s | %s | %q" 347 | fmtb := "\n%-" + pad + "s | %s | %s | %s" 348 | 349 | var ic []Trg 350 | for _, c := range la { 351 | li := lm[c] 352 | ri, ok := rm[c] 353 | if ok { 354 | ic = append(ic, li, ri) 355 | } else { 356 | if ch { 357 | ch = false 358 | rep.WriteString(head) 359 | } 360 | rep.WriteString(fmt.Sprintf(fmto, ">"+li.Name, li.Timing, li.Event, li.Statement)) 361 | } 362 | } 363 | 364 | // 右侧有,左侧没有 365 | for _,c := range ra { 366 | ri := rm[c] 367 | _, ok := lm[c] 368 | if !ok { 369 | if ch { 370 | ch = false 371 | rep.WriteString(head) 372 | } 373 | rep.WriteString(fmt.Sprintf(fmto, "<"+ri.Name, ri.Timing, ri.Event, ri.Statement)) 374 | } 375 | } 376 | // 比较两者都有的 377 | for i := 0; i < len(ic); i = i + 2 { 378 | li, ri := ic[i], ic[i+1] 379 | var tim, evt, stm string 380 | cnt := 0 381 | if li.Timing != ri.Timing { 382 | tim = fmt.Sprintf("%s:%s", 
li.Timing, ri.Timing) 383 | cnt++ 384 | } 385 | if li.Event != ri.Event { 386 | evt = fmt.Sprintf("%s:%s", li.Event, ri.Event) 387 | cnt++ 388 | } 389 | if trimStatement(li.Statement) != trimStatement(ri.Statement) { 390 | stm = fmt.Sprintf("%q:%q", li.Statement, ri.Statement) 391 | cnt++ 392 | } 393 | 394 | if cnt > 0 { 395 | if ih { 396 | ih = false 397 | rep.WriteString(strings.Replace(head, "Only", "Diff", 1)) 398 | } 399 | rep.WriteString(fmt.Sprintf(fmtb, "!"+li.Name, tim, evt, stm)) 400 | } 401 | } 402 | } 403 | 404 | func trimStatement(str string) string { 405 | str = squashBlank(str) 406 | str = squashTrimx(str) 407 | str = strings.ToLower(str) 408 | return str 409 | } 410 | 411 | func diffAll(tbl []string, lit, rit map[string]DiffItem, rep *strings.Builder, ldb, rdb string) { 412 | // 右侧是左侧的子集 413 | for _, tb := range tbl { 414 | ld, rd := lit[tb], rit[tb] 415 | sb := &strings.Builder{} 416 | // column 417 | diffCol(ld, rd, sb) 418 | // index 419 | diffIdx(ld, rd, sb) 420 | // trigger 421 | diffTrg(ld, rd, sb) 422 | 423 | if sb.Len() > 0 { 424 | rep.WriteString(fmt.Sprintf("\n#DETAIL TABLE=%s, LEFT(>)=%s, RIGHT(<)=%s", tb, ldb, rdb)) 425 | rep.WriteString(sb.String()) 426 | } 427 | } 428 | } 429 | 430 | func makeDiffTbl(conn *MyConn, rgx []*regexp.Regexp) (rst []string, set map[string]bool, err error) { 431 | rst, err = listTable(conn, rgx) 432 | if err != nil { 433 | return 434 | } 435 | sort.Strings(rst) 436 | 437 | set = make(map[string]bool) 438 | for _, v := range rst { 439 | set[v] = true 440 | } 441 | 442 | return 443 | } 444 | 445 | func makeDiffAll(con *MyConn, tbl []string, dtl map[string]DiffItem, trg bool) error { 446 | for _, t := range tbl { 447 | _, ok := dtl[t] 448 | if !ok { 449 | var cla, ixa, tga []string 450 | 451 | clm, err := con.Columns(t) 452 | if err != nil { 453 | LogError("failed to list columns, table=%s, db=%s, err=%v", t, con.DbName(), err) 454 | return err 455 | } 456 | if ln := len(clm); ln > 0 { 457 | tmp := make([]Col, 0, ln) 458 | for _, v := range clm { 459 | tmp = append(tmp, v) 460 | } 461 | sort.Slice(tmp, func(i, j int) bool { 462 | return tmp[i].Seq < tmp[j].Seq 463 | }) 464 | cla = make([]string, ln) 465 | for i, v := range tmp { 466 | cla[i] = v.Name 467 | } 468 | } 469 | 470 | ixm, err := con.Indexes(t) 471 | if err != nil { 472 | LogError("failed to list indexes, table=%s, db=%s, err=%v", t, con.DbName(), err) 473 | return err 474 | } 475 | if ln := len(ixm); ln > 0 { 476 | ixa = make([]string, 0, ln) 477 | for k := range ixm { 478 | ixa = append(ixa, k) 479 | } 480 | sort.Strings(ixa) 481 | } 482 | 483 | var tgm map[string]Trg 484 | if trg { 485 | tgm, err = con.Triggers(t) 486 | if err != nil { 487 | LogError("failed to list triggers, table=%s, db=%s, err=%v", t, con.DbName(), err) 488 | return err 489 | } 490 | if ln := len(tgm); ln > 0 { 491 | tga = make([]string, 0, ln) 492 | for k := range tgm { 493 | tga = append(tga, k) 494 | } 495 | sort.Strings(tga) 496 | } 497 | } 498 | 499 | dtl[t] = DiffItem{cla, clm, ixa, ixm, tga, tgm} 500 | } 501 | } 502 | return nil 503 | } 504 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "github.com/trydofor/godbart/art" 5 | "github.com/urfave/cli" 6 | "io/ioutil" 7 | "os" 8 | "regexp" 9 | "strings" 10 | "sync" 11 | "time" 12 | ) 13 | 14 | func checkConf(ctx *cli.Context) *art.Config { 15 | file := ctx.String("c") 16 | art.LogTrace("got 
conf=%s", file) 17 | 18 | data, err := ioutil.ReadFile(file) 19 | art.ExitIfError(err, -1, "can read config=%s", file) 20 | 21 | conf, err := art.ParseToml(string(data)) 22 | art.ExitIfError(err, -1, "can not parse TOML, config=%s", file) 23 | 24 | return conf 25 | } 26 | 27 | func checkMlvl(ctx *cli.Context) { 28 | lvl := ctx.String("l") 29 | art.LogTrace("got level=%s", lvl) 30 | switch strings.ToLower(lvl) { 31 | case "debug": 32 | art.MsgLevel = art.LvlDebug 33 | case "trace": 34 | art.MsgLevel = art.LvlTrace 35 | case "error": 36 | art.MsgLevel = art.LvlError 37 | default: 38 | art.MsgLevel = art.LvlDebug 39 | } 40 | } 41 | 42 | func checkDest(ctx *cli.Context, cnf *art.Config, req bool) []*art.DataSource { 43 | flag := ctx.StringSlice("d") 44 | art.ExitIfTrue(req && len(flag) == 0, -2, "no dest db selected") 45 | 46 | dest := make([]*art.DataSource, len(flag)) 47 | for i := 0; i < len(flag); i++ { 48 | d, ok := cnf.DataSource[flag[i]] 49 | art.ExitIfTrue(!ok, -2, "db not found, dest=%s", flag[i]) 50 | art.LogTrace("got dest db=%s", flag[i]) 51 | dest[i] = &d 52 | } 53 | 54 | return dest 55 | } 56 | 57 | func checkSrce(ctx *cli.Context, cnf *art.Config, req bool) *art.DataSource { 58 | flag := ctx.String("s") 59 | art.ExitIfTrue(req && len(flag) == 0, -5, "no source db selected") 60 | 61 | ds, ok := cnf.DataSource[flag] 62 | art.ExitIfTrue(!ok, -5, "db not found in config, source=%s", flag) 63 | art.LogTrace("got source db=%s", flag) 64 | 65 | return &ds 66 | } 67 | 68 | func checkSqls(ctx *cli.Context) (files []art.FileEntity) { 69 | art.ExitIfTrue(ctx.NArg() == 0, -3, "must give a path or file for args") 70 | 71 | flag := ctx.StringSlice("x") 72 | files, err := art.FileWalker(ctx.Args(), flag) 73 | art.ExitIfError(err, -3, "failed to read file") 74 | art.ExitIfTrue(len(files) < 1, -3, "can not find any SQLs") 75 | 76 | return 77 | } 78 | 79 | func buildEnvs(ctx *cli.Context, envs map[string]string) { 80 | flag := ctx.StringSlice("e") 81 | for _, env := range flag { 82 | kv := strings.SplitN(env, "=", 2) 83 | if len(kv) == 2 { 84 | envs[kv[0]] = kv[1] 85 | art.LogTrace("got input env, k=%q, v=%q", kv[0], kv[1]) 86 | } else { 87 | ov, ok := os.LookupEnv(kv[0]) 88 | art.ExitIfTrue(!ok, -6, "system ENV not found, env=%q", env) 89 | art.LogTrace("got system env, k=%q, v=%q", kv[0], ov) 90 | } 91 | } 92 | 93 | art.BuiltinEnvs(envs) 94 | return 95 | } 96 | 97 | func checkTmpl(ctx *cli.Context, tmpl map[string]string) (tps []string) { 98 | flag := ctx.String("t") 99 | keys := strings.SplitN(flag, ",", -1) 100 | for _, k := range keys { 101 | k = strings.TrimSpace(k) 102 | if tp, ok := tmpl[k]; ok { 103 | tps = append(tps, k, tp) 104 | art.LogTrace("got tmpl in sqltemplet, key=%s", k) 105 | } else { 106 | art.ExitIfTrue(!ok, -6, "templet not found in sqltemplet, key=%s", k) 107 | } 108 | } 109 | return 110 | } 111 | 112 | func checkType(ctx *cli.Context, knd map[string]bool, dft string) map[string]bool { 113 | flag := ctx.String("t") 114 | rst := make(map[string]bool) 115 | for _, k := range strings.SplitN(flag, ",", -1) { 116 | if knd[k] { 117 | rst[k] = true 118 | art.LogTrace("got type=%s", k) 119 | } else { 120 | art.ExitIfTrue(true, -6, "unsupported (T)ype=%s, in %s", k, flag) 121 | } 122 | } 123 | if len(rst) == 0 { 124 | rst[dft] = true 125 | } 126 | return rst 127 | } 128 | 129 | func checkRegx(ctx *cli.Context) []*regexp.Regexp { 130 | args := ctx.Args() 131 | regx := make([]*regexp.Regexp, 0, len(args)) 132 | for _, v := range args { 133 | re, err := regexp.Compile(v) 134 | 
art.ExitIfError(err, -6, "failed to compile Regexp=%v", v) 135 | art.LogTrace("got table regexp=%s", v) 136 | regx = append(regx, re) 137 | } 138 | return regx 139 | } 140 | 141 | func checkRisk(ctx *cli.Context) bool { 142 | agr := ctx.Bool("agree") 143 | return agr 144 | } 145 | 146 | // command // 147 | func exec(ctx *cli.Context) (err error) { 148 | checkMlvl(ctx) 149 | conf := checkConf(ctx) 150 | dest := checkDest(ctx, conf, true) 151 | risk := checkRisk(ctx) 152 | sqls := checkSqls(ctx) 153 | return art.Exec(&conf.Preference, dest, sqls, risk) 154 | } 155 | 156 | func revi(ctx *cli.Context) (err error) { 157 | checkMlvl(ctx) 158 | conf := checkConf(ctx) 159 | dest := checkDest(ctx, conf, true) 160 | revi := ctx.String("r") 161 | mask := ctx.String("m") 162 | rqry := ctx.String("q") 163 | risk := checkRisk(ctx) 164 | sqls := checkSqls(ctx) 165 | return art.Revi(&conf.Preference, dest, sqls, revi, mask, rqry, risk) 166 | } 167 | 168 | func diff(ctx *cli.Context) error { 169 | checkMlvl(ctx) 170 | conf := checkConf(ctx) 171 | dest := checkDest(ctx, conf, false) 172 | srce := checkSrce(ctx, conf, false) 173 | kind := checkType(ctx, art.DiffType, art.DiffSum) 174 | tbls := checkRegx(ctx) 175 | return art.Diff(srce, dest, kind, tbls) 176 | } 177 | 178 | func show(ctx *cli.Context) error { 179 | checkMlvl(ctx) 180 | conf := checkConf(ctx) 181 | srce := checkSrce(ctx, conf, false) 182 | ktpl := checkTmpl(ctx, conf.SqlTemplet) 183 | tbls := checkRegx(ctx) 184 | return art.Show(srce, ktpl, tbls) 185 | } 186 | 187 | func synk(ctx *cli.Context) error { 188 | checkMlvl(ctx) 189 | conf := checkConf(ctx) 190 | dest := checkDest(ctx, conf, false) 191 | srce := checkSrce(ctx, conf, false) 192 | kind := checkType(ctx, art.SyncType, art.SyncTbl) 193 | tbls := checkRegx(ctx) 194 | return art.Sync(srce, dest, kind, tbls) 195 | } 196 | 197 | func tree(ctx *cli.Context) error { 198 | checkMlvl(ctx) 199 | conf := checkConf(ctx) 200 | buildEnvs(ctx, conf.StartupEnv) 201 | srce := checkSrce(ctx, conf, true) 202 | dest := checkDest(ctx, conf, false) 203 | risk := checkRisk(ctx) 204 | sqls := checkSqls(ctx) 205 | wg := &sync.WaitGroup{} 206 | wg.Add(1) 207 | go art.CtrlRoom.Open(conf.Preference.ControlPort, art.CtrlRoomTree, wg) 208 | wg.Wait() 209 | return art.Tree(&conf.Preference, conf.StartupEnv, srce, dest, sqls, risk) 210 | } 211 | 212 | func sqlx(ctx *cli.Context) error { 213 | checkMlvl(ctx) 214 | conf := checkConf(ctx) 215 | buildEnvs(ctx, conf.StartupEnv) 216 | conf.StartupEnv["ENV-CHECK-RULE"] = "EMPTY" 217 | sqls := checkSqls(ctx) 218 | sqlx, err := art.ParseTree(&conf.Preference, conf.StartupEnv, sqls) 219 | if err != nil { 220 | return err 221 | } 222 | 223 | for i, t := range sqlx { 224 | pth := sqls[i].Path 225 | art.OutTrace("==== tree=%s ====", pth) 226 | for _, x := range t.Exes { 227 | art.OutTrace("%s", x.Tree()) 228 | } 229 | art.OutTrace("==== debug to see more ====") 230 | 231 | art.OutDebug("==== envx file=%s ====", pth) 232 | for k, v := range t.Envs { 233 | art.OutDebug("%s=%s", k, v) 234 | } 235 | 236 | art.OutDebug("==== exex file=%s ====", pth) 237 | for _, x := range t.Exes { 238 | art.OutDebug("%v", x) 239 | } 240 | } 241 | return nil 242 | } 243 | 244 | // cli // 245 | func main() { 246 | 247 | app := cli.NewApp() 248 | 249 | app.Author = "github.com/trydofor" 250 | app.Version = "0.9.9" 251 | app.Compiled = time.Now() 252 | 253 | app.Name = "godbart" 254 | app.Usage = "god, bart is a boy of ten." 
255 | app.UsageText = app.Name + " command [options] [arguments...]" 256 | 257 | app.Description = `a SQL-based CLI for RDBMS versioning & migration 258 | 259 | use "command -h" to see command's help and example. 260 | supposing godbart in $PATH and godbart.toml in $PWD 261 | 262 | opt - require exactly one 263 | opt? - optional zero or one 264 | opt* - conditional zero or more 265 | 266 | readme - https://github.com/trydofor/godbart 267 | config - https://github.com/trydofor/godbart/blob/master/godbart.toml 268 | sample - https://github.com/trydofor/godbart/tree/master/demo/sql/ 269 | 270 | 2>&1 | tee /tmp/tmp.log to save log 271 | | grep -E '^[0-9]{4}[^0-9][0-9]{2}' to skip output 272 | ` 273 | 274 | // 275 | confFlag := &cli.StringFlag{ 276 | Name: "c", 277 | Usage: "the main (C)onfig `FILE`", 278 | Value: "godbart.toml", 279 | } 280 | 281 | destFlag := &cli.StringSliceFlag{ 282 | Name: "d", 283 | Usage: "the (D)estination `DB*` in config", 284 | } 285 | 286 | envsFlag := &cli.StringSliceFlag{ 287 | Name: "e", 288 | Usage: "the (E)nvironment, `K=v*`", 289 | } 290 | 291 | mlvlFlag := &cli.StringFlag{ 292 | Name: "l", 293 | Usage: "the message (L)evel, `debug?` :[debug|trace|error]", 294 | Value: "debug", 295 | } 296 | 297 | maskFlag := &cli.StringFlag{ 298 | Name: "m", 299 | Usage: "the (M)ask `regexp?` of the revision", 300 | Value: "[0-9]{10,}", 301 | } 302 | 303 | rqryFlag := &cli.StringFlag{ 304 | Name: "q", 305 | Usage: "the (Q)uery Prefix `string?` of revision", 306 | Value: "SELECT", 307 | } 308 | 309 | reviFlag := &cli.StringFlag{ 310 | Name: "r", 311 | Usage: "the (R)evision `string` to run to", 312 | } 313 | 314 | srceFlag := &cli.StringFlag{ 315 | Name: "s", 316 | Usage: "the (S)ource `DB` in config", 317 | } 318 | 319 | difkFlag := &cli.StringFlag{ 320 | Name: "t", 321 | Usage: "diff (T)ype,`type?` in,\n\tcol:columns\n\ttbl:table name\n\ttrg:trigger\n\t", 322 | Value: "tbl", 323 | } 324 | 325 | shwkFlag := &cli.StringFlag{ 326 | Name: "t", 327 | Usage: "show (T)emplet,`templet?` in config's sqltemplet", 328 | Value: "tbl,trg", 329 | } 330 | 331 | synkFlag := &cli.StringFlag{ 332 | Name: "t", 333 | Usage: "sync (T)ype `type?` in,\n\ttrg:trigger\n\ttbl:col+idx\n\trow:sync data\n\t", 334 | Value: "tbl", 335 | } 336 | 337 | sufxFlag := &cli.StringSliceFlag{ 338 | Name: "x", 339 | Usage: "the Suffi(X) `string?` of SQL files. 
eg \".sql\"", 340 | } 341 | 342 | riskFlag := &cli.BoolFlag{ 343 | Name: "agree", 344 | Usage: "dangerous SQL can lost data, you agree to take any risk on yourself!", 345 | } 346 | 347 | // 348 | app.Commands = []cli.Command{ 349 | { 350 | Name: "exec", 351 | Usage: "execute SQLs on DBs", 352 | ArgsUsage: "some files or paths of SQLs", 353 | Flags: []cli.Flag{ 354 | confFlag, 355 | sufxFlag, 356 | destFlag, 357 | mlvlFlag, 358 | riskFlag, 359 | }, 360 | Action: exec, 361 | }, 362 | { 363 | Name: "revi", 364 | Usage: "upgrade schema by revision", 365 | ArgsUsage: "some files or paths of SQLs", 366 | Description:` 367 | # save all tables(exclde $) create-table to prd_main_tbl.sql 368 | godbart show -s prd_main -t tbl 'tx_[^s]+' > prd_main_tbl.sql 369 | # save all tables(exclde $) create-trigger to prd_main_trg.sql 370 | godbart show -s prd_main -t trg 'tx_[^s]+' > prd_main_trg.sql 371 | `, 372 | Flags: []cli.Flag{ 373 | confFlag, 374 | sufxFlag, 375 | destFlag, 376 | reviFlag, 377 | maskFlag, 378 | rqryFlag, 379 | mlvlFlag, 380 | riskFlag, 381 | }, 382 | Action: revi, 383 | }, 384 | { 385 | Name: "diff", 386 | Usage: "diff table, column, index, trigger", 387 | ArgsUsage: "tables to diff (regexp). empty means all", 388 | Description:` 389 | # save all tables(exclde $) create-table to prd_main_tbl.sql 390 | godbart show -s prd_main -t tbl 'tx_[^s]+' > prd_main_tbl.sql 391 | # save all tables(exclde $) create-trigger to prd_main_trg.sql 392 | godbart show -s prd_main -t trg 'tx_[^s]+' > prd_main_trg.sql 393 | `, 394 | Flags: []cli.Flag{ 395 | confFlag, 396 | srceFlag, 397 | destFlag, 398 | difkFlag, 399 | mlvlFlag, 400 | }, 401 | Action: diff, 402 | }, 403 | { 404 | Name: "sync", 405 | Usage: "create table d.A like s.B or sync small data", 406 | ArgsUsage: "tables to sync (regexp). empty means all", 407 | Description:` 408 | # sync table&trigger from main to 2018 409 | godbart sync -s prd_main -d prd_2018 -t tbl,trg 'tx_[^s]+' 410 | # sync data from main to 2018 411 | godbart sync -s prd_main -d prd_2018 -t row 'tx_[^s]+' 412 | `, 413 | Flags: []cli.Flag{ 414 | confFlag, 415 | srceFlag, 416 | destFlag, 417 | synkFlag, 418 | mlvlFlag, 419 | riskFlag, 420 | }, 421 | Action: synk, 422 | }, 423 | { 424 | Name: "tree", 425 | Usage: "deal data-tree between DBs", 426 | ArgsUsage: "some files or paths of SQLs", 427 | Description:` 428 | # save all tables(exclde $) create-table to prd_main_tbl.sql 429 | godbart tree -s prd_main -d prd_2018 demo/sql/tree/tree.sql 430 | `, 431 | Flags: []cli.Flag{ 432 | confFlag, 433 | sufxFlag, 434 | destFlag, 435 | srceFlag, 436 | envsFlag, 437 | mlvlFlag, 438 | riskFlag, 439 | }, 440 | Action: tree, 441 | }, 442 | { 443 | Name: "sqlx", 444 | Usage: "static analyze data-tree by sql file", 445 | ArgsUsage: "some files or paths of SQLs", 446 | Flags: []cli.Flag{ 447 | confFlag, 448 | envsFlag, 449 | mlvlFlag, 450 | }, 451 | Action: sqlx, 452 | }, 453 | { 454 | Name: "show", 455 | Usage: "show ddl of table", 456 | ArgsUsage: "tables to show (regexp). 
empty means all", 457 | Description:` 458 | # save all tables(exclde $) create-table to prd_main_tbl.sql 459 | godbart show -s prd_main -t tbl 'tx_[^s]+' > prd_main_tbl.sql 460 | # save all tables(exclde $) create-trigger to prd_main_trg.sql 461 | godbart show -s prd_main -t trg 'tx_[^s]+' > prd_main_trg.sql 462 | `, 463 | Flags: []cli.Flag{ 464 | confFlag, 465 | srceFlag, 466 | shwkFlag, 467 | mlvlFlag, 468 | }, 469 | Action: show, 470 | }, 471 | } 472 | 473 | err := app.Run(os.Args) 474 | if err != nil { 475 | art.LogFatal("exit by error=%v", err) 476 | } 477 | } 478 | --------------------------------------------------------------------------------