├── imgs
│   ├── skp.png
│   ├── demo.gif
│   ├── bypass.png
│   └── chrome_path.png
├── Disclaimer.md
├── pkg
│   ├── logger
│   │   └── logger.go
│   ├── tools
│   │   ├── requests
│   │   │   ├── utils.go
│   │   │   ├── response.go
│   │   │   └── requests.go
│   │   ├── random.go
│   │   └── common.go
│   ├── domain_collect.go
│   ├── model
│   │   ├── url_test.go
│   │   ├── url.go
│   │   └── request.go
│   ├── filter
│   │   ├── simple_filter.go
│   │   └── smart_filter.go
│   ├── engine
│   │   ├── collect_links.go
│   │   ├── browser.go
│   │   ├── after_loaded_tasks.go
│   │   ├── after_dom_tasks.go
│   │   ├── intercept_request.go
│   │   └── tab.go
│   ├── config
│   │   └── config.go
│   ├── path_expansion.go
│   ├── task_main.go
│   └── js
│       └── javascript.go
├── go.mod
├── examples
│   ├── subprocess_call.py
│   ├── request_with_cookie.py
│   ├── host_binding.py
│   └── zombie_clean.py
├── README_zh-cn.md
├── go.sum
├── README.md
├── cmd
│   └── crawlergo
│       └── crawlergo_cmd.go
└── LICENSE

/imgs/skp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hahwul/crawlergo/master/imgs/skp.png

--------------------------------------------------------------------------------
/imgs/demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hahwul/crawlergo/master/imgs/demo.gif

--------------------------------------------------------------------------------
/imgs/bypass.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hahwul/crawlergo/master/imgs/bypass.png

--------------------------------------------------------------------------------
/imgs/chrome_path.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hahwul/crawlergo/master/imgs/chrome_path.png

--------------------------------------------------------------------------------
/Disclaimer.md:
--------------------------------------------------------------------------------
1 | ## Disclaimer
2 | 
3 | This tool is intended only for **legally authorized** enterprise security-building activities. **Do not crawl targets you are not authorized to test.**
4 | 
5 | Reverse engineering, decompiling, or attempting to crack the source code of this software is prohibited.
6 | 
7 | **If any of the prohibited behavior above is discovered, we reserve the right to hold you legally liable.**
8 | 
9 | If you commit any illegal act while using this tool, you bear the consequences yourself, and we accept no legal or joint liability.
10 | 
11 | Before installing and using this tool, please **read carefully and fully understand every clause**. Restrictions, disclaimers, and other clauses that concern your significant rights may be highlighted in bold or underlined. Unless you have fully read, completely understood, and accepted every term of this agreement, do not install or use this tool. Using the tool, or accepting this agreement in any other express or implied way, is deemed to mean that you have read it and agree to be bound by it.
12 | 
13 | 

--------------------------------------------------------------------------------
/pkg/logger/logger.go:
--------------------------------------------------------------------------------
1 | package logger
2 | 
3 | import (
4 |     "github.com/sirupsen/logrus"
5 | )
6 | 
7 | var logLevelMap = map[string]logrus.Level{
8 |     //"Trace": logrus.TraceLevel,
9 |     "Debug": logrus.DebugLevel,
10 |     "Info":  logrus.InfoLevel,
11 |     "Warn":  logrus.WarnLevel,
12 |     "Error": logrus.ErrorLevel,
13 |     "Fatal": logrus.FatalLevel,
14 |     //"Panic": logrus.PanicLevel,
15 | }
16 | 
17 | var Logger *logrus.Logger
18 | 
19 | func init() {
20 |     Logger = logrus.New()
21 |     level := "Warn"
22 |     Logger.SetLevel(logLevelMap[level])
23 | }
24 | 

--------------------------------------------------------------------------------
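The logger package exposes a single shared logrus instance, initialized at Warn level. A minimal sketch of how a caller might consume it; the Debug-level switch here is illustrative, not something the repository does itself:

    package main

    import (
        "crawlergo/pkg/logger"

        "github.com/sirupsen/logrus"
    )

    func main() {
        // The package default is Warn (set in init above); raising
        // verbosity is up to the caller.
        logger.Logger.SetLevel(logrus.DebugLevel)
        logger.Logger.Info("browser started")
        logger.Logger.Debugf("collected %d requests", 42)
    }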
/go.mod:
--------------------------------------------------------------------------------
1 | module crawlergo
2 | 
3 | go 1.12
4 | 
5 | replace git.apache.org/thrift.git => github.com/apache/thrift v0.13.0
6 | 
7 | require (
8 |     github.com/chromedp/cdproto v0.0.0-20191114225735-6626966fbae4
9 |     github.com/chromedp/chromedp v0.5.2
10 |     github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
11 |     github.com/deckarep/golang-set v1.7.1
12 |     github.com/gogf/gf v1.16.6
13 |     github.com/panjf2000/ants/v2 v2.2.2
14 |     github.com/pkg/errors v0.8.1
15 |     github.com/sirupsen/logrus v1.4.2
16 |     github.com/urfave/cli/v2 v2.0.0
17 |     golang.org/x/net v0.0.0-20210520170846-37e1c6afe023
18 | )

--------------------------------------------------------------------------------
/pkg/tools/requests/utils.go:
--------------------------------------------------------------------------------
1 | package requests
2 | 
3 | import (
4 |     "github.com/pkg/errors"
5 |     "net/url"
6 |     "strings"
7 | )
8 | 
9 | // UrlParse wraps url.Parse, adding handling for stray % signs
10 | func UrlParse(sourceUrl string) (*url.URL, error) {
11 |     u, err := url.Parse(sourceUrl)
12 |     if err != nil {
13 |         u, err = url.Parse(escapePercentSign(sourceUrl))
14 |     }
15 |     if err != nil {
16 |         return nil, errors.Wrap(err, "parse url error")
17 |     }
18 |     return u, nil
19 | }
20 | 
21 | // escapePercentSign replaces every % in the url with %25
22 | func escapePercentSign(raw string) string {
23 |     return strings.ReplaceAll(raw, "%", "%25")
24 | }
25 | 

--------------------------------------------------------------------------------
/examples/subprocess_call.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # coding: utf-8
3 | 
4 | import simplejson
5 | import subprocess
6 | 
7 | 
8 | def main():
9 |     target = "http://testphp.vulnweb.com/"
10 |     cmd = ["./crawlergo_cmd", "-c", "/tmp/chrome-linux/chrome", "-o", "json", target]
11 |     rsp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
12 |     output, error = rsp.communicate()
13 | 
14 |     result = simplejson.loads(output.decode().split("--[Mission Complete]--")[1])
15 |     req_list = result["req_list"]
16 |     print(req_list[0])
17 | 
18 | 
19 | if __name__ == '__main__':
20 |     main()
21 | 

--------------------------------------------------------------------------------
/pkg/tools/requests/response.go:
--------------------------------------------------------------------------------
1 | package requests
2 | 
3 | import (
4 |     "crawlergo/pkg/logger"
5 |     "io/ioutil"
6 |     "net/http"
7 | )
8 | 
9 | // Response wraps http.Response with a few custom helpers
10 | type Response struct {
11 |     http.Response
12 |     // raw text Response
13 |     Text string
14 | }
15 | 
16 | func getTextFromResp(r *http.Response) string {
17 |     // TODO: charset conversion
18 |     if r.ContentLength == 0 {
19 |         return ""
20 |     }
21 |     b, err := ioutil.ReadAll(r.Body)
22 |     if err != nil {
23 |         logger.Logger.Debug("get response body err ", err)
24 |     }
25 |     _ = r.Body.Close()
26 |     return string(b)
27 | }
28 | 
29 | func NewResponse(r *http.Response) *Response {
30 |     return &Response{
31 |         Response: *r,
32 |         Text:     getTextFromResp(r),
33 |     }
34 | }
35 | 

--------------------------------------------------------------------------------
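UrlParse above exists because net/url rejects unescaped % signs in a path, while crawled pages produce such URLs routinely. A small sketch of the fallback behavior; the example URL is made up:

    package main

    import (
        "fmt"

        "crawlergo/pkg/tools/requests"
    )

    func main() {
        // url.Parse fails on the bare % in the path ("%" is not a valid
        // escape); UrlParse retries with it rewritten as %25 and succeeds.
        u, err := requests.UrlParse("http://example.com/files/100%")
        fmt.Println(u, err) // http://example.com/files/100%25 <nil>
    }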
/pkg/domain_collect.go:
--------------------------------------------------------------------------------
1 | package pkg
2 | 
3 | import (
4 |     "crawlergo/pkg/model"
5 |     mapset "github.com/deckarep/golang-set"
6 |     "strings"
7 | )
8 | 
9 | func SubDomainCollect(reqList []*model.Request, HostLimit string) []string {
10 |     var subDomainList []string
11 |     uniqueSet := mapset.NewSet()
12 |     for _, req := range reqList {
13 |         domain := req.URL.Hostname()
14 |         if uniqueSet.Contains(domain) {
15 |             continue
16 |         }
17 |         uniqueSet.Add(domain)
18 |         if strings.HasSuffix(domain, "."+HostLimit) {
19 |             subDomainList = append(subDomainList, domain)
20 |         }
21 |     }
22 |     return subDomainList
23 | }
24 | 
25 | func AllDomainCollect(reqList []*model.Request) []string {
26 |     uniqueSet := mapset.NewSet()
27 |     var allDomainList []string
28 |     for _, req := range reqList {
29 |         domain := req.URL.Hostname()
30 |         if uniqueSet.Contains(domain) {
31 |             continue
32 |         }
33 |         uniqueSet.Add(domain)
34 |         allDomainList = append(allDomainList, req.URL.Hostname())
35 |     }
36 |     return allDomainList
37 | }
38 | 

--------------------------------------------------------------------------------
/pkg/model/url_test.go:
--------------------------------------------------------------------------------
1 | package model
2 | 
3 | import (
4 |     "net/url"
5 |     "testing"
6 | 
7 |     "golang.org/x/net/publicsuffix"
8 | )
9 | 
10 | var (
11 |     rootDomainTestCases = []struct {
12 |         domain     string
13 |         rootDomain string
14 |         wantICANN  bool
15 |     }{
16 |         {"www.amazon.co.uk", "amazon.co.uk", true},
17 |         {"www.baidu.com", "baidu.com", true},
18 |         {"www.baidu.com.cn", "baidu.com.cn", true},
19 |         {"www.pku.edu.cn", "pku.edu.cn", true},
20 |         {"www.example1.debian.org", "debian.org", true},
21 |         {"www.golang.dev", "golang.dev", true},
22 |         // The cases below are special: unusual TLDs and private-registry domains that rarely come up in practice
23 |         // error domains
24 |         {"com.cn", "", true},
25 |         // not an icann domain
26 |         {"www.example0.debian.net", "", false},
27 |         {"s3.cn-north-1.amazonaws.com.cn", "", false},
28 |         {"www.0emm.com", "", false},
29 |         {"there.is.no.such-tld", "", false},
30 |     }
31 | )
32 | 
33 | func TestRootDomain(t *testing.T) {
34 |     for _, tc := range rootDomainTestCases {
35 |         u := &URL{url.URL{Host: tc.domain}}
36 |         rootDomain := u.RootDomain()
37 |         _, icann := publicsuffix.PublicSuffix(u.Hostname())
38 |         if rootDomain != tc.rootDomain {
39 |             t.Errorf("%s parse root domain failed", tc.domain)
40 |         }
41 |         if icann != tc.wantICANN {
42 |             t.Errorf("%s not an icann domain", tc.domain)
43 |         }
44 |     }
45 | }
46 | 

--------------------------------------------------------------------------------
/examples/request_with_cookie.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # coding: utf-8
3 | 
4 | import simplejson
5 | import subprocess
6 | """
7 | Example: scanning with a Cookie added
8 | 
9 | Command-line equivalent:
10 | ./crawlergo -c /home/test/chrome-linux/chrome -o json --ignore-url-keywords quit,exit,zhuxiao --custom-headers "{\"Cookie\": \"crawlergo=Cool\"}"
11 | 
12 | Use --ignore-url-keywords to add the keywords you want excluded, so logout requests are never visited
13 | """
14 | 
15 | 
16 | def main():
17 |     target = "http://testphp.vulnweb.com/"
18 |     headers = {
19 |         "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
20 |                       "Chrome/74.0.3945.0 Safari/537.36",
21 |         "Cookie": "crawlergo=Cool"
22 |     }
23 |     cmd = ["./crawlergo", "-c", "/home/test/chrome-linux/chrome",
24 |            "-o", "json", "--ignore-url-keywords", "quit,exit,zhuxiao", "--custom-headers", simplejson.dumps(headers),
25 |            target]
26 | 
27 |     rsp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
28 |     output, error = rsp.communicate()
29 | 
30 |     result = simplejson.loads(output.decode().split("--[Mission Complete]--")[1])
31 |     req_list = result["req_list"]
32 |     for each in req_list:
33 |         print(each)
34 | 
35 | 
36 | if __name__ == '__main__':
37 |     main()
38 | 

--------------------------------------------------------------------------------
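The Python examples in this directory all drive crawlergo as a subprocess and parse the JSON printed after the completion marker. A rough Go equivalent of subprocess_call.py; the binary and Chrome paths are placeholders, and the marker string is the one used in the examples:

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "os/exec"
    )

    type crawlResult struct {
        ReqList []map[string]interface{} `json:"req_list"`
    }

    func main() {
        cmd := exec.Command("./crawlergo", "-c", "/tmp/chrome-linux/chrome",
            "-o", "json", "http://testphp.vulnweb.com/")
        output, err := cmd.Output()
        if err != nil {
            panic(err)
        }
        // the JSON result follows the completion marker on stdout
        parts := bytes.SplitN(output, []byte("--[Mission Complete]--"), 2)
        if len(parts) != 2 {
            panic("completion marker not found in output")
        }
        var res crawlResult
        if err := json.Unmarshal(bytes.TrimSpace(parts[1]), &res); err != nil {
            panic(err)
        }
        fmt.Println(res.ReqList[0])
    }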
10 | https://github.com/chromium/chromium/commit/d31383577e0517843c8059dec9b87469bf30900f#diff-d717572478f6a97f889b33917c9d3a5f
11 | 
12 | Finding historical builds
13 | https://github.com/macchrome/winchrome/releases?after=v77.0.3865.90-r681094-Win64
14 | 
15 | Download
16 | https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/628235/chrome-linux.zip
17 | """
18 | 
19 | 
20 | def main():
21 |     target = "http://176.28.50.165/"
22 |     headers = {
23 |         "Host": "testphp.vulnweb.com",
24 |         "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
25 |                       "Chrome/74.0.3945.0 Safari/537.36",
26 |     }
27 |     cmd = ["./crawlergo_cmd", "-c", "/tmp/chrome-linux-628235/chrome",
28 |            "-o", "json", "--custom-headers", simplejson.dumps(headers), target]
29 |     rsp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
30 |     output, error = rsp.communicate()
31 | 
32 |     result = simplejson.loads(output.decode().split("--[Mission Complete]--")[1])
33 |     req_list = result["req_list"]
34 |     for each in req_list:
35 |         print(each)
36 | 
37 | 
38 | if __name__ == '__main__':
39 |     main()
40 | 

--------------------------------------------------------------------------------
/pkg/tools/random.go:
--------------------------------------------------------------------------------
1 | // Random-string helpers
2 | package tools
3 | 
4 | import (
5 |     "math/rand"
6 |     "strings"
7 |     "time"
8 | )
9 | 
10 | const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
11 | const (
12 |     letterIdxBits = 6                    // 6 bits to represent a letter index
13 |     letterIdxMask = 1<<letterIdxBits - 1 // all 1-bits, as many as letterIdxBits
14 |     letterIdxMax  = 63 / letterIdxBits   // number of letter indices fitting in 63 bits
15 | )
16 | 
17 | var src = rand.NewSource(time.Now().UnixNano())
18 | 
19 | /**
20 | Generate a random string of length n,
21 | drawn from letterBytes (letters and digits)
22 | */
23 | func RandSeq(n int) string {
24 |     sb := strings.Builder{}
25 |     sb.Grow(n)
26 | 
27 |     // A single src.Int63() yields 63 random bits, enough for
28 |     // letterIdxMax letter indices; masked values that fall outside
29 |     // letterBytes are discarded below, keeping the distribution
30 |     // over the alphabet uniform.
31 |     for i, cache, remain := n-1, src.Int63(), letterIdxMax; i >= 0; {
32 |         if remain == 0 {
33 |             cache, remain = src.Int63(), letterIdxMax
34 |         }
35 |         if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
36 |             sb.WriteByte(letterBytes[idx])
37 |             i--
38 |         }
39 |         cache >>= letterIdxBits
40 |         remain--
41 |     }
42 | 
43 |     return sb.String()
44 | }
45 | 

--------------------------------------------------------------------------------
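Why the bit twiddling in RandSeq works: each letter index needs 6 bits, one Int63() draw carries 63 random bits, so it can be sliced into 63/6 = 10 indices before a new draw is needed. A tiny sketch of the arithmetic:

    package main

    import "fmt"

    func main() {
        const letterIdxBits = 6                    // bits per letter index
        const letterIdxMask = 1<<letterIdxBits - 1 // 0b111111 == 63
        const letterIdxMax = 63 / letterIdxBits    // indices per Int63() draw

        fmt.Println(letterIdxMask, letterIdxMax) // 63 10
        // letterBytes holds 62 symbols, so masked values 62 and 63 are
        // rejected and redrawn rather than wrapped around.
    }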
/pkg/tools/common.go:
--------------------------------------------------------------------------------
1 | package tools
2 | 
3 | import (
4 |     "bufio"
5 |     "crawlergo/pkg/logger"
6 |     "crypto/md5"
7 |     "encoding/hex"
8 |     "fmt"
9 |     "os"
10 |     "strings"
11 | )
12 | 
13 | func StrMd5(str string) string {
14 |     h := md5.New()
15 |     h.Write([]byte(str))
16 |     return hex.EncodeToString(h.Sum(nil))
17 | }
18 | 
19 | func ConvertHeaders(h map[string]interface{}) map[string]string {
20 |     a := map[string]string{}
21 |     for key, value := range h {
22 |         a[key] = value.(string)
23 |     }
24 |     return a
25 | }
26 | 
27 | func WriteFile(fileName string, content []byte) {
28 |     f, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE, 0644)
29 |     if err != nil {
30 |         fmt.Println(err.Error())
31 |         return
32 |     }
33 |     defer f.Close()
34 |     if _, err = f.Write(content); err != nil {
35 |         logger.Logger.Error("write to file error ", err)
36 |     }
37 | }
38 | 
39 | func ReadFile(filePath string) []string {
40 |     filePaths := []string{}
41 |     f, err := os.OpenFile(filePath, os.O_RDONLY, 0644)
42 |     if err != nil {
43 |         fmt.Println(err.Error())
44 |         return filePaths
45 |     }
46 |     defer f.Close()
47 |     rd := bufio.NewReader(f)
48 |     for {
49 |         line, err := rd.ReadString('\n')
50 |         if len(line) > 0 {
51 |             filePaths = append(filePaths, line)
52 |         }
53 |         if err != nil { // io.EOF or a real read error: stop
54 |             break
55 |         }
56 |     }
57 |     return filePaths
58 | }
59 | 
60 | func StringSliceContain(data []string, item string) bool {
61 |     for _, value := range data {
62 |         if value == item {
63 |             return true
64 |         }
65 |     }
66 |     return false
67 | }
68 | 
69 | func MapStringFormat(data map[string]string) string {
70 |     str := ""
71 |     for key, value := range data {
72 |         str += fmt.Sprintf("%s=%s,", key, value)
73 |     }
74 |     str = strings.Trim(str, ",")
75 |     return str
76 | }
77 | 

--------------------------------------------------------------------------------
/examples/zombie_clean.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # coding: utf-8
3 | 
4 | """
5 | author: 猪猪侠 https://github.com/ring04h
6 | 
7 | """
8 | 
9 | import logging
10 | import subprocess
11 | 
12 | logging.basicConfig(level=logging.DEBUG)
13 | 
14 | #
15 | # (crontab -l;echo '0 2 * * * /usr/local/bin/python3 /data/script/zombie_clean.py') | crontab -
16 | #
17 | 
18 | def is_timeout(etime):
19 |     if '-' in etime:
20 |         day, hour = etime.split('-')
21 |         return True if int(day) >= 1 else False
22 |     else:
23 |         return False
24 | 
25 | 
26 | def cmdprocess(cmdline):
27 | 
28 |     pipe = subprocess.Popen(cmdline, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
29 |     output, stderr = pipe.communicate()
30 |     return_code = pipe.returncode
31 |     stderr = stderr.decode(errors='replace')
32 |     output = output.decode(errors='replace')
33 |     return output, stderr, return_code
34 | 
35 | 
36 | def main():
37 | 
38 |     cmdline = "ps -ef | grep crawlergo | grep -v grep | awk '{print $2}'"
39 |     output, stderr, return_code = cmdprocess(cmdline)
40 | 
41 |     if return_code != 0:
42 |         return
43 | 
44 |     zombie_pids = output.splitlines()
45 | 
46 |     for zombie_pid in zombie_pids:
47 | 
48 |         cmdline = f'''ps -eo pid,etime | grep {zombie_pid}'''
49 |         ps_output, ps_stderr, ps_return_code = cmdprocess(cmdline)
50 | 
51 |         if ps_return_code != 0:
52 |             continue
53 | 
54 |         for line in ps_output.splitlines():
55 | 
56 |             pid, etime = line.split()
57 | 
58 |             status = is_timeout(etime)
59 |             logging.debug(f"PID: {pid:<8} ETIME: {etime:<15} TIMEOUT: {status}")
60 | 
61 |             if not status:
62 |                 continue
63 | 
64 |             kill_cmdline = f"kill -9 {pid}"
65 |             logging.debug(f"call kill : [{kill_cmdline}]")
66 | 
67 |             cmdprocess(kill_cmdline)
68 | 
69 | if __name__ == "__main__":
70 |     main()
71 | 

--------------------------------------------------------------------------------
/pkg/filter/simple_filter.go:
--------------------------------------------------------------------------------
1 | package filter
2 | 
3 | import (
4 |     "crawlergo/pkg/config"
5 |     "crawlergo/pkg/model"
6 |     "github.com/deckarep/golang-set"
7 |     "strings"
8 | )
9 | 
10 | type SimpleFilter struct {
11 |     UniqueSet mapset.Set
12 |     HostLimit string
13 | }
14 | 
15 | /**
16 | Returns true if the request should be filtered out
17 | */
18 | func (s *SimpleFilter) DoFilter(req *model.Request) bool {
19 |     if s.UniqueSet == nil {
20 |         s.UniqueSet = mapset.NewSet()
21 |     }
22 |     // First, apply the domain filter if a host limit is set
23 |     if s.HostLimit != "" && s.DomainFilter(req) {
24 |         return true
25 |     }
26 |     // Deduplicate
27 |     if s.UniqueFilter(req) {
28 |         return true
29 |     }
30 |     // Filter out static resources
31 |     if s.StaticFilter(req) {
32 |         return true
33 |     }
34 |     return false
35 | }
36 | 
37 | /**
38 | Request deduplication
39 | */
40 | func (s *SimpleFilter) UniqueFilter(req *model.Request) bool {
41 |     if s.UniqueSet == nil {
42 |         s.UniqueSet = mapset.NewSet()
43 |     }
44 |     if s.UniqueSet.Contains(req.UniqueId()) {
45 |         return true
46 |     } else {
47 |         s.UniqueSet.Add(req.UniqueId())
48 |         return false
49 |     }
50 | }
51 | 
52 | /**
53 | Static-resource filtering
54 | */
55 | func (s *SimpleFilter) StaticFilter(req *model.Request) bool {
56 |     if s.UniqueSet == nil {
57 |         s.UniqueSet = mapset.NewSet()
58 |     }
59 |     // First convert the suffix slice into a map for fast lookups
60 |     extMap := map[string]int{}
61 |     staticSuffix := append(config.StaticSuffix, "js", "css", "json")
62 |     for _, suffix := range staticSuffix {
63 |         extMap[suffix] = 1
64 |     }
65 | 
66 |     if req.URL.FileExt() == "" {
67 |         return false
68 |     }
69 |     if _, ok := extMap[req.URL.FileExt()]; ok {
70 |         return true
71 |     }
72 |     return false
73 | }
74 | 
75 | /**
76 | Keep only links on the specified host
77 | */
78 | func (s *SimpleFilter) DomainFilter(req *model.Request) bool {
79 |     if s.UniqueSet == nil {
80 |         s.UniqueSet = mapset.NewSet()
81 |     }
82 |     if req.URL.Host == s.HostLimit || req.URL.Hostname() == s.HostLimit {
83 |         return false
84 |     }
85 |     if strings.HasSuffix(s.HostLimit, ":80") && req.URL.Port() == "" && req.URL.Scheme == "http" {
86 |         if req.URL.Hostname()+":80" == s.HostLimit {
87 |             return false
88 |         }
89 |     }
90 |     if strings.HasSuffix(s.HostLimit, ":443") && req.URL.Port() == "" && req.URL.Scheme == "https" {
91 |         if req.URL.Hostname()+":443" == s.HostLimit {
92 |             return false
93 |         }
94 |     }
95 |     return true
96 | }
97 | 

--------------------------------------------------------------------------------
/pkg/engine/collect_links.go:
--------------------------------------------------------------------------------
1 | package engine
2 | 
3 | import (
4 |     "context"
5 |     "crawlergo/pkg/config"
6 |     "crawlergo/pkg/logger"
7 |     "fmt"
8 |     "github.com/chromedp/cdproto/cdp"
9 |     "github.com/chromedp/chromedp"
10 |     "regexp"
11 |     "time"
12 | )
13 | 
14 | /**
15 | Finally, collect all the links on the page
16 | */
17 | func (tab *Tab) collectLinks() {
18 |     go tab.collectHrefLinks()
19 |     go tab.collectObjectLinks()
20 |     go tab.collectCommentLinks()
21 | }
22 | 
23 | func (tab *Tab) collectHrefLinks() {
24 |     defer tab.collectLinkWG.Done()
25 |     ctx := tab.GetExecutor()
26 |     // Collect src / href / data-url / data-href attribute values
27 |     attrNameList := []string{"src", "href", "data-url", "data-href"}
28 |     for _, attrName := range attrNameList {
29 |         tCtx, cancel := context.WithTimeout(ctx, time.Second*1)
30 |         var attrs []map[string]string
31 |         _ = chromedp.AttributesAll(fmt.Sprintf(`[%s]`, attrName), &attrs, chromedp.ByQueryAll).Do(tCtx)
32 |         cancel()
33 |         for _, attrMap := range attrs {
34 |             tab.AddResultUrl(config.GET, attrMap[attrName], config.FromDOM)
35 |         }
36 |     }
37 | }
38 | 
39 | func (tab *Tab) collectObjectLinks() {
40 |     defer tab.collectLinkWG.Done()
41 |     ctx := tab.GetExecutor()
42 |     // Collect object[data] links
43 |     tCtx, cancel := context.WithTimeout(ctx, time.Second*1)
44 |     defer cancel()
45 |     var attrs []map[string]string
46 |     _ = chromedp.AttributesAll(`object[data]`, &attrs, chromedp.ByQueryAll).Do(tCtx)
47 |     for _, attrMap := range attrs {
48 |         tab.AddResultUrl(config.GET, attrMap["data"], config.FromDOM)
49 |     }
50 | }
51 | 
52 | func (tab *Tab) collectCommentLinks() {
53 |     defer tab.collectLinkWG.Done()
54 |     ctx := tab.GetExecutor()
55 |     // Collect links hidden in HTML comments
56 |     var nodes []*cdp.Node
57 |     tCtxComment, cancel := context.WithTimeout(ctx, time.Second*1)
58 |     defer cancel()
59 |     commentErr := chromedp.Nodes(`//comment()`, &nodes, chromedp.BySearch).Do(tCtxComment)
60 |     if commentErr != nil {
61 |         logger.Logger.Debug("get comment nodes err")
62 |         logger.Logger.Debug(commentErr)
63 |         return
64 |     }
65 |     urlRegex := regexp.MustCompile(config.URLRegex)
66 |     for _, node := range nodes {
67 |         content := node.NodeValue
68 |         urlList := urlRegex.FindAllString(content, -1)
69 |         for _, url := range urlList {
70 |             tab.AddResultUrl(config.GET, url, config.FromComment)
71 |         }
72 |     }
73 | }
74 | 

--------------------------------------------------------------------------------
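How the filter package above fits together: DoFilter short-circuits through the domain, dedup, and static-resource checks in that order. A minimal usage sketch; the target host is a placeholder:

    package main

    import (
        "fmt"

        "crawlergo/pkg/config"
        "crawlergo/pkg/filter"
        "crawlergo/pkg/model"
    )

    func main() {
        f := filter.SimpleFilter{HostLimit: "testphp.vulnweb.com"}

        u, _ := model.GetUrl("http://testphp.vulnweb.com/listproducts.php?cat=1")
        req := model.GetRequest(config.GET, u)

        fmt.Println(f.DoFilter(&req)) // false: in scope, first sighting, not static
        fmt.Println(f.DoFilter(&req)) // true: dropped as a duplicate on the second pass
    }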
/pkg/engine/browser.go:
--------------------------------------------------------------------------------
1 | package engine
2 | 
3 | import (
4 |     "context"
5 |     "crawlergo/pkg/logger"
6 |     "log"
7 |     "sync"
8 |     "time"
9 | 
10 |     "github.com/chromedp/cdproto/browser"
11 |     "github.com/chromedp/chromedp"
12 | )
13 | 
14 | type Browser struct {
15 |     Ctx          *context.Context
16 |     Cancel       *context.CancelFunc
17 |     tabs         []*context.Context
18 |     tabCancels   []context.CancelFunc
19 |     ExtraHeaders map[string]interface{}
20 |     lock         sync.Mutex
21 | }
22 | 
23 | func init() {
24 | 
25 | }
26 | 
27 | func InitBrowser(chromiumPath string, incognito bool, extraHeaders map[string]interface{}, proxy string, noHeadless bool) *Browser {
28 |     var bro Browser
29 |     opts := append(chromedp.DefaultExecAllocatorOptions[:],
30 | 
31 |         // executable path
32 |         chromedp.ExecPath(chromiumPath),
33 |         // headless mode
34 |         chromedp.Flag("headless", !noHeadless),
35 |         // disable the GPU, no GUI
36 |         chromedp.Flag("disable-gpu", true),
37 |         // launch in incognito mode
38 |         chromedp.Flag("incognito", incognito),
39 |         // disable the sandbox
40 |         chromedp.Flag("no-sandbox", true),
41 |         // ignore certificate errors
42 |         chromedp.Flag("ignore-certificate-errors", true),
43 | 
44 |         chromedp.Flag("disable-images", true),
45 |         //
46 |         chromedp.Flag("disable-web-security", true),
47 |         //
48 |         chromedp.Flag("disable-xss-auditor", true),
49 |         //
50 |         chromedp.Flag("disable-setuid-sandbox", true),
51 | 
52 |         chromedp.Flag("allow-running-insecure-content", true),
53 | 
54 |         chromedp.Flag("disable-webgl", true),
55 | 
56 |         chromedp.Flag("disable-popup-blocking", true),
57 | 
58 |         chromedp.WindowSize(1920, 1080),
59 |     )
60 |     // set the browser proxy
61 |     if proxy != "" {
62 |         opts = append(opts, chromedp.ProxyServer(proxy))
63 |     }
64 | 
65 |     allocCtx, cancel := chromedp.NewExecAllocator(context.Background(), opts...)
66 |     bctx, _ := chromedp.NewContext(allocCtx,
67 |         chromedp.WithLogf(log.Printf),
68 |     )
69 |     // https://github.com/chromedp/chromedp/issues/824#issuecomment-845664441
70 |     // To create multiple tabs inside one browser, the browser context must be created first, i.e. by running the statement below
71 |     chromedp.Run(bctx)
72 |     bro.Cancel = &cancel
73 |     bro.Ctx = &bctx
74 |     bro.ExtraHeaders = extraHeaders
75 |     return &bro
76 | }
77 | 
78 | func (bro *Browser) NewTab(timeout time.Duration) (*context.Context, context.CancelFunc) {
79 |     bro.lock.Lock()
80 |     ctx, cancel := chromedp.NewContext(*bro.Ctx)
81 |     //defer cancel()
82 |     tCtx, _ := context.WithTimeout(ctx, timeout)
83 |     bro.tabs = append(bro.tabs, &tCtx)
84 |     bro.tabCancels = append(bro.tabCancels, cancel)
85 |     //defer cancel2()
86 |     bro.lock.Unlock()
87 | 
88 |     //return bro.Ctx, &cancel
89 |     return &tCtx, cancel
90 | }
91 | 
92 | func (bro *Browser) Close() {
93 |     logger.Logger.Info("closing browser.")
94 |     for _, cancel := range bro.tabCancels {
95 |         cancel()
96 |     }
97 | 
98 |     for _, ctx := range bro.tabs {
99 |         err := browser.Close().Do(*ctx)
100 |         if err != nil {
101 |             logger.Logger.Debug(err)
102 |         }
103 |     }
104 | 
105 |     err := browser.Close().Do(*bro.Ctx)
106 |     if err != nil {
107 |         logger.Logger.Debug(err)
108 |     }
109 |     (*bro.Cancel)()
110 | }
111 | 

--------------------------------------------------------------------------------
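A minimal sketch of the Browser lifecycle exposed above: allocate once, open tabs with per-tab timeouts, close everything at the end. The Chromium path is a placeholder:

    package main

    import (
        "time"

        "crawlergo/pkg/engine"
    )

    func main() {
        // headers sent with every tab; incognito on, proxy off, headless on
        extraHeaders := map[string]interface{}{"Cookie": "crawlergo=Cool"}
        bro := engine.InitBrowser("/usr/bin/chromium", true, extraHeaders, "", false)
        defer bro.Close()

        ctx, cancel := bro.NewTab(20 * time.Second)
        defer cancel()
        _ = ctx // run chromedp actions against *ctx (see tab.go)
    }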
/pkg/model/url.go:
--------------------------------------------------------------------------------
1 | package model
2 | 
3 | import (
4 |     "errors"
5 |     "fmt"
6 |     "net/url"
7 |     "regexp"
8 |     "strings"
9 | 
10 |     "golang.org/x/net/publicsuffix"
11 | 
12 |     "crawlergo/pkg/tools/requests"
13 | )
14 | 
15 | type URL struct {
16 |     url.URL
17 | }
18 | 
19 | func GetUrl(_url string, parentUrls ...URL) (*URL, error) {
20 |     // Normalize the URL into a complete, parseable form
21 |     var u URL
22 |     _url, err := u.parse(_url, parentUrls...)
23 |     if err != nil {
24 |         return nil, err
25 |     }
26 | 
27 |     if len(parentUrls) == 0 {
28 |         _u, err := requests.UrlParse(_url)
29 |         if err != nil {
30 |             return nil, err
31 |         }
32 |         u = URL{*_u}
33 |         if u.Path == "" {
34 |             u.Path = "/"
35 |         }
36 |     } else {
37 |         pUrl := parentUrls[0]
38 |         _u, err := pUrl.Parse(_url)
39 |         if err != nil {
40 |             return nil, err
41 |         }
42 |         u = URL{*_u}
43 |         if u.Path == "" {
44 |             u.Path = "/"
45 |         }
46 |         //fmt.Println(_url, pUrl.String(), u.String())
47 |     }
48 | 
49 |     fixPath := regexp.MustCompile("^/{2,}")
50 | 
51 |     if fixPath.MatchString(u.Path) {
52 |         u.Path = fixPath.ReplaceAllString(u.Path, "/")
53 |     }
54 | 
55 |     return &u, nil
56 | }
57 | 
58 | /**
59 | Fix up an incomplete URL
60 | */
61 | func (u *URL) parse(_url string, parentUrls ...URL) (string, error) {
62 |     _url = strings.Trim(_url, " ")
63 | 
64 |     if len(_url) == 0 {
65 |         return "", errors.New("invalid url, length 0")
66 |     }
67 |     // Collapse redundant # signs
68 |     if strings.Count(_url, "#") > 1 {
69 |         _url = regexp.MustCompile(`#+`).ReplaceAllString(_url, "#")
70 |     }
71 | 
72 |     // No parent link: return as-is
73 |     if len(parentUrls) == 0 {
74 |         return _url, nil
75 |     }
76 | 
77 |     if strings.HasPrefix(_url, "http://") || strings.HasPrefix(_url, "https://") {
78 |         return _url, nil
79 |     } else if strings.HasPrefix(_url, "javascript:") {
80 |         return "", errors.New("invalid url, javascript protocol")
81 |     } else if strings.HasPrefix(_url, "mailto:") {
82 |         return "", errors.New("invalid url, mailto protocol")
83 |     }
84 |     return _url, nil
85 | }
86 | 
87 | func (u *URL) QueryMap() map[string]interface{} {
88 |     queryMap := map[string]interface{}{}
89 |     for key, value := range u.Query() {
90 |         if len(value) == 1 {
91 |             queryMap[key] = value[0]
92 |         } else {
93 |             queryMap[key] = value
94 |         }
95 |     }
96 |     return queryMap
97 | }
98 | 
99 | /**
100 | Return the URL without its query string
101 | */
102 | func (u *URL) NoQueryUrl() string {
103 |     return fmt.Sprintf("%s://%s%s", u.Scheme, u.Host, u.Path)
104 | }
105 | 
106 | /**
107 | Return the URL without its fragment
108 | */
109 | func (u *URL) NoFragmentUrl() string {
110 |     return strings.Replace(u.String(), u.Fragment, "", -1)
111 | }
112 | 
113 | func (u *URL) NoSchemeFragmentUrl() string {
114 |     return fmt.Sprintf("://%s%s", u.Host, u.Path)
115 | }
116 | 
117 | func (u *URL) NavigationUrl() string {
118 |     return u.NoSchemeFragmentUrl()
119 | }
120 | 
121 | /**
122 | Return the root domain
123 | 
124 | e.g. a.b.c.360.cn returns 360.cn
125 | */
126 | func (u *URL) RootDomain() string {
127 |     domain := u.Hostname()
128 |     suffix, icann := publicsuffix.PublicSuffix(strings.ToLower(domain))
129 |     // Not an ICANN-managed domain: return an empty string
130 |     if !icann {
131 |         return ""
132 |     }
133 |     i := len(domain) - len(suffix) - 1
134 |     // Malformed domain
135 |     if i <= 0 {
136 |         return ""
137 |     }
138 |     if domain[i] != '.' {
139 |         return ""
140 |     }
141 |     return domain[1+strings.LastIndex(domain[:i], "."):]
142 | }
143 | 
144 | /**
145 | File name
146 | */
147 | func (u *URL) FileName() string {
148 |     parts := strings.Split(u.Path, `/`)
149 |     lastPart := parts[len(parts)-1]
150 |     if strings.Contains(lastPart, ".") {
151 |         return lastPart
152 |     } else {
153 |         return ""
154 |     }
155 | }
156 | 
157 | /**
158 | File extension
159 | */
160 | func (u *URL) FileExt() string {
161 |     fileName := u.FileName()
162 |     if fileName == "" {
163 |         return ""
164 |     }
165 |     parts := strings.Split(fileName, ".")
166 |     return strings.ToLower(parts[len(parts)-1])
167 | }
168 | 
169 | /**
170 | Return the parent path; if already at the root path, return an empty string
171 | */
172 | func (u *URL) ParentPath() string {
173 |     if u.Path == "/" {
174 |         return ""
175 |     } else if strings.HasSuffix(u.Path, "/") {
176 |         if strings.Count(u.Path, "/") == 2 {
177 |             return "/"
178 |         }
179 |         parts := strings.Split(u.Path, "/")
180 |         parts = parts[:len(parts)-2]
181 |         return strings.Join(parts, "/")
182 |     } else {
183 |         if strings.Count(u.Path, "/") == 1 {
184 |             return "/"
185 |         }
186 |         parts := strings.Split(u.Path, "/")
187 |         parts = parts[:len(parts)-1]
188 |         return strings.Join(parts, "/")
189 |     }
190 | }
191 | 

--------------------------------------------------------------------------------
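A small sketch of how GetUrl resolves a relative link against a parent URL and what the helper accessors return; the URLs are made up:

    package main

    import (
        "fmt"

        "crawlergo/pkg/model"
    )

    func main() {
        parent, _ := model.GetUrl("https://www.example.com/shop/index.php")

        // relative links are resolved against the parent URL
        u, _ := model.GetUrl("../cart.php?id=1", *parent)
        fmt.Println(u.String())     // https://www.example.com/cart.php?id=1
        fmt.Println(u.NoQueryUrl()) // https://www.example.com/cart.php
        fmt.Println(u.RootDomain()) // example.com
    }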
/pkg/config/config.go:
--------------------------------------------------------------------------------
1 | package config
2 | 
3 | import "time"
4 | 
5 | const (
6 |     DefaultUA = "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.0 Safari/537.36"
7 |     MaxTabsCount = 10
8 |     TabRunTimeout = 20 * time.Second
9 |     DefaultInputText = "Crawlergo"
10 |     FormInputKeyword = "Crawlergo"
11 |     SuspectURLRegex = `(?:"|')(((?:[a-zA-Z]{1,10}://|//)[^"'/]{1,}\.[a-zA-Z]{2,}[^"']{0,})|((?:/|\.\./|\./)[^"'><,;|*()(%%$^/\\\[\]][^"'><,;|()]{1,})|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{1,}\.(?:[a-zA-Z]{1,4}|action)(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{3,}(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-]{1,}\.(?:php|asp|aspx|jsp|json|action|html|js|txt|xml)(?:[\?|#][^"|']{0,}|)))(?:"|')`
12 |     URLRegex = `((https?|ftp|file):)?//[-A-Za-z0-9+&@#/%?=~_|!:,.;]+[-A-Za-z0-9+&@#/%=~_|]`
13 |     AttrURLRegex = ``
14 |     DomContentLoadedTimeout = 5 * time.Second
15 |     EventTriggerInterval = 100 * time.Millisecond // in milliseconds
16 |     BeforeExitDelay = 1 * time.Second
17 |     DefaultEventTriggerMode = EventTriggerAsync
18 |     MaxCrawlCount = 200
19 | )
20 | 
21 | // Request methods
22 | const (
23 |     GET = "GET"
24 |     POST = "POST"
25 |     PUT = "PUT"
26 |     DELETE = "DELETE"
27 |     HEAD = "HEAD"
28 |     OPTIONS = "OPTIONS"
29 | )
30 | 
31 | // Filter modes
32 | const (
33 |     SimpleFilterMode = "simple"
34 |     SmartFilterMode = "smart"
35 |     StrictFilterMode = "strict"
36 | )
37 | 
38 | // Event-trigger modes
39 | const (
40 |     EventTriggerAsync = "async"
41 |     EventTriggerSync = "sync"
42 | )
43 | 
44 | // Request sources
45 | const (
46 |     FromTarget = "Target"           // the initial input target
47 |     FromNavigation = "Navigation"   // page navigation
48 |     FromXHR = "XHR"                 // async AJAX request
49 |     FromDOM = "DOM"                 // parsed out of the DOM
50 |     FromJSFile = "JavaScript"       // parsed out of JS scripts
51 |     FromFuzz = "PathFuzz"           // initial path fuzzing
52 |     FromRobots = "robots.txt"       // robots.txt
53 |     FromComment = "Comment"         // HTML comments in the page
54 |     FromWebSocket = "WebSocket"
55 |     FromEventSource = "EventSource"
56 |     FromFetch = "Fetch"
57 |     FromHistoryAPI = "HistoryAPI"
58 |     FromOpenWindow = "OpenWindow"
59 |     FromHashChange = "HashChange"
60 |     FromStaticRes = "StaticResource"
61 |     FromStaticRegex = "StaticRegex"
62 | )
63 | 
64 | // content-type
65 | const (
"application/json" 67 | URLENCODED = "application/x-www-form-urlencoded" 68 | MULTIPART = "multipart/form-data" 69 | ) 70 | 71 | var StaticSuffix = []string{ 72 | "png", "gif", "jpg", "mp4", "mp3", "mng", "pct", "bmp", "jpeg", "pst", "psp", "ttf", 73 | "tif", "tiff", "ai", "drw", "wma", "ogg", "wav", "ra", "aac", "mid", "au", "aiff", 74 | "dxf", "eps", "ps", "svg", "3gp", "asf", "asx", "avi", "mov", "mpg", "qt", "rm", 75 | "wmv", "m4a", "bin", "xls", "xlsx", "ppt", "pptx", "doc", "docx", "odt", "ods", "odg", 76 | "odp", "exe", "zip", "rar", "tar", "gz", "iso", "rss", "pdf", "txt", "dll", "ico", 77 | "gz2", "apk", "crt", "woff", "map", "woff2", "webp", "less", "dmg", "bz2", "otf", "swf", 78 | "flv", "mpeg", "dat", "xsl", "csv", "cab", "exif", "wps", "m4v", "rmvb", 79 | } 80 | 81 | var ScriptSuffix = []string{ 82 | "php", "asp", "jsp", "asa", 83 | } 84 | 85 | var DefaultIgnoreKeywords = []string{"logout", "quit", "exit"} 86 | var AllowedFormName = []string{"default", "mail", "code", "phone", "username", "password", "qq", "id_card", "url", "date", "number"} 87 | 88 | type ContinueResourceList []string 89 | 90 | var InputTextMap = map[string]map[string]interface{}{ 91 | "mail": { 92 | "keyword": []string{"mail"}, 93 | "value": "crawlergo@gmail.com", 94 | }, 95 | "code": { 96 | "keyword": []string{"yanzhengma", "code", "ver", "captcha"}, 97 | "value": "123a", 98 | }, 99 | "phone": { 100 | "keyword": []string{"phone", "number", "tel", "shouji"}, 101 | "value": "18812345678", 102 | }, 103 | "username": { 104 | "keyword": []string{"name", "user", "id", "login", "account"}, 105 | "value": "crawlergo@gmail.com", 106 | }, 107 | "password": { 108 | "keyword": []string{"pass", "pwd"}, 109 | "value": "Crawlergo6.", 110 | }, 111 | "qq": { 112 | "keyword": []string{"qq", "wechat", "tencent", "weixin"}, 113 | "value": "123456789", 114 | }, 115 | "IDCard": { 116 | "keyword": []string{"card", "shenfen"}, 117 | "value": "511702197409284963", 118 | }, 119 | "url": { 120 | "keyword": []string{"url", "site", "web", "blog", "link"}, 121 | "value": "https://crawlergo.nice.cn/", 122 | }, 123 | "date": { 124 | "keyword": []string{"date", "time", "year", "now"}, 125 | "value": "2018-01-01", 126 | }, 127 | "number": { 128 | "keyword": []string{"day", "age", "num", "count"}, 129 | "value": "10", 130 | }, 131 | } 132 | -------------------------------------------------------------------------------- /pkg/model/request.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | import ( 4 | "crawlergo/pkg/config" 5 | "crawlergo/pkg/tools" 6 | "encoding/json" 7 | "errors" 8 | "fmt" 9 | "net/url" 10 | "strings" 11 | ) 12 | 13 | type Filter struct { 14 | MarkedQueryMap map[string]interface{} 15 | QueryKeysId string 16 | QueryMapId string 17 | MarkedPostDataMap map[string]interface{} 18 | PostDataId string 19 | MarkedPath string 20 | PathId string 21 | UniqueId string 22 | } 23 | 24 | type Options struct { 25 | Headers map[string]interface{} 26 | PostData string 27 | } 28 | 29 | type Request struct { 30 | URL *URL 31 | Method string 32 | Headers map[string]interface{} 33 | PostData string 34 | Filter Filter 35 | Source string 36 | RedirectionFlag bool 37 | Proxy string 38 | } 39 | 40 | var supportContentType = []string{config.JSON, config.URLENCODED} 41 | 42 | /** 43 | 获取Request对象 44 | 可选设置headers和postData 45 | */ 46 | func GetRequest(method string, URL *URL, options ...Options) Request { 47 | var req Request 48 | req.URL = URL 49 | req.Method = strings.ToUpper(method) 50 | 
50 |     if len(options) != 0 {
51 |         option := options[0]
52 |         if option.Headers != nil {
53 |             req.Headers = option.Headers
54 |         }
55 | 
56 |         if option.PostData != "" {
57 |             req.PostData = option.PostData
58 |         }
59 |     } else {
60 |         req.Headers = map[string]interface{}{}
61 |     }
62 | 
63 |     return req
64 | }
65 | 
66 | /**
67 | Print the fully formatted request
68 | */
69 | func (req *Request) FormatPrint() {
70 |     var tempStr = req.Method
71 |     tempStr += " " + req.URL.String() + " HTTP/1.1\r\n"
72 |     for k, v := range req.Headers {
73 |         tempStr += k + ": " + v.(string) + "\r\n"
74 |     }
75 |     tempStr += "\r\n"
76 |     if req.Method == config.POST {
77 |         tempStr += req.PostData
78 |     }
79 |     fmt.Println(tempStr)
80 | }
81 | 
82 | /**
83 | Brief output
84 | */
85 | func (req *Request) SimplePrint() {
86 |     var tempStr = req.Method
87 |     tempStr += " " + req.URL.String() + " "
88 |     if req.Method == config.POST {
89 |         tempStr += req.PostData
90 |     }
91 |     fmt.Println(tempStr)
92 | }
93 | 
94 | func (req *Request) SimpleFormat() string {
95 |     var tempStr = req.Method
96 |     tempStr += " " + req.URL.String() + " "
97 |     if req.Method == config.POST {
98 |         tempStr += req.PostData
99 |     }
100 |     return tempStr
101 | }
102 | 
103 | /**
104 | Request ID computed without headers
105 | */
106 | func (req *Request) NoHeaderId() string {
107 |     return tools.StrMd5(req.Method + req.URL.String() + req.PostData)
108 | }
109 | 
110 | func (req *Request) UniqueId() string {
111 |     if req.RedirectionFlag {
112 |         return tools.StrMd5(req.NoHeaderId() + "Redirection")
113 |     } else {
114 |         return req.NoHeaderId()
115 |     }
116 | }
117 | 
118 | /**
119 | Return the POST body parsed into a map
120 | 
121 | Supports application/x-www-form-urlencoded and application/json
122 | 
123 | If parsing fails, returns a map of the form {"key": postDataStr}
124 | */
125 | func (req *Request) PostDataMap() map[string]interface{} {
126 |     contentType, err := req.getContentType()
127 |     if err != nil {
128 |         return map[string]interface{}{
129 |             "key": req.PostData,
130 |         }
131 |     }
132 | 
133 |     if strings.HasPrefix(contentType, config.JSON) {
134 |         var result map[string]interface{}
135 |         err = json.Unmarshal([]byte(req.PostData), &result)
136 |         if err != nil {
137 |             return map[string]interface{}{
138 |                 "key": req.PostData,
139 |             }
140 |         } else {
141 |             return result
142 |         }
143 |     } else if strings.HasPrefix(contentType, config.URLENCODED) {
144 |         var result = map[string]interface{}{}
145 |         r, err := url.ParseQuery(req.PostData)
146 |         if err != nil {
147 |             return map[string]interface{}{
148 |                 "key": req.PostData,
149 |             }
150 |         } else {
151 |             for key, value := range r {
152 |                 if len(value) == 1 {
153 |                     result[key] = value[0]
154 |                 } else {
155 |                     result[key] = value
156 |                 }
157 |             }
158 |             return result
159 |         }
160 |     } else {
161 |         return map[string]interface{}{
162 |             "key": req.PostData,
163 |         }
164 |     }
165 | }
166 | 
167 | /**
168 | Return the query string parsed into a map
169 | */
170 | func (req *Request) QueryMap() map[string][]string {
171 |     return req.URL.Query()
172 | }
173 | 
174 | /**
175 | Get the content-type
176 | */
177 | func (req *Request) getContentType() (string, error) {
178 |     headers := req.Headers
179 |     var contentType string
180 |     if ct, ok := headers["Content-Type"]; ok {
181 |         contentType = ct.(string)
182 |     } else if ct, ok := headers["Content-type"]; ok {
183 |         contentType = ct.(string)
184 |     } else if ct, ok := headers["content-type"]; ok {
185 |         contentType = ct.(string)
186 |     } else {
187 |         return "", errors.New("no content-type")
188 |     }
189 | 
190 |     for _, ct := range supportContentType {
191 |         if strings.HasPrefix(contentType, ct) {
192 |             return contentType, nil
193 |         }
194 |     }
195 |     return "", errors.New("unsupported content-type: " + contentType)
196 | }
197 | 

--------------------------------------------------------------------------------
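A short sketch of building a Request and reading its POST body back as a map; note that PostDataMap falls back to {"key": body} for content types it cannot parse:

    package main

    import (
        "fmt"

        "crawlergo/pkg/config"
        "crawlergo/pkg/model"
    )

    func main() {
        u, _ := model.GetUrl("http://testphp.vulnweb.com/login.php")
        req := model.GetRequest(config.POST, u, model.Options{
            Headers:  map[string]interface{}{"Content-Type": config.URLENCODED},
            PostData: "user=crawlergo&pass=Crawlergo6.",
        })

        fmt.Println(req.PostDataMap()) // map[pass:Crawlergo6. user:crawlergo]
        fmt.Println(req.UniqueId())    // md5 over method + URL + body
    }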
"", errors.New("dont support such content-type:" + contentType) 196 | } 197 | -------------------------------------------------------------------------------- /pkg/engine/after_loaded_tasks.go: -------------------------------------------------------------------------------- 1 | package engine 2 | 3 | import ( 4 | "context" 5 | "crawlergo/pkg/config" 6 | "crawlergo/pkg/js" 7 | "crawlergo/pkg/logger" 8 | "crawlergo/pkg/tools" 9 | "fmt" 10 | "github.com/chromedp/cdproto/cdp" 11 | "github.com/chromedp/chromedp" 12 | "time" 13 | ) 14 | 15 | /** 16 | 根据NODE节点执行JS的代码 17 | err := EvaluateAsDevTools(snippet(submitJS, cashX(true), sel, nodes[0]), &res).Do(ctx) 18 | 19 | 具体环境实现在 chromedp.submit 函数中 参考即可写出 20 | */ 21 | 22 | /** 23 | 在页面Loaded之后执行 24 | 同时等待 afterDOMRun 之后执行 25 | */ 26 | func (tab *Tab) AfterLoadedRun() { 27 | defer tab.WG.Done() 28 | logger.Logger.Debug("afterLoadedRun start") 29 | tab.formSubmitWG.Add(2) 30 | tab.loadedWG.Add(3) 31 | tab.removeLis.Add(1) 32 | 33 | go tab.formSubmit() 34 | tab.formSubmitWG.Wait() 35 | logger.Logger.Debug("formSubmit end") 36 | 37 | if tab.config.EventTriggerMode == config.EventTriggerAsync { 38 | go tab.triggerJavascriptProtocol() 39 | go tab.triggerInlineEvents() 40 | go tab.triggerDom2Events() 41 | tab.loadedWG.Wait() 42 | } else if tab.config.EventTriggerMode == config.EventTriggerSync { 43 | tab.triggerInlineEvents() 44 | time.Sleep(tab.config.EventTriggerInterval) 45 | tab.triggerDom2Events() 46 | time.Sleep(tab.config.EventTriggerInterval) 47 | tab.triggerJavascriptProtocol() 48 | } 49 | 50 | // 事件触发之后 需要等待一点时间让浏览器成功发出ajax请求 更新DOM 51 | time.Sleep(tab.config.BeforeExitDelay) 52 | 53 | go tab.RemoveDOMListener() 54 | tab.removeLis.Wait() 55 | logger.Logger.Debug("afterLoadedRun end") 56 | } 57 | 58 | /** 59 | 自动化点击提交表单 60 | */ 61 | func (tab *Tab) formSubmit() { 62 | 63 | logger.Logger.Debug("formSubmit start") 64 | 65 | // 首先对form表单设置target 66 | tab.setFormToFrame() 67 | 68 | // 接下来尝试三种方式提交表单 69 | go tab.clickSubmit() 70 | go tab.clickAllButton() 71 | } 72 | 73 | /** 74 | 设置form的target指向一个frame 75 | */ 76 | func (tab *Tab) setFormToFrame() { 77 | // 首先新建 frame 78 | nameStr := tools.RandSeq(8) 79 | tab.Evaluate(fmt.Sprintf(js.NewFrameTemplate, nameStr, nameStr)) 80 | 81 | // 接下来将所有的 form 节点target都指向它 82 | ctx := tab.GetExecutor() 83 | formNodes, formErr := tab.GetNodeIDs(`form`) 84 | if formErr != nil || len(formNodes) == 0 { 85 | logger.Logger.Debug("setFormToFrame: get form element err") 86 | if formErr != nil { 87 | logger.Logger.Debug(formErr) 88 | } 89 | return 90 | } 91 | tCtx, cancel := context.WithTimeout(ctx, time.Second*2) 92 | defer cancel() 93 | _ = chromedp.SetAttributeValue(formNodes, "target", nameStr, chromedp.ByNodeID).Do(tCtx) 94 | } 95 | 96 | /** 97 | 点击按钮 type=submit 98 | */ 99 | func (tab *Tab) clickSubmit() { 100 | defer tab.formSubmitWG.Done() 101 | 102 | // 首先点击按钮 type=submit 103 | ctx := tab.GetExecutor() 104 | 105 | // 获取所有的form节点 直接执行submit 106 | formNodes, formErr := tab.GetNodeIDs(`form`) 107 | if formErr != nil || len(formNodes) == 0 { 108 | logger.Logger.Debug("clickSubmit: get form element err") 109 | if formErr != nil { 110 | logger.Logger.Debug(formErr) 111 | } 112 | return 113 | } 114 | tCtx1, cancel1 := context.WithTimeout(ctx, time.Second*2) 115 | defer cancel1() 116 | _ = chromedp.Submit(formNodes, chromedp.ByNodeID).Do(tCtx1) 117 | 118 | // 获取所有的input标签 119 | inputNodes, inputErr := tab.GetNodeIDs(`form input[type=submit]`) 120 | if inputErr != nil || len(inputNodes) == 0 { 121 | 
logger.Logger.Debug("clickSubmit: get form input element err") 122 | if inputErr != nil { 123 | logger.Logger.Debug(inputErr) 124 | } 125 | return 126 | } 127 | tCtx2, cancel2 := context.WithTimeout(ctx, time.Second*2) 128 | defer cancel2() 129 | _ = chromedp.Click(inputNodes, chromedp.ByNodeID).Do(tCtx2) 130 | } 131 | 132 | /** 133 | click all button 134 | */ 135 | func (tab *Tab) clickAllButton() { 136 | defer tab.formSubmitWG.Done() 137 | 138 | // 获取所有的form中的button节点 139 | ctx := tab.GetExecutor() 140 | // 获取所有的button标签 141 | btnNodeIDs, bErr := tab.GetNodeIDs(`form button`) 142 | if bErr != nil || len(btnNodeIDs) == 0 { 143 | logger.Logger.Debug("clickAllButton: get form button element err") 144 | if bErr != nil { 145 | logger.Logger.Debug(bErr) 146 | } 147 | return 148 | } 149 | tCtx, cancel1 := context.WithTimeout(ctx, time.Second*2) 150 | defer cancel1() 151 | _ = chromedp.Click(btnNodeIDs, chromedp.ByNodeID).Do(tCtx) 152 | 153 | // 使用JS的click方法进行点击 154 | var btnNodes []*cdp.Node 155 | tCtx2, cancel2 := context.WithTimeout(ctx, time.Second*2) 156 | defer cancel2() 157 | err := chromedp.Nodes(btnNodeIDs, &btnNodes, chromedp.ByNodeID).Do(tCtx2) 158 | if err != nil { 159 | return 160 | } 161 | for _, node := range btnNodes { 162 | _ = tab.EvaluateWithNode(js.FormNodeClickJS, node) 163 | } 164 | } 165 | 166 | /** 167 | 触发内联事件 168 | */ 169 | func (tab *Tab) triggerInlineEvents() { 170 | defer tab.loadedWG.Done() 171 | logger.Logger.Debug("triggerInlineEvents start") 172 | tab.Evaluate(fmt.Sprintf(js.TriggerInlineEventJS, tab.config.EventTriggerInterval.Seconds()*1000)) 173 | logger.Logger.Debug("triggerInlineEvents end") 174 | } 175 | 176 | /** 177 | 触发DOM2级事件 178 | */ 179 | func (tab *Tab) triggerDom2Events() { 180 | defer tab.loadedWG.Done() 181 | logger.Logger.Debug("triggerDom2Events start") 182 | tab.Evaluate(fmt.Sprintf(js.TriggerDom2EventJS, tab.config.EventTriggerInterval.Seconds()*1000)) 183 | logger.Logger.Debug("triggerDom2Events end") 184 | } 185 | 186 | /** 187 | a标签的href值为伪协议, 188 | */ 189 | func (tab *Tab) triggerJavascriptProtocol() { 190 | defer tab.loadedWG.Done() 191 | logger.Logger.Debug("clickATagJavascriptProtocol start") 192 | tab.Evaluate(fmt.Sprintf(js.TriggerJavascriptProtocol, tab.config.EventTriggerInterval.Seconds()*1000, 193 | tab.config.EventTriggerInterval.Seconds()*1000)) 194 | logger.Logger.Debug("clickATagJavascriptProtocol end") 195 | } 196 | 197 | /** 198 | 移除DOM节点变化监听 199 | */ 200 | func (tab *Tab) RemoveDOMListener() { 201 | defer tab.removeLis.Done() 202 | logger.Logger.Debug("RemoveDOMListener start") 203 | // 移除DOM节点变化监听 204 | tab.Evaluate(js.RemoveDOMListenerJS) 205 | logger.Logger.Debug("RemoveDOMListener end") 206 | } 207 | -------------------------------------------------------------------------------- /pkg/path_expansion.go: -------------------------------------------------------------------------------- 1 | package pkg 2 | 3 | import ( 4 | "crawlergo/pkg/config" 5 | "crawlergo/pkg/logger" 6 | model2 "crawlergo/pkg/model" 7 | "crawlergo/pkg/tools" 8 | "crawlergo/pkg/tools/requests" 9 | "fmt" 10 | "regexp" 11 | "strings" 12 | "sync" 13 | 14 | mapset "github.com/deckarep/golang-set" 15 | "github.com/panjf2000/ants/v2" 16 | ) 17 | 18 | const pathStr = "11/123/2017/2018/message/mis/model/abstract/account/act/action" + 19 | "/activity/ad/address/ajax/alarm/api/app/ar/attachment/auth/authority/award/back/backup/bak/base" + 20 | "/bbs/bbs1/cms/bd/gallery/game/gift/gold/bg/bin/blacklist/blog/bootstrap/brand/build/cache/caches" + 21 | 
"/caching/cacti/cake/captcha/category/cdn/ch/check/city/class/classes/classic/client/cluster" + 22 | "/collection/comment/commit/common/commons/components/conf/config/mysite/confs/console/consumer" + 23 | "/content/control/controllers/core/crontab/crud/css/daily/dashboard/data/database/db/default/demo" + 24 | "/dev/doc/download/duty/es/eva/examples/excel/export/ext/fe/feature/file/files/finance/flashchart" + 25 | "/follow/forum/frame/framework/ft/group/gss/hello/helper/helpers/history/home/hr/htdocs/html/hunter" + 26 | "/image/img11/import/improve/inc/include/includes/index/info/install/interface/item/jobconsume/jobs" + 27 | "/json/kindeditor/l/languages/lib/libraries/libs/link/lite/local/log/login/logs/mail/main" + 28 | "/maintenance/manage/manager/manufacturer/menus/models/modules/monitor/movie/mysql/n/nav/network" + 29 | "/news/notice/nw/oauth/other/page/pages/passport/pay/pcheck/people/person/php/phprpc" + 30 | "/phptest/picture/pl/platform/pm/portal/post/product/project/protected/proxy/ps/public/qq/question" + 31 | "/quote/redirect/redisclient/report/resource/resources/s/save/schedule/schema/script/scripts/search" + 32 | "/security/server/service/shell/show/simple/site/sites/skin/sms/soap/sola/sort/spider/sql/stat" + 33 | "/static/statistics/stats/submit/subways/survey/sv/syslog/system/tag/task/tasks/tcpdf/template" + 34 | "/templates/test/tests/ticket/tmp/token/tool/tools/top/tpl/txt/upload/uploadify/uploads/url/user" + 35 | "/util/v1/v2/vendor/view/views/web/weixin/widgets/wm/wordpress/workspace/ws/www/www2/wwwroot/zone" + 36 | "/admin/admin_bak/mobile/m/js" 37 | 38 | var pathFuzzWG sync.WaitGroup 39 | var validateUrl mapset.Set 40 | 41 | /** 42 | 从robots.txt文件中获取路径信息 43 | */ 44 | func GetPathsFromRobots(navReq model2.Request) []*model2.Request { 45 | logger.Logger.Info("starting to get paths from robots.txt.") 46 | var result []*model2.Request 47 | var urlFindRegex = regexp.MustCompile(`(?:Disallow|Allow):.*?(/.+)`) 48 | var urlRegex = regexp.MustCompile(`(/.+)`) 49 | 50 | navReq.URL.Path = "/" 51 | url := navReq.URL.NoQueryUrl() + "robots.txt" 52 | 53 | resp, err := requests.Get(url, tools.ConvertHeaders(navReq.Headers), 54 | &requests.ReqOptions{AllowRedirect: false, 55 | Timeout: 5, 56 | Proxy: navReq.Proxy}) 57 | if err != nil { 58 | //for 59 | //logger.Logger.Error("request to robots.txt error ", err) 60 | return result 61 | } 62 | 63 | if resp.StatusCode < 200 || resp.StatusCode >= 300 { 64 | return result 65 | } 66 | urlList := urlFindRegex.FindAllString(resp.Text, -1) 67 | for _, _url := range urlList { 68 | _url = strings.TrimSpace(_url) 69 | _url = urlRegex.FindString(_url) 70 | url, err := model2.GetUrl(_url, *navReq.URL) 71 | if err != nil { 72 | continue 73 | } 74 | req := model2.GetRequest(config.GET, url) 75 | req.Source = config.FromRobots 76 | result = append(result, &req) 77 | } 78 | return result 79 | } 80 | 81 | /** 82 | 使用常见路径列表进行fuzz 83 | */ 84 | func GetPathsByFuzz(navReq model2.Request) []*model2.Request { 85 | logger.Logger.Info("starting to get paths by fuzzing.") 86 | pathList := strings.Split(pathStr, "/") 87 | return doFuzz(navReq, pathList) 88 | } 89 | 90 | /** 91 | 使用字典列表进行fuzz 92 | */ 93 | func GetPathsByFuzzDict(navReq model2.Request, dictPath string) []*model2.Request { 94 | logger.Logger.Infof("starting to get dict path by fuzzing: %s", dictPath) 95 | pathList := tools.ReadFile(dictPath) 96 | logger.Logger.Debugf("valid path count: %d", len(pathList)) 97 | return doFuzz(navReq, pathList) 98 | } 99 | 100 | type singleFuzz struct { 101 | navReq 
101 |     navReq model2.Request
102 |     path   string
103 | }
104 | 
105 | func doFuzz(navReq model2.Request, pathList []string) []*model2.Request {
106 |     validateUrl = mapset.NewSet()
107 |     var result []*model2.Request
108 |     pool, _ := ants.NewPool(20)
109 |     defer pool.Release()
110 |     for _, path := range pathList {
111 |         path = strings.TrimPrefix(path, "/")
112 |         path = strings.TrimSuffix(path, "\n")
113 |         task := singleFuzz{
114 |             navReq: navReq,
115 |             path:   path,
116 |         }
117 |         pathFuzzWG.Add(1)
118 |         go func() {
119 |             err := pool.Submit(task.doRequest)
120 |             if err != nil {
121 |                 pathFuzzWG.Done()
122 |             }
123 |         }()
124 |     }
125 | 
126 |     pathFuzzWG.Wait()
127 |     for _, _url := range validateUrl.ToSlice() {
128 |         _url := _url.(string)
129 |         url, err := model2.GetUrl(_url)
130 |         if err != nil {
131 |             continue
132 |         }
133 |         req := model2.GetRequest(config.GET, url)
134 |         req.Source = config.FromFuzz
135 |         result = append(result, &req)
136 |     }
137 |     return result
138 | }
139 | 
140 | /**
141 | Probe a single path; record the URL if the response looks valid
142 | */
143 | func (s singleFuzz) doRequest() {
144 |     defer pathFuzzWG.Done()
145 | 
146 |     url := fmt.Sprintf(`%s://%s/%s`, s.navReq.URL.Scheme, s.navReq.URL.Host, s.path)
147 |     resp, errs := requests.Get(url, tools.ConvertHeaders(s.navReq.Headers),
148 |         &requests.ReqOptions{Timeout: 2, AllowRedirect: false, Proxy: s.navReq.Proxy})
149 |     if errs != nil {
150 |         return
151 |     }
152 |     if resp.StatusCode >= 200 && resp.StatusCode < 300 {
153 |         validateUrl.Add(url)
154 |     } else if resp.StatusCode == 301 {
155 |         locations := resp.Header["Location"]
156 |         if len(locations) == 0 {
157 |             return
158 |         }
159 |         location := locations[0]
160 |         redirectUrl, err := model2.GetUrl(location)
161 |         if err != nil {
162 |             return
163 |         }
164 |         if redirectUrl.Host == s.navReq.URL.Host {
165 |             validateUrl.Add(url)
166 |         }
167 |     }
168 | }
169 | 

--------------------------------------------------------------------------------
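A minimal sketch of calling the path-expansion helpers above from outside the package; both return requests tagged with their Source, so a caller can tell robots.txt hits from fuzz hits:

    package main

    import (
        "fmt"

        "crawlergo/pkg"
        "crawlergo/pkg/config"
        "crawlergo/pkg/model"
    )

    func main() {
        u, _ := model.GetUrl("http://testphp.vulnweb.com/")
        navReq := model.GetRequest(config.GET, u)

        for _, req := range pkg.GetPathsFromRobots(navReq) {
            fmt.Println(req.URL.String(), req.Source) // Source == config.FromRobots
        }
        for _, req := range pkg.GetPathsByFuzz(navReq) {
            fmt.Println(req.URL.String(), req.Source) // Source == config.FromFuzz
        }
    }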
/pkg/tools/requests/requests.go:
--------------------------------------------------------------------------------
1 | package requests
2 | 
3 | import (
4 |     "bytes"
5 |     "crawlergo/pkg/logger"
6 |     "crypto/tls"
7 |     "fmt"
8 |     "github.com/pkg/errors"
9 |     "net/http"
10 |     "net/url"
11 |     "strings"
12 |     "time"
13 | )
14 | 
15 | const DefaultUa = "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)" +
16 |     " Chrome/76.0.3809.132 Safari/537.36 C845D9D38B3A68F4F74057DB542AD252 tx/2.0"
17 | 
18 | const defaultTimeout int = 15
19 | 
20 | // Fetch at most the first 10K of a response, enough for the vast majority of cases
21 | const defaultResponseLength = 10240
22 | const defaultRetry = 0
23 | 
24 | var ContentTypes = map[string]string{
25 |     "json":      "application/json",
26 |     "xml":       "application/xml",
27 |     "soap":      "application/soap+xml",
28 |     "multipart": "multipart/form-data",
29 |     "form":      "application/x-www-form-urlencoded; charset=utf-8",
30 | }
31 | 
32 | // ReqInfo wraps the elements of an HTTP request, for making quick, simple requests
33 | type ReqInfo struct {
34 |     Verb    string
35 |     Url     string
36 |     Headers map[string]string
37 |     Body    []byte
38 | }
39 | 
40 | type ReqOptions struct {
41 |     Timeout       int    // in seconds
42 |     Retry         int    // 0 means the default, -1 disables retries
43 |     VerifySSL     bool   // default false
44 |     AllowRedirect bool   // default false
45 |     Proxy         string // proxy settings, support http/https proxy only, e.g. http://127.0.0.1:8080
46 | }
47 | 
48 | type session struct {
49 |     ReqOptions
50 |     client *http.Client
51 | }
52 | 
53 | // getSessionByOptions builds a session from the given options
54 | func getSessionByOptions(options *ReqOptions) *session {
55 |     if options == nil {
56 |         options = &ReqOptions{}
57 |     }
58 |     // configure the client's timeout and SSL verification
59 |     timeout := time.Duration(options.Timeout) * time.Second
60 |     if options.Timeout == 0 {
61 |         timeout = time.Duration(defaultTimeout) * time.Second
62 |     }
63 |     tr := &http.Transport{
64 |         TLSClientConfig: &tls.Config{InsecureSkipVerify: !options.VerifySSL},
65 |     }
66 |     if options.Proxy != "" {
67 |         proxyUrl, err := url.Parse(options.Proxy)
68 |         if err == nil {
69 |             tr.Proxy = http.ProxyURL(proxyUrl)
70 |         }
71 |     }
72 |     client := &http.Client{
73 |         Timeout:   timeout,
74 |         Transport: tr}
75 |     // configure whether redirects are followed
76 |     if !options.AllowRedirect {
77 |         client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
78 |             return http.ErrUseLastResponse
79 |         }
80 |     }
81 |     // copy the options into the session
82 |     return &session{
83 |         ReqOptions: ReqOptions{
84 |             options.Timeout,
85 |             options.Retry,
86 |             options.VerifySSL,
87 |             options.AllowRedirect,
88 |             options.Proxy,
89 |         },
90 |         client: client,
91 |     }
92 | }
93 | 
94 | // Get issues a GET request
95 | func Get(url string, headers map[string]string, options *ReqOptions) (*Response, error) {
96 |     sess := getSessionByOptions(options)
97 |     return sess.doRequest("GET", url, headers, nil)
98 | }
99 | 
100 | // Request issues a request with a custom verb
101 | func Request(verb string, url string, headers map[string]string, body []byte, options *ReqOptions) (*Response, error) {
102 |     sess := getSessionByOptions(options)
103 |     return sess.doRequest(verb, url, headers, body)
104 | }
105 | 
106 | // session functions
107 | 
108 | // Get issues a GET request on the session
109 | func (sess *session) Get(url string, headers map[string]string) (*Response, error) {
110 |     return sess.doRequest("GET", url, headers, nil)
111 | }
112 | 
113 | // Post issues a POST request on the session
114 | func (sess *session) Post(url string, headers map[string]string, body []byte) (*Response, error) {
115 |     return sess.doRequest("POST", url, headers, body)
116 | }
117 | 
118 | // Request issues a custom-verb request on the session
119 | func (sess *session) Request(verb string, url string, headers map[string]string, body []byte) (*Response, error) {
120 |     return sess.doRequest(verb, url, headers, body)
121 | }
122 | 
123 | // Request is a shortcut for sending a ReqInfo
124 | func (r *ReqInfo) Request() (*Response, error) {
125 |     return Request(r.Verb, r.Url, r.Headers, r.Body, nil)
126 | }
127 | 
128 | func (r *ReqInfo) RequestWithOptions(options *ReqOptions) (*Response, error) {
129 |     return Request(r.Verb, r.Url, r.Headers, r.Body, options)
130 | }
131 | 
132 | func (r *ReqInfo) Clone() *ReqInfo {
133 |     return &ReqInfo{
134 |         Verb:    r.Verb,
135 |         Url:     r.Url,
136 |         Headers: r.Headers,
137 |         Body:    r.Body,
138 |     }
139 | }
140 | 
141 | func (r *ReqInfo) SetHeader(name, value string) {
142 |     if r.Headers == nil {
143 |         r.Headers = make(map[string]string)
144 |     }
145 |     r.Headers[name] = value
146 | }
147 | 
148 | // doRequest performs the actual request
149 | func (sess *session) doRequest(verb string, url string, headers map[string]string, body []byte) (*Response, error) {
150 |     logger.Logger.Debug("do request to ", url)
151 |     verb = strings.ToUpper(verb)
152 |     bodyReader := bytes.NewReader(body)
153 |     req, err := http.NewRequest(verb, url, bodyReader)
154 |     if err != nil {
155 |         // most often the url contains a stray %
156 |         url = escapePercentSign(url)
157 |         req, err = http.NewRequest(verb, url, bodyReader)
158 |     }
159 |     if err != nil {
160 |         return nil, errors.Wrap(err, "build request error")
161 |     }
162 | 
163 |     // set the headers
164 |     for key, value := range headers {
165 |         req.Header.Set(key, value)
166 |     }
167 |     // set the default headers
168 |     defaultHeaders := map[string]string{
169 |         "User-Agent": DefaultUa,
170 |         "Range":      fmt.Sprintf("bytes=0-%d", defaultResponseLength),
171 |         "Connection": "close",
172 |     }
173 |     for key, value := range defaultHeaders {
174 |         if _, ok := headers[key]; !ok {
175 |             req.Header.Set(key, value)
176 |         }
177 |     }
178 |     // set the Host header
179 |     if host, ok := headers["Host"]; ok {
180 |         req.Host = host
181 |     }
182 |     // set the default Content-Type header
183 |     if verb == "POST" && headers["Content-Type"] == "" {
184 |         req.Header.Set("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")
185 |         // Referer, Origin and X-Requested-With should be set manually
186 |     }
187 |     // force the Connection header
188 |     req.Header.Set("Connection", "close")
189 | 
190 |     // configure the retry count
191 |     retry := sess.Retry
192 |     if retry == 0 {
193 |         retry = defaultRetry
194 |     } else if retry == -1 {
195 |         retry = 0
196 |     }
197 | 
198 |     // send the request
199 |     var resp *http.Response
200 |     for i := 0; i <= retry; i++ {
201 |         resp, err = sess.client.Do(req)
202 |         if err != nil {
203 |             // sleep 0.1s
204 |             time.Sleep(100 * time.Millisecond)
205 |             continue
206 |         } else {
207 |             break
208 |         }
209 |     }
210 | 
211 |     if err != nil {
212 |         return nil, errors.Wrap(err, "error occurred during request")
213 |     }
214 |     // With a Range header most web servers reply 206 PARTIAL CONTENT; normalize it to 200 OK
215 |     if resp.StatusCode == 206 {
216 |         resp.StatusCode = 200
217 |         resp.Status = "200 OK"
218 |     }
219 | 
220 |     return NewResponse(resp), nil
221 | }
222 | 

--------------------------------------------------------------------------------
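A short usage sketch of the requests wrapper defined above; the options mirror the ones path_expansion.go passes:

    package main

    import (
        "fmt"

        "crawlergo/pkg/tools/requests"
    )

    func main() {
        headers := map[string]string{"Cookie": "crawlergo=Cool"}
        // 2-second timeout, don't follow redirects
        resp, err := requests.Get("http://testphp.vulnweb.com/robots.txt", headers,
            &requests.ReqOptions{Timeout: 2, AllowRedirect: false})
        if err != nil {
            panic(err)
        }
        fmt.Println(resp.StatusCode) // 206 partial responses are normalized to 200
        fmt.Println(len(resp.Text))  // body capped by the default Range header
    }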
"build request error") 161 | } 162 | 163 | // 设置headers头 164 | for key, value := range headers { 165 | req.Header.Set(key, value) 166 | } 167 | // 设置默认的headers头 168 | defaultHeaders := map[string]string{ 169 | "User-Agent": DefaultUa, 170 | "Range": fmt.Sprintf("bytes=0-%d", defaultResponseLength), 171 | "Connection": "close", 172 | } 173 | for key, value := range defaultHeaders { 174 | if _, ok := headers[key]; !ok { 175 | req.Header.Set(key, value) 176 | } 177 | } 178 | // 设置Host头 179 | if host, ok := headers["Host"]; ok { 180 | req.Host = host 181 | } 182 | // 设置默认的Content-Type头 183 | if verb == "POST" && headers["Content-Type"] == "" { 184 | req.Header.Set("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8") 185 | // 应该手动设置Referer、Origin、和X-Requested-With字段 186 | } 187 | // 覆盖Connection头 188 | req.Header.Set("Connection", "close") 189 | 190 | // 设置重试次数 191 | retry := sess.Retry 192 | if retry == 0 { 193 | retry = defaultRetry 194 | } else if retry == -1 { 195 | retry = 0 196 | } 197 | 198 | // 请求 199 | var resp *http.Response 200 | for i := 0; i <= retry; i++ { 201 | resp, err = sess.client.Do(req) 202 | if err != nil { 203 | // sleep 0.1s 204 | time.Sleep(100 * time.Microsecond) 205 | continue 206 | } else { 207 | break 208 | } 209 | } 210 | 211 | if err != nil { 212 | return nil, errors.Wrap(err, "error occurred during request") 213 | } 214 | // 带Range头后一般webserver响应都是206 PARTIAL CONTENT,修正为200 OK 215 | if resp.StatusCode == 206 { 216 | resp.StatusCode = 200 217 | resp.Status = "200 OK" 218 | } 219 | 220 | return NewResponse(resp), nil 221 | } 222 | -------------------------------------------------------------------------------- /pkg/engine/after_dom_tasks.go: -------------------------------------------------------------------------------- 1 | package engine 2 | 3 | import ( 4 | "context" 5 | "crawlergo/pkg/config" 6 | "crawlergo/pkg/js" 7 | "crawlergo/pkg/logger" 8 | "github.com/chromedp/cdproto/cdp" 9 | "github.com/chromedp/chromedp" 10 | "os" 11 | "strings" 12 | "time" 13 | ) 14 | 15 | /** 16 | 在DOMContentLoaded完成后执行 17 | */ 18 | func (tab *Tab) AfterDOMRun() { 19 | defer tab.WG.Done() 20 | 21 | logger.Logger.Debug("afterDOMRun start") 22 | 23 | // 获取当前body节点的nodeId 用于之后查找子节点 24 | if !tab.getBodyNodeId() { 25 | logger.Logger.Debug("no body document NodeID, exit.") 26 | return 27 | } 28 | 29 | tab.domWG.Add(2) 30 | go tab.fillForm() 31 | go tab.setObserverJS() 32 | tab.domWG.Wait() 33 | logger.Logger.Debug("afterDOMRun end") 34 | tab.WG.Add(1) 35 | go tab.AfterLoadedRun() 36 | } 37 | 38 | /** 39 | 获取的Body的NodeId 用于之后子节点无等待查询 40 | 最多等待3秒 如果DOM依旧没有渲染完成,则退出 41 | */ 42 | func (tab *Tab) getBodyNodeId() bool { 43 | var docNodeIDs []cdp.NodeID 44 | ctx := tab.GetExecutor() 45 | tCtx, cancel := context.WithTimeout(ctx, time.Second*3) 46 | defer cancel() 47 | // 获取 Frame document root 48 | err := chromedp.NodeIDs(`body`, &docNodeIDs, chromedp.ByQuery).Do(tCtx) 49 | if len(docNodeIDs) == 0 || err != nil { 50 | // not root node yet? 
51 | logger.Logger.Debug("getBodyNodeId failed, maybe DOM not ready?") 52 | if err != nil { 53 | logger.Logger.Debug(err) 54 | } 55 | return false 56 | } 57 | tab.DocBodyNodeId = docNodeIDs[0] 58 | return true 59 | } 60 | 61 | /** 62 | 自动化填充表单 63 | */ 64 | func (tab *Tab) fillForm() { 65 | defer tab.domWG.Done() 66 | logger.Logger.Debug("fillForm start") 67 | tab.fillFormWG.Add(3) 68 | f := FillForm{ 69 | tab: tab, 70 | } 71 | 72 | go f.fillInput() 73 | go f.fillMultiSelect() 74 | go f.fillTextarea() 75 | 76 | tab.fillFormWG.Wait() 77 | logger.Logger.Debug("fillForm end") 78 | } 79 | 80 | /** 81 | 设置Dom节点变化的观察函数 82 | */ 83 | func (tab *Tab) setObserverJS() { 84 | defer tab.domWG.Done() 85 | logger.Logger.Debug("setObserverJS start") 86 | // 设置Dom节点变化的观察函数 87 | go tab.Evaluate(js.ObserverJS) 88 | logger.Logger.Debug("setObserverJS end") 89 | } 90 | 91 | type FillForm struct { 92 | tab *Tab 93 | } 94 | 95 | /** 96 | 填充所有 input 标签 97 | */ 98 | func (f *FillForm) fillInput() { 99 | defer f.tab.fillFormWG.Done() 100 | var nodes []*cdp.Node 101 | ctx := f.tab.GetExecutor() 102 | 103 | tCtx, cancel := context.WithTimeout(ctx, time.Second*2) 104 | defer cancel() 105 | // 首先判断input标签是否存在,减少等待时间 提前退出 106 | inputNodes, inputErr := f.tab.GetNodeIDs(`input`) 107 | if inputErr != nil || len(inputNodes) == 0 { 108 | logger.Logger.Debug("fillInput: get form input element err") 109 | if inputErr != nil { 110 | logger.Logger.Debug(inputErr) 111 | } 112 | return 113 | } 114 | // 获取所有的input标签 115 | err := chromedp.Nodes(`input`, &nodes, chromedp.ByQueryAll).Do(tCtx) 116 | 117 | if err != nil { 118 | logger.Logger.Debug("get all input element err") 119 | logger.Logger.Debug(err) 120 | return 121 | } 122 | 123 | // 找出 type 为空 或者 type=text 124 | for _, node := range nodes { 125 | // 兜底超时 126 | tCtxN, cancelN := context.WithTimeout(ctx, time.Second*5) 127 | attrType := node.AttributeValue("type") 128 | if attrType == "text" || attrType == "" { 129 | inputName := node.AttributeValue("id") + node.AttributeValue("class") + node.AttributeValue("name") 130 | value := f.GetMatchInputText(inputName) 131 | var nodeIds = []cdp.NodeID{node.NodeID} 132 | // 先使用模拟输入 133 | _ = chromedp.SendKeys(nodeIds, value, chromedp.ByNodeID).Do(tCtxN) 134 | // 再直接赋值JS属性 135 | _ = chromedp.SetAttributeValue(nodeIds, "value", value, chromedp.ByNodeID).Do(tCtxN) 136 | } else if attrType == "email" || attrType == "password" || attrType == "tel" { 137 | value := f.GetMatchInputText(attrType) 138 | var nodeIds = []cdp.NodeID{node.NodeID} 139 | // 先使用模拟输入 140 | _ = chromedp.SendKeys(nodeIds, value, chromedp.ByNodeID).Do(tCtxN) 141 | // 再直接赋值JS属性 142 | _ = chromedp.SetAttributeValue(nodeIds, "value", value, chromedp.ByNodeID).Do(tCtxN) 143 | } else if attrType == "radio" || attrType == "checkbox" { 144 | var nodeIds = []cdp.NodeID{node.NodeID} 145 | _ = chromedp.SetAttributeValue(nodeIds, "checked", "true", chromedp.ByNodeID).Do(tCtxN) 146 | } else if attrType == "file" || attrType == "image" { 147 | var nodeIds = []cdp.NodeID{node.NodeID} 148 | wd, _ := os.Getwd() 149 | filePath := wd + "/upload/image.png" 150 | _ = chromedp.RemoveAttribute(nodeIds, "accept", chromedp.ByNodeID).Do(tCtxN) 151 | _ = chromedp.RemoveAttribute(nodeIds, "required", chromedp.ByNodeID).Do(tCtxN) 152 | _ = chromedp.SendKeys(nodeIds, filePath, chromedp.ByNodeID).Do(tCtxN) 153 | } 154 | cancelN() 155 | } 156 | } 157 | 158 | func (f *FillForm) fillTextarea() { 159 | defer f.tab.fillFormWG.Done() 160 | ctx := f.tab.GetExecutor() 161 | tCtx, cancel := context.WithTimeout(ctx, 
time.Second*2) 162 | defer cancel() 163 | value := f.GetMatchInputText("other") 164 | 165 | textareaNodes, textareaErr := f.tab.GetNodeIDs(`textarea`) 166 | if textareaErr != nil || len(textareaNodes) == 0 { 167 | logger.Logger.Debug("fillTextarea: get textarea element err") 168 | if textareaErr != nil { 169 | logger.Logger.Debug(textareaErr) 170 | } 171 | return 172 | } 173 | 174 | _ = chromedp.SendKeys(textareaNodes, value, chromedp.ByNodeID).Do(tCtx) 175 | } 176 | 177 | func (f *FillForm) fillMultiSelect() { 178 | defer f.tab.fillFormWG.Done() 179 | ctx := f.tab.GetExecutor() 180 | tCtx, cancel := context.WithTimeout(ctx, time.Second*2) 181 | defer cancel() 182 | optionNodes, optionErr := f.tab.GetNodeIDs(`select option:first-child`) 183 | if optionErr != nil || len(optionNodes) == 0 { 184 | logger.Logger.Debug("fillMultiSelect: get select option element err") 185 | if optionErr != nil { 186 | logger.Logger.Debug(optionErr) 187 | } 188 | return 189 | } 190 | _ = chromedp.SetAttributeValue(optionNodes, "selected", "true", chromedp.ByNodeID).Do(tCtx) 191 | _ = chromedp.SetJavascriptAttribute(optionNodes, "selected", "true", chromedp.ByNodeID).Do(tCtx) 192 | } 193 | 194 | func (f *FillForm) GetMatchInputText(name string) string { 195 | // 如果自定义了关键词,模糊匹配 196 | for key, value := range f.tab.config.CustomFormKeywordValues { 197 | if strings.Contains(name, key) { 198 | return value 199 | } 200 | } 201 | 202 | name = strings.ToLower(name) 203 | for key, item := range config.InputTextMap { 204 | for _, keyword := range item["keyword"].([]string) { 205 | if strings.Contains(name, keyword) { 206 | if customValue, ok := f.tab.config.CustomFormValues[key]; ok { 207 | return customValue 208 | } else { 209 | return item["value"].(string) 210 | } 211 | } 212 | } 213 | } 214 | return f.tab.config.CustomFormValues["default"] 215 | } 216 | -------------------------------------------------------------------------------- /README_zh-cn.md: -------------------------------------------------------------------------------- 1 | # crawlergo 2 | 3 | ![chromedp](https://img.shields.io/badge/chromedp-v0.5.2-brightgreen.svg) [![BlackHat EU Arsenal](https://img.shields.io/badge/BlackHat%20Europe-2021%20Arsenal-blue.svg)](https://www.blackhat.com/eu-21/arsenal/schedule/index.html#crawlergo-a-powerful-browser-crawler-for-web-vulnerability-scanners-25113) 4 | 5 | > A powerful browser crawler for web vulnerability scanners 6 | 7 | [English Document](./README.md) | 中文文档 8 | 9 | crawlergo是一个使用`chrome headless`模式进行URL收集的浏览器爬虫。它对整个网页的关键位置与DOM渲染阶段进行HOOK,自动进行表单填充并提交,配合智能的JS事件触发,尽可能的收集网站暴露出的入口。内置URL去重模块,过滤掉了大量伪静态URL,对于大型网站仍保持较快的解析与抓取速度,最后得到高质量的请求结果集合。 10 | 11 | crawlergo 目前支持以下特性: 12 | 13 | * 原生浏览器环境,协程池调度任务 14 | * 表单智能填充、自动化提交 15 | * 完整DOM事件收集,自动化触发 16 | * 智能URL去重,去掉大部分的重复请求 17 | * 全面分析收集,包括javascript文件内容、页面注释、robots.txt文件和常见路径Fuzz 18 | * 支持Host绑定,自动添加Referer 19 | * 支持请求代理,支持爬虫结果主动推送 20 | 21 | ## 运行截图 22 | 23 | ![](./imgs/demo.gif) 24 | 25 | ## 安装 26 | 27 | **安装使用之前,请仔细阅读并确认[免责声明](./Disclaimer.md)。** 28 | 29 | **Build** 30 | 31 | ```shell 32 | cd crawlergo/cmd/crawlergo 33 | go build crawlergo_cmd.go 34 | ``` 35 | 36 | 1. crawlergo 只依赖chrome运行即可,前往[下载](https://www.chromium.org/getting-involved/download-chromium)新版本的chromium,或者直接[点击下载Linux79版本](https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/706915/chrome-linux.zip)。 37 | 2. 前往[页面下载](https://github.com/0Kee-Team/crawlergo/releases)最新版本的crawlergo解压到任意目录,如果是linux或者macOS系统,请赋予crawlergo**可执行权限(+x)**。 38 | 3. 
或者直接根据源码自行编译。 39 | 40 | > 如果你使用linux系统,运行时chrome提示缺少一些依赖组件,请看下方 Trouble Shooting 41 | 42 | ## Quick Start 43 | 44 | ### Go! 45 | 46 | 假设你的chromium安装在 `/tmp/chromium/` ,开启最大10标签页,爬取AWVS靶场: 47 | 48 | ```shell 49 | ./crawlergo -c /tmp/chromium/chrome -t 10 http://testphp.vulnweb.com/ 50 | ``` 51 | 52 | 53 | 54 | ### 使用代理 55 | 56 | ```shell 57 | ./crawlergo -c /tmp/chromium/chrome -t 10 --request-proxy socks5://127.0.0.1:7891 http://testphp.vulnweb.com/ 58 | ``` 59 | 60 | 61 | 62 | ### 系统调用 63 | 64 | 默认打印当前域名请求,但多数情况我们希望调用crawlergo返回的结果,所以设置输出模式为 `json`,使用python调用并收集结果的示例如下: 65 | 66 | ```python 67 | #!/usr/bin/python3 68 | # coding: utf-8 69 | 70 | import simplejson 71 | import subprocess 72 | 73 | 74 | def main(): 75 | target = "http://testphp.vulnweb.com/" 76 | cmd = ["./crawlergo", "-c", "/tmp/chromium/chrome", "-o", "json", target] 77 | rsp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 78 | output, error = rsp.communicate() 79 | # "--[Mission Complete]--" 是任务结束的分隔字符串 80 | result = simplejson.loads(output.decode().split("--[Mission Complete]--")[1]) 81 | req_list = result["req_list"] 82 | print(req_list[0]) 83 | 84 | 85 | if __name__ == '__main__': 86 | main() 87 | ``` 88 | 89 | ### 返回结果 90 | 91 | 当设置输出模式为 `json`时,返回的结果反序列化之后包含四个部分: 92 | 93 | * `all_req_list`: 本次爬取任务过程中发现的所有请求,包含其他域名的任何资源类型。 94 | * `req_list`:本次爬取任务的**同域名结果**,经过伪静态去重,不包含静态资源链接。理论上是 `all_req_list `的子集 95 | * `all_domain_list`:发现的所有域名列表。 96 | * `sub_domain_list`:发现的任务目标的子域名列表。 97 | 98 | 99 | 100 | ## 完整参数说明 101 | 102 | crawlergo 拥有灵活的参数配置,以下是详细的选项说明: 103 | 104 | * `--chromium-path Path, -c Path` chrome的可执行程序路径 105 | * `--custom-headers Headers` 自定义HTTP头,使用传入json序列化之后的数据,这个是全局定义,将被用于所有请求 106 | * `--post-data PostData, -d PostData` 提供POST数据,目标使用POST请求方法 107 | * `--max-crawled-count Number, -m Number` 爬虫最大任务数量,避免因伪静态造成长时间无意义抓取。 108 | * `--filter-mode Mode, -f Mode` 过滤模式,简单:只过滤静态资源和完全重复的请求。智能:拥有过滤伪静态的能力。严格:更加严格的伪静态过滤规则。 109 | * `--output-mode value, -o value` 结果输出模式,`console`:打印当前域名结果。`json`:打印所有结果的json序列化字符串,可直接被反序列化解析。`none`:不打印输出。 110 | * `--output-json filepath` 将爬虫结果JSON序列化之后写入到json文件。 111 | * `--incognito-context, -i` 浏览器启动隐身模式 112 | * `--max-tab-count Number, -t Number` 爬虫同时开启最大标签页,即同时爬取的页面数量。 113 | * `--fuzz-path` 使用常见路径Fuzz目标,获取更多入口。 114 | * `--fuzz-path-dict` 通过字典文件自定义Fuzz目录,传入字典文件路径,如:`/home/user/fuzz_dir.txt`,文件每行代表一个要fuzz的目录。 115 | * `--robots-path` 从 /robots.txt 文件中解析路径,获取更多入口。 116 | * `--request-proxy proxyAddress` 支持**socks5**代理,crawlergo和chrome浏览器的所有网络请求均经过代理发送。 117 | * `--tab-run-timeout Timeout` 单个Tab标签页的最大运行超时。 118 | * `--wait-dom-content-loaded-timeout Timeout` 爬虫等待页面加载完毕的最大超时。 119 | * `--event-trigger-interval Interval` 事件自动触发时的间隔时间,一般用于目标网络缓慢,DOM更新冲突时导致的URL漏抓。 120 | * `--event-trigger-mode Value` 事件自动触发的模式,分为异步和同步,用于DOM更新冲突时导致的URL漏抓。 121 | * `--before-exit-delay` 单个tab标签页任务结束时,延迟退出关闭chrome的时间,用于等待部分DOM更新和XHR请求的发起捕获。 122 | * `--ignore-url-keywords` 不想访问的URL关键字,一般用于在携带Cookie访问时排除注销链接。用法:`-iuk logout -iuk exit`。 123 | * `--form-values` 自定义表单填充的值,按照文本类型设置。支持定义类型:default, mail, code, phone, username, password, qq, id_card, url, date, number,文本类型通过输入框标签的`id`、`name`、`class`、`type`四个属性值关键字进行识别。如,定义邮箱输入框自动填充A,密码输入框自动填充B,`-fv mail=A -fv password=B`。其中default代表无法识别文本类型时的默认填充值,目前为Cralwergo。 124 | * `--form-keyword-values` 自定义表单填充的值,按照关键字模糊匹配设置。关键字匹配输入框标签的`id`、`name`、`class`、`type`四个属性值。如,模糊匹配pass关键词填充123456,user关键词填充admin,`-fkv user=admin -fkv pass=123456`。 125 | * `--push-to-proxy` 拟接收爬虫结果的监听地址,一般为被动扫描器的监听地址。 126 | * `--push-pool-max` 发送爬虫结果到监听地址时的最大并发数。 127 | * `--log-level` 打印日志等级,可选 debug, 
info, warn, error 和 fatal。 128 | * `--no-headless` 关闭chrome headless模式,可直观的看到爬虫过程。 129 | 130 | 131 | 132 | ## 使用举例 133 | 134 | crawlergo 返回了全量的请求和URL信息,可以有多种使用方法: 135 | 136 | * 联动其它的开源被动扫描器 137 | 138 | 首先,启动某被动扫描器,设置监听地址为:`http://127.0.0.1:1234/`。 139 | 140 | 接下来,假设crawlergo与扫描器在同一台机器,启动 crawlergo,设置参数: 141 | 142 | `--push-to-proxy http://127.0.0.1:1234/` 143 | 144 | * 子域名收集 example 145 | 146 | * 旁站入口收集 example 147 | 148 | * 结合celery实现分布式扫描 149 | 150 | * Host绑定设置(高版本chrome无法使用) [(查看例子)](https://github.com/0Kee-Team/crawlergo/blob/master/examples/host_binding.py) 151 | 152 | * 带Cookie扫描 [(查看例子)](https://github.com/0Kee-Team/crawlergo/blob/master/examples/request_with_cookie.py) 153 | 154 | * 调用crawlergo调用产生僵尸进程,定时清理 [(查看例子)](https://github.com/0Kee-Team/crawlergo/blob/master/examples/zombie_clean.py) , contributed by @ring04h 155 | 156 | ## Trouble Shooting 157 | 158 | * 'Fetch.enable' wasn't found 159 | 160 | Fetch是新版chrome支持的功能,如果出现此错误,说明你的版本较低,请升级chrome到最新版即可。 161 | 162 | * chrome运行提示缺少 xxx.so 等依赖 163 | 164 | ```shell 165 | // Ubuntu 166 | apt-get install -yq --no-install-recommends \ 167 | libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 \ 168 | libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 \ 169 | libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \ 170 | libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 libnss3 171 | 172 | // CentOS 7 173 | sudo yum install pango.x86_64 libXcomposite.x86_64 libXcursor.x86_64 libXdamage.x86_64 libXext.x86_64 libXi.x86_64 \ 174 | libXtst.x86_64 cups-libs.x86_64 libXScrnSaver.x86_64 libXrandr.x86_64 GConf2.x86_64 alsa-lib.x86_64 atk.x86_64 gtk3.x86_64 \ 175 | ipa-gothic-fonts xorg-x11-fonts-100dpi xorg-x11-fonts-75dpi xorg-x11-utils xorg-x11-fonts-cyrillic xorg-x11-fonts-Type1 xorg-x11-fonts-misc -y 176 | 177 | sudo yum update nss -y 178 | ``` 179 | 180 | 181 | * 运行提示**导航超时** / 浏览器无法找到 / 不知道正确的**浏览器可执行文件路径** 182 | 183 | 确认配置的浏览器可执行路径正确,在地址栏中输入:`chrome://version`,找到可执行程序文件路径: 184 | 185 | ![](./imgs/chrome_path.png) 186 | 187 | ## Bypass headless detect 188 | 189 | https://intoli.com/blog/not-possible-to-block-chrome-headless/chrome-headless-test.html 190 | 191 | ![](./imgs/bypass.png) 192 | 193 | 194 | ## Follow me 195 | 196 | 如果你有关于浏览器爬虫的想法,欢迎和我交流。 197 | 198 | 微博:[@9ian1i](https://weibo.com/u/5242748339) 199 | Github: [@9ian1i](https://github.com/Qianlitp) 200 | 201 | 相关文章:[漏扫动态爬虫实践](https://www.anquanke.com/post/id/178339) 202 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= 2 | github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= 3 | github.com/chromedp/cdproto v0.0.0-20191114225735-6626966fbae4 h1:QD3KxSJ59L2lxG6MXBjNHxiQO2RmxTQ3XcK+wO44WOg= 4 | github.com/chromedp/cdproto v0.0.0-20191114225735-6626966fbae4/go.mod h1:PfAWWKJqjlGFYJEidUM6aVIWPr0EpobeyVWEEmplX7g= 5 | github.com/chromedp/chromedp v0.5.2 h1:W8xBXQuUnd2dZK0SN/lyVwsQM7KgW+kY5HGnntms194= 6 | github.com/chromedp/chromedp v0.5.2/go.mod h1:rsTo/xRo23KZZwFmWk2Ui79rBaVRRATCjLzNQlOFSiA= 7 | github.com/clbanning/mxj v1.8.5-0.20200714211355-ff02cfb8ea28 h1:LdXxtjzvZYhhUaonAaAKArG3pyC67kGL3YY+6hGG8G4= 8 | github.com/clbanning/mxj v1.8.5-0.20200714211355-ff02cfb8ea28/go.mod h1:BVjHeAH+rl9rs6f+QIpeRl0tfu10SXn1pUSa5PVGJng= 9 | 
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= 10 | github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= 11 | github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= 12 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 13 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 14 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 15 | github.com/deckarep/golang-set v1.7.1 h1:SCQV0S6gTtp6itiFrTqI+pfmJ4LN85S1YzhDf9rTHJQ= 16 | github.com/deckarep/golang-set v1.7.1/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ= 17 | github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= 18 | github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= 19 | github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= 20 | github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= 21 | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= 22 | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= 23 | github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0= 24 | github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= 25 | github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8= 26 | github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= 27 | github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo= 28 | github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= 29 | github.com/gogf/gf v1.16.6 h1:Yp5YfwnGz41d1tiVqxcWXiPXyuzjTb7ax4SnPSXxDE8= 30 | github.com/gogf/gf v1.16.6/go.mod h1:4LoHfEBl2jbVmZpVx+qk2La3zWr1V315FtF2PVZuyQ8= 31 | github.com/gomodule/redigo v1.8.5 h1:nRAxCa+SVsyjSBrtZmG/cqb6VbTmuRzpg/PoTFlpumc= 32 | github.com/gomodule/redigo v1.8.5/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= 33 | github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= 34 | github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= 35 | github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= 36 | github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= 37 | github.com/grokify/html-strip-tags-go v0.0.0-20190921062105-daaa06bf1aaf h1:wIOAyJMMen0ELGiFzlmqxdcV1yGbkyHBAB6PolcNbLA= 38 | github.com/grokify/html-strip-tags-go v0.0.0-20190921062105-daaa06bf1aaf/go.mod h1:2Su6romC5/1VXOQMaWL2yb618ARB8iVo6/DR99A6d78= 39 | github.com/knq/sysutil v0.0.0-20191005231841-15668db23d08 h1:V0an7KRw92wmJysvFvtqtKMAPmvS5O0jtB0nYo6t+gs= 40 | github.com/knq/sysutil v0.0.0-20191005231841-15668db23d08/go.mod h1:dFWs1zEqDjFtnBXsd1vPOZaLsESovai349994nHx3e0= 41 | github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= 42 | github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= 43 | github.com/mailru/easyjson v0.7.0 h1:aizVhC/NAAcKWb+5QsU1iNOZb4Yws5UO2I+aIprQITM= 44 | github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= 45 | github.com/mattn/go-colorable 
v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= 46 | github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= 47 | github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= 48 | github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= 49 | github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= 50 | github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= 51 | github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= 52 | github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= 53 | github.com/panjf2000/ants/v2 v2.2.2 h1:TWzusBjq/IflXhy+/S6u5wmMLCBdJnB9tPIx9Zmhvok= 54 | github.com/panjf2000/ants/v2 v2.2.2/go.mod h1:1GFm8bV8nyCQvU5K4WvBCTG1/YBFOD2VzjffD8fV55A= 55 | github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= 56 | github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 57 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 58 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 59 | github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= 60 | github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= 61 | github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= 62 | github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= 63 | github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= 64 | github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= 65 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 66 | github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 67 | github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= 68 | github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= 69 | github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= 70 | github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 71 | github.com/urfave/cli/v2 v2.0.0 h1:+HU9SCbu8GnEUFtIBfuUNXN39ofWViIEJIp6SURMpCg= 72 | github.com/urfave/cli/v2 v2.0.0/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= 73 | go.opentelemetry.io/otel v1.0.0-RC2 h1:SHhxSjB+omnGZPgGlKe+QMp3MyazcOHdQ8qwo89oKbg= 74 | go.opentelemetry.io/otel v1.0.0-RC2/go.mod h1:w1thVQ7qbAy8MHb0IFj8a5Q2QU0l2ksf8u/CN8m3NOM= 75 | go.opentelemetry.io/otel/oteltest v1.0.0-RC2 h1:xNKqMhlZYkASSyvF4JwObZFMq0jhFN3c3SP+2rCzVPk= 76 | go.opentelemetry.io/otel/oteltest v1.0.0-RC2/go.mod h1:kiQ4tw5tAL4JLTbcOYwK1CWI1HkT5aiLzHovgOVnz/A= 77 | go.opentelemetry.io/otel/trace v1.0.0-RC2 h1:dunAP0qDULMIT82atj34m5RgvsIK6LcsXf1c/MsYg1w= 78 | go.opentelemetry.io/otel/trace v1.0.0-RC2/go.mod h1:JPQ+z6nNw9mqEGT8o3eoPTdnNI+Aj5JcxEsVGREIAy4= 79 | golang.org/x/net v0.0.0-20210520170846-37e1c6afe023 h1:ADo5wSpq2gqaCGQWzk7S5vd//0iyyLeAratkEoG5dLE= 80 | golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= 81 | golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 82 | golang.org/x/sys 
v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 83 | golang.org/x/sys v0.0.0-20191113165036-4c7a9d0fe056/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 84 | golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 85 | golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 86 | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 87 | golang.org/x/sys v0.0.0-20210423082822-04245dca01da h1:b3NXsE2LusjYGGjL5bxEVZZORm/YEFFrWFjR8eFrw/c= 88 | golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 89 | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= 90 | golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= 91 | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= 92 | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 93 | golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= 94 | golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 95 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 96 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 97 | gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 98 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 99 | gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= 100 | gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 101 | -------------------------------------------------------------------------------- /pkg/engine/intercept_request.go: -------------------------------------------------------------------------------- 1 | package engine 2 | 3 | import ( 4 | "bufio" 5 | "context" 6 | "crawlergo/pkg/config" 7 | "crawlergo/pkg/logger" 8 | model2 "crawlergo/pkg/model" 9 | "crawlergo/pkg/tools" 10 | "crawlergo/pkg/tools/requests" 11 | "encoding/base64" 12 | "github.com/chromedp/cdproto/fetch" 13 | "github.com/chromedp/cdproto/network" 14 | "io" 15 | "net/textproto" 16 | "regexp" 17 | "strconv" 18 | "strings" 19 | "time" 20 | ) 21 | 22 | /** 23 | 处理每一个HTTP请求 24 | */ 25 | func (tab *Tab) InterceptRequest(v *fetch.EventRequestPaused) { 26 | defer tab.WG.Done() 27 | ctx := tab.GetExecutor() 28 | _req := v.Request 29 | // 拦截到的URL格式一定正常 不处理错误 30 | url, err := model2.GetUrl(_req.URL, *tab.NavigateReq.URL) 31 | if err != nil { 32 | logger.Logger.Debug("InterceptRequest parse url failed: ", err) 33 | _ = fetch.ContinueRequest(v.RequestID).Do(ctx) 34 | return 35 | } 36 | _option := model2.Options{ 37 | Headers: _req.Headers, 38 | PostData: _req.PostData, 39 | } 40 | req := model2.GetRequest(_req.Method, url, _option) 41 | 42 | if IsIgnoredByKeywordMatch(req, tab.config.IgnoreKeywords) { 43 | _ = fetch.FailRequest(v.RequestID, network.ErrorReasonBlockedByClient).Do(ctx) 44 | req.Source = config.FromXHR 45 | tab.AddResultRequest(req) 46 | return 47 | } 48 | 49 | tab.HandleHostBinding(&req) 50 | 51 | // 静态资源 全部阻断 52 | for _, suffix := range 
config.StaticSuffix { 53 | if strings.HasSuffix(strings.ToLower(url.Path), suffix) { 54 | _ = fetch.FailRequest(v.RequestID, network.ErrorReasonBlockedByClient).Do(ctx) 55 | req.Source = config.FromStaticRes 56 | tab.AddResultRequest(req) 57 | return 58 | } 59 | } 60 | 61 | // 处理导航请求 62 | if tab.IsNavigatorRequest(v.NetworkID.String()) { 63 | tab.NavNetworkID = v.NetworkID.String() 64 | tab.HandleNavigationReq(&req, v) 65 | req.Source = config.FromNavigation 66 | tab.AddResultRequest(req) 67 | return 68 | } 69 | 70 | req.Source = config.FromXHR 71 | tab.AddResultRequest(req) 72 | _ = fetch.ContinueRequest(v.RequestID).Do(ctx) 73 | } 74 | 75 | /** 76 | 判断是否为导航请求 77 | */ 78 | func (tab *Tab) IsNavigatorRequest(networkID string) bool { 79 | return networkID == tab.LoaderID 80 | } 81 | 82 | /** 83 | 处理 401 407 认证弹窗 84 | */ 85 | func (tab *Tab) HandleAuthRequired(req *fetch.EventAuthRequired) { 86 | defer tab.WG.Done() 87 | logger.Logger.Debug("auth required found, auto auth.") 88 | ctx := tab.GetExecutor() 89 | authRes := fetch.AuthChallengeResponse{ 90 | Response: fetch.AuthChallengeResponseResponseProvideCredentials, 91 | Username: "Crawlergo", 92 | Password: "Crawlergo", 93 | } 94 | // 取消认证 95 | _ = fetch.ContinueWithAuth(req.RequestID, &authRes).Do(ctx) 96 | } 97 | 98 | /** 99 | 处理导航请求 100 | */ 101 | func (tab *Tab) HandleNavigationReq(req *model2.Request, v *fetch.EventRequestPaused) { 102 | navReq := tab.NavigateReq 103 | ctx := tab.GetExecutor() 104 | tCtx, cancel := context.WithTimeout(ctx, time.Second*5) 105 | defer cancel() 106 | overrideReq := fetch.ContinueRequest(v.RequestID).WithURL(req.URL.String()) 107 | 108 | // 处理后端重定向请求 109 | if tab.FoundRedirection && tab.IsTopFrame(v.FrameID.String()) { 110 | logger.Logger.Debug("redirect navigation req: " + req.URL.String()) 111 | //_ = fetch.FailRequest(v.RequestID, network.ErrorReasonConnectionAborted).Do(ctx) 112 | body := base64.StdEncoding.EncodeToString([]byte(`Crawlergo`)) 113 | param := fetch.FulfillRequest(v.RequestID, 200).WithBody(body) 114 | err := param.Do(ctx) 115 | if err != nil { 116 | logger.Logger.Debug(err) 117 | } 118 | navReq.RedirectionFlag = true 119 | navReq.Source = config.FromNavigation 120 | tab.AddResultRequest(navReq) 121 | // 处理重定向标记 122 | } else if navReq.RedirectionFlag && tab.IsTopFrame(v.FrameID.String()) { 123 | navReq.RedirectionFlag = false 124 | logger.Logger.Debug("has redirection_flag: " + req.URL.String()) 125 | headers := tools.ConvertHeaders(req.Headers) 126 | headers["Range"] = "bytes=0-1048576" 127 | res, err := requests.Request(req.Method, req.URL.String(), headers, []byte(req.PostData), &requests.ReqOptions{ 128 | AllowRedirect: false, Proxy: tab.config.Proxy}) 129 | if err != nil { 130 | logger.Logger.Debug(err) 131 | _ = fetch.FailRequest(v.RequestID, network.ErrorReasonConnectionAborted).Do(ctx) 132 | return 133 | } 134 | body := base64.StdEncoding.EncodeToString([]byte(res.Text)) 135 | param := fetch.FulfillRequest(v.RequestID, 200).WithResponseHeaders(ConvertHeadersNoLocation(res.Header)).WithBody(body) 136 | errR := param.Do(ctx) 137 | if errR != nil { 138 | logger.Logger.Debug(errR) 139 | } 140 | // 主导航请求 141 | } else if tab.IsTopFrame(v.FrameID.String()) && req.URL.NavigationUrl() == navReq.URL.NavigationUrl() { 142 | logger.Logger.Debug("main navigation req: " + navReq.URL.String()) 143 | // 手动设置POST信息 144 | if navReq.Method == config.POST || navReq.Method == config.PUT { 145 | overrideReq = overrideReq.WithPostData(navReq.PostData) 146 | } 147 | overrideReq = 
overrideReq.WithMethod(navReq.Method) 148 | overrideReq = overrideReq.WithHeaders(MergeHeaders(navReq.Headers, req.Headers)) 149 | _ = overrideReq.Do(tCtx) 150 | // 子frame的导航 151 | } else if !tab.IsTopFrame(v.FrameID.String()) { 152 | _ = overrideReq.Do(tCtx) 153 | // 前端跳转 返回204 154 | } else { 155 | _ = fetch.FulfillRequest(v.RequestID, 204).Do(ctx) 156 | } 157 | } 158 | 159 | /** 160 | 处理Host绑定 161 | */ 162 | func (tab *Tab) HandleHostBinding(req *model2.Request) { 163 | url := req.URL 164 | navUrl := tab.NavigateReq.URL 165 | // 导航请求的域名和HOST绑定中的域名不同,且当前请求的domain和导航请求header中的Host相同,则替换当前请求的domain并绑定Host 166 | if host, ok := tab.NavigateReq.Headers["Host"]; ok { 167 | if navUrl.Hostname() != host && url.Host == host { 168 | urlObj, _ := model2.GetUrl(strings.Replace(req.URL.String(), "://"+url.Hostname(), "://"+navUrl.Hostname(), -1), *navUrl) 169 | req.URL = urlObj 170 | req.Headers["Host"] = host 171 | 172 | } else if navUrl.Hostname() != host && url.Host == navUrl.Host { 173 | req.Headers["Host"] = host 174 | } 175 | // 修正Origin 176 | if _, ok := req.Headers["Origin"]; ok { 177 | req.Headers["Origin"] = strings.Replace(req.Headers["Origin"].(string), navUrl.Host, host.(string), 1) 178 | } 179 | // 修正Referer 180 | if _, ok := req.Headers["Referer"]; ok { 181 | req.Headers["Referer"] = strings.Replace(req.Headers["Referer"].(string), navUrl.Host, host.(string), 1) 182 | } else { 183 | req.Headers["Referer"] = strings.Replace(navUrl.String(), navUrl.Host, host.(string), 1) 184 | } 185 | } 186 | } 187 | 188 | func (tab *Tab) IsTopFrame(FrameID string) bool { 189 | return FrameID == tab.TopFrameId 190 | } 191 | 192 | /** 193 | 解析响应内容中的URL 使用正则匹配 194 | */ 195 | func (tab *Tab) ParseResponseURL(v *network.EventResponseReceived) { 196 | defer tab.WG.Done() 197 | ctx := tab.GetExecutor() 198 | res, err := network.GetResponseBody(v.RequestID).Do(ctx) 199 | if err != nil { 200 | logger.Logger.Debug("ParseResponseURL ", err) 201 | return 202 | } 203 | resStr := string(res) 204 | 205 | urlRegex := regexp.MustCompile(config.SuspectURLRegex) 206 | urlList := urlRegex.FindAllString(resStr, -1) 207 | for _, url := range urlList { 208 | 209 | url = url[1 : len(url)-1] 210 | url_lower := strings.ToLower(url) 211 | if strings.HasPrefix(url_lower, "image/x-icon") || strings.HasPrefix(url_lower, "text/css") || strings.HasPrefix(url_lower, "text/javascript") { 212 | continue 213 | } 214 | 215 | tab.AddResultUrl(config.GET, url, config.FromJSFile) 216 | } 217 | } 218 | 219 | func (tab *Tab) HandleRedirectionResp(v *network.EventResponseReceivedExtraInfo) { 220 | defer tab.WG.Done() 221 | statusCode := tab.GetStatusCode(v.HeadersText) 222 | // 导航请求,且返回重定向 223 | if 300 <= statusCode && statusCode < 400 { 224 | logger.Logger.Debug("set redirect flag.") 225 | tab.FoundRedirection = true 226 | } 227 | } 228 | 229 | func (tab *Tab) GetContentCharset(v *network.EventResponseReceived) { 230 | defer tab.WG.Done() 231 | var getCharsetRegex = regexp.MustCompile("charset=(.+)$") 232 | for key, value := range v.Response.Headers { 233 | if key == "Content-Type" { 234 | value := value.(string) 235 | if strings.Contains(value, "charset") { 236 | value = getCharsetRegex.FindString(value) 237 | value = strings.ToUpper(strings.Replace(value, "charset=", "", -1)) 238 | tab.PageCharset = value 239 | tab.PageCharset = strings.TrimSpace(tab.PageCharset) 240 | } 241 | } 242 | } 243 | } 244 | 245 | func (tab *Tab) GetStatusCode(headerText string) int { 246 | rspInput := strings.NewReader(headerText) 247 | rspBuf := 
bufio.NewReader(rspInput) 248 | tp := textproto.NewReader(rspBuf) 249 | line, err := tp.ReadLine() 250 | if err != nil { 251 | if err == io.EOF { 252 | err = io.ErrUnexpectedEOF 253 | } 254 | return 0 255 | } 256 | parts := strings.Split(line, " ") 257 | if len(parts) < 3 { 258 | return 0 259 | } 260 | code, _ := strconv.Atoi(parts[1]) 261 | return code 262 | } 263 | 264 | func MergeHeaders(navHeaders map[string]interface{}, headers map[string]interface{}) []*fetch.HeaderEntry { 265 | var mergedHeaders []*fetch.HeaderEntry 266 | for key, value := range navHeaders { 267 | if _, ok := headers[key]; !ok { 268 | var header fetch.HeaderEntry 269 | header.Name = key 270 | header.Value = value.(string) 271 | mergedHeaders = append(mergedHeaders, &header) 272 | } 273 | } 274 | 275 | for key, value := range headers { 276 | var header fetch.HeaderEntry 277 | header.Name = key 278 | header.Value = value.(string) 279 | mergedHeaders = append(mergedHeaders, &header) 280 | } 281 | return mergedHeaders 282 | } 283 | 284 | func ConvertHeadersNoLocation(h map[string][]string) []*fetch.HeaderEntry { 285 | var headers []*fetch.HeaderEntry 286 | for key, value := range h { 287 | if key == "Location" { 288 | continue 289 | } 290 | var header fetch.HeaderEntry 291 | header.Name = key 292 | header.Value = value[0] 293 | headers = append(headers, &header) 294 | } 295 | return headers 296 | } 297 | -------------------------------------------------------------------------------- /pkg/task_main.go: -------------------------------------------------------------------------------- 1 | package pkg 2 | 3 | import ( 4 | "crawlergo/pkg/config" 5 | engine2 "crawlergo/pkg/engine" 6 | filter2 "crawlergo/pkg/filter" 7 | "crawlergo/pkg/logger" 8 | "crawlergo/pkg/model" 9 | "encoding/json" 10 | "sync" 11 | "time" 12 | 13 | "github.com/panjf2000/ants/v2" 14 | ) 15 | 16 | type CrawlerTask struct { 17 | Browser *engine2.Browser // 18 | RootDomain string // 当前爬取根域名 用于子域名收集 19 | Targets []*model.Request // 输入目标 20 | Result *Result // 最终结果 21 | Config *TaskConfig // 配置信息 22 | smartFilter filter2.SmartFilter // 过滤对象 23 | Pool *ants.Pool // 协程池 24 | taskWG sync.WaitGroup // 等待协程池所有任务结束 25 | crawledCount int // 爬取过的数量 26 | taskCountLock sync.Mutex // 已爬取的任务总数锁 27 | } 28 | 29 | type Result struct { 30 | ReqList []*model.Request // 返回的同域名结果 31 | AllReqList []*model.Request // 所有域名的请求 32 | AllDomainList []string // 所有域名列表 33 | SubDomainList []string // 子域名列表 34 | resultLock sync.Mutex // 合并结果时加锁 35 | } 36 | 37 | type TaskConfig struct { 38 | MaxCrawlCount int // 最大爬取的数量 39 | FilterMode string // simple、smart、strict 40 | ExtraHeaders map[string]interface{} 41 | ExtraHeadersString string 42 | AllDomainReturn bool // 全部域名收集 43 | SubDomainReturn bool // 子域名收集 44 | IncognitoContext bool // 开启隐身模式 45 | NoHeadless bool // headless模式 46 | DomContentLoadedTimeout time.Duration 47 | TabRunTimeout time.Duration // 单个标签页超时 48 | PathByFuzz bool // 通过字典进行Path Fuzz 49 | FuzzDictPath string //Fuzz目录字典 50 | PathFromRobots bool // 解析Robots文件找出路径 51 | MaxTabsCount int // 允许开启的最大标签页数量 即同时爬取的数量 52 | ChromiumPath string // Chromium的程序路径 `/home/zhusiyu1/chrome-linux/chrome` 53 | EventTriggerMode string // 事件触发的调用方式: 异步 或 顺序 54 | EventTriggerInterval time.Duration // 事件触发的间隔 55 | BeforeExitDelay time.Duration // 退出前的等待时间,等待DOM渲染,等待XHR发出捕获 56 | EncodeURLWithCharset bool // 使用检测到的字符集自动编码URL 57 | IgnoreKeywords []string // 忽略的关键字,匹配上之后将不再扫描且不发送请求 58 | Proxy string // 请求代理 59 | CustomFormValues map[string]string // 自定义表单填充参数 60 | CustomFormKeywordValues 
map[string]string // 自定义表单关键词填充内容 61 | } 62 | 63 | type tabTask struct { 64 | crawlerTask *CrawlerTask 65 | browser *engine2.Browser 66 | req *model.Request 67 | pool *ants.Pool 68 | } 69 | 70 | /** 71 | 新建爬虫任务 72 | */ 73 | func NewCrawlerTask(targets []*model.Request, taskConf TaskConfig) (*CrawlerTask, error) { 74 | crawlerTask := CrawlerTask{ 75 | Result: &Result{}, 76 | Config: &taskConf, 77 | smartFilter: filter2.SmartFilter{ 78 | SimpleFilter: filter2.SimpleFilter{ 79 | HostLimit: targets[0].URL.Host, 80 | }, 81 | }, 82 | } 83 | 84 | if len(targets) == 1 { 85 | _newReq := *targets[0] 86 | newReq := &_newReq 87 | _newURL := *_newReq.URL 88 | newReq.URL = &_newURL 89 | if targets[0].URL.Scheme == "http" { 90 | newReq.URL.Scheme = "https" 91 | } else { 92 | newReq.URL.Scheme = "http" 93 | } 94 | targets = append(targets, newReq) 95 | } 96 | crawlerTask.Targets = targets[:] 97 | 98 | for _, req := range targets { 99 | req.Source = config.FromTarget 100 | } 101 | 102 | if taskConf.TabRunTimeout == 0 { 103 | taskConf.TabRunTimeout = config.TabRunTimeout 104 | } 105 | 106 | if taskConf.MaxTabsCount == 0 { 107 | taskConf.MaxTabsCount = config.MaxTabsCount 108 | } 109 | 110 | if taskConf.FilterMode == config.StrictFilterMode { 111 | crawlerTask.smartFilter.StrictMode = true 112 | } 113 | 114 | if taskConf.MaxCrawlCount == 0 { 115 | taskConf.MaxCrawlCount = config.MaxCrawlCount 116 | } 117 | 118 | if taskConf.DomContentLoadedTimeout == 0 { 119 | taskConf.DomContentLoadedTimeout = config.DomContentLoadedTimeout 120 | } 121 | 122 | if taskConf.EventTriggerInterval == 0 { 123 | taskConf.EventTriggerInterval = config.EventTriggerInterval 124 | } 125 | 126 | if taskConf.BeforeExitDelay == 0 { 127 | taskConf.BeforeExitDelay = config.BeforeExitDelay 128 | } 129 | 130 | if taskConf.EventTriggerMode == "" { 131 | taskConf.EventTriggerMode = config.DefaultEventTriggerMode 132 | } 133 | 134 | if len(taskConf.IgnoreKeywords) == 0 { 135 | taskConf.IgnoreKeywords = config.DefaultIgnoreKeywords 136 | } 137 | 138 | if taskConf.ExtraHeadersString != "" { 139 | err := json.Unmarshal([]byte(taskConf.ExtraHeadersString), &taskConf.ExtraHeaders) 140 | if err != nil { 141 | logger.Logger.Error("custom headers can't be Unmarshal.") 142 | return nil, err 143 | } 144 | } 145 | 146 | crawlerTask.Browser = engine2.InitBrowser(taskConf.ChromiumPath, taskConf.IncognitoContext, taskConf.ExtraHeaders, taskConf.Proxy, taskConf.NoHeadless) 147 | crawlerTask.RootDomain = targets[0].URL.RootDomain() 148 | 149 | crawlerTask.smartFilter.Init() 150 | 151 | // 创建协程池 152 | p, _ := ants.NewPool(taskConf.MaxTabsCount) 153 | crawlerTask.Pool = p 154 | 155 | return &crawlerTask, nil 156 | } 157 | 158 | /** 159 | 根据请求列表生成tabTask协程任务列表 160 | */ 161 | func (t *CrawlerTask) generateTabTask(req *model.Request) *tabTask { 162 | task := tabTask{ 163 | crawlerTask: t, 164 | browser: t.Browser, 165 | req: req, 166 | } 167 | return &task 168 | } 169 | 170 | /** 171 | 开始当前任务 172 | */ 173 | func (t *CrawlerTask) Run() { 174 | defer t.Pool.Release() // 释放协程池 175 | defer t.Browser.Close() // 关闭浏览器 176 | 177 | if t.Config.PathFromRobots { 178 | reqsFromRobots := GetPathsFromRobots(*t.Targets[0]) 179 | logger.Logger.Info("get paths from robots.txt: ", len(reqsFromRobots)) 180 | t.Targets = append(t.Targets, reqsFromRobots...) 
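// Each path parsed from /robots.txt becomes an extra seed target here; like every other target, these seeds still pass through the smart filter below before a tab is opened for them.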
181 | } 182 | 183 | if t.Config.FuzzDictPath != "" { 184 | if t.Config.PathByFuzz { 185 | logger.Logger.Warn("`--fuzz-path` is ignored, using `--fuzz-path-dict` instead") 186 | } 187 | reqsByFuzz := GetPathsByFuzzDict(*t.Targets[0], t.Config.FuzzDictPath) 188 | t.Targets = append(t.Targets, reqsByFuzz...) 189 | } else if t.Config.PathByFuzz { 190 | reqsByFuzz := GetPathsByFuzz(*t.Targets[0]) 191 | logger.Logger.Info("get paths by fuzzing: ", len(reqsByFuzz)) 192 | t.Targets = append(t.Targets, reqsByFuzz...) 193 | } 194 | 195 | t.Result.AllReqList = t.Targets[:] 196 | 197 | var initTasks []*model.Request 198 | for _, req := range t.Targets { 199 | if t.smartFilter.DoFilter(req) { 200 | logger.Logger.Debugf("filter req: " + req.URL.RequestURI()) 201 | continue 202 | } 203 | initTasks = append(initTasks, req) 204 | t.Result.ReqList = append(t.Result.ReqList, req) 205 | } 206 | logger.Logger.Info("filter repeat, target count: ", len(initTasks)) 207 | 208 | for _, req := range initTasks { 209 | if !engine2.IsIgnoredByKeywordMatch(*req, t.Config.IgnoreKeywords) { 210 | t.addTask2Pool(req) 211 | } 212 | } 213 | 214 | t.taskWG.Wait() 215 | 216 | // 对全部请求进行唯一去重 217 | todoFilterAll := make([]*model.Request, len(t.Result.AllReqList)) 218 | for index := range t.Result.AllReqList { 219 | todoFilterAll[index] = t.Result.AllReqList[index] 220 | } 221 | 222 | t.Result.AllReqList = []*model.Request{} 223 | var simpleFilter filter2.SimpleFilter 224 | for _, req := range todoFilterAll { 225 | if !simpleFilter.UniqueFilter(req) { 226 | t.Result.AllReqList = append(t.Result.AllReqList, req) 227 | } 228 | } 229 | 230 | // 全部域名 231 | t.Result.AllDomainList = AllDomainCollect(t.Result.AllReqList) 232 | // 子域名 233 | t.Result.SubDomainList = SubDomainCollect(t.Result.AllReqList, t.RootDomain) 234 | } 235 | 236 | /** 237 | 添加任务到协程池 238 | 添加之前实时过滤 239 | */ 240 | func (t *CrawlerTask) addTask2Pool(req *model.Request) { 241 | t.taskCountLock.Lock() 242 | if t.crawledCount >= t.Config.MaxCrawlCount { 243 | t.taskCountLock.Unlock() 244 | return 245 | } else { 246 | t.crawledCount += 1 247 | } 248 | t.taskCountLock.Unlock() 249 | 250 | t.taskWG.Add(1) 251 | task := t.generateTabTask(req) 252 | go func() { 253 | err := t.Pool.Submit(task.Task) 254 | if err != nil { 255 | t.taskWG.Done() 256 | logger.Logger.Error("addTask2Pool ", err) 257 | } 258 | }() 259 | } 260 | 261 | /** 262 | 单个运行的tab标签任务,实现了workpool的接口 263 | */ 264 | func (t *tabTask) Task() { 265 | defer t.crawlerTask.taskWG.Done() 266 | tab := engine2.NewTab(t.browser, *t.req, engine2.TabConfig{ 267 | TabRunTimeout: t.crawlerTask.Config.TabRunTimeout, 268 | DomContentLoadedTimeout: t.crawlerTask.Config.DomContentLoadedTimeout, 269 | EventTriggerMode: t.crawlerTask.Config.EventTriggerMode, 270 | EventTriggerInterval: t.crawlerTask.Config.EventTriggerInterval, 271 | BeforeExitDelay: t.crawlerTask.Config.BeforeExitDelay, 272 | EncodeURLWithCharset: t.crawlerTask.Config.EncodeURLWithCharset, 273 | IgnoreKeywords: t.crawlerTask.Config.IgnoreKeywords, 274 | CustomFormValues: t.crawlerTask.Config.CustomFormValues, 275 | CustomFormKeywordValues: t.crawlerTask.Config.CustomFormKeywordValues, 276 | }) 277 | tab.Start() 278 | 279 | // 收集结果 280 | t.crawlerTask.Result.resultLock.Lock() 281 | t.crawlerTask.Result.AllReqList = append(t.crawlerTask.Result.AllReqList, tab.ResultList...) 
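// Still inside resultLock: AllReqList is shared by every tab task running concurrently in the ants pool, so this merge has to stay in the critical section.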
282 | t.crawlerTask.Result.resultLock.Unlock() 283 | 284 | for _, req := range tab.ResultList { 285 | if t.crawlerTask.Config.FilterMode == config.SimpleFilterMode { 286 | if !t.crawlerTask.smartFilter.SimpleFilter.DoFilter(req) { 287 | t.crawlerTask.Result.resultLock.Lock() 288 | t.crawlerTask.Result.ReqList = append(t.crawlerTask.Result.ReqList, req) 289 | t.crawlerTask.Result.resultLock.Unlock() 290 | if !engine2.IsIgnoredByKeywordMatch(*req, t.crawlerTask.Config.IgnoreKeywords) { 291 | t.crawlerTask.addTask2Pool(req) 292 | } 293 | } 294 | } else { 295 | if !t.crawlerTask.smartFilter.DoFilter(req) { 296 | t.crawlerTask.Result.resultLock.Lock() 297 | t.crawlerTask.Result.ReqList = append(t.crawlerTask.Result.ReqList, req) 298 | t.crawlerTask.Result.resultLock.Unlock() 299 | if !engine2.IsIgnoredByKeywordMatch(*req, t.crawlerTask.Config.IgnoreKeywords) { 300 | t.crawlerTask.addTask2Pool(req) 301 | } 302 | } 303 | } 304 | } 305 | } 306 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # crawlergo 2 | 3 | ![chromedp](https://img.shields.io/badge/chromedp-v0.5.2-brightgreen.svg) [![BlackHat EU Arsenal](https://img.shields.io/badge/BlackHat%20Europe-2021%20Arsenal-blue.svg)](https://www.blackhat.com/eu-21/arsenal/schedule/index.html#crawlergo-a-powerful-browser-crawler-for-web-vulnerability-scanners-25113) 4 | 5 | > A powerful browser crawler for web vulnerability scanners 6 | 7 | English Document | [中文文档](./README_zh-cn.md) 8 | 9 | crawlergo is a browser crawler that uses `chrome headless` mode for URL collection. It hooks key positions across the whole web page and the DOM rendering stage, automatically fills and submits forms, intelligently triggers JS events, and collects as many entries exposed by the website as possible. The built-in URL de-duplication module filters out a large number of pseudo-static URLs, maintains fast parsing and crawling speed even on large websites, and finally produces a high-quality collection of request results. 10 | 11 | crawlergo currently supports the following features: 12 | * chrome browser environment rendering 13 | * Intelligent form filling, automated submission 14 | * Full DOM event collection with automated triggering 15 | * Smart URL de-duplication to remove most duplicate requests 16 | * Intelligent analysis of web pages and collection of URLs, including javascript file content, page comments, robots.txt files and automatic Fuzz of common paths 17 | * Support Host binding, automatically fix and add Referer 18 | * Support browser request proxy 19 | * Support pushing the results to passive web vulnerability scanners 20 | 21 | ## Screenshot 22 | 23 | ![](./imgs/demo.gif) 24 | 25 | ## Installation 26 | 27 | **Please read and confirm the [disclaimer](./Disclaimer.md) carefully before installing and using.** 28 | 29 | **Build** 30 | 31 | ```shell 32 | cd crawlergo/cmd/crawlergo 33 | go build crawlergo_cmd.go 34 | ``` 35 | 36 | 1. crawlergo only requires a chrome environment to run; go to [download](https://www.chromium.org/getting-involved/download-chromium) for a recent version of chromium, or just [click to download Linux version 79](https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/706915/chrome-linux.zip). 37 | 2. Go to the [download page](https://github.com/0Kee-Team/crawlergo/releases) for the latest version of crawlergo and extract it to any directory.
If you are on linux or macOS, please give crawlergo **executable permissions (+x)**. 38 | 3. Or you can modify the code and build it yourself. 39 | 40 | > If you are using a linux system and chrome prompts you with missing dependencies, please see TroubleShooting below 41 | 42 | ## Quick Start 43 | ### Go! 44 | 45 | Assuming your chromium installation directory is `/tmp/chromium/`, open up to 10 tabs at the same time and crawl `testphp.vulnweb.com`: 46 | 47 | ```shell 48 | ./crawlergo -c /tmp/chromium/chrome -t 10 http://testphp.vulnweb.com/ 49 | ``` 50 | 51 | 52 | ### Using Proxy 53 | 54 | ```shell 55 | ./crawlergo -c /tmp/chromium/chrome -t 10 --request-proxy socks5://127.0.0.1:7891 http://testphp.vulnweb.com/ 56 | ``` 57 | 58 | 59 | ### Calling crawlergo with python 60 | 61 | By default, crawlergo prints the results directly on the screen. We next set the output mode to `json`, and the sample code for calling it using python is as follows: 62 | 63 | ```python 64 | #!/usr/bin/python3 65 | # coding: utf-8 66 | 67 | import simplejson 68 | import subprocess 69 | 70 | 71 | def main(): 72 | target = "http://testphp.vulnweb.com/" 73 | cmd = ["./crawlergo", "-c", "/tmp/chromium/chrome", "-o", "json", target] 74 | rsp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 75 | output, error = rsp.communicate() 76 | # "--[Mission Complete]--" is the end-of-task separator string 77 | result = simplejson.loads(output.decode().split("--[Mission Complete]--")[1]) 78 | req_list = result["req_list"] 79 | print(req_list[0]) 80 | 81 | 82 | if __name__ == '__main__': 83 | main() 84 | ``` 85 | 86 | ### Crawl Results 87 | 88 | When the output mode is set to `json`, the returned result, after JSON deserialization, contains four parts: 89 | 90 | * `all_req_list`: All requests found during this crawl task, containing any resource type from other domains. 91 | * `req_list`: Returns the **current domain results** of this crawl task, pseudo-statically de-duplicated, without static resource links. It is a subset of `all_req_list`. 92 | * `all_domain_list`: List of all domains found. 93 | * `sub_domain_list`: List of subdomains found. 94 | 95 | 96 | ## Examples 97 | 98 | crawlergo returns the full request and URL, which can be used in a variety of ways: 99 | 100 | * Used in conjunction with other passive web vulnerability scanners 101 | 102 | First, start a passive scanner and set the listening address to: `http://127.0.0.1:1234/` 103 | 104 | Next, assuming crawlergo is on the same machine as the scanner, start crawlergo and set the parameters: 105 | 106 | `--push-to-proxy http://127.0.0.1:1234/` 107 | 108 | * Host binding (not available for newer versions of chrome) [(example)](https://github.com/0Kee-Team/crawlergo/blob/master/examples/host_binding.py) 109 | 110 | * Custom Cookies [(example)](https://github.com/0Kee-Team/crawlergo/blob/master/examples/request_with_cookie.py) 111 | 112 | * Regularly clean up zombie processes generated by crawlergo [(example)](https://github.com/0Kee-Team/crawlergo/blob/master/examples/zombie_clean.py) , contributed by @ring04h 113 | 114 | 115 | ## Bypass headless detect 116 | crawlergo can bypass headless mode detection by default.
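Internally, crawlergo evaluates an init script (`js.TabInitJS`, registered through `page.AddScriptToEvaluateOnNewDocument` in `pkg/engine/tab.go`) on every new document before any page script runs, which is also where headless fingerprints such as `navigator.webdriver` can be masked. The following is a minimal chromedp sketch of that mechanism; the one-line `stealth` patch is purely illustrative and stands in for crawlergo's real script in `pkg/js/javascript.go`:

```go
package main

import (
	"context"
	"fmt"

	"github.com/chromedp/cdproto/page"
	"github.com/chromedp/chromedp"
)

func main() {
	ctx, cancel := chromedp.NewContext(context.Background())
	defer cancel()

	// Illustrative stand-in for js.TabInitJS: hide the most common
	// headless fingerprint before any page script can read it.
	const stealth = `Object.defineProperty(navigator, 'webdriver', {get: () => undefined});`

	var res string
	err := chromedp.Run(ctx,
		// Registered before navigation, so it applies to every new document.
		chromedp.ActionFunc(func(ctx context.Context) error {
			_, err := page.AddScriptToEvaluateOnNewDocument(stealth).Do(ctx)
			return err
		}),
		chromedp.Navigate("https://intoli.com/blog/not-possible-to-block-chrome-headless/chrome-headless-test.html"),
		chromedp.Evaluate(`String(navigator.webdriver)`, &res), // expect "undefined"
	)
	if err != nil {
		panic(err)
	}
	fmt.Println("navigator.webdriver =", res)
}
```

The test page linked below is a quick way to verify whether such patches took effect.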
117 | 118 | https://intoli.com/blog/not-possible-to-block-chrome-headless/chrome-headless-test.html 119 | 120 | ![](./imgs/bypass.png) 121 | 122 | 123 | ## TroubleShooting 124 | 125 | * 'Fetch.enable' wasn't found 126 | 127 | Fetch is a feature supported by newer versions of chrome. If this error occurs, your chrome version is too old; please upgrade it to the latest version. 128 | 129 | * chrome runs with missing dependencies such as xxx.so 130 | 131 | ```shell 132 | // Ubuntu 133 | apt-get install -yq --no-install-recommends \ 134 | libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 \ 135 | libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 \ 136 | libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \ 137 | libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 libnss3 138 | 139 | // CentOS 7 140 | sudo yum install pango.x86_64 libXcomposite.x86_64 libXcursor.x86_64 libXdamage.x86_64 libXext.x86_64 libXi.x86_64 \ 141 | libXtst.x86_64 cups-libs.x86_64 libXScrnSaver.x86_64 libXrandr.x86_64 GConf2.x86_64 alsa-lib.x86_64 atk.x86_64 gtk3.x86_64 \ 142 | ipa-gothic-fonts xorg-x11-fonts-100dpi xorg-x11-fonts-75dpi xorg-x11-utils xorg-x11-fonts-cyrillic xorg-x11-fonts-Type1 xorg-x11-fonts-misc -y 143 | 144 | sudo yum update nss -y 145 | ``` 146 | 147 | 148 | * **Navigation timeout** at runtime / browser not found / unsure of the correct **browser executable path** 149 | 150 | Make sure the browser executable path is configured correctly: type `chrome://version` in the address bar and find the executable file path: 151 | 152 | ![](./imgs/chrome_path.png) 153 | 154 | 155 | 156 | ## Parameters 157 | ### Required parameters 158 | * `--chromium-path Path, -c Path` The path to the chrome executable. (**Required**) 159 | ### Basic parameters 160 | * `--custom-headers Headers` Customize the HTTP headers. Please pass in the data after JSON serialization; this is globally defined and will be used for all requests. (Default: null) 161 | * `--post-data PostData, -d PostData` POST data. (Default: null) 162 | * `--max-crawled-count Number, -m Number` The maximum number of crawl tasks, to avoid overly long crawling times caused by pseudo-static pages. (Default: 200) 163 | * `--filter-mode Mode, -f Mode` Filtering mode, `simple`: only static resources and duplicate requests are filtered. `smart`: with the ability to filter pseudo-static. `strict`: stricter pseudo-static filtering rules. (Default: smart) 164 | * `--output-mode value, -o value` Result output mode, `console`: print the current domain results directly to the screen. `json`: print the json serialized string of all results. `none`: don't print the output. (Default: console) 165 | * `--output-json filepath` Write the result to the specified file after JSON serializing it. (Default: null) 166 | * `--request-proxy proxyAddress` socks5 proxy address, all network requests from crawlergo and the chrome browser are sent through the proxy. (Default: null) 167 | 168 | ### Expand input URL 169 | * `--fuzz-path` Use the built-in dictionary for path fuzzing. (Default: false) 170 | * `--fuzz-path-dict` Customize the Fuzz path by passing in a dictionary file path, e.g. /home/user/fuzz_dir.txt, each line of the file represents a path to be fuzzed. (Default: null) 171 | * `--robots-path` Resolve paths from the /robots.txt file.
(Default: false) 172 | 173 | ### Form auto-fill 174 | * `--ignore-url-keywords, -iuk` URL keywords that you don't want to visit, generally used to exclude logout links when customizing cookies. Usage: `-iuk logout -iuk exit`. (default: "logout", "quit", "exit") 175 | * `--form-values, -fv` Customize the value of the form fill, set by text type. Supported definition types: default, mail, code, phone, username, password, qq, id_card, url, date and number. Text types are identified by the four attribute value keywords `id`, `name`, `class`, `type` of the input box label. For example, define the mailbox input box to be automatically filled with A and the password input box to be automatically filled with B: `-fv mail=A -fv password=B`. Here default represents the fill value used when the text type is not recognized, which is "Crawlergo". (Default: Crawlergo) 176 | * `--form-keyword-values, -fkv` Customize the value of the form fill, set by keyword fuzzy match. The keyword matches the four attribute values `id`, `name`, `class`, `type` of the input box label. For example, fuzzy match the pass keyword to fill 123456 and the user keyword to fill admin: `-fkv user=admin -fkv pass=123456`. (Default: Crawlergo) 177 | 178 | ### Advanced settings for the crawling process 179 | * `--incognito-context, -i` Start the browser in incognito mode. (Default: true) 180 | * `--max-tab-count Number, -t Number` The maximum number of tabs the crawler can open at the same time. (Default: 8) 181 | * `--tab-run-timeout Timeout` Maximum runtime for a single tab page. (Default: 20s) 182 | * `--wait-dom-content-loaded-timeout Timeout` The maximum timeout to wait for the page to finish loading. (Default: 5s) 183 | * `--event-trigger-interval Interval` The interval between automatically triggered events, generally used when the target network is slow and DOM update conflicts lead to missed URLs. (Default: 100ms) 184 | * `--event-trigger-mode Value` DOM event auto-triggering mode, either `async` or `sync`, for missed URLs caused by DOM update conflicts. (Default: async) 185 | * `--before-exit-delay` Delay closing chrome at the end of a single tab task, used to wait for remaining DOM updates and XHR requests to be captured. (Default: 1s) 186 | 187 | ### Other 188 | * `--push-to-proxy` The listening address that receives crawler results, usually the listening address of a passive scanner. (Default: null) 189 | * `--push-pool-max` The maximum concurrency when pushing crawler results to the listening address. (Default: 10) 190 | * `--log-level` Logging levels: debug, info, warn, error and fatal. (Default: info) 191 | * `--no-headless` Turn off chrome headless mode to visualize the crawling process.
(Default: false) 192 | 193 | 194 | ## Follow me 195 | 196 | Weibo:[@9ian1i](https://weibo.com/u/5242748339) 197 | Twitter: [@9ian1i](https://twitter.com/9ian1i) 198 | 199 | Related articles:[A browser crawler practice for web vulnerability scanning](https://www.anquanke.com/post/id/178339) 200 | -------------------------------------------------------------------------------- /pkg/engine/tab.go: -------------------------------------------------------------------------------- 1 | package engine 2 | 3 | import ( 4 | "context" 5 | "crawlergo/pkg/config" 6 | "crawlergo/pkg/js" 7 | "crawlergo/pkg/logger" 8 | model2 "crawlergo/pkg/model" 9 | "encoding/json" 10 | "fmt" 11 | "github.com/chromedp/cdproto/cdp" 12 | "github.com/chromedp/cdproto/dom" 13 | "github.com/chromedp/cdproto/fetch" 14 | "github.com/chromedp/cdproto/network" 15 | "github.com/chromedp/cdproto/page" 16 | "github.com/chromedp/cdproto/runtime" 17 | "github.com/chromedp/chromedp" 18 | "github.com/gogf/gf/encoding/gcharset" 19 | "regexp" 20 | "strings" 21 | "sync" 22 | "time" 23 | ) 24 | 25 | type Tab struct { 26 | Ctx *context.Context 27 | Cancel context.CancelFunc 28 | NavigateReq model2.Request 29 | ExtraHeaders map[string]interface{} 30 | ResultList []*model2.Request 31 | TopFrameId string 32 | LoaderID string 33 | NavNetworkID string 34 | PageCharset string 35 | PageBindings map[string]interface{} 36 | NavDone chan int 37 | FoundRedirection bool 38 | DocBodyNodeId cdp.NodeID 39 | config TabConfig 40 | 41 | lock sync.Mutex 42 | 43 | WG sync.WaitGroup //当前Tab页的等待同步计数 44 | collectLinkWG sync.WaitGroup 45 | loadedWG sync.WaitGroup //Loaded之后的等待计数 46 | formSubmitWG sync.WaitGroup //表单提交完毕的等待计数 47 | removeLis sync.WaitGroup //移除事件监听 48 | domWG sync.WaitGroup //DOMContentLoaded 的等待计数 49 | fillFormWG sync.WaitGroup //填充表单任务 50 | } 51 | 52 | type TabConfig struct { 53 | TabRunTimeout time.Duration 54 | DomContentLoadedTimeout time.Duration 55 | EventTriggerMode string // 事件触发的调用方式: 异步 或 顺序 56 | EventTriggerInterval time.Duration // 事件触发的间隔 单位毫秒 57 | BeforeExitDelay time.Duration // 退出前的等待时间,等待DOM渲染,等待XHR发出捕获 58 | EncodeURLWithCharset bool 59 | IgnoreKeywords []string // 60 | Proxy string 61 | CustomFormValues map[string]string 62 | CustomFormKeywordValues map[string]string 63 | } 64 | 65 | type bindingCallPayload struct { 66 | Name string `json:"name"` 67 | Seq int `json:"seq"` 68 | Args []string `json:"args"` 69 | } 70 | 71 | func NewTab(browser *Browser, navigateReq model2.Request, config TabConfig) *Tab { 72 | var tab Tab 73 | tab.ExtraHeaders = map[string]interface{}{} 74 | var DOMContentLoadedRun = false 75 | tab.Ctx, tab.Cancel = browser.NewTab(config.TabRunTimeout) 76 | for key, value := range browser.ExtraHeaders { 77 | navigateReq.Headers[key] = value 78 | if key != "Host" { 79 | tab.ExtraHeaders[key] = value 80 | } 81 | } 82 | tab.NavigateReq = navigateReq 83 | tab.config = config 84 | tab.NavDone = make(chan int) 85 | tab.DocBodyNodeId = 0 86 | 87 | // 设置请求拦截监听 88 | chromedp.ListenTarget(*tab.Ctx, func(v interface{}) { 89 | switch v := v.(type) { 90 | // 根据不同的事件 选择执行对应的动作 91 | case *network.EventRequestWillBeSent: 92 | if string(v.RequestID) == string(v.LoaderID) && v.Type == "Document" && tab.TopFrameId == "" { 93 | tab.LoaderID = string(v.LoaderID) 94 | tab.TopFrameId = string(v.FrameID) 95 | } 96 | 97 | // 请求发出时暂停 即 请求拦截 98 | case *fetch.EventRequestPaused: 99 | tab.WG.Add(1) 100 | go tab.InterceptRequest(v) 101 | 102 | // 解析所有JS文件中的URL并添加到结果中 103 | // 解析HTML文档中的URL 104 | // 查找当前页面的编码 105 | case 
*network.EventResponseReceived: 106 | if v.Response.MimeType == "application/javascript" || v.Response.MimeType == "text/html" || v.Response.MimeType == "application/json" { 107 | tab.WG.Add(1) 108 | go tab.ParseResponseURL(v) 109 | } 110 | if v.RequestID.String() == tab.NavNetworkID { 111 | tab.WG.Add(1) 112 | go tab.GetContentCharset(v) 113 | } 114 | // 处理后端重定向 3XX 115 | case *network.EventResponseReceivedExtraInfo: 116 | if v.RequestID.String() == tab.NavNetworkID { 117 | tab.WG.Add(1) 118 | go tab.HandleRedirectionResp(v) 119 | } 120 | //case *network.EventLoadingFailed: 121 | // logger.Logger.Error("EventLoadingFailed ", v.ErrorText) 122 | // 401 407 要求认证 此时会阻塞当前页面 需要处理解决 123 | case *fetch.EventAuthRequired: 124 | tab.WG.Add(1) 125 | go tab.HandleAuthRequired(v) 126 | 127 | // DOMContentLoaded 128 | // 开始执行表单填充 和 执行DOM节点观察函数 129 | // 只执行一次 130 | case *page.EventDomContentEventFired: 131 | if DOMContentLoadedRun { 132 | return 133 | } 134 | DOMContentLoadedRun = true 135 | tab.WG.Add(1) 136 | go tab.AfterDOMRun() 137 | // Loaded 138 | case *page.EventLoadEventFired: 139 | if DOMContentLoadedRun { 140 | return 141 | } 142 | DOMContentLoadedRun = true 143 | tab.WG.Add(1) 144 | go tab.AfterDOMRun() 145 | 146 | // close Dialog 147 | case *page.EventJavascriptDialogOpening: 148 | tab.WG.Add(1) 149 | go tab.dismissDialog() 150 | 151 | // handle expose function 152 | case *runtime.EventBindingCalled: 153 | tab.WG.Add(1) 154 | go tab.HandleBindingCalled(v) 155 | } 156 | }) 157 | 158 | return &tab 159 | } 160 | 161 | /** 162 | 163 | */ 164 | func waitNavigateDone(ctx context.Context) error { 165 | ch := make(chan struct{}) 166 | lCtx, lCancel := context.WithCancel(ctx) 167 | tCtx, cancel := context.WithTimeout(ctx, config.DomContentLoadedTimeout) 168 | defer cancel() 169 | chromedp.ListenTarget(lCtx, func(ev interface{}) { 170 | if _, ok := ev.(*page.EventDomContentEventFired); ok { 171 | lCancel() 172 | close(ch) 173 | } else if _, ok := ev.(*page.EventLoadEventFired); ok { 174 | lCancel() 175 | close(ch) 176 | } 177 | }) 178 | select { 179 | case <-ch: 180 | return nil 181 | case <-ctx.Done(): 182 | return ctx.Err() 183 | case <-tCtx.Done(): 184 | return tCtx.Err() 185 | } 186 | } 187 | 188 | func (tab *Tab) Start() { 189 | logger.Logger.Info("Crawling " + tab.NavigateReq.Method + " " + tab.NavigateReq.URL.String()) 190 | defer tab.Cancel() 191 | if err := chromedp.Run(*tab.Ctx, 192 | RunWithTimeOut(tab.Ctx, tab.config.DomContentLoadedTimeout, chromedp.Tasks{ 193 | // 194 | runtime.Enable(), 195 | // 开启网络层API 196 | network.Enable(), 197 | // 开启请求拦截API 198 | fetch.Enable().WithHandleAuthRequests(true), 199 | // 添加回调函数绑定 200 | // XSS-Scan 使用的回调 201 | runtime.AddBinding("addLink"), 202 | runtime.AddBinding("Test"), 203 | // 初始化执行JS 204 | chromedp.ActionFunc(func(ctx context.Context) error { 205 | var err error 206 | _, err = page.AddScriptToEvaluateOnNewDocument(js.TabInitJS).Do(ctx) 207 | if err != nil { 208 | return err 209 | } 210 | return nil 211 | }), 212 | network.SetExtraHTTPHeaders(tab.ExtraHeaders), 213 | // 执行导航 214 | //chromedp.Navigate(tab.NavigateReq.URL.String()), 215 | chromedp.ActionFunc(func(ctx context.Context) error { 216 | _, _, _, err := page.Navigate(tab.NavigateReq.URL.String()).Do(ctx) 217 | if err != nil { 218 | return err 219 | } 220 | return waitNavigateDone(ctx) 221 | }), 222 | }), 223 | ); err != nil { 224 | if err.Error() == "context canceled" { 225 | return 226 | } 227 | logger.Logger.Warn("navigate timeout ", tab.NavigateReq.URL.String()) 228 | } 229 | 230 | go 
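// Block until every event-handler goroutine registered above has
// finished, then signal NavDone; the select below adds a grace
// timeout in case one of them never returns.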
func() {
231 | // wait for all event-handler goroutines to finish
232 | tab.WG.Wait()
233 | tab.NavDone <- 1
234 | }()
235 | 
236 | select {
237 | case <-tab.NavDone:
238 | logger.Logger.Debug("all navigation tasks done.")
239 | case <-time.After(tab.config.DomContentLoadedTimeout + time.Second*10):
240 | logger.Logger.Warn("navigation tasks TIMEOUT.")
241 | }
242 | 
243 | // wait for all links to be collected
244 | logger.Logger.Debug("collectLinks start.")
245 | tab.collectLinkWG.Add(3)
246 | go tab.collectLinks()
247 | tab.collectLinkWG.Wait()
248 | logger.Logger.Debug("collectLinks end.")
249 | 
250 | // detect the page charset and re-encode all URLs with it
251 | if tab.config.EncodeURLWithCharset {
252 | tab.DetectCharset()
253 | tab.EncodeAllURLWithCharset()
254 | }
255 | 
256 | //fmt.Println(tab.NavigateReq.URL.String(), len(tab.ResultList))
257 | //for _, v := range tab.ResultList {
258 | // v.SimplePrint()
259 | //}
260 | // fmt.Println("Finished " + tab.NavigateReq.Method + " " + tab.NavigateReq.URL.String())
261 | }
262 | 
263 | func RunWithTimeOut(ctx *context.Context, timeout time.Duration, tasks chromedp.Tasks) chromedp.ActionFunc {
264 | return func(ctx context.Context) error {
265 | timeoutContext, _ := context.WithTimeout(ctx, timeout)
266 | //defer cancel() // the cancel func is dropped on purpose: the timeout context is released when the tab context itself is cancelled
267 | return tasks.Do(timeoutContext)
268 | }
269 | }
270 | 
271 | /**
272 | Append a collected URL to the result list; Host binding has to be handled here.
273 | */
274 | func (tab *Tab) AddResultUrl(method string, _url string, source string) {
275 | navUrl := tab.NavigateReq.URL
276 | url, err := model2.GetUrl(_url, *navUrl)
277 | if err != nil {
278 | return
279 | }
280 | option := model2.Options{
281 | Headers: map[string]interface{}{},
282 | PostData: "",
283 | }
284 | referer := navUrl.String()
285 | 
286 | // handle Host binding
287 | if host, ok := tab.NavigateReq.Headers["Host"]; ok {
288 | if host != navUrl.Hostname() && url.Hostname() == host {
289 | url, _ = model2.GetUrl(strings.Replace(url.String(), "://"+url.Hostname(), "://"+navUrl.Hostname(), -1), *navUrl)
290 | option.Headers["Host"] = host
291 | referer = strings.Replace(navUrl.String(), navUrl.Host, host.(string), -1)
292 | }
293 | }
294 | // carry over the Cookie
295 | if cookie, ok := tab.NavigateReq.Headers["Cookie"]; ok {
296 | option.Headers["Cookie"] = cookie
297 | }
298 | 
299 | // fix up the Referer
300 | option.Headers["Referer"] = referer
301 | for key, value := range tab.ExtraHeaders {
302 | option.Headers[key] = value
303 | }
304 | req := model2.GetRequest(method, url, option)
305 | req.Source = source
306 | 
307 | tab.lock.Lock()
308 | tab.ResultList = append(tab.ResultList, &req)
309 | tab.lock.Unlock()
310 | }
311 | 
312 | /**
313 | Append an intercepted request to the result list; Host binding was already handled during interception, so nothing more is needed here.
314 | */
315 | func (tab *Tab) AddResultRequest(req model2.Request) {
316 | for key, value := range tab.ExtraHeaders {
317 | req.Headers[key] = value
318 | }
319 | tab.lock.Lock()
320 | tab.ResultList = append(tab.ResultList, &req)
321 | tab.lock.Unlock()
322 | }
323 | 
324 | /**
325 | Get the CDP executor context of the current tab.
326 | */
327 | func (tab *Tab) GetExecutor() context.Context {
328 | c := chromedp.FromContext(*tab.Ctx)
329 | ctx := cdp.WithExecutor(*tab.Ctx, c.Target)
330 | return ctx
331 | }
332 | 
333 | /**
334 | Dismiss JavaScript dialogs.
335 | */
336 | func (tab *Tab) dismissDialog() {
337 | defer tab.WG.Done()
338 | ctx := tab.GetExecutor()
339 | _ = page.HandleJavaScriptDialog(false).Do(ctx)
340 | }
341 | 
342 | /**
343 | Handle exposed-binding callbacks.
344 | */
345 | func (tab *Tab) HandleBindingCalled(event *runtime.EventBindingCalled) {
346 | defer tab.WG.Done()
347 | payload := []byte(event.Payload)
348 | var bcPayload bindingCallPayload
349 | _ = json.Unmarshal(payload, &bcPayload)
350 | if bcPayload.Name == "addLink" && 
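// addLink deliveries carry a [url, source] argument pair pushed by the injected JS hooks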
len(bcPayload.Args) > 1 { 351 | tab.AddResultUrl(config.GET, bcPayload.Args[0], bcPayload.Args[1]) 352 | } 353 | if bcPayload.Name == "Test" { 354 | fmt.Println(bcPayload.Args) 355 | } 356 | tab.Evaluate(fmt.Sprintf(js.DeliverResultJS, bcPayload.Name, bcPayload.Seq, "s")) 357 | } 358 | 359 | /** 360 | 执行JS 361 | */ 362 | func (tab *Tab) Evaluate(expression string) { 363 | ctx := tab.GetExecutor() 364 | tCtx, cancel := context.WithTimeout(ctx, time.Second*5) 365 | defer cancel() 366 | _, exception, err := runtime.Evaluate(expression).Do(tCtx) 367 | if exception != nil { 368 | logger.Logger.Debug("tab Evaluate: ", exception.Text) 369 | } 370 | if err != nil { 371 | logger.Logger.Debug("tab Evaluate: ", err) 372 | } 373 | } 374 | 375 | /** 376 | 立即根据条件获取Nodes的ID,不等待 377 | */ 378 | func (tab *Tab) GetNodeIDs(sel string) ([]cdp.NodeID, error) { 379 | ctx := tab.GetExecutor() 380 | return dom.QuerySelectorAll(tab.DocBodyNodeId, sel).Do(ctx) 381 | } 382 | 383 | /** 384 | 根据给的Node执行JS 385 | */ 386 | func (tab *Tab) EvaluateWithNode(expression string, node *cdp.Node) error { 387 | ctx := tab.GetExecutor() 388 | var res bool 389 | err := chromedp.EvaluateAsDevTools(js.Snippet(expression, js.CashX(true), "", node), &res).Do(ctx) 390 | if err != nil { 391 | return err 392 | } 393 | return nil 394 | } 395 | 396 | /** 397 | 识别页面的编码 398 | */ 399 | func (tab *Tab) DetectCharset() { 400 | ctx := tab.GetExecutor() 401 | tCtx, cancel := context.WithTimeout(ctx, time.Millisecond*500) 402 | defer cancel() 403 | var content string 404 | var ok bool 405 | var getCharsetRegex = regexp.MustCompile("charset=(.+)$") 406 | err := chromedp.AttributeValue(`meta[http-equiv=Content-Type]`, "content", &content, &ok, chromedp.ByQuery).Do(tCtx) 407 | if err != nil || ok != true { 408 | return 409 | } 410 | if strings.Contains(content, "charset=") { 411 | charset := getCharsetRegex.FindString(content) 412 | if charset != "" { 413 | tab.PageCharset = strings.ToUpper(strings.Replace(charset, "charset=", "", -1)) 414 | tab.PageCharset = strings.TrimSpace(tab.PageCharset) 415 | } 416 | } 417 | } 418 | 419 | func (tab *Tab) EncodeAllURLWithCharset() { 420 | if tab.PageCharset == "" || tab.PageCharset == "UTF-8" { 421 | return 422 | } 423 | for _, req := range tab.ResultList { 424 | newRawQuery, err := gcharset.UTF8To(tab.PageCharset, req.URL.RawQuery) 425 | if err == nil { 426 | req.URL.RawQuery = newRawQuery 427 | } 428 | newRawPath, err := gcharset.UTF8To(tab.PageCharset, req.URL.RawPath) 429 | if err == nil { 430 | req.URL.RawPath = newRawPath 431 | } 432 | } 433 | } 434 | 435 | func IsIgnoredByKeywordMatch(req model2.Request, IgnoreKeywords []string) bool { 436 | for _, _str := range IgnoreKeywords { 437 | if strings.Contains(req.URL.String(), _str) { 438 | logger.Logger.Info("ignore request: ", req.SimpleFormat()) 439 | return true 440 | } 441 | } 442 | return false 443 | } 444 | -------------------------------------------------------------------------------- /cmd/crawlergo/crawlergo_cmd.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "crawlergo/pkg" 5 | "crawlergo/pkg/config" 6 | "crawlergo/pkg/logger" 7 | model2 "crawlergo/pkg/model" 8 | "crawlergo/pkg/tools" 9 | "crawlergo/pkg/tools/requests" 10 | "encoding/json" 11 | "errors" 12 | "fmt" 13 | "github.com/panjf2000/ants/v2" 14 | "github.com/sirupsen/logrus" 15 | "github.com/urfave/cli/v2" 16 | "log" 17 | "os" 18 | "os/signal" 19 | "strings" 20 | "sync" 21 | ) 22 | 23 | /** 24 | 命令行调用适配器 25 | 26 | 
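Illustrative invocation (the chromium path here is an assumption, not a default):

    crawlergo -c /usr/bin/chromium -t 10 -o json http://testphp.vulnweb.com/

With -o json, the serialized result follows the --[Mission Complete]-- marker on stdout, which is how examples/subprocess_call.py consumes it.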
用于生成开源的二进制程序 27 | */ 28 | 29 | type Result struct { 30 | ReqList []Request `json:"req_list"` 31 | AllReqList []Request `json:"all_req_list"` 32 | AllDomainList []string `json:"all_domain_list"` 33 | SubDomainList []string `json:"sub_domain_list"` 34 | } 35 | 36 | type Request struct { 37 | Url string `json:"url"` 38 | Method string `json:"method"` 39 | Headers map[string]interface{} `json:"headers"` 40 | Data string `json:"data"` 41 | Source string `json:"source"` 42 | } 43 | 44 | type ProxyTask struct { 45 | req *model2.Request 46 | pushProxy string 47 | } 48 | 49 | const DefaultMaxPushProxyPoolMax = 10 50 | const DefaultLogLevel = "Info" 51 | 52 | var taskConfig pkg.TaskConfig 53 | var outputMode string 54 | var postData string 55 | var signalChan chan os.Signal 56 | var ignoreKeywords *cli.StringSlice 57 | var customFormTypeValues *cli.StringSlice 58 | var customFormKeywordValues *cli.StringSlice 59 | var pushAddress string 60 | var pushProxyPoolMax int 61 | var pushProxyWG sync.WaitGroup 62 | var outputJsonPath string 63 | var logLevel string 64 | 65 | func main() { 66 | author := cli.Author{ 67 | Name: "9ian1i", 68 | Email: "9ian1itp@gmail.com", 69 | } 70 | 71 | ignoreKeywords = cli.NewStringSlice(config.DefaultIgnoreKeywords...) 72 | customFormTypeValues = cli.NewStringSlice() 73 | customFormKeywordValues = cli.NewStringSlice() 74 | 75 | app := &cli.App{ 76 | Name: "crawlergo", 77 | Usage: "A powerful browser crawler for web vulnerability scanners", 78 | UsageText: "crawlergo [global options] url1 url2 url3 ... (must be same host)", 79 | Version: "v0.4.2", 80 | Authors: []*cli.Author{&author}, 81 | Flags: []cli.Flag{ 82 | &cli.PathFlag{ 83 | Name: "chromium-path", 84 | Aliases: []string{"c"}, 85 | Usage: "`Path` of chromium executable. Such as \"/home/test/chrome-linux/chrome\"", 86 | Required: true, 87 | Destination: &taskConfig.ChromiumPath, 88 | EnvVars: []string{"CRAWLERGO_CHROMIUM_PATH"}, 89 | }, 90 | &cli.StringFlag{ 91 | Name: "custom-headers", 92 | Usage: "add additional `Headers` to each request. The input string will be called json.Unmarshal", 93 | Value: fmt.Sprintf(`{"Spider-Name": "crawlergo", "User-Agent": "%s"}`, config.DefaultUA), 94 | Destination: &taskConfig.ExtraHeadersString, 95 | }, 96 | &cli.StringFlag{ 97 | Name: "post-data", 98 | Aliases: []string{"d"}, 99 | Usage: "set `PostData` to target and use POST method.", 100 | Destination: &postData, 101 | }, 102 | &cli.IntFlag{ 103 | Name: "max-crawled-count", 104 | Aliases: []string{"m"}, 105 | Value: config.MaxCrawlCount, 106 | Usage: "the maximum `Number` of URLs visited by the crawler in this task.", 107 | Destination: &taskConfig.MaxCrawlCount, 108 | }, 109 | &cli.StringFlag{ 110 | Name: "filter-mode", 111 | Aliases: []string{"f"}, 112 | Value: "smart", 113 | Usage: "filtering `Mode` used for collected requests. Allowed mode:\"simple\", \"smart\" or \"strict\".", 114 | Destination: &taskConfig.FilterMode, 115 | }, 116 | &cli.StringFlag{ 117 | Name: "output-mode", 118 | Aliases: []string{"o"}, 119 | Value: "console", 120 | Usage: "console print or serialize output. 
Allowed mode:\"console\" ,\"json\" or \"none\".", 121 | Destination: &outputMode, 122 | }, 123 | &cli.StringFlag{ 124 | Name: "output-json", 125 | Usage: "write output to a json file.Such as result_www_crawlergo_com.json", 126 | Destination: &outputJsonPath, 127 | }, 128 | &cli.BoolFlag{ 129 | Name: "incognito-context", 130 | Aliases: []string{"i"}, 131 | Value: true, 132 | Usage: "whether the browser is launched in incognito mode.", 133 | Destination: &taskConfig.IncognitoContext, 134 | }, 135 | &cli.IntFlag{ 136 | Name: "max-tab-count", 137 | Aliases: []string{"t"}, 138 | Value: 8, 139 | Usage: "maximum `Number` of tabs allowed.", 140 | Destination: &taskConfig.MaxTabsCount, 141 | }, 142 | &cli.BoolFlag{ 143 | Name: "fuzz-path", 144 | Value: false, 145 | Usage: "whether to fuzz the target with common paths.", 146 | Destination: &taskConfig.PathByFuzz, 147 | }, 148 | &cli.PathFlag{ 149 | Name: "fuzz-path-dict", 150 | Usage: "`Path` of fuzz dict. Such as \"/home/test/fuzz_path.txt\"", 151 | Destination: &taskConfig.FuzzDictPath, 152 | }, 153 | &cli.BoolFlag{ 154 | Name: "robots-path", 155 | Value: false, 156 | Usage: "whether to resolve paths from /robots.txt.", 157 | Destination: &taskConfig.PathFromRobots, 158 | }, 159 | &cli.StringFlag{ 160 | Name: "request-proxy", 161 | Usage: "all requests connect through defined proxy server.", 162 | Destination: &taskConfig.Proxy, 163 | }, 164 | //&cli.BoolFlag{ 165 | // Name: "bypass", 166 | // Value: false, 167 | // Usage: "whether to encode url with detected charset.", 168 | // Destination: &taskConfig.EncodeURLWithCharset, 169 | //}, 170 | &cli.BoolFlag{ 171 | Name: "encode-url", 172 | Value: false, 173 | Usage: "whether to encode url with detected charset.", 174 | Destination: &taskConfig.EncodeURLWithCharset, 175 | }, 176 | &cli.DurationFlag{ 177 | Name: "tab-run-timeout", 178 | Value: config.TabRunTimeout, 179 | Usage: "the `Timeout` of a single tab task.", 180 | Destination: &taskConfig.TabRunTimeout, 181 | }, 182 | &cli.DurationFlag{ 183 | Name: "wait-dom-content-loaded-timeout", 184 | Value: config.DomContentLoadedTimeout, 185 | Usage: "the `Timeout` of waiting for a page dom ready.", 186 | Destination: &taskConfig.DomContentLoadedTimeout, 187 | }, 188 | &cli.StringFlag{ 189 | Name: "event-trigger-mode", 190 | Value: config.EventTriggerAsync, 191 | Usage: "this `Value` determines how the crawler automatically triggers events.Allowed mode:\"async\" or \"sync\".", 192 | Destination: &taskConfig.EventTriggerMode, 193 | }, 194 | &cli.DurationFlag{ 195 | Name: "event-trigger-interval", 196 | Value: config.EventTriggerInterval, 197 | Usage: "the `Interval` of triggering each event.", 198 | Destination: &taskConfig.EventTriggerInterval, 199 | }, 200 | &cli.DurationFlag{ 201 | Name: "before-exit-delay", 202 | Value: config.BeforeExitDelay, 203 | Usage: "the `Time` of waiting before crawler exit.", 204 | Destination: &taskConfig.BeforeExitDelay, 205 | }, 206 | &cli.StringSliceFlag{ 207 | Name: "ignore-url-keywords", 208 | Aliases: []string{"iuk"}, 209 | Value: ignoreKeywords, 210 | Usage: "crawlergo will not crawl these URLs matched by `Keywords`. e.g.: -iuk logout -iuk quit -iuk exit", 211 | DefaultText: "Default [logout quit exit]", 212 | }, 213 | &cli.StringSliceFlag{ 214 | Name: "form-values", 215 | Aliases: []string{"fv"}, 216 | Value: customFormTypeValues, 217 | Usage: "custom filling text for each form type. 
e.g.: -fv username=crawlergo_nice -fv password=admin123", 218 | }, 219 | // 根据关键词自行选择填充文本 220 | &cli.StringSliceFlag{ 221 | Name: "form-keyword-values", 222 | Aliases: []string{"fkv"}, 223 | Value: customFormKeywordValues, 224 | Usage: "custom filling text, fuzzy matched by keyword. e.g.: -fkv user=crawlergo_nice -fkv pass=admin123", 225 | }, 226 | &cli.StringFlag{ 227 | Name: "push-to-proxy", 228 | Usage: "every request in 'req_list' will be pushed to the proxy `Address`. Such as \"http://127.0.0.1:8080/\"", 229 | Destination: &pushAddress, 230 | }, 231 | &cli.IntFlag{ 232 | Name: "push-pool-max", 233 | Usage: "maximum `Number` of concurrency when pushing results to proxy.", 234 | Value: DefaultMaxPushProxyPoolMax, 235 | Destination: &pushProxyPoolMax, 236 | }, 237 | &cli.StringFlag{ 238 | Name: "log-level", 239 | Usage: "log print `Level`, options include debug, info, warn, error and fatal.", 240 | Value: DefaultLogLevel, 241 | Destination: &logLevel, 242 | }, 243 | &cli.BoolFlag{ 244 | Name: "no-headless", 245 | Value: false, 246 | Usage: "no headless mode", 247 | Destination: &taskConfig.NoHeadless, 248 | }, 249 | }, 250 | Action: run, 251 | } 252 | 253 | err := app.Run(os.Args) 254 | if err != nil { 255 | logger.Logger.Fatal(err) 256 | } 257 | } 258 | 259 | func run(c *cli.Context) error { 260 | var req model2.Request 261 | signalChan = make(chan os.Signal, 1) 262 | signal.Notify(signalChan, os.Interrupt) 263 | 264 | if c.Args().Len() == 0 { 265 | logger.Logger.Error("url must be set") 266 | return errors.New("url must be set") 267 | } 268 | 269 | // 设置日志输出级别 270 | level, err := logrus.ParseLevel(logLevel) 271 | if err != nil { 272 | logger.Logger.Fatal(err) 273 | } 274 | logger.Logger.SetLevel(level) 275 | 276 | var targets []*model2.Request 277 | for _, _url := range c.Args().Slice() { 278 | url, err := model2.GetUrl(_url) 279 | if err != nil { 280 | logger.Logger.Error("parse url failed, ", err) 281 | continue 282 | } 283 | if postData != "" { 284 | req = model2.GetRequest(config.POST, url, getOption()) 285 | } else { 286 | req = model2.GetRequest(config.GET, url, getOption()) 287 | } 288 | req.Proxy = taskConfig.Proxy 289 | targets = append(targets, &req) 290 | } 291 | taskConfig.IgnoreKeywords = ignoreKeywords.Value() 292 | if taskConfig.Proxy != "" { 293 | logger.Logger.Info("request with proxy: ", taskConfig.Proxy) 294 | } 295 | 296 | if len(targets) == 0 { 297 | logger.Logger.Fatal("no validate target.") 298 | } 299 | 300 | // 检查自定义的表单参数配置 301 | taskConfig.CustomFormValues, err = parseCustomFormValues(customFormTypeValues.Value()) 302 | if err != nil { 303 | logger.Logger.Fatal(err) 304 | } 305 | taskConfig.CustomFormKeywordValues, err = keywordStringToMap(customFormKeywordValues.Value()) 306 | if err != nil { 307 | logger.Logger.Fatal(err) 308 | } 309 | 310 | // 开始爬虫任务 311 | task, err := pkg.NewCrawlerTask(targets, taskConfig) 312 | if err != nil { 313 | logger.Logger.Error("create crawler task failed.") 314 | os.Exit(-1) 315 | } 316 | if len(targets) != 0 { 317 | logger.Logger.Info(fmt.Sprintf("Init crawler task, host: %s, max tab count: %d, max crawl count: %d.", 318 | targets[0].URL.Host, taskConfig.MaxTabsCount, taskConfig.MaxCrawlCount)) 319 | logger.Logger.Info("filter mode: ", taskConfig.FilterMode) 320 | } 321 | 322 | // 提示自定义表单填充参数 323 | if len(taskConfig.CustomFormValues) > 0 { 324 | logger.Logger.Info("Custom form values, " + tools.MapStringFormat(taskConfig.CustomFormValues)) 325 | } 326 | // 提示自定义表单填充参数 327 | if len(taskConfig.CustomFormKeywordValues) > 0 { 328 | 
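// and the same hint for the keyword-matched fill values: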
logger.Logger.Info("Custom form keyword values, " + tools.MapStringFormat(taskConfig.CustomFormKeywordValues)) 329 | } 330 | if _, ok := taskConfig.CustomFormValues["default"]; !ok { 331 | logger.Logger.Info("If no matches, default form input text: " + config.DefaultInputText) 332 | taskConfig.CustomFormValues["default"] = config.DefaultInputText 333 | } 334 | 335 | go handleExit(task) 336 | logger.Logger.Info("Start crawling.") 337 | task.Run() 338 | result := task.Result 339 | 340 | logger.Logger.Info(fmt.Sprintf("Task finished, %d results, %d requests, %d subdomains, %d domains found.", 341 | len(result.ReqList), len(result.AllReqList), len(result.SubDomainList), len(result.AllDomainList))) 342 | 343 | // 内置请求代理 344 | if pushAddress != "" { 345 | logger.Logger.Info("pushing results to ", pushAddress, ", max pool number:", pushProxyPoolMax) 346 | Push2Proxy(result.ReqList) 347 | } 348 | 349 | // 输出结果 350 | outputResult(result) 351 | 352 | return nil 353 | } 354 | 355 | func getOption() model2.Options { 356 | var option model2.Options 357 | if postData != "" { 358 | option.PostData = postData 359 | } 360 | if taskConfig.ExtraHeadersString != "" { 361 | err := json.Unmarshal([]byte(taskConfig.ExtraHeadersString), &taskConfig.ExtraHeaders) 362 | if err != nil { 363 | logger.Logger.Fatal("custom headers can't be Unmarshal.") 364 | panic(err) 365 | } 366 | option.Headers = taskConfig.ExtraHeaders 367 | } 368 | return option 369 | } 370 | 371 | func parseCustomFormValues(customData []string) (map[string]string, error) { 372 | parsedData := map[string]string{} 373 | for _, item := range customData { 374 | keyValue := strings.Split(item, "=") 375 | if len(keyValue) < 2 { 376 | return nil, errors.New("invalid form item: " + item) 377 | } 378 | key := keyValue[0] 379 | if !tools.StringSliceContain(config.AllowedFormName, key) { 380 | return nil, errors.New("not allowed form key: " + key) 381 | } 382 | value := keyValue[1] 383 | parsedData[key] = value 384 | } 385 | return parsedData, nil 386 | } 387 | 388 | func keywordStringToMap(data []string) (map[string]string, error) { 389 | parsedData := map[string]string{} 390 | for _, item := range data { 391 | keyValue := strings.Split(item, "=") 392 | if len(keyValue) < 2 { 393 | return nil, errors.New("invalid keyword format: " + item) 394 | } 395 | key := keyValue[0] 396 | value := keyValue[1] 397 | parsedData[key] = value 398 | } 399 | return parsedData, nil 400 | } 401 | 402 | func outputResult(result *pkg.Result) { 403 | // 输出结果 404 | if outputMode == "json" { 405 | fmt.Println("--[Mission Complete]--") 406 | resBytes := getJsonSerialize(result) 407 | fmt.Println(string(resBytes)) 408 | } else if outputMode == "console" { 409 | for _, req := range result.ReqList { 410 | req.FormatPrint() 411 | } 412 | } 413 | if len(outputJsonPath) != 0 { 414 | resBytes := getJsonSerialize(result) 415 | tools.WriteFile(outputJsonPath, resBytes) 416 | } 417 | } 418 | 419 | /** 420 | 原生被动代理推送支持 421 | */ 422 | func Push2Proxy(reqList []*model2.Request) { 423 | pool, _ := ants.NewPool(pushProxyPoolMax) 424 | defer pool.Release() 425 | for _, req := range reqList { 426 | task := ProxyTask{ 427 | req: req, 428 | pushProxy: pushAddress, 429 | } 430 | pushProxyWG.Add(1) 431 | go func() { 432 | err := pool.Submit(task.doRequest) 433 | if err != nil { 434 | logger.Logger.Error("add Push2Proxy task failed: ", err) 435 | pushProxyWG.Done() 436 | } 437 | }() 438 | } 439 | pushProxyWG.Wait() 440 | } 441 | 442 | /** 443 | 协程池请求的任务 444 | */ 445 | func (p *ProxyTask) doRequest() { 
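// doRequest replays a single crawl result through the push-to-proxy
// address. The short timeout and AllowRedirect=false keep the push
// fire-and-forget: only the passive scanner seeing the request
// matters, not the response.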
446 | defer pushProxyWG.Done() 447 | _, _ = requests.Request(p.req.Method, p.req.URL.String(), tools.ConvertHeaders(p.req.Headers), []byte(p.req.PostData), 448 | &requests.ReqOptions{Timeout: 1, AllowRedirect: false, Proxy: p.pushProxy}) 449 | } 450 | 451 | func handleExit(t *pkg.CrawlerTask) { 452 | select { 453 | case <-signalChan: 454 | fmt.Println("exit ...") 455 | t.Pool.Tune(1) 456 | t.Pool.Release() 457 | t.Browser.Close() 458 | os.Exit(-1) 459 | } 460 | } 461 | 462 | func getJsonSerialize(result *pkg.Result) []byte { 463 | var res Result 464 | var reqList []Request 465 | var allReqList []Request 466 | for _, _req := range result.ReqList { 467 | var req Request 468 | req.Method = _req.Method 469 | req.Url = _req.URL.String() 470 | req.Source = _req.Source 471 | req.Data = _req.PostData 472 | req.Headers = _req.Headers 473 | reqList = append(reqList, req) 474 | } 475 | for _, _req := range result.AllReqList { 476 | var req Request 477 | req.Method = _req.Method 478 | req.Url = _req.URL.String() 479 | req.Source = _req.Source 480 | req.Data = _req.PostData 481 | req.Headers = _req.Headers 482 | allReqList = append(allReqList, req) 483 | } 484 | res.AllReqList = allReqList 485 | res.ReqList = reqList 486 | res.AllDomainList = result.AllDomainList 487 | res.SubDomainList = result.SubDomainList 488 | 489 | resBytes, err := json.Marshal(res) 490 | if err != nil { 491 | log.Fatal("Marshal result error") 492 | } 493 | return resBytes 494 | } 495 | -------------------------------------------------------------------------------- /pkg/js/javascript.go: -------------------------------------------------------------------------------- 1 | package js 2 | 3 | import ( 4 | "fmt" 5 | "github.com/chromedp/cdproto/cdp" 6 | ) 7 | 8 | const TabInitJS = ` 9 | (function addTabInitScript () { 10 | 11 | // Pass the Webdriver Test. 12 | Object.defineProperty(navigator, 'webdriver', { 13 | get: () => false, 14 | }); 15 | 16 | // Pass the Plugins Length Test. 17 | // Overwrite the plugins property to use a custom getter. 18 | Object.defineProperty(navigator, 'plugins', { 19 | // This just needs to have length > 0 for the current test, 20 | // but we could mock the plugins too if necessary. 21 | get: () => [1, 2, 3, 4, 5], 22 | }); 23 | 24 | // Pass the Chrome Test. 25 | // We can mock this in as much depth as we need for the test. 26 | window.chrome = { 27 | runtime: {}, 28 | }; 29 | 30 | // Pass the Permissions Test. 31 | const originalQuery = window.navigator.permissions.query; 32 | window.navigator.permissions.query = (parameters) => ( 33 | parameters.name === 'notifications' ? 34 | Promise.resolve({ state: Notification.permission }) : 35 | originalQuery(parameters) 36 | ); 37 | 38 | //Pass the Permissions Test. 
navigator.userAgent 39 | Object.defineProperty(navigator, 'userAgent', { 40 | get: () => "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.0 Safari/537.36", 41 | }); 42 | 43 | // 修改浏览器对象的属性 44 | Object.defineProperty(navigator, 'platform', { 45 | get: function () { return 'win32'; } 46 | }); 47 | 48 | Object.defineProperty(navigator, 'language', { 49 | get: function () { return 'zh-CN'; } 50 | }); 51 | 52 | Object.defineProperty(navigator, 'languages', { 53 | get: function () { return ["zh-CN", "zh"]; } 54 | }); 55 | 56 | // history api hook 57 | window.history.pushState = function(a, b, c) { 58 | window.addLink(c, "HistoryAPI"); 59 | } 60 | window.history.replaceState = function(a, b, c) { 61 | window.addLink(c, "HistoryAPI"); 62 | } 63 | Object.defineProperty(window.history,"pushState",{"writable": false, "configurable": false}); 64 | Object.defineProperty(window.history,"replaceState",{"writable": false, "configurable": false}); 65 | // 监听hash改变 66 | window.addEventListener("hashchange", function() { 67 | window.addLink(document.location.href, "HashChange"); 68 | }); 69 | 70 | var oldWebSocket = window.WebSocket; 71 | window.WebSocket = function(url, arg) { 72 | window.addLink(url, "WebSocket"); 73 | return new oldWebSocket(url, arg); 74 | } 75 | 76 | var oldEventSource = window.EventSource; 77 | window.EventSource = function(url) { 78 | window.addLink(url, "EventSource"); 79 | return new oldEventSource(url); 80 | } 81 | 82 | var oldFetch = window.fetch; 83 | window.fetch = function(url) { 84 | window.addLink(url, "Fetch"); 85 | return oldFetch(url); 86 | } 87 | 88 | // 锁定表单重置 89 | HTMLFormElement.prototype.reset = function() {console.log("cancel reset form")}; 90 | Object.defineProperty(HTMLFormElement.prototype,"reset",{"writable": false, "configurable": false}); 91 | 92 | // hook dom2 级事件监听 93 | window.add_even_listener_count_sec_auto = {}; 94 | // record event func , hook addEventListener 95 | let old_event_handle = Element.prototype.addEventListener; 96 | Element.prototype.addEventListener = function(event_name, event_func, useCapture) { 97 | let name = "<" + this.tagName + "> " + this.id + this.name + this.getAttribute("class") + "|" + event_name; 98 | // console.log(name) 99 | // 对每个事件设定最大的添加次数,防止无限触发,最大次数为5 100 | if (!window.add_even_listener_count_sec_auto.hasOwnProperty(name)) { 101 | window.add_even_listener_count_sec_auto[name] = 1; 102 | } else if (window.add_even_listener_count_sec_auto[name] == 5) { 103 | return ; 104 | } else { 105 | window.add_even_listener_count_sec_auto[name] += 1; 106 | } 107 | if (this.hasAttribute("sec_auto_dom2_event_flag")) { 108 | let sec_auto_dom2_event_flag = this.getAttribute("sec_auto_dom2_event_flag"); 109 | this.setAttribute("sec_auto_dom2_event_flag", sec_auto_dom2_event_flag + "|" + event_name); 110 | } else { 111 | this.setAttribute("sec_auto_dom2_event_flag", event_name); 112 | } 113 | old_event_handle.apply(this, arguments); 114 | }; 115 | 116 | function dom0_listener_hook(that, event_name) { 117 | let name = "<" + that.tagName + "> " + that.id + that.name + that.getAttribute("class") + "|" + event_name; 118 | // console.log(name); 119 | // 对每个事件设定最大的添加次数,防止无限触发,最大次数为5 120 | if (!window.add_even_listener_count_sec_auto.hasOwnProperty(name)) { 121 | window.add_even_listener_count_sec_auto[name] = 1; 122 | } else if (window.add_even_listener_count_sec_auto[name] == 5) { 123 | return ; 124 | } else { 125 | window.add_even_listener_count_sec_auto[name] += 1; 126 | } 127 | if 
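// As with the addEventListener hook above, stamp the element with a
// marker attribute listing its event names so TriggerDom2EventJS can
// later replay exactly the events that were registered: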
(that.hasAttribute("sec_auto_dom2_event_flag")) { 128 | let sec_auto_dom2_event_flag = that.getAttribute("sec_auto_dom2_event_flag"); 129 | that.setAttribute("sec_auto_dom2_event_flag", sec_auto_dom2_event_flag + "|" + event_name); 130 | } else { 131 | that.setAttribute("sec_auto_dom2_event_flag", event_name); 132 | } 133 | } 134 | 135 | // hook dom0 级事件监听 136 | Object.defineProperties(HTMLElement.prototype, { 137 | onclick: {set: function(newValue){onclick = newValue;dom0_listener_hook(this, "click");}}, 138 | onchange: {set: function(newValue){onchange = newValue;dom0_listener_hook(this, "change");}}, 139 | onblur: {set: function(newValue){onblur = newValue;dom0_listener_hook(this, "blur");}}, 140 | ondblclick: {set: function(newValue){ondblclick = newValue;dom0_listener_hook(this, "dbclick");}}, 141 | onfocus: {set: function(newValue){onfocus = newValue;dom0_listener_hook(this, "focus");}}, 142 | onkeydown: {set: function(newValue){onkeydown = newValue;dom0_listener_hook(this, "keydown");}}, 143 | onkeypress: {set: function(newValue){onkeypress = newValue;dom0_listener_hook(this, "keypress");}}, 144 | onkeyup: {set: function(newValue){onkeyup = newValue;dom0_listener_hook(this, "keyup");}}, 145 | onload: {set: function(newValue){onload = newValue;dom0_listener_hook(this, "load");}}, 146 | onmousedown: {set: function(newValue){onmousedown = newValue;dom0_listener_hook(this, "mousedown");}}, 147 | onmousemove: {set: function(newValue){onmousemove = newValue;dom0_listener_hook(this, "mousemove");}}, 148 | onmouseout: {set: function(newValue){onmouseout = newValue;dom0_listener_hook(this, "mouseout");}}, 149 | onmouseover: {set: function(newValue){onmouseover = newValue;dom0_listener_hook(this, "mouseover");}}, 150 | onmouseup: {set: function(newValue){onmouseup = newValue;dom0_listener_hook(this, "mouseup");}}, 151 | onreset: {set: function(newValue){onreset = newValue;dom0_listener_hook(this, "reset");}}, 152 | onresize: {set: function(newValue){onresize = newValue;dom0_listener_hook(this, "resize");}}, 153 | onselect: {set: function(newValue){onselect = newValue;dom0_listener_hook(this, "select");}}, 154 | onsubmit: {set: function(newValue){onsubmit = newValue;dom0_listener_hook(this, "submit");}}, 155 | onunload: {set: function(newValue){onunload = newValue;dom0_listener_hook(this, "unload");}}, 156 | onabort: {set: function(newValue){onabort = newValue;dom0_listener_hook(this, "abort");}}, 157 | onerror: {set: function(newValue){onerror = newValue;dom0_listener_hook(this, "error");}}, 158 | }) 159 | 160 | // hook window.open 161 | window.open = function (url) { 162 | console.log("trying to open window."); 163 | window.addLink(url, "OpenWindow"); 164 | } 165 | Object.defineProperty(window,"open",{"writable": false, "configurable": false}); 166 | 167 | // hook window close 168 | window.close = function() {console.log("trying to close page.");}; 169 | Object.defineProperty(window,"close",{"writable": false, "configurable": false}); 170 | 171 | // hook setTimeout 172 | //window.__originalSetTimeout = window.setTimeout; 173 | //window.setTimeout = function() { 174 | // arguments[1] = 0; 175 | // return window.__originalSetTimeout.apply(this, arguments); 176 | //}; 177 | //Object.defineProperty(window,"setTimeout",{"writable": false, "configurable": false}); 178 | 179 | // hook setInterval 时间设置为60秒 目的是减轻chrome的压力 180 | window.__originalSetInterval = window.setInterval; 181 | window.setInterval = function() { 182 | arguments[1] = 60000; 183 | return window.__originalSetInterval.apply(this, 
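// arguments[1] was forced to 60000 above, so every page timer fires
// at most once a minute while the crawler holds the tab open: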
arguments); 184 | }; 185 | Object.defineProperty(window,"setInterval",{"writable": false, "configurable": false}); 186 | 187 | // 劫持原生ajax,并对每个请求设置最大请求次数 188 | window.ajax_req_count_sec_auto = {}; 189 | XMLHttpRequest.prototype.__originalOpen = XMLHttpRequest.prototype.open; 190 | XMLHttpRequest.prototype.open = function(method, url, async, user, password) { 191 | // hook code 192 | this.url = url; 193 | this.method = method; 194 | let name = method + url; 195 | if (!window.ajax_req_count_sec_auto.hasOwnProperty(name)) { 196 | window.ajax_req_count_sec_auto[name] = 1 197 | } else { 198 | window.ajax_req_count_sec_auto[name] += 1 199 | } 200 | 201 | if (window.ajax_req_count_sec_auto[name] <= 10) { 202 | return this.__originalOpen(method, url, true, user, password); 203 | } 204 | } 205 | Object.defineProperty(XMLHttpRequest.prototype,"open",{"writable": false, "configurable": false}); 206 | 207 | XMLHttpRequest.prototype.__originalSend = XMLHttpRequest.prototype.send; 208 | XMLHttpRequest.prototype.send = function(data) { 209 | // hook code 210 | let name = this.method + this.url; 211 | if (window.ajax_req_count_sec_auto[name] <= 10) { 212 | return this.__originalSend(data); 213 | } 214 | } 215 | Object.defineProperty(XMLHttpRequest.prototype,"send",{"writable": false, "configurable": false}); 216 | 217 | XMLHttpRequest.prototype.__originalAbort = XMLHttpRequest.prototype.abort; 218 | XMLHttpRequest.prototype.abort = function() { 219 | // hook code 220 | } 221 | Object.defineProperty(XMLHttpRequest.prototype,"abort",{"writable": false, "configurable": false}); 222 | 223 | // 打乱数组的方法 224 | window.randArr = function (arr) { 225 | for (var i = 0; i < arr.length; i++) { 226 | var iRand = parseInt(arr.length * Math.random()); 227 | var temp = arr[i]; 228 | arr[i] = arr[iRand]; 229 | arr[iRand] = temp; 230 | } 231 | return arr; 232 | } 233 | 234 | window.sleep = function(time) { 235 | return new Promise((resolve) => setTimeout(resolve, time)); 236 | } 237 | 238 | Array.prototype.indexOf = function(val) { 239 | for (var i = 0; i < this.length; i++) { 240 | if (this[i] == val) return i; 241 | } 242 | return -1; 243 | }; 244 | 245 | Array.prototype.remove = function(val) { 246 | var index = this.indexOf(val); 247 | if (index > -1) { 248 | this.splice(index, 1); 249 | } 250 | }; 251 | 252 | const binding = window["addLink"]; 253 | window["addLink"] = async(...args) => { 254 | const me = window["addLink"]; 255 | let callbacks = me['callbacks']; 256 | if (!callbacks) { 257 | callbacks = new Map(); 258 | me['callbacks'] = callbacks; 259 | } 260 | const seq = (me['lastSeq'] || 0) + 1; 261 | me['lastSeq'] = seq; 262 | const promise = new Promise(fulfill => callbacks.set(seq, fulfill)); 263 | binding(JSON.stringify({name: "addLink", seq, args})); 264 | return promise; 265 | }; 266 | 267 | const bindingTest = window["Test"]; 268 | window["Test"] = async(...args) => { 269 | const me = window["Test"]; 270 | let callbacks = me['callbacks']; 271 | if (!callbacks) { 272 | callbacks = new Map(); 273 | me['callbacks'] = callbacks; 274 | } 275 | const seq = (me['lastSeq'] || 0) + 1; 276 | me['lastSeq'] = seq; 277 | const promise = new Promise(fulfill => callbacks.set(seq, fulfill)); 278 | binding(JSON.stringify({name: "Test", seq, args})); 279 | return promise; 280 | }; 281 | })(); 282 | ` 283 | 284 | const DeliverResultJS = ` 285 | (function deliverResult(name, seq, result) { 286 | window[name]['callbacks'].get(seq)(result); 287 | window[name]['callbacks'].delete(seq); 288 | })("%s", %v, "%s") 289 | ` 290 | 291 | 
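// ObserverJS (below) listens for the deprecated-but-ubiquitous DOM
// mutation events; every node inserted or modified after load reports
// its src/href attributes through the addLink binding, and inline
// "javascript:" values are eval'ed so the requests they trigger are
// captured as well. RemoveDOMListenerJS detaches these listeners
// before the tab shuts down.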
const ObserverJS = ` 292 | (function init_observer_sec_auto_b() { 293 | window.dom_listener_func_sec_auto = function (e) { 294 | let node = e.target; 295 | let nodeListSrc = node.querySelectorAll("[src]"); 296 | for (let each of nodeListSrc) { 297 | if (each.src) { 298 | window.addLink(each.src, "DOM"); 299 | let attrValue = each.getAttribute("src"); 300 | if (attrValue.toLocaleLowerCase().startsWith("javascript:")) { 301 | try { 302 | eval(attrValue.substring(11)); 303 | } 304 | catch {} 305 | } 306 | } 307 | } 308 | 309 | let nodeListHref = node.querySelectorAll("[href]"); 310 | nodeListHref = window.randArr(nodeListHref); 311 | for (let each of nodeListHref) { 312 | if (each.href) { 313 | window.addLink(each.href, "DOM"); 314 | let attrValue = each.getAttribute("href"); 315 | if (attrValue.toLocaleLowerCase().startsWith("javascript:")) { 316 | try { 317 | eval(attrValue.substring(11)); 318 | } 319 | catch {} 320 | } 321 | } 322 | } 323 | }; 324 | document.addEventListener('DOMNodeInserted', window.dom_listener_func_sec_auto, true); 325 | document.addEventListener('DOMSubtreeModified', window.dom_listener_func_sec_auto, true); 326 | document.addEventListener('DOMNodeInsertedIntoDocument', window.dom_listener_func_sec_auto, true); 327 | document.addEventListener('DOMAttrModified', window.dom_listener_func_sec_auto, true); 328 | })() 329 | ` 330 | 331 | const RemoveDOMListenerJS = ` 332 | (function remove_dom_listener() { 333 | document.removeEventListener('DOMNodeInserted', window.dom_listener_func_sec_auto, true); 334 | document.removeEventListener('DOMSubtreeModified', window.dom_listener_func_sec_auto, true); 335 | document.removeEventListener('DOMNodeInsertedIntoDocument', window.dom_listener_func_sec_auto, true); 336 | document.removeEventListener('DOMAttrModified', window.dom_listener_func_sec_auto, true); 337 | })() 338 | ` 339 | 340 | const NewFrameTemplate = ` 341 | (function sec_auto_new_iframe () { 342 | let frame = document.createElement("iframe"); 343 | frame.setAttribute("name", "%s"); 344 | frame.setAttribute("id", "%s"); 345 | frame.setAttribute("style", "display: none"); 346 | document.body.appendChild(frame); 347 | })() 348 | ` 349 | 350 | const TriggerInlineEventJS = ` 351 | (async function trigger_all_inline_event(){ 352 | let eventNames = ["onabort", "onblur", "onchange", "onclick", "ondblclick", "onerror", "onfocus", "onkeydown", "onkeypress", "onkeyup", "onload", "onmousedown", "onmousemove", "onmouseout", "onmouseover", "onmouseup", "onreset", "onresize", "onselect", "onsubmit", "onunload"]; 353 | for (let eventName of eventNames) { 354 | let event = eventName.replace("on", ""); 355 | let nodeList = document.querySelectorAll("[" + eventName + "]"); 356 | if (nodeList.length > 100) { 357 | nodeList = nodeList.slice(0, 100); 358 | } 359 | nodeList = window.randArr(nodeList); 360 | for (let node of nodeList) { 361 | await window.sleep(%f); 362 | let evt = document.createEvent('CustomEvent'); 363 | evt.initCustomEvent(event, false, true, null); 364 | try { 365 | node.dispatchEvent(evt); 366 | } 367 | catch {} 368 | } 369 | } 370 | })() 371 | ` 372 | 373 | const TriggerDom2EventJS = ` 374 | (async function trigger_all_dom2_custom_event() { 375 | function transmit_child(node, event, loop) { 376 | let _loop = loop + 1 377 | if (_loop > 4) { 378 | return; 379 | } 380 | if (node.nodeType === 1) { 381 | if (node.hasChildNodes) { 382 | let index = parseInt(Math.random()*node.children.length,10); 383 | try { 384 | node.children[index].dispatchEvent(event); 385 | } catch(e) {} 
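// Recurse into a bounded random sample of children (at most 5 wide,
// 4 deep) so one dispatch can also reach handlers living on
// descendants, without exploding on very large DOM trees: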
386 | let max = node.children.length>5?5:node.children.length;
387 | for (let count=0;count<max;count++) {
388 | transmit_child(node.children[count], event, _loop);
389 | }
390 | }
391 | }
392 | }
393 | 
394 | let nodes = document.querySelectorAll("[sec_auto_dom2_event_flag]");
395 | if (nodes.length > 200) {
396 | nodes = nodes.slice(0, 200);
397 | }
398 | nodes = window.randArr(nodes);
399 | for (let node of nodes) {
400 | let loop = 0;
401 | await window.sleep(%f);
402 | let event_name_list = node.getAttribute("sec_auto_dom2_event_flag").split("|");
403 | let event_name_set = new Set(event_name_list);
404 | event_name_list = [...event_name_set];
405 | for (let event_name of event_name_list) {
406 | let evt = document.createEvent('CustomEvent');
407 | evt.initCustomEvent(event_name, true, true, null);
408 | 
409 | if (event_name == "click" || event_name == "focus" || event_name == "mouseover" || event_name == "select") {
410 | transmit_child(node, evt, loop);
411 | }
412 | if ( (node.className && node.className.includes("close")) || (node.id && node.id.includes("close"))) {
413 | continue;
414 | }
415 | 
416 | try {
417 | node.dispatchEvent(evt);
418 | } catch(e) {}
419 | }
420 | }
421 | })()
422 | `
423 | 
424 | const TriggerJavascriptProtocol = `
425 | (async function click_all_a_tag_javascript(){
426 | let nodeListHref = document.querySelectorAll("[href]");
427 | nodeListHref = window.randArr(nodeListHref);
428 | for (let node of nodeListHref) {
429 | let attrValue = node.getAttribute("href");
430 | if (attrValue.toLocaleLowerCase().startsWith("javascript:")) {
431 | await window.sleep(%f);
432 | try {
433 | eval(attrValue.substring(11));
434 | }
435 | catch {}
436 | }
437 | }
438 | let nodeListSrc = document.querySelectorAll("[src]");
439 | nodeListSrc = window.randArr(nodeListSrc);
440 | for (let node of nodeListSrc) {
441 | let attrValue = node.getAttribute("src");
442 | if (attrValue.toLocaleLowerCase().startsWith("javascript:")) {
443 | await window.sleep(%f);
444 | try {
445 | eval(attrValue.substring(11));
446 | }
447 | catch {}
448 | }
449 | }
450 | })()
451 | `
452 | 
453 | const FormNodeClickJS = `
454 | (function(a) {
455 | try {
456 | a.click();
457 | return true;
458 | } catch(e) {
459 | return false;
460 | }
461 | })(%s)
462 | `
463 | 
464 | func Snippet(js string, f func(n *cdp.Node) string, sel string, n *cdp.Node, v ...interface{}) string {
465 | //return fmt.Sprintf(js, append([]interface{}{sel}, v...)...)
466 | return fmt.Sprintf(js, append([]interface{}{f(n)}, v...)...) 
467 | }
468 | 
469 | func CashX(flatten bool) func(*cdp.Node) string {
470 | return func(n *cdp.Node) string {
471 | if flatten {
472 | return fmt.Sprintf(`$x(%q)[0]`, n.FullXPath())
473 | }
474 | return fmt.Sprintf(`$x(%q)`, n.FullXPath())
475 | }
476 | }
477 | 
--------------------------------------------------------------------------------
/pkg/filter/smart_filter.go:
--------------------------------------------------------------------------------
1 | package filter
2 | 
3 | import (
4 | "crawlergo/pkg/config"
5 | "crawlergo/pkg/logger"
6 | "crawlergo/pkg/model"
7 | "crawlergo/pkg/tools"
9 | "regexp"
10 | "sort"
11 | "strings"
12 | "sync"
13 | 
14 | mapset "github.com/deckarep/golang-set"
15 | )
16 | 
17 | type SmartFilter struct {
18 | StrictMode bool
19 | SimpleFilter SimpleFilter
20 | filterLocationSet mapset.Set // locations of value-type (non-logical) parameters, marked once and filtered globally
21 | filterParamKeyRepeatCount sync.Map // how many times each query key-set has been seen
22 | filterParamKeySingleValues sync.Map // distinct values seen per key within one key-set
23 | filterPathParamKeySymbol sync.Map // how many times a key's value was type-marked under one path
24 | filterParamKeyAllValues sync.Map // distinct values seen per key across the whole run
25 | filterPathParamEmptyValues sync.Map // keys that appeared with empty values under one path
26 | filterParentPathValues sync.Map // distinct child path segments under one parent directory
27 | uniqueMarkedIds mapset.Set // unique IDs of marked requests, used for dedup
28 | }
29 | 
30 | const (
31 | MaxParentPathCount = 32 // max distinct child directories under one parent path before re-marking
32 | MaxParamKeySingleCount = 8 // max repeats of one URL's query key-set
33 | MaxParamKeyAllCount = 10 // max distinct values of one parameter name across all URLs in this run
34 | MaxPathParamEmptyCount = 10 // max count of empty-valued parameter names under one path
35 | MaxPathParamKeySymbolCount = 5 // once a parameter's value is type-marked this often under one path, the parameter is marked globally
36 | )
37 | 
38 | const (
39 | CustomValueMark = "{{Crawlergo}}"
40 | FixParamRepeatMark = "{{fix_param}}"
41 | FixPathMark = "{{fix_path}}"
42 | TooLongMark = "{{long}}"
43 | NumberMark = "{{number}}"
44 | ChineseMark = "{{chinese}}"
45 | UpperMark = "{{upper}}"
46 | LowerMark = "{{lower}}"
47 | UrlEncodeMark = "{{urlencode}}"
48 | UnicodeMark = "{{unicode}}"
49 | BoolMark = "{{bool}}"
50 | ListMark = "{{list}}"
51 | TimeMark = "{{time}}"
52 | MixAlphaNumMark = "{{mix_alpha_num}}"
53 | MixSymbolMark = "{{mix_symbol}}"
54 | MixNumMark = "{{mix_num}}"
55 | NoLowerAlphaMark = "{{no_lower}}"
56 | MixStringMark = "{{mix_str}}"
57 | )
58 | 
59 | var chineseRegex = regexp.MustCompile("[\u4e00-\u9fa5]+")
60 | var urlencodeRegex = regexp.MustCompile("(?:%[A-Fa-f0-9]{2,6})+")
61 | var unicodeRegex = regexp.MustCompile(`(?:\\u\w{4})+`)
62 | var onlyAlphaRegex = regexp.MustCompile("^[a-zA-Z]+$")
63 | var onlyAlphaUpperRegex = regexp.MustCompile("^[A-Z]+$")
64 | var alphaUpperRegex = regexp.MustCompile("[A-Z]+")
65 | var alphaLowerRegex = regexp.MustCompile("[a-z]+")
66 | var replaceNumRegex = regexp.MustCompile(`[0-9]+\.[0-9]+|\d+`)
67 | var onlyNumberRegex = regexp.MustCompile(`^[0-9]+$`)
68 | var numberRegex = regexp.MustCompile(`[0-9]+`)
69 | var OneNumberRegex = regexp.MustCompile(`[0-9]`)
70 | var numSymbolRegex = regexp.MustCompile(`\.|_|-`)
71 | var timeSymbolRegex = regexp.MustCompile(`-|:|\s`)
72 | var onlyAlphaNumRegex = regexp.MustCompile(`^[0-9a-zA-Z]+$`)
73 | var markedStringRegex = regexp.MustCompile(`^{{.+}}$`)
74 | var htmlReplaceRegex = regexp.MustCompile(`\.shtml|\.html|\.htm`)
75 | 
76 | func (s *SmartFilter) Init() {
77 | s.filterLocationSet = mapset.NewSet()
78 | s.filterParamKeyRepeatCount = sync.Map{}
79 | s.filterParamKeySingleValues = sync.Map{}
80 | s.filterPathParamKeySymbol = sync.Map{}
81 | s.filterParamKeyAllValues = sync.Map{}
82 | s.filterPathParamEmptyValues = sync.Map{}
83 | s.filterParentPathValues = sync.Map{}
84 | s.uniqueMarkedIds = mapset.NewSet()
85 | }
86 | 
87 | /**
88 | Smart deduplication.
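The pipeline: SimpleFilter first drops static resources, exact duplicates and foreign hosts; parameter names, values and path segments are then replaced by type marks such as {{number}} or {{time}}; the unique ID of the marked request is checked against earlier ones; finally, repeat-count statistics re-mark over-threshold parameters and the recomputed ID is checked once more.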
An optional strict mode tightens the value heuristics.
90 | 
91 | Returns true when the request should be filtered out.
92 | */
93 | func (s *SmartFilter) DoFilter(req *model.Request) bool {
94 | // first drop static resources, basic duplicates and foreign domains
95 | if s.SimpleFilter.DoFilter(req) {
96 | logger.Logger.Debugf("filter req by simplefilter: " + req.URL.RequestURI())
97 | return true
98 | }
99 | 
100 | // mark the request
101 | if req.Method == config.GET || req.Method == config.DELETE || req.Method == config.HEAD || req.Method == config.OPTIONS {
102 | s.getMark(req)
103 | } else if req.Method == config.POST || req.Method == config.PUT {
104 | s.postMark(req)
105 | } else {
106 | logger.Logger.Debug("unsupported method: " + req.Method)
107 | }
108 | 
109 | if req.Method == config.GET || req.Method == config.DELETE || req.Method == config.HEAD || req.Method == config.OPTIONS {
110 | s.repeatCountStatistic(req)
111 | }
112 | 
113 | // dedup the marked request
114 | uniqueId := req.Filter.UniqueId
115 | if s.uniqueMarkedIds.Contains(uniqueId) {
116 | logger.Logger.Debugf("filter req by uniqueMarkedIds 1: " + req.URL.RequestURI())
117 | return true
118 | }
119 | 
120 | // globally mark numeric-type parameters
121 | s.globalFilterLocationMark(req)
122 | 
123 | // then dedup the marked GET-like requests
124 | if req.Method == config.GET || req.Method == config.DELETE || req.Method == config.HEAD || req.Method == config.OPTIONS {
125 | // re-mark GET requests that exceed the thresholds
126 | s.overCountMark(req)
127 | 
128 | // recompute QueryMapId
129 | req.Filter.QueryMapId = s.getParamMapID(req.Filter.MarkedQueryMap)
130 | // recompute PathId
131 | req.Filter.PathId = s.getPathID(req.Filter.MarkedPath)
132 | } else {
133 | // recompute PostDataId
134 | req.Filter.PostDataId = s.getParamMapID(req.Filter.MarkedPostDataMap)
135 | }
136 | 
137 | // recompute the unique request ID
138 | req.Filter.UniqueId = s.getMarkedUniqueID(req)
139 | 
140 | // dedup once more with the new ID
141 | newUniqueId := req.Filter.UniqueId
142 | if s.uniqueMarkedIds.Contains(newUniqueId) {
143 | logger.Logger.Debugf("filter req by uniqueMarkedIds 2: " + req.URL.RequestURI())
144 | return true
145 | }
146 | 
147 | // record it in the result set
148 | s.uniqueMarkedIds.Add(newUniqueId)
149 | return false
150 | }
151 | 
152 | /**
153 | The query map is decoded automatically, so RawQuery is pre-marked before parsing.
154 | */
155 | func (s *SmartFilter) preQueryMark(rawQuery string) string {
156 | if chineseRegex.MatchString(rawQuery) {
157 | return chineseRegex.ReplaceAllString(rawQuery, ChineseMark)
158 | } else if urlencodeRegex.MatchString(rawQuery) {
159 | return urlencodeRegex.ReplaceAllString(rawQuery, UrlEncodeMark)
160 | } else if unicodeRegex.MatchString(rawQuery) {
161 | return unicodeRegex.ReplaceAllString(rawQuery, UnicodeMark)
162 | }
163 | return rawQuery
164 | }
165 | 
166 | /**
167 | Mark the parameters and path of a GET request.
168 | */
169 | func (s *SmartFilter) getMark(req *model.Request) {
170 | // pre-replacement before decoding
171 | todoURL := *(req.URL)
172 | todoURL.RawQuery = s.preQueryMark(todoURL.RawQuery)
173 | 
174 | // mark names, values and the path in turn
175 | queryMap := todoURL.QueryMap()
176 | queryMap = s.markParamName(queryMap)
177 | queryMap = s.markParamValue(queryMap, *req)
178 | markedPath := s.MarkPath(todoURL.Path)
179 | 
180 | // compute the unique IDs
181 | var queryKeyID string
182 | var queryMapID string
183 | if len(queryMap) != 0 {
184 | queryKeyID = s.getKeysID(queryMap)
185 | queryMapID = s.getParamMapID(queryMap)
186 | } else {
187 | queryKeyID = ""
188 | queryMapID = ""
189 | }
190 | pathID := s.getPathID(markedPath)
191 | 
192 | req.Filter.MarkedQueryMap = queryMap
193 | req.Filter.QueryKeysId = queryKeyID
194 | req.Filter.QueryMapId = queryMapID
195 | req.Filter.MarkedPath = markedPath
196 | req.Filter.PathId = pathID
197 | 
198 | // finally compute the unique ID of the marked request
199 | req.Filter.UniqueId = s.getMarkedUniqueID(req)
200 | }
201 | 
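// For intuition: after getMark, GET /item/123?id=456&name=abc is held
// as path /item/{{number}} with query {id: {{number}}, name: abc}, so
// requests differing only in numeric IDs collapse to one unique ID.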
202 | /**
203 | Mark the parameters and path of a POST request.
204 | */
205 | func (s *SmartFilter) postMark(req *model.Request) {
206 | postDataMap := req.PostDataMap()
207 | 
208 | postDataMap = s.markParamName(postDataMap)
209 | postDataMap = s.markParamValue(postDataMap, *req)
210 | markedPath := s.MarkPath(req.URL.Path)
211 | 
212 | // compute the unique IDs
213 | var postDataMapID string
214 | if len(postDataMap) != 0 {
215 | postDataMapID = s.getParamMapID(postDataMap)
216 | } else {
217 | postDataMapID = ""
218 | }
219 | pathID := s.getPathID(markedPath)
220 | 
221 | req.Filter.MarkedPostDataMap = postDataMap
222 | req.Filter.PostDataId = postDataMapID
223 | req.Filter.MarkedPath = markedPath
224 | req.Filter.PathId = pathID
225 | 
226 | // finally compute the unique ID of the marked request
227 | req.Filter.UniqueId = s.getMarkedUniqueID(req)
228 | }
229 | 
230 | /**
231 | Mark parameter names.
232 | */
233 | func (s *SmartFilter) markParamName(paramMap map[string]interface{}) map[string]interface{} {
234 | markedParamMap := map[string]interface{}{}
235 | for key, value := range paramMap {
236 | // keep pure-alpha names as-is
237 | if onlyAlphaRegex.MatchString(key) {
238 | markedParamMap[key] = value
239 | // overly long name
240 | } else if len(key) >= 32 {
241 | markedParamMap[TooLongMark] = value
242 | // replace the digits
243 | } else {
244 | key = replaceNumRegex.ReplaceAllString(key, NumberMark)
245 | markedParamMap[key] = value
246 | }
247 | }
248 | return markedParamMap
249 | }
250 | 
251 | /**
252 | Mark parameter values.
253 | */
254 | func (s *SmartFilter) markParamValue(paramMap map[string]interface{}, req model.Request) map[string]interface{} {
255 | markedParamMap := map[string]interface{}{}
256 | for key, value := range paramMap {
257 | switch value.(type) {
258 | case bool:
259 | markedParamMap[key] = BoolMark
260 | continue
261 | case []interface{}: // fix: a decoded JSON array has type []interface{}; the former case go/types.Slice could never match
262 | markedParamMap[key] = ListMark
263 | continue
264 | case float64:
265 | markedParamMap[key] = NumberMark
266 | continue
267 | }
268 | // only string values are handled below
269 | valueStr, ok := value.(string)
270 | if !ok {
271 | continue
272 | }
273 | // "Crawlergo" is the sentinel fill text: this position takes arbitrary values rather than logic flags, so record it for global filtering
274 | if strings.Contains(valueStr, "Crawlergo") {
275 | name := req.URL.Hostname() + req.URL.Path + req.Method + key
276 | s.filterLocationSet.Add(name)
277 | markedParamMap[key] = CustomValueMark
278 | // all uppercase letters
279 | } else if onlyAlphaUpperRegex.MatchString(valueStr) {
280 | markedParamMap[key] = UpperMark
281 | // value length >= 16
282 | } else if len(valueStr) >= 16 {
283 | markedParamMap[key] = TooLongMark
284 | // digits plus a few separator symbols only
285 | } else if onlyNumberRegex.MatchString(valueStr) || onlyNumberRegex.MatchString(numSymbolRegex.ReplaceAllString(valueStr, "")) {
286 | markedParamMap[key] = NumberMark
287 | // contains Chinese characters
288 | } else if chineseRegex.MatchString(valueStr) {
289 | markedParamMap[key] = ChineseMark
290 | // urlencode
291 | } else if urlencodeRegex.MatchString(valueStr) {
292 | markedParamMap[key] = UrlEncodeMark
293 | // unicode
294 | } else if unicodeRegex.MatchString(valueStr) {
295 | markedParamMap[key] = UnicodeMark
296 | // timestamp-like
297 | } else if onlyNumberRegex.MatchString(timeSymbolRegex.ReplaceAllString(valueStr, "")) {
298 | markedParamMap[key] = TimeMark
299 | // letters mixed with digits
300 | } else if onlyAlphaNumRegex.MatchString(valueStr) && numberRegex.MatchString(valueStr) {
301 | markedParamMap[key] = MixAlphaNumMark
302 | // contains special symbols
303 | } else if s.hasSpecialSymbol(valueStr) {
304 | markedParamMap[key] = MixSymbolMark
305 | // three or more digits: treat as a numeric parameter
306 | } else if b := OneNumberRegex.ReplaceAllString(valueStr, "0"); strings.Count(b, "0") >= 3 {
307 | markedParamMap[key] = MixNumMark
308 | // strict mode
309 | 
} else if s.StrictMode { 310 | // 无小写字母 311 | if !alphaLowerRegex.MatchString(valueStr) { 312 | markedParamMap[key] = NoLowerAlphaMark 313 | // 常见的值一般为 大写字母、小写字母、数字、下划线的任意组合,组合类型超过三种则视为伪静态 314 | } else { 315 | count := 0 316 | if alphaLowerRegex.MatchString(valueStr) { 317 | count += 1 318 | } 319 | if alphaUpperRegex.MatchString(valueStr) { 320 | count += 1 321 | } 322 | if numberRegex.MatchString(valueStr) { 323 | count += 1 324 | } 325 | if strings.Contains(valueStr, "_") || strings.Contains(valueStr, "-") { 326 | count += 1 327 | } 328 | if count >= 3 { 329 | markedParamMap[key] = MixStringMark 330 | } 331 | } 332 | } else { 333 | markedParamMap[key] = value 334 | } 335 | } 336 | return markedParamMap 337 | } 338 | 339 | /** 340 | 标记路径 341 | */ 342 | func (s *SmartFilter) MarkPath(path string) string { 343 | pathParts := strings.Split(path, "/") 344 | for index, part := range pathParts { 345 | if len(part) >= 32 { 346 | pathParts[index] = TooLongMark 347 | } else if onlyNumberRegex.MatchString(numSymbolRegex.ReplaceAllString(part, "")) { 348 | pathParts[index] = NumberMark 349 | } else if strings.HasSuffix(part, ".html") || strings.HasSuffix(part, ".htm") || strings.HasSuffix(part, ".shtml") { 350 | part = htmlReplaceRegex.ReplaceAllString(part, "") 351 | // 大写、小写、数字混合 352 | if numberRegex.MatchString(part) && alphaUpperRegex.MatchString(part) && alphaLowerRegex.MatchString(part) { 353 | pathParts[index] = MixAlphaNumMark 354 | // 纯数字 355 | } else if b := numSymbolRegex.ReplaceAllString(part, ""); onlyNumberRegex.MatchString(b) { 356 | pathParts[index] = NumberMark 357 | } 358 | // 含有特殊符号 359 | } else if s.hasSpecialSymbol(part) { 360 | pathParts[index] = MixSymbolMark 361 | } else if chineseRegex.MatchString(part) { 362 | pathParts[index] = ChineseMark 363 | } else if unicodeRegex.MatchString(part) { 364 | pathParts[index] = UnicodeMark 365 | } else if onlyAlphaUpperRegex.MatchString(part) { 366 | pathParts[index] = UpperMark 367 | // 均为数字和一些符号组成 368 | } else if b := numSymbolRegex.ReplaceAllString(part, ""); onlyNumberRegex.MatchString(b) { 369 | pathParts[index] = NumberMark 370 | // 数字出现的次数超过3,视为伪静态path 371 | } else if b := OneNumberRegex.ReplaceAllString(part, "0"); strings.Count(b, "0") > 3 { 372 | pathParts[index] = MixNumMark 373 | } 374 | } 375 | newPath := strings.Join(pathParts, "/") 376 | return newPath 377 | } 378 | 379 | /** 380 | 全局数值型参数过滤 381 | */ 382 | func (s *SmartFilter) globalFilterLocationMark(req *model.Request) { 383 | name := req.URL.Hostname() + req.URL.Path + req.Method 384 | if req.Method == config.GET || req.Method == config.DELETE || req.Method == config.HEAD || req.Method == config.OPTIONS { 385 | for key := range req.Filter.MarkedQueryMap { 386 | name += key 387 | if s.filterLocationSet.Contains(name) { 388 | req.Filter.MarkedQueryMap[key] = CustomValueMark 389 | } 390 | } 391 | } else if req.Method == config.POST || req.Method == config.PUT { 392 | for key := range req.Filter.MarkedPostDataMap { 393 | name += key 394 | if s.filterLocationSet.Contains(name) { 395 | req.Filter.MarkedPostDataMap[key] = CustomValueMark 396 | } 397 | } 398 | } 399 | } 400 | 401 | /** 402 | 进行全局重复参数名、参数值、路径的统计标记 403 | 之后对超过阈值的部分再次打标记 404 | */ 405 | func (s *SmartFilter) repeatCountStatistic(req *model.Request) { 406 | queryKeyId := req.Filter.QueryKeysId 407 | pathId := req.Filter.PathId 408 | if queryKeyId != "" { 409 | // 所有参数名重复数量统计 410 | if v, ok := s.filterParamKeyRepeatCount.Load(queryKeyId); ok { 411 | s.filterParamKeyRepeatCount.Store(queryKeyId, v.(int)+1) 412 | } 
/**
Collect global statistics on repeated parameter names, parameter values and
paths; the parts that exceed the thresholds are re-marked afterwards.
*/
func (s *SmartFilter) repeatCountStatistic(req *model.Request) {
    queryKeyId := req.Filter.QueryKeysId
    pathId := req.Filter.PathId
    if queryKeyId != "" {
        // Count how often this exact set of parameter names repeats.
        if v, ok := s.filterParamKeyRepeatCount.Load(queryKeyId); ok {
            s.filterParamKeyRepeatCount.Store(queryKeyId, v.(int)+1)
        } else {
            s.filterParamKeyRepeatCount.Store(queryKeyId, 1)
        }

        for key, value := range req.Filter.MarkedQueryMap {
            // Track the distinct values seen for each parameter of this URL.
            paramQueryKey := queryKeyId + key

            if set, ok := s.filterParamKeySingleValues.Load(paramQueryKey); ok {
                set := set.(mapset.Set)
                set.Add(value)
            } else {
                s.filterParamKeySingleValues.Store(paramQueryKey, mapset.NewSet(value))
            }

            // Track the distinct values of this parameter name across all URLs in this run.
            if _, ok := s.filterParamKeyAllValues.Load(key); !ok {
                s.filterParamKeyAllValues.Store(key, mapset.NewSet(value))
            } else {
                if v, ok := s.filterParamKeyAllValues.Load(key); ok {
                    set := v.(mapset.Set)
                    if !set.Contains(value) {
                        set.Add(value)
                    }
                }
            }

            // If the value is empty, count the empty-valued parameter names under this path.
            if value == "" {
                if _, ok := s.filterPathParamEmptyValues.Load(pathId); !ok {
                    s.filterPathParamEmptyValues.Store(pathId, mapset.NewSet(key))
                } else {
                    if v, ok := s.filterPathParamEmptyValues.Load(pathId); ok {
                        set := v.(mapset.Set)
                        if !set.Contains(key) {
                            set.Add(key)
                        }
                    }
                }
            }

            pathIdKey := pathId + key
            // Count how often the dedup mark shows up in this parameter's values under this path.
            if v, ok := s.filterPathParamKeySymbol.Load(pathIdKey); ok {
                if markedStringRegex.MatchString(value.(string)) {
                    s.filterPathParamKeySymbol.Store(pathIdKey, v.(int)+1)
                }
            } else {
                s.filterPathParamKeySymbol.Store(pathIdKey, 1)
            }

        }
    }

    // Count this path segment's variants relative to its parent directory;
    // when a file extension is present, let common script suffixes pass.
    if req.URL.ParentPath() == "" || s.inCommonScriptSuffix(req.URL.FileExt()) {
        return
    }

    parentPathId := tools.StrMd5(req.URL.ParentPath())
    currentPath := strings.Replace(req.Filter.MarkedPath, req.URL.ParentPath(), "", -1)
    if _, ok := s.filterParentPathValues.Load(parentPathId); !ok {
        s.filterParentPathValues.Store(parentPathId, mapset.NewSet(currentPath))
    } else {
        if v, ok := s.filterParentPathValues.Load(parentPathId); ok {
            set := v.(mapset.Set)
            if !set.Contains(currentPath) {
                set.Add(currentPath)
            }
        }
    }
}
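// Editor's sketch: a hypothetical helper, not in the original source, shown
// only to make the Load/Store counting idiom above explicit. Note that the
// Load-then-Store pair is not atomic; if the filter were ever used from
// multiple goroutines, sync.Map's LoadOrStore would be the safer primitive.
func (s *SmartFilter) incrementParamKeyCount(queryKeyId string) {
    if v, ok := s.filterParamKeyRepeatCount.Load(queryKeyId); ok {
        s.filterParamKeyRepeatCount.Store(queryKeyId, v.(int)+1)
    } else {
        s.filterParamKeyRepeatCount.Store(queryKeyId, 1)
    }
}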
/**
After the repeat statistics, re-mark the parts that exceed the thresholds.
*/
func (s *SmartFilter) overCountMark(req *model.Request) {
    queryKeyId := req.Filter.QueryKeysId
    pathId := req.Filter.PathId
    // Only when query keys are present.
    if req.Filter.QueryKeysId != "" {
        // If this URL's parameter-name set repeats more often than the threshold
        // and a parameter has more than three distinct values, mark it.
        if v, ok := s.filterParamKeyRepeatCount.Load(queryKeyId); ok && v.(int) > MaxParamKeySingleCount {
            for key := range req.Filter.MarkedQueryMap {
                paramQueryKey := queryKeyId + key
                if set, ok := s.filterParamKeySingleValues.Load(paramQueryKey); ok {
                    set := set.(mapset.Set)
                    if set.Cardinality() > 3 {
                        req.Filter.MarkedQueryMap[key] = FixParamRepeatMark
                    }
                }
            }
        }

        for key := range req.Filter.MarkedQueryMap {
            // Across all URLs, if a parameter's distinct values exceed the
            // threshold, mark it for deduplication.
            if paramKeySet, ok := s.filterParamKeyAllValues.Load(key); ok {
                paramKeySet := paramKeySet.(mapset.Set)
                if paramKeySet.Cardinality() > MaxParamKeyAllCount {
                    req.Filter.MarkedQueryMap[key] = FixParamRepeatMark
                }
            }

            pathIdKey := pathId + key
            // If the dedup mark appears in this path's GET parameter values more
            // often than the threshold, mark the parameter globally for this path.
            if v, ok := s.filterPathParamKeySymbol.Load(pathIdKey); ok && v.(int) > MaxPathParamKeySymbolCount {
                req.Filter.MarkedQueryMap[key] = FixParamRepeatMark
            }
        }

        // Handle paths whose number of empty-valued parameters exceeds the
        // threshold, e.g. pseudo-static URLs such as http://bang.360.cn/?chu_xiu
        if v, ok := s.filterPathParamEmptyValues.Load(pathId); ok {
            set := v.(mapset.Set)
            if set.Cardinality() > MaxPathParamEmptyCount {
                newMarkerQueryMap := map[string]interface{}{}
                for key, value := range req.Filter.MarkedQueryMap {
                    if value == "" {
                        newMarkerQueryMap[FixParamRepeatMark] = ""
                    } else {
                        newMarkerQueryMap[key] = value
                    }
                }
                req.Filter.MarkedQueryMap = newMarkerQueryMap
            }
        }
    }

    // Handle a pseudo-static current path segment.
    if req.URL.ParentPath() == "" || s.inCommonScriptSuffix(req.URL.FileExt()) {
        return
    }
    parentPathId := tools.StrMd5(req.URL.ParentPath())
    if set, ok := s.filterParentPathValues.Load(parentPathId); ok {
        set := set.(mapset.Set)
        if set.Cardinality() > MaxParentPathCount {
            if strings.HasSuffix(req.URL.ParentPath(), "/") {
                req.Filter.MarkedPath = req.URL.ParentPath() + FixPathMark
            } else {
                req.Filter.MarkedPath = req.URL.ParentPath() + "/" + FixPathMark
            }
        }
    }
}

/**
Compute the unique request ID after marking.
*/
func (s *SmartFilter) getMarkedUniqueID(req *model.Request) string {
    var paramId string
    if req.Method == config.GET || req.Method == config.DELETE || req.Method == config.HEAD || req.Method == config.OPTIONS {
        paramId = req.Filter.QueryMapId
    } else {
        paramId = req.Filter.PostDataId
    }

    uniqueStr := req.Method + paramId + req.Filter.PathId + req.URL.Host
    if req.RedirectionFlag {
        uniqueStr += "Redirection"
    }
    if req.URL.Path == "/" && req.URL.RawQuery == "" && req.URL.Scheme == "https" {
        uniqueStr += "https"
    }

    if req.URL.Fragment != "" && strings.HasPrefix(req.URL.Fragment, "/") {
        uniqueStr += req.URL.Fragment
    }
    return tools.StrMd5(uniqueStr)
}

/**
Compute the unique ID of the marked request parameter keys.
*/
func (s *SmartFilter) getKeysID(dataMap map[string]interface{}) string {
    var keys []string
    var idStr string
    for key := range dataMap {
        keys = append(keys, key)
    }
    sort.Strings(keys)
    for _, key := range keys {
        idStr += key
    }
    return tools.StrMd5(idStr)
}

/**
Compute the unique ID of the marked request parameters.
*/
func (s *SmartFilter) getParamMapID(dataMap map[string]interface{}) string {
    var keys []string
    var idStr string
    var markReplaceRegex = regexp.MustCompile(`{{.+}}`)
    for key := range dataMap {
        keys = append(keys, key)
    }
    sort.Strings(keys)
    for _, key := range keys {
        value := dataMap[key]
        idStr += key
        if value, ok := value.(string); ok {
            idStr += markReplaceRegex.ReplaceAllString(value, "{{mark}}")
        }
    }
    return tools.StrMd5(idStr)
}

/**
Compute the unique ID of the marked path.
*/
func (s *SmartFilter) getPathID(path string) string {
    return tools.StrMd5(path)
}

/**
Report whether the string contains any of the following special symbols.
*/
func (s *SmartFilter) hasSpecialSymbol(str string) bool {
    symbolList := []string{"{", "}", " ", "|", "#", "@", "$", "*", ",", "<", ">", "/", "?", "\\", "+", "="}
    for _, sym := range symbolList {
        if strings.Contains(str, sym) {
            return true
        }
    }
    return false
}

func (s *SmartFilter) inCommonScriptSuffix(suffix string) bool {
    for _, value := range config.ScriptSuffix {
        if value == suffix {
            return true
        }
    }
    return false
}
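// Editor's note (illustrative, not in the original source): the dedup pipeline
// is mark -> count -> re-mark -> hash. Requests that differ only in numeric
// values collapse to one UniqueId because both the path segments and the
// parameter values are replaced by placeholder marks (strings of the form
// "{{...}}", cf. markedStringRegex and markReplaceRegex) before hashing.
// For example,
//
//	GET http://example.com/article/123?id=42
//	GET http://example.com/article/456?id=99
//
// both mark to /article/{{number}} with id -> {{number}} (assuming NumberMark
// renders as "{{number}}"), so getMarkedUniqueID hashes the same string for
// both and the second request is dropped as a duplicate.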
-------------------------------------------------------------------------------- /LICENSE: --------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users.
60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 
122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 
246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. 
A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 
360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 
421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 
476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. 
You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 
583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | <one line to give the program's name and a brief idea of what it does.> 635 | Copyright (C) <year> <name of author> 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see <https://www.gnu.org/licenses/>. 649 | 650 | Also add information on how to contact you by electronic and paper mail.
651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | <program> Copyright (C) <year> <name of author> 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | <https://www.gnu.org/licenses/>. 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | <https://www.gnu.org/philosophy/why-not-lgpl.html>. 675 | --------------------------------------------------------------------------------