├── .gitignore
├── 2020.02.28_17-45-39.html
├── Auto_man.py
├── README.md
└── xlog.log
/.gitignore:
--------------------------------------------------------------------------------
.DS_Store

--------------------------------------------------------------------------------
/Auto_man.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: J0k3r
# @Date: 2020-02-28 00:39:51

import multiprocessing
import os
import queue
import subprocess
import sys
import time

import requests as req
import simplejson
from colorama import Back, Fore, Style, init

init()  # initialize colorama so the ANSI colors also render on Windows
req.packages.urllib3.disable_warnings()  # silence InsecureRequestWarning from verify=False below

'''
Configuration parameters
'''
log_name = "xlog.log"
xray_proxy = "127.0.0.1:6666"
close_request = 0  # reserved flag, currently unused
firstStart = 1
xray_output = "{}.html".format(time.strftime("%Y.%m.%d_%H-%M-%S", time.localtime()))
xray_proxies = {
    'http': 'http://127.0.0.1:6666',
    'https': 'http://127.0.0.1:6666',
}

configure = {
    "chromium_path": "/Users/xc/Downloads/auto-vuln/chrome-mac/Chromium.app/Contents/MacOS/Chromium",  # path to Chromium
    "crawlergo_path": "/Users/xc/Downloads/auto-vuln/crawlergo",  # path to the crawlergo executable
    "xray_path": "/Users/xc/Downloads/xray/xray_darwin_amd64",  # path to the xray executable
}
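
# Note: the three paths above are specific to the author's machine; point them
# at your local Chromium, crawlergo, and xray binaries before running.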

args = {
    "xray_args": ["webscan", "--listen", xray_proxy, "--html-output", xray_output],
    "crawlergo_args": ["-c", configure["chromium_path"], "-t", "10", "-f", "smart", "--fuzz-path", "--output-mode", "json"],
    # "crawlergo_args": ["-c", configure["chromium_path"], "--push-to-proxy", "http://127.0.0.1:6666", "-t", "10", "-f", "smart", "--fuzz-path", "--output-mode", "json"],
}
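
# Two ways to feed xray: by default this script parses crawlergo's JSON output
# and replays each request through xray's listener itself (sendRequests below);
# the commented-out variant instead has crawlergo push requests straight to the
# proxy via --push-to-proxy, bypassing the replay step.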

template = "===> [{}] - - \"{}\" {}"
time_color = Fore.YELLOW + "{}" + Style.RESET_ALL
info_color = Fore.BLUE + "{}" + Style.RESET_ALL
err_color = Fore.RED + "{}" + Style.RESET_ALL
get_color = Fore.GREEN + "{}" + Style.RESET_ALL
msg_color = Fore.CYAN + "{}" + Style.RESET_ALL

def getTime():
    return time.strftime("%Y-%m-%d-%H:%M:%S", time.localtime())

def getCtime():
    msg_time = getTime()
    return time_color.format(msg_time)

def Xlog(func):
    '''
    Logging decorator: appends every printed message to the log file.
    '''
    def wrapper(*args):  # renamed from the shadowing inner "Xlog"
        with open(log_name, 'a') as log_file:
            log_file.write(template.format(func.__name__, args[0], getTime()) + "\n")
        func(*args)
    return wrapper

class Cprint():
    '''
    Colored console output, mirrored to the log file via @Xlog.
    '''
    @staticmethod
    @Xlog
    def info(string):
        print(template.format(info_color.format("info"), msg_color.format(string), getCtime()))

    @staticmethod
    @Xlog
    def err(string):
        print(template.format(err_color.format("error"), msg_color.format(string), getCtime()))

    @staticmethod
    @Xlog
    def get(string):
        print(template.format(get_color.format("get"), msg_color.format(string), getCtime()))

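# crawlergo with --output-mode json prints its crawl results as a JSON blob
# after the "--[Mission Complete]--" marker on stdout; runCrawlergo reads the
# process output to EOF, cuts at that marker, and parses the request list.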
def runCrawlergo(command, request_queue):
    '''
    Run crawlergo and queue every request it discovered.
    '''
    try:
        res = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        cmd_output = bytes()
        for line in res.stdout:  # read to EOF so the JSON tail is not lost when the process exits
            line = line.strip()
            if line:
                # print(line)
                cmd_output += line
        try:
            crawler_data = simplejson.loads(cmd_output.decode().split("--[Mission Complete]--")[1])
        except Exception as e:
            Cprint.err(e)
            return
        request_list = crawler_data["req_list"]
        # sub_domain_list = crawler_data["sub_domain_list"]  # subdomains
        for request in request_list:  # renamed from "req" to avoid shadowing the requests import
            request_queue.put(request)
        Cprint.info("Crawlergo Done")
        sendRequests(request_queue)
    except Exception as e:
        Cprint.err(e)
        return

def runXray(command):
    '''
    Run xray in the background and relay its console output.
    '''
    try:
        Cprint.info("Xray Started")
        res = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        for line in res.stdout:  # stream output until xray exits
            line = line.strip()
            if line:
                print(line.decode())
    except Exception as e:
        Cprint.err(e)
        return

def urlCheck(url):
    '''
    Liveness check: only an HTTP 200 response counts as reachable.
    '''
    try:
        res = req.get(url, timeout=3, verify=False)
        if res.status_code == 200:
            return True
    except Exception as e:
        Cprint.err("Problem with {}".format(url))
        Cprint.err(e)
    return False

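# Each queued item is one request discovered by crawlergo: a dict carrying
# "url", "headers", "method" and "data". Replaying it with proxies=xray_proxies
# routes the request through xray's listener on 127.0.0.1:6666, which scans it
# passively.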
def sendRequests(request_queue):
    '''
    Replay every queued request through the xray proxy.
    '''
    Cprint.info("Send Requests")
    proxies = xray_proxies
    while True:
        if request_queue.empty():
            return
        request_data = request_queue.get()
        req_url = request_data['url']
        Cprint.info("Processing {}, Remaining request: {}".format(req_url, request_queue.qsize()))
        req_header = request_data['headers']
        req_method = request_data['method']
        req_data = request_data['data']
        try:
            if req_method == 'GET':
                req.get(req_url, headers=req_header, proxies=proxies, timeout=10, verify=False)
            elif req_method == 'POST':
                req.post(req_url, headers=req_header, data=req_data, proxies=proxies, timeout=10, verify=False)
        except Exception:
            continue  # a failed replay should not abort the rest of the queue

def start(url):
    '''
    Scan one target: start xray once in the background, then crawl and replay.
    '''
    global firstStart
    request_queue = queue.Queue()
    cmd_xray = [configure["xray_path"]] + args["xray_args"]  # xray arguments
    if firstStart == 1:  # start xray only for the first target
        background_process = multiprocessing.Process(target=runXray, args=(cmd_xray,))
        background_process.daemon = False
        background_process.start()
        firstStart = 0
    Cprint.info("Target: " + url)
    Cprint.info("Starting crawlergo")
    cmd_crawlergo = [configure["crawlergo_path"]] + args["crawlergo_args"]  # crawlergo arguments
    cmd_crawlergo.append(url)
    runCrawlergo(cmd_crawlergo, request_queue)
    Cprint.get(url + " Done")

if __name__ == "__main__":
    '''
    Usage: python3 Auto_man.py [url/url_file]
    '''
    if len(sys.argv) == 2:
        para = sys.argv[1]
        if os.path.isfile(para):
            with open(para, 'r') as urlfile:
                for url in urlfile.readlines():
                    url = url.strip()
                    if url and urlCheck(url):  # skip blank lines
                        start(url)
        else:
            if urlCheck(para):
                start(para)
    else:
        print("help: python3 Auto_man.py [url/url_file]")
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
Configure the paths and parameters, and this script chains crawlergo and xray for one-command use, handy for vulnerability scanning.

[](https://asciinema.org/a/ls9BaofCImcbJNRGooSGNaJYy)
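
A minimal usage sketch (assuming the `configure` paths in `Auto_man.py` already point at your local Chromium, crawlergo, and xray; `example.com` and `urls.txt` are placeholders):

```
python3 Auto_man.py http://example.com/
python3 Auto_man.py urls.txt
```

With a file argument, every non-empty line is treated as one target URL; xray writes its findings to a timestamped HTML report in the working directory.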
--------------------------------------------------------------------------------
/xlog.log:
--------------------------------------------------------------------------------
===> [info] - - "Target: http://127.0.0.1" - - 2020-02-28-01:45:35
===> [info] - - "Starting crawlergo" - - 2020-02-28-01:45:35
===> [info] - - "Xray Started" - - 2020-02-28-01:45:35
===> [info] - - "Crawlergo Done" - - 2020-02-28-01:45:41
===> [info] - - "Send Requests" - - 2020-02-28-01:45:41
===> [info] - - "Processing http://127.0.0.1/, Remaining request: 2" - - 2020-02-28-01:45:41
===> [info] - - "Processing https://127.0.0.1/, Remaining request: 1" - - 2020-02-28-01:45:41
===> [info] - - "Processing http://127.0.0.1/index, Remaining request: 0" - - 2020-02-28-01:45:41
===> [get] - - "http://127.0.0.1 Done" - - 2020-02-28-01:45:41
===> [info] - - "Target: http://127.0.0.1" 2020-02-28-01:46:34
===> [info] - - "Starting crawlergo" 2020-02-28-01:46:34
===> [info] - - "Xray Started" 2020-02-28-01:46:34
===> [info] - - "Crawlergo Done" 2020-02-28-01:46:39
===> [info] - - "Send Requests" 2020-02-28-01:46:39
===> [info] - - "Processing http://127.0.0.1/, Remaining request: 2" 2020-02-28-01:46:39
===> [info] - - "Processing https://127.0.0.1/, Remaining request: 1" 2020-02-28-01:46:39
===> [info] - - "Processing http://127.0.0.1/index, Remaining request: 0" 2020-02-28-01:46:39
===> [get] - - "http://127.0.0.1 Done" 2020-02-28-01:46:39
===> [info] - - "Target: http://127.0.0.1" 2020-02-28-02:06:10
===> [info] - - "Starting crawlergo" 2020-02-28-02:06:10
===> [info] - - "Xray Started" 2020-02-28-02:06:10
===> [info] - - "Crawlergo Done" 2020-02-28-02:06:15
===> [info] - - "Send Requests" 2020-02-28-02:06:15
===> [info] - - "Processing http://127.0.0.1/, Remaining request: 2" 2020-02-28-02:06:15
===> [info] - - "Processing https://127.0.0.1/, Remaining request: 1" 2020-02-28-02:06:15
===> [info] - - "Processing http://127.0.0.1/index, Remaining request: 0" 2020-02-28-02:06:15
===> [get] - - "http://127.0.0.1 Done" 2020-02-28-02:06:15
===> [info] - - "Target: http://127.0.0.1" 2020-02-28-17:45:39
===> [info] - - "Starting crawlergo" 2020-02-28-17:45:39
===> [info] - - "Xray Started" 2020-02-28-17:45:39
===> [info] - - "Crawlergo Done" 2020-02-28-17:45:44
===> [info] - - "Send Requests" 2020-02-28-17:45:44
===> [info] - - "Processing http://127.0.0.1/, Remaining request: 2" 2020-02-28-17:45:44
===> [info] - - "Processing https://127.0.0.1/, Remaining request: 1" 2020-02-28-17:45:44
===> [info] - - "Processing http://127.0.0.1/index, Remaining request: 0" 2020-02-28-17:45:44
===> [get] - - "http://127.0.0.1 Done" 2020-02-28-17:45:44

--------------------------------------------------------------------------------