├── .gitignore ├── ARL └── ArlScan.py ├── CScan └── CScan.py ├── Hx_config.py ├── JSmessage └── jsfinder │ └── JSFinder.py ├── LICENSE ├── OneForAll └── oneforallMain.py ├── README-ZH.md ├── README.md ├── ServerJiang └── jiangMain.py ├── Subfinder └── subfinderMain.py ├── Sublist3r ├── Sublist3rMain.py ├── requirements.txt ├── subbrute │ ├── __init__.py │ ├── names.txt │ ├── resolvers.txt │ └── subbrute.py └── sublist3r.py ├── Useragent.json ├── Xray └── pppXray.py ├── advancedModelFile.html ├── base.py ├── communityModelFile.html ├── crawlergo └── crawlergoMain.py ├── dict ├── dns_servers.txt ├── next_sub.txt ├── next_sub_full.txt ├── subnames.txt ├── subnames_all_5_letters.txt └── subnames_full.txt ├── images ├── 1.png ├── 2.png ├── 3.png ├── Architecture.png ├── Praise.png ├── image-20210817235656344.png ├── image-20210817235844858.png ├── image-20210818003323362.png ├── image-20210818003406757.png ├── image-20210818003639711.png └── image-20210818010542320.png ├── pipei.py ├── requirements.txt ├── scan.py ├── subDomainsBrute └── subDomainsBruteMain.py ├── test.py └── waf.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | .idea 7 | # C extensions 8 | *.so 9 | /venv 10 | /save 11 | /tmp 12 | bad_dns_servers.txt 13 | errors.log 14 | config.yaml 15 | Hx_config.py 16 | debug.log 17 | # Distribution / packaging 18 | .Python 19 | migrations/ 20 | build/ 21 | develop-eggs/ 22 | dist/ 23 | downloads/ 24 | eggs/ 25 | .eggs/ 26 | lib/ 27 | lib64/ 28 | parts/ 29 | sdist/ 30 | var/ 31 | wheels/ 32 | pip-wheel-metadata/ 33 | share/python-wheels/ 34 | *.egg-info/ 35 | .installed.cfg 36 | *.egg 37 | MANIFEST 38 | 39 | # PyInstaller 40 | # Usually these files are written by a python script from a template 41 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 42 | *.manifest 43 | *.spec 44 | 45 | # Installer logs 46 | pip-log.txt 47 | pip-delete-this-directory.txt 48 | 49 | # Unit test / coverage reports 50 | htmlcov/ 51 | .tox/ 52 | .nox/ 53 | .coverage 54 | .coverage.* 55 | .cache 56 | nosetests.xml 57 | coverage.xml 58 | *.cover 59 | *.py,cover 60 | .hypothesis/ 61 | .pytest_cache/ 62 | 63 | # Translations 64 | *.mo 65 | *.pot 66 | 67 | # Django stuff: 68 | *.log 69 | local_settings.py 70 | db.sqlite3 71 | db.sqlite3-journal 72 | 73 | # Flask stuff: 74 | instance/ 75 | .webassets-cache 76 | 77 | # Scrapy stuff: 78 | .scrapy 79 | 80 | # Sphinx documentation 81 | docs/_build/ 82 | 83 | # PyBuilder 84 | target/ 85 | 86 | # Jupyter Notebook 87 | .ipynb_checkpoints 88 | 89 | # IPython 90 | profile_default/ 91 | ipython_config.py 92 | 93 | # pyenv 94 | .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow
104 | __pypackages__/
105 | 
106 | # Celery stuff
107 | celerybeat-schedule
108 | celerybeat.pid
109 | 
110 | # SageMath parsed files
111 | *.sage.py
112 | 
113 | # Environments
114 | .env
115 | .venv
116 | env/
117 | venv/
118 | ENV/
119 | env.bak/
120 | venv.bak/
121 | 
122 | # Spyder project settings
123 | .spyderproject
124 | .spyproject
125 | 
126 | # Rope project settings
127 | .ropeproject
128 | 
129 | # mkdocs documentation
130 | /site
131 | 
132 | # mypy
133 | .mypy_cache/
134 | .dmypy.json
135 | dmypy.json
136 | 
137 | # Pyre type checker
138 | .pyre/
139 | 
--------------------------------------------------------------------------------
/ARL/ArlScan.py:
--------------------------------------------------------------------------------
1 | # -*- coding: UTF-8 -*-
2 | import requests
3 | import Hx_config
4 | import json
5 | import base
6 | import time
7 | 
8 | 
9 | class Scan(object):
10 |     def __init__(self, name='', targets_list=''):
11 |         self._ = ''
12 |         self.targets = ''
13 |         self.name = base.url_http_delete(name)
14 |         self._list = targets_list
15 |         self.Prevent_duplicate_scanning()
16 |         self.headers = {
17 |             "token": Hx_config.API_KEY,
18 |             "Content-type": "application/json",
19 |             'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36'
20 |         }
21 |         self.proxy = {
22 |             'http':'http://127.0.0.1:8080',
23 |             'https':'http://127.0.0.1:8080'
24 |         }
25 | 
26 | 
27 |     def make_targets(self, __list):  # 获取发送给ARL服务器特定格式的targets
28 |         self.targets = "\n".join(list(__list))
29 |         print(f"{Hx_config.green}ARL will add{Hx_config.end}")
30 |         print(f"{Hx_config.green}{list(__list)}{Hx_config.end}")
31 | 
32 | 
33 |     def Prevent_duplicate_scanning(self, delete_signal=False):  # 防止多次对ARL服务器add同一个地址
34 |         self._ = set(map(base.url_http_delete, self._list))
35 |         __file_name = f'save\\saveARL\\{self.name}.txt'
36 | 
37 |         if delete_signal is True:  # 如果ARL任务没有成功下发,就把本次写入文件的url再删掉
38 |             lines = open(__file_name, 'r').readlines()
39 |             __ = [line for line in lines if line.strip() not in self._]
40 |             with open(__file_name, 'w') as f2:
41 |                 for i in __:
42 |                     f2.write(i)
43 |             return
44 | 
45 |         try:
46 |             with open(__file_name, 'r') as f:  # 有文件,和此次即将add的目标进行对比,把重复的去除,没有的继续添加扫描
47 |                 print(__file_name)
48 |                 lines = f.readlines()
49 |                 self._ = [item for item in self._ if item not in [line.strip() for line in lines]]
50 |                 print(self._)
51 |         except:
52 |             pass
53 | 
54 |         with open(__file_name, 'a+') as f1:
55 |             print(f"{Hx_config.green}ARL新增{len(self._)}个domain{Hx_config.end}")
56 |             for i in self._:
57 |                 f1.write(i + '\n')
58 |         self.make_targets(self._)
59 | 
60 | 
61 |     # 添加任务
62 |     def add_task(self):
63 |         url = Hx_config.arl_url_Path + '/api/task/'
64 |         data = {"name": f"{self.name}", "target": f"{self.targets}", "domain_brute_type": "big", "port_scan_type": "top100",
65 |                 "domain_brute": True, "alt_dns": True, "riskiq_search": True, "arl_search": True,
66 |                 "port_scan": True,
67 |                 "service_detection": True, "os_detection": True, "fofa_search": True, "ssl_cert": True,
68 |                 "site_identify": True, "search_engines": True, "site_spider": True, "site_capture": True,
69 |                 "file_leak": True}
70 |         try:
71 |             r = requests.post(url=url, headers=self.headers, data=json.dumps(data), timeout=5, verify=False)
72 |             result = r.json()
73 |             print(f"{Hx_config.green}ARL_result : {str(result)}{Hx_config.end}")
74 |             if len(result['items']) == 0:  # 同样也是没有成功add
75 |                 self.Prevent_duplicate_scanning(delete_signal=True)
76 |         except:
77 |             if not self._list:  # 没有传入任何目标
print(f"{Hx_config.red}ARL没有接受到任何参数{Hx_config.end}") 79 | print(f"{Hx_config.red}ARL扫描启动失败,请检查ARL服务器网络!{Hx_config.end}") 80 | self.Prevent_duplicate_scanning(delete_signal=True) 81 | 82 | if __name__ == '__main__': 83 | a = Scan(name='test', targets_list=["baidu","baidu.com"]).add_task() -------------------------------------------------------------------------------- /CScan/CScan.py: -------------------------------------------------------------------------------- 1 | from multiprocessing.pool import ThreadPool 2 | import re 3 | import requests 4 | import socket 5 | import Hx_config 6 | 7 | 8 | ''' 9 | CScan C段扫描代码 10 | 传入IP 11 | 对其进行访问之后返回其title信息 12 | ''' 13 | def CScan(ip): 14 | try: 15 | rep = requests.get("http://" + ip, headers=Hx_config.GetHeaders(), timeout=2, verify=False) 16 | if rep.status_code != 404: 17 | title = re.findall(r'(.*?)', rep.text) 18 | if title: 19 | return "[T]" + ip + ' >>> ' + title[0] + "\n" 20 | else: 21 | return "[H]" + ip + " >>> have reason\n" 22 | except Exception as e: 23 | pass 24 | 25 | 26 | ''' 27 | C段扫描控制函数 CScanConole(host,Cfilename,path) 28 | 传入host,可以为域名或者IP,对于域名使用socket.gethostbyname进行转换 29 | 同时传入filename为host的MD5 hash之后的结果 30 | 传入的path为保存文件项目的绝对路径,方便保存到指定的文件夹下 31 | 缺点是不能绕过CDN防护 32 | 使用线程池进行多线程C段扫描 线程最大数量默认为 20 33 | 将结果经过过滤后保存到相应的域名MD5文件中 34 | ''' 35 | def CScanConsole(host, Cfilename = '', path = ''): 36 | pattern = re.compile('^\d+\.\d+\.\d+\.\d+(:(\d+))?$') 37 | if not pattern.findall(host): 38 | ip = socket.gethostbyname(host) 39 | if pattern.findall(host) and ":" in host: 40 | ip = host.split(":")[0] 41 | hostList = [] 42 | ip = ip.split('.') 43 | pools = 20 44 | for tmpCip in range(1, 256): 45 | ip[-1] = str(tmpCip) 46 | host = ".".join(ip) 47 | for port in Hx_config.portlist: 48 | host=host+":"+str(port) 49 | hostList.append(host) 50 | pool = ThreadPool(pools) 51 | C_Message = pool.map(CScan, hostList) 52 | pool.close() 53 | pool.join() 54 | content="".join(list(filter(None, C_Message))) 55 | print(content) 56 | print("CSan END,the path:" + Cfilename) 57 | 58 | def main(): 59 | CScanConsole('120.53.133.61:80') 60 | 61 | if __name__ == '__main__': 62 | main() 63 | 64 | -------------------------------------------------------------------------------- /Hx_config.py: -------------------------------------------------------------------------------- 1 | import os 2 | import queue 3 | import shutil 4 | import json 5 | import random 6 | 7 | 8 | ''' 9 | 配置文件: 10 | Root_Path路径信息 项目根目录的绝对路径 11 | ''' 12 | Root_Path = os.path.dirname(os.path.abspath(__file__)) 13 | 14 | ''' 15 | Server酱your_key 16 | ''' 17 | SERVERKEY = '' 18 | 19 | ''' 20 | ARL的api配置和网络路径 21 | ''' 22 | API_KEY = 'write_down_your_token' # 在 ARL/docker/config-docker.yaml里修改 23 | arl_url_Path = 'https://127.0.0.1:5004' 24 | 25 | ''' 26 | xray以后的参数 27 | ''' 28 | plugins = None 29 | 30 | ''' 31 | 工具所在路径: 32 | chrome路径信息 Chrome_Path 例如:C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe 33 | Xray路径信息 Xray_Path 例如:D:\\Xray\\xray.exe 34 | crawlergo 可执行文件的所在的位置 例如:C:\\Users\\Cl0udG0d\\Desktop\\sec_tools\\crawlergo\\crawlergo.exe 35 | OneForAll 文件夹所在的位置 例如:C:\\Users\\Cl0udG0d\\Desktop\\OneForAll-master\\ 36 | subDomainsBrute 文件夹所在位置 37 | ''' 38 | Chrome_Path = 'D:\\HxnineTails_Script\\chrome-win\\chrome.exe' 39 | Xray_Path = 'D:\\HxnineTails_Script\\xray\\xray.exe' 40 | crawlergo_Path = 'D:\\HxnineTails_Script\\crawlergo.exe' 41 | OneForAll_Path = 'D:\\HxnineTails_Script\\OneForAll-master\\' 42 | subDomainsBrute_Path = 'D:\\HxnineTails_Script\\subDomainsBrute-master\\' 43 | 
43 | subfinder_Path = 'D:\\HxnineTails_Script\\subfinder_2.4.5_windows_amd64\\'
44 | # Chrome_Path='D:\\HxnineTails_Script\\chrome-win\\chrome.exe'
45 | # Xray_Path='D:\\HxnineTails_Script\\xray\\xray.exe'
46 | # crawlergo_Path='D:\\HxnineTails_Script\\crawlergo.exe'
47 | # OneForAll_Path='D:\\HxnineTails_Script\\OneForAll-master\\'
48 | # subDomainsBrute_Path='D:\\HxnineTails_Script\\subDomainsBrute-master\\'
49 | # subfinder_Path='D:\\HxnineTails_Script\\subfinder_2.4.5_windows_amd64\\'
50 | 
51 | '''
52 | 一些全局通用配置:
53 | portlist C段探测时的默认端口 默认为:80,8080,8000,8081,8001
54 | sub_queue 存储子域名的队列,用于保存所有工具获取到的子域名
55 | '''
56 | portlist = ['80', '8080', '8000', '8081', '8001']
57 | blacklist = ["spider", "org"]
58 | ThreadNum = 5
59 | PYTHON = "py -3.8"
60 | 
61 | '''
62 | 输出报告路径:
63 | 报告全部输出在save文件夹下
64 | Xray_report_path Xray扫描后的输出html报告
65 | CScan_report_path C段扫描后的输出txt文件
66 | '''
67 | Save_path = "{}\\save".format(Root_Path)
68 | Xray_report_path = "{}\\save\\saveXray".format(Root_Path)
69 | Xray_temp_report_path = '{}\\save\\saveTempXray'.format(Root_Path)
70 | CScan_report_path = "{}\\save\\saveCplus\\".format(Root_Path)
71 | Sub_report_path = "{}\\save\\saveSub\\".format(Root_Path)
72 | Temp_path = "{}\\save\\temp\\".format(Root_Path)
73 | JS_report_path = '{}\\save\\saveJS\\'.format(Root_Path)
74 | ARL_save_path = '{}\\save\\saveARL\\'.format(Root_Path)
75 | Crawlergo_save_path = '{}\\save\\saveCrawlergo\\'.format(Root_Path)
76 | 
77 | '''
78 | 全局队列
79 | '''
80 | sub_queue = queue.Queue()
81 | target_queue = queue.Queue()
82 | xray_queue = queue.Queue()
83 | ppp_queue = queue.Queue()
84 | 
85 | '''
86 | GetHeaders()函数
87 | 读取 Useragent.json(fake-useragent 整理的UA数据)
88 | 返回一个随机生成的请求头,防止因为python自带的请求头而被屏蔽
89 | '''
90 | 
91 | 
92 | def GetHeaders():
93 |     try:
94 |         with open('Useragent.json', 'r') as f:
95 |             data = json.load(f)
96 |         data_browsers = data['browsers']
97 |         data_randomize = list(data['randomize'].values())
98 |         browser = random.choice(data_randomize)
99 |         headers = {'User-Agent': random.choice(data_browsers[browser])}
100 | 
101 |         return headers
102 |     except Exception as e:
103 |         exit("[*]Hx_config.py : GetHeaders error!")
104 | 
105 | 
106 | 
107 | '''
108 | 颜色配置
109 | '''
110 | yellow = '\033[01;33m'
111 | white = '\033[01;37m'
112 | green = '\033[01;32m'
113 | blue = '\033[01;34m'
114 | red = '\033[1;31m'
115 | end = '\033[0m'
116 | 
117 | 
118 | def logo():
119 |     print(f'''
120 |     {blue}花溪九尾,懒人必备web扫描器!{green}
121 |     +-+-+-+-+-+-+-+-+-+-+-+
122 |     |{red}H|{yellow}X|n|{green}i|n|e|T|a|{yellow}i|l|{red}s|
123 |     {green}+-+-+-+-+-+-+-+-+-+-+-+{white}
124 |     v1.1{end}
125 |     ''')
126 | 
127 | 
128 | '''
129 | 递归删除临时保存目录下的保存信息
130 | '''
131 | 
132 | 
133 | def delModel():
134 |     saveFolderList = ['saveCplus', 'saveJS', 'saveSub', 'saveXray', 'saveARL', 'temp', 'saveTempXray', 'saveCrawlergo']
135 |     for tempFolder in saveFolderList:
136 |         shutil.rmtree("{}\\save\\{}".format(Root_Path, tempFolder))
137 |         os.mkdir("{}\\save\\{}".format(Root_Path, tempFolder))
138 |     print(f"{green}clean end :){end}")
139 |     return
140 | 
141 | 
142 | '''
143 | 帮助信息
144 | '''
145 | 
146 | 
147 | def scanHelp():
148 |     print(
149 |         f'{green}scan.py [options]\n\t-a --attone example: scan.py -a https://www.baidu.com\n\t-s --attsrc example:{end}'
150 |         f'{green}scan.py -s baidu.com\n\t-d --attdetail example: scan.py -d baidu.com\n\t-r --readppp {end}'
151 |         f'{green}example: scan.py -r target.txt\n\t-t --thread number of threads\n\t-c --clean clean saveFiles\n\t-h --help output help information\n{end}'
152 |     )
153 | 
154 | 
155 | def main():
156 |     print(Root_Path)
157 | return 158 | 159 | 160 | if __name__ == '__main__': 161 | main() 162 | -------------------------------------------------------------------------------- /JSmessage/jsfinder/JSFinder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python" 2 | # coding: utf-8 3 | # By Threezh1 4 | # https://threezh1.github.io/ 5 | 6 | import requests, argparse, sys, re 7 | from requests.packages import urllib3 8 | from urllib.parse import urlparse 9 | from bs4 import BeautifulSoup 10 | import Hx_config 11 | urllib3.disable_warnings() 12 | import os 13 | 14 | 15 | 16 | # Regular expression comes from https://github.com/GerbenJavado/LinkFinder 17 | def extract_URL(JS): 18 | pattern_raw = r""" 19 | (?:"|') # Start newline delimiter 20 | ( 21 | ((?:[a-zA-Z]{1,10}://|//) # Match a scheme [a-Z]*1-10 or // 22 | [^"'/]{1,}\. # Match a domainname (any character + dot) 23 | [a-zA-Z]{2,}[^"']{0,}) # The domainextension and/or path 24 | | 25 | ((?:/|\.\./|\./) # Start with /,../,./ 26 | [^"'><,;| *()(%%$^/\\\[\]] # Next character can't be... 27 | [^"'><,;|()]{1,}) # Rest of the characters can't be 28 | | 29 | ([a-zA-Z0-9_\-/]{1,}/ # Relative endpoint with / 30 | [a-zA-Z0-9_\-/]{1,} # Resource name 31 | \.(?:[a-zA-Z]{1,4}|action) # Rest + extension (length 1-4 or action) 32 | (?:[\?|/][^"|']{0,}|)) # ? mark with parameters 33 | | 34 | ([a-zA-Z0-9_\-]{1,} # filename 35 | \.(?:php|asp|aspx|jsp|json| 36 | action|html|js|txt|xml) # . + extension 37 | (?:\?[^"|']{0,}|)) # ? mark with parameters 38 | ) 39 | (?:"|') # End newline delimiter 40 | """ 41 | pattern = re.compile(pattern_raw, re.VERBOSE) 42 | result = re.finditer(pattern, str(JS)) 43 | if result == None: 44 | return None 45 | js_url = [] 46 | return [match.group().strip('"').strip("'") for match in result 47 | if match.group() not in js_url] 48 | 49 | 50 | # Get the page source 51 | def Extract_html(URL): 52 | header = { 53 | "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36", 54 | "Cookie": None} 55 | try: 56 | raw = requests.get(URL, headers=header, timeout=3, verify=False) 57 | raw = raw.content.decode("utf-8", "ignore") 58 | return raw 59 | except: 60 | return None 61 | 62 | 63 | # Handling relative URLs 64 | def process_url(URL, re_URL): 65 | black_url = ["javascript:"] # Add some keyword for filter url. 
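    # The branches below normalize the regex match re_URL against the page URL:
    #   "//host/..."                  -> protocol-relative, prefixed with the page scheme ("http" + ":" + "//host/...")
    #   "http..."                     -> already absolute, returned unchanged
    #   "/a", "./a", "../a", "a/b.js" -> joined onto scheme://netloc of the page
    #   entries in black_url          -> fall through, the page URL itself is returned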
66 | URL_raw = urlparse(URL) 67 | ab_URL = URL_raw.netloc 68 | host_URL = URL_raw.scheme 69 | if re_URL[0:2] == "//": 70 | result = host_URL + ":" + re_URL 71 | elif re_URL[0:4] == "http": 72 | result = re_URL 73 | elif re_URL[0:2] != "//" and re_URL not in black_url: 74 | if re_URL[0:1] == "/": 75 | result = host_URL + "://" + ab_URL + re_URL 76 | else: 77 | if re_URL[0:1] == ".": 78 | if re_URL[0:2] == "..": 79 | result = host_URL + "://" + ab_URL + re_URL[2:] 80 | else: 81 | result = host_URL + "://" + ab_URL + re_URL[1:] 82 | else: 83 | result = host_URL + "://" + ab_URL + "/" + re_URL 84 | else: 85 | result = URL 86 | return result 87 | 88 | 89 | def find_last(string, str): 90 | positions = [] 91 | last_position = -1 92 | while True: 93 | position = string.find(str, last_position + 1) 94 | if position == -1: break 95 | last_position = position 96 | positions.append(position) 97 | return positions 98 | 99 | 100 | def find_by_url(url, js=False): 101 | if js == False: 102 | try: 103 | print("url:" + url) 104 | except: 105 | print("Please specify a URL like https://www.baidu.com") 106 | html_raw = Extract_html(url) 107 | if html_raw == None: 108 | print("Fail to access " + url) 109 | return None 110 | # print(html_raw) 111 | html = BeautifulSoup(html_raw, "html.parser") 112 | html_scripts = html.findAll("script") 113 | script_array = {} 114 | script_temp = "" 115 | for html_script in html_scripts: 116 | script_src = html_script.get("src") 117 | if script_src == None: 118 | script_temp += html_script.get_text() + "\n" 119 | else: 120 | purl = process_url(url, script_src) 121 | script_array[purl] = Extract_html(purl) 122 | script_array[url] = script_temp 123 | allurls = [] 124 | for script in script_array: 125 | # print(script) 126 | temp_urls = extract_URL(script_array[script]) 127 | if len(temp_urls) == 0: continue 128 | for temp_url in temp_urls: 129 | allurls.append(process_url(script, temp_url)) 130 | result = [] 131 | for singerurl in allurls: 132 | url_raw = urlparse(url) 133 | domain = url_raw.netloc 134 | positions = find_last(domain, ".") 135 | miandomain = domain 136 | if len(positions) > 1: miandomain = domain[positions[-2] + 1:] 137 | # print(miandomain) 138 | suburl = urlparse(singerurl) 139 | subdomain = suburl.netloc 140 | # print(singerurl) 141 | if miandomain in subdomain or subdomain.strip() == "": 142 | if singerurl.strip() not in result: 143 | result.append(singerurl) 144 | return result 145 | return sorted(set(extract_URL(Extract_html(url)))) or None 146 | 147 | 148 | def find_subdomain(urls, mainurl): 149 | url_raw = urlparse(mainurl) 150 | domain = url_raw.netloc 151 | miandomain = domain 152 | positions = find_last(domain, ".") 153 | if len(positions) > 1: miandomain = domain[positions[-2] + 1:] 154 | subdomains = [] 155 | for url in urls: 156 | suburl = urlparse(url) 157 | subdomain = suburl.netloc 158 | # print(subdomain) 159 | if subdomain.strip() == "": continue 160 | if miandomain in subdomain: 161 | if subdomain not in subdomains: 162 | subdomains.append(subdomain) 163 | return subdomains 164 | 165 | 166 | 167 | def giveresult(urls, domian,outputfilename): 168 | if urls == None: 169 | return None 170 | print("Find " + str(len(urls)) + " URL:") 171 | content_url = "" 172 | content_subdomain = "" 173 | for url in urls: 174 | content_url += url + "\n" 175 | print(url) 176 | subdomains = find_subdomain(urls, domian) 177 | print("\nFind " + str(len(subdomains)) + " Subdomain:") 178 | for subdomain in subdomains: 179 | content_subdomain += subdomain + "\n" 180 | 
print(subdomain) 181 | # print(current_dir) 182 | output_url_filename= Hx_config.JS_report_path + "url_" + outputfilename + '.txt' 183 | output_subdomain_filename= Hx_config.JS_report_path + "subdomain_" + outputfilename + '.txt' 184 | with open(output_url_filename, "a", encoding='utf-8') as fobject: 185 | fobject.write(content_url) 186 | print("\nOutput " + str(len(urls)) + " urls") 187 | print("Path:" + output_url_filename) 188 | with open(output_subdomain_filename, "a", encoding='utf-8') as fobject: 189 | fobject.write(content_subdomain) 190 | print("\nOutput " + str(len(subdomains)) + " subdomains") 191 | print("Path:" + output_subdomain_filename) 192 | 193 | 194 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 潘一二三 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /OneForAll/oneforallMain.py: -------------------------------------------------------------------------------- 1 | import Hx_config 2 | import os 3 | import csv 4 | 5 | import base 6 | ''' 7 | OneForALLScan调度主函数 8 | 输入:target ,为目标域名,例如:baidu.com 9 | 功能:将需要获取域名的target使用oneforall进行子域名收集,并将结果存储在队列中 10 | ''' 11 | def OneForAllScan(target): 12 | print(f"{Hx_config.yellow}{target} OneForALL Scan Start~ {Hx_config.end}") 13 | results_path = f"{Hx_config.OneForAll_Path}results" 14 | oneforall_filename = "{}\\{}".format(results_path, base.get_filename(results_path, target)) 15 | ''' 16 | 如果存在csv文件,则不需要爬取了 17 | ''' 18 | try: 19 | csvFile = open(oneforall_filename, "r") 20 | csv_read(csvFile) 21 | print(f"{Hx_config.blue}{oneforall_filename}{Hx_config.end}") 22 | print(f"{Hx_config.green}文件已存在,不进行oneforall扫描。{Hx_config.end}") 23 | except FileNotFoundError: 24 | oneforall_py = "{}\\oneforall.py".format(Hx_config.OneForAll_Path) 25 | scanCommand = "{} {} --target {} run".format(Hx_config.PYTHON, oneforall_py, target) 26 | print(f"{Hx_config.blue}{scanCommand}{Hx_config.end}") 27 | os.system(scanCommand) 28 | oneforall_filename = "{}\\{}".format(results_path, base.get_filename(results_path, target)) 29 | print(f"{Hx_config.blue}{oneforall_filename}{Hx_config.end}") 30 | csvFile = open(oneforall_filename, "r") 31 | csv_read(csvFile) 32 | 33 | print(f"{Hx_config.yellow}{target} OneForALL Scan end~ {Hx_config.end}") 34 | print(f"{Hx_config.green}oneforall 结束 !当前的url个数为{Hx_config.sub_queue.qsize()}{Hx_config.end}") 35 | 36 | return 0 37 | 38 | def csv_read(file_data): 39 | reader = csv.reader(file_data) 40 | for item in reader: 41 | if reader.line_num == 1: # 忽略第一行 42 | continue 43 | # print(item[4]) 44 | Hx_config.sub_queue.put(item[4]) 45 | 46 | 47 | 48 | def main(): 49 | OneForAllScan('vulnweb.com') 50 | return 51 | 52 | if __name__ == '__main__': 53 | main() -------------------------------------------------------------------------------- /README-ZH.md: -------------------------------------------------------------------------------- 1 | ## HXnineTails 花溪九尾 2 | 3 | [English](https://github.com/Cl0udG0d/HXnineTails/blob/main/README.md) | 简体中文 4 | 5 | > **平凡** **暴力** **强大** **可自行扩展的缝合怪物** 6 | 7 | ```python 8 | +-+-+-+-+-+-+-+-+-+-+-+ 9 | |H|X|n|i|n|e|T|a|i|l|s| 10 | +-+-+-+-+-+-+-+-+-+-+-+ 11 | ``` 12 | 13 | ### 内容列表🚀 14 | 15 | - [简介](#简介) 16 | - [安装](#安装) 17 | - [使用说明](#使用说明) 18 | - [部分截图](#部分截图) 19 | - [TODO](#TODO) 20 | - [维护者](#维护者) 21 | - [如何贡献](#如何贡献) 22 | - [打赏列表](#打赏列表) 23 | - [使用许可](#使用许可) 24 | - [赞赏码](#赞赏码) 25 | 26 | 27 | 28 | ### 简介 29 | 30 | :paw_prints:`python3`实现的集成了`github`上多个扫描工具的命令行WEB扫描工具 31 | 32 | :trident:目标是躺着挖洞 33 | 34 | 项目代码在最新版社区版`xray1.7`,专业版`xray1.3.3`下检测无误 35 | 36 | 该项目中目前集成:[crawlergo](https://github.com/0Kee-Team/crawlergo) [OneForAll](https://github.com/shmilylty/OneForAll) [subDomainsBrute](https://github.com/lijiejie/subDomainsBrute) [Subfinder](https://github.com/projectdiscovery/subfinder) [Sublist3r](https://github.com/aboul3la/Sublist3r) [Xray](https://github.com/chaitin/xray) [JSfinder](https://github.com/Threezh1/JSFinder) [pppXray](https://github.com/Cl0udG0d/pppXray) [Server酱](http://sc.ftqq.com/3.version) 37 | 38 | 下一个想要集成的项目是[ARL资产灯塔系统](https://github.com/TophantTechnology/ARL) 39 | 40 | 项目的成果在于将这些单独强大的组件融合到一个单一的应用中,适用于SRC批量扫描,CNVD垂直上分等 41 | 42 | `项目结构:` 43 | 44 | ![项目结构](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/Architecture.png) 45 | 46 | ### 安装 47 | 48 | 
安装`python3` (暂不支持 `python2`)
49 | 
50 | 下载本项目代码: `git clone https://github.com/Cl0udG0d/HXnineTails`
51 | 
52 | 安装相应的库文件 `pip3 install -r requirements.txt`
53 | 
54 | > 为方便国内用户,在`requirements.txt`第一行使用了阿里云镜像
55 | >
56 | > 如果是国外服务器进行`python`库文件安装,为提速请删除`requirements.txt`第一行
57 | 
58 | 需要安装下列项目,并将路径配置在`Hx_config.py`文件中
59 | 
60 | [谷歌浏览器](https://www.google.com/intl/zh-CN/chrome/)
61 | 
62 | [Xray](https://github.com/chaitin/xray/releases) (配合高级版食用更佳)
63 | 
64 | [crawlergo](https://github.com/0Kee-Team/crawlergo/releases)
65 | 
66 | [OneForAll](https://github.com/shmilylty/OneForAll/releases)
67 | 
68 | [subDomainsBrute](https://github.com/lijiejie/subDomainsBrute)
69 | 
70 | [subfinder](https://github.com/projectdiscovery/subfinder/releases)
71 | 
72 | 
73 | 
74 | 例如在我的个人笔记本电脑上,`Hx_config.py`中的路径信息为:
75 | 
76 | ```python
77 | '''
78 | 各个项目所在路径:
79 | '''
80 | Chrome_Path='C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe'
81 | Xray_Path='D:\\Xray\\xray.exe'
82 | crawlergo_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\crawlergo.exe'
83 | OneForAll_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\OneForAll-master\\'
84 | subDomainsBrute_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\subDomainsBrute-master\\'
85 | subfinder_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\subfinder\\'
86 | ```
87 | 
88 | 在`HXnineTails`文件夹下打开命令行输入扫描参数
89 | 
90 | ### ARL配置
91 | 
92 | 第一步:确定部署完成并且可以正确访问ARL后台
93 | 
94 | 第二步:修改黑名单(如果有需要的话)
95 | 
96 | ![image-20210818003323362](images/image-20210818003323362.png)
97 | 
98 | 第三步:设置api访问token
99 | 
100 | 打开这个文件进行设置
101 | 
102 | ![image-20210817235844858](images/image-20210817235844858.png)
103 | 
104 | 另外如果你是本地访问的话,下面BLACK_IPS记得注释掉本地地址
105 | 
106 | ![image-20210818003406757](images/image-20210818003406757.png)
107 | 
108 | 第四步:配置文件添加token
109 | 
110 | ![image-20210818010542320](images/image-20210818010542320.png)
111 | 
112 | 测试:
113 | 
114 | 
115 | 
116 | ![image-20210818003639711](images/image-20210818003639711.png)
117 | 
118 | ### 使用说明
119 | 
120 | 命令行使用,参数详情为:
121 | 
122 | ```python
123 | --help 输出帮助信息 如python3 scan.py --help
124 | -a --attone 对单个URL,只进行crawlergo动态爬虫+xray扫描 例如 百度官网 python3 scan.py -a https://www.baidu.com
125 | -s --attsrc 对SRC资产,进行信息搜集+crawlergo+xray , 例如 百度SRC python3 scan.py -s baidu.com
126 | -d --attdetail 对SRC资产,进行信息搜集+crawlergo+xray+C段信息搜集+js敏感信息搜集 , 例如 百度SRC 输入 python3 scan.py -d baidu.com
127 | -t --thread 线程数量,默认线程为5 如 python3 scan.py -t 10 -a http://testphp.vulnweb.com/
128 | -r 读取待扫描txt文件,每行一个URL 对取出的每个URL进行 -a 扫描,如 python3 scan.py -t 10 -r target.txt
129 | -c 对保存的漏洞相关报告进行清理,即清理save文件夹下的文件
130 | -p --plugins 自定义xray插件 例如 python3 scan.py -a https://www.baidu.com --plugins sqldet
131 | ```
132 | 
133 | 建议使用 `-a` 或 `-s` 参数进行扫描
134 | 
135 | 另外有一些全局配置在`Hx_config.py`中,可以自行修改,如:
136 | 
137 | ```python
138 | SERVERKEY=''
139 | 
140 | portlist=['80','8080','8000','8081','8001']
141 | blacklist=["spider","org"]
142 | 
143 | ThreadNum=5
144 | PYTHON="python3"
145 | ```
146 | 
147 | `SERVERKEY`是Server酱 你注册使用的`key`值
148 | 
149 | `portlist`是C段扫描时的默认扫描端口列表
150 | 
151 | `blacklist` 中的字符串,若出现在待扫描URL中,该URL不会被扫描
152 | 
153 | `ThreadNum` 默认的线程数量
154 | 
155 | `PYTHON` 主机python解释器的名称,默认为`python3`
156 | 
157 | 
158 | 
159 | **上面提到了一些外部程序或配置,如果在你的扫描中不需要用到的话,可以不进行安装,在程序运行过程中会自行pass掉**
160 | 
161 | ### 部分截图
162 | 
163 | ```shell
164 | python3 scan.py --help
165 | ```
166 | 
167 | ![截图1](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/1.png)
168 | 
169 | ```shell
170 | python3 scan.py -t 3 -a http://testphp.vulnweb.com/
171 | ```
172 | 
![截图2](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/2.png) 174 | 175 | 查看保存的报告 176 | 177 | ![截图3](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/3.png) 178 | 179 | 180 | 181 | ### TODO 182 | 183 | + ~~写个英文readme~~(感谢老哥 [wenyurush](https://github.com/wenyurush)) 184 | + 精简和添加模块 185 | + 添加ARL模块 186 | + ... 187 | 188 | 189 | 190 | ### 维护者 191 | 192 | [@春告鳥](https://github.com/Cl0udG0d) 193 | 194 | [@Throokie](https://github.com/Throokie) 195 | 196 | 197 | 198 | ### 如何贡献 199 | 200 | :beer:非常欢迎你的加入![提一个 Issue](https://github.com/Cl0udG0d/AutumnWater/issues/new) 或者提交一个 Pull Request。 201 | 202 | :beers:当然也欢迎给我发邮件 2585614464@qq.com Join us! 203 | 204 | 🍻也可以给Throokie发邮件!a.88bbd@gmail.com 205 | 206 | 207 | ### 打赏列表 208 | 209 | + `背人语` 210 | + `掌控安全-hab` 211 | 212 | 213 | 214 | ### 使用许可 215 | 216 | [MIT](LICENSE) © 春告鳥 217 | 218 | 219 | 220 | ### 赞赏码 221 | 222 | **如果对你有帮助的话要不请作者喝杯奶茶?~~(嘿嘿)~~:+1: (打赏时请留言你的ID** 223 | 224 | ![打赏码](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/Praise.png) 225 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## HXnineTails 花溪九尾 2 | 3 | English | [简体中文](https://github.com/Cl0udG0d/HXnineTails/blob/main/README-ZH.md) 4 | 5 | > **Plain** **Violent** **Powerful** **Self-expanding stitching monster** 6 | 7 | ```python 8 | +-+-+-+-+-+-+-+-+-+-+-+-+ 9 | |H|X|n|i|n|e|T|a|i|l|s| 10 | +-+-+-+-+-+-+-+-+-+-+-+-+ 11 | ``` 12 | 13 | ### Content List🚀 14 | 15 | - [Introduction](#Introduction) 16 | - [Install](#Install) 17 | - [Instructions](#Instructions) 18 | - [screenshots](#screenshots) 19 | - [TODO](#TODO) 20 | - [Maintainer](#Maintainer) 21 | - [Contribute](#Contribute) 22 | - [Reward](#Reward) 23 | - [License](#License) 24 | - [Appreciation_Code](#Appreciation_Code) 25 | 26 | 27 | 28 | ### Introduction 29 | 30 | :paw_prints:`python3` implementation of a command-line WEB scanning tool that integrates several scanning tools on `github`. 31 | 32 | :trident:The goal is to lie down and dig a hole 33 | 34 | The project code is tested under the latest community version of `xray1.7` without errors 35 | 36 | Currently integrated in this project: [crawlergo](https://github.com/0Kee-Team/crawlergo) [OneForAll](https://github.com/shmilylty/OneForAll) [subDomainsBrute](https://github.com/lijiejie/subDomainsBrute) [Subfinder](https://github.com/projectdiscovery/subfinder) [Sublist3r](https://github.com/aboul3la/Sublist3r) [Xray](https://github.com/chaitin/xray) [JSfinder](https://github.com/Threezh1/JSFinder) [pppXray]( https://github.com/Cl0udG0d/pppXray) [Server Sauce](http://sc.ftqq.com/3.version) 37 | 38 | The next project that I want to integrate is [ARL Asset Lighthouse System](https://github.com/TophantTechnology/ARL) 39 | 40 | The result of the project is the fusion of these individually powerful components into a single application, suitable for SRC batch scanning, CNVD vertical upscaling, etc. 
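At a high level, the collectors and scanners hand work to each other through the global queues declared in `Hx_config.py`. A minimal sketch of that flow (illustrative only: the real orchestration lives in `scan.py`, and the import assumes you run from the repository root):

```python
import Hx_config
from Sublist3r.Sublist3rMain import Sublist3rScan

Sublist3rScan("example.com")            # collector: pushes discovered subdomains into Hx_config.sub_queue
while not Hx_config.sub_queue.empty():  # consumer: each entry is then fed to crawlergo + xray
    print(Hx_config.sub_queue.get())
```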
41 | 
42 | `Project structure: `
43 | 
44 | ![Project Structure](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/Architecture.png)
45 | 
46 | 
47 | 
48 | ### Installation
49 | 
50 | Install `python3` (`python2` is not supported at this time)
51 | 
52 | Download the code for this project: `git clone https://github.com/Cl0udG0d/HXnineTails`
53 | 
54 | Install the required libraries: `pip3 install -r requirements.txt`
55 | 
56 | > For users in China, the first line of `requirements.txt` uses the Aliyun mirror
57 | >
58 | > If you are installing the `python` libraries on a server outside China, delete the first line of `requirements.txt` to speed things up
59 | 
60 | The following projects need to be installed, with their paths configured in the `Hx_config.py` file
61 | 
62 | [Google Chrome](https://www.google.com/intl/zh-CN/chrome/)
63 | 
64 | [Xray](https://github.com/chaitin/xray/releases) (better with the premium version)
65 | 
66 | [crawlergo](https://github.com/0Kee-Team/crawlergo/releases)
67 | 
68 | [OneForAll](https://github.com/shmilylty/OneForAll/releases)
69 | 
70 | [subDomainsBrute](https://github.com/lijiejie/subDomainsBrute)
71 | 
72 | [subfinder](https://github.com/projectdiscovery/subfinder/releases)
73 | 
74 | 
75 | 
76 | For example, on my personal laptop, the path information in `Hx_config.py` is:
77 | 
78 | ```python
79 | '''
80 | Paths where each project is located.
81 | '''
82 | Chrome_Path='C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe'
83 | Xray_Path='D:\\Xray\\xray.exe'
84 | crawlergo_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\crawlergo.exe'
85 | OneForAll_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\OneForAll-master\\'
86 | subDomainsBrute_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\subDomainsBrute-master\\'
87 | subfinder_Path='C:\\Users\\Administrator\\Desktop\\test_tools\\subfinder\\'
88 | ```
89 | 
90 | Open a command line in the `HXnineTails` folder and pass the scan parameters
91 | 
92 | 
93 | 
94 | ### Instructions
95 | 
96 | Command-line usage, with the following parameters:
97 | 
98 | ```python
99 | -h --help output help information, e.g. python3 scan.py --help
100 | -a --attone for a single URL, only crawlergo dynamic crawling + xray scanning, e.g. for the Baidu site: python3 scan.py -a https://www.baidu.com
101 | -s --attsrc for SRC assets, information gathering + crawlergo + xray, e.g. for Baidu SRC: python3 scan.py -s baidu.com
102 | -d --attdetail for SRC assets, information gathering + crawlergo + xray + C-segment information gathering + js sensitive-information gathering, e.g. for Baidu SRC: python3 scan.py -d baidu.com
103 | -t --thread number of threads, default is 5, e.g. python3 scan.py -t 10 -a http://testphp.vulnweb.com/
104 | -r reads a txt file of targets, one URL per line, and runs an -a scan on each URL, e.g. python3 scan.py -t 10 -r target.txt
105 | -c clean up the saved vulnerability reports, i.e. the files under the save folder
106 | ```
107 | 
108 | It is recommended to scan with the `-a` or `-s` parameter
109 | 
110 | There are also some global configurations in `Hx_config.py` that you can modify yourself, such as
111 | 
112 | ```python
113 | SERVERKEY=''
114 | 
115 | portlist=['80','8080','8000','8081','8001']
116 | blacklist=["spider", "org"]
117 | 
118 | ThreadNum=5
119 | PYTHON="python3"
120 | ```
121 | 
122 | `SERVERKEY` is the `key` you registered with ServerChan (Server酱)
123 | 
124 | `portlist` is the default list of ports probed during C-segment scanning
125 | 
126 | URLs containing any string in `blacklist` will not be scanned
127 | 
128 | `ThreadNum` is the default number of threads
129 | 
130 | `PYTHON` is the name of the host python interpreter, default `python3`
131 | 
132 | 
133 | 
134 | **Some external programs and configurations are mentioned above; any you don't need for your scan can be left uninstalled and will simply be skipped at runtime**
135 | 
136 | ### screenshots
137 | 
138 | ```shell
139 | python3 scan.py --help
140 | ```
141 | 
142 | ![Screenshot 1](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/1.png)
143 | 
144 | ```shell
145 | python3 scan.py -t 3 -a http://testphp.vulnweb.com/
146 | ```
147 | 
148 | ![Screenshot 2](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/2.png)
149 | 
150 | View saved reports
151 | 
152 | ![Screenshot 3](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/3.png)
153 | 
154 | 
155 | 
156 | ### TODO
157 | 
158 | + ~~Write an English readme~~ (thanks to [wenyurush](https://github.com/wenyurush))
159 | + Streamline and add modules
160 | + Add ARL module
161 | + ...
162 | 
163 | 
164 | 
165 | ### Maintainer
166 | 
167 | [@春告鳥](https://github.com/Cl0udG0d)
168 | [@Throokie](https://github.com/Throokie)
169 | 
170 | 
171 | 
172 | ### Contribute
173 | 
174 | :beer:You're very welcome to join us! [Raise an Issue](https://github.com/Cl0udG0d/HXnineTails/issues/new) or submit a Pull Request.
175 | 
176 | :beers:And of course feel free to email me at 2585614464@qq.com and join us!
177 | 
178 | 🍻 You can also reach Throokie by email: 326516678@qq.com
179 | 
180 | ### Reward
181 | 
182 | + `背人语`
183 | + `掌控安全-hab`
184 | 
185 | 
186 | 
187 | ### License
188 | 
189 | [MIT](LICENSE) © 春告鳥
190 | 
191 | 
192 | 
193 | ### Appreciation_Code
194 | 
195 | **If it helps you, how about buying the author a cup of milk tea? ~~(hehehe)~~ :+1: (Please leave a message with your ID when you reward)**
196 | 
197 | ![打赏码](https://github.com/Cl0udG0d/HXnineTails/blob/main/images/Praise.png)
198 | 
--------------------------------------------------------------------------------
/ServerJiang/jiangMain.py:
--------------------------------------------------------------------------------
1 | import requests
2 | 
3 | from Hx_config import SERVERKEY
4 | 
5 | '''
6 | 项目集成Server酱进行扫描结束的消息推送
7 | 关于Server酱:http://sc.ftqq.com/3.version
8 | '''
9 | api = "https://sc.ftqq.com/{}.send".format(SERVERKEY)
10 | 
11 | 
12 | def SendNotice(message):
13 |     try:
14 |         title = "花溪九尾 扫描通知"
15 |         data = {
16 |             "text": title,
17 |             "desp": message
18 |         }
19 |         requests.post(api, data=data)
20 |     except Exception as e:
21 |         print(e)
22 |         pass
23 |     return
24 | 
--------------------------------------------------------------------------------
/Subfinder/subfinderMain.py:
--------------------------------------------------------------------------------
1 | import os
2 | import Hx_config
3 | 
4 | '''
5 | subfinderScan(target, filename)
6 | '''
7 | def subfinderScan(target, filename):
8 |     print(f"{Hx_config.yellow}{target} subfinderScan Scan Start ~{Hx_config.end}")
9 |     tempFilePath = "{}{}".format(Hx_config.Temp_path, filename)
10 |     scanCommand = "{}subfinder.exe -d {} -o {}".format(Hx_config.subfinder_Path, target, tempFilePath)
11 | 
12 |     os.system(scanCommand)
13 |     f = open(tempFilePath)
14 |     lines = f.readlines()
15 |     for line in lines:
16 |         print(f"{Hx_config.green}{line.strip()}{Hx_config.end}")
17 |         Hx_config.sub_queue.put(line.rstrip('\n'))
18 |     f.close()
19 |     print(f"{Hx_config.yellow}{target} subfinderScan Scan End ~{Hx_config.end}")
20 |     print(f"{Hx_config.green}subfinderScan 结束 !当前的url个数为{Hx_config.sub_queue.qsize()}{Hx_config.end}")
21 |     return
22 | 
23 | def main():
24 |     subfinderScan('baidu.com', 'baidu.com.txt')  # 需同时传入临时保存文件名
25 |     return
26 | 
27 | if __name__ == '__main__':
28 |     main()
--------------------------------------------------------------------------------
/Sublist3r/Sublist3rMain.py:
--------------------------------------------------------------------------------
1 | import Hx_config
2 | from Sublist3r import sublist3r
3 | 
4 | '''
5 | Sublist3rScan(target) 函数
6 | 参数:
7 |     target 需要收集子域名的目标 例如:baidu.com
8 | 作用:
9 |     使用Sublist3r进行子域名收集 并且将结果存储到 sub_queue 队列中
10 |     使用Sublist3r 模块化用法 sublist3r.main
11 | 输出:
12 |     无
13 | '''
14 | def Sublist3rScan(target):
15 |     print(f"{Hx_config.yellow}{target} Sublist3rScan Scan Start ~{Hx_config.end}")
16 |     subdomains = sublist3r.main(target, 40, savefile=None, ports=None, silent=False, verbose=False,
17 |                                 enable_bruteforce=False, engines=None)
18 |     print(f"{Hx_config.yellow}{target} Sublist3rScan Scan End ~{Hx_config.end}")
19 |     for temp_sub in subdomains:
20 |         Hx_config.sub_queue.put(temp_sub)
21 |     print(f"{Hx_config.yellow}{target} Sublist3r Save queue End ~{Hx_config.end}")
22 |     print(f"{Hx_config.green}Sublist3r 结束 !当前的url个数为{Hx_config.sub_queue.qsize()}{Hx_config.end}")
23 |     return
24 | 
25 | def main():
26 |     Sublist3rScan('baidu.com')
27 |     return
28 | 
29 | if __name__ == '__main__':
30 |     main()
--------------------------------------------------------------------------------
/Sublist3r/requirements.txt:
--------------------------------------------------------------------------------
1 | argparse
2 | dnspython
3 | requests
--------------------------------------------------------------------------------
/Sublist3r/subbrute/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/Sublist3r/subbrute/__init__.py -------------------------------------------------------------------------------- /Sublist3r/subbrute/resolvers.txt: -------------------------------------------------------------------------------- 1 | 103.20.188.35 2 | 103.20.188.83 3 | 103.22.248.62 4 | 103.3.46.105 5 | 106.186.17.181 6 | 106.51.255.133 7 | 109.69.8.34 8 | 109.69.8.51 9 | 110.170.117.15 10 | 110.76.151.17 11 | 114.114.114.114 12 | 114.114.114.119 13 | 114.114.115.115 14 | 114.114.115.119 15 | 115.68.100.102 16 | 115.68.100.103 17 | 115.68.62.210 18 | 115.68.62.222 19 | 115.85.69.162 20 | 117.102.224.154 21 | 117.102.224.230 22 | 119.160.208.251 23 | 119.160.208.252 24 | 119.18.159.222 25 | 119.252.167.229 26 | 121.152.231.196 27 | 121.194.2.2 28 | 12.127.16.67 29 | 12.127.17.72 30 | 121.52.206.130 31 | 121.52.87.128 32 | 122.0.0.12 33 | 122.155.12.41 34 | 122.155.167.38 35 | 122.155.167.70 36 | 122.155.3.119 37 | 122.210.229.161 38 | 122.255.96.132 39 | 124.107.135.126 40 | 1.2.4.8 41 | 128.199.248.105 42 | 129.250.35.250 43 | 129.250.35.251 44 | 129.7.1.1 45 | 129.7.1.6 46 | 130.180.228.2 47 | 131.155.140.130 48 | 131.191.7.12 49 | 134.48.1.32 50 | 134.60.1.111 51 | 137.82.1.1 52 | 139.0.27.186 53 | 139.130.4.4 54 | 139.175.55.244 55 | 141.1.1.1 56 | 141.1.27.249 57 | 141.211.125.15 58 | 141.211.125.17 59 | 141.211.144.15 60 | 141.211.144.17 61 | 142.103.1.1 62 | 142.46.1.130 63 | 142.46.128.130 64 | 144.76.202.253 65 | 147.235.250.2 66 | 147.235.251.3 67 | 147.29.10.55 68 | 147.29.10.6 69 | 148.233.151.6 70 | 148.233.151.8 71 | 148.243.65.17 72 | 149.156.64.210 73 | 149.211.153.50 74 | 151.11.85.5 75 | 152.99.1.10 76 | 152.99.200.6 77 | 152.99.78.136 78 | 153.19.1.254 79 | 158.43.128.1 80 | 158.43.128.72 81 | 158.43.192.1 82 | 158.43.240.3 83 | 158.43.240.4 84 | 159.90.200.7 85 | 160.7.240.20 86 | 164.124.101.2 87 | 164.124.107.9 88 | 165.166.142.42 89 | 165.21.100.88 90 | 165.21.83.88 91 | 165.87.13.129 92 | 165.87.201.244 93 | 168.126.63.1 94 | 168.188.1.1 95 | 168.213.3.10 96 | 168.213.3.11 97 | 168.215.165.186 98 | 168.215.210.50 99 | 168.95.1.1 100 | 170.51.255.100 101 | 170.56.58.53 102 | 173.44.32.2 103 | 174.34.129.34 104 | 178.151.86.169 105 | 178.161.146.10 106 | 178.254.21.113 107 | 180.211.129.42 108 | 185.46.7.100 109 | 185.46.7.110 110 | 187.115.52.83 111 | 187.73.241.67 112 | 189.90.16.20 113 | 190.11.32.199 114 | 192.116.16.26 115 | 192.172.250.8 116 | 192.190.173.40 117 | 192.43.161.22 118 | 192.76.144.66 119 | 193.101.111.10 120 | 193.111.144.145 121 | 193.111.144.161 122 | 193.111.200.191 123 | 193.111.238.5 124 | 193.138.78.117 125 | 193.142.218.3 126 | 193.148.29.100 127 | 193.148.29.103 128 | 193.151.32.40 129 | 193.16.255.2 130 | 193.17.213.10 131 | 193.189.114.254 132 | 193.200.68.230 133 | 193.201.185.3 134 | 193.205.136.1 135 | 193.22.119.195 136 | 193.226.128.129 137 | 193.226.61.1 138 | 193.228.86.5 139 | 193.230.161.3 140 | 193.230.161.4 141 | 193.230.183.201 142 | 193.230.230.1 143 | 193.231.112.1 144 | 193.231.249.1 145 | 193.231.80.7 146 | 193.232.69.22 147 | 193.252.247.52 148 | 193.252.247.53 149 | 193.254.232.1 150 | 193.255.146.53 151 | 193.26.6.130 152 | 193.27.192.98 153 | 193.33.114.2 154 | 193.33.220.3 155 | 193.33.236.1 156 | 193.41.10.1 157 | 193.41.59.151 158 | 193.43.108.3 159 | 193.43.108.62 160 | 193.43.17.4 161 | 193.58.204.59 162 | 193.58.251.251 163 | 193.67.79.39 164 | 193.78.240.12 165 | 193.86.86.2 166 | 193.89.221.124 167 | 193.89.221.2 168 | 
193.89.248.1 169 | 193.95.93.243 170 | 193.95.93.77 171 | 194.102.106.1 172 | 194.113.160.68 173 | 194.1.154.37 174 | 194.117.245.2 175 | 194.12.224.34 176 | 194.126.130.7 177 | 194.132.119.151 178 | 194.132.32.32 179 | 194.141.12.1 180 | 194.141.45.4 181 | 194.145.147.194 182 | 194.145.240.6 183 | 194.146.136.1 184 | 194.149.133.11 185 | 194.149.146.2 186 | 194.149.156.140 187 | 194.150.168.168 188 | 194.153.232.17 189 | 194.158.206.205 190 | 194.158.206.206 191 | 194.164.181.2 192 | 194.169.239.10 193 | 194.169.244.33 194 | 194.169.244.34 195 | 194.172.160.4 196 | 194.179.109.10 197 | 194.179.1.100 198 | 194.18.231.5 199 | 194.187.164.20 200 | 194.190.225.2 201 | 194.20.0.24 202 | 194.213.193.5 203 | 194.226.211.11 204 | 194.246.126.68 205 | 194.246.127.11 206 | 194.250.223.1 207 | 194.250.223.2 208 | 194.25.0.52 209 | 194.25.0.60 210 | 194.39.185.10 211 | 194.50.10.2 212 | 194.52.202.98 213 | 194.54.181.90 214 | 194.6.240.1 215 | 194.72.9.61 216 | 194.75.147.212 217 | 194.77.8.1 218 | 194.88.202.11 219 | 194.88.203.6 220 | 194.98.65.165 221 | 195.112.96.34 222 | 195.113.144.194 223 | 195.114.173.153 224 | 195.12.4.247 225 | 195.129.12.114 226 | 195.129.12.122 227 | 195.129.12.83 228 | 195.13.38.3 229 | 195.137.162.149 230 | 195.140.236.250 231 | 195.140.236.253 232 | 195.14.50.21 233 | 195.146.81.130 234 | 195.153.19.10 235 | 195.153.19.5 236 | 195.158.239.4 237 | 195.167.98.3 238 | 195.170.96.2 239 | 195.170.97.254 240 | 195.175.121.10 241 | 195.175.39.39 242 | 195.175.39.40 243 | 195.177.223.3 244 | 195.177.240.3 245 | 195.178.123.130 246 | 195.182.110.132 247 | 195.182.192.10 248 | 195.182.192.2 249 | 195.186.1.110 250 | 195.186.1.111 251 | 195.186.4.110 252 | 195.186.4.111 253 | 195.189.130.1 254 | 195.189.131.1 255 | 195.198.214.72 256 | 195.20.193.11 257 | 195.2.195.1 258 | 195.22.192.252 259 | 195.24.228.3 260 | 195.243.214.4 261 | 195.244.25.3 262 | 195.245.76.6 263 | 195.27.1.1 264 | 195.35.110.4 265 | 195.5.125.3 266 | 195.60.70.5 267 | 195.67.15.102 268 | 195.67.15.73 269 | 195.67.160.3 270 | 195.67.27.18 271 | 195.69.65.98 272 | 195.70.237.42 273 | 195.70.248.1 274 | 195.74.128.6 275 | 195.7.64.3 276 | 195.88.84.100 277 | 195.96.208.1 278 | 195.99.66.220 279 | 196.41.225.11 280 | 198.60.22.2 281 | 198.82.247.34 282 | 199.249.18.1 283 | 199.249.19.2 284 | 199.44.194.2 285 | 199.80.64.202 286 | 200.113.185.227 287 | 200.118.2.88 288 | 200.175.3.232 289 | 200.221.11.100 290 | 200.221.11.101 291 | 200.221.137.40 292 | 200.221.137.41 293 | 200.221.137.42 294 | 200.221.137.43 295 | 200.221.137.44 296 | 200.221.137.45 297 | 200.221.137.46 298 | 200.221.137.47 299 | 200.35.174.126 300 | 200.40.230.36 301 | 200.49.160.31 302 | 200.49.160.35 303 | 200.53.250.1 304 | 200.56.224.11 305 | 200.57.2.108 306 | 200.57.7.61 307 | 200.69.193.2 308 | 200.85.0.105 309 | 200.85.35.158 310 | 200.85.61.90 311 | 200.88.127.22 312 | 200.88.127.23 313 | 200.95.144.3 314 | 201.131.4.5 315 | 201.131.4.9 316 | 202.120.111.3 317 | 202.130.97.65 318 | 202.130.97.66 319 | 202.136.162.11 320 | 202.138.120.4 321 | 202.138.120.6 322 | 202.138.120.87 323 | 202.148.202.3 324 | 202.148.202.4 325 | 202.152.162.66 326 | 202.180.160.1 327 | 202.181.224.2 328 | 202.199.160.206 329 | 202.248.20.133 330 | 202.248.37.74 331 | 202.28.162.1 332 | 202.30.143.11 333 | 202.38.128.58 334 | 202.43.178.244 335 | 202.43.178.245 336 | 202.44.204.63 337 | 202.44.55.193 338 | 202.46.1.2 339 | 202.51.96.5 340 | 202.62.224.2 341 | 202.83.20.101 342 | 202.83.30.5 343 | 202.86.8.100 344 | 202.91.8.234 345 | 203.109.129.67 346 | 
203.109.129.68 347 | 203.113.11.37 348 | 203.115.130.74 349 | 203.115.71.66 350 | 203.115.81.38 351 | 203.119.36.106 352 | 203.119.8.106 353 | 203.130.2.3 354 | 203.133.1.7 355 | 203.133.1.8 356 | 203.146.237.222 357 | 203.146.237.237 358 | 203.156.104.21 359 | 203.176.144.12 360 | 203.176.144.20 361 | 203.189.88.10 362 | 203.189.88.11 363 | 203.189.88.133 364 | 203.189.88.148 365 | 203.189.88.151 366 | 203.189.88.152 367 | 203.189.88.154 368 | 203.189.88.156 369 | 203.189.88.211 370 | 203.189.88.212 371 | 203.189.88.213 372 | 203.189.88.214 373 | 203.189.88.54 374 | 203.189.89.1 375 | 203.189.89.134 376 | 203.189.89.15 377 | 203.189.89.209 378 | 203.189.89.241 379 | 203.189.89.36 380 | 203.189.89.65 381 | 203.193.139.150 382 | 203.196.0.6 383 | 203.198.7.66 384 | 203.2.193.67 385 | 203.239.131.1 386 | 203.248.252.2 387 | 203.250.129.214 388 | 203.253.31.1 389 | 203.41.44.20 390 | 203.63.8.27 391 | 203.80.96.10 392 | 203.89.226.24 393 | 203.89.226.26 394 | 203.90.78.65 395 | 204.116.57.2 396 | 204.117.214.10 397 | 204.174.120.45 398 | 204.95.160.2 399 | 205.134.162.209 400 | 205.151.222.250 401 | 205.152.6.20 402 | 205.171.2.65 403 | 205.172.19.193 404 | 205.172.19.79 405 | 205.236.148.130 406 | 205.236.148.131 407 | 205.242.187.234 408 | 206.124.0.254 409 | 206.124.1.254 410 | 206.124.64.1 411 | 206.124.64.253 412 | 206.248.95.194 413 | 206.253.194.65 414 | 206.253.33.130 415 | 206.253.33.131 416 | 206.51.143.55 417 | 206.80.254.4 418 | 206.80.254.68 419 | 207.17.190.5 420 | 207.17.190.7 421 | 207.179.3.25 422 | 207.241.160.34 423 | 207.248.224.71 424 | 207.248.224.72 425 | 207.248.57.10 426 | 207.249.163.155 427 | 207.91.130.4 428 | 207.91.250.34 429 | 208.116.30.21 430 | 208.38.1.15 431 | 208.48.253.106 432 | 208.59.89.20 433 | 208.67.220.220 434 | 208.67.220.222 435 | 208.67.222.220 436 | 208.67.222.222 437 | 208.72.120.204 438 | 208.78.24.238 439 | 208.79.56.204 440 | 208.90.237.9 441 | 209.0.205.11 442 | 209.143.0.10 443 | 209.143.22.182 444 | 209.172.128.2 445 | 209.191.129.65 446 | 209.195.95.95 447 | 209.197.128.2 448 | 209.213.223.18 449 | 209.216.160.131 450 | 209.216.160.2 451 | 209.252.33.101 452 | 209.51.161.14 453 | 209.51.161.58 454 | 209.55.0.110 455 | 209.55.1.220 456 | 209.63.0.18 457 | 209.87.64.70 458 | 209.87.79.232 459 | 210.180.98.69 460 | 210.220.163.82 461 | 210.2.4.8 462 | 210.29.96.33 463 | 210.34.0.18 464 | 210.34.48.34 465 | 210.44.112.66 466 | 210.80.58.3 467 | 210.80.58.66 468 | 210.94.0.7 469 | 211.115.194.2 470 | 211.115.194.3 471 | 211.161.46.84 472 | 211.172.208.2 473 | 211.175.82.66 474 | 211.237.65.21 475 | 211.237.65.31 476 | 211.41.128.70 477 | 211.41.128.71 478 | 211.60.155.5 479 | 211.63.64.11 480 | 211.67.112.1 481 | 211.78.130.10 482 | 211.78.130.11 483 | 211.78.130.3 484 | 212.102.225.2 485 | 212.110.122.132 486 | 212.1.118.3 487 | 212.112.39.22 488 | 212.112.39.25 489 | 212.116.76.76 490 | 212.118.0.2 491 | 212.118.241.1 492 | 212.118.241.33 493 | 212.122.224.10 494 | 212.14.253.242 495 | 212.15.86.12 496 | 212.181.124.8 497 | 212.19.149.226 498 | 212.192.128.3 499 | 212.19.96.2 500 | 212.203.32.11 501 | 212.203.33.12 502 | 212.211.132.4 503 | 212.214.229.170 504 | 212.216.172.222 505 | 212.230.255.1 506 | 212.230.255.129 507 | 212.236.250.4 508 | 212.245.158.66 509 | 212.26.6.11 510 | 212.28.34.90 511 | 212.30.96.211 512 | 212.31.253.69 513 | 212.31.32.130 514 | 212.31.32.131 515 | 212.34.194.211 516 | 212.36.24.3 517 | 212.37.208.3 518 | 212.40.0.10 519 | 212.40.5.50 520 | 212.40.5.51 521 | 212.49.128.65 522 | 212.51.16.1 523 | 212.51.17.1 
524 | 212.54.160.7 525 | 212.57.190.166 526 | 212.58.3.2 527 | 212.58.3.7 528 | 212.58.3.8 529 | 212.59.199.2 530 | 212.59.199.6 531 | 212.62.98.10 532 | 212.66.0.1 533 | 212.66.1.1 534 | 212.66.129.98 535 | 212.66.160.2 536 | 212.67.131.4 537 | 212.73.209.34 538 | 212.73.65.40 539 | 212.82.225.7 540 | 212.82.226.212 541 | 212.85.112.32 542 | 212.85.32.3 543 | 212.89.130.180 544 | 212.9.160.1 545 | 212.94.162.33 546 | 212.94.32.32 547 | 212.94.34.34 548 | 212.96.1.70 549 | 212.97.32.2 550 | 212.98.160.50 551 | 212.98.160.65 552 | 213.0.76.5 553 | 213.0.77.5 554 | 213.0.77.8 555 | 213.115.244.69 556 | 213.128.194.2 557 | 213.131.178.10 558 | 213.135.67.1 559 | 213.151.109.1 560 | 213.157.0.194 561 | 213.157.196.130 562 | 213.157.196.131 563 | 213.157.196.132 564 | 213.158.72.1 565 | 213.16.104.61 566 | 213.164.38.66 567 | 213.171.220.209 568 | 213.172.33.34 569 | 213.178.66.2 570 | 213.184.242.6 571 | 213.211.50.1 572 | 213.211.50.2 573 | 213.218.117.85 574 | 213.234.128.211 575 | 213.235.248.228 576 | 213.239.204.35 577 | 213.241.193.250 578 | 213.244.72.31 579 | 213.27.209.53 580 | 213.27.209.8 581 | 213.55.96.166 582 | 213.8.145.133 583 | 213.88.195.146 584 | 213.88.195.147 585 | 213.88.195.148 586 | 216.106.1.2 587 | 216.106.184.6 588 | 216.131.94.5 589 | 216.131.95.20 590 | 216.136.95.2 591 | 216.138.119.6 592 | 216.146.35.230 593 | 216.147.131.33 594 | 216.17.128.1 595 | 216.17.128.2 596 | 216.175.203.51 597 | 216.181.31.11 598 | 216.184.96.4 599 | 216.184.96.5 600 | 216.184.96.6 601 | 216.185.64.6 602 | 216.186.27.15 603 | 216.194.28.33 604 | 216.198.139.68 605 | 216.21.128.22 606 | 216.21.129.22 607 | 216.218.221.6 608 | 216.218.226.238 609 | 216.235.1.3 610 | 216.237.221.42 611 | 216.244.192.3 612 | 216.244.192.32 613 | 216.254.141.13 614 | 216.254.141.2 615 | 216.254.95.2 616 | 216.27.175.2 617 | 216.47.160.12 618 | 216.47.160.13 619 | 216.52.126.1 620 | 216.52.129.1 621 | 216.52.161.33 622 | 216.52.169.1 623 | 216.52.190.33 624 | 216.52.254.1 625 | 216.52.254.33 626 | 216.52.41.1 627 | 216.52.41.33 628 | 216.52.65.1 629 | 216.52.65.33 630 | 216.52.94.1 631 | 216.52.94.33 632 | 216.52.97.33 633 | 216.54.201.11 634 | 216.58.97.20 635 | 216.58.97.21 636 | 216.66.22.2 637 | 216.66.38.58 638 | 216.66.80.26 639 | 216.66.80.30 640 | 216.66.80.98 641 | 216.81.128.132 642 | 216.81.96.67 643 | 216.81.96.68 644 | 217.107.10.254 645 | 217.107.11.35 646 | 217.113.48.1 647 | 217.115.16.2 648 | 217.115.16.3 649 | 217.117.0.38 650 | 217.117.111.1 651 | 217.144.144.211 652 | 217.144.6.6 653 | 217.148.0.17 654 | 217.149.155.180 655 | 217.149.17.1 656 | 217.15.17.2 657 | 217.156.106.1 658 | 217.173.198.3 659 | 217.17.34.68 660 | 217.174.252.116 661 | 217.18.206.12 662 | 217.18.206.22 663 | 217.18.80.105 664 | 217.18.90.105 665 | 217.196.1.5 666 | 217.196.1.6 667 | 217.219.236.8 668 | 217.22.209.254 669 | 217.24.112.2 670 | 217.27.240.20 671 | 217.28.113.13 672 | 217.28.98.62 673 | 217.31.204.130 674 | 217.32.105.66 675 | 217.64.163.1 676 | 217.64.167.1 677 | 217.65.192.1 678 | 217.66.226.8 679 | 217.69.160.18 680 | 217.69.169.25 681 | 217.72.1.2 682 | 217.72.168.34 683 | 217.73.17.110 684 | 217.76.240.2 685 | 217.78.80.70 686 | 217.78.80.74 687 | 217.79.225.8 688 | 217.8.180.98 689 | 218.102.23.228 690 | 218.192.240.2 691 | 218.223.32.1 692 | 218.232.110.36 693 | 218.232.110.37 694 | 219.250.36.130 695 | 219.252.2.100 696 | 220.128.173.228 697 | 220.227.60.12 698 | 220.233.0.1 699 | 221.139.13.130 700 | 24.154.1.4 701 | 24.154.1.5 702 | 35.8.2.41 703 | 35.8.2.42 704 | 35.8.2.45 705 | 35.8.98.43 706 
| 37.19.5.135 707 | 37.235.1.174 708 | 37.235.1.177 709 | 42.62.176.30 710 | 4.79.132.219 711 | 50.21.174.18 712 | 58.68.121.230 713 | 58.96.3.34 714 | 61.19.252.238 715 | 61.208.115.242 716 | 61.56.211.185 717 | 61.63.0.66 718 | 61.70.87.96 719 | 62.105.17.252 720 | 62.108.161.161 721 | 62.109.182.2 722 | 62.116.30.200 723 | 62.128.1.42 724 | 62.128.1.53 725 | 62.129.252.215 726 | 62.129.252.252 727 | 62.134.11.4 728 | 62.140.239.1 729 | 62.141.38.230 730 | 62.149.128.2 731 | 62.165.32.250 732 | 62.165.33.250 733 | 62.168.59.67 734 | 62.177.42.174 735 | 62.196.2.70 736 | 62.20.15.234 737 | 62.20.57.226 738 | 62.231.76.49 739 | 62.233.128.17 740 | 62.24.228.202 741 | 62.33.203.33 742 | 62.3.32.16 743 | 62.3.32.17 744 | 62.36.225.150 745 | 62.37.225.56 746 | 62.37.225.57 747 | 62.37.228.20 748 | 62.40.32.34 749 | 62.76.76.62 750 | 62.77.85.100 751 | 62.77.85.98 752 | 62.77.94.72 753 | 62.8.96.38 754 | 62.94.0.41 755 | 62.94.0.42 756 | 62.95.15.107 757 | 62.97.84.4 758 | 63.105.204.164 759 | 63.171.232.38 760 | 63.171.232.39 761 | 63.218.44.186 762 | 63.251.129.33 763 | 63.251.161.1 764 | 63.251.161.33 765 | 63.251.62.1 766 | 63.251.62.33 767 | 64.105.163.106 768 | 64.105.172.26 769 | 64.105.179.138 770 | 64.105.189.26 771 | 64.105.199.74 772 | 64.105.199.76 773 | 64.105.202.138 774 | 64.105.97.90 775 | 64.119.60.5 776 | 64.119.60.9 777 | 64.13.115.12 778 | 64.132.61.131 779 | 64.132.94.250 780 | 64.13.48.12 781 | 64.135.1.20 782 | 64.135.1.22 783 | 64.254.99.13 784 | 64.56.129.2 785 | 64.61.99.2 786 | 64.79.224.3 787 | 64.81.127.2 788 | 64.81.159.2 789 | 64.94.1.1 790 | 64.94.1.33 791 | 64.94.33.33 792 | 65.163.107.11 793 | 65.203.109.2 794 | 65.39.139.53 795 | 65.74.130.5 796 | 65.74.130.6 797 | 66.118.80.4 798 | 66.119.93.10 799 | 66.119.93.4 800 | 66.163.0.161 801 | 66.163.0.173 802 | 66.165.177.69 803 | 66.165.183.87 804 | 66.182.208.5 805 | 66.203.72.10 806 | 66.207.160.111 807 | 66.216.18.222 808 | 66.218.245.13 809 | 66.218.44.5 810 | 66.232.139.10 811 | 66.252.170.3 812 | 66.28.0.45 813 | 66.28.0.61 814 | 66.51.206.100 815 | 66.80.130.18 816 | 66.81.0.252 817 | 66.92.159.2 818 | 66.92.224.2 819 | 66.92.64.2 820 | 66.93.87.2 821 | 67.100.88.27 822 | 67.214.64.6 823 | 68.179.203.94 824 | 69.146.17.3 825 | 69.16.169.11 826 | 69.16.170.11 827 | 69.24.112.11 828 | 69.25.1.1 829 | 69.25.1.33 830 | 69.26.129.2 831 | 69.28.104.5 832 | 69.28.136.102 833 | 69.28.148.102 834 | 69.28.97.4 835 | 69.54.70.15 836 | 69.67.97.18 837 | 69.7.192.1 838 | 69.7.192.2 839 | 70.36.0.5 840 | 70.36.0.6 841 | 72.11.150.10 842 | 72.11.150.74 843 | 72.52.104.74 844 | 74.222.30.2 845 | 74.82.46.6 846 | 75.94.255.12 847 | 76.73.18.50 848 | 77.240.144.164 849 | 77.241.112.23 850 | 77.247.176.114 851 | 77.41.229.2 852 | 77.72.192.3 853 | 77.73.104.3 854 | 77.87.152.9 855 | 77.88.8.1 856 | 77.88.8.2 857 | 77.88.8.8 858 | 77.88.8.88 859 | 78.159.224.224 860 | 78.159.232.232 861 | 78.31.96.2 862 | 79.132.192.2 863 | 79.141.81.250 864 | 79.141.82.250 865 | 79.141.83.250 866 | 80.149.86.20 867 | 80.254.79.157 868 | 80.67.169.12 869 | 80.72.146.2 870 | 80.73.1.1 871 | 80.74.160.11 872 | 80.79.179.2 873 | 80.84.72.20 874 | 80.88.171.16 875 | 80.92.178.98 876 | 80.94.48.254 877 | 81.17.66.14 878 | 81.17.72.70 879 | 81.180.201.98 880 | 81.18.242.100 881 | 81.189.212.129 882 | 81.18.97.50 883 | 81.200.80.11 884 | 81.222.80.2 885 | 81.23.144.250 886 | 81.24.128.146 887 | 81.25.152.2 888 | 81.27.133.50 889 | 81.27.135.50 890 | 81.28.128.34 891 | 8.15.12.5 892 | 81.7.200.80 893 | 81.92.96.22 894 | 81.92.97.12 895 | 
81.95.128.218 896 | 82.115.163.2 897 | 82.141.136.2 898 | 82.144.181.1 899 | 82.145.160.140 900 | 82.145.163.1 901 | 82.151.90.1 902 | 82.198.129.138 903 | 82.199.32.36 904 | 82.212.67.100 905 | 82.212.67.101 906 | 82.96.65.2 907 | 82.96.81.10 908 | 82.96.86.20 909 | 82.99.211.195 910 | 83.137.41.8 911 | 83.137.41.9 912 | 83.142.192.2 913 | 83.142.9.30 914 | 83.143.12.246 915 | 83.143.8.220 916 | 83.149.244.194 917 | 83.151.112.193 918 | 83.166.8.18 919 | 83.240.154.200 920 | 83.242.140.10 921 | 83.97.97.3 922 | 84.200.69.80 923 | 84.200.70.40 924 | 84.8.2.11 925 | 85.114.105.3 926 | 85.115.224.18 927 | 85.119.136.158 928 | 85.119.72.2 929 | 85.124.252.33 930 | 85.132.32.41 931 | 85.132.32.42 932 | 85.158.50.50 933 | 85.174.190.2 934 | 8.5.244.5 935 | 85.88.19.10 936 | 85.88.19.11 937 | 87.103.133.167 938 | 87.104.254.135 939 | 87.104.254.39 940 | 87.197.40.58 941 | 87.204.12.130 942 | 87.204.28.12 943 | 87.229.99.1 944 | 88.147.158.1 945 | 88.255.242.6 946 | 88.255.96.196 947 | 8.8.4.4 948 | 88.82.84.129 949 | 8.8.8.8 950 | 89.107.129.15 951 | 89.107.16.2 952 | 89.185.75.244 953 | 89.186.66.6 954 | 89.186.66.7 955 | 89.233.250.137 956 | 89.249.224.1 957 | 90.189.109.2 958 | 91.143.20.6 959 | 91.144.248.227 960 | 91.185.2.10 961 | 91.185.6.10 962 | 91.188.0.35 963 | 91.188.0.5 964 | 91.194.112.10 965 | 91.197.164.11 966 | 91.198.154.133 967 | 91.199.139.1 968 | 91.203.177.4 969 | 91.203.188.1 970 | 91.207.40.2 971 | 91.210.24.22 972 | 91.211.16.6 973 | 91.212.56.5 974 | 91.214.72.33 975 | 91.214.72.34 976 | 91.98.128.112 977 | 92.43.224.1 978 | 93.157.14.65 979 | 93.157.233.3 980 | 93.188.152.3 981 | 94.247.200.2 982 | 94.247.200.3 983 | 95.158.128.2 984 | 95.158.129.2 985 | 95.173.193.3 986 | 95.85.9.86 987 | -------------------------------------------------------------------------------- /Sublist3r/subbrute/subbrute.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | #SubBrute v1.2 4 | #A (very) fast subdomain enumeration tool. 5 | # 6 | #Maintained by rook 7 | #Contributors: 8 | #JordanMilne, KxCode, rc0r, memoryprint, ppaulojr 9 | # 10 | import re 11 | import optparse 12 | import os 13 | import signal 14 | import sys 15 | import uuid 16 | import random 17 | import ctypes 18 | import dns.resolver 19 | import dns.rdatatype 20 | import json 21 | 22 | #Python 2.x and 3.x compatiablity 23 | #We need the Queue library for exception handling 24 | try: 25 | import queue as Queue 26 | except: 27 | import Queue 28 | 29 | #The 'multiprocessing' library does not rely upon a Global Interpreter Lock (GIL) 30 | import multiprocessing 31 | 32 | #Microsoft compatiablity 33 | if sys.platform.startswith('win'): 34 | #Drop-in replacement, subbrute + multiprocessing throws exceptions on windows. 35 | import threading 36 | multiprocessing.Process = threading.Thread 37 | 38 | class verify_nameservers(multiprocessing.Process): 39 | 40 | def __init__(self, target, record_type, resolver_q, resolver_list, wildcards): 41 | multiprocessing.Process.__init__(self, target = self.run) 42 | self.daemon = True 43 | signal_init() 44 | 45 | self.time_to_die = False 46 | self.resolver_q = resolver_q 47 | self.wildcards = wildcards 48 | #Do we need wildcards for other types of records? 49 | #This needs testing! 50 | self.record_type = "A" 51 | if record_type == "AAAA": 52 | self.record_type = record_type 53 | self.resolver_list = resolver_list 54 | resolver = dns.resolver.Resolver() 55 | #The domain provided by the user. 
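
A minimal standalone sketch of the latency probe used in the constructor below, assuming dnspython is installed; the helper name and its defaults are illustrative, not part of subbrute:

    import dns.resolver

    def probe_resolver(ip, qname="www.google.com", timeout=1.0):
        # True if `ip` answers an A query for `qname` within `timeout` seconds.
        r = dns.resolver.Resolver(configure=False)
        r.nameservers = [ip]
        r.timeout = r.lifetime = timeout
        try:
            r.query(qname, "A")  # dnspython 1.x API, as used in this file; 2.x renames it resolve()
            return True
        except Exception:
            return False
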
56 | self.target = target 57 | #1 website in the world, modify the following line when this status changes. 58 | #www.google.cn, I'm looking at you ;) 59 | self.most_popular_website = "www.google.com" 60 | #We shouldn't need the backup_resolver, but we can use them if need be. 61 | #We must have a resolver, and localhost can work in some environments. 62 | self.backup_resolver = resolver.nameservers + ['127.0.0.1', '8.8.8.8', '8.8.4.4'] 63 | #Ideally a nameserver should respond in less than 1 sec. 64 | resolver.timeout = 1 65 | resolver.lifetime = 1 66 | try: 67 | #Let's test the latency of our connection. 68 | #Google's DNS server should be an ideal time test. 69 | resolver.nameservers = ['8.8.8.8'] 70 | resolver.query(self.most_popular_website, self.record_type) 71 | except: 72 | #Our connection is slower than a junebug in molasses 73 | resolver = dns.resolver.Resolver() 74 | self.resolver = resolver 75 | 76 | def end(self): 77 | self.time_to_die = True 78 | 79 | #This process cannot block forever, it needs to check if it's time to die. 80 | def add_nameserver(self, nameserver): 81 | keep_trying = True 82 | while not self.time_to_die and keep_trying: 83 | try: 84 | self.resolver_q.put(nameserver, timeout = 1) 85 | trace("Added nameserver:", nameserver) 86 | keep_trying = False 87 | except Exception as e: 88 | if type(e) == Queue.Full or str(type(e)) == "<class 'queue.Full'>": 89 | keep_trying = True 90 | 91 | def verify(self, nameserver_list): 92 | added_resolver = False 93 | for server in nameserver_list: 94 | if self.time_to_die: 95 | #We are done here. 96 | break 97 | server = server.strip() 98 | if server: 99 | self.resolver.nameservers = [server] 100 | try: 101 | #test_result = self.resolver.query(self.most_popular_website, "A") 102 | #should throw an exception before this line. 103 | if True:#test_result: 104 | #Only add the nameserver to the queue if we can detect wildcards. 105 | if(self.find_wildcards(self.target)):# and self.find_wildcards(".com") 106 | #wildcards have been added to the set, it is now safe to be added to the queue. 107 | #blocking queue, this process will halt on put() when the queue is full: 108 | self.add_nameserver(server) 109 | added_resolver = True 110 | else: 111 | trace("Rejected nameserver - wildcard:", server) 112 | except Exception as e: 113 | #Rejected server :( 114 | trace("Rejected nameserver - unreliable:", server, type(e)) 115 | return added_resolver 116 | 117 | def run(self): 118 | #Every user will get a different set of resolvers, this helps redistribute traffic. 119 | random.shuffle(self.resolver_list) 120 | if not self.verify(self.resolver_list): 121 | #This should never happen, inform the user. 122 | sys.stderr.write('Warning: No nameservers found, trying fallback list.\n') 123 | #Try and fix it for the user: 124 | self.verify(self.backup_resolver) 125 | #End of the resolvers list. 126 | try: 127 | self.resolver_q.put(False, timeout = 1) 128 | except: 129 | pass 130 | 131 | #Only add the nameserver to the queue if we can detect wildcards. 132 | #Returns False on error. 133 | def find_wildcards(self, host): 134 | #We want to solve the following three problems: 135 | #1) The target might have a wildcard DNS record. 136 | #2) The target may be using geolocation-aware DNS. 137 | #3) The DNS server we are testing may respond to non-existent 'A' records with advertisements. 138 | #I have seen a CloudFlare Enterprise customer with the first two conditions. 139 | try: 140 | #This is case #3, these spam nameservers seem to be more trouble than they are worth.
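
The wildcard test in find_wildcards() boils down to: resolve a few random labels under the target and treat any answers as wildcard addresses to filter out later. A minimal sketch of that idea (illustrative helper, dnspython 1.x API):

    import uuid
    import dns.resolver

    def wildcard_ips(domain, nameserver, attempts=3):
        # Random labels should NXDOMAIN; any A records that do come back
        # are wildcard answers and must be excluded from brute-force results.
        r = dns.resolver.Resolver(configure=False)
        r.nameservers = [nameserver]
        found = set()
        for _ in range(attempts):
            probe = "%s.%s" % (uuid.uuid4().hex, domain)
            try:
                found.update(str(a) for a in r.query(probe, "A"))
            except Exception:
                pass  # NXDOMAIN is the expected, non-wildcard case
        return found
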
141 | wildtest = self.resolver.query(uuid.uuid4().hex + ".com", "A") 142 | if len(wildtest): 143 | trace("Spam DNS detected:", host) 144 | return False 145 | except: 146 | pass 147 | test_counter = 8 148 | looking_for_wildcards = True 149 | while looking_for_wildcards and test_counter >= 0 : 150 | looking_for_wildcards = False 151 | #Don't get lost, this nameserver could be playing tricks. 152 | test_counter -= 1 153 | try: 154 | testdomain = "%s.%s" % (uuid.uuid4().hex, host) 155 | wildtest = self.resolver.query(testdomain, self.record_type) 156 | #This 'A' record may contain a list of wildcards. 157 | if wildtest: 158 | for w in wildtest: 159 | w = str(w) 160 | if w not in self.wildcards: 161 | #wildcards were detected. 162 | self.wildcards[w] = None 163 | #We found atleast one wildcard, look for more. 164 | looking_for_wildcards = True 165 | except Exception as e: 166 | if type(e) == dns.resolver.NXDOMAIN or type(e) == dns.name.EmptyLabel: 167 | #not found 168 | return True 169 | else: 170 | #This resolver maybe flakey, we don't want it for our tests. 171 | trace("wildcard exception:", self.resolver.nameservers, type(e)) 172 | return False 173 | #If we hit the end of our depth counter and, 174 | #there are still wildcards, then reject this nameserver because it smells bad. 175 | return (test_counter >= 0) 176 | 177 | class lookup(multiprocessing.Process): 178 | 179 | def __init__(self, in_q, out_q, resolver_q, domain, wildcards, spider_blacklist): 180 | multiprocessing.Process.__init__(self, target = self.run) 181 | signal_init() 182 | self.required_nameservers = 16 183 | self.in_q = in_q 184 | self.out_q = out_q 185 | self.resolver_q = resolver_q 186 | self.domain = domain 187 | self.wildcards = wildcards 188 | self.spider_blacklist = spider_blacklist 189 | self.resolver = dns.resolver.Resolver() 190 | #Force pydns to use our nameservers 191 | self.resolver.nameservers = [] 192 | 193 | def get_ns(self): 194 | ret = [] 195 | try: 196 | ret = [self.resolver_q.get_nowait()] 197 | if ret == False: 198 | #Queue is empty, inform the rest. 199 | self.resolver_q.put(False) 200 | ret = [] 201 | except: 202 | pass 203 | return ret 204 | 205 | def get_ns_blocking(self): 206 | ret = [] 207 | ret = [self.resolver_q.get()] 208 | if ret == False: 209 | trace("get_ns_blocking - Resolver list is empty.") 210 | #Queue is empty, inform the rest. 211 | self.resolver_q.put(False) 212 | ret = [] 213 | return ret 214 | 215 | def check(self, host, record_type = "A", retries = 0): 216 | trace("Checking:", host) 217 | cname_record = [] 218 | retries = 0 219 | if len(self.resolver.nameservers) <= self.required_nameservers: 220 | #This process needs more nameservers, lets see if we have one avaible 221 | self.resolver.nameservers += self.get_ns() 222 | #Ok we should be good to go. 223 | while True: 224 | try: 225 | #Query the nameserver, this is not simple... 
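
The CNAME branch of check() that follows walks alias chains with a hard 20-hop cap. The same pattern in isolation (illustrative helper, dnspython 1.x API):

    import dns.resolver

    def follow_cname(host, max_hops=20):
        # Collect each alias target in turn; the hop cap guards against CNAME loops.
        r = dns.resolver.Resolver()
        chain = []
        for _ in range(max_hops):
            try:
                resp = r.query(host, "CNAME")
            except Exception:  # NoAnswer / NXDOMAIN ends the chain
                break
            host = str(resp[0]).rstrip(".")
            chain.append(host)
        return chain
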
226 | if not record_type or record_type == "A": 227 | resp = self.resolver.query(host) 228 | #Crawl the response 229 | hosts = extract_hosts(str(resp.response), self.domain) 230 | for h in hosts: 231 | if h not in self.spider_blacklist: 232 | self.spider_blacklist[h]=None 233 | trace("Found host with spider:", h) 234 | self.in_q.put((h, record_type, 0)) 235 | return resp 236 | if record_type == "CNAME": 237 | #A max 20 lookups 238 | for x in range(20): 239 | try: 240 | resp = self.resolver.query(host, record_type) 241 | except dns.resolver.NoAnswer: 242 | resp = False 243 | pass 244 | if resp and resp[0]: 245 | host = str(resp[0]).rstrip(".") 246 | cname_record.append(host) 247 | else: 248 | return cname_record 249 | else: 250 | #All other records: 251 | return self.resolver.query(host, record_type) 252 | 253 | except Exception as e: 254 | if type(e) == dns.resolver.NoNameservers: 255 | #We should never be here. 256 | #We must block, another process should try this host. 257 | #do we need a limit? 258 | self.in_q.put((host, record_type, 0)) 259 | self.resolver.nameservers += self.get_ns_blocking() 260 | return False 261 | elif type(e) == dns.resolver.NXDOMAIN: 262 | #"Non-existent domain name." 263 | return False 264 | elif type(e) == dns.resolver.NoAnswer: 265 | #"The response did not contain an answer." 266 | if retries >= 1: 267 | trace("NoAnswer retry") 268 | return False 269 | retries += 1 270 | elif type(e) == dns.resolver.Timeout: 271 | trace("lookup failure:", host, retries) 272 | #Check if it is time to give up. 273 | if retries >= 3: 274 | if retries > 3: 275 | #Sometimes 'internal use' subdomains will timeout for every request. 276 | #As far as I'm concerned, the authorative name server has told us this domain exists, 277 | #we just can't know the address value using this method. 278 | return ['Mutiple Query Timeout - External address resolution was restricted'] 279 | else: 280 | #Maybe another process can take a crack at it. 281 | self.in_q.put((host, record_type, retries + 1)) 282 | return False 283 | retries += 1 284 | #retry... 285 | elif type(e) == IndexError: 286 | #Some old versions of dnspython throw this error, 287 | #doesn't seem to affect the results, and it was fixed in later versions. 288 | pass 289 | elif type(e) == TypeError: 290 | # We'll get here if the number procs > number of resolvers. 291 | # This is an internal error do we need a limit? 292 | self.in_q.put((host, record_type, 0)) 293 | return False 294 | elif type(e) == dns.rdatatype.UnknownRdatatype: 295 | error("DNS record type not supported:", record_type) 296 | else: 297 | trace("Problem processing host:", host) 298 | #dnspython threw some strange exception... 299 | raise e 300 | 301 | def run(self): 302 | #This process needs one resolver before it can start looking. 303 | self.resolver.nameservers += self.get_ns_blocking() 304 | while True: 305 | found_addresses = [] 306 | work = self.in_q.get() 307 | #Check if we have hit the end marker 308 | while not work: 309 | #Look for a re-queued lookup 310 | try: 311 | work = self.in_q.get(blocking = False) 312 | #if we took the end marker of the queue we need to put it back 313 | if work: 314 | self.in_q.put(False) 315 | except:#Queue.Empty 316 | trace('End of work queue') 317 | #There isn't an item behind the end marker 318 | work = False 319 | break 320 | #Is this the end all work that needs to be done? 
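
The shutdown protocol around this point is worth seeing in isolation: a False sentinel is re-queued so every worker sees it, and each worker posts False downstream before exiting so the parent can count exits. A self-contained sketch of that protocol (illustrative, with doubling standing in for the real lookup):

    import multiprocessing

    def worker(in_q, out_q):
        while True:
            item = in_q.get()
            if item is False:          # sentinel: no more work
                in_q.put(False)        # propagate so sibling workers also stop
                out_q.put(False)       # tell the parent this worker has exited
                break
            out_q.put(item * 2)        # stand-in for the real DNS lookup

    if __name__ == "__main__":
        in_q, out_q = multiprocessing.Queue(), multiprocessing.Queue()
        for i in range(5):
            in_q.put(i)
        in_q.put(False)                # terminate the queue, as run() does below
        procs = [multiprocessing.Process(target=worker, args=(in_q, out_q)) for _ in range(2)]
        for p in procs:
            p.start()
        remaining, results = len(procs), []
        while remaining:
            r = out_q.get()
            if r is False:
                remaining -= 1
            else:
                results.append(r)
        print(sorted(results))
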
321 | if not work: 322 | #Perpetuate the end marker for all threads to see 323 | self.in_q.put(False) 324 | #Notify the parent that we have died of natural causes 325 | self.out_q.put(False) 326 | break 327 | else: 328 | if len(work) == 3: 329 | #keep track of how many times this lookup has timedout. 330 | (hostname, record_type, timeout_retries) = work 331 | response = self.check(hostname, record_type, timeout_retries) 332 | else: 333 | (hostname, record_type) = work 334 | response = self.check(hostname, record_type) 335 | sys.stdout.flush() 336 | trace(response) 337 | #self.wildcards is populated by the verify_nameservers() thread. 338 | #This variable doesn't need a muetex, because it has a queue. 339 | #A queue ensure nameserver cannot be used before it's wildcard entries are found. 340 | reject = False 341 | if response: 342 | for a in response: 343 | a = str(a) 344 | if a in self.wildcards: 345 | trace("resovled wildcard:", hostname) 346 | reject= True 347 | #reject this domain. 348 | break; 349 | else: 350 | found_addresses.append(a) 351 | if not reject: 352 | #This request is filled, send the results back 353 | result = (hostname, record_type, found_addresses) 354 | self.out_q.put(result) 355 | 356 | #Extract relevant hosts 357 | #The dot at the end of a domain signifies the root, 358 | #and all TLDs are subs of the root. 359 | host_match = re.compile(r"((?<=[\s])[a-zA-Z0-9_-]+\.(?:[a-zA-Z0-9_-]+\.?)+(?=[\s]))") 360 | def extract_hosts(data, hostname): 361 | #made a global to avoid re-compilation 362 | global host_match 363 | ret = [] 364 | hosts = re.findall(host_match, data) 365 | for fh in hosts: 366 | host = fh.rstrip(".") 367 | #Is this host in scope? 368 | if host.endswith(hostname): 369 | ret.append(host) 370 | return ret 371 | 372 | #Return a list of unique sub domains, sorted by frequency. 373 | #Only match domains that have 3 or more sections subdomain.domain.tld 374 | domain_match = re.compile("([a-zA-Z0-9_-]*\.[a-zA-Z0-9_-]*\.[a-zA-Z0-9_-]*)+") 375 | def extract_subdomains(file_name): 376 | #Avoid re-compilation 377 | global domain_match 378 | subs = {} 379 | sub_file = open(file_name).read() 380 | f_all = re.findall(domain_match, sub_file) 381 | del sub_file 382 | for i in f_all: 383 | if i.find(".") >= 0: 384 | p = i.split(".")[0:-1] 385 | #gobble everything that might be a TLD 386 | while p and len(p[-1]) <= 3: 387 | p = p[0:-1] 388 | #remove the domain name 389 | p = p[0:-1] 390 | #do we have a subdomain.domain left? 391 | if len(p) >= 1: 392 | trace(str(p), " : ", i) 393 | for q in p: 394 | if q : 395 | #domain names can only be lower case. 396 | q = q.lower() 397 | if q in subs: 398 | subs[q] += 1 399 | else: 400 | subs[q] = 1 401 | #Free some memory before the sort... 
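
The sort just below orders candidate labels by how often they were seen; for example:

    # {"www": 42, "mail": 17, "dev": 9} -> ['www', 'mail', 'dev']
    subs = {"www": 42, "mail": 17, "dev": 9}
    print(sorted(subs.keys(), key=lambda x: subs[x], reverse=True))
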
402 | del f_all 403 | #Sort by freq in desc order 404 | subs_sorted = sorted(subs.keys(), key = lambda x: subs[x], reverse = True) 405 | return subs_sorted 406 | 407 | def print_target(target, record_type = None, subdomains = "names.txt", resolve_list = "resolvers.txt", process_count = 16, output = False, json_output = False, found_subdomains=[],verbose=False): 408 | subdomains_list = [] 409 | results_temp = [] 411 | for result in run(target, record_type, subdomains, resolve_list, process_count): 412 | (hostname, record_type, response) = result 413 | if not record_type: 414 | result = hostname 415 | else: 416 | result = "%s,%s" % (hostname, ",".join(response).strip(",")) 417 | if result not in found_subdomains: 418 | if verbose: 419 | print(result) 420 | subdomains_list.append(result) 421 | 422 | return set(subdomains_list) 423 | 424 | def run(target, record_type = None, subdomains = "names.txt", resolve_list = "resolvers.txt", process_count = 16): 425 | subdomains = check_open(subdomains) 426 | resolve_list = check_open(resolve_list) 427 | if (len(resolve_list) / 16) < process_count: 428 | sys.stderr.write('Warning: Fewer than 16 resolvers per thread, consider adding more nameservers to resolvers.txt.\n') 429 | if os.name == 'nt': 430 | wildcards = {} 431 | spider_blacklist = {} 432 | else: 433 | wildcards = multiprocessing.Manager().dict() 434 | spider_blacklist = multiprocessing.Manager().dict() 435 | in_q = multiprocessing.Queue() 436 | out_q = multiprocessing.Queue() 437 | #have a buffer of at most two new nameservers that lookup processes can draw from. 438 | resolve_q = multiprocessing.Queue(maxsize = 2) 439 | 440 | #Make a source of fast nameservers available for other processes. 441 | verify_nameservers_proc = verify_nameservers(target, record_type, resolve_q, resolve_list, wildcards) 442 | verify_nameservers_proc.start() 443 | #The empty string 444 | in_q.put((target, record_type)) 445 | spider_blacklist[target]=None 446 | #A list of subdomains is the input 447 | for s in subdomains: 448 | s = str(s).strip() 449 | if s: 450 | if s.find(","): 451 | #SubBrute should be forgiving, a comma will never be in a url 452 | #but the user might try and use a CSV file as input. 453 | s=s.split(",")[0] 454 | if not s.endswith(target): 455 | hostname = "%s.%s" % (s, target) 456 | else: 457 | #A user might feed an output list as a subdomain list. 458 | hostname = s 459 | if hostname not in spider_blacklist: 460 | spider_blacklist[hostname]=None 461 | work = (hostname, record_type) 462 | in_q.put(work) 463 | #Terminate the queue 464 | in_q.put(False) 465 | for i in range(process_count): 466 | worker = lookup(in_q, out_q, resolve_q, target, wildcards, spider_blacklist) 467 | worker.start() 468 | threads_remaining = process_count 469 | while True: 470 | try: 471 | #The output is valid hostnames 472 | result = out_q.get(True, 10) 473 | #we will get an empty exception before this runs. 474 | if not result: 475 | threads_remaining -= 1 476 | else: 477 | #run() is a generator, and yields results from the work queue 478 | yield result 479 | except Exception as e: 480 | #The cx_freeze version uses queue.Empty instead of Queue.Empty :( 481 | if type(e) == Queue.Empty or str(type(e)) == "<class 'queue.Empty'>": 482 | pass 483 | else: 484 | raise(e) 485 | #make sure everyone is complete 486 | if threads_remaining <= 0: 487 | break 488 | trace("killing nameserver process") 489 | #We no longer require name servers.
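
Since run() is a generator, a caller drains it lazily; a hypothetical library-style use (assuming names.txt and resolvers.txt exist next to the script):

    # each yielded result is (hostname, record_type, found_addresses)
    for hostname, record_type, addresses in run("example.com", process_count=8):
        print(hostname, addresses)
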
490 | try: 491 | killproc(pid = verify_nameservers_proc.pid) 492 | except: 493 | #Windows threading.tread 494 | verify_nameservers_proc.end() 495 | trace("End") 496 | 497 | #exit handler for signals. So ctrl+c will work. 498 | #The 'multiprocessing' library each process is it's own process which side-steps the GIL 499 | #If the user wants to exit prematurely, each process must be killed. 500 | def killproc(signum = 0, frame = 0, pid = False): 501 | if not pid: 502 | pid = os.getpid() 503 | if sys.platform.startswith('win'): 504 | try: 505 | kernel32 = ctypes.windll.kernel32 506 | handle = kernel32.OpenProcess(1, 0, pid) 507 | kernel32.TerminateProcess(handle, 0) 508 | except: 509 | #Oah windows. 510 | pass 511 | else: 512 | os.kill(pid, 9) 513 | 514 | #Toggle debug output 515 | verbose = False 516 | def trace(*args, **kwargs): 517 | if verbose: 518 | for a in args: 519 | sys.stderr.write(str(a)) 520 | sys.stderr.write(" ") 521 | sys.stderr.write("\n") 522 | 523 | def error(*args, **kwargs): 524 | for a in args: 525 | sys.stderr.write(str(a)) 526 | sys.stderr.write(" ") 527 | sys.stderr.write("\n") 528 | sys.exit(1) 529 | 530 | def check_open(input_file): 531 | ret = [] 532 | #If we can't find a resolver from an input file, then we need to improvise. 533 | try: 534 | ret = open(input_file).readlines() 535 | except: 536 | error("File not found:", input_file) 537 | if not len(ret): 538 | error("File is empty:", input_file) 539 | return ret 540 | 541 | #Every 'multiprocessing' process needs a signal handler. 542 | #All processes need to die, we don't want to leave zombies. 543 | def signal_init(): 544 | #Escliate signal to prevent zombies. 545 | signal.signal(signal.SIGINT, killproc) 546 | try: 547 | signal.signal(signal.SIGTSTP, killproc) 548 | signal.signal(signal.SIGQUIT, killproc) 549 | except: 550 | #Windows 551 | pass 552 | 553 | if __name__ == "__main__": 554 | if getattr(sys, 'frozen', False): 555 | # cx_freeze windows: 556 | base_path = os.path.dirname(sys.executable) 557 | multiprocessing.freeze_support() 558 | else: 559 | #everything else: 560 | base_path = os.path.dirname(os.path.realpath(__file__)) 561 | parser = optparse.OptionParser("usage: %prog [options] target") 562 | parser.add_option("-s", "--subs", dest = "subs", default = os.path.join(base_path, "names.txt"), 563 | type = "string", help = "(optional) list of subdomains, default = 'names.txt'") 564 | parser.add_option("-r", "--resolvers", dest = "resolvers", default = os.path.join(base_path, "resolvers.txt"), 565 | type = "string", help = "(optional) A list of DNS resolvers, if this list is empty it will OS's internal resolver default = 'resolvers.txt'") 566 | parser.add_option("-t", "--targets_file", dest = "targets", default = "", 567 | type = "string", help = "(optional) A file containing a newline delimited list of domains to brute force.") 568 | parser.add_option("-o", "--output", dest = "output", default = False, help = "(optional) Output to file (Greppable Format)") 569 | parser.add_option("-j", "--json", dest="json", default = False, help="(optional) Output to file (JSON Format)") 570 | parser.add_option("-a", "-A", action = 'store_true', dest = "ipv4", default = False, 571 | help = "(optional) Print all IPv4 addresses for sub domains (default = off).") 572 | parser.add_option("--type", dest = "type", default = False, 573 | type = "string", help = "(optional) Print all reponses for an arbitrary DNS record type (CNAME, AAAA, TXT, SOA, MX...)") 574 | parser.add_option("-c", "--process_count", dest = "process_count", 
575 | default = 16, type = "int", 576 | help = "(optional) Number of lookup theads to run. default = 16") 577 | parser.add_option("-f", "--filter_subs", dest = "filter", default = "", 578 | type = "string", help = "(optional) A file containing unorganized domain names which will be filtered into a list of subdomains sorted by frequency. This was used to build names.txt.") 579 | parser.add_option("-v", "--verbose", action = 'store_true', dest = "verbose", default = False, 580 | help = "(optional) Print debug information.") 581 | (options, args) = parser.parse_args() 582 | 583 | 584 | verbose = options.verbose 585 | 586 | if len(args) < 1 and options.filter == "" and options.targets == "": 587 | parser.error("You must provie a target. Use -h for help.") 588 | 589 | if options.filter != "": 590 | #cleanup this file and print it out 591 | for d in extract_subdomains(options.filter): 592 | print(d) 593 | sys.exit() 594 | 595 | if options.targets != "": 596 | targets = check_open(options.targets) #the domains 597 | else: 598 | targets = args #multiple arguments on the cli: ./subbrute.py google.com gmail.com yahoo.com if (len(resolver_list) / 16) < options.process_count: 599 | 600 | output = False 601 | if options.output: 602 | try: 603 | output = open(options.output, "w") 604 | except: 605 | error("Failed writing to file:", options.output) 606 | 607 | json_output = False 608 | if options.json: 609 | try: 610 | json_output = open(options.json, "w") 611 | except: 612 | error("Failed writing to file:", options.json) 613 | 614 | record_type = False 615 | if options.ipv4: 616 | record_type="A" 617 | if options.type: 618 | record_type = str(options.type).upper() 619 | 620 | threads = [] 621 | for target in targets: 622 | target = target.strip() 623 | if target: 624 | 625 | #target => domain 626 | #record_type => 627 | #options.subs => file the contain the subdomains list 628 | #options.process_count => process count default = 16 629 | #options.resolvers => the resolvers file 630 | #options.output 631 | #options.json 632 | print(target, record_type, options.subs, options.resolvers, options.process_count, output, json_output) 633 | print_target(target, record_type, options.subs, options.resolvers, options.process_count, output, json_output) 634 | 635 | 636 | -------------------------------------------------------------------------------- /Sublist3r/sublist3r.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | # Sublist3r v1.0 4 | # By Ahmed Aboul-Ela - twitter.com/aboul3la 5 | 6 | # modules in standard library 7 | import re 8 | import sys 9 | import os 10 | import argparse 11 | import time 12 | import hashlib 13 | import random 14 | import multiprocessing 15 | import threading 16 | import socket 17 | import json 18 | from collections import Counter 19 | 20 | # external modules 21 | from Sublist3r.subbrute import subbrute 22 | import dns.resolver 23 | import requests 24 | 25 | # Python 2.x and 3.x compatiablity 26 | if sys.version > '3': 27 | import urllib.parse as urlparse 28 | import urllib.parse as urllib 29 | else: 30 | import urlparse 31 | import urllib 32 | 33 | # In case you cannot install some of the required development packages 34 | # there's also an option to disable the SSL warning: 35 | try: 36 | import requests.packages.urllib3 37 | requests.packages.urllib3.disable_warnings() 38 | except: 39 | pass 40 | 41 | # Check if we are running this on windows platform 42 | is_windows = sys.platform.startswith('win') 
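
For orientation, this module is commonly driven as a library as well as a CLI; a usage sketch against the main() defined near the end of the file (argument order as in that signature, assuming the module is importable as sublist3r):

    import sublist3r

    subdomains = sublist3r.main("example.com", 30, "out.txt", ports=None, silent=True,
                                verbose=False, enable_bruteforce=False, engines=None)
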
43 | 44 | # Console Colors 45 | if is_windows: 46 | # Windows deserves coloring too :D 47 | G = '\033[92m' # green 48 | Y = '\033[93m' # yellow 49 | B = '\033[94m' # blue 50 | R = '\033[91m' # red 51 | W = '\033[0m' # white 52 | try: 53 | import win_unicode_console , colorama 54 | win_unicode_console.enable() 55 | colorama.init() 56 | #Now the unicode will work ^_^ 57 | except: 58 | # print("[!] Error: Coloring libraries not installed, no coloring will be used [Check the readme]") 59 | G = Y = B = R = W = G = Y = B = R = W = '' 60 | pass 61 | 62 | 63 | else: 64 | G = '\033[92m' # green 65 | Y = '\033[93m' # yellow 66 | B = '\033[94m' # blue 67 | R = '\033[91m' # red 68 | W = '\033[0m' # white 69 | 70 | def no_color(): 71 | global G, Y, B, R, W 72 | G = Y = B = R = W = '' 73 | 74 | 75 | def banner(): 76 | print("""%s 77 | ____ _ _ _ _ _____ 78 | / ___| _ _| |__ | (_)___| |_|___ / _ __ 79 | \___ \| | | | '_ \| | / __| __| |_ \| '__| 80 | ___) | |_| | |_) | | \__ \ |_ ___) | | 81 | |____/ \__,_|_.__/|_|_|___/\__|____/|_|%s%s 82 | 83 | # Coded By Ahmed Aboul-Ela - @aboul3la 84 | """ % (R, W, Y)) 85 | 86 | 87 | def parser_error(errmsg): 88 | banner() 89 | print("Usage: python " + sys.argv[0] + " [Options] use -h for help") 90 | print(R + "Error: " + errmsg + W) 91 | sys.exit() 92 | 93 | 94 | def parse_args(): 95 | # parse the arguments 96 | parser = argparse.ArgumentParser(epilog='\tExample: \r\npython ' + sys.argv[0] + " -d google.com") 97 | parser.error = parser_error 98 | parser._optionals.title = "OPTIONS" 99 | parser.add_argument('-d', '--domain', help="Domain name to enumerate it's subdomains", required=True) 100 | parser.add_argument('-b', '--bruteforce', help='Enable the subbrute bruteforce module', nargs='?', default=False) 101 | parser.add_argument('-p', '--ports', help='Scan the found subdomains against specified tcp ports') 102 | parser.add_argument('-v', '--verbose', help='Enable Verbosity and display results in realtime', nargs='?', default=False) 103 | parser.add_argument('-t', '--threads', help='Number of threads to use for subbrute bruteforce', type=int, default=30) 104 | parser.add_argument('-e', '--engines', help='Specify a comma-separated list of search engines') 105 | parser.add_argument('-o', '--output', help='Save the results to text file') 106 | parser.add_argument('-n', '--no-color', help='Output without color', default=False, action='store_true') 107 | return parser.parse_args() 108 | 109 | 110 | def write_file(filename, subdomains): 111 | # saving subdomains results to output file 112 | print("%s[-] Saving results to file: %s%s%s%s" % (Y, W, R, filename, W)) 113 | with open(str(filename), 'wt') as f: 114 | for subdomain in subdomains: 115 | f.write(subdomain + os.linesep) 116 | 117 | 118 | def subdomain_sorting_key(hostname): 119 | """Sorting key for subdomains 120 | 121 | This sorting key orders subdomains from the top-level domain at the right 122 | reading left, then moving '^' and 'www' to the top of their group. 
For 123 | example, the following list is sorted correctly: 124 | 125 | [ 126 | 'example.com', 127 | 'www.example.com', 128 | 'a.example.com', 129 | 'www.a.example.com', 130 | 'b.a.example.com', 131 | 'b.example.com', 132 | 'example.net', 133 | 'www.example.net', 134 | 'a.example.net', 135 | ] 136 | 137 | """ 138 | parts = hostname.split('.')[::-1] 139 | if parts[-1] == 'www': 140 | return parts[:-1], 1 141 | return parts, 0 142 | 143 | 144 | class enumratorBase(object): 145 | def __init__(self, base_url, engine_name, domain, subdomains=None, silent=False, verbose=True): 146 | subdomains = subdomains or [] 147 | self.domain = urlparse.urlparse(domain).netloc 148 | self.session = requests.Session() 149 | self.subdomains = [] 150 | self.timeout = 25 151 | self.base_url = base_url 152 | self.engine_name = engine_name 153 | self.silent = silent 154 | self.verbose = verbose 155 | self.headers = { 156 | 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36', 157 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 158 | 'Accept-Language': 'en-US,en;q=0.8', 159 | 'Accept-Encoding': 'gzip', 160 | } 161 | self.print_banner() 162 | 163 | def print_(self, text): 164 | if not self.silent: 165 | print(text) 166 | return 167 | 168 | def print_banner(self): 169 | """ subclass can override this if they want a fancy banner :)""" 170 | self.print_(G + "[-] Searching now in %s.." % (self.engine_name) + W) 171 | return 172 | 173 | def send_req(self, query, page_no=1): 174 | 175 | url = self.base_url.format(query=query, page_no=page_no) 176 | try: 177 | resp = self.session.get(url, headers=self.headers, timeout=self.timeout) 178 | except Exception: 179 | resp = None 180 | return self.get_response(resp) 181 | 182 | def get_response(self, response): 183 | if response is None: 184 | return "NULL" 185 | return response.text if hasattr(response, "text") else response.content 186 | 187 | def check_max_subdomains(self, count): 188 | if self.MAX_DOMAINS == 0: 189 | return False 190 | return count >= self.MAX_DOMAINS 191 | 192 | def check_max_pages(self, num): 193 | if self.MAX_PAGES == 0: 194 | return False 195 | return num >= self.MAX_PAGES 196 | 197 | # override 198 | def extract_domains(self, resp): 199 | """ chlid class should override this function """ 200 | return 201 | 202 | # override 203 | def check_response_errors(self, resp): 204 | """ chlid class should override this function 205 | The function should return True if there are no errors and False otherwise 206 | """ 207 | return True 208 | 209 | def should_sleep(self): 210 | """Some enumrators require sleeping to avoid bot detections like Google enumerator""" 211 | return 212 | 213 | def generate_query(self): 214 | """ chlid class should override this function """ 215 | return 216 | 217 | def get_page(self, num): 218 | """ chlid class that user different pagnation counter should override this function """ 219 | return num + 10 220 | 221 | def enumerate(self, altquery=False): 222 | flag = True 223 | page_no = 0 224 | prev_links = [] 225 | retries = 0 226 | 227 | while flag: 228 | query = self.generate_query() 229 | count = query.count(self.domain) # finding the number of subdomains found so far 230 | 231 | # if they we reached the maximum number of subdomains in search query 232 | # then we should go over the pages 233 | if self.check_max_subdomains(count): 234 | page_no = self.get_page(page_no) 235 | 236 | if self.check_max_pages(page_no): # maximum 
pages for Google to avoid getting blocked 237 | return self.subdomains 238 | resp = self.send_req(query, page_no) 239 | 240 | # check if any error occurred 241 | if not self.check_response_errors(resp): 242 | return self.subdomains 243 | links = self.extract_domains(resp) 244 | 245 | # if the previous page's hyperlinks are the same as the current one's, we may have reached the last page 246 | if links == prev_links: 247 | retries += 1 248 | page_no = self.get_page(page_no) 249 | 250 | # retry a few more times; maybe it isn't the last page 251 | if retries >= 3: 252 | return self.subdomains 253 | 254 | prev_links = links 255 | self.should_sleep() 256 | 257 | return self.subdomains 258 | 259 | 260 | class enumratorBaseThreaded(multiprocessing.Process, enumratorBase): 261 | def __init__(self, base_url, engine_name, domain, subdomains=None, q=None, silent=False, verbose=True): 262 | subdomains = subdomains or [] 263 | enumratorBase.__init__(self, base_url, engine_name, domain, subdomains, silent=silent, verbose=verbose) 264 | multiprocessing.Process.__init__(self) 265 | self.q = q 266 | return 267 | 268 | def run(self): 269 | domain_list = self.enumerate() 270 | for domain in domain_list: 271 | self.q.append(domain) 272 | 273 | 274 | class GoogleEnum(enumratorBaseThreaded): 275 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 276 | subdomains = subdomains or [] 277 | base_url = "https://google.com/search?q={query}&btnG=Search&hl=en-US&biw=&bih=&gbv=1&start={page_no}&filter=0" 278 | self.engine_name = "Google" 279 | self.MAX_DOMAINS = 11 280 | self.MAX_PAGES = 200 281 | super(GoogleEnum, self).__init__(base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 282 | self.q = q 283 | return 284 | 285 | def extract_domains(self, resp): 286 | links_list = list() 287 | link_regx = re.compile('<cite.*?>(.*?)<\/cite>') 288 | try: 289 | links_list = link_regx.findall(resp) 290 | for link in links_list: 291 | link = re.sub('<span.*>', '', link) 292 | if not link.startswith('http'): 293 | link = "http://" + link 294 | subdomain = urlparse.urlparse(link).netloc 295 | if subdomain and subdomain not in self.subdomains and subdomain != self.domain: 296 | if self.verbose: 297 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 298 | self.subdomains.append(subdomain.strip()) 299 | except Exception: 300 | pass 301 | return links_list 302 | 303 | def check_response_errors(self, resp): 304 | if (type(resp) is str) and 'Our systems have detected unusual traffic' in resp: 305 | self.print_(R + "[!] Error: Google is probably blocking our requests" + W) 306 | self.print_(R + "[~] Finished the Google enumeration ..."
+ W) 307 | return False 308 | return True 309 | 310 | def should_sleep(self): 311 | time.sleep(5) 312 | return 313 | 314 | def generate_query(self): 315 | if self.subdomains: 316 | fmt = 'site:{domain} -www.{domain} -{found}' 317 | found = ' -'.join(self.subdomains[:self.MAX_DOMAINS - 2]) 318 | query = fmt.format(domain=self.domain, found=found) 319 | else: 320 | query = "site:{domain} -www.{domain}".format(domain=self.domain) 321 | return query 322 | 323 | 324 | class YahooEnum(enumratorBaseThreaded): 325 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 326 | subdomains = subdomains or [] 327 | base_url = "https://search.yahoo.com/search?p={query}&b={page_no}" 328 | self.engine_name = "Yahoo" 329 | self.MAX_DOMAINS = 10 330 | self.MAX_PAGES = 0 331 | super(YahooEnum, self).__init__(base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 332 | self.q = q 333 | return 334 | 335 | def extract_domains(self, resp): 336 | link_regx2 = re.compile('<span class=" fz-.*?">(.*?)</span>') 337 | link_regx = re.compile('<span class="txt"><span class=" cite fw-xl fz-15px">(.*?)</span>') 338 | links_list = [] 339 | try: 340 | links = link_regx.findall(resp) 341 | links2 = link_regx2.findall(resp) 342 | links_list = links + links2 343 | for link in links_list: 344 | link = re.sub("<(\/)?b>", "", link) 345 | if not link.startswith('http'): 346 | link = "http://" + link 347 | subdomain = urlparse.urlparse(link).netloc 348 | if not subdomain.endswith(self.domain): 349 | continue 350 | if subdomain and subdomain not in self.subdomains and subdomain != self.domain: 351 | if self.verbose: 352 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 353 | self.subdomains.append(subdomain.strip()) 354 | except Exception: 355 | pass 356 | 357 | return links_list 358 | 359 | def should_sleep(self): 360 | return 361 | 362 | def get_page(self, num): 363 | return num + 10 364 | 365 | def generate_query(self): 366 | if self.subdomains: 367 | fmt = 'site:{domain} -domain:www.{domain} -domain:{found}' 368 | found = ' -domain:'.join(self.subdomains[:77]) 369 | query = fmt.format(domain=self.domain, found=found) 370 | else: 371 | query = "site:{domain}".format(domain=self.domain) 372 | return query 373 | 374 | 375 | class AskEnum(enumratorBaseThreaded): 376 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 377 | subdomains = subdomains or [] 378 | base_url = 'http://www.ask.com/web?q={query}&page={page_no}&qid=8D6EE6BF52E0C04527E51F64F22C4534&o=0&l=dir&qsrc=998&qo=pagination' 379 | self.engine_name = "Ask" 380 | self.MAX_DOMAINS = 11 381 | self.MAX_PAGES = 0 382 | enumratorBaseThreaded.__init__(self, base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 383 | self.q = q 384 | return 385 | 386 | def extract_domains(self, resp): 387 | links_list = list() 388 | link_regx = re.compile('<p class="web-result-url">(.*?)</p
') 389 | try: 390 | links_list = link_regx.findall(resp) 391 | for link in links_list: 392 | if not link.startswith('http'): 393 | link = "http://" + link 394 | subdomain = urlparse.urlparse(link).netloc 395 | if subdomain not in self.subdomains and subdomain != self.domain: 396 | if self.verbose: 397 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 398 | self.subdomains.append(subdomain.strip()) 399 | except Exception: 400 | pass 401 | 402 | return links_list 403 | 404 | def get_page(self, num): 405 | return num + 1 406 | 407 | def generate_query(self): 408 | if self.subdomains: 409 | fmt = 'site:{domain} -www.{domain} -{found}' 410 | found = ' -'.join(self.subdomains[:self.MAX_DOMAINS]) 411 | query = fmt.format(domain=self.domain, found=found) 412 | else: 413 | query = "site:{domain} -www.{domain}".format(domain=self.domain) 414 | 415 | return query 416 | 417 | 418 | class BingEnum(enumratorBaseThreaded): 419 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 420 | subdomains = subdomains or [] 421 | base_url = 'https://www.bing.com/search?q={query}&go=Submit&first={page_no}' 422 | self.engine_name = "Bing" 423 | self.MAX_DOMAINS = 30 424 | self.MAX_PAGES = 0 425 | enumratorBaseThreaded.__init__(self, base_url, self.engine_name, domain, subdomains, q=q, silent=silent) 426 | self.q = q 427 | self.verbose = verbose 428 | return 429 | 430 | def extract_domains(self, resp): 431 | links_list = list() 432 | link_regx = re.compile('
<li class="b_algo"><h2><a href="(.*?)"') 433 | link_regx2 = re.compile('<div class="b_title"><h2><a href="(.*?)"') 434 | try: 435 | links = link_regx.findall(resp) 436 | links2 = link_regx2.findall(resp) 437 | links_list = links + links2 438 | for link in links_list: 439 | link = re.sub('<(\/)?strong>|<span.*?>| • |<|>', '', link) 440 | 441 | if not link.startswith('http'): 442 | link = "http://" + link 443 | subdomain = urlparse.urlparse(link).netloc 444 | if subdomain not in self.subdomains and subdomain != self.domain: 445 | if self.verbose: 446 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 447 | self.subdomains.append(subdomain.strip()) 448 | except Exception: 449 | pass 450 | 451 | return links_list 452 | 453 | def generate_query(self): 454 | if self.subdomains: 455 | fmt = 'domain:{domain} -www.{domain} -{found}' 456 | found = ' -'.join(self.subdomains[:self.MAX_DOMAINS]) 457 | query = fmt.format(domain=self.domain, found=found) 458 | else: 459 | query = "domain:{domain} -www.{domain}".format(domain=self.domain) 460 | return query 461 | 462 | 463 | class BaiduEnum(enumratorBaseThreaded): 464 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 465 | subdomains = subdomains or [] 466 | base_url = 'https://www.baidu.com/s?pn={page_no}&wd={query}&oq={query}' 467 | self.engine_name = "Baidu" 468 | self.MAX_DOMAINS = 2 469 | self.MAX_PAGES = 760 470 | enumratorBaseThreaded.__init__(self, base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 471 | self.querydomain = self.domain 472 | self.q = q 473 | return 474 | 475 | def extract_domains(self, resp): 476 | links = list() 477 | found_newdomain = False 478 | subdomain_list = [] 479 | link_regx = re.compile('<a.*?class="c-showurl".*?>(.*?)</a>') 480 | try: 481 | links = link_regx.findall(resp) 482 | for link in links: 483 | link = re.sub('<.*?>|>|<|&nbsp;', '', link) 484 | if not link.startswith('http'): 485 | link = "http://" + link 486 | subdomain = urlparse.urlparse(link).netloc 487 | if subdomain.endswith(self.domain): 488 | subdomain_list.append(subdomain) 489 | if subdomain not in self.subdomains and subdomain != self.domain: 490 | found_newdomain = True 491 | if self.verbose: 492 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 493 | self.subdomains.append(subdomain.strip()) 494 | except Exception: 495 | pass 496 | if not found_newdomain and subdomain_list: 497 | self.querydomain = self.findsubs(subdomain_list) 498 | return links 499 | 500 | def findsubs(self, subdomains): 501 | count = Counter(subdomains) 502 | subdomain1 = max(count, key=count.get) 503 | count.pop(subdomain1, "None") 504 | subdomain2 = max(count, key=count.get) if count else '' 505 | return (subdomain1, subdomain2) 506 | 507 | def check_response_errors(self, resp): 508 | return True 509 | 510 | def should_sleep(self): 511 | time.sleep(random.randint(2, 5)) 512 | return 513 | 514 | def generate_query(self): 515 | if self.subdomains and self.querydomain != self.domain: 516 | found = ' -site:'.join(self.querydomain) 517 | query = "site:{domain} -site:www.{domain} -site:{found} ".format(domain=self.domain, found=found) 518 | else: 519 | query = "site:{domain} -site:www.{domain}".format(domain=self.domain) 520 | return query 521 | 522 | 523 | class NetcraftEnum(enumratorBaseThreaded): 524 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 525 | subdomains = subdomains or [] 526 | self.base_url = 'https://searchdns.netcraft.com/?restriction=site+ends+with&host={domain}' 527 | self.engine_name = "Netcraft" 528 | super(NetcraftEnum, self).__init__(self.base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 529 | self.q = q 530 | return 531 | 532 | def req(self, url, cookies=None): 533 | cookies = cookies or {} 534 | try: 535 | resp = self.session.get(url,
headers=self.headers, timeout=self.timeout, cookies=cookies) 536 | except Exception as e: 537 | self.print_(e) 538 | resp = None 539 | return resp 540 | 541 | def should_sleep(self): 542 | time.sleep(random.randint(1, 2)) 543 | return 544 | 545 | def get_next(self, resp): 546 | link_regx = re.compile('<a.*?href="(.*?)">Next Page') 547 | link = link_regx.findall(resp) 548 | url = 'http://searchdns.netcraft.com' + link[0] 549 | return url 550 | 551 | def create_cookies(self, cookie): 552 | cookies = dict() 553 | cookies_list = cookie[0:cookie.find(';')].split("=") 554 | cookies[cookies_list[0]] = cookies_list[1] 555 | # hashlib.sha1 requires utf-8 encoded str 556 | cookies['netcraft_js_verification_response'] = hashlib.sha1(urllib.unquote(cookies_list[1]).encode('utf-8')).hexdigest() 557 | return cookies 558 | 559 | def get_cookies(self, headers): 560 | if 'set-cookie' in headers: 561 | cookies = self.create_cookies(headers['set-cookie']) 562 | else: 563 | cookies = {} 564 | return cookies 565 | 566 | def enumerate(self): 567 | start_url = self.base_url.format(domain='example.com') 568 | resp = self.req(start_url) 569 | cookies = self.get_cookies(resp.headers) 570 | url = self.base_url.format(domain=self.domain) 571 | while True: 572 | resp = self.get_response(self.req(url, cookies)) 573 | self.extract_domains(resp) 574 | if 'Next Page' not in resp: 575 | return self.subdomains 576 | break 577 | url = self.get_next(resp) 578 | self.should_sleep() 579 | 580 | def extract_domains(self, resp): 581 | links_list = list() 582 | link_regx = re.compile('<a class="results-table__host" href="(.*?)"') 583 | try: 584 | links_list = link_regx.findall(resp) 585 | for link in links_list: 586 | subdomain = urlparse.urlparse(link).netloc 587 | if not subdomain.endswith(self.domain): 588 | continue 589 | if subdomain and subdomain not in self.subdomains and subdomain != self.domain: 590 | if self.verbose: 591 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 592 | self.subdomains.append(subdomain.strip()) 593 | except Exception: 594 | pass 595 | return links_list 596 | 597 | 598 | class DNSdumpster(enumratorBaseThreaded): 599 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 600 | subdomains = subdomains or [] 601 | base_url = 'https://dnsdumpster.com/' 602 | self.live_subdomains = [] 603 | self.engine_name = "DNSdumpster" 604 | self.q = q 605 | self.lock = None 606 | super(DNSdumpster, self).__init__(base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 607 | return 608 | 609 | def check_host(self, host): 610 | is_valid = False 611 | Resolver = dns.resolver.Resolver() 612 | Resolver.nameservers = ['8.8.8.8', '8.8.4.4'] 613 | self.lock.acquire() 614 | try: 615 | ip = Resolver.query(host, 'A')[0].to_text() 616 | if ip: 617 | if self.verbose: 618 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, host)) 619 | is_valid = True 620 | self.live_subdomains.append(host) 621 | except: 622 | pass 623 | self.lock.release() 624 | return is_valid 625 | 626 | def req(self, req_method, url, params=None): 627 | params = params or {} 628 | headers = dict(self.headers) 629 | headers['Referer'] = 'https://dnsdumpster.com' 630 | try: 631 | if req_method == 'GET': 632 | resp = self.session.get(url, headers=headers, timeout=self.timeout) 633 | else: 634 | resp = self.session.post(url, data=params, headers=headers, timeout=self.timeout) 635 | except Exception as e: 636 | self.print_(e) 637 | resp = None 638 | return self.get_response(resp) 639 | 640 | def get_csrftoken(self, resp): 641 | csrf_regex = re.compile('<input type="hidden" name="csrfmiddlewaretoken" value="(.*?)">', re.S) 642 | token = csrf_regex.findall(resp)[0] 643 | return token.strip() 644 | 645 | def enumerate(self): 646 | self.lock = threading.BoundedSemaphore(value=70) 647 | resp = self.req('GET', self.base_url) 648 | token = self.get_csrftoken(resp) 649 | params = {'csrfmiddlewaretoken': token, 'targetip': self.domain} 650 | post_resp = self.req('POST', self.base_url, params) 651 | self.extract_domains(post_resp) 652 | for subdomain in self.subdomains: 653 | t = threading.Thread(target=self.check_host, args=(subdomain,)) 654 | t.start() 655 | t.join() 656 | return self.live_subdomains 657 | 658 | def extract_domains(self, resp): 659 | tbl_regex = re.compile('<a name="hostanchor"><\/a>Host Records.*?<table.*?>(.*?)</table>', re.S) 660 | link_regex = re.compile('<td class="col-md-4">(.*?)<br>
    ', re.S) 661 | links = [] 662 | try: 663 | results_tbl = tbl_regex.findall(resp)[0] 664 | except IndexError: 665 | results_tbl = '' 666 | links_list = link_regex.findall(results_tbl) 667 | links = list(set(links_list)) 668 | for link in links: 669 | subdomain = link.strip() 670 | if not subdomain.endswith(self.domain): 671 | continue 672 | if subdomain and subdomain not in self.subdomains and subdomain != self.domain: 673 | self.subdomains.append(subdomain.strip()) 674 | return links 675 | 676 | 677 | class Virustotal(enumratorBaseThreaded): 678 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 679 | subdomains = subdomains or [] 680 | base_url = 'https://www.virustotal.com/ui/domains/{domain}/subdomains' 681 | self.engine_name = "Virustotal" 682 | self.q = q 683 | super(Virustotal, self).__init__(base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 684 | self.url = self.base_url.format(domain=self.domain) 685 | return 686 | 687 | # the main send_req need to be rewritten 688 | def send_req(self, url): 689 | try: 690 | resp = self.session.get(url, headers=self.headers, timeout=self.timeout) 691 | except Exception as e: 692 | self.print_(e) 693 | resp = None 694 | 695 | return self.get_response(resp) 696 | 697 | # once the send_req is rewritten we don't need to call this function, the stock one should be ok 698 | def enumerate(self): 699 | while self.url != '': 700 | resp = self.send_req(self.url) 701 | resp = json.loads(resp) 702 | if 'error' in resp: 703 | self.print_(R + "[!] Error: Virustotal probably now is blocking our requests" + W) 704 | break 705 | if 'links' in resp and 'next' in resp['links']: 706 | self.url = resp['links']['next'] 707 | else: 708 | self.url = '' 709 | self.extract_domains(resp) 710 | return self.subdomains 711 | 712 | def extract_domains(self, resp): 713 | #resp is already parsed as json 714 | try: 715 | for i in resp['data']: 716 | if i['type'] == 'domain': 717 | subdomain = i['id'] 718 | if not subdomain.endswith(self.domain): 719 | continue 720 | if subdomain not in self.subdomains and subdomain != self.domain: 721 | if self.verbose: 722 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 723 | self.subdomains.append(subdomain.strip()) 724 | except Exception: 725 | pass 726 | 727 | 728 | class ThreatCrowd(enumratorBaseThreaded): 729 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 730 | subdomains = subdomains or [] 731 | base_url = 'https://www.threatcrowd.org/searchApi/v2/domain/report/?domain={domain}' 732 | self.engine_name = "ThreatCrowd" 733 | self.q = q 734 | super(ThreatCrowd, self).__init__(base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 735 | return 736 | 737 | def req(self, url): 738 | try: 739 | resp = self.session.get(url, headers=self.headers, timeout=self.timeout) 740 | except Exception: 741 | resp = None 742 | 743 | return self.get_response(resp) 744 | 745 | def enumerate(self): 746 | url = self.base_url.format(domain=self.domain) 747 | resp = self.req(url) 748 | self.extract_domains(resp) 749 | return self.subdomains 750 | 751 | def extract_domains(self, resp): 752 | try: 753 | links = json.loads(resp)['subdomains'] 754 | for link in links: 755 | subdomain = link.strip() 756 | if not subdomain.endswith(self.domain): 757 | continue 758 | if subdomain not in self.subdomains and subdomain != self.domain: 759 | if self.verbose: 760 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 
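
The ThreatCrowd enumerator here is a thin wrapper over one JSON endpoint; stripped of the class machinery it reduces to roughly this (sketch only; the service may rate-limit or be unavailable):

    import json
    import requests

    url = "https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=example.com"
    data = json.loads(requests.get(url, timeout=25).text)
    for sub in data.get("subdomains", []):  # the only key this enumerator reads
        print(sub.strip())
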
761 | self.subdomains.append(subdomain.strip()) 762 | except Exception as e: 763 | pass 764 | 765 | 766 | class CrtSearch(enumratorBaseThreaded): 767 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 768 | subdomains = subdomains or [] 769 | base_url = 'https://crt.sh/?q=%25.{domain}' 770 | self.engine_name = "SSL Certificates" 771 | self.q = q 772 | super(CrtSearch, self).__init__(base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 773 | return 774 | 775 | def req(self, url): 776 | try: 777 | resp = self.session.get(url, headers=self.headers, timeout=self.timeout) 778 | except Exception: 779 | resp = None 780 | 781 | return self.get_response(resp) 782 | 783 | def enumerate(self): 784 | url = self.base_url.format(domain=self.domain) 785 | resp = self.req(url) 786 | if resp: 787 | self.extract_domains(resp) 788 | return self.subdomains 789 | 790 | def extract_domains(self, resp): 791 | link_regx = re.compile('<TD>(.*?)</TD>') 792 | try: 793 | links = link_regx.findall(resp) 794 | for link in links: 795 | link = link.strip() 796 | subdomains = [] 797 | if '
<BR>' in link: 798 | subdomains = link.split('<BR>
') 799 | else: 800 | subdomains.append(link) 801 | 802 | for subdomain in subdomains: 803 | if not subdomain.endswith(self.domain) or '*' in subdomain: 804 | continue 805 | 806 | if '@' in subdomain: 807 | subdomain = subdomain[subdomain.find('@')+1:] 808 | 809 | if subdomain not in self.subdomains and subdomain != self.domain: 810 | if self.verbose: 811 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 812 | self.subdomains.append(subdomain.strip()) 813 | except Exception as e: 814 | print(e) 815 | pass 816 | 817 | class PassiveDNS(enumratorBaseThreaded): 818 | def __init__(self, domain, subdomains=None, q=None, silent=False, verbose=True): 819 | subdomains = subdomains or [] 820 | base_url = 'https://api.sublist3r.com/search.php?domain={domain}' 821 | self.engine_name = "PassiveDNS" 822 | self.q = q 823 | super(PassiveDNS, self).__init__(base_url, self.engine_name, domain, subdomains, q=q, silent=silent, verbose=verbose) 824 | return 825 | 826 | def req(self, url): 827 | try: 828 | resp = self.session.get(url, headers=self.headers, timeout=self.timeout) 829 | except Exception as e: 830 | resp = None 831 | 832 | return self.get_response(resp) 833 | 834 | def enumerate(self): 835 | url = self.base_url.format(domain=self.domain) 836 | resp = self.req(url) 837 | if not resp: 838 | return self.subdomains 839 | 840 | self.extract_domains(resp) 841 | return self.subdomains 842 | 843 | def extract_domains(self, resp): 844 | try: 845 | subdomains = json.loads(resp) 846 | for subdomain in subdomains: 847 | if subdomain not in self.subdomains and subdomain != self.domain: 848 | if self.verbose: 849 | self.print_("%s%s: %s%s" % (R, self.engine_name, W, subdomain)) 850 | self.subdomains.append(subdomain.strip()) 851 | except Exception as e: 852 | pass 853 | 854 | 855 | class portscan(): 856 | def __init__(self, subdomains, ports): 857 | self.subdomains = subdomains 858 | self.ports = ports 859 | self.lock = None 860 | 861 | def port_scan(self, host, ports): 862 | openports = [] 863 | self.lock.acquire() 864 | for port in ports: 865 | try: 866 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 867 | s.settimeout(2) 868 | result = s.connect_ex((host, int(port))) 869 | if result == 0: 870 | openports.append(port) 871 | s.close() 872 | except Exception: 873 | pass 874 | self.lock.release() 875 | if len(openports) > 0: 876 | print("%s%s%s - %sFound open ports:%s %s%s%s" % (G, host, W, R, W, Y, ', '.join(openports), W)) 877 | 878 | def run(self): 879 | self.lock = threading.BoundedSemaphore(value=20) 880 | for subdomain in self.subdomains: 881 | t = threading.Thread(target=self.port_scan, args=(subdomain, self.ports)) 882 | t.start() 883 | 884 | 885 | def main(domain, threads, savefile, ports, silent, verbose, enable_bruteforce, engines): 886 | bruteforce_list = set() 887 | search_list = set() 888 | 889 | if is_windows: 890 | subdomains_queue = list() 891 | else: 892 | subdomains_queue = multiprocessing.Manager().list() 893 | 894 | # Check Bruteforce Status 895 | if enable_bruteforce or enable_bruteforce is None: 896 | enable_bruteforce = True 897 | 898 | # Validate domain 899 | domain_check = re.compile("^(http|https)?[a-zA-Z0-9]+([\-\.]{1}[a-zA-Z0-9]+)*\.[a-zA-Z]{2,}$") 900 | if not domain_check.match(domain): 901 | if not silent: 902 | print(R + "Error: Please enter a valid domain" + W) 903 | return [] 904 | 905 | if not domain.startswith('http://') and not domain.startswith('https://'): 906 | domain = 'http://' + domain 907 | 908 | parsed_domain = urlparse.urlparse(domain) 909 |
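
The engine dispatch just below maps lowercase names to enumerator classes; selecting a subset from the -e CSV string works like this (sketch mirroring the loop below, reusing the supported_engines dict defined there):

    engines = "google,bing"   # e.g. from the -e flag
    chosen = [supported_engines[name.lower()]
              for name in engines.split(',')
              if name.lower() in supported_engines]
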
910 | if not silent: 911 | print(B + "[-] Enumerating subdomains now for %s" % parsed_domain.netloc + W) 912 | 913 | if verbose and not silent: 914 | print(Y + "[-] verbosity is enabled, will show the subdomains results in realtime" + W) 915 | 916 | supported_engines = {'baidu': BaiduEnum, 917 | 'yahoo': YahooEnum, 918 | 'google': GoogleEnum, 919 | 'bing': BingEnum, 920 | 'ask': AskEnum, 921 | 'netcraft': NetcraftEnum, 922 | 'dnsdumpster': DNSdumpster, 923 | 'virustotal': Virustotal, 924 | 'threatcrowd': ThreatCrowd, 925 | 'ssl': CrtSearch, 926 | 'passivedns': PassiveDNS 927 | } 928 | 929 | chosenEnums = [] 930 | 931 | if engines is None: 932 | chosenEnums = [ 933 | BaiduEnum, YahooEnum, GoogleEnum, BingEnum, AskEnum, 934 | NetcraftEnum, DNSdumpster, Virustotal, ThreatCrowd, 935 | CrtSearch, PassiveDNS 936 | ] 937 | else: 938 | engines = engines.split(',') 939 | for engine in engines: 940 | if engine.lower() in supported_engines: 941 | chosenEnums.append(supported_engines[engine.lower()]) 942 | 943 | # Start the engines enumeration 944 | enums = [enum(domain, [], q=subdomains_queue, silent=silent, verbose=verbose) for enum in chosenEnums] 945 | for enum in enums: 946 | enum.start() 947 | for enum in enums: 948 | enum.join() 949 | 950 | subdomains = set(subdomains_queue) 951 | for subdomain in subdomains: 952 | search_list.add(subdomain) 953 | 954 | if enable_bruteforce: 955 | if not silent: 956 | print(G + "[-] Starting bruteforce module now using subbrute.." + W) 957 | record_type = False 958 | path_to_file = os.path.dirname(os.path.realpath(__file__)) 959 | subs = os.path.join(path_to_file, 'subbrute', 'names.txt') 960 | resolvers = os.path.join(path_to_file, 'subbrute', 'resolvers.txt') 961 | process_count = threads 962 | output = False 963 | json_output = False 964 | bruteforce_list = subbrute.print_target(parsed_domain.netloc, record_type, subs, resolvers, process_count, output, json_output, search_list, verbose) 965 | 966 | subdomains = search_list.union(bruteforce_list) 967 | 968 | if subdomains: 969 | subdomains = sorted(subdomains, key=subdomain_sorting_key) 970 | 971 | if savefile: 972 | write_file(savefile, subdomains) 973 | 974 | if not silent: 975 | print(Y + "[-] Total Unique Subdomains Found: %s" % len(subdomains) + W) 976 | 977 | if ports: 978 | if not silent: 979 | print(G + "[-] Start port scan now for the following ports: %s%s" % (Y, ports) + W) 980 | ports = ports.split(',') 981 | pscan = portscan(subdomains, ports) 982 | pscan.run() 983 | 984 | elif not silent: 985 | for subdomain in subdomains: 986 | print(G + subdomain + W) 987 | return subdomains 988 | 989 | 990 | def interactive(): 991 | args = parse_args() 992 | domain = args.domain 993 | threads = args.threads 994 | savefile = args.output 995 | ports = args.ports 996 | enable_bruteforce = args.bruteforce 997 | verbose = args.verbose 998 | engines = args.engines 999 | if verbose or verbose is None: 1000 | verbose = True 1001 | if args.no_color: 1002 | no_color() 1003 | banner() 1004 | res = main(domain, threads, savefile, ports, silent=False, verbose=verbose, enable_bruteforce=enable_bruteforce, engines=engines) 1005 | 1006 | if __name__ == "__main__": 1007 | interactive() 1008 | -------------------------------------------------------------------------------- /Useragent.json: -------------------------------------------------------------------------------- 1 | {"browsers": {"chrome": ["Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36", "Mozilla/5.0 (Macintosh; 
Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36", "Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36", "Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36", "Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/29.0.1547.57 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36", "Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36", "Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.15 (KHTML, like Gecko) Chrome/24.0.1295.0 Safari/537.15", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.14 (KHTML, like Gecko) Chrome/24.0.1292.0 Safari/537.14"], "opera": ["Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16", "Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14", "Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14", "Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02", "Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00", "Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00", "Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00", "Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00", "Mozilla/5.0 (Windows NT 5.1) Gecko/20100101 Firefox/14.0 Opera/12.0", "Opera/9.80 (Windows NT 6.1; WOW64; U; pt) Presto/2.10.229 Version/11.62", "Opera/9.80 (Windows NT 6.0; U; pl) Presto/2.10.229 Version/11.62", "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52", "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; de) Presto/2.9.168 Version/11.52", "Opera/9.80 (Windows NT 5.1; U; en) Presto/2.9.168 Version/11.51", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; de) Opera 11.51", "Opera/9.80 (X11; Linux x86_64; U; fr) Presto/2.9.168 Version/11.50", "Opera/9.80 (X11; Linux i686; U; hu) Presto/2.9.168 Version/11.50", "Opera/9.80 (X11; Linux i686; U; ru) Presto/2.8.131 Version/11.11", "Opera/9.80 (X11; Linux i686; U; es-ES) Presto/2.8.131 Version/11.11", "Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/5.0 Opera 11.11", 
"Opera/9.80 (X11; Linux x86_64; U; bg) Presto/2.8.131 Version/11.10", "Opera/9.80 (Windows NT 6.0; U; en) Presto/2.8.99 Version/11.10", "Opera/9.80 (Windows NT 5.1; U; zh-tw) Presto/2.8.131 Version/11.10", "Opera/9.80 (Windows NT 6.1; Opera Tablet/15165; U; en) Presto/2.8.149 Version/11.1", "Opera/9.80 (X11; Linux x86_64; U; Ubuntu/10.10 (maverick); pl) Presto/2.7.62 Version/11.01", "Opera/9.80 (X11; Linux i686; U; ja) Presto/2.7.62 Version/11.01", "Opera/9.80 (X11; Linux i686; U; fr) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; sv) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; en-US) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; cs) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.0; U; pl) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 5.1; U;) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 5.1; U; cs) Presto/2.7.62 Version/11.01", "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101213 Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01", "Mozilla/5.0 (Windows NT 6.1; U; nl; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01", "Mozilla/5.0 (Windows NT 6.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; de) Opera 11.01", "Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00", "Opera/9.80 (X11; Linux i686; U; it) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.6.37 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; pl) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; ko) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; fi) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; en-GB) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1 x64; U; en) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.0; U; en) Presto/2.7.39 Version/11.00"], "firefox": ["Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1", "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0", "Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0", "Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20120101 Firefox/29.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/29.0", "Mozilla/5.0 (X11; OpenBSD amd64; rv:28.0) Gecko/20100101 Firefox/28.0", "Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0", "Mozilla/5.0 (Windows NT 6.1; rv:27.3) Gecko/20130101 Firefox/27.3", "Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:27.0) Gecko/20121011 Firefox/27.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) Gecko/20100101 Firefox/25.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0", "Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20100101 Firefox/24.0", "Mozilla/5.0 (Windows NT 6.2; rv:22.0) Gecko/20130405 Firefox/23.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; 
rv:23.0) Gecko/20130406 Firefox/23.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:23.0) Gecko/20131011 Firefox/23.0", "Mozilla/5.0 (Windows NT 6.2; rv:22.0) Gecko/20130405 Firefox/22.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:22.0) Gecko/20130328 Firefox/22.0", "Mozilla/5.0 (Windows NT 6.1; rv:22.0) Gecko/20130405 Firefox/22.0", "Mozilla/5.0 (Microsoft Windows NT 6.2.9200.0); rv:22.0) Gecko/20130405 Firefox/22.0", "Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/21.0.1", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/21.0.1", "Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:21.0.0) Gecko/20121011 Firefox/21.0.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:21.0) Gecko/20130331 Firefox/21.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (X11; Linux i686; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:21.0) Gecko/20130514 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.2; rv:21.0) Gecko/20130326 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20130401 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20130331 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20130330 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; rv:21.0) Gecko/20130401 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; rv:21.0) Gecko/20130328 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.1; rv:21.0) Gecko/20130401 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.1; rv:21.0) Gecko/20130331 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.1; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.0; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.2; Win64; x64;) Gecko/20100101 Firefox/20.0", "Mozilla/5.0 (Windows x86; rv:19.0) Gecko/20100101 Firefox/19.0", "Mozilla/5.0 (Windows NT 6.1; rv:6.0) Gecko/20100101 Firefox/19.0", "Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/18.0.1", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/20100101 Firefox/18.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:17.0) Gecko/20100101 Firefox/17.0.6"], "internetexplorer": ["Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko", "Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", "Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)", "Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)", "Mozilla/4.0 (Compatible; MSIE 8.0; Windows NT 5.2; Trident/6.0)", "Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)", "Mozilla/1.22 (compatible; MSIE 10.0; Windows 3.1)", "Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US))", "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)", 
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 7.1; Trident/5.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; InfoPath.3; MS-RTC LM 8; .NET4.0C; .NET4.0E)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; chromeframe/12.0.742.112)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; Tablet PC 2.0; InfoPath.3; .NET4.0C; .NET4.0E)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; yie8)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET CLR 1.1.4322; .NET4.0C; Tablet PC 2.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; FunWebProducts)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; chromeframe/13.0.782.215)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; chromeframe/11.0.696.57)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0) chromeframe/10.0.648.205", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.1; SV1; .NET CLR 2.8.52393; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; chromeframe/11.0.696.57)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/4.0; GTB7.4; InfoPath.3; SV1; .NET CLR 3.1.76908; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.2; SV1; .NET CLR 3.3.69573; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.8.36217; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; .NET CLR 2.7.58687; SLCC2; Media Center PC 5.0; Zune 3.4; Tablet PC 3.6; InfoPath.3)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.2; Trident/4.0; Media Center PC 4.0; SLCC1; .NET CLR 3.0.04320)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 1.1.4322)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 1.1.4322; .NET CLR 2.0.50727)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; SLCC1; .NET CLR 1.1.4322)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.0.4506.2152; .NET CLR 
3.5.30729; .NET CLR 3.0.04506.30)", "Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.0; Trident/4.0; FBSMTWB; .NET CLR 2.0.34861; .NET CLR 3.0.3746.3218; .NET CLR 3.5.33652; msn OptimizedIE8;ENUS)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.2; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; Media Center PC 6.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.3; .NET4.0C; .NET4.0E; .NET CLR 3.5.30729; .NET CLR 3.0.30729; MS-RTC LM 8)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 3.0)"], "safari": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A", "Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10", "Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko ) Version/5.1 Mobile/9B176 Safari/7534.48.3", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; de-at) AppleWebKit/533.21.1 (KHTML, like Gecko) Version/5.0.5 Safari/533.21.1", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; da-dk) AppleWebKit/533.21.1 (KHTML, like Gecko) Version/5.0.5 Safari/533.21.1", "Mozilla/5.0 (Windows; U; Windows NT 6.1; tr-TR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; ko-KR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; fr-FR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; cs-CZ) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.0; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; zh-cn) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; zh-cn) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", 
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; sv-se) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; ko-kr) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; it-it) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; fr-fr) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; es-es) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-us) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-gb) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; de-de) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; sv-SE) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.1; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.0; hu-HU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.0; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 5.1; it-IT) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; en-us) AppleWebKit/534.16+ (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; fr-ch) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; de-de) AppleWebKit/534.15+ (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; ar) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Android 2.2; Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-HK) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.0; tr-TR) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.0; nb-NO) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 
Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.0; fr-FR) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; zh-cn) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5"]}, "randomize": {"344": "chrome", "819": "firefox", "346": "chrome", "347": "chrome", "340": "chrome", "341": "chrome", "342": "chrome", "343": "chrome", "810": "internetexplorer", "811": "internetexplorer", "812": "internetexplorer", "813": "firefox", "348": "chrome", "349": "chrome", "816": "firefox", "817": "firefox", "737": "chrome", "719": "chrome", "718": "chrome", "717": "chrome", "716": "chrome", "715": "chrome", "714": "chrome", "713": "chrome", "712": "chrome", "711": "chrome", "710": "chrome", "421": "chrome", "129": "chrome", "420": "chrome", "423": "chrome", "422": "chrome", "425": "chrome", "619": "chrome", "424": "chrome", "427": "chrome", "298": "chrome", "299": "chrome", "296": "chrome", "297": "chrome", "294": "chrome", "295": "chrome", "292": "chrome", "293": "chrome", "290": "chrome", "291": "chrome", "591": "chrome", "590": "chrome", "593": "chrome", "592": "chrome", "595": "chrome", "594": "chrome", "597": "chrome", "596": "chrome", "195": "chrome", "194": "chrome", "197": "chrome", "196": "chrome", "191": "chrome", "190": "chrome", "193": "chrome", "192": "chrome", "270": "chrome", "271": "chrome", "272": "chrome", "273": "chrome", "274": "chrome", "275": "chrome", "276": "chrome", "277": "chrome", "278": "chrome", "279": "chrome", "569": "chrome", "497": "chrome", "524": "chrome", "525": "chrome", "526": "chrome", "527": "chrome", "520": "chrome", "521": "chrome", "522": "chrome", "523": "chrome", "528": "chrome", "529": "chrome", "449": "chrome", "448": "chrome", "345": "chrome", "443": "chrome", "442": "chrome", "441": "chrome", "440": "chrome", "447": "chrome", "446": "chrome", "445": "chrome", "444": "chrome", "47": "chrome", "108": "chrome", "109": "chrome", "102": "chrome", "103": "chrome", "100": "chrome", "101": "chrome", "106": "chrome", "107": "chrome", "104": "chrome", "105": "chrome", "902": "firefox", "903": "firefox", "39": "chrome", "38": "chrome", "906": "firefox", "907": "firefox", "904": "firefox", "905": "firefox", "33": "chrome", "32": "chrome", "31": "chrome", "30": "chrome", "37": "chrome", "36": "chrome", "35": "chrome", "34": "chrome", "641": "chrome", "640": "chrome", "643": "chrome", "642": "chrome", "645": "chrome", "644": "chrome", "438": "chrome", "439": "chrome", "436": "chrome", "437": "chrome", "434": "chrome", "435": "chrome", "432": "chrome", "433": "chrome", "430": "chrome", "431": "chrome", "826": "firefox", "339": "chrome", "338": "chrome", "335": "chrome", "334": "chrome", "337": "chrome", "336": "chrome", "331": "chrome", "330": "chrome", "333": "chrome", "332": "chrome", "559": "chrome", "745": "chrome", "854": "firefox", "818": "firefox", "856": "firefox", "857": "firefox", "850": "firefox", "851": "firefox", "852": "firefox", "0": "chrome", "858": "firefox", "859": "firefox", "748": "chrome", "6": "chrome", "43": "chrome", "99": "chrome", "98": "chrome", "91": "chrome", "90": "chrome", "93": "chrome", "92": "chrome", "95": "chrome", "94": "chrome", "97": "chrome", "96": "chrome", "814": "firefox", "815": "firefox", 
"153": "chrome", "740": "chrome", "741": "chrome", "742": "chrome", "743": "chrome", "744": "chrome", "558": "chrome", "746": "chrome", "747": "chrome", "555": "chrome", "554": "chrome", "557": "chrome", "556": "chrome", "551": "chrome", "550": "chrome", "553": "chrome", "552": "chrome", "238": "chrome", "239": "chrome", "234": "chrome", "235": "chrome", "236": "chrome", "237": "chrome", "230": "chrome", "231": "chrome", "232": "chrome", "233": "chrome", "1": "chrome", "155": "chrome", "146": "chrome", "147": "chrome", "618": "chrome", "145": "chrome", "142": "chrome", "143": "chrome", "140": "chrome", "141": "chrome", "612": "chrome", "613": "chrome", "610": "chrome", "611": "chrome", "616": "chrome", "617": "chrome", "148": "chrome", "149": "chrome", "46": "chrome", "154": "chrome", "948": "safari", "949": "safari", "946": "safari", "947": "safari", "944": "safari", "945": "safari", "942": "safari", "943": "safari", "940": "safari", "941": "safari", "689": "chrome", "688": "chrome", "685": "chrome", "684": "chrome", "687": "chrome", "686": "chrome", "681": "chrome", "680": "chrome", "683": "chrome", "682": "chrome", "458": "chrome", "459": "chrome", "133": "chrome", "132": "chrome", "131": "chrome", "130": "chrome", "137": "chrome", "136": "chrome", "135": "chrome", "134": "chrome", "494": "chrome", "495": "chrome", "139": "chrome", "138": "chrome", "490": "chrome", "491": "chrome", "492": "chrome", "493": "chrome", "24": "chrome", "25": "chrome", "26": "chrome", "27": "chrome", "20": "chrome", "21": "chrome", "22": "chrome", "23": "chrome", "28": "chrome", "29": "chrome", "820": "firefox", "407": "chrome", "406": "chrome", "405": "chrome", "404": "chrome", "403": "chrome", "402": "chrome", "401": "chrome", "400": "chrome", "933": "firefox", "932": "firefox", "931": "firefox", "930": "firefox", "937": "safari", "452": "chrome", "409": "chrome", "408": "chrome", "453": "chrome", "414": "chrome", "183": "chrome", "415": "chrome", "379": "chrome", "378": "chrome", "228": "chrome", "829": "firefox", "828": "firefox", "371": "chrome", "370": "chrome", "373": "chrome", "372": "chrome", "375": "chrome", "374": "chrome", "377": "chrome", "376": "chrome", "708": "chrome", "709": "chrome", "704": "chrome", "705": "chrome", "706": "chrome", "707": "chrome", "700": "chrome", "144": "chrome", "702": "chrome", "703": "chrome", "393": "chrome", "392": "chrome", "88": "chrome", "89": "chrome", "397": "chrome", "396": "chrome", "395": "chrome", "394": "chrome", "82": "chrome", "83": "chrome", "80": "chrome", "81": "chrome", "86": "chrome", "87": "chrome", "84": "chrome", "85": "chrome", "797": "internetexplorer", "796": "internetexplorer", "795": "internetexplorer", "794": "internetexplorer", "793": "internetexplorer", "792": "internetexplorer", "791": "internetexplorer", "790": "internetexplorer", "749": "chrome", "799": "internetexplorer", "798": "internetexplorer", "7": "chrome", "170": "chrome", "586": "chrome", "587": "chrome", "584": "chrome", "585": "chrome", "582": "chrome", "583": "chrome", "580": "chrome", "581": "chrome", "588": "chrome", "589": "chrome", "245": "chrome", "244": "chrome", "247": "chrome", "246": "chrome", "241": "chrome", "614": "chrome", "243": "chrome", "242": "chrome", "615": "chrome", "249": "chrome", "248": "chrome", "418": "chrome", "419": "chrome", "519": "chrome", "518": "chrome", "511": "chrome", "510": "chrome", "513": "chrome", "512": "chrome", "515": "chrome", "514": "chrome", "517": "chrome", "516": "chrome", "623": "chrome", "622": "chrome", "621": "chrome", 
"620": "chrome", "627": "chrome", "626": "chrome", "625": "chrome", "624": "chrome", "450": "chrome", "451": "chrome", "629": "chrome", "628": "chrome", "454": "chrome", "455": "chrome", "456": "chrome", "457": "chrome", "179": "chrome", "178": "chrome", "177": "chrome", "199": "chrome", "175": "chrome", "174": "chrome", "173": "chrome", "172": "chrome", "171": "chrome", "198": "chrome", "977": "opera", "182": "chrome", "975": "opera", "974": "opera", "973": "opera", "972": "opera", "971": "opera", "970": "opera", "180": "chrome", "979": "opera", "978": "opera", "656": "chrome", "599": "chrome", "654": "chrome", "181": "chrome", "186": "chrome", "187": "chrome", "184": "chrome", "185": "chrome", "652": "chrome", "188": "chrome", "189": "chrome", "658": "chrome", "653": "chrome", "650": "chrome", "651": "chrome", "11": "chrome", "10": "chrome", "13": "chrome", "12": "chrome", "15": "chrome", "14": "chrome", "17": "chrome", "16": "chrome", "19": "chrome", "18": "chrome", "863": "firefox", "862": "firefox", "865": "firefox", "864": "firefox", "867": "firefox", "866": "firefox", "354": "chrome", "659": "chrome", "44": "chrome", "883": "firefox", "882": "firefox", "881": "firefox", "880": "firefox", "887": "firefox", "886": "firefox", "885": "firefox", "884": "firefox", "889": "firefox", "888": "firefox", "116": "chrome", "45": "chrome", "657": "chrome", "355": "chrome", "322": "chrome", "323": "chrome", "320": "chrome", "321": "chrome", "326": "chrome", "327": "chrome", "324": "chrome", "325": "chrome", "328": "chrome", "329": "chrome", "562": "chrome", "201": "chrome", "200": "chrome", "203": "chrome", "202": "chrome", "205": "chrome", "204": "chrome", "207": "chrome", "206": "chrome", "209": "chrome", "208": "chrome", "779": "internetexplorer", "778": "internetexplorer", "77": "chrome", "76": "chrome", "75": "chrome", "74": "chrome", "73": "chrome", "72": "chrome", "71": "chrome", "70": "chrome", "655": "chrome", "567": "chrome", "79": "chrome", "78": "chrome", "359": "chrome", "358": "chrome", "669": "chrome", "668": "chrome", "667": "chrome", "666": "chrome", "665": "chrome", "664": "chrome", "663": "chrome", "662": "chrome", "661": "chrome", "660": "chrome", "215": "chrome", "692": "chrome", "693": "chrome", "690": "chrome", "691": "chrome", "696": "chrome", "697": "chrome", "694": "chrome", "695": "chrome", "698": "chrome", "699": "chrome", "542": "chrome", "543": "chrome", "540": "chrome", "541": "chrome", "546": "chrome", "547": "chrome", "544": "chrome", "545": "chrome", "8": "chrome", "548": "chrome", "549": "chrome", "598": "chrome", "869": "firefox", "868": "firefox", "120": "chrome", "121": "chrome", "122": "chrome", "123": "chrome", "124": "chrome", "125": "chrome", "126": "chrome", "127": "chrome", "128": "chrome", "2": "chrome", "219": "chrome", "176": "chrome", "214": "chrome", "563": "chrome", "928": "firefox", "929": "firefox", "416": "chrome", "417": "chrome", "410": "chrome", "411": "chrome", "412": "chrome", "413": "chrome", "920": "firefox", "498": "chrome", "922": "firefox", "923": "firefox", "924": "firefox", "925": "firefox", "926": "firefox", "927": "firefox", "319": "chrome", "318": "chrome", "313": "chrome", "312": "chrome", "311": "chrome", "310": "chrome", "317": "chrome", "316": "chrome", "315": "chrome", "314": "chrome", "921": "firefox", "496": "chrome", "832": "firefox", "833": "firefox", "830": "firefox", "831": "firefox", "836": "firefox", "837": "firefox", "834": "firefox", "835": "firefox", "838": "firefox", "839": "firefox", "3": "chrome", "368": 
"chrome", "369": "chrome", "366": "chrome", "367": "chrome", "364": "chrome", "365": "chrome", "362": "chrome", "363": "chrome", "360": "chrome", "361": "chrome", "218": "chrome", "380": "chrome", "861": "firefox", "382": "chrome", "383": "chrome", "384": "chrome", "385": "chrome", "386": "chrome", "387": "chrome", "388": "chrome", "389": "chrome", "784": "internetexplorer", "785": "internetexplorer", "786": "internetexplorer", "787": "internetexplorer", "780": "internetexplorer", "781": "internetexplorer", "782": "internetexplorer", "381": "chrome", "788": "internetexplorer", "789": "internetexplorer", "860": "firefox", "151": "chrome", "579": "chrome", "578": "chrome", "150": "chrome", "573": "chrome", "572": "chrome", "571": "chrome", "570": "chrome", "577": "chrome", "576": "chrome", "575": "chrome", "574": "chrome", "60": "chrome", "61": "chrome", "62": "chrome", "259": "chrome", "64": "chrome", "65": "chrome", "66": "chrome", "67": "chrome", "68": "chrome", "253": "chrome", "250": "chrome", "251": "chrome", "256": "chrome", "257": "chrome", "254": "chrome", "255": "chrome", "499": "chrome", "157": "chrome", "156": "chrome", "939": "safari", "731": "chrome", "730": "chrome", "733": "chrome", "938": "safari", "735": "chrome", "734": "chrome", "508": "chrome", "736": "chrome", "506": "chrome", "738": "chrome", "504": "chrome", "505": "chrome", "502": "chrome", "503": "chrome", "500": "chrome", "501": "chrome", "630": "chrome", "631": "chrome", "632": "chrome", "633": "chrome", "469": "chrome", "468": "chrome", "636": "chrome", "637": "chrome", "465": "chrome", "464": "chrome", "467": "chrome", "466": "chrome", "461": "chrome", "900": "firefox", "463": "chrome", "462": "chrome", "901": "firefox", "168": "chrome", "169": "chrome", "164": "chrome", "165": "chrome", "166": "chrome", "167": "chrome", "160": "chrome", "161": "chrome", "162": "chrome", "163": "chrome", "964": "safari", "965": "safari", "966": "safari", "967": "safari", "960": "safari", "961": "safari", "962": "safari", "963": "safari", "783": "internetexplorer", "968": "safari", "969": "opera", "936": "firefox", "935": "firefox", "934": "firefox", "908": "firefox", "909": "firefox", "722": "chrome", "426": "chrome", "878": "firefox", "879": "firefox", "876": "firefox", "877": "firefox", "874": "firefox", "875": "firefox", "872": "firefox", "873": "firefox", "870": "firefox", "871": "firefox", "9": "chrome", "890": "firefox", "891": "firefox", "892": "firefox", "893": "firefox", "894": "firefox", "647": "chrome", "896": "firefox", "897": "firefox", "898": "firefox", "899": "firefox", "646": "chrome", "649": "chrome", "648": "chrome", "357": "chrome", "356": "chrome", "809": "internetexplorer", "808": "internetexplorer", "353": "chrome", "352": "chrome", "351": "chrome", "350": "chrome", "803": "internetexplorer", "802": "internetexplorer", "801": "internetexplorer", "800": "internetexplorer", "807": "internetexplorer", "806": "internetexplorer", "805": "internetexplorer", "804": "internetexplorer", "216": "chrome", "217": "chrome", "768": "chrome", "769": "chrome", "212": "chrome", "213": "chrome", "210": "chrome", "211": "chrome", "762": "chrome", "763": "chrome", "760": "chrome", "761": "chrome", "766": "chrome", "767": "chrome", "764": "chrome", "765": "chrome", "40": "chrome", "41": "chrome", "289": "chrome", "288": "chrome", "4": "chrome", "281": "chrome", "280": "chrome", "283": "chrome", "282": "chrome", "285": "chrome", "284": "chrome", "287": "chrome", "286": "chrome", "678": "chrome", "679": "chrome", "674": "chrome", 
"675": "chrome", "676": "chrome", "677": "chrome", "670": "chrome", "671": "chrome", "672": "chrome", "673": "chrome", "263": "chrome", "262": "chrome", "261": "chrome", "260": "chrome", "267": "chrome", "266": "chrome", "265": "chrome", "264": "chrome", "269": "chrome", "268": "chrome", "59": "chrome", "58": "chrome", "55": "chrome", "54": "chrome", "57": "chrome", "56": "chrome", "51": "chrome", "258": "chrome", "53": "chrome", "52": "chrome", "537": "chrome", "536": "chrome", "535": "chrome", "63": "chrome", "533": "chrome", "532": "chrome", "531": "chrome", "530": "chrome", "152": "chrome", "539": "chrome", "538": "chrome", "775": "internetexplorer", "774": "internetexplorer", "982": "opera", "983": "opera", "980": "opera", "981": "opera", "777": "internetexplorer", "984": "opera", "50": "chrome", "115": "chrome", "252": "chrome", "117": "chrome", "776": "internetexplorer", "111": "chrome", "110": "chrome", "113": "chrome", "69": "chrome", "771": "chrome", "119": "chrome", "118": "chrome", "770": "chrome", "773": "internetexplorer", "772": "internetexplorer", "429": "chrome", "428": "chrome", "534": "chrome", "919": "firefox", "918": "firefox", "915": "firefox", "914": "firefox", "917": "firefox", "916": "firefox", "911": "firefox", "910": "firefox", "913": "firefox", "912": "firefox", "308": "chrome", "309": "chrome", "855": "firefox", "300": "chrome", "301": "chrome", "302": "chrome", "303": "chrome", "304": "chrome", "305": "chrome", "306": "chrome", "307": "chrome", "895": "firefox", "825": "firefox", "824": "firefox", "827": "firefox", "847": "firefox", "846": "firefox", "845": "firefox", "844": "firefox", "843": "firefox", "842": "firefox", "841": "firefox", "840": "firefox", "821": "firefox", "853": "firefox", "849": "firefox", "848": "firefox", "823": "firefox", "822": "firefox", "240": "chrome", "390": "chrome", "732": "chrome", "753": "chrome", "752": "chrome", "751": "chrome", "750": "chrome", "757": "chrome", "756": "chrome", "755": "chrome", "754": "chrome", "560": "chrome", "561": "chrome", "759": "chrome", "758": "chrome", "564": "chrome", "565": "chrome", "566": "chrome", "701": "chrome", "739": "chrome", "229": "chrome", "507": "chrome", "227": "chrome", "226": "chrome", "225": "chrome", "224": "chrome", "223": "chrome", "222": "chrome", "221": "chrome", "220": "chrome", "114": "chrome", "391": "chrome", "726": "chrome", "727": "chrome", "724": "chrome", "725": "chrome", "568": "chrome", "723": "chrome", "720": "chrome", "721": "chrome", "728": "chrome", "729": "chrome", "605": "chrome", "604": "chrome", "607": "chrome", "606": "chrome", "601": "chrome", "600": "chrome", "603": "chrome", "602": "chrome", "159": "chrome", "158": "chrome", "112": "chrome", "609": "chrome", "608": "chrome", "976": "opera", "634": "chrome", "399": "chrome", "635": "chrome", "959": "safari", "958": "safari", "398": "chrome", "48": "chrome", "49": "chrome", "951": "safari", "950": "safari", "953": "safari", "952": "safari", "42": "chrome", "954": "safari", "957": "safari", "956": "safari", "638": "chrome", "5": "chrome", "639": "chrome", "460": "chrome", "489": "chrome", "488": "chrome", "487": "chrome", "486": "chrome", "485": "chrome", "484": "chrome", "483": "chrome", "482": "chrome", "481": "chrome", "480": "chrome", "509": "chrome", "955": "safari", "472": "chrome", "473": "chrome", "470": "chrome", "471": "chrome", "476": "chrome", "477": "chrome", "474": "chrome", "475": "chrome", "478": "chrome", "479": "chrome"}} 
--------------------------------------------------------------------------------
/Xray/pppXray.py:
--------------------------------------------------------------------------------
1 | import os
2 | import hashlib
3 | import Hx_config
4 | 
5 | 
6 | def logo():
7 |     logo = '''
8 |      _ __  _ __  _ __
9 |     | '_ \| '_ \| '_ \\
10 |     | |_) | |_) | |_) |
11 |     | .__/| .__/| .__/
12 |     | |   | |   | |
13 |     |_|   |_|   |_|
14 |      __   __
15 |      \ \ / /
16 |       \ V / _ __ __ _ _   _
17 |       / \ | '__/ _` | | | |
18 |      / /^\ \ | | (_| | |_| |
19 |      \/   \/_|  \__,_|\__, |
20 |                        __/ |
21 |                       |___/
22 |     v1.03
23 |     author:springbird
24 |     '''
25 |     return logo
26 | 
27 | 
28 | def xrayScan(targeturl, outputfilename="test"):
29 |     try:
30 |         scanCommand = "{} webscan {} --url \"{}\" --html-output {}\\{}.html".format(Hx_config.Xray_Path, '--plugins {}'.format(Hx_config.plugins) if Hx_config.plugins else '', targeturl, Hx_config.Xray_temp_report_path, outputfilename)
31 |         print(scanCommand)
32 |         os.system(scanCommand)
33 |     except Exception as e:
34 |         print(e)
35 |         pass
36 |     return
37 | 
38 | 
39 | def pppGet(req_queue):
40 |     while not req_queue.empty():
41 |         try:
42 |             target = req_queue.get()
43 |             print("Now Xray Scan {}".format(target))
44 |             outputfilename = hashlib.md5(target.encode("utf-8"))  # report name = md5(target), keeps file names filesystem-safe
45 |             xrayScan(target.strip(), outputfilename.hexdigest())
46 |         except Exception as e:
47 |             print(e)
48 |             pass
49 |     print("Xray Scan End~")
50 |     return
51 | 
52 | 
53 | def main():
54 |     print(logo())
55 |     xrayScan("http://127.0.0.1/")
56 |     # pppGet()
57 |     return
58 | 
59 | if __name__ == '__main__':
60 |     main()
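xrayScan() above builds a single shell string and hands it to os.system(), so a target URL containing shell metacharacters would be interpreted by the shell. A sketch of the same invocation through subprocess with an argument list (reusing the Hx_config attribute names from the file above; an alternative sketch, not the project's code):

    import subprocess

    def xray_scan_subprocess(targeturl, outputfilename="test"):
        cmd = [Hx_config.Xray_Path, "webscan"]
        if Hx_config.plugins:
            cmd += ["--plugins", Hx_config.plugins]
        cmd += ["--url", targeturl,
                "--html-output", "{}\\{}.html".format(Hx_config.Xray_temp_report_path, outputfilename)]
        # An argument list is passed to the process directly, so no shell
        # quoting/injection issues; check=False mirrors os.system's
        # ignore-the-exit-code behaviour.
        subprocess.run(cmd, check=False)
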
--------------------------------------------------------------------------------
/base.py:
--------------------------------------------------------------------------------
1 | import aiohttp
2 | import asyncio
3 | import hashlib
4 | import os
5 | import re
6 | import shutil
7 | 
8 | import Hx_config
9 | from ARL.ArlScan import Scan
10 | from CScan import CScan
11 | from JSmessage.jsfinder import JSFinder
12 | from OneForAll import oneforallMain
13 | from ServerJiang.jiangMain import SendNotice
14 | from Subfinder import subfinderMain
15 | from Sublist3r import Sublist3rMain
16 | from subDomainsBrute import subDomainsBruteMain
17 | 
18 | '''
19 | init() scan initialisation function
20 | Purpose:
21 |     create the directories that results are saved into
22 | '''
23 | 
24 | 
25 | def init():
26 |     try:
27 |         if not os.path.exists(Hx_config.Save_path) or not os.path.exists(Hx_config.ARL_save_path) or not os.path.exists(
28 |                 Hx_config.Crawlergo_save_path):
29 |             os.makedirs(Hx_config.Save_path, exist_ok=True)  # exist_ok: individual directories may already exist
30 |             os.makedirs(Hx_config.Xray_report_path, exist_ok=True)
31 |             os.makedirs(Hx_config.Xray_temp_report_path, exist_ok=True)
32 |             os.makedirs(Hx_config.CScan_report_path, exist_ok=True)
33 |             os.makedirs(Hx_config.Sub_report_path, exist_ok=True)
34 |             os.makedirs(Hx_config.Temp_path, exist_ok=True)
35 |             os.makedirs(Hx_config.JS_report_path, exist_ok=True)
36 |             os.makedirs(Hx_config.ARL_save_path, exist_ok=True)
37 |             os.makedirs(Hx_config.Crawlergo_save_path, exist_ok=True)
38 | 
39 |     except Exception as e:
40 |         print(e)
41 |         exit(0)
42 |     print(f"{Hx_config.red}Initialisation complete{Hx_config.end}")
43 |     return
44 | 
45 | 
46 | '''
47 | cleanTempXrayReport()
48 | Purpose: delete every file under the xray temp report directory
49 | '''
50 | 
51 | 
52 | def cleanTempXrayReport():
53 |     shutil.rmtree("{}".format(Hx_config.Xray_temp_report_path))
54 |     os.mkdir("{}".format(Hx_config.Xray_temp_report_path))
55 |     return
56 | 
57 | 
58 | '''
59 | checkXrayVersion()
60 | Purpose:
61 |     detect whether xray is the community or the advanced (paid) edition
62 |     advanced edition returns True
63 |     community edition returns False
64 | '''
65 | 
66 | 
67 | def checkXrayVersion(content):
68 |     if "snapshot" in content:
69 |         return False
70 |     return True
71 | 
72 | 
73 | '''
74 | advancedMergeReport(resultList)
75 | Purpose:
76 |     merge xray advanced-edition reports
77 | '''
78 | 
79 | 
80 | def advancedMergeReport(resultList):
81 |     context = ""
82 |     requestMd5Set = set()
83 |     with open("{}\\advancedModelFile.html".format(Hx_config.Root_Path), 'r', encoding='utf-8') as f:
84 |         context += f.read()
85 |     for result in resultList:
86 |         tempResultDict = eval(result)  # each finding is a JSON-compatible object literal; eval() parses it into a dict
87 |         tempDetailRequest = tempResultDict["detail"]["request"]
88 |         tempRequestMd5 = hashlib.md5(tempDetailRequest.encode('utf-8')).hexdigest()
89 |         if tempRequestMd5 not in requestMd5Set:
90 |             requestMd5Set.add(tempRequestMd5)
91 | 
92 |             result = "<script class='web-vulns'>webVulns.push({})</script>".format(result)  # re-wrap in the <script> tag format used by xray HTML reports
93 |             context += result
94 |     return context
95 | 
96 | 
97 | '''
98 | communityMergeReport(resultList)
99 | Purpose:
100 |     merge xray community-edition reports
101 | '''
102 | 
103 | 
104 | def communityMergeReport(resultList):
105 |     context = ""
106 |     requestMd5Set = set()
107 |     with open("{}\\communityModelFile.html".format(Hx_config.Root_Path), 'r', encoding='utf-8') as f:
108 |         context += f.read()
109 |     for result in resultList:
110 |         tempResultDict = eval(result)
111 |         tempDetailRequest = tempResultDict["detail"]["snapshot"][0][0]
112 |         tempRequestMd5 = hashlib.md5(tempDetailRequest.encode('utf-8')).hexdigest()
113 |         if tempRequestMd5 not in requestMd5Set:
114 |             requestMd5Set.add(tempRequestMd5)
115 | 
116 |             result = "<script class='web-vulns'>webVulns.push({})</script>".format(result)
117 |             context += result
118 |     return context
119 | 
120 | 
121 | '''
122 | mergeReport()
123 | Purpose: merge the xray reports
124 | Parameter: filename - name for the merged report file
125 | A set is used to drop duplicate findings; the dedup key is the md5 of the xray request
126 | '''
127 | 
128 | 
129 | def mergeReport(filename):
130 |     reportList = os.listdir(Hx_config.Xray_temp_report_path)
131 |     print(reportList)
132 |     if len(reportList) == 0:
133 |         return
134 | 
135 |     resultList = []
136 | 
137 |     pattern = re.compile(r"<script class='web-vulns'>webVulns.push\((.*?)\)</script>")  # the per-vuln <script> blocks xray embeds in its report HTML
138 | 
139 |     for report in reportList:
140 |         tempReport = "{}\\{}".format(Hx_config.Xray_temp_report_path, report)
141 |         with open(tempReport, 'r', encoding='utf-8') as f:
142 |             temp = f.read()
143 |             result = pattern.findall(temp)
144 |             resultList += result
145 |     tempResult = eval(resultList[0])  # assumes at least one finding was extracted
146 |     if 'snapshot' in tempResult["detail"]:
147 |         context = communityMergeReport(resultList)
148 |     else:
149 |         context = advancedMergeReport(resultList)
150 |     with open("{}\\{}.html".format(Hx_config.Xray_report_path, filename), 'w', encoding='utf-8') as f:
151 |         f.write(context)
152 |     cleanTempXrayReport()
153 | 
154 |     return
155 | 
156 | 
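The pattern at line 137 matches the per-vulnerability <script class='web-vulns'>webVulns.push(...)</script> blocks that xray embeds in its HTML reports. A toy, self-contained illustration of mergeReport()'s extract-and-dedup step; the report_html value is a fabricated placeholder shaped like those blocks, not real scan output:

    import hashlib
    import re

    pattern = re.compile(r"<script class='web-vulns'>webVulns.push\((.*?)\)</script>")
    report_html = (
        "<script class='web-vulns'>webVulns.push({'detail': {'request': 'GET /a'}})</script>"
        "<script class='web-vulns'>webVulns.push({'detail': {'request': 'GET /a'}})</script>"
    )

    seen = set()
    for finding in pattern.findall(report_html):
        key = hashlib.md5(eval(finding)["detail"]["request"].encode('utf-8')).hexdigest()
        if key not in seen:
            seen.add(key)
            print(finding)  # only the first copy of the duplicated finding prints
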
157 | '''
158 | transferJSFinder(url, filename)
159 | Parameters:
160 |     url       the URL to scan
161 |     filename  the md5 of the target URL, used as part of the output file names
162 | 
163 | Purpose:
164 |     drive the (locally patched) JSFinder code
165 | Output:
166 |     URLs and subdomains harvested from JS files
167 | Output file names:
168 |     output_url_filename = "url_" + outputfilename
169 |     output_subdomain_filename = "subdomain" + outputfilename
170 | '''
171 | 
172 | 
173 | def transferJSFinder(url, filename):
174 |     try:
175 |         urls = JSFinder.find_by_url(url)
176 |         JSFinder.giveresult(urls, url, filename)
177 |     except Exception as e:
178 |         print(f"{Hx_config.red}JSFinder ERROR!{Hx_config.end}")
179 |         print(e)
180 |         pass
181 | 
182 | 
183 | '''
184 | transferCScan(url, filename) - C-segment (class C subnet) scan wrapper
185 | '''
186 | 
187 | 
188 | def transferCScan(url, filename):
189 |     try:
190 |         CScan.CScanConsole(url, filename)
191 |     except Exception as e:
192 |         print(f"{Hx_config.red}C-segment scan failed!{Hx_config.end}")
193 |         print(e)
194 |         pass
195 | 
196 | 
197 | '''
198 | subScan(target)
199 | Parameters:
200 |     target    the URL to scan
201 |     filename  hex md5 digest of the scan target
202 | Purpose:
203 |     collect subdomains for the given target and push the results into sub_queue
204 | Output:
205 |     results are stored in sub_queue and handed to the queue-deduplication function
206 | Integrated subdomain collectors:
207 |     OneForAll
208 |     Knock
209 |     subDomainsBrute
210 |     Subfinder
211 |     Sublist3r
212 |     ... (add your own as needed)
213 | '''
214 | 
215 | 
216 | def subScan(target, filename):
217 |     '''
218 |     Run the subdomain collectors and store their results in sub_queue,
219 |     then use queueDeduplication to turn subdomains into URLs and check liveness.
220 |     :param target:
221 |     :param filename:
222 |     :return:
223 |     '''
224 | 
225 |     Sub_report_path = Hx_config.Sub_report_path + filename + ".txt"  # save_sub.txt
226 |     if os.path.exists(Sub_report_path):
227 |         print(f"{Hx_config.red}savesub/{filename}.txt already exists, skipping asset discovery{Hx_config.end}")
228 |         queueDeduplication(filename)
229 |         return  # if the sub file exists, skip all of the scans below
230 | 
231 |     try:
232 |         oneforallMain.OneForAllScan(target)
233 |         pass
234 |     except Exception as e:
235 |         print(f'{Hx_config.red}OneForAllScan error :{Hx_config.end}', e)
236 |     try:
237 |         subDomainsBruteMain.subDomainsBruteScan(target, filename)
238 |         pass
239 |     except Exception as e:
240 |         print(f'{Hx_config.red}subDomainsBruteScan error :{Hx_config.end}', e)
241 |     try:
242 |         Sublist3rMain.Sublist3rScan(target)
243 |         pass
244 |     except Exception as e:
245 |         print(f'{Hx_config.red}Sublist3rScan error :{Hx_config.end}', e)
246 |         pass
247 |     try:
248 |         subfinderMain.subfinderScan(target, filename)
249 |         pass
250 |     except Exception as e:
251 |         print(f'{Hx_config.red}subfinderScan error:{Hx_config.end}', e)
252 |         pass
253 |     try:
254 |         queueDeduplication(filename)
255 |         pass
256 |     except Exception as e:
257 |         print(f'{Hx_config.red}queueDeduplication error:{Hx_config.end}', e)
258 |         pass
259 | 
260 | 
261 | '''
262 | urlCheck(url, f)
263 | Parameters:
264 |     url  the URL whose liveness is being checked
265 |     f    an open file handle
266 | Purpose:
267 |     URL liveness check
268 | Output:
269 |     live URLs are pushed into target_queue and written to f
270 | '''
271 | 
272 | 
273 | async def urlCheck(target, f):
274 |     print(f"{Hx_config.blue}now url live check: {target}{Hx_config.end}")
275 |     async with aiohttp.ClientSession() as session:
276 |         try:
277 |             async with session.get(target, headers=Hx_config.GetHeaders()) as resp:
278 |                 if resp.status < 400:
279 |                     Hx_config.target_queue.put(target)  # the URL is alive
280 |                     print(f"{Hx_config.green}now save :{target}{Hx_config.end}")
281 |                     f.write(f"{target}\n")
282 | 
283 |         except Exception as e:
284 |             return
285 |     return
286 | 
287 | 
288 | def urlCheck_threads(__list, f):
289 |     loop = asyncio.get_event_loop()  # note: deprecated on Python 3.10+; asyncio.run() is the modern equivalent
290 |     __tasks = [
291 |         loop.create_task(urlCheck(url, f))
292 |         for url in __list
293 |     ]
294 |     loop.run_until_complete(asyncio.wait(__tasks))
295 | 
296 | 
297 | '''
298 | queueDeduplication(filename) - queue deduplication
299 | Parameters:
300 |     filename  hex md5 digest of the scan target
301 | Purpose:
302 |     deduplicate the entries in sub_queue and check that they are alive
303 | Output:
304 |     results land in target_queue, are saved to saveSub/<filename>.txt, and become the targets of the following scan stages
305 | '''
306 | 
307 | 
308 | def queueDeduplication(filename):
309 |     Sub_report_path = Hx_config.Sub_report_path + filename + ".txt"  # save_sub.txt
310 |     sub_set = set()
311 |     while not Hx_config.sub_queue.empty():
312 |         target = Hx_config.sub_queue.get()
313 |         sub_set.add(target)
314 |     length = len(sub_set)
315 |     if os.path.exists(Sub_report_path):
316 |         with open(Sub_report_path, 'r+') as f:  # the file already exists: reuse its URLs instead of rechecking
317 |             lines = f.readlines()
318 |             if len(lines) > 1:  # the file has content
319 |                 for line in lines:
320 |                     if line.strip() not in ['\n\r', '\n', '']:
321 |                         Hx_config.target_queue.put(line.strip())  # the URL is alive
322 |             print(f"{Hx_config.yellow}queueDeduplication End~{Hx_config.end}")
323 |             print(
324 |                 f"{Hx_config.green}Subdomain collection finished, count: {Hx_config.target_queue.qsize()}, saved as: {filename}{Hx_config.end}")
325 |             SendNotice(f"Subdomain collection finished, count: {length}, saved as: {filename}")  # ServerChan push notification
326 |             return
327 | 
328 |     with open(Sub_report_path, 'a+') as f:
329 |         if len(sub_set) != 0:
330 |             urlCheck_threads(list(sub_set), f)  # kick off the async liveness checks
331 | 
332 |     print(f"{Hx_config.yellow}queueDeduplication End~{Hx_config.end}")
333 |     SendNotice("Subdomain collection finished, count: {}, saved as: {}".format(length, filename))
334 |     return
335 | 
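urlCheck_threads() above creates one task per URL with no upper bound, so a large subdomain list opens that many sockets at once. A sketch of the same fan-out with a concurrency cap and asyncio.run(); the limit of 50 is an arbitrary illustration, not a project setting:

    import asyncio

    def url_check_bounded(urls, f, limit=50):
        sem = asyncio.Semaphore(limit)

        async def guarded(url):
            async with sem:              # at most `limit` checks in flight
                await urlCheck(url, f)   # reuses the coroutine defined above

        async def runner():
            await asyncio.gather(*(guarded(u) for u in urls))

        asyncio.run(runner())
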
336 | 
337 | '''
338 | Prepend a scheme to URLs that do not carry one
339 | '''
340 | 
341 | 
342 | def addHttpHeader(target):
343 |     pattern = re.compile(r'^http')
344 |     if not pattern.match(target.strip()):
345 |         target = "https://" + target.strip()  # note: despite the name, https:// is prepended
346 |     else:
347 |         target = target.strip()
348 |     return target
349 | 
350 | 
351 | '''
352 | checkBlackList(url)
353 | Check whether the target URL hits the blacklist; returns False when it does
354 | '''
355 | 
356 | 
357 | def checkBlackList(url):
358 |     for i in Hx_config.blacklist:
359 |         if i in url:
360 |             return False
361 |     return True
362 | 
363 | 
364 | '''
365 | ARL scan
366 | '''
367 | 
368 | 
369 | def ArlScan(name='', target=''):
370 |     print(f"{Hx_config.yellow}This is ArlScan ~{Hx_config.end}")
371 |     Scan(name, target).add_task()
372 | 
373 | 
374 | '''
375 | Turn the queue into a list
376 | '''
377 | 
378 | 
379 | def from_queue_to_list(_queue):
380 |     result = []
381 |     while not _queue.empty():
382 |         _ = Hx_config.target_queue.get()  # drains Hx_config.target_queue (assumed to be the same queue as _queue)
383 |         result.append(_.strip())
384 |     for item in result:  # refill the queue so the crawlergo dynamic crawler can reuse it
385 |         Hx_config.target_queue.put(item)
386 | 
387 |     return result
388 | 
389 | 
390 | '''
391 | Strip the http(s) scheme.
392 | OneForAll's result files are named without the scheme; without this filter the files cannot be found and opened.
393 | '''
394 | 
395 | 
396 | def url_http_delete(url):
397 |     if 'https://' in url:
398 |         url = url[8:]
399 |     if 'http://' in url:
400 |         url = url[7:]
401 | 
402 |     return url
403 | 
404 | 
405 | '''
406 | Brute-force file lookup; works around OneForAll's output-file naming during scans
407 | '''
408 | 
409 | 
410 | def get_filename(abs_path, name):
411 |     for i in os.walk(abs_path):
412 |         for j in i[2]:
413 |             if j[0:-4] in name:
414 |                 return j
415 | 
416 |     return False
417 | 
418 | 
419 | '''
420 | Save results to a file
421 | '''
422 | 
423 | 
424 | def save(__list, filepath='abs\\xxx.txt', host=''):
425 |     with open(filepath, 'a+') as f:
426 |         for i in __list:
427 |             if i == host or i == host + '/':
428 |                 continue
429 |             f.write(i.strip() + '\n')
430 | 
431 | 
432 | def main():
433 |     a = set()
434 |     a.add(1)
435 |     a.add(2)
436 |     print(list(a))
437 |     return
438 | 
439 | 
440 | if __name__ == '__main__':
441 |     main()
442 | 
--------------------------------------------------------------------------------
/crawlergo/crawlergoMain.py:
--------------------------------------------------------------------------------
1 | import fnmatch
2 | import os
3 | import subprocess
4 | 
5 | import simplejson
6 | import Hx_config
7 | 
8 | ua = Hx_config.GetHeaders()
9 | 
10 | # def GetHeaders():
11 | #     headers = {'User-Agent': ua.random}
12 | #     return headers
13 | 
14 | '''
15 | Use a set to drop duplicate URLs,
16 | then store the deduplicated URLs in the queue
17 | '''
18 | 
19 | 
20 | def removeDuplicates(req_list):
21 |     req_pool = set()
22 |     try:
23 |         for url in req_list:
24 |             req_pool.add(url['url'].strip())
25 |     except Exception as e:
26 |         print(e)
27 |         pass
28 |     return req_pool
29 | 
30 | 
31 | '''
32 | Crawl the target page's URLs with crawlergo
33 | '''
34 | 
35 | 
36 | def crawlergoGet(target):
37 |     print(f"{Hx_config.yellow}Now crawlergoGet : {target}{Hx_config.end}")
38 |     try:
39 |         if jump_duplication(target) == 'pass':
40 |             return 'pass'
41 |         cmd = [Hx_config.crawlergo_Path, "-c", Hx_config.Chrome_Path, "-t", "10", "-f",
42 |                "smart", "-o", "json", target]
43 |         rsp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
44 |         output, error = rsp.communicate()
45 |         # "--[Mission Complete]--" is the delimiter crawlergo prints when a crawl finishes
46 |         result = simplejson.loads(output.decode().split("--[Mission Complete]--")[1])
47 |         # print(result)
48 |         req_list = result["req_list"]
49 | 
50 |     except Exception as e:
51 |         print(e)
52 |         req_list = []
53 |         pass
54 |     print(f"{Hx_config.yellow}target {target} crawlergo end~{Hx_config.end}")
55 |     print(f"{Hx_config.green}crawlergo get url number {len(req_list)}{Hx_config.end}")
56 |     return removeDuplicates(req_list)
57 | 
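crawlergoGet() returns a deduplicated set of URLs (or the string 'pass' for an already-crawled host), which can be fed straight into the Xray batch scanner from Xray/pppXray.py. A sketch of that wiring, with a local queue standing in for the project's Hx_config queues:

    import queue

    from Xray import pppXray

    def crawl_then_scan(target):
        urls = crawlergoGet(target)
        if urls == 'pass':          # host was already crawled earlier
            return
        req_queue = queue.Queue()   # pppGet() drains any queue-like object
        for url in urls:
            req_queue.put(url)
        pppXray.pppGet(req_queue)   # one md5-named HTML report per URL
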
58 | 
59 | # Skip hosts that have already been crawled
60 | def jump_duplication(url):
61 |     host = get_host(url)
62 |     filenames = []
63 |     for i in range(0, len(host)):
64 |         _ = '.'.join(host[i::])
65 |         filenames.append(_ + '.txt')
66 | 
67 |     del filenames[-2::]  # drop the last two candidates (the TLD-only and registrable-domain-only names)
68 | 
69 |     files = []
70 |     complete_urls = []
71 |     for root, dirs, files in os.walk(r'save/saveCrawlergo'):  # after the walk, `files` holds the entries of the last directory visited
72 |         pass
73 |     for file in files:
74 |         for filename in filenames:
75 |             if fnmatch.fnmatch(filename, file):
76 |                 with open(f'save//saveCrawlergo//{filename}', 'r') as f:
77 |                     complete_urls = f.readlines()
78 |                 break
79 | 
80 |     for complete_url in complete_urls:
81 |         if get_host(complete_url) == host:
82 |             print(f"{Hx_config.yellow}target {url} exist, crawlergo pass~{Hx_config.end}")
83 |             return 'pass'
84 | 
85 | 
86 | # Extract the host portion of a URL
87 | def get_host(url):
88 |     host = url.split('.')
89 |     end = host[-1]
90 |     head = host[0]
91 |     if ':' in end:
92 |         end = end.split(':')[0]
93 |     elif '/' in end:
94 |         end = end.split('/')[0]
95 |     if 'https://' in head or 'http://' in head:
96 |         head = head.split('//')[-1]
97 |     host[-1] = end
98 |     host[0] = head
99 | 
100 |     return host
101 | 
102 | 
103 | def main():
104 |     return
105 | 
106 | 
107 | if __name__ == '__main__':
108 |     main()
109 | 
--------------------------------------------------------------------------------
/dict/dns_servers.txt:
--------------------------------------------------------------------------------
1 | 119.29.29.29
2 | 182.254.116.116
3 | # 223.5.5.5
4 | # 223.6.6.6
5 | 114.114.115.115
6 | 114.114.114.114
--------------------------------------------------------------------------------
/dict/next_sub.txt:
--------------------------------------------------------------------------------
1 | test
2 | test2
3 | t
4 | dev
5 | 1
6 | 2
7 | 3
8 | s1
9 | s2
10 | s3
11 | admin
12 | adm
13 | a
14 | ht
15 | adminht
16 | webht
17 | web
18 | gm
19 | sys
20 | system
21 | manage
22 | manager
23 | mgr
24 | b
25 | c
26 | passport
27 | bata
28 | wei
29 | weixin
30 | wechat
31 | wx
32 | wiki
33 | upload
34 | ftp
35 | pic
36 | jira
37 | zabbix
38 | nagios
39 | bug
40 | bugzilla
41 | sql
42 | mysql
43 | db
44 | stmp
45 | pop
46 | imap
47 | mail
48 | zimbra
49 | exchange
50 | forum
51 | bbs
52 | list
53 | count
54 | counter
55 | img
56 | img01
57 | img02
58 | img03
59 | img04
60 | api
61 | cache
62 | js
63 | css
64 | app
65 | apps
66 | wap
67 | m
68 | sms
69 | zip
70 | monitor
71 | proxy
72 | update
73 | upgrade
74 | stat
75 | stats
76 | data
77 | portal
78 | blog
79 | autodiscover
80 | en
81 | search
82 | so
83 | oa
84 | database
85 | home
86 | sso
87 | help
88 | vip
89 | s
90 | w
91 | down
92 | download
93 | downloads
94 | dl
95 | svn
96 | git
97 | log
98 | staff
99 | vpn
100 | sslvpn
101 | ssh
102 | scanner
103 | sandbox
104 | ldap
105 | lab
106 | go
107 | demo
108 | console
109 | cms
110 | auth
111 | crm
112 | erp
113 | res
114 | static
115 | old
116 | new
117 | beta
118 | image
119 | service
120 | login
121 | 3g
122 | docs
123 | it
124 | e
125 | live
126 | library
127 | files
128 | i
129 | d
130 | cp
131 | connect
132 | gateway
133 | lib
134 | preview
135 | backup
136 | share
137 | status
138 | assets
139 | user
140 | vote
141 | bugs
142 | cas
143 | feedback
144 | id
145 | edm
146 | survey
147 | union
148 | ceshi
149 | dev1
150 | updates
151 | phpmyadmin
152 | pma
153 | edit
154 | master
155 | xml
156 | control 157 | profile 158 | zhidao 159 | tool 160 | toolbox 161 | boss 162 | activity 163 | www 164 | -------------------------------------------------------------------------------- /dict/next_sub_full.txt: -------------------------------------------------------------------------------- 1 | test 2 | test2 3 | t 4 | dev 5 | 1 6 | 2 7 | 3 8 | s1 9 | s2 10 | s3 11 | admin 12 | adm 13 | a 14 | ht 15 | adminht 16 | webht 17 | web 18 | gm 19 | sys 20 | system 21 | manage 22 | manager 23 | mgr 24 | b 25 | c 26 | passport 27 | bata 28 | wei 29 | weixin 30 | wechat 31 | wx 32 | wiki 33 | upload 34 | ftp 35 | pic 36 | jira 37 | zabbix 38 | nagios 39 | bug 40 | bugzilla 41 | sql 42 | mysql 43 | db 44 | stmp 45 | pop 46 | imap 47 | mail 48 | zimbra 49 | exchange 50 | forum 51 | bbs 52 | list 53 | count 54 | counter 55 | img 56 | img01 57 | img02 58 | img03 59 | img04 60 | api 61 | cache 62 | js 63 | css 64 | app 65 | apps 66 | wap 67 | m 68 | sms 69 | zip 70 | monitor 71 | proxy 72 | update 73 | upgrade 74 | stat 75 | stats 76 | data 77 | portal 78 | blog 79 | autodiscover 80 | en 81 | search 82 | so 83 | oa 84 | database 85 | home 86 | sso 87 | help 88 | vip 89 | s 90 | w 91 | down 92 | download 93 | downloads 94 | dl 95 | svn 96 | git 97 | log 98 | staff 99 | vpn 100 | sslvpn 101 | ssh 102 | scanner 103 | sandbox 104 | ldap 105 | lab 106 | go 107 | demo 108 | console 109 | cms 110 | auth 111 | crm 112 | erp 113 | res 114 | static 115 | old 116 | new 117 | beta 118 | image 119 | service 120 | login 121 | 3g 122 | docs 123 | it 124 | e 125 | live 126 | library 127 | files 128 | i 129 | d 130 | cp 131 | connect 132 | gateway 133 | lib 134 | preview 135 | backup 136 | share 137 | status 138 | assets 139 | user 140 | vote 141 | bugs 142 | cas 143 | feedback 144 | id 145 | edm 146 | survey 147 | union 148 | ceshi 149 | dev1 150 | updates 151 | phpmyadmin 152 | pma 153 | edit 154 | master 155 | xml 156 | control 157 | profile 158 | zhidao 159 | tool 160 | toolbox 161 | boss 162 | activity 163 | www 164 | smtp 165 | webmail 166 | mx 167 | pop3 168 | ns1 169 | ns2 170 | webdisk 171 | www2 172 | news 173 | cpanel 174 | whm 175 | shop 176 | sip 177 | ns 178 | mobile 179 | www1 180 | email 181 | support 182 | mail2 183 | media 184 | lyncdiscover 185 | secure 186 | video 187 | my 188 | staging 189 | images 190 | dns 191 | info 192 | ns3 193 | mail1 194 | intranet 195 | cdn 196 | lists 197 | dns1 198 | www3 199 | dns2 200 | mobilemail 201 | store 202 | remote 203 | cn 204 | owa 205 | cs 206 | stage 207 | online 208 | jobs 209 | calendar 210 | community 211 | forums 212 | services 213 | dialin 214 | chat 215 | meet 216 | blogs 217 | hr 218 | office 219 | ww 220 | ftp2 221 | legacy 222 | b2b 223 | ns4 224 | v 225 | pda 226 | events 227 | av 228 | edu 229 | ads 230 | health 231 | es 232 | english 233 | ad 234 | extranet 235 | helpdesk 236 | training 237 | photo 238 | finance 239 | tv 240 | fr 241 | sc 242 | job 243 | cloud 244 | im 245 | careers 246 | game 247 | archive 248 | get 249 | gis 250 | access 251 | member 252 | mx1 253 | newsletter 254 | de 255 | qa 256 | direct 257 | alumni 258 | mx2 259 | hk 260 | sp 261 | gw 262 | relay 263 | jp 264 | content 265 | file 266 | citrix 267 | vpn2 268 | soft 269 | ssl 270 | server 271 | club 272 | ws 273 | host 274 | book 275 | www4 276 | sh 277 | tools 278 | mail3 279 | ms 280 | mailhost 281 | ca 282 | ntp 283 | ask 284 | sites 285 | sz 286 | spam 287 | wwww 288 | tw 289 | videos 290 | send 291 | music 292 | project 293 | uk 294 | start 295 | mall 296 | ns5 297 | 
outlook 298 | reports 299 | us 300 | partner 301 | mssql 302 | bj 303 | sharepoint 304 | link 305 | metrics 306 | partners 307 | smtp2 308 | webproxy 309 | mdm 310 | marketing 311 | ts 312 | security 313 | map 314 | ir 315 | fs 316 | origin 317 | travel 318 | feeds 319 | meeting 320 | u 321 | photos 322 | hq 323 | tj 324 | research 325 | pt 326 | members 327 | ru 328 | bm 329 | business 330 | eq 331 | cc 332 | w3 333 | student 334 | auto 335 | dx 336 | p 337 | rs 338 | dns3 339 | vc 340 | gmail 341 | uc 342 | press 343 | web1 344 | localhost 345 | ent 346 | tuan 347 | dj 348 | web2 349 | ss 350 | cnc 351 | vpn1 352 | pay 353 | time 354 | sx 355 | hd 356 | games 357 | lt 358 | projects 359 | g 360 | sales 361 | stream 362 | gb 363 | forms 364 | www5 365 | wt 366 | abc 367 | weather 368 | zb 369 | smtp1 370 | maps 371 | x 372 | register 373 | design 374 | radio 375 | software 376 | china 377 | math 378 | open 379 | view 380 | fax 381 | event 382 | pm 383 | test1 384 | alpha 385 | irc 386 | sg 387 | cq 388 | ftp1 389 | idc 390 | labs 391 | da 392 | directory 393 | developer 394 | reg 395 | catalog 396 | rss 397 | wh 398 | sd 399 | tg 400 | bb 401 | digital 402 | hb 403 | house 404 | site 405 | conference 406 | rt 407 | temp 408 | fw 409 | tz 410 | tech 411 | education 412 | biz 413 | f 414 | gallery 415 | gh 416 | car 417 | dc 418 | agent 419 | mis 420 | eng 421 | flash 422 | cx 423 | pub 424 | ticket 425 | doc 426 | card 427 | account 428 | code 429 | promo 430 | net 431 | kb 432 | jk 433 | social 434 | sports 435 | ems 436 | tp 437 | public 438 | mm 439 | pms 440 | mrtg 441 | as 442 | jw 443 | corp 444 | tr 445 | investor 446 | dm 447 | sts 448 | th 449 | bi 450 | 123 451 | st 452 | br 453 | wp 454 | art 455 | shopping 456 | global 457 | money 458 | prod 459 | students 460 | cj 461 | iphone 462 | vps 463 | ag 464 | food 465 | sb 466 | ly 467 | local 468 | sj 469 | server1 470 | testing 471 | brand 472 | sy 473 | buy 474 | life 475 | groups 476 | nl 477 | tour 478 | lms 479 | pro 480 | bc 481 | rtx 482 | hao 483 | exam 484 | fb 485 | in 486 | ams 487 | msoid 488 | idp 489 | vod 490 | cm 491 | dk 492 | hs 493 | usa 494 | ww2 495 | jwc 496 | lp 497 | rsc 498 | jd 499 | cf 500 | rms 501 | ec 502 | jabber 503 | streaming 504 | webdev 505 | dms 506 | investors 507 | bookstore 508 | kr 509 | cd 510 | corporate 511 | mail4 512 | fz 513 | order 514 | transfer 515 | hotel 516 | work 517 | bt 518 | au 519 | pages 520 | sm 521 | client 522 | r 523 | y 524 | audio 525 | cz 526 | ci 527 | se 528 | potala 529 | ch 530 | webservices 531 | dy 532 | cvs 533 | ra 534 | apple 535 | barracuda 536 | ip 537 | ja 538 | mkt 539 | archives 540 | www0 541 | intra 542 | gate 543 | youth 544 | internal 545 | mailgw 546 | customer 547 | linux 548 | registration 549 | movie 550 | mailgate 551 | q 552 | xx 553 | mx3 554 | mars 555 | phone 556 | desktop 557 | ds 558 | zz 559 | love 560 | show 561 | nc 562 | redmine 563 | ce 564 | pl 565 | wireless 566 | inside 567 | fx 568 | mp 569 | hz 570 | listserv 571 | analytics 572 | ks 573 | redirect 574 | accounts 575 | report 576 | hermes 577 | ae 578 | mobi 579 | ps 580 | edge 581 | resources 582 | img1 583 | law 584 | pr 585 | international 586 | ml 587 | trac 588 | rd 589 | market 590 | mailer 591 | cert 592 | hg 593 | cl 594 | img2 595 | development 596 | gs 597 | google 598 | space 599 | www6 600 | gd 601 | post 602 | voip 603 | ac 604 | push 605 | m2 606 | sq 607 | fc 608 | ar 609 | asp 610 | dr 611 | seo 612 | mobil 613 | sync 614 | kf 615 | be 616 | about 617 | mail01 618 
| sns 619 | board 620 | pc 621 | links 622 | jj 623 | history 624 | mailman 625 | campus 626 | mms 627 | storage 628 | ns0 629 | cdn2 630 | cacti 631 | hy 632 | enterprise 633 | noc 634 | ic 635 | cgi 636 | track 637 | world 638 | act 639 | wl 640 | product 641 | ls 642 | sf 643 | affiliates 644 | android 645 | payment 646 | n 647 | gz 648 | web3 649 | learning 650 | signup 651 | z 652 | tao 653 | top 654 | wifi 655 | yy 656 | password 657 | cw 658 | wm 659 | ess 660 | ex 661 | resource 662 | print 663 | gc 664 | w2 665 | canada 666 | cr 667 | mc 668 | 0 669 | me 670 | keys 671 | sentry 672 | smtp3 673 | journal 674 | mt 675 | team 676 | orion 677 | edi 678 | test3 679 | tc 680 | main 681 | zs 682 | faq 683 | click 684 | hub 685 | tu 686 | golf 687 | phoenix 688 | bd 689 | build 690 | free 691 | ee 692 | int 693 | cdn1 694 | v2 695 | sa 696 | pos 697 | fi 698 | router 699 | rc 700 | mirror 701 | tracker 702 | ct 703 | special 704 | cal 705 | ns6 706 | atlas 707 | ids 708 | affiliate 709 | nj 710 | tt 711 | nz 712 | db1 713 | bg 714 | mercury 715 | family 716 | courses 717 | ipv6 718 | jupiter 719 | no 720 | venus 721 | nb 722 | beijing 723 | summer 724 | ma 725 | yp 726 | ocs 727 | star 728 | traveler 729 | multimedia 730 | fm 731 | study 732 | lb 733 | up 734 | shanghai 735 | bk 736 | www7 737 | join 738 | tfs 739 | feed 740 | h 741 | ns01 742 | php 743 | stock 744 | km 745 | books 746 | eu 747 | md 748 | 2013 749 | whois 750 | sw 751 | mailserver 752 | mb 753 | tms 754 | monitoring 755 | ys 756 | ga 757 | radius 758 | group 759 | mtest 760 | j 761 | www8 762 | wb 763 | m1 764 | billing 765 | aaa 766 | pf 767 | products 768 | faculty 769 | em 770 | opac 771 | cis 772 | xmpp 773 | nanjing 774 | taobao 775 | zp 776 | teacher 777 | co 778 | contact 779 | nt 780 | ky 781 | qq 782 | mp3 783 | gps 784 | hn 785 | users 786 | gl 787 | domain 788 | newsroom 789 | dh 790 | csc 791 | repo 792 | zw 793 | ismart 794 | pp 795 | gg 796 | wms 797 | ims 798 | www9 799 | 2014 800 | solutions 801 | at 802 | bak 803 | sl 804 | cwc 805 | firewall 806 | wordpress 807 | school 808 | nms 809 | developers 810 | pki 811 | pe 812 | v2-ag 813 | devel 814 | hp 815 | titan 816 | pluto 817 | kids 818 | sport 819 | mail5 820 | server2 821 | nas 822 | xh 823 | ap 824 | red 825 | mas 826 | translate 827 | dealer 828 | ipad 829 | demo2 830 | 2012 831 | dns4 832 | hh 833 | green 834 | dz 835 | hybrid 836 | discover 837 | adserver 838 | japan 839 | mi 840 | xf 841 | zeus 842 | am 843 | people 844 | aa 845 | win 846 | sk 847 | db2 848 | jenkins 849 | xb 850 | oss 851 | sdc 852 | wc 853 | its 854 | dw 855 | yun 856 | acs 857 | asia 858 | daj 859 | webadmin 860 | crl 861 | ebook 862 | mag 863 | csg 864 | blue 865 | bank 866 | one 867 | o 868 | horizon 869 | orders 870 | apis 871 | k 872 | l 873 | 4 874 | 5 875 | 6 876 | 7 877 | 8 878 | 9 879 | ab 880 | af 881 | ah 882 | ai 883 | aj 884 | ak 885 | al 886 | an 887 | ao 888 | aq 889 | aw 890 | ax 891 | ay 892 | az 893 | ba 894 | bf 895 | bh 896 | bl 897 | bn 898 | bo 899 | bp 900 | bq 901 | bs 902 | bu 903 | bv 904 | bw 905 | bx 906 | by 907 | bz 908 | cb 909 | cg 910 | ck 911 | cu 912 | cv 913 | cy 914 | dd 915 | df 916 | dg 917 | di 918 | dn 919 | do 920 | dp 921 | dq 922 | dt 923 | du 924 | dv 925 | ea 926 | eb 927 | ed 928 | ef 929 | eg 930 | eh 931 | ei 932 | ej 933 | ek 934 | el 935 | eo 936 | ep 937 | er 938 | et 939 | ev 940 | ew 941 | ey 942 | ez 943 | fa 944 | fd 945 | fe 946 | ff 947 | fg 948 | fh 949 | fj 950 | fk 951 | fl 952 | fn 953 | fo 954 | fp 955 | fq 956 | ft 
957 | fu 958 | fv 959 | fy 960 | ge 961 | gf 962 | gi 963 | gj 964 | gk 965 | gn 966 | gp 967 | gq 968 | gr 969 | gt 970 | gu 971 | gv 972 | gx 973 | gy 974 | ha 975 | hc 976 | he 977 | hf 978 | hi 979 | hj 980 | hl 981 | hm 982 | ho 983 | hu 984 | hv 985 | hw 986 | hx 987 | ia 988 | ib 989 | ie 990 | if 991 | ig 992 | ih 993 | ii 994 | ij 995 | ik 996 | il 997 | io 998 | iq 999 | is 1000 | iu 1001 | iv 1002 | iw 1003 | ix 1004 | iy 1005 | iz 1006 | jb 1007 | jc 1008 | je 1009 | jf 1010 | jg 1011 | jh 1012 | ji 1013 | jl 1014 | jm 1015 | jn 1016 | jo 1017 | jq 1018 | jr 1019 | jt 1020 | ju 1021 | jv 1022 | jx 1023 | jy 1024 | jz 1025 | ka 1026 | kc 1027 | kd 1028 | ke 1029 | kg 1030 | kh 1031 | ki 1032 | kj 1033 | kk 1034 | kl 1035 | kn 1036 | ko 1037 | kp 1038 | kq 1039 | kt 1040 | ku 1041 | kv 1042 | kw 1043 | kx 1044 | kz 1045 | la 1046 | lc 1047 | ld 1048 | le 1049 | lf 1050 | lg 1051 | lh 1052 | li 1053 | lj 1054 | lk 1055 | ll 1056 | lm 1057 | ln 1058 | lo 1059 | lq 1060 | lr 1061 | lu 1062 | lv 1063 | lw 1064 | lx 1065 | lz 1066 | mf 1067 | mg 1068 | mh 1069 | mj 1070 | mk 1071 | mn 1072 | mo 1073 | mq 1074 | mr 1075 | mu 1076 | mv 1077 | mw 1078 | mz 1079 | na 1080 | nd 1081 | ne 1082 | nf 1083 | ng 1084 | nh 1085 | ni 1086 | nk 1087 | nm 1088 | nn 1089 | np 1090 | nq 1091 | nr 1092 | nu 1093 | nv 1094 | nw 1095 | nx 1096 | ny 1097 | ob 1098 | oc 1099 | od 1100 | oe 1101 | of 1102 | og 1103 | oh 1104 | oi 1105 | oj 1106 | ok 1107 | ol 1108 | om 1109 | on 1110 | oo 1111 | op 1112 | oq 1113 | or 1114 | os 1115 | ot 1116 | ou 1117 | ov 1118 | ow 1119 | ox 1120 | oy 1121 | oz 1122 | pa 1123 | pb 1124 | pd 1125 | pg 1126 | ph 1127 | pi 1128 | pj 1129 | pk 1130 | pn 1131 | po 1132 | pq 1133 | pu 1134 | pv 1135 | pw 1136 | px 1137 | py 1138 | pz 1139 | qb 1140 | qc 1141 | qd 1142 | qe 1143 | qf 1144 | qg 1145 | qh 1146 | qi 1147 | qj 1148 | qk 1149 | ql 1150 | qm 1151 | qn 1152 | qo 1153 | qp 1154 | qr 1155 | qs 1156 | qt 1157 | qu 1158 | qv 1159 | qw 1160 | qx 1161 | qy 1162 | qz 1163 | rb 1164 | re 1165 | rf 1166 | rg 1167 | rh 1168 | ri 1169 | rj 1170 | rk 1171 | rl 1172 | rm 1173 | rn 1174 | ro 1175 | rp 1176 | rq 1177 | rr 1178 | rv 1179 | rw 1180 | rx 1181 | ry 1182 | rz 1183 | si 1184 | sn 1185 | sr 1186 | su 1187 | sv 1188 | ta 1189 | tb 1190 | td 1191 | te 1192 | tf 1193 | ti 1194 | tk 1195 | tl 1196 | tm 1197 | tn 1198 | to 1199 | tq 1200 | tx 1201 | ty 1202 | ua 1203 | ub 1204 | ud 1205 | ue 1206 | uf 1207 | ug 1208 | uh 1209 | ui 1210 | uj 1211 | ul 1212 | um 1213 | un 1214 | uo 1215 | uq 1216 | ur 1217 | ut 1218 | uu 1219 | uv 1220 | uw 1221 | ux 1222 | uy 1223 | uz 1224 | va 1225 | vb 1226 | vd 1227 | ve 1228 | vf 1229 | vg 1230 | vh 1231 | vi 1232 | vj 1233 | vk 1234 | vl 1235 | vm 1236 | vn 1237 | vo 1238 | vp 1239 | vq 1240 | vr 1241 | vs 1242 | vt 1243 | vu 1244 | vv 1245 | vw 1246 | vx 1247 | vy 1248 | vz 1249 | wa 1250 | wd 1251 | we 1252 | wf 1253 | wg 1254 | wi 1255 | wj 1256 | wk 1257 | wn 1258 | wo 1259 | wq 1260 | wr 1261 | wu 1262 | wv 1263 | wy 1264 | wz 1265 | xa 1266 | xc 1267 | xd 1268 | xe 1269 | xg 1270 | xi 1271 | xj 1272 | xk 1273 | xl 1274 | xm 1275 | xn 1276 | xo 1277 | xp 1278 | xq 1279 | xr 1280 | xs 1281 | xt 1282 | xu 1283 | xv 1284 | xw 1285 | xy 1286 | xz 1287 | ya 1288 | yb 1289 | yc 1290 | yd 1291 | ye 1292 | yf 1293 | yg 1294 | yh 1295 | yi 1296 | yj 1297 | yk 1298 | yl 1299 | ym 1300 | yn 1301 | yo 1302 | yq 1303 | yr 1304 | yt 1305 | yu 1306 | yv 1307 | yw 1308 | yx 1309 | yz 1310 | za 1311 | zc 1312 | zd 1313 | ze 1314 | zf 1315 | zg 1316 | 
zh 1317 | zi 1318 | zj 1319 | zk 1320 | zl 1321 | zm 1322 | zn 1323 | zo 1324 | zq 1325 | zr 1326 | zt 1327 | zu 1328 | zv 1329 | zx 1330 | zy -------------------------------------------------------------------------------- /dict/subnames_all_5_letters.txt: -------------------------------------------------------------------------------- 1 | {alphnum} 2 | {alphnum}{alphnum} 3 | {alphnum}{alphnum}{alphnum} 4 | {alphnum}{alphnum}{alphnum}{alphnum} 5 | {alphnum}{alphnum}{alphnum}{alphnum}{alphnum} -------------------------------------------------------------------------------- /images/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/1.png -------------------------------------------------------------------------------- /images/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/2.png -------------------------------------------------------------------------------- /images/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/3.png -------------------------------------------------------------------------------- /images/Architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/Architecture.png -------------------------------------------------------------------------------- /images/Praise.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/Praise.png -------------------------------------------------------------------------------- /images/image-20210817235656344.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/image-20210817235656344.png -------------------------------------------------------------------------------- /images/image-20210817235844858.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/image-20210817235844858.png -------------------------------------------------------------------------------- /images/image-20210818003323362.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/image-20210818003323362.png -------------------------------------------------------------------------------- /images/image-20210818003406757.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/image-20210818003406757.png -------------------------------------------------------------------------------- /images/image-20210818003639711.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/image-20210818003639711.png
--------------------------------------------------------------------------------
/images/image-20210818010542320.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/images/image-20210818010542320.png
--------------------------------------------------------------------------------
/pipei.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Cl0udG0d/HXnineTails/f894ca643841ea1221a30c657bf2e1c4b87eca25/pipei.py
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | -i https://mirrors.aliyun.com/pypi/simple/
2 | aiohttp==3.7.4
3 | aiocontextvars==0.2.2
4 | beautifulsoup4==4.9.3
5 | bs4==0.0.1
6 | certifi==2020.11.8
7 | chardet==3.0.4
8 | click==8.0.1
9 | colorama==0.4.4
10 | contextvars==2.4
11 | dnspython==2.0.0
12 | exrex==0.10.5
13 | fire==0.3.1
14 | future==0.18.2
15 | idna==2.10
16 | immutables==0.14
17 | loguru==0.5.3
18 | requests==2.25.0
19 | simplejson==3.17.2
20 | six==1.15.0
21 | soupsieve==2.0.1
22 | SQLAlchemy==1.3.20
23 | tenacity==6.2.0
24 | termcolor==1.1.0
25 | tqdm==4.54.0
26 | treelib==1.6.1
27 | urllib3==1.26.2
28 | win32-setctime==1.0.3
29 | win_unicode_console==0.5
30 | aiodns==2.0.0
--------------------------------------------------------------------------------
/scan.py:
--------------------------------------------------------------------------------
1 | import click
2 | import getopt
3 | import hashlib
4 | import os
5 | import sys
6 | from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED
7 | 
8 | import Hx_config
9 | import base
10 | from ServerJiang.jiangMain import SendNotice
11 | from Xray import pppXray
12 | from crawlergo import crawlergoMain
13 | from waf import WAF
14 | 
15 | '''
16 | Main controller for the vulnerability scan
17 | Parameter:
18 |     url
19 |     in a form such as https://www.baidu.com
20 | 
21 | Chained scanning tools:
22 |     JS discovery:
23 |         JSFinder
24 |     xray scanning:
25 |         crawlergo dynamic crawler -> Xray advanced edition
26 |     C-segment:
27 |         home-grown C-segment scan function
28 | '''
29 | 
30 | 
31 | def threadPoolDetailScan(temp_url, current_filename):
32 |     pppXray.xrayScan(temp_url, current_filename)
33 |     base.transferJSFinder(temp_url, current_filename)
34 |     base.transferCScan(temp_url, current_filename)
35 |     return
36 | 
37 | 
38 | def threadPoolScan(req_pool, filename, target):
39 |     print("req_pool num is {}".format(len(req_pool)))
40 |     thread = ThreadPoolExecutor(max_workers=Hx_config.ThreadNum)
41 |     i = 0
42 |     all_task = []
43 |     while len(req_pool) != 0:
44 |         # pop the URLs out of req_pool one by one and scan them
45 |         temp_url = req_pool.pop()
46 |         current_filename = hashlib.md5(temp_url.encode("utf-8")).hexdigest()
47 |         # hand each URL to xray for scanning and save the report
48 |         # pppXray.xrayScan(temp_url, current_filename)
49 |         i += 1
50 |         one_t = thread.submit(pppXray.xrayScan, temp_url, current_filename)
51 |         all_task.append(one_t)
52 |         if i == 5 or len(req_pool) == 0:
53 |             i = 0
54 |             wait(all_task, return_when=ALL_COMPLETED)
55 |             all_task = []
56 |     base.mergeReport(filename)
57 |     SendNotice("{} HXnineTails scan finished".format(target))
58 | 
59 | 
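threadPoolScan drives xray in batches: it submits up to five jobs to the pool, waits for the whole batch to finish, then moves on. A self-contained sketch of the same batching pattern with a placeholder job (scan_one and the URL list are illustrative, not part of the project):

from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED

def scan_one(url):  # stands in for pppXray.xrayScan
    print(f"scanning {url}")

urls = [f"https://example.com/{n}" for n in range(12)]
pool = ThreadPoolExecutor(max_workers=5)
batch = []
while urls:
    batch.append(pool.submit(scan_one, urls.pop()))
    if len(batch) == 5 or not urls:
        wait(batch, return_when=ALL_COMPLETED)  # drain the batch before queueing more work
        batch = []
pool.shutdown()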
60 | '''
61 | init(): scan initialization
62 | What it does:
63 |     initialize the directories that results are saved to
64 |     initialize the scan parameters
65 |     attone=, attsrc=, attdetail=, readppp=, thread=, clean, plugins=
66 | 
67 | '''
68 | 
69 | 
70 | @click.command()
71 | @click.option('-a', '--attone', help='Single URL: crawlergo dynamic crawling + xray scan only, e.g. python3 scan.py -a https://www.baidu.com',
72 |               type=str)
73 | @click.option('-s', '--attsrc', help='SRC asset: recon + crawlergo + xray, e.g. python3 scan.py -s baidu.com', type=str)
74 | @click.option('-d', '--attdetail',
75 |               help='SRC asset: recon + crawlergo + xray + C-segment recon + JS secret hunting, e.g. python3 scan.py -d baidu.com', type=str)
76 | @click.option('-t', '--thread', default=5, help='Thread count, 5 by default, e.g. python3 scan.py -t 10 -a http://testphp.vulnweb.com/ ',
77 |               type=int)
78 | @click.option('-r', '--readppp', help='Read a txt file with one URL per line and run an -a scan on each, e.g. python3 scan.py -t 10 -r target.txt',
79 |               type=str)
80 | @click.option('-c', '--clean', help='Clean the saved vulnerability reports, i.e. the files under the save folder', is_flag=True)
81 | @click.option('-p', '--plugins', help='Custom xray plugins')
82 | def init(attone, attsrc, attdetail, thread, readppp, clean, plugins):
83 |     """
84 |     HXnineTails - scanning for the lazy\n
85 |     https://github.com/Cl0udG0d/HXnineTails
86 |     """
87 |     base.init()
88 |     Hx_config.ThreadNum = int(thread)
89 |     if plugins:
90 |         Hx_config.plugins = plugins
91 |     if clean:
92 |         Hx_config.delModel()
93 |         sys.exit()
94 |     if attone:
95 |         oneFoxScan(attone)
96 |     if attsrc:
97 |         foxScan(attsrc)
98 |     if attdetail:
99 |         foxScanDetail(attdetail)
100 |     if readppp:
101 |         pppFoxScan(readppp)
102 |     return
103 | 
104 | 
105 | def pppFoxScan(filename):
106 |     print(f"{Hx_config.yellow}Start pppFoxScan, filename is {filename}{Hx_config.end}")
107 |     try:
108 |         with open(filename, 'r') as f:
109 |             lines = f.readlines()
110 |             for line in lines:
111 |                 target = line.strip()
112 |                 target = base.addHttpHeader(target)
113 |                 Hx_config.ppp_queue.put(target)
114 |     except Exception as e:
115 |         print(e)
116 |         pass
117 |     while not Hx_config.ppp_queue.empty():
118 |         current_target = Hx_config.ppp_queue.get()
119 |         # scan each collected target in turn
120 |         currentfilename = hashlib.md5(current_target.encode("utf-8")).hexdigest()
121 |         if base.checkBlackList(current_target):
122 |             req_pool = crawlergoMain.crawlergoGet(current_target)
123 |             if req_pool == 'pass':
124 |                 continue
125 |             req_pool.add(current_target)
126 |             # the target's page URLs were dynamically crawled by crawlergoGet into the req_pool set
127 |             threadPoolScan(req_pool, currentfilename, current_target)
128 |         else:
129 |             print("Target URL is blacklisted, skipping")
130 |     print(f"{Hx_config.yellow}pppFoxScan End~{Hx_config.end}")
131 |     return
132 | 
133 | 
134 | '''
135 | oneFoxScan(target)
136 | Scan a single target URL rather than every asset under it; example input: www.baidu.com
137 | Flow: input URL sanity check + crawlergo + xray
138 | '''
139 | 
140 | 
141 | def oneFoxScan(target):
142 |     if base.checkBlackList(target):
143 |         target = base.addHttpHeader(target)
144 |         filename = hashlib.md5(target.encode("utf-8")).hexdigest()
145 |         print(f"{Hx_config.yellow}Start foxScan {target}\nfilename : {filename}\n{Hx_config.end}")
146 |         req_pool = crawlergoMain.crawlergoGet(target)  # crawl the target's page URLs into the req_pool set
147 |         if req_pool != 'pass':  # 'pass' means the host was already crawled in a previous run
148 |             req_pool.add(target)
149 |             threadPoolScan(req_pool, filename, target)
150 |     else:
151 |         print("Target URL is blacklisted, skipping")
152 |     print(f"{Hx_config.yellow}InPuT T4rGet {target} Sc3n EnD#{Hx_config.end}")
153 |     return
154 | 
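pppXray.xrayScan is called throughout scan.py, but its body lives in Xray/pppXray.py, outside this listing. A plausible minimal wrapper for orientation, assuming a config value Hx_config.Xray_Path pointing at the xray binary and a save/saveXray output directory; the real module may differ:

import subprocess

import Hx_config

def xrayScan(target, filename):
    # One-shot xray active scan of a single URL; the HTML report lands under save/saveXray/.
    # Hx_config.Xray_Path and the output directory name are assumptions, not taken from pppXray.py.
    cmd = [Hx_config.Xray_Path, "webscan", "--url", target,
           "--html-output", f"save/saveXray/{filename}.html"]
    try:
        subprocess.run(cmd, timeout=1800, check=False)
    except subprocess.TimeoutExpired:
        print(f"xray scan of {target} timed out")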
155 | 
156 | '''
157 | HXnineTails main flow
158 | foxScan(target)
159 | Parameter:
160 |     target  the URL to scan, e.g. baidu.com
161 | What it does:
162 |     collect subdomains of the input -> de-duplicate and store -> crawlergo dynamic crawling -> Xray advanced-edition scan
163 |                                      ↓
164 |                             ARL asset management + vulnerability scan
165 | Output:
166 |     the results of every stage are saved under the matching directory inside the save folder
167 | '''
168 | 
169 | 
170 | def foxScan(target):
171 |     filename = hashlib.md5(target.encode("utf-8")).hexdigest()
172 |     print(f"{Hx_config.green}Start attsrc foxScan {target}\nfilename : {filename}\n{Hx_config.end}")
173 |     base.subScan(target, filename)
174 |     # turn the queue into a list once subdomain collection is done
175 |     _ = base.from_queue_to_list(Hx_config.target_queue)
176 |     base.ArlScan(name=target, target=_)  # start the ARL scan; name is also used as the save-file name
177 |     print(f"{Hx_config.yellow}InPuT T4rGet {target} Sc3n Start!{Hx_config.end}")
178 |     while not Hx_config.target_queue.empty():
179 |         current_target = base.addHttpHeader(Hx_config.target_queue.get())
180 |         try:
181 |             if base.checkBlackList(current_target):
182 |                 # scan each collected target in turn
183 |                 req_pool = crawlergoMain.crawlergoGet(current_target)  # the set of URL paths crawled by crawlergo
184 |                 if req_pool == 'pass':  # 'pass' means the host was already crawled in a previous run
185 |                     continue
186 |                 req_pool.add(current_target)  # include the target itself in the set
187 |                 req_pool = WAF(req_pool).run_detect()  # keep only the URLs with no WAF in front
188 |                 base.save(req_pool, filepath=f"{Hx_config.Crawlergo_save_path}{target}.txt", host=current_target)
189 |                 tempFilename = hashlib.md5(current_target.encode("utf-8")).hexdigest()
190 |                 threadPoolScan(req_pool, tempFilename, target)
191 |         except Exception:
192 |             pass
193 |     print(f"{Hx_config.yellow}InPuT T4rGet {target} Sc3n EnD#{Hx_config.end}")
194 |     return
195 | 
196 | 
197 | '''
198 | foxScanDetail(target)
199 | Detailed recon + scan for an input SRC asset
200 | Very time-consuming, and it re-collects some duplicate information to avoid misses (not recommended)
201 | What it does:
202 |     -> JS secret extraction
203 |     collect subdomains of the input -> de-duplicate and store -> crawlergo dynamic crawling -> Xray advanced-edition scan
204 |     -> C-segment recon
205 | Output:
206 |     the results of every stage are saved under the matching directory inside the save folder
207 | '''
208 | 
209 | 
210 | def foxScanDetail(target):
211 |     thread = ThreadPoolExecutor(Hx_config.ThreadNum)
212 |     filename = hashlib.md5(target.encode("utf-8")).hexdigest()
213 |     print(f"{Hx_config.yellow}Start attsrc foxScan {target}\nfilename : {filename}\n{Hx_config.end}")
214 |     base.subScan(target, filename)
215 |     # collect subdomains
216 |     while not Hx_config.target_queue.empty():
217 |         current_target = Hx_config.target_queue.get()
218 |         # scan each collected target in turn
219 |         if base.checkBlackList(current_target):
220 |             req_pool = crawlergoMain.crawlergoGet(current_target)
221 |             if req_pool == 'pass':  # host was already crawled in a previous run
222 |                 continue
223 |             req_pool.add(current_target)
224 |             i = 0
225 |             all_task = []
226 |             while len(req_pool) != 0:
227 |                 # pop the URLs out of req_pool one by one and scan them
228 |                 temp_url = req_pool.pop()
229 |                 current_filename = hashlib.md5(temp_url.encode("utf-8")).hexdigest()
230 |                 i += 1
231 |                 one_t = thread.submit(threadPoolDetailScan, temp_url, current_filename)
232 |                 all_task.append(one_t)
233 |                 if i == 5 or len(req_pool) == 0:
234 |                     i = 0
235 |                     wait(all_task, return_when=ALL_COMPLETED)
236 |                     all_task = []
237 |         else:
238 |             print("Target URL is blacklisted, skipping")
239 |     print(f"{Hx_config.yellow}InPuT T4rGet {target} Sc3n EnD#{Hx_config.end}")
240 |     return
241 | 
242 | 
243 | '''
244 | Smoke-test notes
245 | Three attack parameters are supported:
246 | 1. -a --attone    single URL: crawlergo dynamic crawling + xray scan only, e.g. https://www.baidu.com
247 | 2. -s --attsrc    SRC asset: recon + ARL + crawlergo + xray, e.g. baidu.com
248 | 3. -d --attdetail SRC asset: recon + crawlergo + xray + C-segment recon + JS secret hunting, e.g. baidu.com
249 | '''
250 | 
251 | 
252 | def main():
253 |     try:
254 |         Hx_config.logo()
255 |         init.main(standalone_mode=False)
256 |     except Exception as e:
257 |         print(e)
258 |         pass
259 | 
260 | 
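SendNotice is imported from ServerJiang/jiangMain.py, whose body is outside this listing. A plausible minimal version, assuming it pushes progress messages over the ServerChan WeChat API with a key named Hx_config.SCKEY; both the key name and the endpoint are assumptions, not read from the module:

import requests

import Hx_config

def SendNotice(text):
    # Push a one-line progress message via ServerChan; delivery failure must not stop the scan.
    url = f"https://sctapi.ftqq.com/{Hx_config.SCKEY}.send"  # assumed endpoint and key name
    try:
        requests.post(url, data={'title': 'HXnineTails', 'desp': text}, timeout=5)
    except requests.RequestException:
        print(f"SendNotice failed: {text}")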
261 | # def main(argv):
262 | #     config.logo()
263 | #     base.init()
264 | #     try:
265 | #         opts, args = getopt.getopt(argv, "ha:s:d:r:t:c",
266 | #                                    ["help", "attone=", "attsrc=", "attdetail=", "readppp=", "thread=", "clean"])
267 | #     except getopt.GetoptError:
268 | #         config.scanHelp()
269 | #         sys.exit(2)
270 | #     for opt, arg in opts:
271 | #         target = arg.strip('/')  # a trailing '/' on the url breaks OneForAll's file saving
272 | #         filename = arg
273 | #         if opt in ("-h", "--help"):
274 | #             config.scanHelp()
275 | #             sys.exit()
276 | #         elif opt in ("-t", "--thread"):
277 | #             config.ThreadNum = int(arg)
278 | #         elif opt in ("-a", "--attone"):
279 | #             oneFoxScan(target)
280 | #         elif opt in ("-s", "--attsrc"):
281 | #             foxScan(target)
282 | #         elif opt in ("-d", "--attdetail"):
283 | #             foxScanDetail(target)
284 | #         elif opt in ("-r", "--readppp"):
285 | #             pppFoxScan(filename)
286 | #         elif opt in ("-c", "--clean"):
287 | #             config.delModel()
288 | #             sys.exit()
289 | #         else:
290 | #             config.scanHelp()
291 | #             sys.exit()
292 | #     return
293 | 
294 | 
295 | if __name__ == '__main__':
296 |     main()
297 | 
--------------------------------------------------------------------------------
/subDomainsBrute/subDomainsBruteMain.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | import Hx_config
4 | import base
5 | 
6 | '''
7 | subDomainsBruteScan(target)
8 | Parameters:
9 |     target  the domain whose subdomains should be collected, e.g. baidu.com
10 | What it does:
11 |     runs subDomainsBrute to collect subdomains and pushes the results into the sub_queue queue
12 | Output:
13 |     none
14 | '''
15 | 
16 | 
17 | def subDomainsBruteScan(target, filename):
18 |     print(f"{Hx_config.yellow}{target} subDomainsBruteScan Scan Start ~{Hx_config.end}")
19 |     subDomainsBrute_py = '{}subDomainsBrute.py'.format(Hx_config.subDomainsBrute_Path)
20 |     saveFilePath = '{}{}.txt'.format(Hx_config.Temp_path, base.url_http_delete(filename))
21 |     scanCommand = "{} {} -t 10 --output {} {}".format(Hx_config.PYTHON, subDomainsBrute_py, saveFilePath,
22 |                                                       base.url_http_delete(target))
23 |     print(f"{Hx_config.blue}{scanCommand}{Hx_config.end}")
24 |     os.system(scanCommand)
25 |     if os.path.exists(saveFilePath):
26 |         with open(saveFilePath) as f:
27 |             lines = f.readlines()
28 |         for line in lines:
29 |             temp_url = line.split()[0].rstrip('\n')
30 |             # print(temp_url)
31 |             Hx_config.sub_queue.put(temp_url)
32 | 
33 |     print(f"{Hx_config.yellow}{target} subDomainsBruteScan Scan End ~{Hx_config.end}")
34 |     print(f"{Hx_config.green}subDomainsBrute finished! Current url count: {Hx_config.sub_queue.qsize()}{Hx_config.end}")
35 |     return
36 | 
37 | 
38 | def main():
39 |     # filename=hash('baidu.com')
40 |     subDomainsBruteScan('wkj.work', "aa")
41 |     return
42 | 
43 | 
44 | if __name__ == '__main__':
45 |     main()
46 | 
--------------------------------------------------------------------------------
/test.py:
--------------------------------------------------------------------------------
1 | import json
2 | import random
3 | def GetHeaders():
4 |     try:
5 |         with open('Useragent.json', 'r') as f:
6 |             data = json.load(f)
7 |         data_browsers = data['browsers']
8 |         data_randomize = list(data['randomize'].values())
9 |         browser = random.choice(data_randomize)
10 |         headers = {'User-Agent': random.choice(data_browsers[browser])}
11 | 
12 |         return headers
13 |     except Exception as e:
14 |         exit("[*]Hx_config.py : GetHeaders error!")
15 | 
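test.py is a scratch copy of the GetHeaders helper that the rest of the project calls through Hx_config. A quick demonstration of how the random User-Agent header it builds might be used, assuming Useragent.json has the 'browsers'/'randomize' layout the function reads:

import requests

from test import GetHeaders  # the scratch copy above; the project normally uses Hx_config.GetHeaders

headers = GetHeaders()  # e.g. {'User-Agent': 'Mozilla/5.0 (...)'}
resp = requests.get('https://example.com', headers=headers, timeout=3)
print(resp.status_code, headers['User-Agent'])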
--------------------------------------------------------------------------------
/waf.py:
--------------------------------------------------------------------------------
1 | import re
2 | import requests
3 | from concurrent.futures import ThreadPoolExecutor
4 | from urllib.parse import urlparse
5 | 
6 | import Hx_config
7 | 
8 | '''
9 | Input: the list of URLs to check
10 | What it does: detect whether each URL sits behind a WAF
11 | Output: the list of URLs without a WAF
12 | '''
13 | 
14 | 
15 | class WAF(object):
16 |     def __init__(self, __list):
17 |         self.__list = __list
18 |         self.__result = []
19 |         self.__waf_info()
20 | 
21 |     def __once_detect(self, url):
22 |         headers = Hx_config.GetHeaders()
23 |         headers["Referer"] = url
24 |         try:
25 |             resp = requests.get(url, headers=headers, timeout=3)
26 |             if resp.status_code < 400:
27 |                 if self.__identify(resp.headers, resp.text):
28 |                     parse = urlparse(resp.url)
29 |                     new_url = "%s://%s/" % (parse.scheme, parse.netloc)
30 |                     self.__result.append(new_url)  # keep the normalized site root ...
31 |                     self.__result.append(url)      # ... as well as the original URL
32 |         except requests.RequestException:
33 |             print(f"{Hx_config.red}WAF~ {url} network connection failed{Hx_config.end}")
34 | 
35 |         return
36 | 
37 |     def run_detect(self):
38 |         print(f"{Hx_config.green}WAF detection running~{Hx_config.end}")
39 |         with ThreadPoolExecutor() as pool:
40 |             pool.map(self.__once_detect, self.__list)
41 |             # leaving the with-block waits for every worker to finish
42 | 
43 |         print(f"{Hx_config.blue}Detection finished. URLs without a WAF:")
44 |         for item in list(set(self.__result)):
45 |             print(item)
46 | 
47 |         print(Hx_config.end)
48 | 
49 |         return list(set(self.__result))
50 | 
51 |     def __waf_info(self):
52 |         self.__mark_list = []
53 |         all_waf = '''WAF:Topsec-Waf|index|index||
54 | WAF:360|headers|X-Powered-By-360wzb|wangzhan\.360\.cn
55 | WAF:360|url|/wzws-waf-cgi/|360wzws
56 | WAF:Anquanbao|headers|X-Powered-By-Anquanbao|MISS
57 | WAF:Anquanbao|url|/aqb_cc/error/|ASERVER
58 | WAF:BaiduYunjiasu|headers|Server|yunjiasu-nginx
59 | WAF:BigIP|headers|Server|BigIP|BIGipServer
60 | WAF:BigIP|headers|Set-Cookie|BigIP|BIGipServer
61 | WAF:BinarySEC|headers|x-binarysec-cache|fill|miss
62 | WAF:BinarySEC|headers|x-binarysec-via|binarysec\.com
63 | WAF:BlockDoS|headers|Server|BlockDos\.net
64 | WAF:CloudFlare|headers|Server|cloudflare-nginx
65 | WAF:Cloudfront|headers|Server|cloudfront
66 | WAF:Cloudfront|headers|X-Cache|cloudfront
67 | WAF:Comodo|headers|Server|Protected by COMODO
68 | WAF:IBM-DataPower|headers|X-Backside-Transport|\A(OK|FAIL)
69 | WAF:DenyAll|headers|Set-Cookie|\Asessioncookie=
70 | WAF:dotDefender|headers|X-dotDefender-denied|1
71 | WAF:Incapsula|headers|X-CDN|Incapsula
72 | WAF:Jiasule|headers|Set-Cookie|jsluid=
73 | WAF:KSYUN|headers|Server|KSYUN ELB
74 | WAF:KONA|headers|Server|AkamaiGHost
75 | WAF:ModSecurity|headers|Server|Mod_Security|NOYB
76 | WAF:NetContinuum|headers|Cneonction|\Aclose
77 | WAF:NetContinuum|headers|nnCoection|\Aclose
78 | WAF:NetContinuum|headers|Set-Cookie|citrix_ns_id
79 | WAF:Newdefend|headers|Server|newdefend
80 | WAF:NSFOCUS|headers|Server|NSFocus
81 | WAF:Safe3|headers|X-Powered-By|Safe3WAF
82 | WAF:Safe3|headers|Server|Safe3 Web Firewall
83 | WAF:Safedog|headers|X-Powered-By|WAF/2\.0
84 | WAF:Safedog|headers|Server|Safedog
85 | WAF:Safedog|headers|Set-Cookie|Safedog
86 | WAF:SonicWALL|headers|Server|SonicWALL
87 | WAF:Stingray|headers|Set-Cookie|\AX-Mapping-
88 | WAF:Sucuri|headers|Server|Sucuri/Cloudproxy
89 | WAF:Usp-Sec|headers|Server|Secure Entry Server
90 | WAF:Varnish|headers|X-Varnish|.*?
91 | WAF:Varnish|headers|Server|varnish
92 | WAF:Wallarm|headers|Server|nginx-wallarm
93 | WAF:WebKnight|headers|Server|WebKnight
94 | WAF:Yundun|headers|Server|YUNDUN
95 | WAF:Yundun|headers|X-Cache|YUNDUN
96 | WAF:Yunsuo|headers|Set-Cookie|yunsuo
97 | '''
98 |         marks = all_waf.strip().splitlines()  # one signature per line
99 |         for mark in marks:
100 |             name, location, key, value = mark.strip().split("|", 3)
101 |             self.__mark_list.append([name, location, key, value])
102 | 
103 |     def __identify(self, header, html):  # returns False as soon as any WAF signature matches
104 |         for line in self.__mark_list:
105 |             name, location, key, reg = line
106 |             if location == "headers":
107 |                 if key in header and re.search(reg, header[key], re.I):
108 |                     return False
109 |             elif location == "index":
110 |                 if re.search(reg, html, re.I):
111 |                     return False
112 | 
113 |         return True
114 | 
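Every mark line in all_waf follows the layout WAF:name|location|key|value-regex that __waf_info splits on. A standalone illustration of the header rule that __identify applies, using a made-up response-header dict:

import re

mark = "WAF:CloudFlare|headers|Server|cloudflare-nginx"
name, location, key, reg = mark.strip().split("|", 3)

sample_headers = {"Server": "cloudflare-nginx"}  # fabricated response headers
if location == "headers" and key in sample_headers and re.search(reg, sample_headers[key], re.I):
    print(f"{name} matched -> __identify would return False and the URL would be dropped")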
115 | 
116 | if __name__ == '__main__':
117 |     list1 = WAF(['http://59.63.200.79:8014/dom_xss/', 'https://qq.com'])
118 |     list1.run_detect()
119 | 
--------------------------------------------------------------------------------
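Taken together, the modules above compose into one pipeline. A minimal sketch of driving them directly from Python instead of through scan.py's CLI; paths and settings come from the untracked Hx_config.py, so this is an illustration rather than a drop-in script:

import hashlib

import base
from crawlergo import crawlergoMain
from waf import WAF
from Xray import pppXray

base.init()                                         # create the save/ directory layout
target = base.addHttpHeader('testphp.vulnweb.com')  # -> https://testphp.vulnweb.com
if base.checkBlackList(target):
    req_pool = crawlergoMain.crawlergoGet(target)   # returns 'pass' if the host was crawled before
    if req_pool != 'pass':
        req_pool.add(target)
        for url in WAF(req_pool).run_detect():      # keep only the WAF-free URLs
            pppXray.xrayScan(url, hashlib.md5(url.encode()).hexdigest())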