├── url.txt
├── .gitignore
├── README.md
└── main.py

--------------------------------------------------------------------------------
/url.txt:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
.idea
.DS_Store
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# awvs_xray_url_batch-import
A small script for batch-importing URLs when chaining AWVS with xray.

## Usage
Edit `awvs_url` and `Cookie` in `main.py`, and set the IP and port that xray is listening on.

Create a `url.txt` file in the current directory containing the URLs to import, one per line.

```bash
python3 main.py
```

This imports all targets in batch and starts the scans.
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
import requests
import json
from requests.packages.urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# AWVS settings
awvs_url = "https://192.168.2.213:3443"
# Cookie of a logged-in AWVS UI session; the value after "ui_session=" is also sent as the X-Auth header
Cookie = "ui_session=2986ad8c0a5b3df4d7028d5f3c06e936c5ed74164e1863b0cc1394bb38e9ce4a09df26d8477c83df670ecaccbdd50028ff6cbcccecd1e78ebc8fb8c44a5b62e2a"

# Built-in AWVS scan profiles
mod_id = {
    "full_scan": "11111111-1111-1111-1111-111111111111",
    "high_risk_vul": "11111111-1111-1111-1111-111111111112",
    "cross_site_vul": "11111111-1111-1111-1111-111111111116",
    "sql_inject_vul": "11111111-1111-1111-1111-111111111113",
    "weak_pass_vul": "11111111-1111-1111-1111-111111111115",
    "crawl_only": "11111111-1111-1111-1111-111111111117",
    "malware_scan": "11111111-1111-1111-1111-111111111120"
}

mod = mod_id['crawl_only']

# scan speed
speed = "slow"

# xray listener
xray_proxy_ip = "172.17.0.3"
xray_proxy_port = 1111


headersjson = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:73.0) Gecko/20100101 Firefox/73.0",
    "Content-Type": "application/json;charset=UTF-8",
    "X-Auth": Cookie[11:],
    "Origin": awvs_url,
    "Referer": awvs_url,
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-Mode": "cors",
    "Accept": "application/json, text/plain, */*",
    "Cookie": Cookie
}


def add_from_file():
    # Read url.txt line by line and add each URL as an AWVS target
    with open('url.txt', 'r') as fileone:
        for line in fileone:
            urls = line.strip()
            if not urls:
                continue
            url = awvs_url + "/api/v1/targets"
            datajson = {
                'address': urls,
                'description': '',
                'criticality': '10'
            }
            try:
                res = requests.post(url, headers=headersjson, verify=False, data=json.dumps(datajson))
                print("[+] " + urls + " added")
            except Exception:
                print("[-] failed to add " + urls + "! Request to " + url + " failed")
                continue
            # The Location header of the 201 response holds the new target id
            location = res.headers.get('Location', '')
            if "/api/v1/targets/" in location:
                target_id = location.replace('/api/v1/targets/', '')
                set_proxy(urls, target_id)


def set_proxy(target_url, target_id):
    # Point the target's traffic at the xray listener and set the scan speed
    url = awvs_url + '/api/v1/targets/' + target_id + '/configuration'

    proxyjson = {
        "enabled": True,
        "address": xray_proxy_ip,
        "protocol": "http",
        "port": xray_proxy_port
    }

    configjson = {
        "proxy": proxyjson,
        "scan_speed": speed
    }

    try:
        res = requests.patch(url, headers=headersjson, verify=False, data=json.dumps(configjson))
        if res.status_code == 204:
            print("[+] " + target_url + " proxy configured")
    except Exception:
        print("[-] failed to configure proxy for " + target_url)
        return
    try:
        set_scanmod(target_url, target_id)
    except Exception:
        pass


def set_scanmod(target_url, target_id):
    # Create and start a scan for the target with the selected profile
    url = awvs_url + "/api/v1/scans"
    schedule = {
        "disable": False,
        "start_date": None,
        "time_sensitive": False
    }
    datajson = {
        "target_id": target_id,
        "profile_id": mod,
        "schedule": schedule,
        "ui_session_id": "c45eab520de7822aa55c71ad71688136"
    }
    try:
        res = requests.post(url, headers=headersjson, verify=False, data=json.dumps(datajson))
    except Exception:
        return
    if res.status_code == 201:
        print("[+] " + target_url + " scan created, scanning started")
        # The Location header of the 201 response holds the new scan id
        location = res.headers.get('Location', '')
        if "/api/v1/scans/" in location:
            scan_id = location.replace('/api/v1/scans/', '')
            requests.get(awvs_url + "/api/v1/scans/" + scan_id, headers=headersjson, verify=False)


if __name__ == '__main__':
    add_from_file()
--------------------------------------------------------------------------------