├── LICENSE
├── README.md
├── config.json
├── functions
    └── scrapybacky.py
├── main.py
├── proxy.txt
└── requirements.txt
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2023 0x29A

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# SecHex-Pr0xyHunter V1.0 [BETA]
"test version"

# Features
Features:
- Proxy Scraping
- Proxy Testing
- Webhook Integration
- Configuration Loading
- Multithreaded Proxy Testing
- Restart Functionality (testing)
- Error Handling
- Pastebin Integration

# Planned Features
- More proxy support (http, ssl, ...)
17 | - Improve Multithreading 18 | - Custom Tests 19 | - Proxy Filtering 20 | - Rate Limit Handling 21 | - Geo-Locations 22 | - Paid Proxy Support 23 | - Proxy Health Metrics 24 | - Notification System 25 | - GEO-Location Blacklists 26 | 27 | 28 | ![dwdww](https://github.com/SecHex/SecHex-Pr0xyHunter/assets/96635023/3ce1d9bd-1ae0-4118-b02d-84fc55a52746) 29 | 30 | 31 | 32 | # Disclaimer 33 | This tool is only for testing and academic purposes and can only be used where strict consent has been given. Do not use it for illegal purposes! It is the end user’s responsibility to obey all applicable local, state and federal laws. Developers assume no liability and are not responsible for any misuse or damage caused by this tool and software in general. 34 | -------------------------------------------------------------------------------- /config.json: -------------------------------------------------------------------------------- 1 | { 2 | "webhook_url": "lol", 3 | "num_threads": 35, 4 | "proxy_file": "proxy.txt", 5 | "proxy_scraper": false, 6 | "restart_interval": 1520, 7 | "thread_logs": false, 8 | "pastebin_api_key": "your api key fella" 9 | } 10 | 11 | -------------------------------------------------------------------------------- /functions/scrapybacky.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from bs4 import BeautifulSoup 3 | 4 | def socks4scrapy(proxy_file, verbose=True): 5 | url = [ 6 | 'https://www.socks-proxy.net/', 7 | ] 8 | proxies = [] 9 | for url in url: 10 | try: 11 | response = requests.get(url) 12 | soup = BeautifulSoup(response.text, 'html.parser') 13 | 14 | if url == 'https://www.socks-proxy.net/': 15 | table = soup.find('table') 16 | rows = table.find_all('tr') 17 | for row in rows[1:]: 18 | cols = row.find_all('td') 19 | if len(cols) > 0: 20 | ip = cols[0].text.strip() 21 | port = cols[1].text.strip() 22 | country = cols[2].text.strip() 23 | uptime = cols[6].text.strip() 24 | location 
= '' 25 | if country: 26 | location = country.split(',')[0] 27 | 28 | proxy = { 29 | 'ip': ip, 30 | 'port': port, 31 | 'uptime': uptime, 32 | 'location': location 33 | } 34 | proxies.append(proxy) 35 | 36 | except: 37 | print(f"Error scraping {url}") 38 | return proxies 39 | 40 | 41 | def socks4scrapy_no2(proxy_file, verbose=True): 42 | url = "https://www.proxy-list.download/api/v1/get?type=socks4" 43 | proxies = [] 44 | try: 45 | response = requests.get(url) 46 | if response.status_code == 200: 47 | proxy_list = response.text.split('\r\n') 48 | for proxy in proxy_list: 49 | if proxy: 50 | ip, port = proxy.split(':') 51 | proxy_info = { 52 | 'ip': ip, 53 | 'port': port, 54 | 'uptime': '', 55 | 'location': '' 56 | } 57 | proxies.append(proxy_info) 58 | except Exception as e: 59 | print(f"Error scraping {url}") 60 | return proxies 61 | 62 | 63 | def socks5scrapy(proxy_file, verbose=True): 64 | url = "https://www.proxy-list.download/api/v1/get?type=socks5" 65 | proxies = [] 66 | try: 67 | response = requests.get(url) 68 | if response.status_code == 200: 69 | proxy_list = response.text.split('\r\n') 70 | for proxy in proxy_list: 71 | if proxy: 72 | ip, port = proxy.split(':') 73 | proxy_info = { 74 | 'ip': ip, 75 | 'port': port, 76 | 'uptime': '', 77 | 'location': '' 78 | } 79 | proxies.append(proxy_info) 80 | except Exception as e: 81 | print(f"Error scraping {url}") 82 | return proxies 83 | 84 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import threading 3 | import os 4 | import json 5 | import requests 6 | import asyncio 7 | import platform 8 | import time 9 | from concurrent.futures import ThreadPoolExecutor 10 | from colorama import init, Fore 11 | from discord_webhook import DiscordWebhook, DiscordEmbed 12 | 13 | from functions.scrapybacky import socks5scrapy, socks4scrapy, socks4scrapy_no2 14 | 15 | 16 | 17 | 
banner_text = f""" 18 | {Fore.RED} 19 | ⠀⠀⢀⣤⣶⣶⣤⣄⡀ 20 | ⠀⢀⣿⣿⣿⣿⣿⣿⣿⡆ 21 | ⠀⠸⣿⣿⣿⣿⣿⡟⡟⡗ 22 | ⠀⠀⠙⠏⠯⠛⣉⢲⣧⠟ 23 | ⠀⠀⠠⢭⣝⣾⠿⣴⣿⠇ discord.gg/SecHex 24 | ⠀⠀⢐⣺⡿⠁⠀⠈⠉⠀⠀⠀⠀⠀⠀⠀⠀⠀ ⣶⣶⣶⣶⣶⣶⠀ 25 | ⠀⠀⣚⣿⠃ ⣶⣶⣶⣶ 26 | ⢀⣿⣿⣿⣷⢒⣢⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣀⣠⣶⣶⣄⠄ 27 | ⢰⣿⣿⡿⣿⣦⠬⢝⡄⠀⠀⠀⠀⠀⠀⢠⣿⠿⠿⠟⠛⠋⠁ 28 | ⠠⢿⣿⣷⠺⣿⣗⠒⠜⡄⠀⠀⠀⠀⣴⠟⠁ 29 | ⠀⣰⣿⣷⣍⡛⣯⣯⣙⡁⠀⠀⣠⡾⠁ 30 | ⠀⠨⢽⣿⣷⢍⣛⣶⢷⣼⣠⣾⠋ 31 | ⠀⠀⠘⢿⣿⣖⠬⣹⣶⣿⠟⠁ 32 | ⠀⠀⠀⠚⠿⠿⡒⠨⠛⠋ 33 | ⠀⠀⠀⠐⢒⣛⣷ 34 | ⠀⠀⠀⢘⣻⣭⣭ 35 | ⠀⠀⠀⡰⢚⣺⣿ 36 | ⠀⠀⢠⣿⣿⣿⣿⣦⡄ 37 | ⠀⠀⢸⡿⢿⣿⢿⡿⠃ 38 | ⠀⠀⠘⡇⣸⣿⣿⣿⣆ 39 | {Fore.RESET} 40 | """ 41 | banner_bio = f"{Fore.GREEN}Pr0xyHunter V1.1{Fore.RESET}" 42 | banner_server = f"{Fore.GREEN}discord.gg/SecHex{Fore.RESET}" 43 | 44 | print(banner_text) 45 | print(banner_bio) 46 | print(banner_server) 47 | init(autoreset=True) 48 | print_lock = threading.Lock() 49 | timer_thread_stop = threading.Event() 50 | 51 | def set_title(title): 52 | if platform.system() == "Windows": 53 | os.system(f"title {title}") 54 | else: 55 | print(f"\033]0;{title}\007") 56 | 57 | set_title("Pr0xyHunter V1.1") 58 | 59 | 60 | 61 | 62 | 63 | def test_proxy(ip, port, good_proxies): 64 | current_thread = threading.current_thread().name 65 | thread_identifier = f"{Fore.LIGHTCYAN_EX}Thread: {current_thread[-1]}{Fore.RESET}" 66 | 67 | try: 68 | start_time = time.time() 69 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 70 | s.settimeout(5) 71 | s.connect((ip, int(port))) 72 | s.sendall(b"\x04\x01" + int(port).to_bytes(2, byteorder='big') + socket.inet_aton(ip) + b"\x00") 73 | response = s.recv(8) 74 | end_time = time.time() 75 | ping_time = int((end_time - start_time) * 1000) 76 | 77 | if len(response) < 2: 78 | with print_lock: 79 | print(f"{thread_identifier} | {Fore.RED}[BAD]{Fore.RESET} | {ip}:{port} | {Fore.LIGHTRED_EX}Ping: 404{Fore.RESET}") 80 | return 81 | 82 | status = response[1] 83 | if status == 0x5A: 84 | with print_lock: 85 | print(f"{thread_identifier} | {Fore.GREEN}[GOOD]{Fore.RESET} | {ip}:{port} | {Fore.GREEN}Ping: {ping_time}ms{Fore.RESET}") 86 | good_proxies.append(f"{ip}:{port}") 87 | else: 88 | with print_lock: 89 | 
print(f"{thread_identifier} | {Fore.RED}[BAD]{Fore.RESET} | {ip}:{port} | {Fore.LIGHTRED_EX}Ping: 404{Fore.RESET}") 90 | 91 | except: 92 | with print_lock: 93 | print(f"{thread_identifier} | {Fore.RED}[BAD]{Fore.RESET} | {ip}:{port} | {Fore.LIGHTRED_EX}Ping: 404{Fore.RESET}") 94 | finally: 95 | s.close() 96 | 97 | 98 | 99 | def discord_webhook(txt_filename, webhook_url, message=None): 100 | files = {'file': (txt_filename, open(txt_filename, 'rb'), 'text/plain')} 101 | 102 | if message: 103 | data = {'content': message} 104 | else: 105 | data = {} 106 | 107 | response = requests.post(webhook_url, files=files, data=data) 108 | 109 | if response.status_code == 200: 110 | print(f"proxys '{txt_filename}' sent to Discord successfully!") 111 | else: 112 | print(f"Failed to send TXT file to Discord. Status Code: {response.status_code}") 113 | 114 | 115 | def count_active_threads(): 116 | global previous_thread_count 117 | while True: 118 | num_active_threads = threading.active_count() 119 | if num_active_threads != previous_thread_count: 120 | print(f"{Fore.LIGHTCYAN_EX}Active Threads:{Fore.RESET} {num_active_threads}") 121 | previous_thread_count = num_active_threads 122 | time.sleep(5) 123 | 124 | 125 | def create_pastebin(proxies, pastebin_api_key, webhook_url): 126 | try: 127 | pastebin_url = "https://pastebin.com/api/api_post.php" 128 | proxies_data = "\n".join(proxies) 129 | 130 | response = requests.post(pastebin_url, data={ 131 | 'api_dev_key': pastebin_api_key, 132 | 'api_option': 'paste', 133 | 'api_paste_code': proxies_data 134 | }) 135 | 136 | if response.status_code == 200: 137 | pastebin_url = response.text 138 | print(f"Pastebin URL: {pastebin_url}") 139 | discord_webhook(pastebin_url, webhook_url) 140 | else: 141 | print(f"Failed to create Pastebin. 
Status Code: {response.status_code}") 142 | 143 | except Exception as e: 144 | print(f"Failed to create Pastebin: {str(e)}") 145 | 146 | 147 | async def main(): 148 | with open('config.json', 'r') as config_file: 149 | config = json.load(config_file) 150 | webhook_url = config.get('webhook_url', '') 151 | num_threads = config.get('num_threads', 5) 152 | proxy_file = config.get('proxy_file') 153 | proxy_scraper = config.get('proxy_scraper', False) 154 | restart_interval = config.get('restart_interval', None) 155 | active_threads_enabled = config.get('thread_logs', True) 156 | pastebin_api_key = config.get('pastebin_api_key', '') 157 | 158 | if active_threads_enabled: 159 | active_thread_checker = threading.Thread(target=count_active_threads) 160 | active_thread_checker.daemon = True 161 | active_thread_checker.start() 162 | 163 | await asyncio.sleep(4) 164 | 165 | if not webhook_url: 166 | print("Webhook URL is missing in config.json.") 167 | return 168 | 169 | start_time = time.time() 170 | 171 | while True: 172 | if proxy_scraper: 173 | proxies_socks4 = socks4scrapy(proxy_file) 174 | proxies_socks4_no2 = socks4scrapy_no2(proxy_file) 175 | proxies_socks5 = socks5scrapy(proxy_file) 176 | proxies = proxies_socks4 + proxies_socks4_no2 + proxies_socks5 177 | 178 | with open(proxy_file, 'w') as f: 179 | for proxy in proxies: 180 | f.write(f"{proxy['ip']}:{proxy['port']}\n") 181 | 182 | file_name = proxy_file 183 | 184 | with open(file_name, 'r') as f: 185 | proxies = f.readlines() 186 | 187 | good_proxies = [] 188 | 189 | if num_threads == 1: 190 | for proxy in proxies: 191 | ip, port = proxy.strip().split(':') 192 | test_proxy(ip, port, good_proxies) 193 | else: 194 | with ThreadPoolExecutor(max_workers=num_threads) as executor: 195 | for proxy in proxies: 196 | ip, port = proxy.strip().split(':') 197 | executor.submit(test_proxy, ip, port, good_proxies) 198 | num_threads_used = num_threads 199 | 200 | with open("good_proxies.txt", 'w') as f: 201 | for proxy in 
good_proxies: 202 | f.write(proxy + "\n") 203 | 204 | create_pastebin(good_proxies, pastebin_api_key, webhook_url) 205 | num_good_proxies = len(good_proxies) 206 | scan_duration = time.time() - start_time 207 | 208 | embed = DiscordEmbed(title="SecHex-Pr0xyHunter V1.1", color=16777215) 209 | embed.set_description( 210 | f"Found **{num_good_proxies}** good proxies in **{scan_duration:.2f}** seconds using **{num_threads_used}** threads\n[SecHex-Pr0xyHunter](https://github.com/SecHex/SecHex-Pr0xyHunter)") 211 | 212 | webhook = DiscordWebhook(url=webhook_url) 213 | webhook.add_embed(embed) 214 | webhook.execute() 215 | 216 | discord_webhook("good_proxies.txt", webhook_url) 217 | 218 | if restart_interval: 219 | print(f"Rebooting in {restart_interval} seconds...") 220 | await asyncio.sleep(restart_interval) 221 | else: 222 | print("Exiting...") 223 | 224 | if __name__ == "__main__": 225 | asyncio.run(main()) 226 | -------------------------------------------------------------------------------- /proxy.txt: -------------------------------------------------------------------------------- 1 | 162.19.7.57,16506 2 | 41.223.232.34,4145 3 | 192.111.137.34,18765 4 | 102.69.177.236,10081 5 | 170.78.92.98,5678 6 | 123.231.230.58,31196 7 | 69.167.170.149,63087 8 | 89.41.106.8,4145 9 | 82.103.70.227,4145 10 | 67.201.33.10,25283 11 | 192.141.236.3,5678 12 | 142.54.226.214,4145 13 | 202.134.67.102,1080 14 | 45.33.123.178,59479 15 | 192.111.139.163,19404 16 | 184.170.249.65,4145 17 | 109.245.231.73,8192 18 | 68.71.254.6,4145 19 | 24.172.34.114,60133 20 | 199.102.104.70,4145 21 | 91.203.25.28,4153 22 | 192.252.215.5,16137 23 | 51.83.116.2,34877 24 | 37.17.53.108,3629 25 | 103.174.178.131,1020 26 | 138.201.21.238,32849 27 | 182.253.166.54,5678 28 | 66.42.224.229,41679 29 | 103.120.146.32,5678 30 | 192.111.139.165,4145 31 | 202.70.34.72,65530 32 | 192.252.208.70,14282 33 | 51.161.33.206,32779 34 | 85.113.7.142,5678 35 | 203.210.235.91,5678 36 | 162.243.237.104,37030 37 | 
199.102.105.242,4145 38 | 212.83.138.192,33840 39 | 167.172.159.43,49633 40 | 192.111.135.18,18301 41 | 192.252.211.197,14921 42 | 74.119.144.60,4145 43 | 188.93.213.242,1080 44 | 142.54.236.97,4145 45 | 67.213.210.60,48848 46 | 217.17.110.218,4153 47 | 159.65.3.202,42510 48 | 38.113.171.88,57775 49 | 104.200.135.46,4145 50 | 205.196.221.153,58965 51 | 192.111.139.162,4145 52 | 103.220.205.162,4673 53 | 199.58.185.9,4145 54 | 205.196.218.195,37692 55 | 185.140.100.178,5678 56 | 142.54.232.6,4145 57 | 159.89.233.227,6922 58 | 72.37.217.3,4145 59 | 72.49.49.11,31034 60 | 192.252.220.92,17328 61 | 116.97.10.147,17128 62 | 192.252.214.20,15864 63 | 185.2.100.97,64952 64 | 198.8.94.170,4145 65 | 192.252.208.67,14287 66 | 103.164.139.72,24572 67 | 68.71.247.130,4145 68 | 93.116.57.4,4153 69 | 220.247.165.142,9990 70 | 190.108.81.140,59311 71 | 103.245.204.139,1088 72 | 192.111.137.35,4145 73 | 165.154.233.18,45749 74 | 38.242.244.63,50318 75 | 50.250.205.21,32100 76 | 85.100.40.12,5678 77 | 179.253.8.244,7777 78 | 91.243.192.17,3629 79 | 222.252.99.84,5678 80 | 64.90.40.190,15521 81 | 199.102.106.94,4145 82 | 103.21.40.35,4145 83 | 167.71.100.140,34429 84 | 176.9.85.211,30141 85 | 94.253.95.241,3629 86 | 184.170.248.5,4145 87 | 192.111.130.5,17002 88 | 192.111.134.10,4145 89 | 167.172.86.46,10471 90 | 119.148.39.241,9990 91 | 181.209.103.98,5678 92 | 103.52.252.18,5678 93 | 203.98.76.139,4153 94 | 192.111.130.2,4145 95 | 164.138.43.14,1080 96 | 205.186.138.94,41552 97 | 67.213.212.55,25393 98 | 192.111.135.17,18302 99 | 43.252.74.202,1080 100 | 192.252.216.81,4145 101 | 5.1.104.67,33041 102 | 51.68.244.19,56697 103 | 84.236.185.247,61710 104 | 196.25.170.130,4145 105 | 113.11.136.114,4153 106 | 88.198.49.189,57981 107 | 116.202.213.207,54561 108 | 180.191.22.50,4153 109 | 69.61.200.104,36181 110 | 64.90.40.91,17233 111 | 110.74.202.186,4153 112 | 36.67.27.189,49524 113 | 118.27.36.135,10801 114 | 87.126.64.193,4145 115 | 64.111.126.0,9594 116 | 103.38.103.18,1080 117 | 
47.242.234.73,15673 118 | 71.40.17.29,33651 119 | 205.196.222.80,58965 120 | 103.100.188.194,33333 121 | 109.232.106.150,52435 122 | 185.186.17.57,5678 123 | 213.145.134.174,3629 124 | 8.222.143.228,15673 125 | 103.164.139.112,24572 126 | 182.160.126.106,9990 127 | 115.85.84.163,5678 128 | 103.126.173.201,1080 129 | 104.236.14.15,17351 130 | 5.58.66.55,14888 131 | 157.245.6.140,50802 132 | 142.54.228.193,4145 133 | 179.108.158.204,4145 134 | 113.53.91.10,5678 135 | 66.33.223.2,28573 136 | 103.164.139.82,24572 137 | 109.248.236.150,9898 138 | 193.200.151.69,32777 139 | 208.113.221.198,61086 140 | 103.251.83.14,44550 141 | 102.132.76.29,5678 142 | 170.80.71.78,5678 143 | 85.92.164.179,4145 144 | 208.113.199.138,36449 145 | 83.221.194.199,1080 146 | 78.46.184.225,3260 147 | 69.163.160.197,11687 148 | 202.40.184.33,1088 149 | 151.106.13.116,60528 150 | 200.115.157.211,4145 151 | 121.139.218.165,43295 152 | 117.74.125.210,1133 153 | 208.113.162.131,49820 154 | 173.249.47.186,35826 155 | 51.75.126.150,57328 156 | 103.57.222.65,32466 157 | 67.213.212.52,40466 158 | 175.139.179.65,41527 159 | 186.67.152.107,4153 160 | 103.115.252.26,51372 161 | 208.113.172.52,40732 162 | 95.143.8.182,50285 163 | 188.164.199.163,32203 164 | 8.42.71.5,39593 165 | 188.237.60.27,1080 166 | 141.94.26.156,10826 167 | 200.146.229.129,8291 168 | 190.249.169.153,3629 169 | 185.126.235.19,35010 170 | 190.239.24.77,5678 171 | 50.238.47.86,32100 172 | 1.9.213.114,4153 173 | 1.32.59.217,31981 174 | 103.150.110.202,9969 175 | 185.51.92.108,51327 176 | 193.29.62.48,36857 177 | 36.37.244.41,5678 178 | 120.29.153.250,5678 179 | 205.196.208.106,63585 180 | 67.205.11.255,50868 181 | 12.218.209.130,13326 182 | 115.127.83.58,9990 183 | 50.116.23.15,2635 184 | 208.113.221.182,61086 185 | 185.216.18.138,44550 186 | 188.166.234.144,43224 187 | 78.9.110.94,1080 188 | 89.189.174.121,35010 189 | 103.127.23.10,5678 190 | 181.209.96.226,4153 191 | 51.38.155.24,36971 192 | 190.3.72.38,3629 193 | 198.1.78.73,26899 194 | 
77.225.177.14,10801 195 | 185.89.156.130,5678 196 | 139.255.86.226,5678 197 | 203.113.114.94,39417 198 | 110.93.206.62,5678 199 | 46.171.28.162,59311 200 | 81.16.1.71,5678 201 | 200.32.105.86,4153 202 | 46.173.35.229,3629 203 | 208.113.198.191,28853 204 | 198.12.252.88,47205 205 | 51.158.105.157,16379 206 | 188.75.186.152,4145 207 | 83.174.149.18,4145 208 | 209.145.59.247,3847 209 | 97.74.233.206,14016 210 | 185.129.250.183,36332 211 | 92.42.8.21,4145 212 | 210.245.51.230,9898 213 | 212.79.107.116,5678 214 | 51.178.19.189,18711 215 | 162.19.7.50,18715 216 | 188.226.7.249,5678 217 | 69.163.166.4,64097 218 | 143.137.116.142,1080 219 | 45.128.133.177,1080 220 | 163.172.169.27,16379 221 | 163.172.132.238,16379 222 | 208.113.154.48,59927 223 | 103.234.27.198,9990 224 | 103.234.27.162,1080 225 | 185.161.186.133,54321 226 | 103.234.27.170,1080 227 | 67.205.11.205,22762 228 | 103.84.178.193,4153 229 | 208.113.162.119,46221 230 | 173.236.183.125,24749 231 | 176.88.177.197,61080 232 | 208.113.162.90,55034 233 | 212.200.149.242,5678 234 | 200.170.196.94,1080 235 | 95.128.142.76,1080 236 | 67.213.210.167,43029 237 | 45.128.133.65,1080 238 | 14.161.14.106,5678 239 | 5.78.95.229,27352 240 | 205.196.222.139,32654 241 | 91.205.131.110,53339 242 | 81.12.104.35,3629 243 | 208.113.196.222,28853 244 | 181.209.77.204,4153 245 | 202.142.159.204,31026 246 | 5.161.98.204,45055 247 | 36.91.45.11,51299 248 | 173.236.173.31,49979 249 | 199.119.74.208,33333 250 | 69.163.216.57,60963 251 | 64.90.51.36,40743 252 | 208.113.173.15,40732 253 | 173.236.187.145,44013 254 | 190.0.15.18,5678 255 | 37.193.40.16,1080 256 | 64.111.126.85,14492 257 | 177.53.221.89,44196 258 | 207.244.246.147,61400 259 | 173.236.136.62,25918 260 | 45.184.183.9,4145 261 | 190.119.62.42,18 262 | 45.73.0.118,5678 263 | 119.148.31.202,9990 264 | 213.32.66.64,56637 265 | 212.83.143.118,56000 266 | 85.89.184.87,5678 267 | 173.236.169.226,44410 268 | 103.164.139.104,24572 269 | 23.105.170.33,62195 270 | 34.125.67.181,54764 271 | 
173.236.173.159,42523
83.0.244.106,5678
46.188.2.42,5678
64.111.125.96,26922
173.236.180.21,43471
103.207.98.82,41238
117.4.242.216,5678
208.113.152.104,18089
202.131.159.26,5678
41.190.152.130,4673
103.213.118.46,1080
208.113.152.245,18089
212.83.142.149,50056
190.139.7.34,5678
67.205.12.108,46008
103.146.184.19,1085
64.111.125.28,45241
81.199.14.17,1088
212.33.242.249,1080
92.119.74.49,5678
94.232.125.200,5678
103.189.234.161,1080
148.77.34.200,54321
87.117.11.57,1080
117.2.46.2,5678
173.236.181.60,60546
208.113.173.166,17362
212.83.143.151,25364
179.27.86.36,4153
208.113.154.248,18089
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
colorama==0.4.4
pyfiglet==0.8.post1
requests==2.26.0
beautifulsoup4==4.10.0
httpx==0.19.0
discord-webhook
--------------------------------------------------------------------------------