├── README.md
└── main.py
/README.md:
--------------------------------------------------------------------------------
# X-Proxy

## About
Proxy scraper and checker. Scrapes proxies from 40+ sources.

## Picture


### Install

Download the pycurl .exe installer that matches your Python version from
https://dl.bintray.com/pycurl/pycurl/ (the ```proxy_checker``` package depends on pycurl). Then install the remaining dependencies:
```
pip install requests
pip install colorama
pip install proxy_checker
```
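If you prefer, the same three packages can be installed with a single command:
```
pip install requests colorama proxy_checker
```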

### Then run the script
```
python main.py
```
Make sure you are in the same directory as the Python file.

### How to use
- For the proxy scraper, wait until it finishes; scraping takes approximately 1 minute. All scraped proxies are then written to ```scraped.txt```.
- For the proxy checker, drag and drop the text file you want to check onto the program, or type the file name [e.g. scraped.txt] when asked for the file path. Then enter the number of threads you want [e.g. 1200].

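Each output file stores one proxy per line in ```ip:port``` format. As a quick sketch of how a result can be used afterwards (the address below is a hypothetical placeholder, not real output), route a ```requests``` call through one entry from ```scraped.txt```:
```
import requests

# Hypothetical entry taken from scraped.txt (one ip:port per line).
proxy = "1.2.3.4:8080"
proxies = {"http": f"http://{proxy}", "https": f"http://{proxy}"}

# A dead proxy raises a requests exception here, which is roughly what
# the checker classifies as [DEAD].
response = requests.get("https://example.com", proxies=proxies, timeout=5)
print(response.status_code)
```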
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
from os import system
from colorama import Fore, init, Style
import requests
import re
import time
import threading
from proxy_checker import ProxyChecker
from sys import stdout

# colorama must be initialised once so ANSI colours render on Windows consoles.
init()

lock = threading.Lock()


class UI:
    @staticmethod
    def banner():
        banner = f'''
{Fore.LIGHTBLACK_EX}discord.gg/e8Qy8JKbUK{Style.RESET_ALL}
\t\t\t ██╗ ██╗ ██████╗ ██████╗ ██████╗ ██╗ ██╗██╗ ██╗
\t\t\t ╚██╗██╔╝ ██╔══██╗██╔══██╗██╔═══██╗╚██╗██╔╝╚██╗ ██╔╝
\t\t\t ╚███╔╝█████╗██████╔╝██████╔╝██║ ██║ ╚███╔╝ ╚████╔╝
\t\t\t ██╔██╗╚════╝██╔═══╝ ██╔══██╗██║ ██║ ██╔██╗ ╚██╔╝
\t\t\t ██╔╝ ██╗ ██║ ██║ ██║╚██████╔╝██╔╝ ██╗ ██║
\t\t\t ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝
\t\t\t {Fore.LIGHTBLACK_EX}by Nightfall#2512{Style.RESET_ALL}
'''
        return banner

    @staticmethod
    def menu():
        menu = f'''
[{Fore.RED}1{Style.RESET_ALL}] Proxy Scrape
[{Fore.RED}2{Style.RESET_ALL}] Proxy Check
[{Fore.RED}3{Style.RESET_ALL}] Exit
'''
        return menu


def write(arg):
    # Serialise printing so output from concurrent checker threads
    # does not interleave.
    with lock:
        stdout.flush()
        print(arg)
class XProxy:
    # [url, regex template] pairs; %ip% and %port% are placeholders that
    # _scrape() expands into capture groups.
    proxy_w_regex = [
        ["http://spys.me/proxy.txt", "%ip%:%port% "],
        ["http://www.httptunnel.ge/ProxyListForFree.aspx", " target=\"_new\">%ip%:%port%"],
        ["https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.json", "\"ip\":\"%ip%\",\"port\":\"%port%\","],
        ["https://raw.githubusercontent.com/fate0/proxylist/master/proxy.list", '"host": "%ip%".*?"country": "(.*?){2}",.*?"port": %port%'],
        ["https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list.txt", '%ip%:%port% (.*?){2}-.-S \\+'],
        # HTML-table sources: cell attributes vary between these sites, so
        # the opening tags are matched with <td[^>]*>.
        ["https://www.us-proxy.org/", "<tr><td[^>]*>%ip%<\\/td><td[^>]*>%port%<\\/td><td[^>]*>(.*?){2}<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>(.*?)<\\/td><td[^>]*>.*?<\\/td><\\/tr>"],
        ["https://free-proxy-list.net/", "<tr><td[^>]*>%ip%<\\/td><td[^>]*>%port%<\\/td><td[^>]*>(.*?){2}<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>(.*?)<\\/td><td[^>]*>.*?<\\/td><\\/tr>"],
        ["https://www.sslproxies.org/", "<tr><td[^>]*>%ip%<\\/td><td[^>]*>%port%<\\/td><td[^>]*>(.*?){2}<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>(.*?)<\\/td><td[^>]*>.*?<\\/td><\\/tr>"],
        ['https://www.socks-proxy.net/', "%ip%:%port%"],
        ['https://free-proxy-list.net/uk-proxy.html', "<tr><td[^>]*>%ip%<\\/td><td[^>]*>%port%<\\/td><td[^>]*>(.*?){2}<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>(.*?)<\\/td><td[^>]*>.*?<\\/td><\\/tr>"],
        ['https://free-proxy-list.net/anonymous-proxy.html', "<tr><td[^>]*>%ip%<\\/td><td[^>]*>%port%<\\/td><td[^>]*>(.*?){2}<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>.*?<\\/td><td[^>]*>(.*?)<\\/td><td[^>]*>.*?<\\/td><\\/tr>"],
54 | ["https://www.proxy-list.download/api/v0/get?l=en&t=https", '"IP": "%ip%", "PORT": "%port%",'],
55 | ["https://api.proxyscrape.com/?request=getproxies&proxytype=http&timeout=6000&country=all&ssl=yes&anonymity=all", "%ip%:%port%"],
56 | ["https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt", "%ip%:%port%"],
57 | ["https://raw.githubusercontent.com/shiftytr/proxy-list/master/proxy.txt", "%ip%:%port%"],
58 | ["https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks5.txt", "%ip%:%port%"],
59 | ["https://www.hide-my-ip.com/proxylist.shtml", '"i":"%ip%","p":"%port%",'],
60 | ["https://raw.githubusercontent.com/scidam/proxy-list/master/proxy.json", '"ip": "%ip%",\n.*?"port": "%port%",'],
61 | ['https://www.freeproxychecker.com/result/socks4_proxies.txt', "%ip%:%port%"],
        ['https://proxy50-50.blogspot.com/', '%ip%<\\/td><td[^>]*>%port%<\\/td>'],
        ['http://free-fresh-proxy-daily.blogspot.com/feeds/posts/default', "%ip%:%port%"],
        ['http://www.live-socks.net/feeds/posts/default', "%ip%:%port%"],
        ['http://www.socks24.org/feeds/posts/default', "%ip%:%port%"],
        ['http://www.proxyserverlist24.top/feeds/posts/default', "%ip%:%port%"],
        ['http://proxysearcher.sourceforge.net/Proxy%20List.php?type=http', "%ip%:%port%"],
        ['http://proxysearcher.sourceforge.net/Proxy%20List.php?type=socks', "%ip%:%port%"],
        ['https://www.my-proxy.com/free-anonymous-proxy.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-transparent-proxy.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-socks-4-proxy.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-socks-5-proxy.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-2.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-3.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-4.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-5.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-6.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-7.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-8.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-9.html', '%ip%:%port%'],
        ['https://www.my-proxy.com/free-proxy-list-10.html', '%ip%:%port%'],
    ]
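    # For reference, _scrape() below expands the placeholders in a template
    # into capture groups, e.g. "%ip%:%port%" becomes
    # "([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}):([0-9]{1,5})",
    # so re.findall() yields (ip, port) tuples such as ('1.2.3.4', '8080').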

    # Sources that already serve plain ip:port lists, scanned with a
    # generic pattern in scrape_direct().
    proxy_direct = [
        'https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all',
        'https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks4&timeout=5000&country=all&ssl=all&anonymity=all',
        'https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks5&timeout=5000&country=all&ssl=all&anonymity=all',
        'https://www.proxyscan.io/download?type=http',
        'https://www.proxyscan.io/download?type=https',
        'https://www.proxyscan.io/download?type=socks4',
        'https://www.proxyscan.io/download?type=socks5',
        'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',
        'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt',
        'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks5.txt',
        'https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/http.txt',
        'https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks4.txt',
        'https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks5.txt',
        'https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/https.txt',
        'https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt',
        'https://multiproxy.org/txt_all/proxy.txt',
        'http://rootjazz.com/proxies/proxies.txt',
        'http://ab57.ru/downloads/proxyold.txt',
        'https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt',
        'https://proxy-spider.com/api/proxies.example.txt',
        'https://raw.githubusercontent.com/opsxcq/proxy-list/master/list.txt',
        'https://www.proxy-list.download/api/v1/get?type=socks4',
        'https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt',
    ]

    # Browser-style user agent sent with every scraping request.
    headers = {"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36"}

    def __init__(self):
        self.proxy_output = []
        self.scrape_counter = 0
        self.checked_counter = 0

    def _update_title(self):
        # Runs forever on a daemon thread (see background_task) and keeps
        # the console title updated with elapsed time and counters.
        while True:
            elapsed = time.strftime('%H:%M:%S', time.gmtime(time.time() - self.start))
            system('title X-Proxy - Elapsed: %s ^| Scraped: %s ^| Checked: %s' % (elapsed, self.scrape_counter, self.checked_counter))
            time.sleep(0.4)

    def file_read(self, name):
        with open(name, 'r', encoding='UTF-8') as f:
            text = [line.strip('\n') for line in f]
        return text

    def file_write(self, name, contents):
        with open(name, 'w', encoding='UTF-8') as f:
            for x in contents:
                f.write(x + '\n')

    def background_task(self):
        self.start = time.time()
        threading.Thread(target=self._update_title, daemon=True).start()

    def get_proxies(self):
        return self.proxy_output
class ProxyScrape(XProxy):
    def _scrape(self, url, custom_regex):
        try:
            proxylist = requests.get(url, timeout=5, headers=self.headers).text
            # Expand the %ip%/%port% placeholders into capture groups.
            custom_regex = custom_regex.replace('%ip%', '([0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3})')
            custom_regex = custom_regex.replace('%port%', '([0-9]{1,5})')

            for proxy in re.findall(re.compile(custom_regex), proxylist):
                self.proxy_output.append(proxy[0] + ":" + proxy[1])
                write(' > ' + proxy[0] + ":" + proxy[1])
                self.scrape_counter += 1

        except requests.exceptions.RequestException:
            write('Requests-related error occurred.')

    def scrape_regex(self):
        for source in self.proxy_w_regex:
            self._scrape(source[0], source[1])

    def scrape_direct(self):
        for source in self.proxy_direct:
            try:
                page = requests.get(source, timeout=5, headers=self.headers).text
                for proxy in re.findall(re.compile('([0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}):([0-9]{1,5})'), page):
                    self.proxy_output.append(proxy[0] + ':' + proxy[1])
                    write(' > ' + proxy[0] + ':' + proxy[1])
                    # Count direct finds too so the title counter stays accurate.
                    self.scrape_counter += 1

            except requests.exceptions.RequestException:
                write('Requests-related error occurred.')


class ProxyCheck(XProxy):
    def __init__(self):
        super().__init__()
        print('Loading..')
        self.checker = ProxyChecker()
        system('cls')

    def check(self, list_):
        for x in list_:
            # check_proxy() returns a dict of proxy details, or a falsy
            # value when the proxy is dead.
            c = self.checker.check_proxy(x)
            if c:
                # Example line (illustrative values):
                #   [ALIVE] 1.2.3.4:8080 | Elite | Timeout: 123 US http
                write(Fore.GREEN + '[ALIVE] ' + x + ' | ' + c['anonymity'] + ' | ' + 'Timeout: ' + str(c['timeout']) + ' ' + c['country_code'] + Style.RESET_ALL + ' ' + c['protocols'][0])
                with open('all_alive.txt', 'a', encoding='UTF-8') as f:
                    f.write(x + '\n')

                # Also sort the proxy into a per-protocol file.
                if c['protocols'][0] == 'http':
                    with open('http_alive.txt', 'a', encoding='UTF-8') as f:
                        f.write(x + '\n')

                elif c['protocols'][0] == 'socks4':
                    with open('socks4_alive.txt', 'a', encoding='UTF-8') as f:
                        f.write(x + '\n')

                elif c['protocols'][0] == 'socks5':
                    with open('socks5_alive.txt', 'a', encoding='UTF-8') as f:
                        f.write(x + '\n')

                self.checked_counter += 1

            else:
                write(Fore.RED + '[DEAD] ' + x + Style.RESET_ALL)
                with open('dead_proxies.txt', 'a', encoding='UTF-8') as f:
                    f.write(x + '\n')
                self.checked_counter += 1

def main():
    x = UI()
    p = ProxyScrape()
    system('title X-Proxy by Nightfall#2512 ^| AIO Proxy Tool')
    system('cls')

    print(x.banner())
    print(x.menu())

    print('\n')

    try:
        user_input = int(input(f'[{Fore.RED}>{Style.RESET_ALL}] > '))
        if user_input == 1:
            system('cls')
            print(x.banner())
            p.background_task()
            print('Scraping proxies...')
            p.scrape_regex()
            p.scrape_direct()

            output = p.get_proxies()

            print('\nChecking for duplicates..')
            print('Current length:', len(output))
            clean_output = list(set(output))
            print('Length after removing duplicates:', len(clean_output))

            print('Writing to scraped.txt..')
            p.file_write('scraped.txt', clean_output)
            print('Finished.')
            system('pause>nul')

        elif user_input == 2:
            pc = ProxyCheck()

            system('cls')
            print(x.banner())

            # Drag-and-drop on Windows wraps the path in quotes; strip them.
            path = input('Path: ')
            new_path = path.replace('"', '')

            proxy_list = pc.file_read(new_path)

            thread_count = int(input('Enter number of threads [e.g. 200]: '))
            print('Loading..')
            threads = []
            system('cls')
            print(x.banner())

            pc.background_task()

            # Give each thread an equal contiguous slice of the proxy list.
            for i in range(thread_count):
                chunk = proxy_list[int(len(proxy_list) / thread_count * i): int(len(proxy_list) / thread_count * (i + 1))]
                t = threading.Thread(target=pc.check, args=(chunk,))
                threads.append(t)
                t.start()

            for t in threads:
                t.join()

            print('Finished.')
            system('pause>nul')

        elif user_input == 3:
            # The menu advertises [3] Exit, so honour it.
            raise SystemExit

        else:
            print('Invalid!')
            main()

    except ValueError:
        # Non-numeric input: show the menu again.
        main()


if __name__ == '__main__':
    main()

--------------------------------------------------------------------------------