├── ProxyPy.exe
├── README.md
├── LICENSE
├── .gitignore
└── ProxyPy.py

/ProxyPy.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/41alderson/ProxyPy/HEAD/ProxyPy.exe
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
# ProxyPy
A High-Speed, All-In-One Proxy Scraper.\
Scrape Tens Of Thousands Of Proxies Within Minutes.\
Enjoy Cracking.

Run `python ProxyPy.py` and pick an option from the menu; the scraped lists
are saved to ProxyPy_http.txt, ProxyPy_socks4.txt and ProxyPy_socks5.txt.

Tested: The Proxies Perform Well With\
Storm.\
OpenBullet.
--------------------------------------------------------------------------------

/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2019 41alderson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------

/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
--------------------------------------------------------------------------------

/ProxyPy.py:
--------------------------------------------------------------------------------
import requests
from bs4 import BeautifulSoup
from time import sleep

# Placeholder user agent (currently unused by the requests below).
user_agent = ["Mozilla/5.0 (platform; rv:geckoversion) Gecko/geckotrail Firefox/firefoxversion"]

url_list_https = "https://api.proxyscrape.com?request=getproxies&proxytype=http&timeout=10000&country=all&ssl=all&anonymity=all"
url_list_socks4 = "https://api.proxyscrape.com?request=getproxies&proxytype=socks4&timeout=10000&country=all"
url_list_socks5 = "https://api.proxyscrape.com?request=getproxies&proxytype=socks5&timeout=10000&country=all"
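
# The proxyscrape API above takes its filters as query parameters:
# request=getproxies returns a plain-text list (one ip:port per line),
# proxytype selects http/socks4/socks5, timeout=10000 appears to be the
# maximum allowed proxy latency in milliseconds, and country/ssl/anonymity
# set to "all" disable those filters.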


class scrape_proxy:

    @staticmethod
    def http():
        sleep(2)
        print('\n\nStarting Scraping Http/Https Proxies....')

        # 'wb' creates the output file if needed and clears any previous run.
        r = requests.get(url_list_https)
        with open('ProxyPy_http.txt', 'wb') as f:
            f.write(r.content)
        sleep(2)

        url1 = 'https://www.proxy-list.download/api/v1/get?type=http'
        url2 = 'https://www.proxy-list.download/api/v1/get?type=https'
        url3 = 'https://cyber-hub.net/proxy/http.txt'
        with open('ProxyPy_http.txt', 'ab') as w:
            w.write(requests.get(url1).content)
            w.write(requests.get(url2).content)
            w.write(requests.get(url3).content)

        # proxy-daily.com serves its list inside a single <div>; strip the
        # wrapping markup so only the newline-separated proxies remain.
        urll = 'https://www.proxy-daily.com/'
        r = requests.get(urll).text
        soup = BeautifulSoup(r, features='html.parser')
        k = soup.find('div', {'class': 'centeredProxyList freeProxyStyle'})
        rep = str(k).replace('<div class="centeredProxyList freeProxyStyle">', '')
        rep = rep.replace('</div>', '')
        with open('ProxyPy_http.txt', 'a') as ww:
            ww.write(rep)

        print('Leeching Done Successfully')
        sleep(2)

        print('Removing Duplicates...')
        sleep(1)

        # Deduplicate the combined list before reporting the total.
        with open('ProxyPy_http.txt') as f:
            proxies = sorted(set(line.strip() for line in f if line.strip()))
        with open('ProxyPy_http.txt', 'w') as f:
            f.write('\n'.join(proxies) + '\n')
        print('Total Http/Https Proxies Available: ', len(proxies))
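
    # Note: the str.replace calls above are a quick way to strip the wrapping
    # <div> markup; BeautifulSoup can pull the text out directly. A minimal
    # alternative sketch (using the same `k` tag found above):
    #
    #     rep = k.get_text('\n')  # the div's inner text, one entry per line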

    @staticmethod
    def socks4():
        sleep(2)
        print('\n\nStarting Scraping Socks4 Proxies...')

        r = requests.get(url_list_socks4)
        with open('ProxyPy_socks4.txt', 'wb') as f:
            f.write(r.content)

        url = 'https://www.proxy-list.download/api/v1/get?type=socks4'
        rr = requests.get(url)
        with open('ProxyPy_socks4.txt', 'ab') as ff:
            ff.write(rr.content)

        sleep(2)

        # Same proxy-daily.com scrape as in http() above.
        urll = 'https://www.proxy-daily.com/'
        r = requests.get(urll).text
        soup = BeautifulSoup(r, features='html.parser')
        k = soup.find('div', {'class': 'centeredProxyList freeProxyStyle'})
        rep = str(k).replace('<div class="centeredProxyList freeProxyStyle">', '')
        rep = rep.replace('</div>', '')
        with open('ProxyPy_socks4.txt', 'a') as ww:
            ww.write(rep)

        print('Removing Duplicates Please Wait')
        sleep(2)

        # Deduplicate before reporting the total.
        with open('ProxyPy_socks4.txt') as f:
            proxies = sorted(set(line.strip() for line in f if line.strip()))
        with open('ProxyPy_socks4.txt', 'w') as f:
            f.write('\n'.join(proxies) + '\n')
        print('Total Socks4 Proxies Available: ', len(proxies))

    @staticmethod
    def socks5():
        sleep(2)
        print('\n\nStarting Scraping Socks5 Proxies')

        r = requests.get(url_list_socks5)
        with open('ProxyPy_socks5.txt', 'wb') as f:
            f.write(r.content)

        url = 'https://www.proxy-list.download/api/v1/get?type=socks5'
        rr = requests.get(url)
        with open('ProxyPy_socks5.txt', 'ab') as ff:
            ff.write(rr.content)

        sleep(2)

        print('Removing Duplicates Please Wait')
        sleep(2)

        # Deduplicate before reporting the total.
        with open('ProxyPy_socks5.txt') as f:
            proxies = sorted(set(line.strip() for line in f if line.strip()))
        with open('ProxyPy_socks5.txt', 'w') as f:
            f.write('\n'.join(proxies) + '\n')
        print('Total Socks5 Proxies Available: ', len(proxies))

    @staticmethod
    def get_all():
        scrape_proxy.http()
        scrape_proxy.socks4()
        scrape_proxy.socks5()


def main():
    # Map menu choices to the matching scraper so the loop stays flat.
    options = {
        '1': scrape_proxy.http,
        '2': scrape_proxy.socks4,
        '3': scrape_proxy.socks5,
        '4': scrape_proxy.get_all,
    }

    while True:
        print('''
\t\tMENU
1.Get Http/Https Proxies.
2.Get Socks4 Proxies.
3.Get Socks5 Proxies.
4.Get All Proxies At Once.
  [All Are Saved In Different Files]
5.Exit.
''')

        opt = input('Enter Your Choice: ')

        if opt in options:
            options[opt]()
            sleep(1)
            input('\nPress Enter To Continue...')
            sleep(1)
            print('\nReturning To Main Menu..')
            sleep(1)
        elif opt == '5':
            print('\nSorry To See You Go...\n')
            print('Hope You Will Return To Me Again.')
            sleep(2)
            exit(0)
        else:
            print('Unknown Option...\nPlease Retry....')
            sleep(1.5)


if __name__ == '__main__':
    print('''

______                              ______
| ___ \                            | ___ \ 
| |_/ / _ __   ___  __  __ _   _    | |_/ / _   _ 
|  __/ | '__| / _ \ \ \/ /| | | |   |  __/ | | | |
| |    | |   | (_) | >  < | |_| |   | |    | |_| |
\_|    |_|    \___/ /_/\_\ \__, |   \_|     \__, |
                            __/ |            __/ |
                           |___/            |___/ 

        A ProxyScraper By 41alderson
''')
    sleep(1.5)

    print('''
\t\tAuthor: 41_alderson
\t\tGithub: https://github.com/41alderson/TorrentyPy
\t\tTelegram: @destroyer41 ''')
    sleep(1.5)
    main()
--------------------------------------------------------------------------------