├── setup.cfg ├── CHANGES.md ├── MANIFEST.in ├── proxyUtil ├── __init__.py ├── __version__.py ├── logFormatter.py ├── ipExtractor.py ├── dnsChecker.py ├── dnsUtil.py ├── cdnGen.py ├── dnsUrl.py ├── cfRecorder.py ├── v2rayChecker.py ├── data │ └── Clash-Template.yaml └── myUtil.py ├── requirements.txt ├── Makefile ├── LICENSE ├── scripts ├── sslocal2ssURI ├── ssURI2sslocal ├── connectMe ├── shadowChecker └── clashGen ├── .github └── workflows │ └── python-publish.yml ├── setup.py ├── .gitignore └── README.md /setup.cfg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.md 3 | include requirements.txt 4 | include proxyUtil/data/* 5 | -------------------------------------------------------------------------------- /proxyUtil/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding=utf-8 -*- 3 | 4 | from .__version__ import __version__ 5 | from .myUtil import * 6 | from .logFormatter import * 7 | from .dnsUrl import * 8 | from .dnsUtil import * 9 | -------------------------------------------------------------------------------- /proxyUtil/__version__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding=utf-8 -*- 3 | 4 | __title__ = "proxyUtil" 5 | __description__ = "some proxy tools" 6 | __url__ = "https://github.com/mheidari98/proxyUtil" 7 | __version__ = "0.1.3" 8 | __author__ = "Mahdi Heidari" 9 | __author_email__ = "mahdih3idari@gmail.com" 10 | __license__ = "MIT License" 11 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | anyio==3.6.2 2 | attrs==22.2.0 3 | beautifulsoup4==4.11.1 4 | certifi==2023.7.22 5 | cffi==1.15.1 6 | charset-normalizer==2.1.1 7 | cloudflare==2.11.1 8 | cryptography==41.0.4 9 | dnspython==2.3.0 10 | h11==0.14.0 11 | h2==4.1.0 12 | hpack==4.0.0 13 | httpcore==0.16.3 14 | httpx==0.23.3 15 | hyperframe==6.0.1 16 | idna==3.4 17 | jsonlines==3.1.0 18 | markdown-it-py==2.2.0 19 | mdurl==0.1.2 20 | numpy==1.23.4 21 | psutil==5.9.2 22 | pycparser==2.21 23 | Pygments==2.15.0 24 | PySocks==1.7.1 25 | PyYAML==6.0 26 | requests==2.31.0 27 | requests-toolbelt==0.10.1 28 | rfc3986==1.5.0 29 | rich==13.3.1 30 | ruamel.yaml==0.17.21 31 | ruamel.yaml.clib==0.2.7 32 | sniffio==1.3.0 33 | soupsieve==2.3.2.post1 34 | urllib3==1.26.12 35 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: test 2 | 3 | dev: 4 | pip install setuptools pytest black twine flake8 5 | # pip install -U --editable . 6 | 7 | ci: 8 | py.test --junitxml=report.xml 9 | 10 | test: 11 | python3 setup.py test 12 | pytest 13 | 14 | coverage: 15 | py.test --cov-config .coveragerc --verbose --cov-report term --cov-report xml --cov=proxyUtil --junitxml=report.xml tests 16 | 17 | flake8: 18 | black . 
19 | flake8 --ignore=E501,F401,W503 proxyUtil 20 | 21 | clean: 22 | rm -fr build dist .egg proxyUtil.egg-info 23 | rm -fr .pytest_cache coverage.xml report.xml htmlcov 24 | find . | grep __pycache__ | xargs rm -fr 25 | find . | grep "\.pyc" | xargs rm -f 26 | pip uninstall proxyUtil 27 | 28 | install: 29 | python3 setup.py install 30 | 31 | publish: 32 | python3 setup.py sdist bdist_wheel 33 | twine upload dist/* 34 | -------------------------------------------------------------------------------- /proxyUtil/logFormatter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | # https://stackoverflow.com/a/56944256 4 | class CustomFormatter(logging.Formatter): 5 | grey = "\x1b[38;20m" 6 | yellow = "\x1b[33;20m" 7 | red = "\x1b[31;20m" 8 | bold_red = "\x1b[31;1m" 9 | green = "\x1b[32;20m" 10 | reset = "\x1b[0m" 11 | 12 | #format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s (%(filename)s:%(lineno)d)" 13 | format = "%(asctime)s - %(levelname)s - %(message)s" 14 | 15 | FORMATS = { 16 | logging.DEBUG: grey + format + reset, 17 | logging.INFO: green + format + reset, 18 | logging.WARNING: yellow + format + reset, 19 | logging.ERROR: red + format + reset, 20 | logging.CRITICAL: bold_red + format + reset 21 | } 22 | 23 | def format(self, record): 24 | log_fmt = self.FORMATS.get(record.levelno) 25 | formatter = logging.Formatter(log_fmt) 26 | return formatter.format(record) 27 | 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Mahdi Heidari 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /scripts/sslocal2ssURI: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import argparse 3 | from proxyUtil import * 4 | 5 | ch = logging.StreamHandler() 6 | ch.setFormatter(CustomFormatter()) 7 | logging.basicConfig(level=logging.ERROR, handlers=[ch]) 8 | 9 | def main(): 10 | parser = argparse.ArgumentParser(description="ss-local command to shadowsocks URI") 11 | parser.add_argument("-i", "--input", help="ss-local command") 12 | parser.add_argument("-f", "--file", help="file containing ss-local commands") 13 | parser.add_argument("-o", "--output", help="shadowsocks URI(s) output file") 14 | args = parser.parse_args() 15 | 16 | results = [] 17 | 18 | if args.input : 19 | results.append( sslocal2ssURI(args.input) ) 20 | 21 | if args.file : 22 | with open(args.file, 'r') as file: 23 | for line in file: 24 | if "ss-local" in line : 25 | results.append( sslocal2ssURI(line.rstrip()) ) 26 | 27 | outputs = '\n'.join(results) 28 | if args.output : 29 | with open(args.output, 'w') as f : 30 | f.write(outputs) 31 | else : 32 | print(outputs) 33 | 34 | 35 | if __name__ == '__main__': 36 | main() 37 | -------------------------------------------------------------------------------- /scripts/ssURI2sslocal: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import argparse 3 | from proxyUtil import * 4 | 5 | ch = logging.StreamHandler() 6 | ch.setFormatter(CustomFormatter()) 7 | logging.basicConfig(level=logging.ERROR, handlers=[ch]) 8 | 9 | def main(): 10 | parser = argparse.ArgumentParser(description="shadowsocks URI to ss-local command") 11 | parser.add_argument("-i", "--input", help="shadowsocks URI") 12 | parser.add_argument("-f", "--file", help="file containing shadowsocks URIs") 13 | parser.add_argument("-o", "--output", help="ss-local command(s) output file") 14 | parser.add_argument("-l", "--lport", help="local port, default is 1080", default=1080, type=int) 15 | args = parser.parse_args() 16 | 17 | results = [] 18 | 19 | if args.input : 20 | results.append( ssURI2sslocal(args.input, args.lport) ) 21 | 22 | if args.file : 23 | with open(args.file, 'r') as file: 24 | lines = parseContent(file.read().strip(), [ss_scheme]) 25 | for line in lines: 26 | results.append( ssURI2sslocal(line.rstrip(), args.lport) ) 27 | 28 | outputs = '\n'.join(results) 29 | if args.output : 30 | with open(args.output, 'w') as f : 31 | f.write(outputs) 32 | else : 33 | print(outputs) 34 | 35 | 36 | if __name__ == '__main__': 37 | main() 38 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation.
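# Example release flow (an assumption for illustration, kept in sync with
# __version__ in proxyUtil/__version__.py; the `on.push.tags` filter below
# matches any pushed tag):
#   git tag v0.1.3
#   git push origin v0.1.3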
8 | 9 | # ref @ 10 | # https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ 11 | 12 | name: Publish Python 🐍 distributions 📦 to PyPI and TestPyPI 13 | 14 | on: 15 | push: 16 | tags: 17 | - '*' 18 | 19 | permissions: 20 | contents: read 21 | 22 | jobs: 23 | build-n-publish: 24 | name: Build and publish Python 🐍 distributions 📦 to PyPI and TestPyPI 25 | runs-on: ubuntu-latest 26 | 27 | steps: 28 | - uses: actions/checkout@v3 29 | 30 | - name: Set up Python 31 | uses: actions/setup-python@v4 32 | with: 33 | python-version: '3.x' 34 | 35 | - name: Install pypa/build 36 | run: | 37 | python -m pip install --upgrade pip 38 | pip install build setuptools wheel twine 39 | 40 | # https://stackoverflow.com/a/73607710 41 | - name: Build a binary wheel and a source tarball 42 | run: python -m build 43 | # run: python -m build --sdist --wheel --outdir dist/ . 44 | # run: python setup.py sdist bdist_wheel 45 | 46 | 47 | - name: Publish distribution 📦 to Test PyPI 48 | uses: pypa/gh-action-pypi-publish@release/v1 49 | with: 50 | password: ${{ secrets.TEST_PYPI_API_TOKEN }} 51 | repository-url: https://test.pypi.org/legacy/ 52 | 53 | - name: Publish distribution 📦 to PyPI 54 | #if: startsWith(github.ref, 'refs/tags') 55 | uses: pypa/gh-action-pypi-publish@release/v1 56 | with: 57 | password: ${{ secrets.PYPI_API_TOKEN }} 58 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding=utf-8 -*- 3 | 4 | from os import path 5 | from setuptools import find_namespace_packages, setup 6 | 7 | proj_dir = path.dirname(path.realpath(__file__)) 8 | about_file = path.join(proj_dir, "proxyUtil", "__version__.py") 9 | readme_file = path.join(proj_dir, "README.md") 10 | 11 | about = {} 12 | exec(open(about_file, "r", encoding="utf-8").read(), about) 13 | 14 | long_description = open(readme_file, "r", encoding="utf-8").read() 15 | 16 | requirements = open("requirements.txt", "r", encoding="utf-8").read().splitlines() 17 | 18 | setup( 19 | name=about["__title__"], 20 | version=about["__version__"], 21 | 22 | author=about["__author__"], 23 | author_email=about["__author_email__"], 24 | 25 | description=about["__description__"], 26 | long_description=long_description, 27 | long_description_content_type="text/markdown", 28 | 29 | url=about["__url__"], 30 | 31 | license=about["__license__"], 32 | 33 | #packages=find_packages(), # ['proxyUtil'] 34 | packages=find_namespace_packages(exclude=["scripts"]), 35 | 36 | include_package_data=True, 37 | #package_data={'': ['data/*']}, 38 | 39 | install_requires=requirements, 40 | 41 | extras_require={ 42 | "dev": [ 43 | "pytest >= 3.7", 44 | "twine" 45 | ] 46 | }, 47 | 48 | scripts=[ 49 | path.join("scripts", "clashGen"), # scripts/clashGen 50 | path.join("scripts", "connectMe"), 51 | path.join("scripts", "shadowChecker"), 52 | path.join("scripts", "sslocal2ssURI"), 53 | path.join("scripts", "ssURI2sslocal"), 54 | ], 55 | 56 | entry_points={ 57 | "console_scripts": [ 58 | "cdnGen = proxyUtil.cdnGen:main", 59 | "dnsChecker = proxyUtil.dnsChecker:main", 60 | "cfRecorder = proxyUtil.cfRecorder:main", 61 | "ipExtractor = proxyUtil.ipExtractor:main", 62 | "v2rayChecker = proxyUtil.v2rayChecker:main", 63 | ] 64 | }, 65 | 66 | classifiers=[ 67 | # https://pypi.org/classifiers 68 | 'Development Status :: 3 - Alpha', 69 | 'Intended Audience :: Developers', 70 | 
'License :: OSI Approved :: MIT License', 71 | 'Operating System :: POSIX :: Linux', 72 | "Programming Language :: Python :: 3", 73 | ], 74 | 75 | python_requires='>=3', 76 | ) 77 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | tmp/ -------------------------------------------------------------------------------- /proxyUtil/ipExtractor.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | ######################################################################### 3 | # Exctraction IP from shadowsocks, vmess, vless, trojan links # 4 | # # 5 | # Usage: ipExtractor "vmess://..." 
-o output.txt # 6 | # -o: output file # 7 | # Output: # 8 | # IP list # 9 | ########################################################################## 10 | import argparse 11 | import ipaddress 12 | from proxyUtil import * 13 | 14 | ch = logging.StreamHandler() 15 | ch.setFormatter(CustomFormatter()) 16 | logging.basicConfig(level=logging.ERROR, handlers=[ch]) 17 | 18 | 19 | def main(argv=sys.argv): 20 | parser = argparse.ArgumentParser(description="Extract IPs from shadowsocks, vmess, vless, trojan links") 21 | parser.add_argument("-f", "--file", help="file containing proxy links") 22 | parser.add_argument('--stdin', help="get proxies from stdin", action='store_true', default=False) 23 | parser.add_argument('--url', help="get proxies from url") 24 | parser.add_argument('--sort', help="sort output", action='store_true', default=False) 25 | parser.add_argument('-v', "--verbose", help="increase output verbosity", action="store_true", default=False) 26 | parser.add_argument('-o', '--output', help="output file") 27 | args = parser.parse_args(argv[1:]) 28 | 29 | if args.verbose: 30 | logging.getLogger().setLevel(logging.INFO) 31 | 32 | if args.stdin: 33 | proxies = parseContent(sys.stdin.read().strip()) 34 | elif args.file and os.path.isfile(args.file): 35 | with open(args.file, 'r', encoding='UTF-8') as file: 36 | proxies = parseContent(file.read().strip()) 37 | elif args.url: 38 | proxies = ScrapURL(args.url) 39 | else: 40 | logging.error("No proxy to check") 41 | return 42 | 43 | logging.info(f"Total proxies: {len(proxies)}") 44 | 45 | ips = list(filter(None, map(extractIPs, proxies))) 46 | 47 | if args.sort: 48 | ips = [ip for ip in ips if isIPv4(ip) or isIPv6(ip)] 49 | ips = sorted(ips, key=lambda ip: (ipaddress.ip_address(ip).version, int(ipaddress.ip_address(ip)))) # IPv4 before IPv6, numeric within each family; bare IPv4Address would raise on IPv6 entries 50 | 51 | outputs = '\n'.join(ips) 52 | if args.output : 53 | with open(args.output, 'w', encoding='UTF-8') as f : 54 | f.write(outputs) 55 | else : 56 | print(outputs) 57 | 58 | 59 | if __name__ == "__main__": 60 | main() 61 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # proxyUtil 2 | 3 | ## General info 4 | some proxy tools 5 | 6 | ## Requirements 7 | - [python 3](https://www.python.org/downloads) 8 | - [install shadowsocks](https://github.com/shadowsocks/shadowsocks-libev#installation) 9 | ```console 10 | sudo apt install shadowsocks-libev 11 | ``` 12 | - [install v2ray](https://www.v2fly.org/en_US/guide/install.html) 13 | ```console 14 | sudo bash <(curl -L https://raw.githubusercontent.com/v2fly/fhs-install-v2ray/master/install-release.sh) 15 | ``` 16 | - [install xray](https://github.com/XTLS/Xray-core#installation) 17 | ```console 18 | sudo bash -c "$(curl -L https://github.com/XTLS/Xray-install/raw/main/install-release.sh)" @ install 19 | ``` 20 | 21 | ## Installation 22 | ```console 23 | pip install --upgrade git+https://github.com/mheidari98/proxyUtil@main 24 | ``` 25 | 26 | ## Uninstall 27 | ```console 28 | pip uninstall proxyUtil 29 | ``` 30 | 31 | ## Usage 32 | + #### check [wiki](https://github.com/mheidari98/proxyUtil/wiki) 33 | 34 | --- 35 | 36 | ## Tools 37 | - [x] [**connectMe**](https://github.com/mheidari98/proxyUtil/wiki/connectMe) : Simple CLI proxy client for shadowsocks, vmess, vless, trojan 38 | - [x] [**v2rayChecker**](https://github.com/mheidari98/proxyUtil/wiki/v2rayChecker) : Simple shadowsocks, vmess, vless, trojan checker with v2ray/xray core 39 | - [x] 
[**shadowChecker**](https://github.com/mheidari98/proxyUtil/wiki/shadowChecker) : Simple shadowsocks proxy checker with shadowsocks-libev 40 | - [x] [**dnsChecker**](https://github.com/mheidari98/proxyUtil/wiki/dnsChecker) : Simple DNS over UDP, DNS over TLS and DNS over HTTPS Checker 41 | - [x] [**clashGen**](https://github.com/mheidari98/proxyUtil/wiki/clashGen) : Convert vmess, vless, trojan, shadowsocks,... proxy to Clash Config 42 | - [x] [**cdnGen** ](https://github.com/mheidari98/proxyUtil/wiki/cdnGen) : Generating vmess url with cloudflare or arvan CDN IPs as address and our domain as host or sni for tls 43 | - [x] [**ssURI2sslocal**](https://github.com/mheidari98/proxyUtil/wiki/ssURI2sslocal) : shadowsocks URI to ss-local command 44 | - [x] [**sslocal2ssURI**](https://github.com/mheidari98/proxyUtil/wiki/sslocal2ssURI) : ss-local command to shadowsocks URI 45 | 46 | 47 | ## Status 48 | Project is: _in progress_ 49 | 50 | ## License 51 | [MIT](https://choosealicense.com/licenses/mit) 52 | 53 | ## Contact 54 | Created by [@mheidari98](https://github.com/mheidari98) 55 | 56 | ## Support 57 | If you like this project, please consider supporting it by donating to the following bitcoin address: 58 | 59 | ## Disclaimer 60 | * This project is meant for personal and educational uses only. 61 | * Please follow relevant laws and regulations when using this project. 62 | * Project owner is not responsible or liable in any manner for the use of the content. 63 | 64 | 65 | 76 | -------------------------------------------------------------------------------- /proxyUtil/dnsChecker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # https://github.com/rthalley/dnspython 3 | # https://dnspython.readthedocs.io 4 | import argparse 5 | from rich.console import Console # pip install rich 6 | from rich.table import Table 7 | from proxyUtil import * 8 | 9 | ch = logging.StreamHandler() 10 | ch.setFormatter(CustomFormatter()) 11 | logging.basicConfig(level=logging.ERROR, format='%(asctime)s - %(levelname)s - %(message)s', handlers=[ch]) 12 | 13 | console = Console() 14 | 15 | def main(argv=sys.argv): 16 | parser = argparse.ArgumentParser(description="DNS Checker") 17 | parser.add_argument("-d", "--domain", help="Domain to check (default: example.com)", default="example.com") 18 | parser.add_argument("-r", "--rr", help="Record type to check (default: A)", default="A", choices=RR) 19 | parser.add_argument("-v", "--verbose", help="Verbose output", action="store_true") 20 | parser.add_argument("-s", "--request-dnssec", help="Request DNSSEC", action="store_true", default=False) 21 | parser.add_argument("-t", "--timeout", help=f"DNS Timeout (default: {DEFAULT_TIMEOUT})", default=DEFAULT_TIMEOUT, type=float) 22 | parser.add_argument("--do53", help="check DNS over UDP", action="store_true", default=False) 23 | parser.add_argument("--doh", help="check DNS over HTTPS", action="store_true", default=False) 24 | parser.add_argument("--dot", help="check DNS over TLS", action="store_true", default=False) 25 | parser.add_argument("--all", help="check all DNS over UDP, DoH and DoT", action="store_true", default=False) 26 | args = parser.parse_args(argv[1:]) 27 | 28 | if args.verbose: 29 | logging.getLogger().setLevel(logging.INFO) 30 | 31 | if args.all: 32 | args.do53 = True 33 | args.doh = True 34 | args.dot = True 35 | 36 | logging.info(f"Domain: {args.domain}") 37 | logging.info(f"Record type: {args.rr}") 38 | 39 | table = Table(show_lines=True, 
show_header=True, header_style="bold magenta", 40 | row_styles=["dim", ""], highlight=True) 41 | 42 | table.add_column("DNS NAME", style="bright_cyan", justify="center") 43 | table.add_column("DNS IP", style="bright_cyan", justify="center") 44 | table.add_column("Time", style="bright_yellow", justify="center") 45 | table.add_column("IPs", style="bright_green", justify="center") 46 | 47 | results = [] 48 | 49 | if args.do53: 50 | for name, servers in Do53_URLS.items(): 51 | for server in servers: 52 | dnsTime, ips = Do53_reolver(args.domain, args.rr, server, args.request_dnssec) 53 | results.append((name, server, dnsTime*1000, ips)) # resolver time is in seconds; convert to ms 54 | 55 | if args.dot: 56 | for name, servers in DoT_URLS.items(): 57 | for server in servers: 58 | dnsTime, ips = DoT_resolver(args.domain, args.rr, server, args.request_dnssec) 59 | results.append((name, server, dnsTime*1000, ips)) 60 | 61 | if args.doh: 62 | for name, servers in DoH_URLS.items(): 63 | for server in servers: 64 | dnsTime, ips = DoH_resolver(args.domain, args.rr, server, args.request_dnssec) 65 | results.append((name, server, dnsTime*1000, ips)) 66 | 67 | results.sort(key=lambda x: x[2]) 68 | for result in results: 69 | table.add_row(result[0], result[1], f"{result[2]:.2f} ms", ", ".join(result[3])) 70 | 71 | #time.sleep(0.1) 72 | #clearScreen() 73 | console.print(table) 74 | 75 | 76 | if __name__ == "__main__": 77 | main() 78 | -------------------------------------------------------------------------------- /scripts/connectMe: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | import argparse 4 | import tempfile 5 | import shlex 6 | import signal 7 | from proxyUtil import * 8 | 9 | ch = logging.StreamHandler() 10 | ch.setFormatter(CustomFormatter()) 11 | logging.basicConfig(level=logging.INFO, handlers=[ch]) 12 | 13 | CORE = "xray" 14 | tempdir = tempfile.mkdtemp() 15 | 16 | def ss_runner(ss_url, localPort): 17 | cmd = ssURI2sslocal(ss_url, localPort) 18 | logging.info(f"Running {cmd}") 19 | try: 20 | #os.execv('/bin/sh', shlex.split(cmd)) 21 | p = subprocess.Popen([cmd], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid) 22 | while True: 23 | time.sleep(1) 24 | except KeyboardInterrupt: 25 | logging.info("KeyboardInterrupt") 26 | os.killpg(os.getpgid(p.pid), signal.SIGTERM) # Send the signal to all the process groups 27 | time.sleep(1) # sleep 1 second 28 | except Exception: 29 | logging.error("ss-local failed to start") 30 | return 31 | 32 | 33 | def v2ray_runner(url, localPort): 34 | configName = createConfig(url, localPort, tempdir) 35 | if configName is None : 36 | return 37 | 38 | cmd = f"{CORE} run -config {configName}" 39 | logging.info(f"Running {cmd}") 40 | try: 41 | p = subprocess.Popen([cmd], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid) 42 | while True: 43 | time.sleep(1) 44 | except KeyboardInterrupt: 45 | logging.info("KeyboardInterrupt") 46 | #p.kill() 47 | #p.send_signal(signal.SIGINT) 48 | os.killpg(os.getpgid(p.pid), signal.SIGTERM) # Send the signal to all the process groups 49 | time.sleep(1) # sleep 1 second 50 | except Exception: 51 | logging.error(f"{CORE} failed to start") 52 | return 53 | 54 | 55 | def main(): 56 | parser = argparse.ArgumentParser(description="Simple proxy client for ss/v2ray/trojan") 57 | parser.add_argument("link", help="proxy link") 58 | parser.add_argument("-l", "--lport", help="start local port, default is 1080", default=1080, type=int) 59 | parser.add_argument('-c', '--core', help="select core from [v2ray, 
xray, shadowsocks-libev]", 60 | choices=["xray", "v2ray", "ss", "wxray"], default="xray") 61 | parser.add_argument("--proxychains", help="set proxychains", action="store_true") 62 | parser.add_argument("--system", help="set system proxy", action="store_true") 63 | args = parser.parse_args() 64 | 65 | if is_port_in_use(args.lport): 66 | logging.error(f"port {args.lport} is in use") 67 | return 68 | 69 | if args.proxychains: 70 | if not is_tool("proxychains"): 71 | logging.error("proxychains not found, please install it first") 72 | logging.error("\tsudo apt install proxychains") 73 | return 74 | set_proxychains(args.lport) 75 | 76 | logging.info(f"Starting proxy client on port {args.lport} with PID {os.getpid()}") 77 | 78 | if args.system: 79 | set_system_proxy(proxyHost="127.0.0.1", proxyPort=args.lport, enable=True) 80 | 81 | if args.core == "ss" and args.link.startswith("ss://"): 82 | if not is_tool('ss-local'): 83 | logging.error("ss-local not found, please install shadowsocks client first") 84 | logging.error("\thttps://github.com/shadowsocks/shadowsocks-libev") 85 | return 86 | ss_runner(args.link, args.lport) 87 | else : 88 | global CORE 89 | os.environ["PATH"] += os.pathsep + os.path.join('.', 'xray') 90 | os.environ["PATH"] += os.pathsep + os.path.join('.', 'v2ray') 91 | 92 | CORE = shutil.which(args.core) 93 | if not CORE : 94 | logging.error(f"{args.core} not found!") 95 | if args.core == "v2ray" : 96 | logging.error("you can install v2ray from https://www.v2fly.org/en_US/guide/install.html") 97 | else: 98 | logging.error("you can install xray from https://github.com/XTLS/Xray-core#installation") 99 | download = input("do you want to download it now? [y/n]").strip() in ["yes", "y"] 100 | if download : 101 | if args.core == "v2ray" : 102 | downloadZray("v2fly", "v2ray") 103 | else: 104 | downloadZray("XTLS", "xray") 105 | CORE = shutil.which(args.core) 106 | else: 107 | exit(1) 108 | 109 | logging.info(f"using {CORE} core") 110 | 111 | v2ray_runner(args.link, args.lport) 112 | 113 | if args.system: 114 | set_system_proxy(enable=False) 115 | 116 | 117 | if __name__ == '__main__': 118 | main() 119 | shutil.rmtree(tempdir) 120 | 121 | -------------------------------------------------------------------------------- /proxyUtil/dnsUtil.py: -------------------------------------------------------------------------------- 1 | import dns.message # pip install dnspython[doh,dnssec,idna] 2 | import dns.name 3 | import dns.query 4 | import ipaddress 5 | import re 6 | import requests 7 | from bs4 import BeautifulSoup # pip install beautifulsoup4 8 | import socket 9 | import logging 10 | import urllib 11 | 12 | 13 | DEFAULT_TIMEOUT = 3.0 14 | Do53_DEFAULT_ENDPOINT = "8.8.8.8" 15 | DoT_DEFAULT_ENDPOINT = "tls://dns.google:853" 16 | DoH_DEFAULT_ENDPOINT = "https://dns.google/dns-query" 17 | 18 | FILTER_CIDRs = ["0.0.0.0/32", "10.10.34.0/24"] 19 | 20 | RR = ["A", "AAAA", "CNAME", "MX", "NS", "SOA", "SPF", "SRV", "TXT", "CAA", "DNSKEY", "DS"] 21 | 22 | 23 | def isFilter(ip, CIDR_LIST=FILTER_CIDRs): 24 | for cidr in CIDR_LIST: 25 | if ipaddress.ip_address(ip) in ipaddress.ip_network(cidr): 26 | return True 27 | return False 28 | 29 | 30 | def findURLs(text): 31 | regex = 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+' 32 | urls = re.findall(regex, text) 33 | return urls if urls else [] 34 | 35 | 36 | def scrapeDoH(): 37 | URL = "https://github.com/curl/curl/wiki/DNS-over-HTTPS" 38 | page = requests.get(URL) 39 | soup = BeautifulSoup(page.content, 'html.parser') 40 | 
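    # The curl wiki page lays its provider list out as an HTML table: each row
    # holds the provider name in the first cell and the DoH endpoint URL(s) as
    # anchors in the second, so the loop below collects them into a
    # {provider: [endpoint, ...]} dict.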
results = soup.find_all('tbody')[0].find_all('tr') 41 | doh = {} 42 | for row in results[1:]: 43 | data = row.find_all('td') 44 | name = data[0].text.strip() 45 | #urls = findURLs( data[1].text ) 46 | urls = [aTag.get('href') for aTag in data[1].find_all('a')] 47 | if urls : 48 | doh[name] = urls 49 | return doh 50 | 51 | 52 | def isIPv4(ip): 53 | try: 54 | socket.inet_pton(socket.AF_INET, ip) 55 | except AttributeError: # no inet_pton here, sorry 56 | try: 57 | socket.inet_aton(ip) 58 | except socket.error: 59 | return False 60 | return ip.count('.') == 3 61 | except socket.error: # not a valid address 62 | return False 63 | return True 64 | 65 | 66 | def isIPv6(ip): 67 | try: 68 | socket.inet_pton(socket.AF_INET6, ip) 69 | except socket.error: # not a valid address 70 | return False 71 | return True 72 | 73 | 74 | def Do53_reolver(domain, rr="A", endpoint=Do53_DEFAULT_ENDPOINT, request_dnssec=False, timeout=DEFAULT_TIMEOUT): 75 | qname = dns.name.from_text(domain) 76 | rdtype = dns.rdatatype.from_text(rr) 77 | req = dns.message.make_query(qname, rdtype, want_dnssec=request_dnssec) 78 | try: 79 | res, tcp = dns.query.udp_with_fallback(req, endpoint, timeout=timeout) 80 | ips = [ item.address for answer in res.answer for item in answer] 81 | if any(isFilter(ip) for ip in ips): 82 | logging.critical(f"[Do53] {domain} resolved to {ips} using {endpoint} is Filtered") 83 | else: 84 | logging.info(f"[Do53] {domain} resolved to {ips} in {res.time} seconds using {endpoint}") 85 | return res.time, ips 86 | except Exception as e: 87 | logging.error(f"[Do53] Failed to resolve {domain} using {endpoint} : {e}") 88 | return timeout, [] 89 | 90 | 91 | def DoT_resolver(domain, rr="A", endpoint=DoT_DEFAULT_ENDPOINT, request_dnssec=False, timeout=DEFAULT_TIMEOUT): 92 | qname = dns.name.from_text(domain) 93 | rdtype = dns.rdatatype.from_text(rr) 94 | req = dns.message.make_query(qname, rdtype, want_dnssec=request_dnssec) 95 | finalEndpoint = endpoint 96 | if not isIPv4(endpoint) and not isIPv6(endpoint): 97 | hostname = urllib.parse.urlparse(endpoint).hostname 98 | dnsTime, ips = Do53_reolver(hostname, "A") 99 | if not ips : 100 | logging.error(f"[DoT] Failed to resolve {endpoint} using Do53") 101 | return timeout, [] 102 | if any(isFilter(ip) for ip in ips): 103 | logging.error(f"[DoT] {endpoint} resolved to {ips} is Filtered") 104 | return timeout, [] 105 | finalEndpoint = ips[0] 106 | try: 107 | res = dns.query.tls(req, finalEndpoint, timeout=timeout) 108 | ips = [ item.address for answer in res.answer for item in answer] 109 | if any(isFilter(ip) for ip in ips): 110 | logging.critical(f"[DoT] {domain} resolved to {ips} using {endpoint} is Filtered") 111 | else: 112 | logging.info(f"[DoT] {domain} resolved to {ips} in {res.time} seconds using {endpoint}") 113 | return float(res.time), ips 114 | except Exception as e: 115 | logging.error(f"[DoT] Failed to resolve {domain} using {endpoint} : {e}") 116 | return timeout, [] 117 | 118 | 119 | def DoH_resolver(domain, rr="A", endpoint=DoH_DEFAULT_ENDPOINT, request_dnssec=False, timeout=DEFAULT_TIMEOUT): 120 | qname = dns.name.from_text(domain) 121 | rdtype = dns.rdatatype.from_text(rr) 122 | req = dns.message.make_query(qname, rdtype, want_dnssec=request_dnssec) 123 | try: 124 | res = dns.query.https(req, endpoint, timeout=timeout) 125 | ips = [ item.address for answer in res.answer for item in answer] 126 | if any(isFilter(ip) for ip in ips): 127 | logging.critical(f"[DoH] {domain} resolved to {ips} using {endpoint} is Filtered") 128 | else: 129 | 
logging.info(f"[DoH] {domain} resolved to {ips} in {res.time} seconds using {endpoint}") 130 | return float(res.time), ips 131 | except Exception as e: 132 | logging.error(f"[DoH] Failed to resolve {domain} using {endpoint} : {e}") 133 | return timeout, [] 134 | 135 | -------------------------------------------------------------------------------- /proxyUtil/cdnGen.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | ######################################################################### 3 | # Generating vmess url with CDN IPs as address and our domain as host # 4 | # # 5 | # Usage: ./cdnGen.py "vmess://..." --cdn arvan -n 100 -o output.txt # 6 | # --cdn: CDN name # 7 | # -o: output file # 8 | # -n: number of IP to generate # 9 | # Output: # 10 | # vmess url with # 11 | # address: CDN IP # 12 | # host: our domain # 13 | ########################################################################## 14 | import argparse 15 | import ipaddress 16 | import random 17 | from proxyUtil import * 18 | 19 | ch = logging.StreamHandler() 20 | ch.setFormatter(CustomFormatter()) 21 | logging.basicConfig(level=logging.INFO, handlers=[ch]) 22 | 23 | cdn_url = { 24 | 'arvan' : "https://www.arvancloud.ir/fa/ips.txt" , 25 | 'cloudflare' : "https://www.cloudflare.com/ips-v4" , 26 | 'CFplus' : "https://raw.githubusercontent.com/mheidari98/CDNs-ip/main/Cloudflare_Organization.txt" , 27 | } 28 | 29 | def parseVlessTrojan(ParseResult): 30 | queryDict = { q.split('=', 1)[0] : q.split('=', 1)[1] for q in ParseResult.query.split('&') } 31 | queryDict['scheme'] = ParseResult.scheme 32 | if (res := re.search(f"^(.+)@(.+):(\d+)$", ParseResult.netloc)): 33 | queryDict['pass'], queryDict['add'], queryDict['port'] = res.groups() 34 | return queryDict 35 | 36 | 37 | def unparseVlessTrojan(queryDict): 38 | part1 = f"{queryDict['scheme']}://{queryDict['pass']}@{queryDict['add']}:{queryDict['port']}" 39 | part2 = urlencode({key:value for key, value in queryDict.items() if key not in ['scheme', 'pass','add', 'port']}) 40 | return f"{part1}?{part2}" 41 | 42 | 43 | def main(argv=sys.argv): 44 | parser = argparse.ArgumentParser(description="Generating vmess url with CDN IPs as address and our domain as host") 45 | parser.add_argument("link", help="vmess link") 46 | parser.add_argument("--cdn", choices=cdn_url.keys(), help="cdn name") 47 | parser.add_argument("-f", "--file", help="file contains cdn IPs") 48 | parser.add_argument("--url", help="url to get cdn IPs") 49 | parser.add_argument("-n", "--number", type=int, help="number of IP to generate (default: all)") 50 | parser.add_argument('-v', "--verbose", help="increase output verbosity", action="store_true", default=False) 51 | parser.add_argument("-o", "--output", help="output file") 52 | args = parser.parse_args(argv[1:]) 53 | 54 | if args.verbose: 55 | logging.getLogger().setLevel(logging.DEBUG) 56 | 57 | if args.cdn or args.url : 58 | cdnURL = args.url if args.url else cdn_url[args.cdn] 59 | req = requests.get(cdnURL) 60 | if req.status_code != 200: 61 | logging.error(f"Error to get {cdnURL} : {req.status_code}") 62 | exit(1) 63 | cidrs = req.text.split() 64 | 65 | elif args.file : 66 | with open(args.file, 'r') as f : 67 | cidrs = f.read().split() 68 | 69 | ip_list = [] 70 | for cidr in cidrs: 71 | ip_list.extend([str(ip) for ip in ipaddress.IPv4Network(cidr).hosts()]) 72 | 73 | if not ip_list : 74 | logging.error("Error to get CDN IPs") 75 | exit(1) 76 | logging.debug(f"{args.cdn} Total IP: {len(ip_list)}") 77 | 
78 | if args.number : 79 | if args.number > len(ip_list) : 80 | logging.error(f"Number of IP to generate ({args.number}) is greater than total IP ({len(ip_list)})") 81 | exit(1) 82 | ip_list = random.sample(ip_list, args.number) 83 | 84 | ParseResult = urllib.parse.urlparse(args.link) # :///;?# 85 | if ParseResult.scheme == "vmess" and isBase64(args.link[8:]): 86 | jsonLoad = json.loads(base64Decode(args.link[8:])) 87 | tls = 'tls' 88 | elif ParseResult.scheme in ["vless", "trojan"] : 89 | jsonLoad = parseVlessTrojan(ParseResult) 90 | tls = 'security' 91 | else : 92 | logging.error("Error to parse proxy link") 93 | exit(1) 94 | 95 | if ('host' not in jsonLoad) or (not jsonLoad['host']) : 96 | jsonLoad['host'] = jsonLoad['add'] 97 | if tls in jsonLoad and jsonLoad[tls]=='tls' : 98 | if 'sni' in jsonLoad and jsonLoad['sni'] : 99 | jsonLoad['host'] = jsonLoad['sni'] 100 | else : 101 | jsonLoad['sni'] = jsonLoad['host'] 102 | logging.debug(f"sni : {jsonLoad['sni']}") 103 | logging.debug(f"host: {jsonLoad['host']}") 104 | 105 | results = [] 106 | 107 | for ip in ip_list: 108 | jsonLoad['add'] = ip 109 | if ParseResult.scheme == "vmess" : 110 | results.append( Create_vmess_url(jsonLoad) ) 111 | elif ParseResult.scheme in ["vless", "trojan"] : 112 | results.append( unparseVlessTrojan(jsonLoad) ) 113 | 114 | outputs = '\n'.join(results) 115 | if args.output : 116 | with open(args.output, 'w') as f : 117 | f.write(outputs) 118 | else : 119 | print(outputs) 120 | 121 | 122 | if __name__ == '__main__': 123 | main() 124 | 125 | -------------------------------------------------------------------------------- /scripts/shadowChecker: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Installing shadowsocks client 3 | # https://github.com/shadowsocks/shadowsocks-libev 4 | # https://www.linuxbabe.com/desktop-linux/how-to-install-and-use-shadowsocks-command-line-client 5 | # sudo pip install shadowsocks 6 | import argparse 7 | import concurrent.futures 8 | import tempfile 9 | from proxyUtil import * 10 | 11 | ch = logging.StreamHandler() 12 | ch.setFormatter(CustomFormatter()) 13 | logging.basicConfig(level=logging.ERROR, handlers=[ch]) 14 | 15 | tempdir = tempfile.mkdtemp() 16 | 17 | def Checker(shadowList, localPort, testDomain, timeOut): 18 | liveProxy = [] 19 | 20 | proxy = PROXIES.copy() #deepcopy(PROXIES) 21 | proxy['http'] = proxy['http'].format(LOCAL_PORT=localPort) 22 | proxy['https'] = proxy['https'].format(LOCAL_PORT=localPort) 23 | 24 | pidPath = f"{tempdir}/ss.pid.{localPort}" 25 | 26 | for ss_url in shadowList : 27 | server, server_port, method, password, plugin, plugin_opts, tag = parse_ss_withPlugin(ss_url) 28 | 29 | if not isIPv4(server) and not isIPv6(server) and not getIP(server): 30 | continue 31 | 32 | cmd = ssURI2sslocal(ss_url, localPort, pidPath) 33 | os.system(cmd) 34 | time.sleep(0.2) 35 | 36 | ping = is_alive(testDomain, proxy, timeOut) 37 | if ping: 38 | liveProxy.append((ss_url, ping)) 39 | try : 40 | # http://httpbin.org/ip http://ip-api.com/json https://api.ipify.org 41 | result = json.loads(requests.get('http://ip-api.com/json/', proxies=proxy, timeout = timeOut).content) 42 | logging.info(f"[live] ip={result['query']} @ {result['country']} ping={ping}") 43 | except Exception as x: 44 | logging.warning(f"[{'failed'}] ip={server} with ping={ping}") 45 | pass 46 | else : 47 | logging.debug(f"[dead] ip={server}") 48 | 49 | os.system(f"if ps -p $(cat {pidPath}) > /dev/null 2>&1 ;then kill -9 $(cat {pidPath}); fi") 50 
| time.sleep(0.3) # sleep 0.3 seconds 51 | 52 | return liveProxy 53 | 54 | 55 | def main(): 56 | parser = argparse.ArgumentParser(description="Simple shadowsocks proxy checker") 57 | parser.add_argument("-f", "--file", help="file contain ss proxy") 58 | parser.add_argument("-d", "--domain", help="test connect domain", default='https://www.google.com') 59 | parser.add_argument("-t", "--timeout", help="timeout in seconds, default is 3", default=3 , type=int) 60 | parser.add_argument("-l", "--lport", help="start local port, default is 1080", default=1080, type=int) 61 | parser.add_argument('-v', "--verbose", help="increase output verbosity", action="store_true", default=False) 62 | parser.add_argument('-vv', '--debug', help="debug log", action='store_true', default=False) 63 | parser.add_argument('-T', '--threads', help="threads number, default is 10", default=10, type=int) 64 | parser.add_argument('--url', help="get proxy from url") 65 | parser.add_argument('--free', help="get free proxy", action='store_true', default=False) 66 | parser.add_argument('--stdin', help="get proxy from stdin", action='store_true', default=False) 67 | parser.add_argument('--reuse', help="reuse last checked proxy", action='store_true', default=False) 68 | parser.add_argument('-o', '--output', help="output file", default='sortedShadow.txt') 69 | args = parser.parse_args() 70 | 71 | if args.verbose: 72 | logging.getLogger().setLevel(logging.INFO) 73 | if args.debug: 74 | logging.getLogger().setLevel(logging.DEBUG) 75 | 76 | if not is_tool('ss-local'): 77 | logging.error("ss-local not found, please install shadowsocks client first") 78 | logging.error("\thttps://github.com/shadowsocks/shadowsocks-libev") 79 | exit(1) 80 | 81 | killProcess('ss-local') # init system 82 | 83 | lines = set() 84 | if args.file and os.path.isfile(args.file): 85 | with open(args.file, 'r', encoding='UTF-8') as file: 86 | lines.update( parseContent(file.read().strip(), [ss_scheme]) ) 87 | logging.info(f"got {len(lines)} from reading proxy from file") 88 | 89 | if args.reuse and os.path.isfile(args.output): 90 | with open(args.output, 'r', encoding='UTF-8') as f: 91 | lines.update( parseContent(f.read().strip()) ) 92 | 93 | if args.url : 94 | lines.update( ScrapURL(args.url, [ss_scheme]) ) 95 | 96 | if args.free : 97 | lines.update( ScrapURL('https://raw.githubusercontent.com/freefq/free/master/v2', [ss_scheme]) ) 98 | 99 | if args.stdin : 100 | lines.update( parseContent(sys.stdin.read(), [ss_scheme]) ) 101 | 102 | lines = list(lines) 103 | logging.info(f"We have {len(lines)} proxy to check") 104 | 105 | if not lines: 106 | logging.error("No proxy to check") 107 | return 108 | 109 | N = min(args.threads, len(lines)) 110 | 111 | openPort = [] 112 | port = args.lport 113 | while len(openPort):///;?# 403 | if ParseResult.scheme == 'ss': 404 | return parse_ss_withPlugin(proxy)[0] 405 | elif ParseResult.scheme == 'ssr': 406 | return parse_ssr(proxy)['address'] 407 | elif ParseResult.scheme == 'vmess': 408 | return json.loads(base64Decode(proxy[8:]))['add'] 409 | elif ParseResult.scheme == 'vless': 410 | return parseVless(ParseResult)['add'] 411 | elif ParseResult.scheme == 'trojan': 412 | return parseTrojan(ParseResult)['address'] 413 | else: 414 | logging.error(f"Invalid proxy: {proxy}") 415 | except Exception as err : 416 | logging.error(f"Invalid proxy: {proxy} ({err})") 417 | 418 | 419 | def killProcess(processName, cmdline=None): 420 | for p in psutil.process_iter(attrs=['pid', 'name']): 421 | if processName in p.name() and (cmdline is None 
or cmdline in p.cmdline()): 422 | for child in p.children(): 423 | os.kill(child.pid, signal.SIGKILL) 424 | os.kill(p.pid, signal.SIGKILL) 425 | 426 | 427 | def silentremove(filename): 428 | try: 429 | os.remove(filename) 430 | except OSError: 431 | pass 432 | 433 | 434 | def is_tool(name): 435 | """Check whether `name` is on PATH and marked as executable.""" 436 | return shutil.which(name) is not None 437 | 438 | 439 | def is_port_in_use(port: int) -> bool: 440 | # stackoverflow.com/questions/2470971 441 | import socket 442 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: 443 | return s.connect_ex(('localhost', port)) == 0 444 | 445 | 446 | def is_alive(testDomain, proxy, timeOut=3): 447 | try: 448 | start = time.perf_counter() 449 | requests.head( testDomain, 450 | proxies = proxy, 451 | timeout = timeOut, 452 | ) 453 | end = time.perf_counter() 454 | except Exception as e: 455 | #logging.error(f"test live with {proxy} failed: {e}") 456 | return 0 457 | return ((end - start) * 100).__round__() 458 | 459 | 460 | def processShadowJson(jsonTxt): 461 | result = [] 462 | for line in json.loads(jsonTxt): 463 | method, password , server, server_port = line['method'], line['password'], line['server'], line['server_port'] 464 | ss = Create_ss_url(server, server_port, method, password) 465 | result.append(ss) 466 | return result 467 | 468 | 469 | def parseContent(content, patterns=proxyScheme): 470 | newProxy = [] 471 | if is_json(content): 472 | newProxy = processShadowJson(content) 473 | else: 474 | lines = [] 475 | for line in content.splitlines(): 476 | if isBase64(line): 477 | line = base64Decode(line) 478 | lines.extend( line.split() ) 479 | newProxy = checkPatternsInList(lines, patterns) 480 | return newProxy 481 | 482 | 483 | def ScrapURL(url, patterns=proxyScheme): 484 | newProxy = [] 485 | try: 486 | res = requests.get(url, timeout=4) 487 | except Exception as e: 488 | logging.debug("Exception occurred", exc_info=True) 489 | logging.error(f"Can't reach {url}") 490 | return newProxy 491 | 492 | if (res.status_code//100) == 2: 493 | content = res.text.strip().replace('\ufeff', '') 494 | newProxy = parseContent(content, patterns) 495 | logging.info(f"Got {len(newProxy)} new proxy from {url}") 496 | else: 497 | logging.error(f"Can't get {url} , status code = {res.status_code}") 498 | return newProxy 499 | 500 | 501 | def tagChanger(url, tag="4MahsaAmini"): 502 | ParseResult = urllib.parse.urlparse(url) 503 | if ParseResult.scheme == "ss" : 504 | #return Create_ss_url(*parse_ss(url))+f"#{tag}" 505 | return Create_ss_url_withPlugin( *parse_ss_withPlugin(url)[:6], tag ) 506 | 507 | elif ParseResult.scheme == "ssr" : 508 | url = f"ssr://{base64Decode(url[6:])}" 509 | params = {'remarks': base64.urlsafe_b64encode(tag.encode())} 510 | url_parts = list(urlparse(url)) 511 | query = dict(parse_qsl(url_parts[4])) 512 | query.update(params) 513 | url_parts[4] = urlencode(dict(sorted(query.items()))) 514 | return f"ssr://{base64.urlsafe_b64encode(urlunparse(url_parts)[6:].encode()).decode()}" 515 | 516 | elif ParseResult.scheme == "vmess" and isBase64(url[8:]): 517 | jsonLoad = json.loads(base64Decode(url[8:])) 518 | jsonLoad['ps'] = tag 519 | return "vmess://"+ base64.b64encode(json.dumps(dict(sorted(jsonLoad.items()))).encode()).decode() 520 | 521 | elif ParseResult.scheme in ["vless", "trojan"]: 522 | #return urldefrag(url).url+f"#{tag}" 523 | return ParseResult._replace(fragment=tag).geturl() 524 | 525 | return url 526 | 527 | 528 | def tagsChanger(urls, tag="4MahsaAmini", 
withCnt=False): 529 | lines = [] 530 | newTAG = tag 531 | for i, url in enumerate(urls): 532 | try: 533 | if withCnt: 534 | newTAG = f"{tag}-{i}" 535 | lines.append( tagChanger(url, newTAG) ) 536 | except: 537 | pass 538 | return lines 539 | 540 | 541 | def split2Npart(a, n): 542 | k, m = divmod(len(a), n) 543 | return (a[i*k+min(i, m):(i+1)*k+min(i+1, m)] for i in range(n)) 544 | 545 | 546 | def mergeMultiDicts(*dicts): 547 | # https://stackoverflow.com/questions/38987#26853961 548 | result = {} 549 | for d in dicts: 550 | if sys.version_info >= (3, 9): 551 | result |= d 552 | elif sys.version_info >= (3, 5): 553 | result = {**result, **d} 554 | else: 555 | result.update(d) 556 | return result 557 | 558 | 559 | def createShadowConfig(ss_url, port=1080): 560 | config = deepcopy( mergeMultiDicts(dnsServers, inbounds, ssOut) ) 561 | 562 | config['inbounds'][0]['port'] = port 563 | 564 | server, server_port, method, password, plugin, plugin_opts, tag = parse_ss_withPlugin(ss_url) 565 | 566 | config['outbounds'][0]['settings']['plugin'] = plugin 567 | config['outbounds'][0]['settings']['pluginOpts'] = plugin_opts 568 | config['outbounds'][0]['settings']['servers'][0]['address'] = server 569 | config['outbounds'][0]['settings']['servers'][0]['port'] = int(server_port) 570 | config['outbounds'][0]['settings']['servers'][0]['method'] = method 571 | config['outbounds'][0]['settings']['servers'][0]['password'] = password 572 | 573 | return config 574 | 575 | 576 | def createSsrConfig(ssr_url, localPort=1080): 577 | config = deepcopy( mergeMultiDicts(dnsServers, inbounds, ssrOut) ) 578 | ssr_parsed = parse_ssr(ssr_url) 579 | config['inbounds'][0]['port'] = localPort 580 | config['outbounds'][0]['settings']['servers'][0]['address'] = ssr_parsed['address'] 581 | config['outbounds'][0]['settings']['servers'][0]['port'] = int(ssr_parsed['port']) 582 | config['outbounds'][0]['settings']['servers'][0]['method'] = ssr_parsed['method'] 583 | config['outbounds'][0]['settings']['servers'][0]['password'] = ssr_parsed['password'] 584 | config['outbounds'][0]['settings']["pluginArgs"].append(f'--obfs={ssr_parsed["obfs"]}') 585 | config['outbounds'][0]['settings']["pluginArgs"].append(f'--obfs-param={ssr_parsed["obfsparam"]}') 586 | config['outbounds'][0]['settings']["pluginArgs"].append(f'--protocol={ssr_parsed["protocol"]}') 587 | config['outbounds'][0]['settings']["pluginArgs"].append(f'--protocol-param={ssr_parsed["protoparam"]}') 588 | return config 589 | 590 | 591 | def createVmessConfig(jsonLoad, port=1080): 592 | config = deepcopy( mergeMultiDicts(dnsServers, inbounds, vmessOut) ) 593 | 594 | config['inbounds'][0]['port'] = port 595 | 596 | config['outbounds'][0]["protocol"] = jsonLoad["protocol"] if 'protocol' in jsonLoad else "vmess" # vmess/vless 597 | config['outbounds'][0]["settings"]["vnext"][0]["address"] = jsonLoad['add'] 598 | config['outbounds'][0]["settings"]["vnext"][0]["port"] = int(jsonLoad['port']) 599 | 600 | if is_valid_uuid(jsonLoad['id']) : 601 | config['outbounds'][0]["settings"]["vnext"][0]["users"][0]["id"] = jsonLoad['id'] 602 | else: 603 | config['outbounds'][0]["settings"]["vnext"][0]["users"][0]["id"] = generate_uuid(jsonLoad['id']) 604 | 605 | if 'aid' in jsonLoad and jsonLoad['aid']: 606 | try: 607 | config['outbounds'][0]["settings"]["vnext"][0]["users"][0]["alterId"] = int(jsonLoad['aid']) 608 | except: 609 | logging.error(f"aid: {jsonLoad['aid']} is not int") 610 | 611 | if 'encryption' in jsonLoad: 612 | 
config['outbounds'][0]["settings"]["vnext"][0]["users"][0]["encryption"] = jsonLoad['encryption'] 613 | 614 | if 'flow' in jsonLoad: 615 | config['outbounds'][0]["settings"]["vnext"][0]["users"][0]["flow"] = jsonLoad['flow'] 616 | 617 | sec = jsonLoad["scy"] if 'scy' in jsonLoad else (jsonLoad['security'] if 'security' in jsonLoad else "auto" ) 618 | if sec!="auto" : 619 | config['outbounds'][0]["settings"]["vnext"][0]["users"][0]["security"] = sec # "aes-128-gcm" 620 | 621 | 622 | if jsonLoad["net"]=="ws": 623 | config['outbounds'][0]["streamSettings"]["network"] = "ws" 624 | if 'host' in jsonLoad : 625 | config['outbounds'][0]["streamSettings"]["wsSettings"] = {"headers":{"Host":jsonLoad['host']} , 626 | "connectionReuse": True, 627 | "path":jsonLoad['path']} 628 | elif jsonLoad["net"]=="h2": 629 | config['outbounds'][0]["streamSettings"]["network"] = "http" 630 | config['outbounds'][0]["streamSettings"]["httpSettings"] = {"headers":{"Host":jsonLoad['host']} , 631 | "path":jsonLoad['path']} 632 | elif jsonLoad["net"]=="grpc": 633 | config['outbounds'][0]["streamSettings"]["network"] = "grpc" 634 | config['outbounds'][0]["streamSettings"]["grpcSettings"] = {"serviceName":jsonLoad['path']} 635 | elif jsonLoad["net"]=="kcp": 636 | config['outbounds'][0]["streamSettings"]["network"] = "kcp" 637 | 638 | if jsonLoad["tls"]: # "tls" 639 | config['outbounds'][0]["streamSettings"]["security"] = jsonLoad["tls"] 640 | if jsonLoad["tls"] == "reality": 641 | realitySettings = {} 642 | if 'sni' in jsonLoad: 643 | realitySettings["serverName"] = jsonLoad['sni'] 644 | if 'fp' in jsonLoad: 645 | realitySettings["fingerprint"] = jsonLoad['fp'] 646 | if 'pbk' in jsonLoad: 647 | realitySettings["publicKey"] = jsonLoad['pbk'] 648 | if 'sid' in jsonLoad: 649 | realitySettings["shortId"] = jsonLoad['sid'] 650 | if 'spx' in jsonLoad: 651 | realitySettings["spiderX"] = jsonLoad['spx'] 652 | config['outbounds'][0]["streamSettings"]["realitySettings"] = realitySettings 653 | elif 'sni' in jsonLoad: 654 | config['outbounds'][0]["streamSettings"]["tlsSettings"]["serverName"] = jsonLoad['sni'] 655 | if "skip-cert-verify" in jsonLoad and jsonLoad["skip-cert-verify"]: 656 | config['outbounds'][0]["streamSettings"]["tlsSettings"]["allowInsecure"] = True 657 | 658 | return config 659 | 660 | 661 | def createTrojanConfig(loaded, localPort=1080): 662 | config = deepcopy( mergeMultiDicts(dnsServers, inbounds, trojanOut) ) 663 | 664 | trojan_parsed = parseTrojan(loaded) 665 | 666 | config['inbounds'][0]['port'] = localPort 667 | 668 | config['outbounds'][0]['settings']['servers'][0]['address'] = trojan_parsed['address'] 669 | config['outbounds'][0]['settings']['servers'][0]['port'] = int(trojan_parsed['port']) 670 | config['outbounds'][0]['settings']['servers'][0]['password'] = trojan_parsed['password'] 671 | 672 | if 'type' not in trojan_parsed: 673 | trojan_parsed['type'] = "tcp" 674 | #config['outbounds'][0]['streamSettings']['tcpSettings'] = {"header":{"type":"none"}} 675 | elif trojan_parsed['type'] =="ws": 676 | config['outbounds'][0]['streamSettings']["network"] = "ws" 677 | if 'host' in trojan_parsed : 678 | config['outbounds'][0]['streamSettings']['wsSettings'] = {"headers":{"Host":trojan_parsed['host']} , 679 | "path":trojan_parsed['path']} 680 | elif trojan_parsed['type'] =="grpc": 681 | config['outbounds'][0]['streamSettings']["network"] = "grpc" 682 | config['outbounds'][0]['streamSettings']['grpcSettings'] = {"serviceName":trojan_parsed['serviceName']} 683 | else: 684 | 
config['outbounds'][0]['streamSettings']["network"] = trojan_parsed['type'] 685 | 686 | if 'security' in trojan_parsed : 687 | config['outbounds'][0]['streamSettings']["security"] = trojan_parsed['security'] 688 | if 'sni' in trojan_parsed : 689 | config['outbounds'][0]['streamSettings']['tlsSettings']["serverName"] = trojan_parsed['sni'] 690 | if 'allowInsecure' in trojan_parsed and trojan_parsed['allowInsecure']=='1': 691 | config['outbounds'][0]['streamSettings']['tlsSettings']["allowInsecure"] = True 692 | 693 | return config 694 | 695 | 696 | def clearScreen(): 697 | #console.print("\033c", end="") 698 | os.system('cls' if os.name == 'nt' else 'clear') 699 | 700 | 701 | def set_proxychains(localPort=1080): 702 | pchPath = os.path.expanduser('~/.proxychains/proxychains.conf') 703 | os.makedirs(os.path.dirname(pchPath), exist_ok=True) 704 | if os.path.exists(pchPath): 705 | os.system(f"cp {pchPath} {pchPath}.bak") 706 | with open(pchPath, "w") as f: 707 | f.write(PROXYCHAINS.format(LOCAL_PORT=localPort)) 708 | logging.info("proxychains.conf updated!") 709 | 710 | 711 | def set_system_proxy(proxyHost="127.0.0.1", proxyPort=1080, proxyType="socks5", enable=True): 712 | if os.name == "nt": 713 | logging.info("Not Implemented for Windows") 714 | return 715 | else: 716 | # export {https,ftp,rsync,all}__proxy=socks5://127.0.0.1:1080 717 | proxy = f"{proxyType}://{proxyHost}:{proxyPort}" 718 | all_proxy = f"export all_proxy={proxy}" 719 | no_proxy = "export no_proxy=localhost,127.0.0.0/8,192.168.0.0/16,::1" 720 | 721 | SHELL = os.environ.get('SHELL') 722 | if "zsh" in SHELL: 723 | file = "~/.zshrc" 724 | elif "bash" in SHELL: 725 | file = "~/.bashrc" 726 | else: 727 | logging.error(f"Not supported SHELL: {SHELL}") 728 | return 729 | 730 | # get current bashrc 731 | with open(os.path.expanduser(file), "r") as f: 732 | lines = f.readlines() 733 | # remove old proxy setting 734 | lines = [line for line in lines if not line.startswith("export all_proxy=") and not line.startswith("export no_proxy=")] 735 | if enable: 736 | lines.append(f"{all_proxy} && {no_proxy}") 737 | # save to bashrc 738 | with open(os.path.expanduser(file), "w") as f: 739 | f.writelines(lines) 740 | #os.system(f"source {file}") 741 | logging.info("set system proxy done!" 
if enable else "unset proxy done!") 742 | 743 | 744 | def installDocker(): 745 | if not is_tool('docker'): 746 | # Install Docker if it is not already installed 747 | try: 748 | try: 749 | logging.info("Docker Not Found.\nInstalling Docker ...") 750 | subprocess.run("curl https://get.docker.com | sh", shell=True, check=True) 751 | except subprocess.CalledProcessError: 752 | sys.exit("Download Failed!") 753 | 754 | # Check if the Docker service is enabled 755 | systemctl = subprocess.call(["systemctl", "is-active", "--quiet", "docker"]) 756 | if systemctl: 757 | subprocess.call(["systemctl", "enable", "--now", "--quiet", "docker"]) 758 | time.sleep(2) 759 | except subprocess.CalledProcessError as e: 760 | sys.exit(e) 761 | except PermissionError: 762 | sys.exit("root privileges required") 763 | logging.info("Docker Installed") 764 | 765 | 766 | def getSHA256(fileName): 767 | with open(fileName, 'rb') as f: 768 | data = f.read() 769 | return hashlib.sha256(data).hexdigest() 770 | 771 | 772 | def get_OS(): 773 | osName = platform.system() 774 | logging.debug(f"OS: {osName}") 775 | if osName == 'Linux': 776 | return "linux" 777 | elif osName == 'Darwin': 778 | return "macos" 779 | elif osName == 'Windows': 780 | return "windows" 781 | else: 782 | logging.error("Unsupported OS") 783 | sys.exit(1) 784 | 785 | 786 | def get_arch(): 787 | arch = platform.machine() 788 | logging.debug(f"Architecture: {arch}") 789 | if arch == 'x86_64' or arch == 'AMD64': 790 | return "64" 791 | elif arch == 'i386' or arch == 'i686': 792 | return "32" 793 | elif arch == 'aarch64': 794 | return "arm64-v8a" 795 | elif arch == 'armv7l': 796 | return "arm32-v7a" 797 | else: 798 | logging.error("Unsupported Architecture") 799 | sys.exit(1) 800 | 801 | 802 | def chmodX(path): 803 | if get_OS() == "windows": 804 | return 805 | st = os.stat(path) 806 | os.chmod(path, st.st_mode | stat.S_IEXEC) 807 | 808 | 809 | def downloadZray(acc, repo): 810 | TAG = requests.get(f"https://api.github.com/repos/{acc}/{repo}-core/releases/latest").json()['tag_name'] 811 | 812 | ZRAY_FILE = f"{repo}-{get_OS()}-{get_arch()}.zip" 813 | ZRAY_URL = f"https://github.com/{acc}/{repo}-core/releases/download/{TAG}/{ZRAY_FILE}" 814 | DGST_FILE = f"{ZRAY_FILE}.dgst" 815 | DGST_URL = f"https://github.com/{acc}/{repo}-core/releases/download/{TAG}/{DGST_FILE}" 816 | ZIP_FILE = f"{repo}.zip" 817 | 818 | urllib.request.urlretrieve(ZRAY_URL, ZIP_FILE) 819 | logging.info(f"Downloaded {ZRAY_FILE}") 820 | r = requests.get(DGST_URL) 821 | FILE_SHA256 = [line for line in r.content.splitlines() if line.startswith(b"SHA2-256")][0].decode().split()[1] 822 | if getSHA256(ZIP_FILE) == FILE_SHA256 : 823 | logging.info("SHA256 check passed") 824 | with zipfile.ZipFile(ZIP_FILE, 'r') as zip_ref: 825 | zip_ref.extractall(repo) 826 | os.remove(ZIP_FILE) 827 | chmodX(f"{repo}/{repo}") 828 | else: 829 | logging.error("SHA256 check failed") 830 | logging.error(f"Expected: {FILE_SHA256}") 831 | logging.error(f"Actual: {getSHA256(ZIP_FILE)}") 832 | sys.exit(1) 833 | 834 | 835 | def createConfig(url, localPort, path): 836 | ParseResult = urllib.parse.urlparse(url) # <scheme>://<netloc>/<path>;<params>?<query>#<fragment> 837 | try: 838 | if ParseResult.scheme == "ss" : 839 | config = createShadowConfig(url, port=localPort) 840 | elif ParseResult.scheme == "vmess" : 841 | if isBase64(url[8:]): 842 | jsonLoad = json.loads(base64Decode(url[8:])) 843 | jsonLoad["protocol"] = "vmess" 844 | config = createVmessConfig(jsonLoad, port=localPort) 845 | else : 846 | logging.debug("This type of vmess url is not implemented") 847 | return None 848 | elif 
ParseResult.scheme == "vless" : 849 | config = createVmessConfig(parseVless(ParseResult), port=localPort) 850 | elif ParseResult.scheme == "trojan" : 851 | config = createTrojanConfig(ParseResult, localPort=localPort) 852 | else : 853 | logging.debug(f"Not Implemented {ParseResult.scheme}") 854 | return None 855 | except Exception as err : 856 | logging.error(f"{url} : {err}") 857 | return None 858 | 859 | configName = os.path.join(path, f"config_{localPort}.json") 860 | with open(configName, "w") as f: 861 | json.dump(config, f) 862 | logging.debug(f"config file {configName} created.") 863 | return configName 864 | 865 | 866 | def winRunCore(CORE, configName): 867 | proc = subprocess.Popen(f"{CORE} run -config {configName}", stdout=subprocess.PIPE) 868 | return proc 869 | 870 | 871 | def unixRunCore(CORE, configName): 872 | proc = subprocess.Popen(f"{CORE} run -config {configName}", stdout=subprocess.PIPE, 873 | shell=True, preexec_fn=os.setsid) 874 | return proc 875 | 876 | 877 | def winKillCore(proc): 878 | proc.kill() # subprocess.call(f"TASKKILL /F /PID {proc.pid} /T") 879 | return 880 | 881 | 882 | def unixKillCore(proc): 883 | os.killpg(os.getpgid(proc.pid), signal.SIGTERM) # Send the signal to all the process groups 884 | return 885 | 886 | 887 | def getIPnCountry(proxy, timeOut): 888 | try : 889 | # http://httpbin.org/ip http://ip-api.com/json https://api.ipify.org 890 | result = json.loads(requests.get('http://ip-api.com/json/', proxies=proxy, timeout = timeOut).content) 891 | return result['query'], result['country'] 892 | except Exception as x: 893 | return None, None 894 | 895 | --------------------------------------------------------------------------------
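A minimal usage sketch for the myUtil helpers above (illustrative only, not part of the repo): it assumes proxyUtil is installed, that `is_alive` and `getIPnCountry` are re-exported by the package's star imports, and that a SOCKS5 listener is already running on 127.0.0.1:1080, for example one started with connectMe.

```python
#!/usr/bin/env python3
# Illustrative check of a running local proxy with the helpers defined above.
from proxyUtil import *

# Assumed local SOCKS5 listener; PySocks (pinned in requirements.txt) lets
# requests route traffic through a socks5:// proxy URL.
proxy = {
    "http": "socks5://127.0.0.1:1080",
    "https": "socks5://127.0.0.1:1080",
}

ping = is_alive("https://www.google.com", proxy, timeOut=3)  # 0 means unreachable
if ping:
    ip, country = getIPnCountry(proxy, timeOut=3)  # (None, None) on failure
    print(f"alive: ping={ping}, exit ip={ip} ({country})")
else:
    print("proxy appears dead")
```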