├── http-tunnel ├── http-cla │ ├── python │ │ ├── python2 │ │ │ ├── scrapy │ │ │ │ ├── xbot │ │ │ │ │ ├── xbot │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── spiders │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ └── abuyun.py │ │ │ │ │ │ ├── items.py │ │ │ │ │ │ ├── settings.py │ │ │ │ │ │ └── middlewares.py │ │ │ │ │ ├── scrapy.cfg │ │ │ │ │ ├── run.sh │ │ │ │ │ └── setup.py │ │ │ │ └── README.md │ │ │ ├── requests │ │ │ │ └── proxy-demo.py │ │ │ └── urllib2 │ │ │ │ └── proxy-demo.py │ │ └── python3 │ │ │ ├── scrapy │ │ │ └── xbot │ │ │ │ ├── xbot │ │ │ │ ├── __init__.py │ │ │ │ ├── spiders │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── abuyun.py │ │ │ │ ├── items.py │ │ │ │ ├── settings.py │ │ │ │ └── middlewares.py │ │ │ │ ├── scrapy.cfg │ │ │ │ ├── run.sh │ │ │ │ └── setup.py │ │ │ └── urllib │ │ │ └── proxy-demo.py │ ├── java │ │ ├── httpclient │ │ │ └── 4.4+ │ │ │ │ ├── compile.sh │ │ │ │ ├── lib │ │ │ │ ├── httpclient-osgi-4.3.jar │ │ │ │ ├── org.apache.httpcore.jar │ │ │ │ ├── org.apache.httpclient.jar │ │ │ │ ├── org-apache-commons-logging.jar │ │ │ │ └── apache-httpcomponents-httpclient.jar │ │ │ │ └── HttpClient4xProxyDemo.java │ │ ├── jsoup │ │ │ └── ProxyDemo.java │ │ ├── puppeteer │ │ │ └── ProxyDemo.java │ │ ├── connection │ │ │ └── ProxyDemo.java │ │ └── selenium │ │ │ └── ProxyDemo.java │ ├── phantomjs │ │ ├── run.sh │ │ └── proxy-demo.js │ ├── curl │ │ └── proxy-demo.sh │ ├── ahk │ │ └── proxy-demo.ahk │ ├── php │ │ ├── stream-demo.php │ │ └── curl-demo.php │ ├── nodejs │ │ ├── request │ │ │ └── proxy-demo.js │ │ └── http(s) │ │ │ └── proxy-demo.js │ └── go │ │ └── proxy-demo.go ├── http-dyn │ ├── python │ │ ├── python2 │ │ │ ├── scrapy │ │ │ │ ├── xbot │ │ │ │ │ ├── xbot │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── spiders │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ └── abuyun.py │ │ │ │ │ │ ├── items.py │ │ │ │ │ │ ├── settings.py │ │ │ │ │ │ └── middlewares.py │ │ │ │ │ ├── scrapy.cfg │ │ │ │ │ ├── run.sh │ │ │ │ │ └── setup.py │ │ │ │ └── README.md │ │ │ 
├── requests │ │ │ │ └── proxy-demo.py │ │ │ └── urllib2 │ │ │ │ └── proxy-demo.py │ │ └── python3 │ │ │ ├── scrapy │ │ │ └── xbot │ │ │ │ ├── xbot │ │ │ │ ├── __init__.py │ │ │ │ ├── spiders │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── abuyun.py │ │ │ │ ├── items.py │ │ │ │ ├── settings.py │ │ │ │ └── middlewares.py │ │ │ │ ├── scrapy.cfg │ │ │ │ ├── run.sh │ │ │ │ └── setup.py │ │ │ └── urllib │ │ │ └── proxy-demo.py │ ├── java │ │ ├── httpclient │ │ │ └── 4.4+ │ │ │ │ ├── compile.sh │ │ │ │ ├── lib │ │ │ │ ├── httpclient-osgi-4.3.jar │ │ │ │ ├── org.apache.httpcore.jar │ │ │ │ ├── org.apache.httpclient.jar │ │ │ │ ├── org-apache-commons-logging.jar │ │ │ │ └── apache-httpcomponents-httpclient.jar │ │ │ │ └── HttpClient4xProxyDemo.java │ │ ├── jsoup │ │ │ └── ProxyDemo.java │ │ ├── puppeteer │ │ │ └── ProxyDemo.java │ │ ├── connection │ │ │ └── ProxyDemo.java │ │ └── selenium │ │ │ └── ProxyDemo.java │ ├── phantomjs │ │ ├── run.sh │ │ └── proxy-demo.js │ ├── curl │ │ └── proxy-demo.sh │ ├── php │ │ ├── stream-demo.php │ │ └── curl-demo.php │ ├── ahk │ │ └── proxy-demo.ahk │ ├── nodejs │ │ ├── request │ │ │ └── proxy-demo.js │ │ └── http(s) │ │ │ └── proxy-demo.js │ └── go │ │ └── proxy-demo.go └── http-pro │ ├── python │ ├── python2 │ │ ├── scrapy │ │ │ ├── xbot │ │ │ │ ├── xbot │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── spiders │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ └── abuyun.py │ │ │ │ │ ├── items.py │ │ │ │ │ ├── settings.py │ │ │ │ │ └── middlewares.py │ │ │ │ ├── scrapy.cfg │ │ │ │ ├── run.sh │ │ │ │ └── setup.py │ │ │ └── README.md │ │ ├── requests │ │ │ └── proxy-demo.py │ │ └── urllib2 │ │ │ └── proxy-demo.py │ └── python3 │ │ ├── scrapy │ │ └── xbot │ │ │ ├── xbot │ │ │ ├── __init__.py │ │ │ ├── spiders │ │ │ │ ├── __init__.py │ │ │ │ └── abuyun.py │ │ │ ├── items.py │ │ │ ├── settings.py │ │ │ └── middlewares.py │ │ │ ├── scrapy.cfg │ │ │ ├── run.sh │ │ │ └── setup.py │ │ └── urllib │ │ └── proxy-demo.py │ ├── java │ ├── httpclient │ │ └── 4.4+ │ │ │ ├── 
compile.sh │ │ │ ├── lib │ │ │ ├── httpclient-osgi-4.3.jar │ │ │ ├── org.apache.httpcore.jar │ │ │ ├── org.apache.httpclient.jar │ │ │ ├── org-apache-commons-logging.jar │ │ │ └── apache-httpcomponents-httpclient.jar │ │ │ └── HttpClient4xProxyDemo.java │ ├── jsoup │ │ └── ProxyDemo.java │ ├── puppeteer │ │ └── ProxyDemo.java │ ├── connection │ │ └── ProxyDemo.java │ └── selenium │ │ └── ProxyDemo.java │ ├── phantomjs │ ├── run.sh │ └── proxy-demo.js │ ├── curl │ └── proxy-demo.sh │ ├── ahk │ └── proxy-demo.ahk │ ├── php │ ├── stream-demo.php │ └── curl-demo.php │ ├── nodejs │ ├── request │ │ └── proxy-demo.js │ └── http(s) │ │ └── proxy-demo.js │ └── go │ └── proxy-demo.go ├── .gitignore └── README.md /http-tunnel/http-cla/python/python2/scrapy/xbot/xbot/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/xbot/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/xbot/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/xbot/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/xbot/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/xbot/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/xbot/spiders/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/xbot/spiders/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/xbot/spiders/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/xbot/spiders/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/xbot/spiders/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/xbot/spiders/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .vscode 3 | node_modules 4 | 5 | *.pyc 6 | *.class 7 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/README.md: -------------------------------------------------------------------------------- 1 | # 
运行命令 2 | scrapy crawl abuyun -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/README.md: -------------------------------------------------------------------------------- 1 | # 运行命令 2 | scrapy crawl abuyun -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/README.md: -------------------------------------------------------------------------------- 1 | # 运行命令 2 | scrapy crawl abuyun -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/scrapy.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | default = xbot.settings 3 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/scrapy.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | default = xbot.settings 3 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/scrapy.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | default = xbot.settings 3 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/scrapy.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | default = xbot.settings 3 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/scrapy.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | default = xbot.settings 3 | -------------------------------------------------------------------------------- 
/http-tunnel/http-pro/python/python3/scrapy/xbot/scrapy.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | default = xbot.settings 3 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | scrapy crawl abuyun 4 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | scrapy crawl abuyun 4 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | scrapy crawl abuyun 4 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | scrapy crawl abuyun 4 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | scrapy crawl abuyun 4 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | scrapy crawl abuyun 4 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | 阿布云·云代理 2 | ==== 3 | 4 | 云代理官方使用示例,欢迎提交 pull 进行纠错与补充。 5 | 6 | 提交 pull 前请把隧道通行证书与密钥统一替换成『H01234567890123P』与『0123456789012345』。 7 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/httpclient/4.4+/compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | javac -Djava.ext.dirs=./lib HttpClient4xProxyDemo.java 4 | 5 | java -Djava.ext.dirs=./lib HttpClient4xProxyDemo 6 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/httpclient/4.4+/lib/httpclient-osgi-4.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-cla/java/httpclient/4.4+/lib/httpclient-osgi-4.3.jar -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/httpclient/4.4+/lib/org.apache.httpcore.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-cla/java/httpclient/4.4+/lib/org.apache.httpcore.jar -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/xbot/items.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | from scrapy.item import Item, Field 4 | 5 | class TestItem(Item): 6 | text = Field() 7 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/xbot/items.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from scrapy.item import Item, Field 4 | 5 | class TestItem(Item): 6 | text = Field() 7 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/httpclient/4.4+/compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | javac -Djava.ext.dirs=./lib HttpClient4xProxyDemo.java 4 | 5 | java -Djava.ext.dirs=./lib HttpClient4xProxyDemo 6 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/httpclient/4.4+/lib/httpclient-osgi-4.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-dyn/java/httpclient/4.4+/lib/httpclient-osgi-4.3.jar -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/httpclient/4.4+/lib/org.apache.httpcore.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-dyn/java/httpclient/4.4+/lib/org.apache.httpcore.jar -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/xbot/items.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | from scrapy.item import Item, Field 4 | 5 | class TestItem(Item): 6 | text = Field() 7 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/xbot/items.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from scrapy.item import Item, Field 4 | 5 | class TestItem(Item): 6 | text = Field() 7 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/java/httpclient/4.4+/compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | javac -Djava.ext.dirs=./lib HttpClient4xProxyDemo.java 4 | 5 | java -Djava.ext.dirs=./lib HttpClient4xProxyDemo 6 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/java/httpclient/4.4+/lib/httpclient-osgi-4.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-pro/java/httpclient/4.4+/lib/httpclient-osgi-4.3.jar -------------------------------------------------------------------------------- /http-tunnel/http-pro/java/httpclient/4.4+/lib/org.apache.httpcore.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-pro/java/httpclient/4.4+/lib/org.apache.httpcore.jar -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/xbot/items.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | from scrapy.item import Item, Field 4 | 5 | class TestItem(Item): 6 | text = Field() 7 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/xbot/items.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from scrapy.item import Item, Field 4 | 5 | class TestItem(Item): 6 | text = Field() 7 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/httpclient/4.4+/lib/org.apache.httpclient.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-cla/java/httpclient/4.4+/lib/org.apache.httpclient.jar -------------------------------------------------------------------------------- /http-tunnel/http-cla/phantomjs/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | phantomjs --proxy=http://http-cla.abuyun.com:9030 --proxy-auth=H01234567890123C:0123456789012345 --ignore-ssl-errors=true proxy-demo.js 4 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/httpclient/4.4+/lib/org.apache.httpclient.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-dyn/java/httpclient/4.4+/lib/org.apache.httpclient.jar -------------------------------------------------------------------------------- /http-tunnel/http-dyn/phantomjs/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | phantomjs --proxy=http://http-dyn.abuyun.com:9020 --proxy-auth=H01234567890123D:0123456789012345 --ignore-ssl-errors=true proxy-demo.js 4 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/java/httpclient/4.4+/lib/org.apache.httpclient.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-pro/java/httpclient/4.4+/lib/org.apache.httpclient.jar 
-------------------------------------------------------------------------------- /http-tunnel/http-pro/phantomjs/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | phantomjs --proxy=http://http-pro.abuyun.com:9010 --proxy-auth=H01234567890123P:0123456789012345 --ignore-ssl-errors=true proxy-demo.js 4 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/httpclient/4.4+/lib/org-apache-commons-logging.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-cla/java/httpclient/4.4+/lib/org-apache-commons-logging.jar -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/httpclient/4.4+/lib/org-apache-commons-logging.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-dyn/java/httpclient/4.4+/lib/org-apache-commons-logging.jar -------------------------------------------------------------------------------- /http-tunnel/http-pro/java/httpclient/4.4+/lib/org-apache-commons-logging.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-pro/java/httpclient/4.4+/lib/org-apache-commons-logging.jar -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/httpclient/4.4+/lib/apache-httpcomponents-httpclient.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-cla/java/httpclient/4.4+/lib/apache-httpcomponents-httpclient.jar -------------------------------------------------------------------------------- 
/http-tunnel/http-dyn/curl/proxy-demo.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # 通过隧道请求目标URL 4 | curl -x "http://http-dyn.abuyun.com:9020" --proxy-basic --proxy-user H01234567890123D:0123456789012345 http://test.abuyun.com 5 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/httpclient/4.4+/lib/apache-httpcomponents-httpclient.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-dyn/java/httpclient/4.4+/lib/apache-httpcomponents-httpclient.jar -------------------------------------------------------------------------------- /http-tunnel/http-pro/java/httpclient/4.4+/lib/apache-httpcomponents-httpclient.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abuyun/proxy-demo/HEAD/http-tunnel/http-pro/java/httpclient/4.4+/lib/apache-httpcomponents-httpclient.jar -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='xbot', 5 | version='1.0', 6 | packages=find_packages(), 7 | entry_points={'scrapy': ['settings = xbot.settings']}, 8 | ) 9 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='xbot', 5 | version='1.0', 6 | packages=find_packages(), 7 | entry_points={'scrapy': ['settings = xbot.settings']}, 8 | ) 9 | -------------------------------------------------------------------------------- 
/http-tunnel/http-dyn/python/python2/scrapy/xbot/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='xbot', 5 | version='1.0', 6 | packages=find_packages(), 7 | entry_points={'scrapy': ['settings = xbot.settings']}, 8 | ) 9 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='xbot', 5 | version='1.0', 6 | packages=find_packages(), 7 | entry_points={'scrapy': ['settings = xbot.settings']}, 8 | ) 9 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='xbot', 5 | version='1.0', 6 | packages=find_packages(), 7 | entry_points={'scrapy': ['settings = xbot.settings']}, 8 | ) 9 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='xbot', 5 | version='1.0', 6 | packages=find_packages(), 7 | entry_points={'scrapy': ['settings = xbot.settings']}, 8 | ) 9 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/phantomjs/proxy-demo.js: -------------------------------------------------------------------------------- 1 | var webpage = require('webpage'); 2 | 3 | var page = webpage.create(); 4 | 5 | page.open("http://test.abuyun.com", {}, function(status) { 6 | console.log('>> ' + page.content); 7 | 8 | 
setTimeout(function() { 9 | phantom.exit(); 10 | }, 3000); 11 | }); 12 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/xbot/settings.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | SPIDER_MODULES = ['xbot.spiders'] 4 | NEWSPIDER_MODULE = 'xbot.spiders' 5 | DEFAULT_ITEM_CLASS = 'xbot.items.TestItem' 6 | 7 | ITEM_PIPELINES = {} 8 | 9 | COOKIES_ENABLED=True 10 | 11 | DOWNLOAD_DELAY=3 12 | 13 | DOWNLOADER_MIDDLEWARES = { 14 | 'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90, 15 | 'xbot.middlewares.ProxyMiddleware': 110, 16 | } 17 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/xbot/settings.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | SPIDER_MODULES = ['xbot.spiders'] 4 | NEWSPIDER_MODULE = 'xbot.spiders' 5 | DEFAULT_ITEM_CLASS = 'xbot.items.TestItem' 6 | 7 | ITEM_PIPELINES = {} 8 | 9 | COOKIES_ENABLED=True 10 | 11 | DOWNLOAD_DELAY=3 12 | 13 | DOWNLOADER_MIDDLEWARES = { 14 | 'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90, 15 | 'xbot.middlewares.ProxyMiddleware': 110, 16 | } 17 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/xbot/settings.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | SPIDER_MODULES = ['xbot.spiders'] 4 | NEWSPIDER_MODULE = 'xbot.spiders' 5 | DEFAULT_ITEM_CLASS = 'xbot.items.TestItem' 6 | 7 | ITEM_PIPELINES = {} 8 | 9 | COOKIES_ENABLED=True 10 | 11 | DOWNLOAD_DELAY=3 12 | 13 | DOWNLOADER_MIDDLEWARES = { 14 | 'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90, 15 | 'xbot.middlewares.ProxyMiddleware': 110, 16 | } 17 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/xbot/settings.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | SPIDER_MODULES = ['xbot.spiders'] 4 | NEWSPIDER_MODULE = 'xbot.spiders' 5 | DEFAULT_ITEM_CLASS = 'xbot.items.TestItem' 6 | 7 | ITEM_PIPELINES = {} 8 | 9 | COOKIES_ENABLED=True 10 | 11 | DOWNLOAD_DELAY=3 12 | 13 | DOWNLOADER_MIDDLEWARES = { 14 | 'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90, 15 | 'xbot.middlewares.ProxyMiddleware': 110, 16 | } 17 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/xbot/settings.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | SPIDER_MODULES = ['xbot.spiders'] 4 | NEWSPIDER_MODULE = 'xbot.spiders' 5 | DEFAULT_ITEM_CLASS = 'xbot.items.TestItem' 6 | 7 | ITEM_PIPELINES = {} 8 | 9 | COOKIES_ENABLED=True 10 | 11 | DOWNLOAD_DELAY=3 12 | 13 | DOWNLOADER_MIDDLEWARES = { 14 | 'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90, 15 | 'xbot.middlewares.ProxyMiddleware': 110, 16 | } 17 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/xbot/settings.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | SPIDER_MODULES = ['xbot.spiders'] 4 | NEWSPIDER_MODULE = 'xbot.spiders' 5 | DEFAULT_ITEM_CLASS = 'xbot.items.TestItem' 6 | 7 | ITEM_PIPELINES = {} 8 | 9 | COOKIES_ENABLED=True 10 | 11 | DOWNLOAD_DELAY=3 12 | 13 | DOWNLOADER_MIDDLEWARES = { 14 | 'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90, 15 | 'xbot.middlewares.ProxyMiddleware': 110, 16 | } 17 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/curl/proxy-demo.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # 切换隧道IP 4 | curl -x "http://http-cla.abuyun.com:9030" --proxy-basic --proxy-user H01234567890123C:0123456789012345 http://proxy.abuyun.com/switch-ip 5 | 6 | # 查看隧道当前IP 7 | curl -x "http://http-cla.abuyun.com:9030" --proxy-basic --proxy-user H01234567890123C:0123456789012345 http://proxy.abuyun.com/current-ip 8 | 9 | # 通过隧道请求目标URL 10 | curl -x "http://http-cla.abuyun.com:9030" --proxy-basic --proxy-user H01234567890123C:0123456789012345 http://test.abuyun.com 11 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/curl/proxy-demo.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # 通过隧道请求目标URL 4 | curl -x "http://http-pro.abuyun.com:9010" --proxy-basic --proxy-user H01234567890123P:0123456789012345 http://test.abuyun.com 5 | 6 | # 切换隧道IP 7 | curl -x "http://http-pro.abuyun.com:9010" --proxy-basic --proxy-user H01234567890123P:0123456789012345 http://proxy.abuyun.com/switch-ip 8 | 9 | # 查看隧道当前IP 10 | curl -x "http://http-pro.abuyun.com:9010" --proxy-basic --proxy-user H01234567890123P:0123456789012345 http://proxy.abuyun.com/current-ip 11 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/xbot/spiders/abuyun.py: 
-------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | from scrapy.spiders import Spider 4 | from scrapy.selector import Selector 5 | 6 | from xbot.items import TestItem 7 | 8 | class AbuyunSpider(Spider): 9 | name = "abuyun" 10 | allowed_domains = ["test.abuyun.com"] 11 | start_urls = [ 12 | "http://test.abuyun.com", 13 | ] 14 | 15 | def parse(self, response): 16 | item = TestItem() 17 | item['text'] = response 18 | 19 | items = [item] 20 | 21 | return items 22 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/xbot/spiders/abuyun.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | from scrapy.spiders import Spider 4 | from scrapy.selector import Selector 5 | 6 | from xbot.items import TestItem 7 | 8 | class AbuyunSpider(Spider): 9 | name = "abuyun" 10 | allowed_domains = ["test.abuyun.com"] 11 | start_urls = [ 12 | "http://test.abuyun.com", 13 | ] 14 | 15 | def parse(self, response): 16 | item = TestItem() 17 | item['text'] = response 18 | 19 | items = [item] 20 | 21 | return items 22 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/xbot/spiders/abuyun.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from scrapy.spiders import Spider 4 | from scrapy.selector import Selector 5 | 6 | from xbot.items import TestItem 7 | 8 | class AbuyunSpider(Spider): 9 | name = "abuyun" 10 | allowed_domains = ["test.abuyun.com"] 11 | start_urls = [ 12 | "http://test.abuyun.com", 13 | ] 14 | 15 | def parse(self, response): 16 | item = TestItem() 17 | item['text'] = response 18 | 19 | items = [item] 20 | 21 | return items 22 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/xbot/spiders/abuyun.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | from scrapy.spiders import Spider 4 | from scrapy.selector import Selector 5 | 6 | from xbot.items import TestItem 7 | 8 | class AbuyunSpider(Spider): 9 | name = "abuyun" 10 | allowed_domains = ["test.abuyun.com"] 11 | start_urls = [ 12 | "http://test.abuyun.com", 13 | ] 14 | 15 | def parse(self, response): 16 | item = TestItem() 17 | item['text'] = response 18 | 19 | items = [item] 20 | 21 | return items 22 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/xbot/spiders/abuyun.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from scrapy.spiders import Spider 4 | from scrapy.selector import Selector 5 | 6 | from xbot.items import TestItem 7 | 8 | class AbuyunSpider(Spider): 9 | name = "abuyun" 10 | allowed_domains = ["test.abuyun.com"] 11 | start_urls = [ 12 | "http://test.abuyun.com", 13 | ] 14 | 15 | def parse(self, response): 16 | item = TestItem() 17 | item['text'] = response 18 | 19 | items = [item] 20 | 21 | return items 22 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/xbot/spiders/abuyun.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | from scrapy.spiders import Spider 4 | from scrapy.selector import Selector 5 | 6 | from xbot.items import TestItem 7 | 8 | class AbuyunSpider(Spider): 9 | name = "abuyun" 10 | allowed_domains = ["test.abuyun.com"] 11 | start_urls = [ 12 | "http://test.abuyun.com", 13 | ] 14 | 15 | def parse(self, response): 16 | item = TestItem() 17 | item['text'] = response 18 | 19 | items = [item] 20 | 21 | return items 22 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/requests/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import requests 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | 8 | # 代理服务器 9 | proxyHost = "http-dyn.abuyun.com" 10 | proxyPort = "9020" 11 | 12 | # 代理隧道验证信息 13 | proxyUser = "H01234567890123D" 14 | proxyPass = "0123456789012345" 15 | 16 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 17 | "host" : proxyHost, 18 | "port" : proxyPort, 19 | "user" : proxyUser, 20 | "pass" : proxyPass, 21 | } 22 | 23 | proxies = { 24 | "http" : proxyMeta, 25 | "https" : proxyMeta, 26 | } 27 | 28 | resp = requests.get(targetUrl, proxies=proxies) 29 | 30 | print resp.status_code 31 | print resp.text 32 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/phantomjs/proxy-demo.js: -------------------------------------------------------------------------------- 1 | var webpage = require('webpage'); 2 | 3 | var page = webpage.create(); 4 | 5 | page.open("http://proxy.abuyun.com/current-ip", {}, function(status) { 6 | console.log('>> ' + page.content); 7 | 8 | var page2 = webpage.create(); 9 | 10 | page2.onResourceReceived = function(j) { 11 | for (var i = 0; i < j.headers.length; i++) { 12 | console.log(j.headers[i].name + ': ' + j.headers[i].value); 13 | } 14 | }; 15 | 16 | page2.open("http://test.abuyun.com", {}, function(status) { 17 | console.log('status> ' + status); 18 | console.log(page.content); 19 | 20 | setTimeout(function() { 21 | phantom.exit(); 22 | }, 3000); 23 | }); 24 | }); 25 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/phantomjs/proxy-demo.js: -------------------------------------------------------------------------------- 1 | var webpage = require('webpage'); 2 | 3 | var page = webpage.create(); 4 | 5 | page.open("http://proxy.abuyun.com/current-ip", {}, function(status) { 6 | console.log('>> ' + page.content); 7 | 8 | var page2 = webpage.create(); 9 | 10 | page2.onResourceReceived = function(j) { 11 
| for (var i = 0; i < j.headers.length; i++) { 12 | console.log(j.headers[i].name + ': ' + j.headers[i].value); 13 | } 14 | }; 15 | 16 | page2.open("http://test.abuyun.com", {}, function(status) { 17 | console.log('status> ' + status); 18 | console.log(page.content); 19 | 20 | setTimeout(function() { 21 | phantom.exit(); 22 | }, 3000); 23 | }); 24 | }); 25 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/php/stream-demo.php: -------------------------------------------------------------------------------- 1 | [ 21 | "proxy" => PROXY_SERVER, 22 | "header" => $headers, 23 | "method" => "GET", 24 | ], 25 | ]; 26 | 27 | $context = stream_context_create($options); 28 | 29 | $result = file_get_contents($targetUrl, false, $context); 30 | 31 | var_dump($result); -------------------------------------------------------------------------------- /http-tunnel/http-cla/ahk/proxy-demo.ahk: -------------------------------------------------------------------------------- 1 | HTTPREQUEST_PROXYSETTING_PROXY := 2 2 | HTTPREQUEST_SETCREDENTIALS_FOR_PROXY := 1 3 | 4 | ;~ 代理服务器 5 | proxyHost := "http-cla.abuyun.com:9030" 6 | 7 | ;~ 代理隧道验证信息 8 | proxyUser := "H01234567890123C" 9 | proxyPass := "0123456789012345" 10 | 11 | ;~ 要访问的目标页面 12 | targetUrl := "http://test.abuyun.com" 13 | 14 | whr := ComObjCreate("WinHttp.WinHttpRequest.5.1") 15 | whr.Open("GET", targetUrl, true) 16 | 17 | ;~ 模拟curl的ua,方便测试 18 | whr.SetRequestHeader("User-Agent", "curl/7.41.0") 19 | 20 | ;~ 设置代理服务器 21 | whr.SetProxy(HTTPREQUEST_PROXYSETTING_PROXY, proxyHost) 22 | 23 | ;~ 设置代理隧道验证信息 24 | whr.SetCredentials(proxyUser, proxyPass, HTTPREQUEST_SETCREDENTIALS_FOR_PROXY) 25 | 26 | whr.Send() 27 | whr.WaitForResponse() 28 | 29 | MsgBox % whr.ResponseText 30 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/ahk/proxy-demo.ahk: -------------------------------------------------------------------------------- 1 | 
HTTPREQUEST_PROXYSETTING_PROXY := 2 2 | HTTPREQUEST_SETCREDENTIALS_FOR_PROXY := 1 3 | 4 | ;~ 代理服务器 5 | proxyHost := "http-dyn.abuyun.com:9020" 6 | 7 | ;~ 代理隧道验证信息 8 | proxyUser := "H01234567890123D" 9 | proxyPass := "0123456789012345" 10 | 11 | ;~ 要访问的目标页面 12 | targetUrl := "http://test.abuyun.com" 13 | 14 | whr := ComObjCreate("WinHttp.WinHttpRequest.5.1") 15 | whr.Open("GET", targetUrl, true) 16 | ;~ 模拟curl的ua,方便测试 17 | 18 | whr.SetRequestHeader("User-Agent", "curl/7.41.0") 19 | 20 | ;~ 设置代理服务器 21 | whr.SetProxy(HTTPREQUEST_PROXYSETTING_PROXY, proxyHost) 22 | 23 | ;~ 设置代理隧道验证信息 24 | whr.SetCredentials(proxyUser, proxyPass, HTTPREQUEST_SETCREDENTIALS_FOR_PROXY) 25 | 26 | whr.Send() 27 | whr.WaitForResponse() 28 | 29 | MsgBox % whr.ResponseText 30 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/ahk/proxy-demo.ahk: -------------------------------------------------------------------------------- 1 | HTTPREQUEST_PROXYSETTING_PROXY := 2 2 | HTTPREQUEST_SETCREDENTIALS_FOR_PROXY := 1 3 | 4 | ;~ 代理服务器 5 | proxyHost := "http-pro.abuyun.com:9010" 6 | 7 | ;~ 代理隧道验证信息 8 | proxyUser := "H01234567890123P" 9 | proxyPass := "0123456789012345" 10 | 11 | ;~ 要访问的目标页面 12 | targetUrl := "http://test.abuyun.com" 13 | 14 | whr := ComObjCreate("WinHttp.WinHttpRequest.5.1") 15 | whr.Open("GET", targetUrl, true) 16 | 17 | ;~ 模拟curl的ua,方便测试 18 | whr.SetRequestHeader("User-Agent", "curl/7.41.0") 19 | 20 | ;~ 设置代理服务器 21 | whr.SetProxy(HTTPREQUEST_PROXYSETTING_PROXY, proxyHost) 22 | 23 | ;~ 设置代理隧道验证信息 24 | whr.SetCredentials(proxyUser, proxyPass, HTTPREQUEST_SETCREDENTIALS_FOR_PROXY) 25 | 26 | whr.Send() 27 | whr.WaitForResponse() 28 | 29 | MsgBox % whr.ResponseText 30 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/requests/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import requests 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | #targetUrl = "http://proxy.abuyun.com/switch-ip" 8 | #targetUrl = "http://proxy.abuyun.com/current-ip" 9 | 10 | # 代理服务器 11 | proxyHost = "http-cla.abuyun.com" 12 | proxyPort = "9030" 13 | 14 | # 代理隧道验证信息 15 | proxyUser = "H01234567890123C" 16 | proxyPass = "0123456789012345" 17 | 18 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 19 | "host" : proxyHost, 20 | "port" : proxyPort, 21 | "user" : proxyUser, 22 | "pass" : proxyPass, 23 | } 24 | 25 | proxies = { 26 | "http" : proxyMeta, 27 | "https" : proxyMeta, 28 | } 29 | 30 | resp = requests.get(targetUrl, proxies=proxies) 31 | 32 | print resp.status_code 33 | print resp.text 34 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/requests/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | import requests 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | #targetUrl = "http://proxy.abuyun.com/switch-ip" 8 | #targetUrl = "http://proxy.abuyun.com/current-ip" 9 | 10 | # 代理服务器 11 | proxyHost = "http-pro.abuyun.com" 12 | proxyPort = "9010" 13 | 14 | # 代理隧道验证信息 15 | proxyUser = "H01234567890123P" 16 | proxyPass = "0123456789012345" 17 | 18 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 19 | "host" : proxyHost, 20 | "port" : proxyPort, 21 | "user" : proxyUser, 22 | "pass" : proxyPass, 23 | } 24 | 25 | proxies = { 26 | "http" : proxyMeta, 27 | "https" : proxyMeta, 28 | } 29 | 30 | resp = requests.get(targetUrl, proxies=proxies) 31 | 32 | print resp.status_code 33 | print resp.text 34 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/scrapy/xbot/xbot/middlewares.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import base64 4 | from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware 5 | 6 | # 代理服务器 7 | proxyServer = "http://http-cla.abuyun.com:9030" 8 | 9 | # 隧道身份信息 10 | proxyUser = "H01234567890123C" 11 | proxyPass = "0123456789012345" 12 | proxyAuth = "Basic " + base64.urlsafe_b64encode(proxyUser + ":" + proxyPass) 13 | 14 | class ProxyMiddleware(HttpProxyMiddleware): 15 | proxies = {} 16 | 17 | def __init__(self, auth_encoding='latin-1'): 18 | self.auth_encoding = auth_encoding 19 | 20 | self.proxies[proxyServer] = proxyUser + proxyPass 21 | 22 | def process_request(self, request, spider): 23 | request.meta["proxy"] = proxyServer 24 | 25 | request.headers["Proxy-Authorization"] = proxyAuth 26 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/scrapy/xbot/xbot/middlewares.py: -------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | import base64 4 | from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware 5 | 6 | # 代理服务器 7 | proxyServer = "http://http-pro.abuyun.com:9010" 8 | 9 | # 隧道身份信息 10 | proxyUser = "H01234567890123P" 11 | proxyPass = "0123456789012345" 12 | proxyAuth = "Basic " + base64.urlsafe_b64encode(proxyUser + ":" + proxyPass) 13 | 14 | class ProxyMiddleware(HttpProxyMiddleware): 15 | proxies = {} 16 | 17 | def __init__(self, auth_encoding='latin-1'): 18 | self.auth_encoding = auth_encoding 19 | 20 | self.proxies[proxyServer] = proxyUser + proxyPass 21 | 22 | def process_request(self, request, spider): 23 | request.meta["proxy"] = proxyServer 24 | 25 | request.headers["Proxy-Authorization"] = proxyAuth 26 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/urllib2/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import urllib2 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | 8 | # 代理服务器 9 | proxyHost = "http-dyn.abuyun.com" 10 | proxyPort = "9020" 11 | 12 | # 代理隧道验证信息 13 | proxyUser = "H01234567890123D" 14 | proxyPass = "0123456789012345" 15 | 16 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 17 | "host" : proxyHost, 18 | "port" : proxyPort, 19 | "user" : proxyUser, 20 | "pass" : proxyPass, 21 | } 22 | 23 | proxy_handler = urllib2.ProxyHandler({ 24 | "http" : proxyMeta, 25 | "https" : proxyMeta, 26 | }) 27 | 28 | opener = urllib2.build_opener(proxy_handler) 29 | 30 | #opener.addheaders = [("Proxy-Switch-Ip", "yes")] 31 | urllib2.install_opener(opener) 32 | resp = urllib2.urlopen(targetUrl).read() 33 | 34 | print resp -------------------------------------------------------------------------------- /http-tunnel/http-dyn/nodejs/request/proxy-demo.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const request = require("request"); 4 | 5 | // 要访问的目标页面 6 | const targetUrl = "http://test.abuyun.com"; 7 | 8 | // 代理服务器 9 | const proxyHost = "http-dyn.abuyun.com"; 10 | const proxyPort = 9020; 11 | 12 | // 代理隧道验证信息 13 | const proxyUser = "H01234567890123D"; 14 | const proxyPass = "0123456789012345"; 15 | 16 | const proxyUrl = "http://" + proxyUser + ":" + proxyPass + "@" + proxyHost + ":" + proxyPort; 17 | 18 | const proxiedRequest = request.defaults({"proxy": proxyUrl}); 19 | 20 | const options = { 21 | url : targetUrl, 22 | headers : { 23 | } 24 | }; 25 | 26 | proxiedRequest 27 | .get(options, function (err, res, body) { 28 | console.log("got response: " + res.statusCode); 29 | }) 30 | .on("error", function (err) { 31 | console.log(err); 32 | }) 33 | ; 34 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python2/scrapy/xbot/xbot/middlewares.py: 
-------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | import base64 4 | from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware 5 | 6 | # 代理服务器 7 | proxyServer = "http://http-dyn.abuyun.com:9020" 8 | 9 | # 隧道身份信息 10 | proxyUser = "H01234567890123D" 11 | proxyPass = "0123456789012345" 12 | proxyAuth = "Basic " + base64.urlsafe_b64encode(proxyUser + ":" + proxyPass) 13 | 14 | class ProxyMiddleware(HttpProxyMiddleware): 15 | proxies = {} 16 | 17 | def __init__(self, auth_encoding='latin-1'): 18 | self.auth_encoding = auth_encoding 19 | 20 | self.proxies[proxyServer] = proxyUser + proxyPass 21 | 22 | def process_request(self, request, spider): 23 | request.meta["proxy"] = proxyServer 24 | 25 | request.headers["Proxy-Authorization"] = proxyAuth 26 | request.headers["Connection"] = "close" 27 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/php/stream-demo.php: -------------------------------------------------------------------------------- 1 | [ 24 | "proxy" => PROXY_SERVER, 25 | "header" => $headers, 26 | "method" => "GET", 27 | ], 28 | ]; 29 | 30 | $context = stream_context_create($options); 31 | 32 | $result = file_get_contents($targetUrl, false, $context); 33 | 34 | var_dump($result); -------------------------------------------------------------------------------- /http-tunnel/http-pro/php/stream-demo.php: -------------------------------------------------------------------------------- 1 | [ 24 | "proxy" => PROXY_SERVER, 25 | "header" => $headers, 26 | "method" => "GET", 27 | ], 28 | ]; 29 | 30 | $context = stream_context_create($options); 31 | 32 | $result = file_get_contents($targetUrl, false, $context); 33 | 34 | var_dump($result); -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/scrapy/xbot/xbot/middlewares.py: 
-------------------------------------------------------------------------------- 1 | #! -*- encoding:utf-8 -*- 2 | 3 | import base64 4 | from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware 5 | 6 | # 代理服务器 7 | proxyServer = "http://http-cla.abuyun.com:9030" 8 | 9 | # 隧道身份信息 10 | proxyUser = b"H01234567890123C" 11 | proxyPass = b"0123456789012345" 12 | proxyAuth = "Basic " + base64.b64encode(proxyUser + b":" + proxyPass).decode() 13 | 14 | class ProxyMiddleware(HttpProxyMiddleware): 15 | proxies = {} 16 | 17 | def __init__(self, auth_encoding='latin-1'): 18 | self.auth_encoding = auth_encoding 19 | 20 | self.proxies[proxyServer] = proxyUser + proxyPass 21 | 22 | def process_request(self, request, spider): 23 | request.meta["proxy"] = proxyServer 24 | request.meta["_auth_proxy"] = proxyServer 25 | 26 | request.headers["Proxy-Authorization"] = proxyAuth 27 | request.headers["Connection"] = "close" -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/scrapy/xbot/xbot/middlewares.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import base64 4 | from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware 5 | 6 | # 代理服务器 7 | proxyServer = "http://http-pro.abuyun.com:9010" 8 | 9 | # 隧道身份信息 10 | proxyUser = b"H01234567890123P" 11 | proxyPass = b"0123456789012345" 12 | proxyAuth = "Basic " + base64.b64encode(proxyUser + b":" + proxyPass).decode() 13 | 14 | class ProxyMiddleware(HttpProxyMiddleware): 15 | proxies = {} 16 | 17 | def __init__(self, auth_encoding='latin-1'): 18 | self.auth_encoding = auth_encoding 19 | 20 | self.proxies[proxyServer] = proxyUser + proxyPass 21 | 22 | def process_request(self, request, spider): 23 | request.meta["proxy"] = proxyServer 24 | request.meta["_auth_proxy"] = proxyServer 25 | 26 | request.headers["Proxy-Authorization"] = proxyAuth 27 | request.headers["Connection"] = "close" -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python2/urllib2/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import urllib2 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | #targetUrl = "http://proxy.abuyun.com/switch-ip" 8 | #targetUrl = "http://proxy.abuyun.com/current-ip" 9 | 10 | # 代理服务器 11 | proxyHost = "http-cla.abuyun.com" 12 | proxyPort = "9030" 13 | 14 | # 代理隧道验证信息 15 | proxyUser = "H01234567890123C" 16 | proxyPass = "0123456789012345" 17 | 18 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 19 | "host" : proxyHost, 20 | "port" : proxyPort, 21 | "user" : proxyUser, 22 | "pass" : proxyPass, 23 | } 24 | 25 | proxy_handler = urllib2.ProxyHandler({ 26 | "http" : proxyMeta, 27 | "https" : proxyMeta, 28 | }) 29 | 30 | opener = urllib2.build_opener(proxy_handler) 31 | 32 | #opener.addheaders = [("Proxy-Switch-Ip", "yes")] 33 | urllib2.install_opener(opener) 34 | resp = urllib2.urlopen(targetUrl).read() 35 | 36 | print resp -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/scrapy/xbot/xbot/middlewares.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import base64 4 | from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware 5 | 6 | # 代理服务器 7 | proxyServer = "http://http-dyn.abuyun.com:9020" 8 | 9 | # 隧道身份信息 10 | proxyUser = b"H01234567890123D" 11 | proxyPass = b"0123456789012345" 12 | proxyAuth = "Basic " + base64.b64encode(proxyUser + b":" + proxyPass).decode() 13 | 14 | class ProxyMiddleware(HttpProxyMiddleware): 15 | proxies = {} 16 | 17 | def __init__(self, auth_encoding='latin-1'): 18 | self.auth_encoding = auth_encoding 19 | 20 | self.proxies[proxyServer] = proxyUser + proxyPass 21 | 22 | def process_request(self, request, spider): 23 | request.meta["proxy"] = proxyServer 24 | request.meta["_auth_proxy"] = proxyServer 25 | 26 | request.headers["Proxy-Authorization"] = proxyAuth 27 | request.headers["Connection"] = "close" 28 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python2/urllib2/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | import urllib2 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | #targetUrl = "http://proxy.abuyun.com/switch-ip" 8 | #targetUrl = "http://proxy.abuyun.com/current-ip" 9 | 10 | # 代理服务器 11 | proxyHost = "http-pro.abuyun.com" 12 | proxyPort = "9010" 13 | 14 | # 代理隧道验证信息 15 | proxyUser = "H01234567890123P" 16 | proxyPass = "0123456789012345" 17 | 18 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 19 | "host" : proxyHost, 20 | "port" : proxyPort, 21 | "user" : proxyUser, 22 | "pass" : proxyPass, 23 | } 24 | 25 | proxy_handler = urllib2.ProxyHandler({ 26 | "http" : proxyMeta, 27 | "https" : proxyMeta, 28 | }) 29 | 30 | opener = urllib2.build_opener(proxy_handler) 31 | 32 | #opener.addheaders = [("Proxy-Switch-Ip", "yes")] 33 | urllib2.install_opener(opener) 34 | resp = urllib2.urlopen(targetUrl).read() 35 | 36 | print resp -------------------------------------------------------------------------------- /http-tunnel/http-dyn/python/python3/urllib/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from urllib import request 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | 8 | # 代理服务器 9 | proxyHost = "http-dyn.abuyun.com" 10 | proxyPort = "9020" 11 | 12 | # 代理隧道验证信息 13 | proxyUser = "H01234567890123D" 14 | proxyPass = "0123456789012345" 15 | 16 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 17 | "host" : proxyHost, 18 | "port" : proxyPort, 19 | "user" : proxyUser, 20 | "pass" : proxyPass, 21 | } 22 | 23 | proxy_handler = request.ProxyHandler({ 24 | "http" : proxyMeta, 25 | "https" : proxyMeta, 26 | }) 27 | 28 | #auth = request.HTTPBasicAuthHandler() 29 | #opener = request.build_opener(proxy_handler, auth, request.HTTPHandler) 30 | 31 | opener = request.build_opener(proxy_handler) 32 | 33 | # opener.addheaders = [("Proxy-Switch-Ip", "yes")] 34 | request.install_opener(opener) 35 | resp = request.urlopen(targetUrl).read() 36 | 37 | print (resp) -------------------------------------------------------------------------------- /http-tunnel/http-cla/nodejs/request/proxy-demo.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const request = require("request"); 4 | 5 | // 要访问的目标页面 6 | const targetUrl = "http://test.abuyun.com"; 7 | //const targetUrl = "http://proxy.abuyun.com/switch-ip"; 8 | //const targetUrl = "http://proxy.abuyun.com/current-ip"; 9 | 10 | // 代理服务器 11 | const proxyHost = "http-cla.abuyun.com"; 12 | const proxyPort = 9030; 13 | 14 | // 代理隧道验证信息 15 | const proxyUser = "H01234567890123C"; 16 | const proxyPass = "0123456789012345"; 17 | 18 | const proxyUrl = "http://" + proxyUser + ":" + proxyPass + "@" + proxyHost + ":" + proxyPort; 19 | 20 | const proxiedRequest = request.defaults({"proxy": proxyUrl}); 21 | 22 | const options = { 23 | url : targetUrl, 24 | headers : { 25 | "Proxy-Switch-Ip" : "yes" 26 | } 27 | }; 28 | 29 | proxiedRequest 30 | .get(options, function (err, res, body) { 31 | console.log("got response: " + 
res.statusCode); 32 | }) 33 | .on("error", function (err) { 34 | console.log(err); 35 | }) 36 | ; 37 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/nodejs/request/proxy-demo.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const request = require("request"); 4 | 5 | // 要访问的目标页面 6 | const targetUrl = "http://test.abuyun.com"; 7 | //const targetUrl = "http://proxy.abuyun.com/switch-ip"; 8 | //const targetUrl = "http://proxy.abuyun.com/current-ip"; 9 | 10 | // 代理服务器 11 | const proxyHost = "http-pro.abuyun.com"; 12 | const proxyPort = 9010; 13 | 14 | // 代理隧道验证信息 15 | const proxyUser = "H01234567890123P"; 16 | const proxyPass = "0123456789012345"; 17 | 18 | const proxyUrl = "http://" + proxyUser + ":" + proxyPass + "@" + proxyHost + ":" + proxyPort; 19 | 20 | const proxiedRequest = request.defaults({"proxy": proxyUrl}); 21 | 22 | const options = { 23 | url : targetUrl, 24 | headers : { 25 | "Proxy-Switch-Ip" : "yes" 26 | } 27 | }; 28 | 29 | proxiedRequest 30 | .get(options, function (err, res, body) { 31 | console.log("got response: " + res.statusCode); 32 | }) 33 | .on("error", function (err) { 34 | console.log(err); 35 | }) 36 | ; 37 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/python/python3/urllib/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from urllib import request 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | #targetUrl = "http://proxy.abuyun.com/switch-ip" 8 | #targetUrl = "http://proxy.abuyun.com/current-ip" 9 | 10 | # 代理服务器 11 | proxyHost = "http-cla.abuyun.com" 12 | proxyPort = "9030" 13 | 14 | # 代理隧道验证信息 15 | proxyUser = "H01234567890123C" 16 | proxyPass = "0123456789012345" 17 | 18 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 19 | "host" : proxyHost, 20 | "port" : proxyPort, 21 | "user" : proxyUser, 22 | "pass" : proxyPass, 23 | } 24 | 25 | proxy_handler = request.ProxyHandler({ 26 | "http" : proxyMeta, 27 | "https" : proxyMeta, 28 | }) 29 | 30 | #auth = request.HTTPBasicAuthHandler() 31 | #opener = request.build_opener(proxy_handler, auth, request.HTTPHandler) 32 | 33 | opener = request.build_opener(proxy_handler) 34 | 35 | # opener.addheaders = [("Proxy-Switch-Ip", "yes")] 36 | request.install_opener(opener) 37 | resp = request.urlopen(targetUrl).read() 38 | 39 | print (resp) -------------------------------------------------------------------------------- /http-tunnel/http-pro/python/python3/urllib/proxy-demo.py: -------------------------------------------------------------------------------- 1 | #! 
-*- encoding:utf-8 -*- 2 | 3 | from urllib import request 4 | 5 | # 要访问的目标页面 6 | targetUrl = "http://test.abuyun.com" 7 | #targetUrl = "http://proxy.abuyun.com/switch-ip" 8 | #targetUrl = "http://proxy.abuyun.com/current-ip" 9 | 10 | # 代理服务器 11 | proxyHost = "http-pro.abuyun.com" 12 | proxyPort = "9010" 13 | 14 | # 代理隧道验证信息 15 | proxyUser = "H01234567890123P" 16 | proxyPass = "0123456789012345" 17 | 18 | proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % { 19 | "host" : proxyHost, 20 | "port" : proxyPort, 21 | "user" : proxyUser, 22 | "pass" : proxyPass, 23 | } 24 | 25 | proxy_handler = request.ProxyHandler({ 26 | "http" : proxyMeta, 27 | "https" : proxyMeta, 28 | }) 29 | 30 | #auth = request.HTTPBasicAuthHandler() 31 | #opener = request.build_opener(proxy_handler, auth, request.HTTPHandler) 32 | 33 | opener = request.build_opener(proxy_handler) 34 | 35 | # opener.addheaders = [("Proxy-Switch-Ip", "yes")] 36 | request.install_opener(opener) 37 | resp = request.urlopen(targetUrl).read() 38 | 39 | print (resp) -------------------------------------------------------------------------------- /http-tunnel/http-dyn/go/proxy-demo.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "net/url" 5 | "net/http" 6 | "bytes" 7 | "fmt" 8 | "io/ioutil" 9 | ) 10 | 11 | const ProxyServer = "http-dyn.abuyun.com:9020" 12 | 13 | type ProxyAuth struct { 14 | License string 15 | SecretKey string 16 | } 17 | 18 | func (p ProxyAuth) ProxyClient() http.Client { 19 | proxyURL, _ := url.Parse("http://" + p.License + ":" + p.SecretKey + "@" + ProxyServer) 20 | return http.Client{Transport: &http.Transport{Proxy:http.ProxyURL(proxyURL)}} 21 | } 22 | 23 | func main() { 24 | targetURI := "http://test.abuyun.com" 25 | 26 | // 初始化 proxy http client 27 | client := ProxyAuth{License: "H01234567890123D", SecretKey: "0123456789012345"}.ProxyClient() 28 | 29 | request, _ := http.NewRequest("GET", targetURI, bytes.NewBuffer([] 
byte(``))) 30 | 31 | response, err := client.Do(request) 32 | 33 | if err != nil { 34 | panic("failed to connect: " + err.Error()) 35 | } else { 36 | bodyByte, err := ioutil.ReadAll(response.Body) 37 | if err != nil { 38 | fmt.Println("读取 Body 时出错", err) 39 | return 40 | } 41 | response.Body.Close() 42 | 43 | body := string(bodyByte) 44 | 45 | fmt.Println("Response Status:", response.Status) 46 | fmt.Println("Response Header:", response.Header) 47 | fmt.Println("Response Body:\n", body) 48 | } 49 | } -------------------------------------------------------------------------------- /http-tunnel/http-cla/go/proxy-demo.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "net/url" 5 | "net/http" 6 | "bytes" 7 | "fmt" 8 | "io/ioutil" 9 | ) 10 | 11 | const ProxyServer = "http-cla.abuyun.com:9030" 12 | 13 | type ProxyAuth struct { 14 | License string 15 | SecretKey string 16 | } 17 | 18 | func (p ProxyAuth) ProxyClient() http.Client { 19 | proxyURL, _ := url.Parse("http://" + p.License + ":" + p.SecretKey + "@" + ProxyServer) 20 | return http.Client{Transport: &http.Transport{Proxy:http.ProxyURL(proxyURL)}} 21 | } 22 | 23 | func main() { 24 | targetURI := "http://test.abuyun.com" 25 | //targetURI := "http://proxy.abuyun.com/switch-ip" 26 | //targetURI := "http://proxy.abuyun.com/current-ip" 27 | 28 | // 初始化 proxy http client 29 | client := ProxyAuth{License: "H01234567890123C", SecretKey: "0123456789012345"}.ProxyClient() 30 | 31 | request, _ := http.NewRequest("GET", targetURI, bytes.NewBuffer([] byte(``))) 32 | 33 | // 切换IP (只支持 HTTP) 34 | request.Header.Set("Proxy-Switch-Ip", "yes") 35 | 36 | response, err := client.Do(request) 37 | 38 | if err != nil { 39 | panic("failed to connect: " + err.Error()) 40 | } else { 41 | bodyByte, err := ioutil.ReadAll(response.Body) 42 | if err != nil { 43 | fmt.Println("读取 Body 时出错", err) 44 | return 45 | } 46 | response.Body.Close() 47 | 48 | body := 
string(bodyByte) 49 | 50 | fmt.Println("Response Status:", response.Status) 51 | fmt.Println("Response Header:", response.Header) 52 | fmt.Println("Response Body:\n", body) 53 | } 54 | } -------------------------------------------------------------------------------- /http-tunnel/http-pro/go/proxy-demo.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "net/url" 5 | "net/http" 6 | "bytes" 7 | "fmt" 8 | "io/ioutil" 9 | ) 10 | 11 | const ProxyServer = "http-pro.abuyun.com:9010" 12 | 13 | type ProxyAuth struct { 14 | License string 15 | SecretKey string 16 | } 17 | 18 | func (p ProxyAuth) ProxyClient() http.Client { 19 | proxyURL, _ := url.Parse("http://" + p.License + ":" + p.SecretKey + "@" + ProxyServer) 20 | return http.Client{Transport: &http.Transport{Proxy:http.ProxyURL(proxyURL)}} 21 | } 22 | 23 | func main() { 24 | targetURI := "http://test.abuyun.com" 25 | //targetURI := "http://proxy.abuyun.com/switch-ip" 26 | //targetURI := "http://proxy.abuyun.com/current-ip" 27 | 28 | // 初始化 proxy http client 29 | client := ProxyAuth{License: "H01234567890123P", SecretKey: "0123456789012345"}.ProxyClient() 30 | 31 | request, _ := http.NewRequest("GET", targetURI, bytes.NewBuffer([] byte(``))) 32 | 33 | // 切换IP (只支持 HTTP) 34 | request.Header.Set("Proxy-Switch-Ip", "yes") 35 | 36 | response, err := client.Do(request) 37 | 38 | if err != nil { 39 | panic("failed to connect: " + err.Error()) 40 | } else { 41 | bodyByte, err := ioutil.ReadAll(response.Body) 42 | if err != nil { 43 | fmt.Println("读取 Body 时出错", err) 44 | return 45 | } 46 | response.Body.Close() 47 | 48 | body := string(bodyByte) 49 | 50 | fmt.Println("Response Status:", response.Status) 51 | fmt.Println("Response Header:", response.Header) 52 | fmt.Println("Response Body:\n", body) 53 | } 54 | } -------------------------------------------------------------------------------- /http-tunnel/http-dyn/php/curl-demo.php: 
// NOTE(review): package name is misspelled ("puppeteer" intended); kept as-is
// so the fully-qualified name and source directory stay compatible.
package puppteer;

import com.ruiyun.jvppeteer.core.Puppeteer;
import com.ruiyun.jvppeteer.core.browser.Browser;
import com.ruiyun.jvppeteer.core.browser.BrowserFetcher;
import com.ruiyun.jvppeteer.core.page.Page;
import com.ruiyun.jvppeteer.options.LaunchOptions;
import com.ruiyun.jvppeteer.options.LaunchOptionsBuilder;
import com.ruiyun.jvppeteer.protocol.webAuthn.Credentials;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;

/**
 * Headless-Chromium demo that routes traffic through the Abuyun dynamic
 * HTTP tunnel using jvppeteer (a Java port of Puppeteer): launches Chromium
 * with a --proxy-server flag, answers the proxy auth challenge, loads the
 * test page, and saves a screenshot.
 */
public class ProxyDemo {

    // Proxy tunnel credentials.
    // TODO: replace with the values from your account console.
    final static String ProxyUser = "H01234567890123D";
    final static String ProxyPass = "0123456789012345";

    // Proxy server endpoint.
    final static String ProxyHost = "http-dyn.abuyun.com";
    final static Integer ProxyPort = 9020;

    public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
        // Chromium launch flags. Generics restored: the raw ArrayList lost its
        // <String> parameter, presumably during extraction.
        ArrayList<String> launchArgs = new ArrayList<>();
        launchArgs.add("--no-sandbox");
        launchArgs.add("--disable-setuid-sandbox");

        String proxy = ProxyHost + ":" + ProxyPort;
        launchArgs.add("--proxy-server=" + proxy);

        // Download a bundled Chromium automatically if none is installed.
        // BrowserFetcher.downloadIfNotExist(null);

        LaunchOptions options = new LaunchOptionsBuilder()
                .withArgs(launchArgs)
                .withExecutablePath("/Applications/Chromium.app/Contents/MacOS/Chromium")
                // Run without a visible browser window.
                .withHeadless(true)
                .build();

        Browser browser = Puppeteer.launch(options);

        Page page = browser.newPage();
        Map<String, String> headers = new HashMap<>();

        // Answer the proxy's 407 challenge with the tunnel credentials.
        page.authenticate(new Credentials(ProxyUser, ProxyPass));
        page.setExtraHTTPHeaders(headers);
        page.goTo("https://test.abuyun.com");
        page.screenshot("/tmp/screenshot_test_abu.png");
    }
}
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.Authenticator;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.PasswordAuthentication;
import java.net.Proxy;
import java.net.URL;

/** Supplies the tunnel user/password when the proxy issues an auth challenge. */
class ProxyAuthenticator extends Authenticator {
    private final String user;
    private final String password;

    public ProxyAuthenticator(String user, String password) {
        this.user = user;
        this.password = password;
    }

    @Override
    protected PasswordAuthentication getPasswordAuthentication() {
        return new PasswordAuthentication(user, password.toCharArray());
    }
}

/**
 * HttpURLConnection demo for the Abuyun "classic" HTTP tunnel.
 *
 * Note: each request is a stateless HTTP call; when the Proxy-Switch-Ip
 * header is sent, the egress IP changes for that request. If multiple
 * threads embed this code, each call gets a fresh IP; de-duplication of
 * IPs, if needed, is the caller's responsibility.
 */
public class ProxyDemo {
    public static void main(String[] args) throws Exception {
        // Target page.
        String targetUrl = "http://test.abuyun.com";
        //String targetUrl = "http://proxy.abuyun.com/switch-ip";
        //String targetUrl = "http://proxy.abuyun.com/current-ip";

        // Since JDK 8u111, Basic auth is disabled by default for HTTPS proxy
        // tunneling; clear the property so proxy authentication also works
        // for HTTPS targets (same fix the http-dyn variant of this demo has).
        System.setProperty("jdk.http.auth.tunneling.disabledSchemes", "");

        // Proxy server.
        String proxyServer = "http-cla.abuyun.com";
        int proxyPort = 9030;

        // Proxy tunnel credentials.
        String proxyUser = "H01234567890123C";
        String proxyPass = "0123456789012345";

        try {
            URL url = new URL(targetUrl);

            Authenticator.setDefault(new ProxyAuthenticator(proxyUser, proxyPass));

            // Proxy server address and an HTTP-type proxy over it.
            InetSocketAddress addr = new InetSocketAddress(proxyServer, proxyPort);
            Proxy proxy = new Proxy(Proxy.Type.HTTP, addr);

            // Open the target page through the proxy.
            HttpURLConnection connection = (HttpURLConnection) url.openConnection(proxy);
            // Ask the tunnel to switch the egress IP for this request.
            connection.setRequestProperty("Proxy-Switch-Ip", "yes");

            // Drain and print the response.
            byte[] response = readStream(connection.getInputStream());

            System.out.println(new String(response));
        } catch (Exception e) {
            System.out.println(e.getLocalizedMessage());
        }
    }

    /**
     * Reads an input stream fully into a byte array, closing it afterwards.
     *
     * @param inStream stream to drain (closed before returning)
     * @return the stream's complete contents
     * @throws Exception if reading fails
     */
    public static byte[] readStream(InputStream inStream) throws Exception {
        ByteArrayOutputStream outStream = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int len;

        while ((len = inStream.read(buffer)) != -1) {
            outStream.write(buffer, 0, len);
        }
        outStream.close();
        inStream.close();

        return outStream.toByteArray();
    }
}
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.Authenticator;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.PasswordAuthentication;
import java.net.Proxy;
import java.net.URL;

/** Supplies the tunnel user/password when the proxy issues an auth challenge. */
class ProxyAuthenticator extends Authenticator {
    private final String user;
    private final String password;

    public ProxyAuthenticator(String user, String password) {
        this.user = user;
        this.password = password;
    }

    @Override
    protected PasswordAuthentication getPasswordAuthentication() {
        return new PasswordAuthentication(user, password.toCharArray());
    }
}

/**
 * HttpURLConnection demo for the Abuyun "professional" HTTP tunnel.
 *
 * Note: each request is a stateless HTTP call; when the Proxy-Switch-Ip
 * header is sent, the egress IP changes for that request. If multiple
 * threads embed this code, each call gets a fresh IP; de-duplication of
 * IPs, if needed, is the caller's responsibility.
 */
public class ProxyDemo {
    public static void main(String[] args) throws Exception {
        // Target page.
        String targetUrl = "http://test.abuyun.com";
        //String targetUrl = "http://proxy.abuyun.com/switch-ip";
        //String targetUrl = "http://proxy.abuyun.com/current-ip";

        // Since JDK 8u111, Basic auth is disabled by default for HTTPS proxy
        // tunneling; clear the property so proxy authentication also works
        // for HTTPS targets (same fix the http-dyn variant of this demo has).
        System.setProperty("jdk.http.auth.tunneling.disabledSchemes", "");

        // Proxy server.
        String proxyServer = "http-pro.abuyun.com";
        int proxyPort = 9010;

        // Proxy tunnel credentials.
        String proxyUser = "H01234567890123P";
        String proxyPass = "0123456789012345";

        try {
            URL url = new URL(targetUrl);

            Authenticator.setDefault(new ProxyAuthenticator(proxyUser, proxyPass));

            // Proxy server address and an HTTP-type proxy over it.
            InetSocketAddress addr = new InetSocketAddress(proxyServer, proxyPort);
            Proxy proxy = new Proxy(Proxy.Type.HTTP, addr);

            // Open the target page through the proxy.
            HttpURLConnection connection = (HttpURLConnection) url.openConnection(proxy);
            // Ask the tunnel to switch the egress IP for this request.
            connection.setRequestProperty("Proxy-Switch-Ip", "yes");

            // Drain and print the response.
            byte[] response = readStream(connection.getInputStream());

            System.out.println(new String(response));
        } catch (Exception e) {
            System.out.println(e.getLocalizedMessage());
        }
    }

    /**
     * Reads an input stream fully into a byte array, closing it afterwards.
     *
     * @param inStream stream to drain (closed before returning)
     * @return the stream's complete contents
     * @throws Exception if reading fails
     */
    public static byte[] readStream(InputStream inStream) throws Exception {
        ByteArrayOutputStream outStream = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int len;

        while ((len = inStream.read(buffer)) != -1) {
            outStream.write(buffer, 0, len);
        }
        outStream.close();
        inStream.close();

        return outStream.toByteArray();
    }
}
static void main(String args[]) throws Exception { 32 | // 要访问的目标页面 33 | String targetUrl = "http://test.abuyun.com"; 34 | //String targetUrl = "http://proxy.abuyun.com/switch-ip"; 35 | //String targetUrl = "http://proxy.abuyun.com/current-ip"; 36 | 37 | // JDK 8u111版本后,目标页面为HTTPS协议,启用proxy用户密码鉴权 38 | System.setProperty("jdk.http.auth.tunneling.disabledSchemes", ""); 39 | 40 | // 代理服务器 41 | String proxyServer = "http-dyn.abuyun.com"; 42 | int proxyPort = 9020; 43 | 44 | // 代理隧道验证信息 45 | String proxyUser = "H01234567890123D"; 46 | String proxyPass = "0123456789012345"; 47 | 48 | try { 49 | URL url = new URL(targetUrl); 50 | 51 | Authenticator.setDefault(new ProxyAuthenticator(proxyUser, proxyPass)); 52 | 53 | // 创建代理服务器地址对象 54 | InetSocketAddress addr = new InetSocketAddress(proxyServer, proxyPort); 55 | // 创建HTTP类型代理对象 56 | Proxy proxy = new Proxy(Proxy.Type.HTTP, addr); 57 | 58 | // 设置通过代理访问目标页面 59 | HttpURLConnection connection = (HttpURLConnection) url.openConnection(proxy); 60 | 61 | // 解析返回数据 62 | byte[] response = readStream(connection.getInputStream()); 63 | 64 | System.out.println(new String(response)); 65 | } catch (Exception e) { 66 | System.out.println(e.getLocalizedMessage()); 67 | } 68 | } 69 | 70 | /** 71 | * 将输入流转换成字符串 72 | * 73 | * @param inStream 74 | * @return 75 | * @throws Exception 76 | */ 77 | public static byte[] readStream(InputStream inStream) throws Exception { 78 | ByteArrayOutputStream outSteam = new ByteArrayOutputStream(); 79 | byte[] buffer = new byte[1024]; 80 | int len = -1; 81 | 82 | while ((len = inStream.read(buffer)) != -1) { 83 | outSteam.write(buffer, 0, len); 84 | } 85 | outSteam.close(); 86 | inStream.close(); 87 | 88 | return outSteam.toByteArray(); 89 | } 90 | } -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/selenium/ProxyDemo.java: -------------------------------------------------------------------------------- 1 | import org.openqa.selenium.OutputType; 2 | 
import org.openqa.selenium.TakesScreenshot; 3 | import org.openqa.selenium.WebDriver; 4 | import org.openqa.selenium.chrome.ChromeDriver; 5 | import org.openqa.selenium.chrome.ChromeOptions; 6 | 7 | import org.apache.commons.io.FileUtils; 8 | 9 | import java.io.File; 10 | import java.io.FileOutputStream; 11 | import java.io.IOException; 12 | import java.util.zip.ZipEntry; 13 | import java.util.zip.ZipOutputStream; 14 | 15 | public class ProxyDemo { 16 | private static void createProxyAuthExtension(String proxyHost, String proxyPort, String proxyUser, String proxyPass, String pluginPath) throws IOException { 17 | 18 | String manifestJson = """ 19 | { 20 | "version": "1.0.0", 21 | "manifest_version": 2, 22 | "name": "Abuyun Proxy", 23 | "permissions": [ 24 | "proxy", 25 | "tabs", 26 | "unlimitedStorage", 27 | "storage", 28 | "", 29 | "webRequest", 30 | "webRequestBlocking" 31 | ], 32 | "background": { 33 | "scripts": ["background.js"] 34 | }, 35 | "minimum_chrome_version":"22.0.0" 36 | } 37 | """; 38 | 39 | String backgroundJs = """ 40 | var config = { 41 | mode: "fixed_servers", 42 | rules: { 43 | singleProxy: { 44 | scheme: "${scheme}", 45 | host: "${host}", 46 | port: parseInt(${port}) 47 | }, 48 | bypassList: ["foobar.com"] 49 | } 50 | }; 51 | 52 | chrome.proxy.settings.set({value: config, scope: "regular"}, function() {}); 53 | 54 | function callbackFn(details) { 55 | return { 56 | authCredentials: { 57 | username: "${username}", 58 | password: "${password}" 59 | } 60 | }; 61 | } 62 | 63 | chrome.webRequest.onAuthRequired.addListener( 64 | callbackFn, 65 | {urls: [""]}, 66 | ['blocking'] 67 | ); 68 | """ 69 | .replace("${host}", proxyHost) 70 | .replace("${port}", proxyPort) 71 | .replace("${username}", proxyUser) 72 | .replace("${password}", proxyPass) 73 | .replace("${scheme}", "http"); 74 | 75 | try (ZipOutputStream zp = new ZipOutputStream(new FileOutputStream(pluginPath))) { 76 | zp.putNextEntry(new ZipEntry("manifest.json")); 77 | 
zp.write(manifestJson.getBytes()); 78 | zp.closeEntry(); 79 | zp.putNextEntry(new ZipEntry("background.js")); 80 | zp.write(backgroundJs.getBytes()); 81 | zp.closeEntry(); 82 | } 83 | } 84 | 85 | public static void main(String[] args) throws IOException, InterruptedException { 86 | String proxyHost = "http-cla.abuyun.com"; 87 | String proxyPort = "9030"; 88 | // 隧道密码 89 | // TODO: 改成用户后台隧道列表中的值 90 | // 获取环境变量 91 | String proxyUser = "H01234567890123C"; 92 | String proxyPass = "0123456789012345"; 93 | // TODO: 可根据实际情况调整 94 | // 生成用户名和密码对应的浏览器插件压缩包用于后面启动加载 95 | String pluginPath = "/tmp/http-cla.abuyun.com_9030.zip"; 96 | createProxyAuthExtension(proxyHost, proxyPort, proxyUser, proxyPass, pluginPath); 97 | 98 | // chromedriver 99 | // TODO: 改成实际路径 100 | // @see https://chromedriver.chromium.org/downloads 101 | System.setProperty("webdriver.chrome.driver", "/tmp/chromedriver"); 102 | 103 | ChromeOptions chromeOptions = new ChromeOptions(); 104 | 105 | // The traditional --headless, and since version 96, 106 | // Chrome has a new headless mode that allows users to get the full browser functionality (even run extensions). 107 | // Between versions 96 to 108 it was --headless=chrome, after version 109 --headless=new. 108 | // Using --headless=new should bring a better experience when using headless with Selenium. 
109 | // @see https://www.selenium.dev/blog/2023/headless-is-going-away/ 110 | // 无头模式 111 | chromeOptions.addArguments("--headless=new"); 112 | chromeOptions.addArguments("--start-maximized"); 113 | chromeOptions.addExtensions(new File(pluginPath)); 114 | 115 | WebDriver driver = new ChromeDriver(chromeOptions); 116 | 117 | driver.get("https://test.abuyun.com"); 118 | 119 | System.out.println(driver.getPageSource()); 120 | 121 | // 对网页进行截图 122 | File screenshot = ((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE); 123 | 124 | try { 125 | // 保存截图文件 126 | FileUtils.copyFile(screenshot, new File("/tmp/screenshot.png")); 127 | } catch (Exception e) { 128 | e.printStackTrace(); 129 | } 130 | 131 | // 休眠30秒 132 | Thread.sleep(30000); 133 | 134 | // 关闭浏览器 135 | driver.quit(); 136 | } 137 | } 138 | 139 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/selenium/ProxyDemo.java: -------------------------------------------------------------------------------- 1 | import org.openqa.selenium.OutputType; 2 | import org.openqa.selenium.TakesScreenshot; 3 | import org.openqa.selenium.WebDriver; 4 | import org.openqa.selenium.chrome.ChromeDriver; 5 | import org.openqa.selenium.chrome.ChromeOptions; 6 | 7 | import org.apache.commons.io.FileUtils; 8 | 9 | import java.io.File; 10 | import java.io.FileOutputStream; 11 | import java.io.IOException; 12 | import java.util.zip.ZipEntry; 13 | import java.util.zip.ZipOutputStream; 14 | 15 | public class ProxyDemo { 16 | private static void createProxyAuthExtension(String proxyHost, String proxyPort, String proxyUser, String proxyPass, String pluginPath) throws IOException { 17 | 18 | String manifestJson = """ 19 | { 20 | "version": "1.0.0", 21 | "manifest_version": 2, 22 | "name": "Abuyun Proxy", 23 | "permissions": [ 24 | "proxy", 25 | "tabs", 26 | "unlimitedStorage", 27 | "storage", 28 | "", 29 | "webRequest", 30 | "webRequestBlocking" 31 | ], 32 | "background": { 33 | 
"scripts": ["background.js"] 34 | }, 35 | "minimum_chrome_version":"22.0.0" 36 | } 37 | """; 38 | 39 | String backgroundJs = """ 40 | var config = { 41 | mode: "fixed_servers", 42 | rules: { 43 | singleProxy: { 44 | scheme: "${scheme}", 45 | host: "${host}", 46 | port: parseInt(${port}) 47 | }, 48 | bypassList: ["foobar.com"] 49 | } 50 | }; 51 | 52 | chrome.proxy.settings.set({value: config, scope: "regular"}, function() {}); 53 | 54 | function callbackFn(details) { 55 | return { 56 | authCredentials: { 57 | username: "${username}", 58 | password: "${password}" 59 | } 60 | }; 61 | } 62 | 63 | chrome.webRequest.onAuthRequired.addListener( 64 | callbackFn, 65 | {urls: [""]}, 66 | ['blocking'] 67 | ); 68 | """ 69 | .replace("${host}", proxyHost) 70 | .replace("${port}", proxyPort) 71 | .replace("${username}", proxyUser) 72 | .replace("${password}", proxyPass) 73 | .replace("${scheme}", "http"); 74 | 75 | try (ZipOutputStream zp = new ZipOutputStream(new FileOutputStream(pluginPath))) { 76 | zp.putNextEntry(new ZipEntry("manifest.json")); 77 | zp.write(manifestJson.getBytes()); 78 | zp.closeEntry(); 79 | zp.putNextEntry(new ZipEntry("background.js")); 80 | zp.write(backgroundJs.getBytes()); 81 | zp.closeEntry(); 82 | } 83 | } 84 | 85 | public static void main(String[] args) throws IOException, InterruptedException { 86 | String proxyHost = "http-dyn.abuyun.com"; 87 | String proxyPort = "9020"; 88 | // 隧道密码 89 | // TODO: 改成用户后台隧道列表中的值 90 | // 获取环境变量 91 | String proxyUser = "H01234567890123D"; 92 | String proxyPass = "0123456789012345"; 93 | // TODO: 可根据实际情况调整 94 | // 生成用户名和密码对应的浏览器插件压缩包用于后面启动加载 95 | String pluginPath = "/tmp/http-dyn.abuyun.com_9020.zip"; 96 | createProxyAuthExtension(proxyHost, proxyPort, proxyUser, proxyPass, pluginPath); 97 | 98 | // chromedriver 99 | // TODO: 改成实际路径 100 | // @see https://chromedriver.chromium.org/downloads 101 | System.setProperty("webdriver.chrome.driver", "/tmp/chromedriver"); 102 | 103 | ChromeOptions chromeOptions = new 
ChromeOptions(); 104 | 105 | // The traditional --headless, and since version 96, 106 | // Chrome has a new headless mode that allows users to get the full browser functionality (even run extensions). 107 | // Between versions 96 to 108 it was --headless=chrome, after version 109 --headless=new. 108 | // Using --headless=new should bring a better experience when using headless with Selenium. 109 | // @see https://www.selenium.dev/blog/2023/headless-is-going-away/ 110 | // 无头模式 111 | chromeOptions.addArguments("--headless=new"); 112 | chromeOptions.addArguments("--start-maximized"); 113 | chromeOptions.addExtensions(new File(pluginPath)); 114 | 115 | WebDriver driver = new ChromeDriver(chromeOptions); 116 | 117 | driver.get("https://test.abuyun.com"); 118 | 119 | System.out.println(driver.getPageSource()); 120 | 121 | // 对网页进行截图 122 | File screenshot = ((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE); 123 | 124 | try { 125 | // 保存截图文件 126 | FileUtils.copyFile(screenshot, new File("/tmp/screenshot.png")); 127 | } catch (Exception e) { 128 | e.printStackTrace(); 129 | } 130 | 131 | // 休眠30秒 132 | Thread.sleep(30000); 133 | 134 | // 关闭浏览器 135 | driver.quit(); 136 | } 137 | } 138 | 139 | -------------------------------------------------------------------------------- /http-tunnel/http-pro/java/selenium/ProxyDemo.java: -------------------------------------------------------------------------------- 1 | import org.openqa.selenium.OutputType; 2 | import org.openqa.selenium.TakesScreenshot; 3 | import org.openqa.selenium.WebDriver; 4 | import org.openqa.selenium.chrome.ChromeDriver; 5 | import org.openqa.selenium.chrome.ChromeOptions; 6 | 7 | import org.apache.commons.io.FileUtils; 8 | 9 | import java.io.File; 10 | import java.io.FileOutputStream; 11 | import java.io.IOException; 12 | import java.util.zip.ZipEntry; 13 | import java.util.zip.ZipOutputStream; 14 | 15 | public class ProxyDemo { 16 | private static void createProxyAuthExtension(String proxyHost, 
String proxyPort, String proxyUser, String proxyPass, String pluginPath) throws IOException { 17 | 18 | String manifestJson = """ 19 | { 20 | "version": "1.0.0", 21 | "manifest_version": 2, 22 | "name": "Abuyun Proxy", 23 | "permissions": [ 24 | "proxy", 25 | "tabs", 26 | "unlimitedStorage", 27 | "storage", 28 | "", 29 | "webRequest", 30 | "webRequestBlocking" 31 | ], 32 | "background": { 33 | "scripts": ["background.js"] 34 | }, 35 | "minimum_chrome_version":"22.0.0" 36 | } 37 | """; 38 | 39 | String backgroundJs = """ 40 | var config = { 41 | mode: "fixed_servers", 42 | rules: { 43 | singleProxy: { 44 | scheme: "${scheme}", 45 | host: "${host}", 46 | port: parseInt(${port}) 47 | }, 48 | bypassList: ["foobar.com"] 49 | } 50 | }; 51 | 52 | chrome.proxy.settings.set({value: config, scope: "regular"}, function() {}); 53 | 54 | function callbackFn(details) { 55 | return { 56 | authCredentials: { 57 | username: "${username}", 58 | password: "${password}" 59 | } 60 | }; 61 | } 62 | 63 | chrome.webRequest.onAuthRequired.addListener( 64 | callbackFn, 65 | {urls: [""]}, 66 | ['blocking'] 67 | ); 68 | """ 69 | .replace("${host}", proxyHost) 70 | .replace("${port}", proxyPort) 71 | .replace("${username}", proxyUser) 72 | .replace("${password}", proxyPass) 73 | .replace("${scheme}", "http"); 74 | 75 | try (ZipOutputStream zp = new ZipOutputStream(new FileOutputStream(pluginPath))) { 76 | zp.putNextEntry(new ZipEntry("manifest.json")); 77 | zp.write(manifestJson.getBytes()); 78 | zp.closeEntry(); 79 | zp.putNextEntry(new ZipEntry("background.js")); 80 | zp.write(backgroundJs.getBytes()); 81 | zp.closeEntry(); 82 | } 83 | } 84 | 85 | public static void main(String[] args) throws IOException, InterruptedException { 86 | String proxyHost = "http-pro.abuyun.com"; 87 | String proxyPort = "9010"; 88 | // 隧道密码 89 | // TODO: 改成用户后台隧道列表中的值 90 | // 获取环境变量 91 | String proxyUser = "H01234567890123P"; 92 | String proxyPass = "0123456789012345"; 93 | // TODO: 可根据实际情况调整 94 | // 
生成用户名和密码对应的浏览器插件压缩包用于后面启动加载 95 | String pluginPath = "/tmp/http-pro.abuyun.com_9010.zip"; 96 | createProxyAuthExtension(proxyHost, proxyPort, proxyUser, proxyPass, pluginPath); 97 | 98 | // chromedriver 99 | // TODO: 改成实际路径 100 | // @see https://chromedriver.chromium.org/downloads 101 | System.setProperty("webdriver.chrome.driver", "/tmp/chromedriver"); 102 | 103 | ChromeOptions chromeOptions = new ChromeOptions(); 104 | 105 | // The traditional --headless, and since version 96, 106 | // Chrome has a new headless mode that allows users to get the full browser functionality (even run extensions). 107 | // Between versions 96 to 108 it was --headless=chrome, after version 109 --headless=new. 108 | // Using --headless=new should bring a better experience when using headless with Selenium. 109 | // @see https://www.selenium.dev/blog/2023/headless-is-going-away/ 110 | // 无头模式 111 | chromeOptions.addArguments("--headless=new"); 112 | chromeOptions.addArguments("--start-maximized"); 113 | chromeOptions.addExtensions(new File(pluginPath)); 114 | 115 | WebDriver driver = new ChromeDriver(chromeOptions); 116 | 117 | driver.get("https://test.abuyun.com"); 118 | 119 | System.out.println(driver.getPageSource()); 120 | 121 | // 对网页进行截图 122 | File screenshot = ((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE); 123 | 124 | try { 125 | // 保存截图文件 126 | FileUtils.copyFile(screenshot, new File("/tmp/screenshot.png")); 127 | } catch (Exception e) { 128 | e.printStackTrace(); 129 | } 130 | 131 | // 休眠30秒 132 | Thread.sleep(30000); 133 | 134 | // 关闭浏览器 135 | driver.quit(); 136 | } 137 | } 138 | 139 | -------------------------------------------------------------------------------- /http-tunnel/http-dyn/java/httpclient/4.4+/HttpClient4xProxyDemo.java: -------------------------------------------------------------------------------- 1 | import java.io.BufferedReader; 2 | import java.io.InputStreamReader; 3 | import java.io.IOException; 4 | import java.net.URI; 5 | import 
java.util.Arrays; 6 | import java.util.ArrayList; 7 | import java.util.HashSet; 8 | import java.util.List; 9 | import java.util.Set; 10 | 11 | import org.apache.http.Header; 12 | import org.apache.http.HeaderElement; 13 | import org.apache.http.HttpHost; 14 | import org.apache.http.auth.AuthScope; 15 | import org.apache.http.auth.UsernamePasswordCredentials; 16 | import org.apache.http.client.AuthCache; 17 | import org.apache.http.client.CredentialsProvider; 18 | import org.apache.http.client.HttpRequestRetryHandler; 19 | import org.apache.http.client.config.RequestConfig; 20 | import org.apache.http.client.config.AuthSchemes; 21 | import org.apache.http.client.entity.GzipDecompressingEntity; 22 | import org.apache.http.client.entity.UrlEncodedFormEntity; 23 | import org.apache.http.client.methods.CloseableHttpResponse; 24 | import org.apache.http.client.methods.HttpGet; 25 | import org.apache.http.client.methods.HttpPost; 26 | import org.apache.http.client.methods.HttpRequestBase; 27 | import org.apache.http.client.protocol.HttpClientContext; 28 | import org.apache.http.config.Registry; 29 | import org.apache.http.config.RegistryBuilder; 30 | import org.apache.http.conn.socket.ConnectionSocketFactory; 31 | import org.apache.http.conn.socket.LayeredConnectionSocketFactory; 32 | import org.apache.http.conn.socket.PlainConnectionSocketFactory; 33 | import org.apache.http.conn.ssl.SSLConnectionSocketFactory; 34 | import org.apache.http.impl.auth.BasicScheme; 35 | import org.apache.http.impl.client.BasicAuthCache; 36 | import org.apache.http.impl.client.BasicCredentialsProvider; 37 | import org.apache.http.impl.client.CloseableHttpClient; 38 | import org.apache.http.impl.client.HttpClients; 39 | import org.apache.http.impl.client.ProxyAuthenticationStrategy; 40 | import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; 41 | import org.apache.http.message.BasicHeader; 42 | import org.apache.http.message.BasicNameValuePair; 43 | import 
org.apache.http.NameValuePair; 44 | import org.apache.http.util.EntityUtils; 45 | 46 | public class JavaHttpClient45Demo 47 | { 48 | // 代理服务器 49 | final static String proxyHost = "http-dyn.abuyun.com"; 50 | final static Integer proxyPort = 9020; 51 | 52 | // 代理隧道验证信息 53 | final static String proxyUser = "H01234567890123D"; 54 | final static String proxyPass = "0123456789012345"; 55 | 56 | private static PoolingHttpClientConnectionManager cm = null; 57 | private static HttpRequestRetryHandler httpRequestRetryHandler = null; 58 | private static HttpHost proxy = null; 59 | 60 | private static CredentialsProvider credsProvider = null; 61 | private static RequestConfig reqConfig = null; 62 | 63 | static { 64 | ConnectionSocketFactory plainsf = PlainConnectionSocketFactory.getSocketFactory(); 65 | LayeredConnectionSocketFactory sslsf = SSLConnectionSocketFactory.getSocketFactory(); 66 | 67 | Registry registry = RegistryBuilder.create() 68 | .register("http", plainsf) 69 | .register("https", sslsf) 70 | .build(); 71 | 72 | cm = new PoolingHttpClientConnectionManager(registry); 73 | cm.setMaxTotal(20); 74 | cm.setDefaultMaxPerRoute(5); 75 | 76 | proxy = new HttpHost(proxyHost, proxyPort, "http"); 77 | 78 | credsProvider = new BasicCredentialsProvider(); 79 | credsProvider.setCredentials( 80 | AuthScope.ANY, 81 | //new AuthScope(proxyHost, proxyPort), 82 | new UsernamePasswordCredentials(proxyUser, proxyPass)); 83 | 84 | reqConfig = RequestConfig.custom() 85 | .setConnectionRequestTimeout(5000) 86 | .setConnectTimeout(5000) 87 | .setSocketTimeout(5000) 88 | .setExpectContinueEnabled(false) 89 | .setProxy(new HttpHost(proxyHost, proxyPort)) 90 | //.setAuthenticationEnabled(true) 91 | //.setProxyPreferredAuthSchemes(Arrays.asList(AuthSchemes.BASIC)) 92 | .build(); 93 | } 94 | 95 | public static void doRequest(HttpRequestBase httpReq) { 96 | CloseableHttpResponse httpResp = null; 97 | 98 | try { 99 | // JDK 8u111版本后,目标页面为HTTPS协议,启用proxy用户密码鉴权 100 | 
System.setProperty("jdk.http.auth.tunneling.disabledSchemes", ""); 101 | 102 | setHeaders(httpReq); 103 | 104 | httpReq.setConfig(reqConfig); 105 | 106 | CloseableHttpClient httpClient = HttpClients.custom() 107 | .setConnectionManager(cm) 108 | .setDefaultCredentialsProvider(credsProvider) 109 | .build(); 110 | 111 | AuthCache authCache = new BasicAuthCache(); 112 | authCache.put(proxy, new BasicScheme()); 113 | 114 | HttpClientContext localContext = HttpClientContext.create(); 115 | localContext.setAuthCache(authCache); 116 | 117 | httpResp = httpClient.execute(httpReq, localContext); 118 | //httpResp = httpClient.execute(proxy, httpReq, localContext); 119 | 120 | int statusCode = httpResp.getStatusLine().getStatusCode(); 121 | 122 | System.out.println(statusCode); 123 | 124 | BufferedReader rd = new BufferedReader(new InputStreamReader(httpResp.getEntity().getContent())); 125 | 126 | String line = ""; 127 | while((line = rd.readLine()) != null) { 128 | System.out.println(line); 129 | } 130 | } catch (Exception e) { 131 | e.printStackTrace(); 132 | } finally { 133 | try { 134 | if (httpResp != null) { 135 | httpResp.close(); 136 | } 137 | } catch (IOException e) { 138 | e.printStackTrace(); 139 | } 140 | } 141 | } 142 | 143 | /** 144 | * 设置请求头 145 | * 146 | * @param httpReq 147 | */ 148 | private static void setHeaders(HttpRequestBase httpReq) { 149 | httpReq.setHeader("Accept-Encoding", null); 150 | } 151 | 152 | /** 153 | * 处理response 154 | * 155 | * @param httpResp 156 | * @return 157 | */ 158 | private static CloseableHttpResponse handleResponse(CloseableHttpResponse httpResp) { 159 | Header header = httpResp.getEntity().getContentEncoding(); 160 | if (header != null) { 161 | HeaderElement[] elem = header.getElements(); 162 | for (int i = 0; i < elem.length; i++) { 163 | if (elem[i].getName().equalsIgnoreCase("gzip")) { 164 | httpResp.setEntity(new GzipDecompressingEntity(httpResp.getEntity())); 165 | } 166 | } 167 | } 168 | return httpResp; 169 | } 170 | 171 
| public static void doPostRequest() { 172 | try { 173 | // 要访问的目标页面 174 | HttpPost httpPost = new HttpPost("https://test.abuyun.com"); 175 | 176 | // 设置表单参数 177 | List params = new ArrayList(); 178 | params.add(new BasicNameValuePair("name", "http-cloud-proxy")); 179 | params.add(new BasicNameValuePair("params", "{\"broker\":\"abuyun\":\"site\":\"https://www.abuyun.com\"}")); 180 | 181 | httpPost.setEntity(new UrlEncodedFormEntity(params, "utf-8")); 182 | 183 | doRequest(httpPost); 184 | } catch (Exception e) { 185 | e.printStackTrace(); 186 | } 187 | } 188 | 189 | public static void doGetRequest() { 190 | // 要访问的目标页面 191 | String targetUrl = "https://test.abuyun.com"; 192 | //String targetUrl = "http://proxy.abuyun.com/switch-ip"; 193 | //String targetUrl = "http://proxy.abuyun.com/current-ip"; 194 | 195 | try { 196 | HttpGet httpGet = new HttpGet(targetUrl); 197 | 198 | doRequest(httpGet); 199 | } catch (Exception e) { 200 | e.printStackTrace(); 201 | } 202 | } 203 | 204 | public static void main(String[] args) { 205 | //doGetRequest(); 206 | 207 | doPostRequest(); 208 | } 209 | } 210 | -------------------------------------------------------------------------------- /http-tunnel/http-cla/java/httpclient/4.4+/HttpClient4xProxyDemo.java: -------------------------------------------------------------------------------- 1 | import java.io.BufferedReader; 2 | import java.io.InputStreamReader; 3 | import java.io.IOException; 4 | import java.net.URI; 5 | import java.util.Arrays; 6 | import java.util.ArrayList; 7 | import java.util.HashSet; 8 | import java.util.List; 9 | import java.util.Set; 10 | 11 | import org.apache.http.Header; 12 | import org.apache.http.HeaderElement; 13 | import org.apache.http.HttpHost; 14 | import org.apache.http.auth.AuthScope; 15 | import org.apache.http.auth.UsernamePasswordCredentials; 16 | import org.apache.http.client.AuthCache; 17 | import org.apache.http.client.CredentialsProvider; 18 | import 
org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.entity.GzipDecompressingEntity;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.LayeredConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.client.ProxyAuthenticationStrategy;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.NameValuePair;
import org.apache.http.util.EntityUtils;

/**
 * Demo: routing HttpClient 4.4+ requests through the Abuyun "http-cla" HTTP
 * tunnel with preemptive Basic proxy authentication and per-request IP switching.
 */
public class JavaHttpClient45Demo
{
    // Proxy server (Abuyun HTTP tunnel, "cla" endpoint)
    final static String proxyHost = "http-cla.abuyun.com";
    final static Integer proxyPort = 9030;

    // Proxy tunnel credentials (placeholders)
    final static String proxyUser = "H01234567890123C";
    final static String proxyPass = "0123456789012345";

    // Header that asks the tunnel to switch the outgoing IP
    final static String switchIpHeaderKey = "Proxy-Switch-Ip";
    final static String switchIpHeaderVal = "yes";

    private static PoolingHttpClientConnectionManager cm = null;
    private static HttpRequestRetryHandler httpRequestRetryHandler = null;
    private static HttpHost proxy = null;

    private static CredentialsProvider credsProvider = null;
    private static RequestConfig reqConfig = null;

    static {
        ConnectionSocketFactory plainsf = PlainConnectionSocketFactory.getSocketFactory();
        LayeredConnectionSocketFactory sslsf = SSLConnectionSocketFactory.getSocketFactory();

        // Typed registry instead of a raw type.
        Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
            .register("http", plainsf)
            .register("https", sslsf)
            .build();

        cm = new PoolingHttpClientConnectionManager(registry);
        cm.setMaxTotal(20);
        cm.setDefaultMaxPerRoute(5);

        proxy = new HttpHost(proxyHost, proxyPort, "http");

        credsProvider = new BasicCredentialsProvider();
        credsProvider.setCredentials(
            AuthScope.ANY,
            //new AuthScope(proxyHost, proxyPort),
            new UsernamePasswordCredentials(proxyUser, proxyPass));

        reqConfig = RequestConfig.custom()
            .setConnectionRequestTimeout(5000)
            .setConnectTimeout(5000)
            .setSocketTimeout(5000)
            .setExpectContinueEnabled(false)
            .setProxy(new HttpHost(proxyHost, proxyPort))
            //.setAuthenticationEnabled(true)
            //.setProxyPreferredAuthSchemes(Arrays.asList(AuthSchemes.BASIC))
            .build();
    }

    /**
     * Execute the given request through the authenticated proxy tunnel and
     * print the status code and body to stdout.
     *
     * @param httpReq request to execute (GET or POST)
     */
    public static void doRequest(HttpRequestBase httpReq) {
        CloseableHttpResponse httpResp = null;

        try {
            setHeaders(httpReq);

            httpReq.setConfig(reqConfig);

            CloseableHttpClient httpClient = HttpClients.custom()
                .setConnectionManager(cm)
                .setDefaultCredentialsProvider(credsProvider)
                .build();

            // Pre-populate the auth cache so Basic proxy credentials are sent
            // preemptively instead of waiting for a 407 challenge.
            AuthCache authCache = new BasicAuthCache();
            authCache.put(proxy, new BasicScheme());

            HttpClientContext localContext = HttpClientContext.create();
            localContext.setAuthCache(authCache);

            httpResp = httpClient.execute(httpReq, localContext);
            //httpResp = httpClient.execute(proxy, httpReq, localContext);

            int statusCode = httpResp.getStatusLine().getStatusCode();

            System.out.println(statusCode);

            // Read the body with try-with-resources so the stream is closed and
            // the pooled connection can be returned for reuse (was leaked before).
            try (BufferedReader rd = new BufferedReader(new InputStreamReader(httpResp.getEntity().getContent()))) {
                String line;
                while ((line = rd.readLine()) != null) {
                    System.out.println(line);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (httpResp != null) {
                    httpResp.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Set request headers.
     *
     * @param httpReq request to decorate before execution
     */
    private static void setHeaders(HttpRequestBase httpReq) {
        // Disable compressed responses so the body prints as plain text.
        httpReq.setHeader("Accept-Encoding", null);
        // Ask the tunnel to switch the outgoing IP for this request.
        httpReq.setHeader(switchIpHeaderKey, switchIpHeaderVal);
    }

    /**
     * Post-process a response: wrap the entity so gzip-encoded bodies are
     * decompressed transparently.
     *
     * @param httpResp raw response
     * @return the same response, with a decompressing entity when Content-Encoding is gzip
     */
    private static CloseableHttpResponse handleResponse(CloseableHttpResponse httpResp) {
        Header header = httpResp.getEntity().getContentEncoding();
        if (header != null) {
            HeaderElement[] elem = header.getElements();
            for (int i = 0; i < elem.length; i++) {
                if (elem[i].getName().equalsIgnoreCase("gzip")) {
                    httpResp.setEntity(new GzipDecompressingEntity(httpResp.getEntity()));
                }
            }
        }
        return httpResp;
    }

    /**
     * Send a demo POST request through the proxy.
     */
    public static void doPostRequest() {
        try {
            // Target page to request
            HttpPost httpPost = new HttpPost("https://test.abuyun.com");

            // Form parameters (typed list instead of a raw type)
            List<NameValuePair> params = new ArrayList<NameValuePair>();
            params.add(new BasicNameValuePair("name", "http-cloud-proxy"));
            // Fixed: JSON object members are separated by a comma, not a colon.
            params.add(new BasicNameValuePair("params", "{\"broker\":\"abuyun\",\"site\":\"https://www.abuyun.com\"}"));

            httpPost.setEntity(new UrlEncodedFormEntity(params, "utf-8"));

            doRequest(httpPost);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Send a demo GET request through the proxy.
     */
    public static void doGetRequest() {
        // Target page to request
        String targetUrl = "https://test.abuyun.com";
        //String targetUrl = "http://proxy.abuyun.com/switch-ip";
        //String targetUrl = "http://proxy.abuyun.com/current-ip";

        try {
            HttpGet httpGet = new HttpGet(targetUrl);

            doRequest(httpGet);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        //doGetRequest();

        doPostRequest();
    }
}
--------------------------------------------------------------------------------
/http-tunnel/http-pro/java/httpclient/4.4+/HttpClient4xProxyDemo.java:
--------------------------------------------------------------------------------
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.entity.GzipDecompressingEntity;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import
org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.LayeredConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.client.ProxyAuthenticationStrategy;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.NameValuePair;
import org.apache.http.util.EntityUtils;

/**
 * Demo: routing HttpClient 4.4+ requests through the Abuyun "http-pro" HTTP
 * tunnel with preemptive Basic proxy authentication and per-request IP switching.
 */
public class JavaHttpClient45Demo
{
    // Proxy server (Abuyun HTTP tunnel, "pro" endpoint)
    final static String proxyHost = "http-pro.abuyun.com";
    final static Integer proxyPort = 9010;

    // Proxy tunnel credentials (placeholders)
    final static String proxyUser = "H01234567890123P";
    final static String proxyPass = "0123456789012345";

    // Header that asks the tunnel to switch the outgoing IP
    final static String switchIpHeaderKey = "Proxy-Switch-Ip";
    final static String switchIpHeaderVal = "yes";

    private static PoolingHttpClientConnectionManager cm = null;
    private static HttpRequestRetryHandler httpRequestRetryHandler = null;
    private static HttpHost proxy = null;

    private static CredentialsProvider credsProvider = null;
    private static RequestConfig reqConfig = null;

    static {
        ConnectionSocketFactory plainsf = PlainConnectionSocketFactory.getSocketFactory();
        LayeredConnectionSocketFactory sslsf = SSLConnectionSocketFactory.getSocketFactory();

        // Typed registry instead of a raw type.
        Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
            .register("http", plainsf)
            .register("https", sslsf)
            .build();

        cm = new PoolingHttpClientConnectionManager(registry);
        cm.setMaxTotal(20);
        cm.setDefaultMaxPerRoute(5);

        proxy = new HttpHost(proxyHost, proxyPort, "http");

        credsProvider = new BasicCredentialsProvider();
        credsProvider.setCredentials(
            AuthScope.ANY,
            //new AuthScope(proxyHost, proxyPort),
            new UsernamePasswordCredentials(proxyUser, proxyPass));

        reqConfig = RequestConfig.custom()
            .setConnectionRequestTimeout(5000)
            .setConnectTimeout(5000)
            .setSocketTimeout(5000)
            .setExpectContinueEnabled(false)
            .setProxy(new HttpHost(proxyHost, proxyPort))
            //.setAuthenticationEnabled(true)
            //.setProxyPreferredAuthSchemes(Arrays.asList(AuthSchemes.BASIC))
            .build();
    }

    /**
     * Execute the given request through the authenticated proxy tunnel and
     * print the status code and body to stdout.
     *
     * @param httpReq request to execute (GET or POST)
     */
    public static void doRequest(HttpRequestBase httpReq) {
        CloseableHttpResponse httpResp = null;

        try {
            setHeaders(httpReq);

            httpReq.setConfig(reqConfig);

            CloseableHttpClient httpClient = HttpClients.custom()
                .setConnectionManager(cm)
                .setDefaultCredentialsProvider(credsProvider)
                .build();

            // Pre-populate the auth cache so Basic proxy credentials are sent
            // preemptively instead of waiting for a 407 challenge.
            AuthCache authCache = new BasicAuthCache();
            authCache.put(proxy, new BasicScheme());

            HttpClientContext localContext = HttpClientContext.create();
            localContext.setAuthCache(authCache);

            httpResp = httpClient.execute(httpReq, localContext);
            //httpResp = httpClient.execute(proxy, httpReq, localContext);

            int statusCode = httpResp.getStatusLine().getStatusCode();

            System.out.println(statusCode);

            // Read the body with try-with-resources so the stream is closed and
            // the pooled connection can be returned for reuse (was leaked before).
            try (BufferedReader rd = new BufferedReader(new InputStreamReader(httpResp.getEntity().getContent()))) {
                String line;
                while ((line = rd.readLine()) != null) {
                    System.out.println(line);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (httpResp != null) {
                    httpResp.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Set request headers.
     *
     * @param httpReq request to decorate before execution
     */
    private static void setHeaders(HttpRequestBase httpReq) {
        // Disable compressed responses so the body prints as plain text.
        httpReq.setHeader("Accept-Encoding", null);
        // Ask the tunnel to switch the outgoing IP for this request.
        httpReq.setHeader(switchIpHeaderKey, switchIpHeaderVal);
    }

    /**
     * Post-process a response: wrap the entity so gzip-encoded bodies are
     * decompressed transparently.
     *
     * @param httpResp raw response
     * @return the same response, with a decompressing entity when Content-Encoding is gzip
     */
    private static CloseableHttpResponse handleResponse(CloseableHttpResponse httpResp) {
        Header header = httpResp.getEntity().getContentEncoding();
        if (header != null) {
            HeaderElement[] elem = header.getElements();
            for (int i = 0; i < elem.length; i++) {
                if (elem[i].getName().equalsIgnoreCase("gzip")) {
                    httpResp.setEntity(new GzipDecompressingEntity(httpResp.getEntity()));
                }
            }
        }
        return httpResp;
    }

    /**
     * Send a demo POST request through the proxy.
     */
    public static void doPostRequest() {
        try {
            // Target page to request
            HttpPost httpPost = new HttpPost("https://test.abuyun.com");

            // Form parameters (typed list instead of a raw type)
            List<NameValuePair> params = new ArrayList<NameValuePair>();
            params.add(new BasicNameValuePair("name", "http-cloud-proxy"));
            // Fixed: JSON object members are separated by a comma, not a colon.
            params.add(new BasicNameValuePair("params", "{\"broker\":\"abuyun\",\"site\":\"https://www.abuyun.com\"}"));

            httpPost.setEntity(new UrlEncodedFormEntity(params, "utf-8"));

            doRequest(httpPost);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Send a demo GET request through the proxy.
     */
    public static void doGetRequest() {
        // Target page to request
        String targetUrl = "https://test.abuyun.com";
        //String targetUrl = "http://proxy.abuyun.com/switch-ip";
        //String targetUrl = "http://proxy.abuyun.com/current-ip";

        try {
            HttpGet httpGet = new HttpGet(targetUrl);

            doRequest(httpGet);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        //doGetRequest();

        doPostRequest();
    }
}
--------------------------------------------------------------------------------