├── web-unblocker ├── GoLang │ ├── .gitignore │ ├── go.mod │ ├── go.sum │ ├── constant.go │ ├── util.go │ ├── url_list_ngrp.txt │ ├── proxy.go │ ├── scrape.go │ ├── main.go │ └── README.md ├── Ruby │ ├── Gemfile │ ├── Gemfile.lock │ ├── settings.rb │ ├── filesystem.rb │ ├── url_list_ngrp.txt │ ├── scraper.rb │ ├── proxy.rb │ ├── main.rb │ └── README.md ├── Shell │ └── shell.sh ├── Nodejs │ ├── util.js │ ├── package.json │ ├── settings.js │ ├── url_list_residential.txt │ ├── url_list_ngrp.txt │ ├── proxy.js │ ├── scraper.js │ ├── main.js │ ├── filesystem.js │ ├── README.md │ ├── yarn.lock │ ├── package-lock.json │ └── client.js ├── CSharp │ ├── Oxylabs │ │ ├── files │ │ │ └── url_list_residential.txt │ │ ├── ConsoleWriter.cs │ │ ├── Settings.cs │ │ └── Oxylabs.csproj │ ├── README.md │ └── Oxylabs.sln ├── PHP │ ├── src │ │ ├── ConsoleWriter.php │ │ ├── ProxyFormatter.php │ │ └── FileManager.php │ ├── url_list_ngrp.txt │ ├── composer.json │ └── README.md ├── Java │ ├── src │ │ └── main │ │ │ ├── java │ │ │ ├── ConsoleWriter.java │ │ │ ├── RateLimitInterceptor.java │ │ │ └── Settings.java │ │ │ └── resources │ │ │ └── url_list_ngrp.txt │ └── README.md └── Python │ ├── url_list_ngrp.txt │ ├── Pipfile │ └── README.md ├── .gitignore ├── scraper-apis ├── GoLang │ ├── batch-query │ │ └── go.mod │ ├── single-query │ │ └── go.mod │ ├── check-job-status │ │ ├── go.mod │ │ └── main.go │ ├── get-notifier-ip-list │ │ ├── go.mod │ │ └── main.go │ ├── retrieve-job-content │ │ └── go.mod │ ├── callback │ │ └── go.mod │ └── README.md ├── Java │ ├── Callback │ │ ├── javax.json-1.1.4.jar │ │ └── README.md │ ├── README.md │ ├── GetNotifierIPList.java │ ├── CheckJobStatus.java │ └── RetrieveJobContent.java ├── PHP │ ├── composer.json │ ├── GetNotifierIPList.php │ ├── CheckJobStatus.php │ ├── RetrieveJobContent.php │ ├── README.md │ ├── composer.lock │ ├── Callback.php │ ├── SingleQuery.php │ └── BatchQuery.php ├── Nodejs │ ├── package.json │ ├── GetNotifierIPList.js │ ├── 
CheckJobStatus.js │ ├── RetrieveJobContent.js │ ├── bower.json │ ├── README.md │ ├── BatchQuery.js │ ├── SingleQuery.js │ └── Callback.js ├── CSharp │ ├── Callback │ │ ├── appsettings.Development.json │ │ ├── appsettings.json │ │ ├── README.md │ │ ├── Program.cs │ │ ├── Properties │ │ │ └── launchSettings.json │ │ ├── rtc_listener.csproj │ │ ├── rtc_listener.sln │ │ └── Startup.cs │ └── README.md ├── Typescript │ ├── tsconfig.json │ ├── package.json │ ├── GetNotifierIPList.ts │ ├── CheckJobStatus.ts │ ├── RetrieveJobContent.ts │ ├── README.md │ ├── BatchQuery.ts │ ├── SingleQuery.ts │ └── Callback.ts └── Python │ ├── get_callbacker_ips.py │ ├── retrieve_job_content.py │ ├── check_job_status.py │ ├── README.md │ ├── push_pull │ ├── single_job.py │ ├── README.md │ ├── batch_query.py │ └── callback_listener_server.py │ ├── single_query.py │ └── batch_query.py ├── datacenter-proxies ├── Ruby │ ├── Gemfile │ ├── proxy.rb │ ├── Gemfile.lock │ ├── settings.rb │ ├── url_list_dc.txt │ ├── filesystem.rb │ ├── scraper.rb │ ├── README.md │ └── main.rb ├── Shell │ └── shell.sh ├── Nodejs │ ├── util.js │ ├── settings.js │ ├── proxy.js │ ├── package.json │ ├── url_list_dc.txt │ ├── scraper.js │ ├── filesystem.js │ ├── package-lock.json │ ├── main.js │ ├── README.md │ ├── yarn.lock │ └── client.js ├── GoLang │ ├── proxy.go │ ├── go.mod │ ├── constant.go │ ├── url_list_dc.txt │ ├── util.go │ ├── go.sum │ ├── scrape.go │ ├── README.md │ ├── main.go │ └── filesystem.go ├── PHP │ ├── url_list_dc.txt │ ├── src │ │ ├── ConsoleWriter.php │ │ ├── RoundRobinArrayWrapper.php │ │ └── FileManager.php │ ├── composer.json │ └── README.md ├── CSharp │ ├── Oxylabs │ │ ├── files │ │ │ └── url_list_dc.txt │ │ ├── ConsoleWriter.cs │ │ ├── Settings.cs │ │ └── Oxylabs.csproj │ ├── README.md │ └── Oxylabs.sln ├── Java │ ├── src │ │ └── main │ │ │ ├── resources │ │ │ └── url_list_dc.txt │ │ │ └── java │ │ │ ├── ConsoleWriter.java │ │ │ ├── RateLimitInterceptor.java │ │ │ ├── Settings.java │ │ │ └── 
RoundRobin.java │ └── README.md └── Python │ ├── url_list_dc.txt │ ├── Pipfile │ └── README.md ├── images └── oxy_logo.png ├── residential-proxies ├── Ruby │ ├── Gemfile │ ├── Gemfile.lock │ ├── settings.rb │ ├── url_list_residential.txt │ ├── filesystem.rb │ ├── scraper.rb │ ├── main.rb │ ├── proxy.rb │ └── README.md ├── Shell │ └── shell.sh ├── GoLang │ ├── go.mod │ ├── constant.go │ ├── url_list_residential.txt │ ├── util.go │ ├── go.sum │ ├── proxy.go │ ├── scrape.go │ ├── README.md │ └── main.go ├── Nodejs │ ├── util.js │ ├── package.json │ ├── settings.js │ ├── url_list_residential.txt │ ├── proxy.js │ ├── scraper.js │ ├── main.js │ ├── filesystem.js │ ├── README.md │ ├── client.js │ ├── yarn.lock │ └── package-lock.json ├── PHP │ ├── url_list_residential.txt │ ├── src │ │ ├── ConsoleWriter.php │ │ ├── ProxyFormatter.php │ │ └── FileManager.php │ ├── composer.json │ └── README.md ├── Python │ ├── url_list_residential.txt │ ├── Pipfile │ └── README.md ├── CSharp │ ├── Oxylabs │ │ ├── files │ │ │ └── url_list_residential.txt │ │ ├── ConsoleWriter.cs │ │ ├── Settings.cs │ │ └── Oxylabs.csproj │ ├── README.md │ └── Oxylabs.sln └── Java │ ├── src │ └── main │ │ ├── resources │ │ └── url_list_residential.txt │ │ └── java │ │ ├── ConsoleWriter.java │ │ ├── RateLimitInterceptor.java │ │ ├── Settings.java │ │ └── Proxy.java │ └── README.md ├── shared-datacenter-proxies ├── Ruby │ ├── Gemfile │ ├── proxy_list_shared_dc.txt │ ├── Gemfile.lock │ ├── url_list_shared_dc.txt │ ├── settings.rb │ ├── filesystem.rb │ ├── proxy.rb │ ├── scraper.rb │ ├── README.md │ └── main.rb ├── Shell │ └── shell.sh ├── GoLang │ ├── go.mod │ ├── proxy_list_shared_dc.txt │ ├── url_list_shared_dc.txt │ ├── constant.go │ ├── util.go │ ├── go.sum │ ├── regex.go │ ├── proxy.go │ ├── scrape.go │ ├── README.md │ └── main.go ├── Nodejs │ ├── util.js │ ├── package.json │ ├── proxy_list_shared_dc.txt │ ├── url_list_shared_dc.txt │ ├── settings.js │ ├── proxy.js │ ├── yarn.lock │ ├── scraper.js │ ├── 
main.js │ ├── client.js │ └── README.md ├── Java │ ├── target │ │ └── classes │ │ │ ├── Main.class │ │ │ ├── Proxy.class │ │ │ ├── Scraper.class │ │ │ ├── ApiClient.class │ │ │ ├── Settings.class │ │ │ ├── FileManager.class │ │ │ ├── ConsoleWriter.class │ │ │ ├── HeaderGenerator.class │ │ │ ├── RateLimitInterceptor.class │ │ │ ├── proxy_list_shared_dc.txt │ │ │ └── url_list_shared_dc.txt │ ├── src │ │ └── main │ │ │ ├── resources │ │ │ ├── proxy_list_shared_dc.txt │ │ │ └── url_list_shared_dc.txt │ │ │ └── java │ │ │ ├── ConsoleWriter.java │ │ │ ├── RateLimitInterceptor.java │ │ │ └── Settings.java │ └── README.md ├── PHP │ ├── proxy_list_shared_dc.txt │ ├── url_list_shared_dc.txt │ ├── src │ │ ├── ConsoleWriter.php │ │ └── ProxyFormatter.php │ ├── composer.json │ └── README.md ├── Python │ ├── proxy_list_shared_dc.txt │ ├── url_list_shared_dc.txt │ └── README.md └── CSharp │ ├── Oxylabs │ ├── files │ │ ├── proxy_list_shared_dc.txt │ │ └── url_list_shared_dc.txt │ ├── ConsoleWriter.cs │ ├── Settings.cs │ └── Oxylabs.csproj │ ├── README.md │ └── Oxylabs.sln └── Affiliate-Universal-1090x275.png /web-unblocker/GoLang/.gitignore: -------------------------------------------------------------------------------- 1 | failed_requests.txt 2 | result_*.html -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .idea 3 | vendor 4 | node_modules 5 | bin 6 | obj 7 | packages -------------------------------------------------------------------------------- /scraper-apis/GoLang/batch-query/go.mod: -------------------------------------------------------------------------------- 1 | module scraper-api/batch-query 2 | 3 | go 1.17 4 | -------------------------------------------------------------------------------- /scraper-apis/GoLang/single-query/go.mod: -------------------------------------------------------------------------------- 1 | module 
scraper-api/single-query 2 | 3 | go 1.17 4 | -------------------------------------------------------------------------------- /web-unblocker/Ruby/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org/' 2 | 3 | gem "concurrent-ruby", "~> 1.1" -------------------------------------------------------------------------------- /datacenter-proxies/Ruby/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org/' 2 | 3 | gem "concurrent-ruby", "~> 1.1" -------------------------------------------------------------------------------- /images/oxy_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/images/oxy_logo.png -------------------------------------------------------------------------------- /residential-proxies/Ruby/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org/' 2 | 3 | gem "concurrent-ruby", "~> 1.1" -------------------------------------------------------------------------------- /scraper-apis/GoLang/check-job-status/go.mod: -------------------------------------------------------------------------------- 1 | module scraper-api/check-job-status 2 | 3 | go 1.17 4 | -------------------------------------------------------------------------------- /scraper-apis/GoLang/get-notifier-ip-list/go.mod: -------------------------------------------------------------------------------- 1 | module scraper-api/get-notifier-ip-list 2 | 3 | go 1.17 4 | -------------------------------------------------------------------------------- /scraper-apis/GoLang/retrieve-job-content/go.mod: -------------------------------------------------------------------------------- 1 | module scraper-api/remote-job-content 2 | 3 | go 1.17 4 | 
-------------------------------------------------------------------------------- /web-unblocker/GoLang/go.mod: -------------------------------------------------------------------------------- 1 | module datacenter 2 | 3 | go 1.16 4 | 5 | require golang.org/x/time v0.3.0 6 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org/' 2 | 3 | gem "concurrent-ruby", "~> 1.1" -------------------------------------------------------------------------------- /Affiliate-Universal-1090x275.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/Affiliate-Universal-1090x275.png -------------------------------------------------------------------------------- /datacenter-proxies/Shell/shell.sh: -------------------------------------------------------------------------------- 1 | shell curl --proxy PROXY:PORT --proxy-user username:pass "https://ip.oxylabs.io/location" 2 | -------------------------------------------------------------------------------- /residential-proxies/Shell/shell.sh: -------------------------------------------------------------------------------- 1 | curl -x pr.oxylabs.io:7777 -U "customer-USERNAME-cc-US:PASSWORD" https://ip.oxylabs.io/location 2 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Shell/shell.sh: -------------------------------------------------------------------------------- 1 | curl -x dc.pr.oxylabs.io:10000 -U "customer-USERNAME:PASSWORD" https://ip.oxylabs.io/location 2 | -------------------------------------------------------------------------------- /web-unblocker/Shell/shell.sh: -------------------------------------------------------------------------------- 1 | shell curl -k --proxy 
unblock.oxylabs.io:60000 --proxy-user username:pass "https://ip.oxylabs.io/location" 2 | -------------------------------------------------------------------------------- /residential-proxies/GoLang/go.mod: -------------------------------------------------------------------------------- 1 | module datacenter 2 | 3 | go 1.16 4 | 5 | require ( 6 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 7 | ) 8 | -------------------------------------------------------------------------------- /scraper-apis/Java/Callback/javax.json-1.1.4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/scraper-apis/Java/Callback/javax.json-1.1.4.jar -------------------------------------------------------------------------------- /web-unblocker/Nodejs/util.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | printAndExit: (message) => { 3 | console.log(message); 4 | process.exit(1) 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/util.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | printAndExit: (message) => { 3 | console.log(message); 4 | process.exit(1) 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/util.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | printAndExit: (message) => { 3 | console.log(message); 4 | process.exit(1) 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/go.mod: -------------------------------------------------------------------------------- 1 | module datacenter 2 | 3 | go 1.16 4 | 5 | require ( 6 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 
7 | ) 8 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/util.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | printAndExit: (message) => { 3 | console.log(message); 4 | process.exit(1) 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /datacenter-proxies/Ruby/proxy.rb: -------------------------------------------------------------------------------- 1 | require_relative './settings' 2 | 3 | def format_proxy(proxy) 4 | "http://#{get_setting(:Username)}:#{get_setting(:Password)}@#{proxy}" 5 | end -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/Main.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/Main.class -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/Proxy.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/Proxy.class -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/Scraper.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/Scraper.class -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/ApiClient.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/ApiClient.class -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/Settings.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/Settings.class -------------------------------------------------------------------------------- /web-unblocker/GoLang/go.sum: -------------------------------------------------------------------------------- 1 | golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= 2 | golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 3 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/FileManager.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/FileManager.class -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/proxy.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "fmt" 4 | 5 | func formatProxy(proxy string) string { 6 | return fmt.Sprintf("http://%s:%s@%s", Username, Password, proxy) 7 | } 8 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/ConsoleWriter.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/ConsoleWriter.class 
-------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/HeaderGenerator.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/HeaderGenerator.class -------------------------------------------------------------------------------- /web-unblocker/Nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datacenter", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "axios": "^0.24.0", 6 | "axios-rate-limit": "^1.3.0" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datacenter", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "axios": "^0.24.0", 6 | "axios-rate-limit": "^1.3.0" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /scraper-apis/PHP/composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oxylabs/scraper-api-example", 3 | "type": "project", 4 | "require": { 5 | "php": "^7.4|^8.0", 6 | "ext-curl": "*" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/settings.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | Username: "", 3 | Password: "", 4 | Timeout: 5, 5 | RequestsRate: 10, 6 | RetriesNum: 3, 7 | UrlListName: "url_list_dc.txt", 8 | } 9 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/RateLimitInterceptor.class: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/oxylabs/product-integrations/HEAD/shared-datacenter-proxies/Java/target/classes/RateLimitInterceptor.class -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datacenter", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "axios": "^0.24.0", 6 | "axios-rate-limit": "^1.3.0" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/go.mod: -------------------------------------------------------------------------------- 1 | module datacenter 2 | 3 | go 1.16 4 | 5 | require ( 6 | github.com/sbabiv/roundrobin v0.0.0-20180428125943-85f671680a31 7 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 8 | ) 9 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/constant.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | const Username = "" 4 | const Password = "" 5 | const Timeout = 5 6 | const RequestsRate = 10 7 | const RetriesNum = 3 8 | const UrlListName = "url_list_dc.txt" 9 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/proxy.js: -------------------------------------------------------------------------------- 1 | const settings = require('./settings'); 2 | 3 | module.exports = { 4 | format: (proxyUrl) => { 5 | return `http://${settings.Username}:${settings.Password}@${proxyUrl}`; 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datacenter", 3 | 
"version": "1.0.0", 4 | "dependencies": { 5 | "axios": "^0.24.0", 6 | "axios-rate-limit": "^1.3.0", 7 | "roundround": "^0.2.0" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /scraper-apis/Nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "scraper-api-example", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "axios": "^1.2.1", 6 | "body-parser": "^1.20.1", 7 | "express": "^4.18.2" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/Callback/appsettings.Development.json: -------------------------------------------------------------------------------- 1 | { 2 | "Logging": { 3 | "LogLevel": { 4 | "Default": "Information", 5 | "Microsoft": "Warning", 6 | "Microsoft.Hosting.Lifetime": "Information" 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/PHP/proxy_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/proxy_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/settings.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | 
Username: "", 3 | Password: "", 4 | Timeout: 5, 5 | RequestsRate: 10, 6 | RetriesNum: 3, 7 | UrlListName: "url_list_ngrp.txt", 8 | ProxyAddress: "unblock.oxylabs.io:60000", 9 | } 10 | -------------------------------------------------------------------------------- /web-unblocker/Ruby/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | concurrent-ruby (1.1.9) 5 | 6 | PLATFORMS 7 | x86_64-linux 8 | 9 | DEPENDENCIES 10 | concurrent-ruby (~> 1.1) 11 | 12 | BUNDLED WITH 13 | 2.2.32 14 | -------------------------------------------------------------------------------- /datacenter-proxies/Ruby/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | concurrent-ruby (1.1.9) 5 | 6 | PLATFORMS 7 | x86_64-linux 8 | 9 | DEPENDENCIES 10 | concurrent-ruby (~> 1.1) 11 | 12 | BUNDLED WITH 13 | 2.2.32 14 | -------------------------------------------------------------------------------- /residential-proxies/Ruby/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | concurrent-ruby (1.1.9) 5 | 6 | PLATFORMS 7 | x86_64-linux 8 | 9 | DEPENDENCIES 10 | concurrent-ruby (~> 1.1) 11 | 12 | BUNDLED WITH 13 | 2.2.32 14 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/proxy_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/proxy_list_shared_dc.txt: 
-------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Python/proxy_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/settings.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | Username: "", 3 | Password: "", 4 | Timeout: 5, 5 | RequestsRate: 10, 6 | RetriesNum: 3, 7 | UrlListName: "url_list_residential.txt", 8 | ProxyAddress: "pr.oxylabs.io:7777", 9 | } 10 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/CSharp/Oxylabs/files/proxy_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/proxy_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 
7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/src/main/resources/proxy_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | dc.pr.oxylabs.io:10000 2 | dc.us-pr.oxylabs.io:30000 3 | dc.de-pr.oxylabs.io:40000 4 | dc.fr-pr.oxylabs.io:42000 5 | dc.nl-pr.oxylabs.io:44000 6 | dc.gb-pr.oxylabs.io:46000 7 | dc.ro-pr.oxylabs.io:48000 8 | -------------------------------------------------------------------------------- /web-unblocker/GoLang/constant.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | const Username = "" 4 | const Password = "" 5 | const Timeout = 5 6 | const RequestsRate = 10 7 | const RetriesNum = 3 8 | const UrlListName = "url_list_ngrp.txt" 9 | const ProxyAddress = "unblock.oxylabs.io:60000" 10 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/Callback/appsettings.json: -------------------------------------------------------------------------------- 1 | { 2 | "Logging": { 3 | "LogLevel": { 4 | "Default": "Information", 5 | "Microsoft": "Warning", 6 | "Microsoft.Hosting.Lifetime": "Information" 7 | } 8 | }, 9 | "AllowedHosts": "*" 10 | } 11 | -------------------------------------------------------------------------------- /residential-proxies/GoLang/constant.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | const Username = "" 4 | const Password = "" 5 | const Timeout = 5 6 | const RequestsRate = 10 7 | const RetriesNum = 3 8 | const UrlListName = "url_list_residential.txt" 9 | const ProxyAddress = "pr.oxylabs.io:7777" 10 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 
| remote: https://rubygems.org/ 3 | specs: 4 | concurrent-ruby (1.1.9) 5 | 6 | PLATFORMS 7 | universal-darwin-21 8 | x86_64-linux 9 | 10 | DEPENDENCIES 11 | concurrent-ruby (~> 1.1) 12 | 13 | BUNDLED WITH 14 | 2.2.32 15 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/url_list_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers 8 | https://ip.oxylabs.io/headers 9 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/url_list_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers 8 | https://ip.oxylabs.io/headers 9 | -------------------------------------------------------------------------------- /datacenter-proxies/PHP/url_list_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers 8 | https://ip.oxylabs.io/headers 9 | -------------------------------------------------------------------------------- /datacenter-proxies/Ruby/settings.rb: -------------------------------------------------------------------------------- 1 | def get_setting(name) 2 | get_settings[name] 3 | end 4 | 5 | def get_settings 6 | { 7 | Username: "", 8 | Password: "", 9 | 
Timeout: 5, 10 | RequestsRate: 10, 11 | RetriesNum: 3, 12 | UrlListName: "url_list_dc.txt", 13 | } 14 | end 15 | -------------------------------------------------------------------------------- /datacenter-proxies/Ruby/url_list_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers 8 | https://ip.oxylabs.io/headers 9 | -------------------------------------------------------------------------------- /scraper-apis/Typescript/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2016", 4 | "module": "commonjs", 5 | "esModuleInterop": true, 6 | "forceConsistentCasingInFileNames": true, 7 | "strict": true, 8 | "skipLibCheck": true, 9 | "noImplicitAny": false 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /datacenter-proxies/CSharp/Oxylabs/files/url_list_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers 8 | https://ip.oxylabs.io/headers 9 | -------------------------------------------------------------------------------- /datacenter-proxies/Java/src/main/resources/url_list_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers 
8 | https://ip.oxylabs.io/headers 9 | -------------------------------------------------------------------------------- /web-unblocker/Ruby/settings.rb: -------------------------------------------------------------------------------- 1 | def get_setting(name) 2 | get_settings[name] 3 | end 4 | 5 | def get_settings 6 | { 7 | Username: "", 8 | Password: "", 9 | Timeout: 5, 10 | RequestsRate: 10, 11 | RetriesNum: 3, 12 | UrlListName: "url_list_ngrp.txt", 13 | ProxyAddress: "unblock.oxylabs.io:60000", 14 | } 15 | end 16 | -------------------------------------------------------------------------------- /residential-proxies/Ruby/settings.rb: -------------------------------------------------------------------------------- 1 | def get_setting(name) 2 | get_settings[name] 3 | end 4 | 5 | def get_settings 6 | { 7 | Username: "", 8 | Password: "", 9 | Timeout: 5, 10 | RequestsRate: 10, 11 | RetriesNum: 3, 12 | UrlListName: "url_list_residential.txt", 13 | ProxyAddress: "pr.oxylabs.io:7777", 14 | } 15 | end 16 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/target/classes/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR -------------------------------------------------------------------------------- /shared-datacenter-proxies/PHP/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | 
https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR 10 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR 10 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR 10 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/src/main/resources/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | 
https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR 10 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Python/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR 10 | -------------------------------------------------------------------------------- /datacenter-proxies/Python/url_list_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers 8 | https://ip.oxylabs.io/headers 9 | https://ip.oxylabs.io/headers 10 | https://ip.oxylabs.io/headers -------------------------------------------------------------------------------- /shared-datacenter-proxies/CSharp/Oxylabs/files/url_list_shared_dc.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers 7 | https://ip.oxylabs.io/headers;US 8 | https://ip.oxylabs.io/headers;GB 9 | https://ip.oxylabs.io/headers;FR 10 | 
-------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/settings.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | Username: "", 3 | Password: "", 4 | Timeout: 5, 5 | RequestsRate: 10, 6 | RetriesNum: 3, 7 | UrlListName: "url_list_shared_dc.txt", 8 | ProxyListName: "proxy_list_shared_dc.txt", 9 | 10 | ProxyRegex: /^dc\.(?\w{2})-?pr\.oxylabs\.io:\d+$/, 11 | DefaultProxyIndexName: "DEFAULT", 12 | } 13 | -------------------------------------------------------------------------------- /residential-proxies/PHP/url_list_residential.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers;US 6 | https://ip.oxylabs.io/headers;GB 7 | https://ip.oxylabs.io/fasdfasf;CA 8 | https://ip.oxylabs.io/headers;AU 9 | https://ip.oxylabs.io/headers;NZ 10 | https://ip.oxylabs.io/headers;IE -------------------------------------------------------------------------------- /residential-proxies/Ruby/url_list_residential.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers;US 6 | https://ip.oxylabs.io/headers;GB 7 | https://ip.oxylabs.io/fasdfasf;CA 8 | https://ip.oxylabs.io/headers;AU 9 | https://ip.oxylabs.io/headers;NZ 10 | https://ip.oxylabs.io/headers;IE -------------------------------------------------------------------------------- /residential-proxies/GoLang/url_list_residential.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | 
https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers;US 6 | https://ip.oxylabs.io/headers;GB 7 | https://ip.oxylabs.io/fasdfasf;CA 8 | https://ip.oxylabs.io/headers;AU 9 | https://ip.oxylabs.io/headers;NZ 10 | https://ip.oxylabs.io/headers;IE -------------------------------------------------------------------------------- /residential-proxies/Nodejs/url_list_residential.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers;US 6 | https://ip.oxylabs.io/headers;GB 7 | https://ip.oxylabs.io/fasdfasf;CA 8 | https://ip.oxylabs.io/headers;AU 9 | https://ip.oxylabs.io/headers;NZ 10 | https://ip.oxylabs.io/headers;IE -------------------------------------------------------------------------------- /residential-proxies/Python/url_list_residential.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers;US 6 | https://ip.oxylabs.io/headers;GB 7 | https://ip.oxylabs.io/fasdfasf;CA 8 | https://ip.oxylabs.io/headers;AU 9 | https://ip.oxylabs.io/headers;NZ 10 | https://ip.oxylabs.io/headers;IE -------------------------------------------------------------------------------- /web-unblocker/GoLang/util.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "math/rand" 6 | "os" 7 | "reflect" 8 | ) 9 | 10 | func printAndExit(message string) { 11 | fmt.Println(message) 12 | os.Exit(1) 13 | } 14 | 15 | func getRandomMapKey(mapVariable interface{}) interface{} { 16 | keys := reflect.ValueOf(mapVariable).MapKeys() 17 | 18 | return keys[rand.Intn(len(keys))].Interface() 19 | } 20 | 
-------------------------------------------------------------------------------- /web-unblocker/Nodejs/url_list_residential.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers;US 6 | https://ip.oxylabs.io/headers;GB 7 | https://ip.oxylabs.io/headers;CA 8 | https://ip.oxylabs.io/headers;AU 9 | https://ip.oxylabs.io/headers;NZ 10 | https://ip.oxylabs.io/headers;IE 11 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/util.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "math/rand" 6 | "os" 7 | "reflect" 8 | ) 9 | 10 | func printAndExit(message string) { 11 | fmt.Println(message) 12 | os.Exit(1) 13 | } 14 | 15 | func getRandomMapKey(mapVariable interface{}) interface{} { 16 | keys := reflect.ValueOf(mapVariable).MapKeys() 17 | 18 | return keys[rand.Intn(len(keys))].Interface() 19 | } 20 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/Callback/README.md: -------------------------------------------------------------------------------- 1 | # Requirements 2 | 3 | - .NET Core 3.1 4 | 5 | # Before using 6 | 7 | Use `dotnet restore` command before 1st launch to restore dependencies 8 | 9 | ```bash 10 | $ dotnet restore 11 | ``` 12 | 13 | # Usage 14 | 15 | Use `dotnet run` command to launch the webserver (default url is http://localhost:5000/job_listener) 16 | 17 | ```bash 18 | $ dotnet run 19 | ``` 20 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/settings.rb: -------------------------------------------------------------------------------- 1 | def get_setting(name) 2 | get_settings[name] 3 | end 4 | 5 | def 
get_settings 6 | { 7 | Username: "", 8 | Password: "", 9 | Timeout: 5, 10 | RequestsRate: 10, 11 | RetriesNum: 3, 12 | UrlListName: "url_list_shared_dc.txt", 13 | ProxyListName: "proxy_list_shared_dc.txt", 14 | DefaultProxyIndexName: "DEFAULT", 15 | } 16 | end 17 | -------------------------------------------------------------------------------- /web-unblocker/CSharp/Oxylabs/files/url_list_residential.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers;US 6 | https://ip.oxylabs.io/headers;GB 7 | https://ip.oxylabs.io/fasdfasf;CA 8 | https://ip.oxylabs.io/headers;AU 9 | https://ip.oxylabs.io/headers;NZ 10 | https://ip.oxylabs.io/headers;IE -------------------------------------------------------------------------------- /web-unblocker/PHP/src/ConsoleWriter.php: -------------------------------------------------------------------------------- 1 | \w{2})-?pr\.oxylabs\.io:\d+$` 12 | const DefaultProxyIndexName = "DEFAULT" 13 | 14 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/util.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "math/rand" 6 | "os" 7 | "reflect" 8 | ) 9 | 10 | func printAndExit(message string) { 11 | fmt.Println(message) 12 | os.Exit(1) 13 | } 14 | 15 | func getRandomMapKey(mapVariable interface{}) interface{} { 16 | keys := reflect.ValueOf(mapVariable).MapKeys() 17 | 18 | return keys[rand.Intn(len(keys))].Interface() 19 | } 20 | -------------------------------------------------------------------------------- /web-unblocker/Java/src/main/java/ConsoleWriter.java: -------------------------------------------------------------------------------- 1 | public class ConsoleWriter { 2 | public void 
writelnAndExit(String str) { 3 | this.writelnError(str); 4 | System.exit(1); 5 | } 6 | 7 | public void writelnError(String output) { 8 | this.writeln("ERROR: " + output); 9 | } 10 | 11 | public void writeln(String str) { 12 | System.out.println(str); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /datacenter-proxies/Java/src/main/java/ConsoleWriter.java: -------------------------------------------------------------------------------- 1 | public class ConsoleWriter { 2 | public void writelnAndExit(String str) { 3 | this.writelnError(str); 4 | System.exit(1); 5 | } 6 | 7 | public void writelnError(String output) { 8 | this.writeln("ERROR: " + output); 9 | } 10 | 11 | public void writeln(String str) { 12 | System.out.println(str); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/PHP/src/ConsoleWriter.php: -------------------------------------------------------------------------------- 1 | console.log(data)) 14 | .catch(err => console.log(err)) 15 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/src/main/java/ConsoleWriter.java: -------------------------------------------------------------------------------- 1 | public class ConsoleWriter { 2 | public void writelnAndExit(String str) { 3 | this.writelnError(str); 4 | System.exit(1); 5 | } 6 | 7 | public void writelnError(String output) { 8 | this.writeln("ERROR: " + output); 9 | } 10 | 11 | public void writeln(String str) { 12 | System.out.println(str); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /residential-proxies/Ruby/filesystem.rb: -------------------------------------------------------------------------------- 1 | def write_error_to_stdout(message) 2 | p "ERROR: #{message}" 3 | end 4 | 5 | def write_error_to_file(message) 6 | error_filename = "failed_requests.txt" 7 
| 8 | open(error_filename, 'a') { |f| 9 | f.puts message + "\n" 10 | } 11 | end 12 | 13 | def write_success_to_file(position, content) 14 | open("result_#{position}.html", 'w') { |f| 15 | f.puts content 16 | } 17 | end 18 | -------------------------------------------------------------------------------- /scraper-apis/Nodejs/CheckJobStatus.js: -------------------------------------------------------------------------------- 1 | const axios = require('axios'); 2 | 3 | axios.get('https://data.oxylabs.io/v1/queries/1234567890987654321', { 4 | auth: { 5 | username: 'user', 6 | password: 'pass1' 7 | }, 8 | headers: { 9 | 'Content-Type': 'application/json', 10 | 'Accept-Encoding': 'gzip,deflate,compress', 11 | }, 12 | }) 13 | .then(({ data }) => console.log(data)) 14 | .catch(err => console.log(err)) 15 | -------------------------------------------------------------------------------- /scraper-apis/Java/Callback/README.md: -------------------------------------------------------------------------------- 1 | # Requirements 2 | 3 | - Java 1.8+ 4 | - javax.json-1.1.4.jar 5 | 6 | # Usage 7 | 8 | It is required to include JSON library in the classpath while compiling: 9 | ```bash 10 | $ javac -classpath javax.json-1.1.4.jar:. Listener.java 11 | ``` 12 | 13 | It is required to include JSON library in the classpath while rinning as well: 14 | ```bash 15 | $ java -classpath javax.json-1.1.4.jar:. 
Listener 16 | ``` -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/filesystem.rb: -------------------------------------------------------------------------------- 1 | def write_error_to_stdout(message) 2 | p "ERROR: #{message}" 3 | end 4 | 5 | def write_error_to_file(message) 6 | error_filename = "failed_requests.txt" 7 | 8 | open(error_filename, 'a') { |f| 9 | f.puts message + "\n" 10 | } 11 | end 12 | 13 | def write_success_to_file(position, content) 14 | open("result_#{position}.html", 'w') { |f| 15 | f.puts content 16 | } 17 | end 18 | -------------------------------------------------------------------------------- /web-unblocker/Java/src/main/resources/url_list_ngrp.txt: -------------------------------------------------------------------------------- 1 | https://ip.oxylabs.io/headers 2 | https://ip.oxylabs.io/headers 3 | https://ip.oxylabs.io/headers 4 | https://ip.oxylabs.io/headers 5 | https://ip.oxylabs.io/headers 6 | https://ip.oxylabs.io/headers;United States 7 | https://ip.oxylabs.io/headers;Germany 8 | https://ip.oxylabs.io/headers;United Kingdom 9 | https://ip.oxylabs.io/headers;United States 10 | https://ip.oxylabs.io/headers;France -------------------------------------------------------------------------------- /scraper-apis/Nodejs/RetrieveJobContent.js: -------------------------------------------------------------------------------- 1 | const axios = require('axios'); 2 | 3 | axios.get('https://data.oxylabs.io/v1/queries/1234567890987654321/results', { 4 | auth: { 5 | username: 'user', 6 | password: 'pass1' 7 | }, 8 | headers: { 9 | 'Content-Type': 'application/json', 10 | 'Accept-Encoding': 'gzip,deflate,compress', 11 | }, 12 | }) 13 | .then(({ data }) => console.log(data)) 14 | .catch(err => console.log(err)) 15 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/go.sum: 
-------------------------------------------------------------------------------- 1 | github.com/sbabiv/roundrobin v0.0.0-20180428125943-85f671680a31 h1:bHPLWUWFkZqQhUlsq+jSiYt4C/pZibnADP+v7HrrF3Q= 2 | github.com/sbabiv/roundrobin v0.0.0-20180428125943-85f671680a31/go.mod h1:mvu7AhDJladBDgG+T0+cTcCK+ijqxv8zgbS+wv2zc3M= 3 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 h1:GZokNIeuVkl3aZHJchRrr13WCsols02MLUcz1U9is6M= 4 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 5 | -------------------------------------------------------------------------------- /scraper-apis/Nodejs/bower.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "scraper-api-example", 3 | "description": "An example of using oxylabs scraper api", 4 | "main": "", 5 | "authors": [ 6 | "oxylabs" 7 | ], 8 | "homepage": "https://developers.oxylabs.io", 9 | "private": true, 10 | "ignore": [ 11 | "**/.*", 12 | "node_modules", 13 | "bower_components", 14 | "test", 15 | "tests" 16 | ], 17 | "dependencies": { 18 | "axios": "^1.2.1" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /residential-proxies/GoLang/go.sum: -------------------------------------------------------------------------------- 1 | github.com/sbabiv/roundrobin v0.0.0-20180428125943-85f671680a31 h1:bHPLWUWFkZqQhUlsq+jSiYt4C/pZibnADP+v7HrrF3Q= 2 | github.com/sbabiv/roundrobin v0.0.0-20180428125943-85f671680a31/go.mod h1:mvu7AhDJladBDgG+T0+cTcCK+ijqxv8zgbS+wv2zc3M= 3 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 h1:GZokNIeuVkl3aZHJchRrr13WCsols02MLUcz1U9is6M= 4 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 5 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/go.sum: 
-------------------------------------------------------------------------------- 1 | github.com/sbabiv/roundrobin v0.0.0-20180428125943-85f671680a31 h1:bHPLWUWFkZqQhUlsq+jSiYt4C/pZibnADP+v7HrrF3Q= 2 | github.com/sbabiv/roundrobin v0.0.0-20180428125943-85f671680a31/go.mod h1:mvu7AhDJladBDgG+T0+cTcCK+ijqxv8zgbS+wv2zc3M= 3 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 h1:GZokNIeuVkl3aZHJchRrr13WCsols02MLUcz1U9is6M= 4 | golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 5 | -------------------------------------------------------------------------------- /web-unblocker/GoLang/proxy.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | ) 7 | 8 | func createProxyByUrl(url string) (string, *string, string) { 9 | var country *string 10 | parsedUrl := url 11 | 12 | urlParts := strings.Split(url, ";") 13 | if len(urlParts) == 2 { 14 | parsedUrl = urlParts[0] 15 | country = &urlParts[1] 16 | } 17 | 18 | return parsedUrl, country, fmt.Sprintf( 19 | "http://%s:%s@%s", 20 | Username, 21 | Password, 22 | ProxyAddress, 23 | ) 24 | } 25 | -------------------------------------------------------------------------------- /web-unblocker/Python/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | aiohttp = "==3.8.0" 8 | aiosignal = "==1.2.0" 9 | async-timeout = "==4.0.1" 10 | asyncio = "==3.4.3" 11 | attrs = "==21.2.0" 12 | charset-normalizer = "==2.0.7" 13 | frozenlist = "==1.2.0" 14 | idna = "==3.3" 15 | multidict = "==5.2.0" 16 | typing-extensions = "==3.10.0.2" 17 | yarl = "==1.7.2" 18 | 19 | [dev-packages] 20 | 21 | [requires] 22 | python_version = "3.8" 23 | -------------------------------------------------------------------------------- /datacenter-proxies/Python/Pipfile: 
-------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | aiohttp = "==3.8.0" 8 | aiosignal = "==1.2.0" 9 | async-timeout = "==4.0.1" 10 | asyncio = "==3.4.3" 11 | attrs = "==21.2.0" 12 | charset-normalizer = "==2.0.7" 13 | frozenlist = "==1.2.0" 14 | idna = "==3.3" 15 | multidict = "==5.2.0" 16 | typing-extensions = "==3.10.0.2" 17 | yarl = "==1.7.2" 18 | 19 | [dev-packages] 20 | 21 | [requires] 22 | python_version = "3.8" 23 | -------------------------------------------------------------------------------- /residential-proxies/Python/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | aiohttp = "==3.8.0" 8 | aiosignal = "==1.2.0" 9 | async-timeout = "==4.0.1" 10 | asyncio = "==3.4.3" 11 | attrs = "==21.2.0" 12 | charset-normalizer = "==2.0.7" 13 | frozenlist = "==1.2.0" 14 | idna = "==3.3" 15 | multidict = "==5.2.0" 16 | typing-extensions = "==3.10.0.2" 17 | yarl = "==1.7.2" 18 | 19 | [dev-packages] 20 | 21 | [requires] 22 | python_version = "3.8" 23 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/proxy.js: -------------------------------------------------------------------------------- 1 | const settings = require('./settings'); 2 | 3 | module.exports = { 4 | createProxyByUrl: (url) => { 5 | let country 6 | let parsedUrl = url 7 | 8 | urlParts = url.split(';') 9 | if (urlParts.length === 2) { 10 | parsedUrl = urlParts[0] 11 | country = urlParts[1] 12 | } 13 | 14 | return [ 15 | parsedUrl, 16 | country, 17 | `http://${settings.Username}:${settings.Password}@${settings.ProxyAddress}` 18 | ] 19 | } 20 | } 21 | -------------------------------------------------------------------------------- 
/datacenter-proxies/PHP/composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oxylabs/datacenter-example", 3 | "type": "project", 4 | "require": { 5 | "guzzlehttp/guzzle": "^7.4", 6 | "spatie/guzzle-rate-limiter-middleware": "^2.0" 7 | }, 8 | "authors": [ 9 | { 10 | "name": "Oxylabs", 11 | "email": "developers@oxylabs.com" 12 | } 13 | ], 14 | "autoload": { 15 | "psr-4": {"Oxylabs\\DatacenterApi\\": "src/"}, 16 | "files": ["./header.php"] 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/proxy.js: -------------------------------------------------------------------------------- 1 | const settings = require('./settings'); 2 | 3 | module.exports = { 4 | createProxyByUrl: (url) => { 5 | urlParts = url.split(';') 6 | if (urlParts.length === 2) { 7 | return [ 8 | urlParts[0], 9 | `http://customer-${settings.Username}-cc-${urlParts[1]}:${settings.Password}@${settings.ProxyAddress}`] 10 | } 11 | 12 | return [url, 13 | `http://customer-${settings.Username}:${settings.Password}@${settings.ProxyAddress}`, 14 | ] 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /residential-proxies/PHP/composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oxylabs/residential-example", 3 | "type": "project", 4 | "require": { 5 | "guzzlehttp/guzzle": "^7.4", 6 | "spatie/guzzle-rate-limiter-middleware": "^2.0" 7 | }, 8 | "authors": [ 9 | { 10 | "name": "Oxylabs", 11 | "email": "developers@oxylabs.com" 12 | } 13 | ], 14 | "autoload": { 15 | "psr-4": {"Oxylabs\\ResidentialApi\\": "src/"}, 16 | "files": ["./header.php"] 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /scraper-apis/PHP/GetNotifierIPList.php: -------------------------------------------------------------------------------- 1 | 
console.log(data)) 16 | .catch(err => console.log(err)) 17 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/src/main/java/RateLimitInterceptor.java: -------------------------------------------------------------------------------- 1 | import okhttp3.Interceptor; 2 | import okhttp3.Response; 3 | import com.google.common.util.concurrent.RateLimiter; 4 | 5 | import java.io.IOException; 6 | 7 | public class RateLimitInterceptor implements Interceptor { 8 | private RateLimiter rateLimiter = RateLimiter.create(Settings.REQUESTS_RATE); 9 | 10 | @Override 11 | public Response intercept(Chain chain) throws IOException { 12 | rateLimiter.acquire(1); 13 | 14 | return chain.proceed(chain.request()); 15 | } 16 | } -------------------------------------------------------------------------------- /scraper-apis/Typescript/CheckJobStatus.ts: -------------------------------------------------------------------------------- 1 | import axios, {AxiosRequestConfig} from 'axios'; 2 | 3 | const config: AxiosRequestConfig = { 4 | auth: { 5 | username: 'user', // Don't forget to fill in user credentials 6 | password: 'pass1' 7 | }, 8 | headers: { 9 | 'Content-Type': 'application/json', 10 | 'Accept-Encoding': 'gzip,deflate,compress', 11 | } 12 | } 13 | 14 | axios.get('https://data.oxylabs.io/v1/queries/1234567890987654321', config) 15 | .then(({data}) => console.log(data)) 16 | .catch(err => console.log(err)) 17 | -------------------------------------------------------------------------------- /scraper-apis/Typescript/RetrieveJobContent.ts: -------------------------------------------------------------------------------- 1 | import axios, {AxiosRequestConfig} from "axios"; 2 | 3 | const config: AxiosRequestConfig = { 4 | auth: { 5 | username: 'user', // Don't forget to fill in user credentials 6 | password: 'pass1' 7 | }, 8 | headers: { 9 | 'Content-Type': 'application/json', 10 | 'Accept-Encoding': 'gzip,deflate,compress', 11 | 
} 12 | } 13 | 14 | axios.get('https://data.oxylabs.io/v1/queries/1234567890987654321/results', config) 15 | .then(({data}) => console.log(data)) 16 | .catch(err => console.log(err)) 17 | -------------------------------------------------------------------------------- /scraper-apis/Python/get_callbacker_ips.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from pprint import pprint 3 | 4 | 5 | def get_callbacker_ips() -> dict: 6 | # Get response from the callbacker IPs endpoint. 7 | response = requests.request( 8 | method="GET", 9 | url="https://data.oxylabs.io/v1/info/callbacker_ips", 10 | auth=("user", "pass"), 11 | ) 12 | 13 | # Return the response JSON. 14 | return response.json() 15 | 16 | 17 | if __name__ == "__main__": 18 | callbacker_ips = get_callbacker_ips() 19 | # Print the prettified JSON response to stdout. 20 | pprint(callbacker_ips) 21 | -------------------------------------------------------------------------------- /scraper-apis/Python/retrieve_job_content.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from pprint import pprint 3 | 4 | 5 | def get_result(job_id: int) -> dict: 6 | # Get response from results endpoint. 7 | response = requests.request( 8 | method="GET", 9 | url=f"https://data.oxylabs.io/v1/queries/{job_id}/results", 10 | auth=("user", "pass"), 11 | ) 12 | 13 | # Return the response JSON. 14 | return response.json() 15 | 16 | 17 | if __name__ == "__main__": 18 | result = get_result(job_id=1234567890987654321) 19 | # Print the prettified JSON response to stdout. 
20 | pprint(result) 21 | -------------------------------------------------------------------------------- /web-unblocker/Java/src/main/java/Settings.java: -------------------------------------------------------------------------------- 1 | public class Settings { 2 | static final String USERNAME = ""; 3 | static final String PASSWORD = ""; 4 | static final int TIMEOUT = 5; 5 | static final int REQUESTS_RATE = 10; 6 | static final int RETRIES_NUM = 3; 7 | static final String URL_LIST_NAME = "./src/main/resources/url_list_ngrp.txt"; 8 | static final String PROXY_ADDRESS = "unblock.oxylabs.io:60000"; 9 | 10 | static final String BROWSER_LIST_NAME = "./src/main/resources/browser_headers.json"; 11 | static final String AGENT_LIST_NAME = "./src/main/resources/user_agents.json"; 12 | } 13 | -------------------------------------------------------------------------------- /residential-proxies/Java/src/main/java/Settings.java: -------------------------------------------------------------------------------- 1 | public class Settings { 2 | static final String USERNAME = ""; 3 | static final String PASSWORD = ""; 4 | static final int TIMEOUT = 5; 5 | static final int REQUESTS_RATE = 10; 6 | static final int RETRIES_NUM = 3; 7 | static final String URL_LIST_NAME = "./src/main/resources/url_list_residential.txt"; 8 | static final String PROXY_ADDRESS = "pr.oxylabs.io:7777"; 9 | 10 | static final String BROWSER_LIST_NAME = "./src/main/resources/browser_headers.json"; 11 | static final String AGENT_LIST_NAME = "./src/main/resources/user_agents.json"; 12 | } 13 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/proxy.rb: -------------------------------------------------------------------------------- 1 | require_relative './settings' 2 | 3 | def create_proxy_by_url(proxy_map, url) 4 | url_parts = url.split ';' 5 | default_proxy_index = get_setting :DefaultProxyIndexName 6 | 7 | country = nil 8 | if url_parts.length == 2 
9 | url = url_parts[0] 10 | country = url_parts[1] 11 | end 12 | 13 | proxy_address = proxy_map[default_proxy_index] 14 | if proxy_map.key? country 15 | proxy_address = proxy_map[country] 16 | end 17 | 18 | proxy_address = "http://customer-#{get_setting(:Username)}:#{get_setting(:Password)}@#{proxy_address}" 19 | 20 | [url, proxy_address] 21 | end -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/proxy.js: -------------------------------------------------------------------------------- 1 | const settings = require('./settings'); 2 | 3 | module.exports = { 4 | createProxyByUrl: (proxyMap, url) => { 5 | country = null; 6 | urlParts = url.split(';') 7 | if (urlParts.length === 2) { 8 | url = urlParts[0]; 9 | country = urlParts[1]; 10 | } 11 | 12 | proxyAddress = proxyMap[settings.DefaultProxyIndexName] 13 | if (typeof proxyMap[country] !== 'undefined') { 14 | proxyAddress = proxyMap[country]; 15 | } 16 | 17 | return [url, 18 | `http://customer-${settings.Username}:${settings.Password}@${proxyAddress}`, 19 | ] 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/proxy.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | ) 7 | 8 | func createProxyByUrl(proxyMap map[string]string, url string) (string, string) { 9 | urlParts := strings.Split(url, ";") 10 | proxyAddress, _ := proxyMap[DefaultProxyIndexName] 11 | 12 | if len(urlParts) == 2 { 13 | url = urlParts[0] 14 | country := urlParts[1] 15 | 16 | if countrySpecificProxy, ok := proxyMap[country]; ok { 17 | proxyAddress = countrySpecificProxy 18 | } 19 | } 20 | 21 | return url, fmt.Sprintf( 22 | "http://customer-%s:%s@%s", 23 | Username, 24 | Password, 25 | proxyAddress, 26 | ) 27 | } 28 | -------------------------------------------------------------------------------- 
/scraper-apis/Java/README.md: -------------------------------------------------------------------------------- 1 | This project provides you with Scraper-API example use-cases. Use this code to interact with any other Scraper-APIs by changing the `source` parameter in POST payload data. 2 | 3 | You can find Scraper APIs documentation here: https://developers.oxylabs.io 4 | 5 | For full access to all available targets, please contact our Sales Manager or your Account Manager. 6 | 7 | # Requirements 8 | 9 | - Java 1.8+ 10 | 11 | This example does not require any 3rd party dependencies. 12 | 13 | # Usage 14 | 15 | Compile: 16 | ```bash 17 | $ javac BasicQuery.java 18 | ``` 19 | 20 | Run: 21 | ```bash 22 | $ java BasicQuery 23 | ``` 24 | -------------------------------------------------------------------------------- /web-unblocker/CSharp/Oxylabs/ConsoleWriter.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace Oxylabs 6 | { 7 | class ConsoleWriter 8 | { 9 | public void WritelnAndExit(String str) 10 | { 11 | this.WritelnError(str); 12 | System.Environment.Exit(1); 13 | } 14 | 15 | public void WritelnError(String output) 16 | { 17 | this.Writeln("ERROR: " + output); 18 | } 19 | 20 | public void Writeln(String str) 21 | { 22 | Console.WriteLine(str); 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /datacenter-proxies/CSharp/Oxylabs/ConsoleWriter.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace Oxylabs 6 | { 7 | class ConsoleWriter 8 | { 9 | public void WritelnAndExit(String str) 10 | { 11 | this.WritelnError(str); 12 | System.Environment.Exit(1); 13 | } 14 | 15 | public void WritelnError(String output) 16 | { 17 | this.Writeln("ERROR: " + output); 18
| } 19 | 20 | public void Writeln(String str) 21 | { 22 | Console.WriteLine(str); 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /residential-proxies/CSharp/Oxylabs/ConsoleWriter.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace Oxylabs 6 | { 7 | class ConsoleWriter 8 | { 9 | public void WritelnAndExit(String str) 10 | { 11 | this.WritelnError(str); 12 | System.Environment.Exit(1); 13 | } 14 | 15 | public void WritelnError(String output) 16 | { 17 | this.Writeln("ERROR: " + output); 18 | } 19 | 20 | public void Writeln(String str) 21 | { 22 | Console.WriteLine(str); 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/Callback/Program.cs: -------------------------------------------------------------------------------- 1 | using Microsoft.AspNetCore.Hosting; 2 | using Microsoft.Extensions.Hosting; 3 | 4 | namespace rtc_listener 5 | { 6 | public class Program 7 | { 8 | public static void Main(string[] args) 9 | { 10 | CreateHostBuilder(args).Build().Run(); 11 | } 12 | 13 | public static IHostBuilder CreateHostBuilder(string[] args) => 14 | Host.CreateDefaultBuilder(args) 15 | .ConfigureWebHostDefaults(webBuilder => 16 | { 17 | webBuilder.UseStartup<Startup>(); 18 | }); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /scraper-apis/Python/check_job_status.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from pprint import pprint 3 | 4 | 5 | def get_job_info(job_id: int) -> dict: 6 | # Get response from the job status endpoint 7 | # for the given job ID.
8 | response = requests.request( 9 | method="GET", 10 | url=f"http://data.oxylabs.io/v1/queries/{job_id}", 11 | auth=("user", "pass"), 12 | ) 13 | 14 | # Return the response JSON. 15 | return response.json() 16 | 17 | 18 | if __name__ == "__main__": 19 | job_info = get_job_info(job_id=1234567890987654321) 20 | # Print the prettified JSON response to stdout. 21 | pprint(job_info) 22 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/CSharp/Oxylabs/ConsoleWriter.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace Oxylabs 6 | { 7 | class ConsoleWriter 8 | { 9 | public void WritelnAndExit(String str) 10 | { 11 | this.WritelnError(str); 12 | System.Environment.Exit(1); 13 | } 14 | 15 | public void WritelnError(String output) 16 | { 17 | this.Writeln("ERROR: " + output); 18 | } 19 | 20 | public void Writeln(String str) 21 | { 22 | Console.WriteLine(str); 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /scraper-apis/GoLang/callback/go.mod: -------------------------------------------------------------------------------- 1 | module scraper-api/callback 2 | 3 | go 1.17 4 | 5 | require github.com/labstack/echo/v4 v4.9.1 6 | 7 | require ( 8 | github.com/labstack/gommon v0.4.0 // indirect 9 | github.com/mattn/go-colorable v0.1.11 // indirect 10 | github.com/mattn/go-isatty v0.0.14 // indirect 11 | github.com/valyala/bytebufferpool v1.0.0 // indirect 12 | github.com/valyala/fasttemplate v1.2.1 // indirect 13 | golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect 14 | golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f // indirect 15 | golang.org/x/sys v0.0.0-20211103235746-7861aae1554b // indirect 16 | golang.org/x/text v0.3.7 // indirect 17 | ) 18 | 
-------------------------------------------------------------------------------- /scraper-apis/PHP/README.md: -------------------------------------------------------------------------------- 1 | This project provides you with Scraper-API example use-cases. Use this code to interact with any other Scraper-APIs by changing the `source` parameter in POST payload data. 2 | 3 | You can find Scraper APIs documentation here: https://developers.oxylabs.io 4 | 5 | For full access to all available targets, please contact our Sales Manager or your Account Manager. 6 | 7 | # Requirements 8 | 9 | - PHP >= 7.4 10 | - PHP curl extension 11 | 12 | You need to check whether you meet the requirements by running the following command: 13 | ``` 14 | $ composer install 15 | ``` 16 | 17 | # Usage 18 | 19 | Run: 20 | ```bash 21 | $ php SingleQuery.php 22 | ``` 23 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/Callback/Properties/launchSettings.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json.schemastore.org/launchsettings.json", 3 | "iisSettings": { 4 | "windowsAuthentication": false, 5 | "anonymousAuthentication": true, 6 | "iisExpress": { 7 | "applicationUrl": "http://localhost:47804", 8 | "sslPort": 44327 9 | } 10 | }, 11 | "profiles": { 12 | "oxy_callback": { 13 | "commandName": "Project", 14 | "launchBrowser": true, 15 | "launchUrl": "job_listener", 16 | "applicationUrl": "https://localhost:5001;http://localhost:5000", 17 | "environmentVariables": { 18 | "ASPNETCORE_ENVIRONMENT": "Development" 19 | } 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /scraper-apis/PHP/composer.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_readme": [ 3 | "This file locks the dependencies of your project to a known state", 4 | "Read more about it at 
https://getcomposer.org/doc/01-basic-usage.md#installing-dependencies", 5 | "This file is @generated automatically" 6 | ], 7 | "content-hash": "d925179933c13fdcaf246154419b11d9", 8 | "packages": [], 9 | "packages-dev": [], 10 | "aliases": [], 11 | "minimum-stability": "stable", 12 | "stability-flags": [], 13 | "prefer-stable": false, 14 | "prefer-lowest": false, 15 | "platform": { 16 | "php": "^7.4|^8.0", 17 | "ext-curl": "*" 18 | }, 19 | "platform-dev": [], 20 | "plugin-api-version": "1.1.0" 21 | } 22 | -------------------------------------------------------------------------------- /scraper-apis/Python/README.md: -------------------------------------------------------------------------------- 1 | # Scraper APIs Integration in Python 2 | 3 | This project provides you with Scraper APIs example use-cases. Use this code to 4 | interact with any other of the Scraper APIs by changing the `source` parameter 5 | in POST payload data. 6 | 7 | You can find Scraper APIs documentation here: https://developers.oxylabs.io/. 8 | For full access to all available targets, please contact our Sales Manager or 9 | your Account Manager. 
10 | 11 | ## Requirements 12 | 13 | - Python 3.7+ 14 | - requests 15 | - prettyprinter 16 | - sanic 17 | 18 | ### Installing 19 | Using pip: 20 | 21 | ```bash 22 | pip install requests prettyprinter sanic 23 | ``` 24 | 25 | ## Usage 26 | 27 | ```bash 28 | python3 single_query.py 29 | ``` 30 | -------------------------------------------------------------------------------- /datacenter-proxies/CSharp/Oxylabs/Settings.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace Oxylabs 6 | { 7 | class Settings 8 | { 9 | public readonly static String USERNAME = ""; 10 | public readonly static String PASSWORD = ""; 11 | public readonly static int TIMEOUT = 5; 12 | public readonly static int REQUESTS_RATE = 10; 13 | public readonly static int RETRIES_NUM = 3; 14 | public readonly static String URL_LIST_NAME = "./files/url_list_dc.txt"; 15 | public readonly static String BROWSER_LIST_NAME = "./files/browser_headers.json"; 16 | public readonly static String AGENT_LIST_NAME = "./files/user_agents.json"; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /datacenter-proxies/PHP/src/RoundRobinArrayWrapper.php: -------------------------------------------------------------------------------- 1 | iterator = new InfiniteIterator( 18 | new ArrayIterator( 19 | $source 20 | ) 21 | ); 22 | 23 | $this->iterator->rewind(); 24 | 25 | } 26 | 27 | public function fetchNext(): string 28 | { 29 | $nextItem = $this->iterator->current(); 30 | $this->iterator->next(); 31 | 32 | return $nextItem; 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/Callback/rtc_listener.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | netcoreapp3.1 5 | 6 | 7 | 8 | 9 | 10 | runtime; build; native; contentfiles; analyzers; 
buildtransitive 11 | all 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /scraper-apis/Typescript/README.md: -------------------------------------------------------------------------------- 1 | This project provides you with Scraper-API example use-cases. Use this code to interact with any other Scraper-APIs by changing the `source` 2 | parameter in POST payload data. 3 | 4 | You can find Scraper APIs documentation here: https://developers.oxylabs.io 5 | 6 | For full access to all available targets, please contact our Sales Manager or your Account Manager. 7 | 8 | # Installing 9 | 10 | Using npm: 11 | 12 | ```bash 13 | $ npm i 14 | ``` 15 | 16 | Using yarn: 17 | 18 | ```bash 19 | $ yarn 20 | ``` 21 | 22 | # Usage 23 | 24 | Pass the file you wish to run to the locally-installed typescript binary 25 | 26 | Using npm: 27 | ```bash 28 | $ npx ts-node SingleQuery.js 29 | ``` 30 | 31 | Using yarn: 32 | ```bash 33 | $ yarn ts-node SingleQuery.js 34 | ``` 35 | -------------------------------------------------------------------------------- /datacenter-proxies/CSharp/Oxylabs/Oxylabs.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | Always 15 | 16 | 17 | Always 18 | 19 | 20 | Always 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/src/main/java/Settings.java: -------------------------------------------------------------------------------- 1 | public class Settings { 2 | static final String USERNAME = ""; 3 | static final String PASSWORD = ""; 4 | static final int TIMEOUT = 5; 5 | static final int REQUESTS_RATE = 10; 6 | static final int RETRIES_NUM = 3; 7 | static final String URL_LIST_NAME = "./src/main/resources/url_list_shared_dc.txt"; 8 | static final String PROXY_LIST_NAME = 
"./src/main/resources/proxy_list_shared_dc.txt"; 9 | static final String BROWSER_LIST_NAME = "./src/main/resources/browser_headers.json"; 10 | static final String AGENT_LIST_NAME = "./src/main/resources/user_agents.json"; 11 | static final String PROXY_REGEX = "^dc\\.(?\\w{2})-?pr\\.oxylabs\\.io:\\d+$"; 12 | static final String DEFAULT_PROXY_INDEX_NAME = "DEFAULT"; 13 | } 14 | -------------------------------------------------------------------------------- /residential-proxies/CSharp/Oxylabs/Settings.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace Oxylabs 6 | { 7 | class Settings 8 | { 9 | public readonly static String USERNAME = ""; 10 | public readonly static String PASSWORD = ""; 11 | public readonly static int TIMEOUT = 5; 12 | public readonly static int REQUESTS_RATE = 10; 13 | public readonly static int RETRIES_NUM = 3; 14 | public readonly static String URL_LIST_NAME = "./files/url_list_residential.txt"; 15 | public readonly static String BROWSER_LIST_NAME = "./files/browser_headers.json"; 16 | public readonly static String AGENT_LIST_NAME = "./files/user_agents.json"; 17 | public readonly static String PROXY_ADDRESS = "pr.oxylabs.io:7777"; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /web-unblocker/CSharp/Oxylabs/Settings.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace Oxylabs 6 | { 7 | class Settings 8 | { 9 | public readonly static String USERNAME = ""; 10 | public readonly static String PASSWORD = ""; 11 | public readonly static int TIMEOUT = 5; 12 | public readonly static int REQUESTS_RATE = 10; 13 | public readonly static int RETRIES_NUM = 3; 14 | public readonly static String URL_LIST_NAME = "./files/url_list_residential.txt"; 15 | 
public readonly static String BROWSER_LIST_NAME = "./files/browser_headers.json"; 16 | public readonly static String AGENT_LIST_NAME = "./files/user_agents.json"; 17 | public readonly static String PROXY_ADDRESS = "unblock.oxylabs.io:60000"; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /scraper-apis/GoLang/README.md: -------------------------------------------------------------------------------- 1 | # Scraper-API Golang example 2 | 3 | This project provides you with Scraper-API example use-cases. Use this code to interact with any other Scraper-APIs by changing the `source` parameter in POST payload data. 4 | 5 | You can find Scraper APIs documentation here: https://developers.oxylabs.io 6 | For full access to all available targets, please contact our Sales Manager or your Account Manager. 7 | 8 | # Requirements 9 | 10 | - Golang >= 1.17 11 | 12 | # Usage 13 | 14 | ### 1. Change the directory to an example you are interested in 15 | 16 | ```bash 17 | $ cd single-query 18 | ``` 19 | 20 | ### 2. Set the credentials in the code 21 | 22 | ```bash 23 | const AuthUsername = "YOUR_USERNAME" 24 | const AuthPassword = "YOUR_PASSWORD" 25 | ``` 26 | 27 | ### 3. Run the example 28 | 29 | ```bash 30 | go run main.go 31 | ``` 32 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/README.md: -------------------------------------------------------------------------------- 1 | This project provides you with Scraper-API example use-cases. Use this code to interact with any other Scraper-APIs by changing the `source` parameter in POST payload data. 2 | 3 | You can find Scraper APIs documentation here: https://developers.oxylabs.io 4 | 5 | For full access to all available targets, please contact our Sales Manager or your Account Manager. 
6 | 7 | # Requirements 8 | 9 | - .Net Core 10 | - Newtonsoft.Json 11 | - Dotnet script 12 | 13 | # Installing dependencies 14 | 15 | Install the package for dealing with Json 16 | ``` 17 | $ dotnet add package Newtonsoft.Json 18 | ``` 19 | 20 | Then, install a tool for running individual CSharp files 21 | ``` 22 | $ dotnet tool install -g dotnet-script 23 | ``` 24 | 25 | # Running the app 26 | 27 | Run the script: 28 | 29 | ``` 30 | dotnet script SingleQuery.cs 31 | ``` 32 | -------------------------------------------------------------------------------- /scraper-apis/PHP/Callback.php: -------------------------------------------------------------------------------- 1 | = get_setting(:RetriesNum) 27 | break 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /residential-proxies/Ruby/scraper.rb: -------------------------------------------------------------------------------- 1 | require_relative './client' 2 | require_relative './settings' 3 | require_relative './filesystem' 4 | 5 | class Scraper 6 | def initialize(client) 7 | @client = client 8 | end 9 | 10 | def scrape(position, proxy, url) 11 | attempt = 0 12 | 13 | loop do 14 | response = @client.fetch_page(proxy, url) 15 | if response != nil && response.code != "200" 16 | write_error_to_file("#{url} - Response code #{response.code}") 17 | write_error_to_stdout("#{url} - Response code #{response.code}") 18 | end 19 | 20 | if response != nil && response.code == "200" 21 | write_success_to_file(position, response.body) 22 | break 23 | end 24 | 25 | attempt += 1 26 | if attempt >= get_setting(:RetriesNum) 27 | break 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /scraper-apis/Typescript/BatchQuery.ts: -------------------------------------------------------------------------------- 1 | import axios, {AxiosRequestConfig} from 'axios'; 2 | 3 | // If you wish to get content in 
HTML you can delete parser_type and parse parameters 4 | const payload = { 5 | query: [ 6 | 'kettle', 7 | 'fridge', 8 | 'microwave' 9 | ], 10 | source: 'amazon_search', 11 | geo_location: '10005', 12 | parse: 'true', 13 | }; 14 | 15 | const config: AxiosRequestConfig = { 16 | auth: { 17 | username: 'user', // Don't forget to fill in user credentials 18 | password: 'pass1' 19 | }, 20 | headers: { 21 | 'Content-Type': 'application/json', 22 | 'Accept-Encoding': 'gzip,deflate,compress', 23 | } 24 | } 25 | 26 | axios.post('https://data.oxylabs.io/v1/queries/batch', payload, config) 27 | .then(({data}) => console.log(data)) 28 | .catch(err => console.log(err)) 29 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/scraper.rb: -------------------------------------------------------------------------------- 1 | require_relative './client' 2 | require_relative './settings' 3 | require_relative './filesystem' 4 | 5 | class Scraper 6 | def initialize(client) 7 | @client = client 8 | end 9 | 10 | def scrape(position, proxy, url) 11 | attempt = 0 12 | 13 | loop do 14 | response = @client.fetch_page(proxy, url) 15 | if response != nil && response.code != "200" 16 | write_error_to_file("#{url} - Response code #{response.code}") 17 | write_error_to_stdout("#{url} - Response code #{response.code}") 18 | end 19 | 20 | if response != nil && response.code == "200" 21 | write_success_to_file(position, response.body) 22 | break 23 | end 24 | 25 | attempt += 1 26 | if attempt >= get_setting(:RetriesNum) 27 | break 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /web-unblocker/Ruby/scraper.rb: -------------------------------------------------------------------------------- 1 | require_relative './client' 2 | require_relative './settings' 3 | require_relative './filesystem' 4 | 5 | class Scraper 6 | def initialize(client) 7 | @client = client 8 | end 9 | 
10 | def scrape(position, proxy, url, country) 11 | attempt = 0 12 | 13 | loop do 14 | response = @client.fetch_page(proxy, url, country) 15 | if response != nil && response.code != "200" 16 | write_error_to_file("#{url} - Response code #{response.code}") 17 | write_error_to_stdout("#{url} - Response code #{response.code}") 18 | end 19 | 20 | if response != nil && response.code == "200" 21 | write_success_to_file(position, response.body) 22 | break 23 | end 24 | 25 | attempt += 1 26 | if attempt >= get_setting(:RetriesNum) 27 | break 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/CSharp/Oxylabs/Settings.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace Oxylabs 4 | { 5 | class Settings 6 | { 7 | public readonly static String USERNAME = ""; 8 | public readonly static String PASSWORD = ""; 9 | public readonly static int TIMEOUT = 5; 10 | public readonly static int REQUESTS_RATE = 10; 11 | public readonly static int RETRIES_NUM = 3; 12 | public readonly static String URL_LIST_NAME = "./files/url_list_shared_dc.txt"; 13 | public readonly static String PROXY_LIST_NAME = "./files/proxy_list_shared_dc.txt"; 14 | public readonly static String BROWSER_LIST_NAME = "./files/browser_headers.json"; 15 | public readonly static String AGENT_LIST_NAME = "./files/user_agents.json"; 16 | public readonly static String DEFAULT_PROXY_INDEX_NAME = "DEFAULT"; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /scraper-apis/CSharp/Callback/rtc_listener.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "rtc_listener", "rtc_listener.csproj", "{A6C9545C-A9ED-4B15-A556-A715741BB6F8}" 5 | 
EndProject 6 | Global 7 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 8 | Debug|Any CPU = Debug|Any CPU 9 | Release|Any CPU = Release|Any CPU 10 | EndGlobalSection 11 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 12 | {A6C9545C-A9ED-4B15-A556-A715741BB6F8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 13 | {A6C9545C-A9ED-4B15-A556-A715741BB6F8}.Debug|Any CPU.Build.0 = Debug|Any CPU 14 | {A6C9545C-A9ED-4B15-A556-A715741BB6F8}.Release|Any CPU.ActiveCfg = Release|Any CPU 15 | {A6C9545C-A9ED-4B15-A556-A715741BB6F8}.Release|Any CPU.Build.0 = Release|Any CPU 16 | EndGlobalSection 17 | EndGlobal 18 | -------------------------------------------------------------------------------- /web-unblocker/Ruby/proxy.rb: -------------------------------------------------------------------------------- 1 | require_relative './settings' 2 | 3 | def create_proxy_by_url(url) 4 | country = nil 5 | parsed_url = url 6 | 7 | url_parts = url.split ';' 8 | if url_parts.length == 2 9 | parsed_url = url_parts[0] 10 | country = url_parts[1] 11 | end 12 | 13 | [ 14 | parsed_url, 15 | country, 16 | "http://#{get_setting(:Username)}:#{get_setting(:Password)}@#{get_setting(:ProxyAddress)}" 17 | ] 18 | end 19 | 20 | # func createProxyByUrl(url string) (string, string) { 21 | # urlParts := strings.Split(url, ";") 22 | # if len(urlParts) == 2 { 23 | # return urlParts[0], fmt.Sprintf( 24 | # "http://customer-%s-cc-%s:%s@%s", 25 | # Username, 26 | # urlParts[1], 27 | # Password, 28 | # ProxyAddress, 29 | # ) 30 | # } 31 | # 32 | # return url, fmt.Sprintf( 33 | # "http://customer-%s:%s@%s", 34 | # Username, 35 | # Password, 36 | # ProxyAddress, 37 | # ) 38 | # } -------------------------------------------------------------------------------- /scraper-apis/Nodejs/README.md: -------------------------------------------------------------------------------- 1 | This project provides you with Scraper-API example use-cases. 
Use this code to interact with any other Scraper-APIs by changing the `source` parameter in POST payload data. 2 | 3 | You can find Scraper APIs documentation here: https://developers.oxylabs.io 4 | 5 | For full access to all available targets, please contact our Sales Manager or your Account Manager. 6 | 7 | # Installing dependencies 8 | Using npm: 9 | 10 | ```bash 11 | $ npm i 12 | ``` 13 | 14 | Using bower: 15 | 16 | ```bash 17 | $ bower i 18 | ``` 19 | 20 | Using yarn: 21 | 22 | ```bash 23 | $ yarn 24 | ``` 25 | 26 | Using jsDelivr CDN: 27 | 28 | ```html 29 | 30 | ``` 31 | 32 | Using unpkg CDN: 33 | 34 | ```html 35 | 36 | ``` 37 | 38 | # Usage 39 | 40 | ```bash 41 | $ node SingleQuery.js 42 | ``` 43 | -------------------------------------------------------------------------------- /web-unblocker/PHP/src/ProxyFormatter.php: -------------------------------------------------------------------------------- 1 | parseUrl($url); 17 | if (count($urlParts) == 2) { 18 | $parsedUrl = trim($urlParts[0]); 19 | $country = trim($urlParts[1]); 20 | } 21 | 22 | return [$parsedUrl, $country, strtr(self::TEMPLATE, [ 23 | '{USERNAME}' => USERNAME, 24 | '{PASSWORD}' => PASSWORD, 25 | '{PROXY_ADDRESS}' => PROXY_ADDRESS, 26 | ])]; 27 | } 28 | 29 | private function parseUrl(string $url): array 30 | { 31 | return explode(';', $url); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /datacenter-proxies/Java/src/main/java/RoundRobin.java: -------------------------------------------------------------------------------- 1 | import java.util.Iterator; 2 | import java.util.List; 3 | 4 | public class RoundRobin implements Iterable { 5 | private final List list; 6 | 7 | public RoundRobin(List list) { 8 | this.list = list; 9 | } 10 | 11 | public Iterator iterator() { 12 | return new Iterator() { 13 | private int index = 0; 14 | 15 | @Override 16 | public boolean hasNext() { 17 | return true; 18 | } 19 | 20 | @Override 21 | public T next() { 22 | 
T item = list.get(index); 23 | index = (index + 1) % list.size(); 24 | return item; 25 | } 26 | 27 | @Override 28 | public void remove() { 29 | throw new UnsupportedOperationException(); 30 | } 31 | 32 | }; 33 | } 34 | } -------------------------------------------------------------------------------- /web-unblocker/CSharp/Oxylabs/Oxylabs.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | Always 15 | 16 | 17 | Always 18 | 19 | 20 | Always 21 | 22 | 23 | Always 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /residential-proxies/CSharp/Oxylabs/Oxylabs.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | Always 15 | 16 | 17 | Always 18 | 19 | 20 | Always 21 | 22 | 23 | Always 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/scrape.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "net/http" 4 | 5 | type Scraper struct { 6 | apiClient *ApiClient 7 | } 8 | 9 | func NewScraper(apiClient *ApiClient) *Scraper { 10 | return &Scraper{apiClient} 11 | } 12 | 13 | func (s *Scraper) scrape(position int, proxy string, url string) { 14 | var response *http.Response 15 | var err error 16 | retry := 0 17 | 18 | for { 19 | response, err = s.apiClient.fetchPage(proxy, url) 20 | if response != nil && response.StatusCode != 200 { 21 | writeErrorToFile("%s - Response code %d", url, response.StatusCode) 22 | } else if err != nil { 23 | writeErrorToFile("%s - Response error %s", url, err) 24 | writeErrorToStdout("%s failed with error %s", url, err) 25 | } 26 | 27 | if response != nil && response.StatusCode == 200 { 28 | writeSuccessToFile(position, 
response.Body) 29 | break 30 | } 31 | 32 | retry += 1 33 | if retry >= RetriesNum { 34 | break 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /residential-proxies/GoLang/scrape.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "net/http" 4 | 5 | type Scraper struct { 6 | apiClient *ApiClient 7 | } 8 | 9 | func NewScraper(apiClient *ApiClient) *Scraper { 10 | return &Scraper{apiClient} 11 | } 12 | 13 | func (s *Scraper) scrape(position int, proxy string, url string) { 14 | var response *http.Response 15 | var err error 16 | retry := 0 17 | 18 | for { 19 | response, err = s.apiClient.fetchPage(proxy, url) 20 | if response != nil && response.StatusCode != 200 { 21 | writeErrorToFile("%s - Response code %d", url, response.StatusCode) 22 | } else if err != nil { 23 | writeErrorToFile("%s - Response error %s", url, err) 24 | writeErrorToStdout("%s failed with error %s", url, err) 25 | } 26 | 27 | if response != nil && response.StatusCode == 200 { 28 | writeSuccessToFile(position, response.Body) 29 | break 30 | } 31 | 32 | retry += 1 33 | if retry >= RetriesNum { 34 | break 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | "axios-rate-limit@^1.3.0": 6 | "integrity" "sha512-cKR5wTbU/CeeyF1xVl5hl6FlYsmzDVqxlN4rGtfO5x7J83UxKDckudsW0yW21/ZJRcO0Qrfm3fUFbhEbWTLayw==" 7 | "resolved" "https://registry.npmjs.org/axios-rate-limit/-/axios-rate-limit-1.3.0.tgz" 8 | "version" "1.3.0" 9 | 10 | "axios@*", "axios@^0.24.0": 11 | "integrity" "sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==" 12 | "resolved" "https://registry.npmjs.org/axios/-/axios-0.24.0.tgz" 13 | "version" "0.24.0" 14 | dependencies: 15 | "follow-redirects" "^1.14.4" 16 | 17 | "follow-redirects@^1.14.4": 18 | "integrity" "sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA==" 19 | "resolved" "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.5.tgz" 20 | "version" "1.14.5" 21 | -------------------------------------------------------------------------------- /datacenter-proxies/CSharp/README.md: -------------------------------------------------------------------------------- 1 | # Datacenter Proxies C# Example 2 | 3 | This example demonstrates how to use [oxylabs datacenter API](https://developers.oxylabs.io/datacenter-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see Settings.cs) 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | 16 | ## Prerequisites 17 | 18 | The following tools need to be present on your system 19 | * C# Development environment 20 | 21 | ## How to run the script 22 | 23 | Build and run by opening 
from pprint import pprint

from client import PushPullScraperAPIsClient


def run_example() -> dict:
    """Create a single Push-Pull job and block until its results arrive."""
    # Instantiate a PushPullScraperAPIsClient.
    # Replace the values with your own credentials.
    api = PushPullScraperAPIsClient("user", "pass")

    # Job payload; adjust the parameters to your own use case.
    payload = {
        "source": "google_search",
        "query": "large shoes",
        "geo_location": "United States",
        "parse": True,
        # Replace with your own callback URL:
        # "callback_url": "https://your.callback.url",
    }

    # Submit the job, then poll for its results using the returned job ID.
    job_info = api.create_job(payload)
    return api.wait_for_and_get_job_results(job_info["id"])


if __name__ == "__main__":
    result = run_example()
    pprint(result)
/* This example will submit a job request to Scraper API.
The job will deliver parsed product data in JSON for multiple Amazon searches
from United States geo-location*/

const axios = require('axios');

//If you wish to get content in HTML you can delete parser_type and parse parameters
const payload = {
    'query': [
        'kettle',
        'fridge',
        'microwave'
    ],
    'source': 'amazon_search',
    'geo_location': '10005',
    'parse': 'true',
}

axios.post('https://data.oxylabs.io/v1/queries/batch', payload, {
    auth: {
        username: 'user', //Don't forget to fill in user credentials
        password: 'pass1'
    },
    // BUG FIX: axios has no top-level `ContentType` option — the original
    // line was silently ignored. The header belongs in `headers`, matching
    // SingleQuery.js.
    headers: {
        'Content-Type': 'application/json',
    },
})
    .then(({ data }) => console.log(data))
    .catch(err => console.log(err))
//To retrieve parsed or raw content from the webpage, use _links from the response dictionary and check RetrieveJobContent.js file
3 | 4 | You can find Push-Pull documentation here: 5 | [Push-Pull - Oxylabs Documentation](https://developers.oxylabs.io/scraper-apis/getting-started/api-reference/integration-methods/push-pull) 6 | 7 | # Requirements 8 | 9 | - Python 3.7+ 10 | - requests 11 | - prettyprinter 12 | - sanic 13 | 14 | # Installing 15 | Using pip: 16 | 17 | ```bash 18 | $ pip install requests prettyprinter sanic 19 | ``` 20 | 21 | # Usage 22 | You can edit the payload of these jobs 23 | 24 | For running a single job: 25 | ```bash 26 | $ python single_job.py 27 | ``` 28 | 29 | For running a batch job: 30 | ```bash 31 | $ python batch_query.py 32 | ``` 33 | 34 | If you specify a `callback_url` in the payload, you need a server that accepts 35 | requests from Oxylabs servers. 36 | An example of the code that should be running is provided in the `callback.py` file. -------------------------------------------------------------------------------- /web-unblocker/GoLang/scrape.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "net/http" 4 | 5 | type Scraper struct { 6 | apiClient *ApiClient 7 | } 8 | 9 | func NewScraper(apiClient *ApiClient) *Scraper { 10 | return &Scraper{apiClient} 11 | } 12 | 13 | func (s *Scraper) scrape(position int, proxy string, url string, country *string) { 14 | var response *http.Response 15 | var err error 16 | retry := 0 17 | 18 | for { 19 | response, err = s.apiClient.fetchPage(proxy, url, country) 20 | if response != nil && response.StatusCode != 200 { 21 | writeErrorToFile("%s - Response code %d", url, response.StatusCode) 22 | } else if err != nil { 23 | writeErrorToFile("%s - Response error %s", url, err) 24 | writeErrorToStdout("%s failed with error %s", url, err) 25 | } 26 | 27 | if response != nil && response.StatusCode == 200 { 28 | writeSuccessToFile(position, response.Body) 29 | break 30 | } 31 | 32 | retry += 1 33 | if retry >= RetriesNum { 34 | break 35 | } 36 | } 37 | } 38 | 
-------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/scraper.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | const settings = require('./settings'); 3 | const filesystem = require('./filesystem'); 4 | 5 | module.exports = { 6 | scrape: async (position, proxy, url) => { 7 | let response = null; 8 | let retry = 0; 9 | do { 10 | [response, err] = await client.fetchPage(proxy, url); 11 | if (response !== null && response.status !== 200) { 12 | await filesystem.writeErrorToFile(`${url} - Response code ${response.status}`) 13 | } else if (err !== null) { 14 | await filesystem.writeErrorToFile(`${url} - Response error ${err.message}`) 15 | filesystem.writeErrorToStdout(`${url} failed with error ${err.message}`) 16 | } 17 | 18 | if (response !== null && response.status === 200) { 19 | await filesystem.writeSuccessToFile(position, response.data) 20 | break 21 | } 22 | 23 | retry += 1; 24 | } while (retry < settings.RetriesNum); 25 | } 26 | }; 27 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/scraper.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | const settings = require('./settings'); 3 | const filesystem = require('./filesystem'); 4 | 5 | module.exports = { 6 | scrape: async (position, proxy, url) => { 7 | let response = null; 8 | let retry = 0; 9 | do { 10 | [response, err] = await client.fetchPage(proxy, url); 11 | if (response !== null && response.status !== 200) { 12 | await filesystem.writeErrorToFile(`${url} - Response code ${response.status}`) 13 | } else if (err !== null) { 14 | await filesystem.writeErrorToFile(`${url} - Response error ${err.message}`) 15 | filesystem.writeErrorToStdout(`${url} failed with error ${err.message}`) 16 | } 17 | 18 | if (response !== null && response.status 
require_relative './header'
require_relative './settings'
require_relative './client'
require_relative './proxy'
require_relative './scraper'
require 'concurrent'

starting = Time.now

client = Client.new
scraper = Scraper.new(client)

p 'Reading from the list...'
urls = File::readlines(get_setting(:UrlListName), chomp: true)

p 'Gathering results...'
operations = []
urls.each_with_index do |url, position|
  parsed_url, formatted_proxy = create_proxy_by_url(url)

  operation = Concurrent::Future.execute do
    scraper.scrape(position, formatted_proxy, parsed_url)

    requests_rate = get_setting(:RequestsRate)
    # BUG FIX: RequestsRate is an Integer, so `1 / requests_rate` used
    # integer division and slept 0 seconds for any rate > 1, disabling
    # the rate limit entirely. Use float division instead.
    sleep(1.0 / requests_rate)
  end

  operations.push(operation)
end

# Block until every future has completed.
operations.each { |operation| operation.value }

ending = Time.now
elapsed = ending - starting
printf "Script finished after %.2fs\n", elapsed
/* This example will submit a job request to Scraper API.
The job will deliver parsed product data in JSON for Amazon searches
from United States geo-location*/

const axios = require('axios');

//If you wish to get content in HTML you can delete parser_type and parse parameters
const payload = {
    'source': 'amazon_search',
    'query': 'kettle',
    'geo_location': '10005',
    'parse': 'true',
}

// Request configuration: basic auth plus JSON headers.
const requestConfig = {
    auth: {
        username: 'user', //Don't forget to fill in user credentials
        password: 'pass1'
    },
    headers: {
        'Content-Type': 'application/json',
        'Accept-Encoding': 'gzip,deflate,compress',
    },
};

axios
    .post('https://data.oxylabs.io/v1/queries', payload, requestConfig)
    .then(({ data }) => console.log(data))
    .catch(err => console.log(err))
//To retrieve parsed or raw content from the webpage, use _links from the response dictionary and check RetrieveJobContent.js file
-------------------------------------------------------------------------------- /web-unblocker/GoLang/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "golang.org/x/time/rate" 6 | "sync" 7 | "time" 8 | ) 9 | 10 | func main() { 11 | start := time.Now() 12 | 13 | fmt.Println("Reading from the list...") 14 | urls, err := readLines(UrlListName) 15 | if err != nil { 16 | printAndExit("Failed to read the input file") 17 | } 18 | 19 | apiRateLimit := rate.NewLimiter(rate.Every(time.Second), RequestsRate) 20 | apiClient := NewClient(apiRateLimit) 21 | scraper := NewScraper(apiClient) 22 | 23 | wc := sync.WaitGroup{} 24 | 25 | fmt.Println("Gathering results...") 26 | for index, url := range urls { 27 | wc.Add(1) 28 | go func(url string, position int) { 29 | parsedUrl, country, formattedProxy := createProxyByUrl(url) 30 | scraper.scrape(position, formattedProxy, parsedUrl, country) 31 | wc.Done() 32 | }(url, index+1) 33 | } 34 | 35 | wc.Wait() 36 | 37 | elapsed := time.Since(start) 38 | fmt.Printf("Scraping finished after %.2fs\n", elapsed.Seconds()) 39 | } 40 | -------------------------------------------------------------------------------- /residential-proxies/Ruby/proxy.rb: -------------------------------------------------------------------------------- 1 | require_relative './settings' 2 | 3 | def create_proxy_by_url(url) 4 | url_parts = url.split ';' 5 | if url_parts.length == 2 6 | proxy_address = "http://customer-#{get_setting(:Username)}-cc-#{url_parts[1]}:#{get_setting(:Password)}@#{get_setting(:ProxyAddress)}" 7 | 8 | return [url_parts[0], proxy_address] 9 | end 10 | 11 | proxy_address = "http://customer-#{get_setting(:Username)}:#{get_setting(:Password)}@#{get_setting(:ProxyAddress)}" 12 | 13 | [url, proxy_address] 14 | end 15 | 16 | # func createProxyByUrl(url string) (string, string) { 17 | # urlParts := strings.Split(url, ";") 18 | # if len(urlParts) == 2 { 19 | # return 
urlParts[0], fmt.Sprintf( 20 | # "http://customer-%s-cc-%s:%s@%s", 21 | # Username, 22 | # urlParts[1], 23 | # Password, 24 | # ProxyAddress, 25 | # ) 26 | # } 27 | # 28 | # return url, fmt.Sprintf( 29 | # "http://customer-%s:%s@%s", 30 | # Username, 31 | # Password, 32 | # ProxyAddress, 33 | # ) 34 | # } -------------------------------------------------------------------------------- /web-unblocker/Nodejs/scraper.js: -------------------------------------------------------------------------------- 1 | const client = require('./client'); 2 | const settings = require('./settings'); 3 | const filesystem = require('./filesystem'); 4 | 5 | module.exports = { 6 | scrape: async (position, proxy, url, country) => { 7 | let response = null; 8 | let retry = 0; 9 | do { 10 | [response, err] = await client.fetchPage(proxy, url, country); 11 | if (response !== null && response.status !== 200) { 12 | await filesystem.writeErrorToFile(`${url} - Response code ${response.status}`) 13 | } else if (err !== null) { 14 | await filesystem.writeErrorToFile(`${url} - Response error ${err.message}`) 15 | filesystem.writeErrorToStdout(`${url} failed with error ${err.message}`) 16 | } 17 | 18 | if (response !== null && response.status === 200) { 19 | await filesystem.writeSuccessToFile(position, response.data) 20 | break 21 | } 22 | 23 | retry += 1; 24 | } while (retry < settings.RetriesNum); 25 | } 26 | }; 27 | -------------------------------------------------------------------------------- /web-unblocker/Ruby/main.rb: -------------------------------------------------------------------------------- 1 | require_relative './header' 2 | require_relative './settings' 3 | require_relative './client' 4 | require_relative './proxy' 5 | require_relative './scraper' 6 | require 'concurrent' 7 | 8 | starting = Time.now 9 | 10 | client = Client.new 11 | scraper = Scraper.new(client) 12 | 13 | p 'Reading from the list...' 
14 | urls = File::readlines(get_setting(:UrlListName), chomp: true) 15 | 16 | p 'Gathering results...' 17 | operations = [] 18 | urls.each_with_index do |url, position| 19 | parsed_url, country, formatted_proxy = create_proxy_by_url(url) 20 | 21 | operation = Concurrent::Future.execute do 22 | scraper.scrape(position, formatted_proxy, parsed_url, country) 23 | 24 | requests_rate = get_setting(:RequestsRate) 25 | sleep(1 / requests_rate) 26 | end 27 | 28 | operations.push(operation) 29 | end 30 | 31 | operations.each { |operation| operation.value } 32 | 33 | ending = Time.now 34 | elapsed = ending - starting 35 | printf "Script finished after %.2fs\n", elapsed 36 | -------------------------------------------------------------------------------- /web-unblocker/CSharp/README.md: -------------------------------------------------------------------------------- 1 | # Web Unblocker C# Example 2 | 3 | This example demonstrates how to use [Web Unblocker API](https://developers.oxylabs.io/advanced-proxy-solutions/web-unblocker) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see Settings.cs) 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | * PROXY_ADDRESS (String) - Web Unblocker proxy endpoint. 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * C# Development environment 21 | 22 | ## How to run the script 23 | 24 | Build and run by opening the attached sln file. 
25 | -------------------------------------------------------------------------------- /datacenter-proxies/Ruby/README.md: -------------------------------------------------------------------------------- 1 | # Datacenter Proxies Ruby Example 2 | 3 | This example demonstrates how to use [oxylabs datacenter API](https://developers.oxylabs.io/datacenter-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.rb) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | 16 | ## Prerequisites 17 | 18 | The following tools need to be present on your system 19 | * ruby >= 3.0.3 20 | 21 | ## How to run the script 22 | 23 | Install dependencies 24 | ``` 25 | bundle install 26 | ``` 27 | 28 | Execute the script: 29 | ``` 30 | ruby main.rb 31 | ``` 32 | -------------------------------------------------------------------------------- /residential-proxies/CSharp/README.md: -------------------------------------------------------------------------------- 1 | # Residential Proxies C# Example 2 | 3 | This example demonstrates how to use [residential proxy API](https://developers.oxylabs.io/residential-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see Settings.cs) 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * 
RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | * PROXY_ADDRESS (String) - Residential proxies endpoint. 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * C# Development environment 21 | 22 | ## How to run the script 23 | 24 | Build and run by opening the attached sln file. 25 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/README.md: -------------------------------------------------------------------------------- 1 | # Datacenter Proxies Golang Example 2 | 3 | This example demonstrates how to use [oxylabs datacenter API](https://developers.oxylabs.io/datacenter-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see constant.go) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | 16 | ## Prerequisites 17 | 18 | The following tools need to be present on your system 19 | * go 1.16 20 | 21 | ## How to run the script 22 | 23 | Install dependencies 24 | ``` 25 | go get ./... 
(async() => {
    const { performance } = require('perf_hooks');
    const settings = require('./settings');
    const filesystem = require('./filesystem');
    const client = require('./client');
    const util = require('./util');
    const scraper = require('./scraper');
    const proxyUtils = require('./proxy');

    const startTime = performance.now();

    console.log('Reading from the list...');
    const urls = filesystem.readLines(settings.UrlListName)

    console.log('Retrieving proxy list...')

    console.log('Gathering results...')
    let asyncJobs = [];
    for (let i = 0; i < urls.length; i++) {
        // BUG FIX: parsedUrl/formattedProxy were assigned without being
        // declared, leaking accidental globals (a crash in strict mode).
        const [parsedUrl, formattedProxy] = proxyUtils.createProxyByUrl(urls[i]);
        asyncJobs.push(scraper.scrape(i+1, formattedProxy, parsedUrl))
    }

    await Promise.all(asyncJobs);

    const endTime = performance.now()

    console.log(`Script finished after ${((endTime - startTime)/1000).toFixed(2)} seconds`)
})();
$proxyMap[DEFAULT_PROXY_INDEX_NAME]; 21 | $template = 'http://customer-{USERNAME}:{PASSWORD}@{PROXY_ADDRESS}'; 22 | 23 | return [$parsedUrl, strtr($template, [ 24 | '{USERNAME}' => USERNAME, 25 | '{PASSWORD}' => PASSWORD, 26 | '{PROXY_ADDRESS}' => $proxyAddress, 27 | ])]; 28 | } 29 | 30 | private function parseUrl(string $url): array 31 | { 32 | return explode(';', $url); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /datacenter-proxies/Python/README.md: -------------------------------------------------------------------------------- 1 | # Datacenter Proxies Python Example 2 | 3 | This example demonstrates how to use [oxylabs datacenter API](https://developers.oxylabs.io/datacenter-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | 16 | ## Prerequisites 17 | 18 | The following tools need to be present on your system 19 | * pipenv 20 | 21 | ## How to run the script 22 | 23 | Install dependencies 24 | ``` 25 | $ pipenv install 26 | ``` 27 | 28 | Run the script inside the virtualenv 29 | ``` 30 | $ pipenv run python dc.py 31 | ``` 32 | -------------------------------------------------------------------------------- /datacenter-proxies/Java/README.md: -------------------------------------------------------------------------------- 1 | # Datacenter Proxies Java Example 2 | 3 | This example demonstrates how to use [oxylabs datacenter 
API](https://developers.oxylabs.io/datacenter-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see Settings.java) 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | 16 | ## Prerequisites 17 | 18 | The following tools need to be present on your system 19 | * java 11.0.11 20 | 21 | ## How to run the script 22 | 23 | Compile and execute the script: 24 | ``` 25 | $ mvm package 26 | $ java -jar target/datacenter-1.0-SNAPSHOT-jar-with-dependencies.jar 27 | ``` 28 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/main.js: -------------------------------------------------------------------------------- 1 | (async() => { 2 | const { performance } = require('perf_hooks'); 3 | const settings = require('./settings'); 4 | const filesystem = require('./filesystem'); 5 | const client = require('./client'); 6 | const util = require('./util'); 7 | const scraper = require('./scraper'); 8 | const proxyUtils = require('./proxy'); 9 | 10 | const startTime = performance.now(); 11 | 12 | console.log('Reading from the list...'); 13 | const urls = filesystem.readLines(settings.UrlListName) 14 | 15 | console.log('Retrieving proxy list...') 16 | 17 | console.log('Gathering results...') 18 | let asyncJobs = []; 19 | for (let i = 0; i < urls.length; i++) { 20 | [parsedUrl, country, formattedProxy] = proxyUtils.createProxyByUrl(urls[i]); 21 | asyncJobs.push(scraper.scrape(i+1, formattedProxy, parsedUrl, country)) 22 | } 23 | 24 | await Promise.all(asyncJobs); 25 | 26 
// Simple express server with an endpoint listening for POST requests
const axios = require('axios');
const express = require('express');
const bodyParser = require('body-parser');

const credentials = {
    auth: {
        username: 'user',
        password: 'pass1'
    },
    headers: {
        'Content-Type': 'application/json',
        'Accept-Encoding': 'gzip,deflate,compress',
    }
}

const app = express();
const port = 8080;

app.use(bodyParser.json());

// Oxylabs POSTs the finished job description here; download every
// "results" link it contains and log the content.
app.post('/job_listener', (req, res) => {
    const links = req.body._links
    const resultLinks = links.filter(link => link.rel === 'results');

    // create an array of promises from filtered links
    const requests = resultLinks.map(({ href }) => axios.get(href, {...credentials}));

    Promise.all(requests).then(values => {
        console.log(values);
        // BUG FIX: the status value contained a stray trailing space ('ok ').
        res.status(200).json({ status: 'ok' });
    })
    .catch(errors => {
        console.log(errors);
        // BUG FIX: the original catch never answered the request, leaving
        // the callback sender hanging until its own timeout.
        res.status(500).json({ status: 'error' });
    });
});

app.listen(port);
25 | -------------------------------------------------------------------------------- /web-unblocker/GoLang/README.md: -------------------------------------------------------------------------------- 1 | # Web Unblocker Golang Example 2 | 3 | This example demonstrates how to use [Web Unblocker API](https://developers.oxylabs.io/advanced-proxy-solutions/web-unblocker) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants: 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyAddress (String) - Web Unblocker proxy endpoint 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * go 1.16+ 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | go get ./... 
27 | ``` 28 | 29 | Compile and execute the script: 30 | ``` 31 | go run *.go 32 | ``` 33 | 34 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/filesystem.js: -------------------------------------------------------------------------------- 1 | const stdUtil = require('util'); 2 | const fs = require('fs'); 3 | const writeFilePromisified = stdUtil.promisify(fs.writeFile) 4 | const appendFilePromisified = stdUtil.promisify(fs.appendFile) 5 | const util = require('./util'); 6 | 7 | const errorFilename = 'failed_requests.txt' 8 | 9 | const writeErrorToStdout = (error) => console.log(`ERROR: ${error}`); 10 | 11 | module.exports = { 12 | writeErrorToStdout: writeErrorToStdout, 13 | 14 | readLines: (path) => { 15 | try { 16 | const content = fs.readFileSync(path).toString('utf-8'); 17 | return content.split("\n").filter((url) => url !== '') 18 | } catch (e) { 19 | util.printAndExit('Could not read file: ' + e.message) 20 | } 21 | }, 22 | 23 | writeErrorToFile: async(error) => { 24 | return await appendFilePromisified(errorFilename, error + "\n") 25 | }, 26 | 27 | writeSuccessToFile: async(position, content) => { 28 | const fileName = `result_${position}.html`; 29 | 30 | return await writeFilePromisified(fileName, content) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /web-unblocker/Ruby/README.md: -------------------------------------------------------------------------------- 1 | # Web Unblocker Ruby Example 2 | 3 | This example demonstrates how to use [Web Unblocker API](https://developers.oxylabs.io/advanced-proxy-solutions/web-unblocker) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.rb) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate 
(Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyAddress (String) - Web Unblocker proxy endpoint. 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * ruby >= 3.0.3 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | bundle install 27 | ``` 28 | 29 | Execute the script: 30 | ``` 31 | ruby main.rb 32 | ``` 33 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/filesystem.js: -------------------------------------------------------------------------------- 1 | const stdUtil = require('util'); 2 | const fs = require('fs'); 3 | const writeFilePromisified = stdUtil.promisify(fs.writeFile) 4 | const appendFilePromisified = stdUtil.promisify(fs.appendFile) 5 | const util = require('./util'); 6 | 7 | const errorFilename = 'failed_requests.txt' 8 | 9 | const writeErrorToStdout = (error) => console.log(`ERROR: ${error}`); 10 | 11 | module.exports = { 12 | writeErrorToStdout: writeErrorToStdout, 13 | 14 | readLines: (path) => { 15 | try { 16 | const content = fs.readFileSync(path).toString('utf-8'); 17 | return content.split("\n").filter((url) => url !== '') 18 | } catch (e) { 19 | util.printAndExit('Could not read file: ' + e.message) 20 | } 21 | }, 22 | 23 | writeErrorToFile: async(error) => { 24 | return await appendFilePromisified(errorFilename, error + "\n") 25 | }, 26 | 27 | writeSuccessToFile: async(position, content) => { 28 | const fileName = `result_${position}.html`; 29 | 30 | return await writeFilePromisified(fileName, content) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/filesystem.js: 
-------------------------------------------------------------------------------- 1 | const stdUtil = require('util'); 2 | const fs = require('fs'); 3 | const writeFilePromisified = stdUtil.promisify(fs.writeFile) 4 | const appendFilePromisified = stdUtil.promisify(fs.appendFile) 5 | const util = require('./util'); 6 | 7 | const errorFilename = 'failed_requests.txt' 8 | 9 | const writeErrorToStdout = (error) => console.log(`ERROR: ${error}`); 10 | 11 | module.exports = { 12 | writeErrorToStdout: writeErrorToStdout, 13 | 14 | readLines: (path) => { 15 | try { 16 | const content = fs.readFileSync(path).toString('utf-8'); 17 | return content.split("\n").filter((url) => url !== '') 18 | } catch (e) { 19 | util.printAndExit('Could not read file: ' + e.message) 20 | } 21 | }, 22 | 23 | writeErrorToFile: async(error) => { 24 | return await appendFilePromisified(errorFilename, error + "\n") 25 | }, 26 | 27 | writeSuccessToFile: async(position, content) => { 28 | const fileName = `result_${position}.html`; 29 | 30 | return await writeFilePromisified(fileName, content) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/CSharp/Oxylabs/Oxylabs.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | Always 15 | 16 | 17 | Always 18 | 19 | 20 | Always 21 | 22 | 23 | Always 24 | 25 | 26 | Always 27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /residential-proxies/Ruby/README.md: -------------------------------------------------------------------------------- 1 | # Residential Proxies Ruby Example 2 | 3 | This example demonstrates how to use [residential proxy API](https://developers.oxylabs.io/residential-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.rb) 8 | 
9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyAddress (String) - Residential proxies endpoint 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * ruby >= 3.0.3 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | bundle install 27 | ``` 28 | 29 | Execute the script: 30 | ``` 31 | ruby main.rb 32 | ``` 33 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datacenter", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "axios": { 8 | "version": "0.24.0", 9 | "resolved": "https://registry.npmjs.org/axios/-/axios-0.24.0.tgz", 10 | "integrity": "sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==", 11 | "requires": { 12 | "follow-redirects": "^1.14.4" 13 | } 14 | }, 15 | "axios-rate-limit": { 16 | "version": "1.3.0", 17 | "resolved": "https://registry.npmjs.org/axios-rate-limit/-/axios-rate-limit-1.3.0.tgz", 18 | "integrity": "sha512-cKR5wTbU/CeeyF1xVl5hl6FlYsmzDVqxlN4rGtfO5x7J83UxKDckudsW0yW21/ZJRcO0Qrfm3fUFbhEbWTLayw==" 19 | }, 20 | "follow-redirects": { 21 | "version": "1.14.5", 22 | "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.5.tgz", 23 | "integrity": "sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA==" 24 | } 25 | } 26 | } 27 | 
-------------------------------------------------------------------------------- /web-unblocker/Java/README.md: -------------------------------------------------------------------------------- 1 | # Web Unblocker Java Example 2 | 3 | This example demonstrates how to use [Web Unblocker API](https://developers.oxylabs.io/advanced-proxy-solutions/web-unblocker) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see Settings.java) 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that need to be scraped 15 | * PROXY_ADDRESS (String) - Web Unblocker proxy endpoint. 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * java 11.0.11 21 | 22 | ## How to run the script 23 | 24 | Compile and execute the script: 25 | ``` 26 | $ mvn package 27 | $ java -jar target/residential-1.0-SNAPSHOT-jar-with-dependencies.jar 28 | ``` 29 | -------------------------------------------------------------------------------- /web-unblocker/Python/README.md: -------------------------------------------------------------------------------- 1 | # Web Unblocker Python Example 2 | 3 | This example demonstrates how to use [Web Unblocker API](https://developers.oxylabs.io/advanced-proxy-solutions/web-unblocker) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * PROXY_ADDRESS (String) - Web Unblocker proxy
endpoint. 13 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 14 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 15 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 16 | 17 | 18 | ## Prerequisites 19 | 20 | The following tools need to be present on your system 21 | * pipenv 22 | 23 | ## How to run the script 24 | 25 | Install dependencies 26 | ``` 27 | $ pipenv install 28 | ``` 29 | 30 | Run the script inside the virtualenv 31 | ``` 32 | $ pipenv run python ngrp.py 33 | ``` 34 | -------------------------------------------------------------------------------- /datacenter-proxies/Ruby/main.rb: -------------------------------------------------------------------------------- 1 | require_relative './header' 2 | require_relative './settings' 3 | require_relative './client' 4 | require_relative './proxy' 5 | require_relative './scraper' 6 | require 'concurrent' 7 | 8 | starting = Time.now 9 | 10 | client = Client.new 11 | scraper = Scraper.new(client) 12 | 13 | p 'Reading from the list...' 14 | urls = File::readlines(get_setting(:UrlListName), chomp: true) 15 | 16 | p 'Retrieving proxy list...' 17 | round_robin_proxies = client.fetch_proxies.cycle 18 | 19 | p 'Gathering results...' 
20 | operations = [] 21 | urls.each_with_index do |url, position| 22 | proxy = round_robin_proxies.next 23 | formatted_proxy = format_proxy(proxy) 24 | 25 | operation = Concurrent::Future.execute do 26 | scraper.scrape(position, formatted_proxy, url) 27 | 28 | requests_rate = get_setting(:RequestsRate) 29 | sleep(1 / requests_rate) 30 | end 31 | 32 | operations.push(operation) 33 | end 34 | 35 | operations.each { |operation| operation.value } 36 | 37 | ending = Time.now 38 | elapsed = ending - starting 39 | printf "Script finished after %.2fs\n", elapsed 40 | -------------------------------------------------------------------------------- /web-unblocker/PHP/README.md: -------------------------------------------------------------------------------- 1 | # Web Unblocker PHP Example 2 | 3 | This example demonstrates how to use [Web Unblocker API](https://developers.oxylabs.io/advanced-proxy-solutions/web-unblocker) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * PROXY_ADDRESS (String) - Web Unblocker proxy endpoint. 
13 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 14 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 15 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * composer >= 2 21 | * php >= 8.0 22 | 23 | ## How to run the script 24 | 25 | Install dependencies using composer: 26 | ``` 27 | $ composer install 28 | ``` 29 | 30 | Execute the main file: 31 | ``` 32 | $ php main.php 33 | ``` 34 | -------------------------------------------------------------------------------- /residential-proxies/GoLang/README.md: -------------------------------------------------------------------------------- 1 | # Residential Proxies Golang Example 2 | 3 | This example demonstrates how to use [residential proxy API](https://developers.oxylabs.io/residential-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see constant.go) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyAddress (String) - Residential proxies endpoint. 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * go 1.16 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | go get ./... 
27 | ``` 28 | 29 | Compile and execute the script: 30 | ``` 31 | go run *.go 32 | ``` 33 | 34 | -------------------------------------------------------------------------------- /scraper-apis/Typescript/SingleQuery.ts: -------------------------------------------------------------------------------- 1 | /* This example will submit a job request to Scraper API. 2 | The job will deliver parsed product data in JSON for Amazon searches 3 | from United States geo-location*/ 4 | 5 | import axios, {AxiosRequestConfig} from "axios"; 6 | 7 | //If you wish to get content in HTML you can delete parser_type and parse parameters 8 | const payload = { 9 | 'source': 'amazon_search', 10 | 'query': 'kettle', 11 | 'geo_location': '10005', 12 | 'parse': 'true', 13 | } 14 | 15 | const config: AxiosRequestConfig = { 16 | auth: { 17 | username: 'user', // Don't forget to fill in user credentials 18 | password: 'pass1' 19 | }, 20 | headers: { 21 | 'Content-Type': 'application/json', 22 | 'Accept-Encoding': 'gzip,deflate,compress', 23 | } 24 | } 25 | 26 | axios.post('https://data.oxylabs.io/v1/queries', payload, config) 27 | .then(({data}) => console.log(data)) 28 | .catch(err => console.log(err)) 29 | //To retrieve parsed or raw content from the webpage, use _links from the response dictionary and check RetrieveJobContent.js file 30 | -------------------------------------------------------------------------------- /residential-proxies/Java/README.md: -------------------------------------------------------------------------------- 1 | # Residential Proxies Java Example 2 | 3 | This example demonstrates how to use [residential proxy API](https://developers.oxylabs.io/residential-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see Settings.java) 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data 
retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | * PROXY_ADDRESS (String) - Residential proxies endpoint. 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * java 11.0.11 21 | 22 | ## How to run the script 23 | 24 | Compile and execute the script: 25 | ``` 26 | $ mvn package 27 | $ java -jar target/residential-1.0-SNAPSHOT-jar-with-dependencies.jar 28 | ``` 29 | -------------------------------------------------------------------------------- /residential-proxies/Python/README.md: -------------------------------------------------------------------------------- 1 | # Residential Proxies Python Example 2 | 3 | This example demonstrates how to use [residential proxy API](https://developers.oxylabs.io/residential-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * PROXY_ADDRESS (String) - Residential proxies endpoint. 
13 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 14 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 15 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * pipenv 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | $ pipenv install 27 | ``` 28 | 29 | Run the script inside the virtualenv 30 | ``` 31 | $ pipenv run python residential.py 32 | ``` 33 | -------------------------------------------------------------------------------- /residential-proxies/GoLang/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "golang.org/x/time/rate" 6 | "sync" 7 | "time" 8 | ) 9 | 10 | func main() { 11 | start := time.Now() 12 | 13 | fmt.Println("Reading from the list...") 14 | urls, err := readLines(UrlListName) 15 | if err != nil { 16 | printAndExit("Failed to read the input file") 17 | } 18 | 19 | fmt.Println("Retrieving proxy list...") 20 | apiRateLimit := rate.NewLimiter(rate.Every(time.Second), RequestsRate) 21 | apiClient := NewClient(apiRateLimit) 22 | if err != nil { 23 | printAndExit("Failed to download proxy list") 24 | } 25 | 26 | wc := sync.WaitGroup{} 27 | 28 | scraper := NewScraper(apiClient) 29 | 30 | fmt.Println("Gathering results...") 31 | for index, url := range urls { 32 | wc.Add(1) 33 | go func(url string, position int) { 34 | parsedUrl, formattedProxy := createProxyByUrl(url) 35 | scraper.scrape(position, formattedProxy, parsedUrl) 36 | wc.Done() 37 | }(url, index+1) 38 | } 39 | 40 | wc.Wait() 41 | 42 | elapsed := time.Since(start) 43 | fmt.Printf("Script finished after %.2fs\n", elapsed.Seconds()) 44 | } 45 | -------------------------------------------------------------------------------- /residential-proxies/PHP/README.md: 
-------------------------------------------------------------------------------- 1 | # Residential Proxies PHP Example 2 | 3 | This example demonstrates how to use [residential proxy API](https://developers.oxylabs.io/residential-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * PROXY_ADDRESS (String) - Residential proxies endpoint. 13 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 14 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 15 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * composer >= 2 21 | * php >= 8.0 22 | 23 | ## How to run the script 24 | 25 | Install dependencies using composer: 26 | ``` 27 | $ composer install 28 | ``` 29 | 30 | Execute the main file: 31 | ``` 32 | $ php main.php 33 | ``` 34 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/main.js: -------------------------------------------------------------------------------- 1 | (async() => { 2 | const { performance } = require('perf_hooks'); 3 | const settings = require('./settings'); 4 | const filesystem = require('./filesystem'); 5 | const client = require('./client'); 6 | const util = require('./util'); 7 | const scraper = require('./scraper'); 8 | const proxyUtils = require('./proxy'); 9 | 10 | const startTime = performance.now(); 11 | 12 | console.log('Reading from the list...'); 13 | const urls = filesystem.readLines(settings.UrlListName) 14 | const proxyMap = filesystem.readProxyMap(settings.ProxyListName) 15 | 16 | console.log('Retrieving proxy 
list...') 17 | 18 | console.log('Gathering results...') 19 | let asyncJobs = []; 20 | for (let i = 0; i < urls.length; i++) { 21 | [parsedUrl, formattedProxy] = proxyUtils.createProxyByUrl(proxyMap, urls[i]); 22 | asyncJobs.push(scraper.scrape(i+1, formattedProxy, parsedUrl)) 23 | } 24 | 25 | await Promise.all(asyncJobs); 26 | 27 | const endTime = performance.now() 28 | 29 | console.log(`Script finished after ${((endTime - startTime)/1000).toFixed(2)} seconds`) 30 | })(); 31 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Ruby/README.md: -------------------------------------------------------------------------------- 1 | # Shared Datacenter Proxies Ruby Example 2 | 3 | This example demonstrates how to use [shared datacenter proxy API](https://developers.oxylabs.io/shared-dc/#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.rb) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyListName (String) - Filename of a txt file that contains country-specific proxy servers 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * ruby >= 3.0.3 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | bundle install 27 | ``` 28 | 29 | Execute the script: 30 | ``` 31 | ruby main.rb 32 | ``` 33 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/main.js: 
-------------------------------------------------------------------------------- 1 | (async() => { 2 | const { performance } = require('perf_hooks'); 3 | const roundround = require('roundround'); 4 | const settings = require('./settings'); 5 | const filesystem = require('./filesystem'); 6 | const client = require('./client'); 7 | const util = require('./util'); 8 | const scraper = require('./scraper'); 9 | const proxyUtils = require('./proxy'); 10 | 11 | const startTime = performance.now(); 12 | 13 | console.log('Reading from the list...'); 14 | const urls = filesystem.readLines(settings.UrlListName) 15 | 16 | console.log('Retrieving proxy list...') 17 | const proxiesRoundRobin = roundround(await client.fetchProxies()); 18 | 19 | console.log('Gathering results...') 20 | let asyncJobs = []; 21 | for (let i = 0; i < urls.length; i++) { 22 | const proxy = proxyUtils.format(proxiesRoundRobin()); 23 | asyncJobs.push(scraper.scrape(i+1, proxy, urls[i])) 24 | } 25 | 26 | await Promise.all(asyncJobs); 27 | 28 | const endTime = performance.now() 29 | 30 | console.log(`Script finished after ${((endTime - startTime)/1000).toFixed(2)} seconds`) 31 | })(); 32 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/README.md: -------------------------------------------------------------------------------- 1 | # Shared Datacenter Proxies Golang Example 2 | 3 | This example demonstrates how to use [shared datacenter proxy API](https://developers.oxylabs.io/shared-dc/#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see constant.go) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial 
request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyListName (String) - Filename of a txt file that contains country-specific proxy servers 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * go 1.16 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | go get ./... 27 | ``` 28 | 29 | Compile and execute the script: 30 | ``` 31 | go run *.go 32 | ``` 33 | 34 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Java/README.md: -------------------------------------------------------------------------------- 1 | # Shared Datacenter Proxies Java Example 2 | 3 | This example demonstrates how to use [shared datacenter proxy API](https://developers.oxylabs.io/shared-dc/#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see Settings.java) 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | * PROXY_LIST_NAME (String) - Filename of a txt file that contains country-specific proxy servers 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * java 11.0.11 21 | 22 | ## How to run the script 23 | 24 | Compile and execute the script: 25 | ``` 26 | $ mvn package 27 | $ java -jar target/shared-dc-1.0-SNAPSHOT-jar-with-dependencies.jar 28 | ``` 29 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/README.md: 
-------------------------------------------------------------------------------- 1 | # Datacenter Proxies NodeJs Example 2 | 3 | This example demonstrates how to use [oxylabs datacenter API](https://developers.oxylabs.io/datacenter-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.js) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | 16 | ## Prerequisites 17 | 18 | The following tools need to be present on your system 19 | * node >= 14.18.1 20 | * npm >= 6.14.15 or yarn >= 1.22.17 21 | 22 | ## How to run the script 23 | 24 | ### Install dependencies 25 | Using yarn 26 | ``` 27 | yarn install 28 | ``` 29 | 30 | Or by using npm 31 | ``` 32 | npm install 33 | ``` 34 | 35 | ### Run the script 36 | ``` 37 | node main.js 38 | ``` 39 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Python/README.md: -------------------------------------------------------------------------------- 1 | # Shared Datacenter Proxies Python Example 2 | 3 | This example demonstrates how to use [shared datacenter proxy API](https://developers.oxylabs.io/shared-dc/#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM 
(Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | * PROXY_LIST_NAME (String) - Filename of a txt file that contains country-specific proxy servers 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * pipenv 21 | 22 | ## How to run the script 23 | 24 | Install dependencies 25 | ``` 26 | $ pipenv install 27 | ``` 28 | 29 | Run the script inside the virtualenv 30 | ``` 31 | $ pipenv run python shared_dc.py 32 | ``` 33 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/PHP/README.md: -------------------------------------------------------------------------------- 1 | # Shared Datacenter Proxies PHP Example 2 | 3 | This example demonstrates how to use [shared datacenter proxy API](https://developers.oxylabs.io/shared-dc/#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants 8 | 9 | * USERNAME (String) - Username of a proxy user 10 | * PASSWORD (String) - Password of a proxy user 11 | * TIMEOUT (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * REQUESTS_RATE (Integer) - Number of requests to make per one second 13 | * RETRIES_NUM (Integer) - Number of times to retry if initial request was unsuccessful 14 | * URL_LIST_NAME (String) - Filename of a txt file with the URLs that needs to scraped 15 | * PROXY_LIST_NAME (String) - Filename of a txt file that contains country-specific proxy servers 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * composer >= 2 21 | * php >= 8.0 22 | 23 | ## How to run the script 24 | 25 | Install dependencies using composer: 26 | ``` 27 | $ composer install 28 | ``` 29 | 30 | Execute the main file: 31 | ``` 32 | $ php main.php 33 | ``` 34 | 
-------------------------------------------------------------------------------- /scraper-apis/Typescript/Callback.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | import express, {Express, Request, Response} from 'express'; 3 | import * as bodyParser from 'body-parser'; 4 | 5 | const credentials = { 6 | auth: { 7 | username: 'user', 8 | password: 'pass1' 9 | }, 10 | headers: { 11 | 'Content-Type': 'application/json', 12 | 'Accept-Encoding': 'gzip,deflate,compress', 13 | }, 14 | } 15 | 16 | const app: Express = express(); 17 | const port = 8080; 18 | 19 | app.use(bodyParser.json()); 20 | 21 | app.post('/job_listener', (req: Request, res: Response) => { 22 | const links = req.body._links 23 | const resultLinks = links.filter(link => link.rel === 'results'); 24 | 25 | // create an array of promises from filtered links 26 | const requests = resultLinks.map(({href}) => axios.get(href, {...credentials})); 27 | 28 | Promise.all(requests).then(values => { 29 | console.log(values); 30 | res.status(200).json({status: 'ok '}); 31 | }) 32 | .catch(errors => console.log(errors)); 33 | }); 34 | 35 | app.listen(port, () => { 36 | console.log(`Server running at http://localhost:${port}`) 37 | }); 38 | -------------------------------------------------------------------------------- /residential-proxies/PHP/src/ProxyFormatter.php: -------------------------------------------------------------------------------- 1 | parseUrl($parsedUrl); 15 | if (count($urlParts) === 2) { 16 | $parsedUrl = trim($urlParts[0]); 17 | $country = trim($urlParts[1]); 18 | } 19 | 20 | $trimmedCountry = $country ? 
trim($country) : null; 21 | $template = 'http://customer-{USERNAME}:{PASSWORD}@{PROXY_ADDRESS}'; 22 | if ($trimmedCountry) { 23 | $template = 'http://customer-{USERNAME}-cc-{COUNTRY}:{PASSWORD}@{PROXY_ADDRESS}'; 24 | } 25 | 26 | return [$parsedUrl, strtr($template, [ 27 | '{USERNAME}' => USERNAME, 28 | '{PASSWORD}' => PASSWORD, 29 | '{PROXY_ADDRESS}' => PROXY_ADDRESS, 30 | '{COUNTRY}' => $trimmedCountry, 31 | ])]; 32 | } 33 | 34 | private function parseUrl(string $url): array 35 | { 36 | return explode(';', $url); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/README.md: -------------------------------------------------------------------------------- 1 | # Web Unblocker NodeJs Example 2 | 3 | This example demonstrates how to use [Web Unblocker API](https://developers.oxylabs.io/advanced-proxy-solutions/web-unblocker) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.js) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyAddress (String) - Web Unblocker proxy endpoint. 
<?php

// Submits a single job to the E-Commerce Scraper API.
// The job delivers parsed Amazon search results for "kettle"
// from the United States (zip 10005) geo-location.
// NOTE(review): the original opening (`<?php` and the `$params = [` line) was
// lost to extraction damage; reconstructed from the parallel BatchQuery.php.
$params = [
    'source' => 'amazon_search',
    'query' => 'kettle',
    'geo_location' => '10005',
    'parse' => true,
];

$ch = curl_init();

curl_setopt($ch, CURLOPT_URL, 'https://data.oxylabs.io/v1/queries');
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($params));
curl_setopt($ch, CURLOPT_POST, 1);
curl_setopt($ch, CURLOPT_USERPWD, 'user' . ':' . 'pass1'); //Don't forget to fill in user credentials

$headers = ['Content-Type: application/json'];
curl_setopt($ch, CURLOPT_HTTPHEADER, $headers);

$result = curl_exec($ch);
//To retrieve parsed or raw content from the webpage, use _links from the response JSON and check RetrieveJobContent.php file
echo $result;

if (curl_errno($ch)) {
    echo 'Error:' . curl_error($ch);
}
curl_close($ch);
9 | pp = PushPullScraperAPIsClient("user", "pass") 10 | 11 | # Specify the payload for the batch of the jobs. 12 | payload = { 13 | "query": [ 14 | "kettle", 15 | "fridge", 16 | "microwave", 17 | ], 18 | "source": "amazon_search", 19 | "geo_location": "10005", 20 | "parse": True, 21 | # Replace with your own callback URL: 22 | # "callback_url": "https://your.callback.url", 23 | } 24 | 25 | # Create the jobs. 26 | batch_query = pp.create_jobs_batch(payload) 27 | 28 | # Extract results for each job. 29 | # For simplicity's sake, we are not using asynchronous execution. 30 | batch_results = [ 31 | pp.wait_for_and_get_job_results(query["id"]) for query in batch_query["queries"] 32 | ] 33 | 34 | return batch_results 35 | 36 | 37 | if __name__ == "__main__": 38 | result = run_example() 39 | pprint(result) 40 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/README.md: -------------------------------------------------------------------------------- 1 | # Residential Proxies NodeJs Example 2 | 3 | This example demonstrates how to use [residential proxy API](https://developers.oxylabs.io/residential-proxies/index.html#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.js) 8 | 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyAddress (String) - Residential proxies endpoint. 
16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * node >= 14.18.1 21 | * npm >= 6.14.15 or yarn >= 1.22.17 22 | 23 | ## How to run the script 24 | 25 | ### Install dependencies 26 | Using yarn 27 | ``` 28 | yarn install 29 | ``` 30 | 31 | Or by using npm 32 | ``` 33 | npm install 34 | ``` 35 | 36 | ### Run the script 37 | ``` 38 | node main.js 39 | ``` 40 | -------------------------------------------------------------------------------- /web-unblocker/CSharp/Oxylabs.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 16 4 | VisualStudioVersion = 16.0.31727.386 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Oxylabs", "Oxylabs\Oxylabs.csproj", "{35889503-BA73-44E9-AC77-4AA304F4D1FA}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {5F64074A-DFB2-48B0-A674-C7C837763483} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /datacenter-proxies/CSharp/Oxylabs.sln: 
-------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 16 4 | VisualStudioVersion = 16.0.31727.386 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Oxylabs", "Oxylabs\Oxylabs.csproj", "{35889503-BA73-44E9-AC77-4AA304F4D1FA}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {5F64074A-DFB2-48B0-A674-C7C837763483} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /residential-proxies/CSharp/Oxylabs.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 16 4 | VisualStudioVersion = 16.0.31727.386 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Oxylabs", "Oxylabs\Oxylabs.csproj", "{35889503-BA73-44E9-AC77-4AA304F4D1FA}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | 
GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {5F64074A-DFB2-48B0-A674-C7C837763483} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/client.js: -------------------------------------------------------------------------------- 1 | const settings = require('./settings'); 2 | const headers = require('./headers'); 3 | 4 | const axios = require('axios') 5 | const rateLimit = require('axios-rate-limit') 6 | 7 | const http = rateLimit(axios.create(), { 8 | maxRequests: settings.RequestsRate, 9 | perMilliseconds: 1000 10 | }) 11 | 12 | const filesystem = require('./filesystem'); 13 | 14 | const apiUrl = `https://${settings.Username}:${settings.Password}@proxy.oxylabs.io/all` 15 | 16 | const inSeconds = (ms) => ms * 1000; 17 | 18 | module.exports = { 19 | fetchPage: async (proxy, url) => { 20 | parsedProxy = new URL(proxy); 21 | 22 | const proxyPort = parsedProxy.port; 23 | parsedProxy.port = ''; 24 | 25 | let response = null; 26 | try { 27 | response = await http.get(url, { 28 | headers: headers.getRandomBrowserHeaders(), 29 | timeout: inSeconds(settings.Timeout), 30 | proxy: { 31 | host: parsedProxy.host, 32 | port: proxyPort, 33 | auth: { username: parsedProxy.username, password: parsedProxy.password } 34 | } 35 | }); 36 | } catch (e) { 37 | return [null, e] 38 | } 39 | 40 | return [response, null] 41 | }, 42 | }; 43 | 44 | 
-------------------------------------------------------------------------------- /scraper-apis/Python/push_pull/callback_listener_server.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is a simple Sanic web server that will listen for the callback. 3 | You will need to run this on a server that can receive requests from Oxylabs callbackers. 4 | 5 | You can find the callbacker IPs by calling the `get_callbacker_ips()` method 6 | from our `PushPullScraperAPIsClient` class or running the `get_callbacker_ips.py`. 7 | """ 8 | from pprint import pprint 9 | from sanic import Sanic, response 10 | 11 | from client import PushPullScraperAPIsClient 12 | 13 | # Instantiate a PushPullScraperAPIsClient. 14 | # Replace the values with your own credentials. 15 | pp = PushPullScraperAPIsClient("user", "pass") 16 | app = Sanic(name="callback_listener") 17 | 18 | # Define /callback_listener endpoint that accepts POST requests. 19 | @app.route("/callback_listener", methods=["POST"]) 20 | async def job_listener(request): 21 | res = request.json 22 | pprint(res) 23 | 24 | # Get job results from the ID provided in the callback payload. 25 | job_results = pp.get_job_results(res["id"]) 26 | pprint(job_results) 27 | 28 | return response.json(status=200, body={"status": "ok"}) 29 | 30 | 31 | if __name__ == "__main__": 32 | app.run(host="0.0.0.0", port=8080) 33 | -------------------------------------------------------------------------------- /scraper-apis/Python/single_query.py: -------------------------------------------------------------------------------- 1 | """ 2 | # This example will submit a job request to the E-Commerce Scraper API. 3 | # The job will deliver parsed product data in a JSON for Amazon searches 4 | # from United States geo-location. 5 | """ 6 | import requests 7 | from pprint import pprint 8 | 9 | 10 | def submit() -> dict: 11 | # Compose a Job request payload. 
12 | # If you wish to get content in HTML, you can set the `parse` to `False`. 13 | payload = { 14 | "source": "amazon_search", 15 | "query": "kettle", 16 | "geo_location": "10005", 17 | "parse": True, 18 | } 19 | 20 | # Get response. 21 | response = requests.request( 22 | "POST", 23 | "https://data.oxylabs.io/v1/queries", 24 | auth=("user", "pass"), # Don't forget to fill in user credentials. 25 | json=payload, 26 | ) 27 | 28 | # Return the response JSON. 29 | # To retrieve parsed or raw content from the webpage, use `_links` from the 30 | # response dictionary and check `retrieve_job_content.py` file. 31 | return response.json() 32 | 33 | 34 | if __name__ == "__main__": 35 | job_info = submit() 36 | # Print the prettified JSON response to stdout. 37 | pprint(job_info) 38 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/client.js: -------------------------------------------------------------------------------- 1 | const settings = require('./settings'); 2 | const headers = require('./headers'); 3 | 4 | const axios = require('axios') 5 | const rateLimit = require('axios-rate-limit') 6 | 7 | const http = rateLimit(axios.create(), { 8 | maxRequests: settings.RequestsRate, 9 | perMilliseconds: 1000 10 | }) 11 | 12 | const filesystem = require('./filesystem'); 13 | 14 | const apiUrl = `https://${settings.Username}:${settings.Password}@proxy.oxylabs.io/all` 15 | 16 | const inSeconds = (ms) => ms * 1000; 17 | 18 | module.exports = { 19 | fetchPage: async (proxy, url) => { 20 | parsedProxy = new URL(proxy); 21 | 22 | const proxyPort = parsedProxy.port; 23 | parsedProxy.port = ''; 24 | 25 | let response = null; 26 | try { 27 | response = await http.get(url, { 28 | headers: headers.getRandomBrowserHeaders(), 29 | timeout: inSeconds(settings.Timeout), 30 | proxy: { 31 | host: parsedProxy.host, 32 | port: proxyPort, 33 | auth: { username: parsedProxy.username, password: parsedProxy.password } 34 | } 35 | }); 
36 | } catch (e) { 37 | return [null, e] 38 | } 39 | 40 | return [response, null] 41 | }, 42 | }; 43 | 44 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/CSharp/Oxylabs.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 16 4 | VisualStudioVersion = 16.0.31727.386 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Oxylabs", "Oxylabs\Oxylabs.csproj", "{35889503-BA73-44E9-AC77-4AA304F4D1FA}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {35889503-BA73-44E9-AC77-4AA304F4D1FA}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {5F64074A-DFB2-48B0-A674-C7C837763483} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/Nodejs/README.md: -------------------------------------------------------------------------------- 1 | # Shared Datacenter Proxies NodeJs Example 2 | 3 | This example demonstrates how to use [shared datacenter proxy API](https://developers.oxylabs.io/shared-dc/#quick-start) 4 | 5 | ## Global variables 6 | 7 | Set up the script using the following constants (see settings.js) 8 
| 9 | * Username (String) - Username of a proxy user 10 | * Password (String) - Password of a proxy user 11 | * Timeout (Integer) - Seconds to wait for a connection and data retrieval until timing out 12 | * RequestsRate (Integer) - Number of requests to make per one second 13 | * RetriesNum (Integer) - Number of times to retry if initial request was unsuccessful 14 | * UrlListName (String) - Filename of a txt file with the URLs that needs to scraped 15 | * ProxyListName (String) - Filename of a txt file that contains country-specific proxy servers 16 | 17 | ## Prerequisites 18 | 19 | The following tools need to be present on your system 20 | * node >= 14.18.1 21 | * npm >= 6.14.15 or yarn >= 1.22.17 22 | 23 | ## How to run the script 24 | 25 | ### Install dependencies 26 | Using yarn 27 | ``` 28 | yarn install 29 | ``` 30 | 31 | Or by using npm 32 | ``` 33 | npm install 34 | ``` 35 | 36 | ### Run the script 37 | ``` 38 | node main.js 39 | ``` 40 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | axios-rate-limit@^1.3.0: 6 | version "1.3.0" 7 | resolved "https://registry.yarnpkg.com/axios-rate-limit/-/axios-rate-limit-1.3.0.tgz#03241d24c231c47432dab6e8234cfde819253c2e" 8 | integrity sha512-cKR5wTbU/CeeyF1xVl5hl6FlYsmzDVqxlN4rGtfO5x7J83UxKDckudsW0yW21/ZJRcO0Qrfm3fUFbhEbWTLayw== 9 | 10 | axios@^0.24.0: 11 | version "0.24.0" 12 | resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6" 13 | integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA== 14 | dependencies: 15 | follow-redirects "^1.14.4" 16 | 17 | follow-redirects@^1.14.4: 18 | version "1.14.5" 19 | resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.5.tgz#f09a5848981d3c772b5392309778523f8d85c381" 20 | integrity sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA== 21 | 22 | roundround@^0.2.0: 23 | version "0.2.0" 24 | resolved "https://registry.yarnpkg.com/roundround/-/roundround-0.2.0.tgz#ea0e5659d31f0b266bf03d0c040aa51d9da68f79" 25 | integrity sha1-6g5WWdMfCyZr8D0MBAqlHZ2mj3k= 26 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | axios-rate-limit@^1.3.0: 6 | version "1.3.0" 7 | resolved "https://registry.yarnpkg.com/axios-rate-limit/-/axios-rate-limit-1.3.0.tgz#03241d24c231c47432dab6e8234cfde819253c2e" 8 | integrity sha512-cKR5wTbU/CeeyF1xVl5hl6FlYsmzDVqxlN4rGtfO5x7J83UxKDckudsW0yW21/ZJRcO0Qrfm3fUFbhEbWTLayw== 9 | 10 | axios@^0.24.0: 11 | version "0.24.0" 12 | resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6" 13 | integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA== 14 | dependencies: 15 | follow-redirects "^1.14.4" 16 | 17 | follow-redirects@^1.14.4: 18 | version "1.14.5" 19 | resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.5.tgz#f09a5848981d3c772b5392309778523f8d85c381" 20 | integrity sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA== 21 | 22 | roundround@^0.2.0: 23 | version "0.2.0" 24 | resolved "https://registry.yarnpkg.com/roundround/-/roundround-0.2.0.tgz#ea0e5659d31f0b266bf03d0c040aa51d9da68f79" 25 | integrity sha1-6g5WWdMfCyZr8D0MBAqlHZ2mj3k= 26 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | axios-rate-limit@^1.3.0: 6 | version "1.3.0" 7 | resolved "https://registry.yarnpkg.com/axios-rate-limit/-/axios-rate-limit-1.3.0.tgz#03241d24c231c47432dab6e8234cfde819253c2e" 8 | integrity sha512-cKR5wTbU/CeeyF1xVl5hl6FlYsmzDVqxlN4rGtfO5x7J83UxKDckudsW0yW21/ZJRcO0Qrfm3fUFbhEbWTLayw== 9 | 10 | axios@^0.24.0: 11 | version "0.24.0" 12 | resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6" 13 | integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA== 14 | dependencies: 15 | follow-redirects "^1.14.4" 16 | 17 | follow-redirects@^1.14.4: 18 | version "1.14.5" 19 | resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.5.tgz#f09a5848981d3c772b5392309778523f8d85c381" 20 | integrity sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA== 21 | 22 | roundround@^0.2.0: 23 | version "0.2.0" 24 | resolved "https://registry.yarnpkg.com/roundround/-/roundround-0.2.0.tgz#ea0e5659d31f0b266bf03d0c040aa51d9da68f79" 25 | integrity sha1-6g5WWdMfCyZr8D0MBAqlHZ2mj3k= 26 | -------------------------------------------------------------------------------- /scraper-apis/Python/batch_query.py: -------------------------------------------------------------------------------- 1 | """ 2 | This example submits a job request to E-Commerce Scraper API. 3 | The job will deliver parsed product data in JSON for multiple Amazon searches 4 | from United States geo-location. 5 | """ 6 | import requests 7 | from pprint import pprint 8 | 9 | 10 | def submit_batch() -> dict: 11 | # If you wish to get content in HTML, you can set the `parse` to `False`. 
12 | payload = { 13 | "query": [ 14 | "kettle", 15 | "fridge", 16 | "microwave", 17 | ], 18 | "source": "amazon_search", 19 | "geo_location": "10005", 20 | "parse": True, 21 | } 22 | 23 | response = requests.request( 24 | "POST", 25 | "https://data.oxylabs.io/v1/queries/batch", 26 | auth=("user", "pass"), # Don't forget to fill in user credentials. 27 | json=payload, 28 | ) 29 | 30 | # Return the response JSON. 31 | # To retrieve parsed or raw content from the webpage, use `_links` from the 32 | # response dictionary and check `retrieve_job_content.py` file. 33 | return response.json() 34 | 35 | 36 | if __name__ == "__main__": 37 | job_infos = submit_batch() 38 | # Print the prettified JSON response to stdout. 39 | pprint(job_infos) 40 | -------------------------------------------------------------------------------- /datacenter-proxies/GoLang/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "github.com/sbabiv/roundrobin" 6 | "golang.org/x/time/rate" 7 | "sync" 8 | "time" 9 | ) 10 | 11 | func main() { 12 | start := time.Now() 13 | 14 | fmt.Println("Reading from the list...") 15 | urls, err := readLines(UrlListName) 16 | if err != nil { 17 | printAndExit("Failed to read the input file") 18 | } 19 | 20 | fmt.Println("Retrieving proxy list...") 21 | apiRateLimit := rate.NewLimiter(rate.Every(time.Second), RequestsRate) 22 | apiClient := NewClient(apiRateLimit) 23 | proxies, err := apiClient.fetchProxies() 24 | if err != nil { 25 | printAndExit("Failed to download proxy list") 26 | } 27 | 28 | proxiesRoundRobin := roundrobin.New(proxies) 29 | wc := sync.WaitGroup{} 30 | 31 | scraper := NewScraper(apiClient) 32 | 33 | fmt.Println("Gathering results...") 34 | for index, url := range urls { 35 | wc.Add(1) 36 | go func(url string, position int) { 37 | proxy, _ := proxiesRoundRobin.Pick() 38 | formattedProxy := formatProxy(proxy.(string)) 39 | scraper.scrape(position, formattedProxy, 
url) 40 | wc.Done() 41 | }(url, index+1) 42 | } 43 | 44 | wc.Wait() 45 | 46 | elapsed := time.Since(start) 47 | fmt.Printf("Script finished after %.2fs\n", elapsed.Seconds()) 48 | } 49 | -------------------------------------------------------------------------------- /scraper-apis/PHP/BatchQuery.php: -------------------------------------------------------------------------------- 1 | [ 10 | 'kettle', 11 | 'fridge', 12 | 'microwave' 13 | ], 14 | 'source' => 'amazon_search', 15 | 'geo_location' => '10005', 16 | 'parse' => true, 17 | ]; 18 | 19 | $ch = curl_init(); 20 | 21 | curl_setopt($ch, CURLOPT_URL, 'https://data.oxylabs.io/v1/queries/batch'); 22 | curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1); 23 | curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($params)); 24 | curl_setopt($ch, CURLOPT_POST, 1); 25 | curl_setopt($ch, CURLOPT_USERPWD, 'user' . ':' . 'pass1'); //Don't forget to fill in user credentials 26 | 27 | $headers = ['Content-Type: application/json']; 28 | curl_setopt($ch, CURLOPT_HTTPHEADER, $headers); 29 | 30 | $result = curl_exec($ch); 31 | //To retrieve parsed or raw content from the webpage, use _links from the response dictionary and check RetrieveJobContent.php file 32 | echo $result; 33 | 34 | if (curl_errno($ch)) { 35 | echo 'Error:' . 
curl_error($ch); 36 | } 37 | curl_close($ch); 38 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datacenter", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "axios": { 8 | "version": "0.24.0", 9 | "resolved": "https://registry.npmjs.org/axios/-/axios-0.24.0.tgz", 10 | "integrity": "sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==", 11 | "requires": { 12 | "follow-redirects": "^1.14.4" 13 | } 14 | }, 15 | "axios-rate-limit": { 16 | "version": "1.3.0", 17 | "resolved": "https://registry.npmjs.org/axios-rate-limit/-/axios-rate-limit-1.3.0.tgz", 18 | "integrity": "sha512-cKR5wTbU/CeeyF1xVl5hl6FlYsmzDVqxlN4rGtfO5x7J83UxKDckudsW0yW21/ZJRcO0Qrfm3fUFbhEbWTLayw==" 19 | }, 20 | "follow-redirects": { 21 | "version": "1.14.5", 22 | "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.5.tgz", 23 | "integrity": "sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA==" 24 | }, 25 | "roundround": { 26 | "version": "0.2.0", 27 | "resolved": "https://registry.npmjs.org/roundround/-/roundround-0.2.0.tgz", 28 | "integrity": "sha1-6g5WWdMfCyZr8D0MBAqlHZ2mj3k=" 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /residential-proxies/Nodejs/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datacenter", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "axios": { 8 | "version": "0.24.0", 9 | "resolved": "https://registry.npmjs.org/axios/-/axios-0.24.0.tgz", 10 | "integrity": "sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==", 11 | 
"requires": { 12 | "follow-redirects": "^1.14.4" 13 | } 14 | }, 15 | "axios-rate-limit": { 16 | "version": "1.3.0", 17 | "resolved": "https://registry.npmjs.org/axios-rate-limit/-/axios-rate-limit-1.3.0.tgz", 18 | "integrity": "sha512-cKR5wTbU/CeeyF1xVl5hl6FlYsmzDVqxlN4rGtfO5x7J83UxKDckudsW0yW21/ZJRcO0Qrfm3fUFbhEbWTLayw==" 19 | }, 20 | "follow-redirects": { 21 | "version": "1.14.5", 22 | "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.5.tgz", 23 | "integrity": "sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA==" 24 | }, 25 | "roundround": { 26 | "version": "0.2.0", 27 | "resolved": "https://registry.npmjs.org/roundround/-/roundround-0.2.0.tgz", 28 | "integrity": "sha1-6g5WWdMfCyZr8D0MBAqlHZ2mj3k=" 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /web-unblocker/Nodejs/client.js: -------------------------------------------------------------------------------- 1 | process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'; 2 | 3 | const settings = require('./settings'); 4 | const headers = require('./headers'); 5 | 6 | const axios = require('axios') 7 | const rateLimit = require('axios-rate-limit') 8 | 9 | const http = rateLimit(axios.create(), { 10 | maxRequests: settings.RequestsRate, 11 | perMilliseconds: 1000 12 | }) 13 | 14 | const filesystem = require('./filesystem'); 15 | 16 | const apiUrl = `https://${settings.Username}:${settings.Password}@proxy.oxylabs.io/all` 17 | 18 | const inSeconds = (ms) => ms * 1000; 19 | 20 | module.exports = { 21 | fetchPage: async (proxy, url, country) => { 22 | parsedProxy = new URL(proxy); 23 | 24 | const proxyPort = parsedProxy.port; 25 | parsedProxy.port = ''; 26 | 27 | let response = null; 28 | try { 29 | response = await http.get(url, { 30 | headers: headers.getRandomBrowserHeaders(country), 31 | timeout: inSeconds(settings.Timeout), 32 | proxy: { 33 | host: parsedProxy.host, 34 | port: proxyPort, 35 | 
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

/**
 * Fetches the list of Oxylabs notifier ("callbacker") IPs so a callback
 * listener can whitelist them, and prints the HTTP status and response body.
 */
public class GetNotifierIPList {
    // Replace with your own "username:password" API credentials.
    private static final String AUTH = "user:pass1";

    public static void main(String[] args) throws IOException {
        // FIX: encode credentials with an explicit charset rather than the
        // platform default.
        String authHeaderValue = "Basic " + Base64.getEncoder().encodeToString(AUTH.getBytes(StandardCharsets.UTF_8));

        URL url = new URL("https://data.oxylabs.io/v1/info/callbacker_ips");

        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestMethod("GET");

        con.setRequestProperty("Authorization", authHeaderValue);

        int code = con.getResponseCode();
        System.out.println(code);

        // FIX: getInputStream() throws IOException on HTTP error codes; read
        // the error stream instead so the API's error body is still printed.
        InputStream body = code >= 400 ? con.getErrorStream() : con.getInputStream();
        try (BufferedReader br = new BufferedReader(new InputStreamReader(body, StandardCharsets.UTF_8))) {
            StringBuilder response = new StringBuilder();
            String responseLine;
            while ((responseLine = br.readLine()) != null) {
                response.append(responseLine.trim());
            }
            System.out.println(response.toString());
        }
    }
}
9 | private static final String AUTH = "user:pass1"; 10 | 11 | public static void main(String[] args) throws IOException { 12 | String authHeaderValue = "Basic " + Base64.getEncoder().encodeToString(AUTH.getBytes()); 13 | 14 | URL url = new URL("https://data.oxylabs.io/v1/queries/12345678900987654321"); 15 | 16 | HttpURLConnection con = (HttpURLConnection) url.openConnection(); 17 | con.setRequestMethod("GET"); 18 | 19 | con.setRequestProperty("Authorization", authHeaderValue); 20 | 21 | int code = con.getResponseCode(); 22 | System.out.println(code); 23 | 24 | try (BufferedReader br = new BufferedReader(new InputStreamReader(con.getInputStream(), "utf-8"))) { 25 | StringBuilder response = new StringBuilder(); 26 | String responseLine = null; 27 | while ((responseLine = br.readLine()) != null) { 28 | response.append(responseLine.trim()); 29 | } 30 | System.out.println(response.toString()); 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /web-unblocker/PHP/src/FileManager.php: -------------------------------------------------------------------------------- 1 | consoleWriter->writeln('Reading from the list...'); 19 | 20 | $urlList = @file(sprintf('%s/%s', $this->inputDirectory, URL_LIST_NAME)); 21 | if (!$urlList) { 22 | $this->consoleWriter->writelnError('Failed to read input file'); 23 | exit(1); 24 | } 25 | 26 | return array_filter($urlList); 27 | } 28 | 29 | public function writeError(string $contents): void 30 | { 31 | $this->consoleWriter->writeln('ERROR: ' . $contents); 32 | 33 | $fileName = sprintf('%s/failed_requests.txt', $this->outputDirectory); 34 | file_put_contents($fileName, $contents . 
PHP_EOL, FILE_APPEND); 35 | } 36 | 37 | public function writeSuccess($position, string $contents): void 38 | { 39 | $fileName = sprintf( '%s/result_%d.html', $this->outputDirectory, $position); 40 | file_put_contents($fileName, $contents); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /shared-datacenter-proxies/GoLang/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "golang.org/x/time/rate" 6 | "sync" 7 | "time" 8 | ) 9 | 10 | func main() { 11 | start := time.Now() 12 | 13 | fmt.Println("Reading from the url list...") 14 | urls, err := readLines(UrlListName) 15 | if err != nil { 16 | printAndExit("Failed to read the url input file") 17 | } 18 | 19 | fmt.Println("Reading from the proxy list...") 20 | proxyMap, err := readProxyMap(ProxyListName) 21 | if err != nil { 22 | printAndExit("Failed to read the proxy input file") 23 | } 24 | 25 | fmt.Println("Retrieving proxy list...") 26 | apiRateLimit := rate.NewLimiter(rate.Every(time.Second), RequestsRate) 27 | apiClient := NewClient(apiRateLimit) 28 | if err != nil { 29 | printAndExit("Failed to download proxy list") 30 | } 31 | 32 | wc := sync.WaitGroup{} 33 | 34 | scraper := NewScraper(apiClient) 35 | 36 | fmt.Println("Gathering results...") 37 | for index, url := range urls { 38 | wc.Add(1) 39 | go func(url string, position int) { 40 | parsedUrl, formattedProxy := createProxyByUrl(proxyMap, url) 41 | scraper.scrape(position, formattedProxy, parsedUrl) 42 | wc.Done() 43 | }(url, index+1) 44 | } 45 | 46 | wc.Wait() 47 | 48 | elapsed := time.Since(start) 49 | fmt.Printf("Script finished after %.2fs\n", elapsed.Seconds()) 50 | } 51 | -------------------------------------------------------------------------------- /datacenter-proxies/PHP/src/FileManager.php: -------------------------------------------------------------------------------- 1 | consoleWriter->writeln('Reading from the 
list...'); 19 | 20 | $urlList = @file(sprintf('%s/%s', $this->inputDirectory, URL_LIST_NAME)); 21 | if (!$urlList) { 22 | $this->consoleWriter->writelnError('Failed to read input file'); 23 | exit(1); 24 | } 25 | 26 | return array_filter($urlList); 27 | } 28 | 29 | public function writeError(string $contents): void 30 | { 31 | $this->consoleWriter->writeln('ERROR: ' . $contents); 32 | 33 | $fileName = sprintf('%s/failed_requests.txt', $this->outputDirectory); 34 | file_put_contents($fileName, $contents . PHP_EOL, FILE_APPEND); 35 | } 36 | 37 | public function writeSuccess($position, string $contents): void 38 | { 39 | $fileName = sprintf( '%s/result_%d.html', $this->outputDirectory, $position); 40 | file_put_contents($fileName, $contents); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /residential-proxies/PHP/src/FileManager.php: -------------------------------------------------------------------------------- 1 | consoleWriter->writeln('Reading from the list...'); 19 | 20 | $urlList = @file(sprintf('%s/%s', $this->inputDirectory, URL_LIST_NAME)); 21 | if (!$urlList) { 22 | $this->consoleWriter->writelnError('Failed to read input file'); 23 | exit(1); 24 | } 25 | 26 | return array_filter($urlList); 27 | } 28 | 29 | public function writeError(string $contents): void 30 | { 31 | $this->consoleWriter->writeln('ERROR: ' . $contents); 32 | 33 | $fileName = sprintf('%s/failed_requests.txt', $this->outputDirectory); 34 | file_put_contents($fileName, $contents . 
public class RetrieveJobContent {
    // NOTE(review): placeholder credentials ("username:password") — replace
    // with real Oxylabs API credentials before running.
    private static final String AUTH = "user:pass1";

    /**
     * Downloads the results of a finished job from the Oxylabs API and
     * prints the HTTP status code followed by the response body.
     *
     * @param args unused
     * @throws IOException if the connection or read fails
     */
    public static void main(String[] args) throws IOException {
        String authHeaderValue = "Basic " + Base64.getEncoder().encodeToString(AUTH.getBytes());

        // NOTE(review): replace the job id below with a real job id.
        URL url = new URL("https://data.oxylabs.io/v1/queries/12345678900987654321/results");

        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestMethod("GET");
        con.setRequestProperty("Authorization", authHeaderValue);

        int code = con.getResponseCode();
        System.out.println(code);

        // Fix: on HTTP errors (>= 400) getInputStream() throws IOException,
        // so the error body was never printed even though the status code
        // was. Read the error stream in that case instead.
        java.io.InputStream body = code < 400 ? con.getInputStream() : con.getErrorStream();
        if (body == null) {
            return; // no response body available to print
        }

        try (BufferedReader br = new BufferedReader(new InputStreamReader(body, "utf-8"))) {
            StringBuilder response = new StringBuilder();
            String responseLine;
            while ((responseLine = br.readLine()) != null) {
                response.append(responseLine.trim());
            }
            System.out.println(response.toString());
        }
    }
}
namespace rtc_listener
{
    // ASP.NET Core startup for the realtime-callback listener: registers
    // services and configures the HTTP request pipeline.
    public class Startup
    {
        public Startup(IConfiguration configuration)
        {
            Configuration = configuration;
        }

        // Application configuration supplied by the host (appsettings.json, env vars, ...).
        public IConfiguration Configuration { get; }

        // This method gets called by the runtime. Use this method to add services to the container.
        public void ConfigureServices(IServiceCollection services)
        {
            // API controllers only; no views are registered for this listener.
            services.AddControllers();
        }

        // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
        // NOTE: middleware registration order below is significant.
        public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
        {
            if (env.IsDevelopment())
            {
                // Detailed exception pages in development only.
                app.UseDeveloperExceptionPage();
            }

            // NOTE(review): HTTPS redirection is left disabled — presumably the
            // listener must accept plain-HTTP callbacks; confirm before enabling.
            // app.UseHttpsRedirection();

            app.UseRouting();

            app.UseAuthorization();

            app.UseEndpoints(endpoints =>
            {
                endpoints.MapControllers();
            });
        }
    }
}
34 | var urlParts = url.split(";"); 35 | if (urlParts.length == 2) { 36 | var proxyUsername = String.format("customer-%s-cc-%s", Settings.USERNAME, urlParts[1]); 37 | 38 | return new Proxy(urlParts[0], Settings.PROXY_ADDRESS, proxyUsername, Settings.PASSWORD); 39 | } 40 | 41 | var proxyUsername = String.format("customer-%s", Settings.USERNAME); 42 | 43 | return new Proxy(url, Settings.PROXY_ADDRESS, proxyUsername, Settings.PASSWORD); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /datacenter-proxies/Nodejs/client.js: -------------------------------------------------------------------------------- 1 | const settings = require('./settings'); 2 | const headers = require('./headers'); 3 | 4 | const axios = require('axios') 5 | const rateLimit = require('axios-rate-limit') 6 | 7 | const http = rateLimit(axios.create(), { 8 | maxRequests: settings.RequestsRate, 9 | perMilliseconds: 1000 10 | }) 11 | 12 | const filesystem = require('./filesystem'); 13 | 14 | const apiUrl = `https://${settings.Username}:${settings.Password}@proxy.oxylabs.io/all` 15 | 16 | const inSeconds = (ms) => ms * 1000; 17 | 18 | module.exports = { 19 | fetchProxies: async () => { 20 | const response = await http.get(apiUrl, { timeout: inSeconds(settings.Timeout) }); 21 | 22 | return response 23 | .data 24 | .split("\n") 25 | .filter((ip) => ip.length !== 0) 26 | }, 27 | 28 | fetchPage: async (proxy, url) => { 29 | parsedProxy = new URL(proxy); 30 | 31 | const proxyPort = parsedProxy.port; 32 | parsedProxy.port = ''; 33 | 34 | let response = null; 35 | try { 36 | response = await http.get(url, { 37 | headers: headers.getRandomBrowserHeaders(), 38 | timeout: inSeconds(settings.Timeout), 39 | proxy: { 40 | host: parsedProxy.host, 41 | port: proxyPort, 42 | auth: { username: parsedProxy.username, password: parsedProxy.password } 43 | } 44 | }); 45 | } catch (e) { 46 | return [null, e] 47 | } 48 | 49 | return [response, null] 50 | }, 51 | }; 52 | 53 | 
require_relative './header'
require_relative './settings'
require_relative './client'
require_relative './proxy'
require_relative './scraper'
require 'concurrent'

starting = Time.now

client = Client.new
scraper = Scraper.new(client)

p 'Reading from the list...'
urls = File.readlines(get_setting(:UrlListName), chomp: true)

p 'Reading proxy map...'
proxy_list = File.readlines(get_setting(:ProxyListName), chomp: true)

# Index each proxy by the country code embedded in its hostname
# (e.g. "dc.fr-pr.oxylabs.io:10000" -> "FR"); proxies that do not match
# the pattern fall back to the configured default index name.
pattern = /^dc\.(\w{2})-?pr\.oxylabs\.io:\d+$/i
proxy_map = proxy_list.map do |proxy_url|
  result = proxy_url.match(pattern)
  if result.nil?
    default_index_name = get_setting(:DefaultProxyIndexName)
    [default_index_name, proxy_url]
  else
    country, = result.captures
    [country.upcase, proxy_url]
  end
end.to_h

p 'Gathering results...'
operations = []
urls.each_with_index do |url, position|
  parsed_url, formatted_proxy = create_proxy_by_url(proxy_map, url)

  operation = Concurrent::Future.execute do
    scraper.scrape(position, formatted_proxy, parsed_url)

    # Fix: `1 / requests_rate` used integer division, which evaluates to 0
    # for any rate above 1 and silently disabled throttling entirely.
    requests_rate = get_setting(:RequestsRate)
    sleep(1.0 / requests_rate)
  end

  operations.push(operation)
end

# Block until every future has completed before reporting the elapsed time.
operations.each { |operation| operation.value }

ending = Time.now
elapsed = ending - starting
printf "Script finished after %.2fs\n", elapsed
// generateAuthHeader builds an HTTP Basic Authorization header value
// from the given username and password.
func generateAuthHeader(username, password string) string {
	// Basic auth encodes "username:password" in standard base64.
	credentials := []byte(username + ":" + password)

	return "Basic " + base64.StdEncoding.EncodeToString(credentials)
}
-------------------------------------------------------------------------------- /datacenter-proxies/GoLang/filesystem.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "fmt" 7 | "io" 8 | "os" 9 | ) 10 | 11 | const errorFilename = "failed_requests.txt" 12 | const successFilename = "result_%d.html" 13 | 14 | func readLines(path string) ([]string, error) { 15 | file, err := os.Open(path) 16 | if err != nil { 17 | return nil, err 18 | } 19 | defer file.Close() 20 | 21 | var lines []string 22 | scanner := bufio.NewScanner(file) 23 | for scanner.Scan() { 24 | lines = append(lines, scanner.Text()) 25 | } 26 | return lines, scanner.Err() 27 | } 28 | 29 | func writeErrorToStdout(format string, a ...interface{}) { 30 | fmt.Printf(format+"\n", a...) 31 | } 32 | 33 | func writeErrorToFile(format string, a ...interface{}) { 34 | message := fmt.Sprintf(format+"\n", a...) 35 | writeToFile(errorFilename, message) 36 | } 37 | 38 | func writeSuccessToFile(position int, responseStream io.Reader) { 39 | contents := streamToString(responseStream) 40 | formattedFilename := fmt.Sprintf(successFilename, position) 41 | writeToFile(formattedFilename, contents) 42 | } 43 | 44 | func streamToString(stream io.Reader) string { 45 | buf := new(bytes.Buffer) 46 | buf.ReadFrom(stream) 47 | return buf.String() 48 | } 49 | 50 | func writeToFile(filename, text string) { 51 | f, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600) 52 | if err != nil { 53 | panic(err) 54 | } 55 | 56 | defer f.Close() 57 | 58 | if _, err = f.WriteString(text); err != nil { 59 | panic(err) 60 | } 61 | } 62 | --------------------------------------------------------------------------------