├── .github └── workflows │ └── main.yml ├── .gitignore ├── CONTRIBUTORS.md ├── LICENSE.md ├── README-vi.md ├── README.md ├── README.rst ├── codecov.yml ├── docs ├── .nojekyll ├── _sources │ ├── code.rst.txt │ ├── index.rst.txt │ ├── readme.rst.txt │ └── source │ │ ├── http_request_randomizer.requests.errors.rst.txt │ │ ├── http_request_randomizer.requests.parsers.js.rst.txt │ │ ├── http_request_randomizer.requests.parsers.rst.txt │ │ ├── http_request_randomizer.requests.proxy.rst.txt │ │ ├── http_request_randomizer.requests.rst.txt │ │ ├── http_request_randomizer.requests.runners.rst.txt │ │ ├── http_request_randomizer.requests.useragent.rst.txt │ │ ├── http_request_randomizer.rst.txt │ │ └── modules.rst.txt ├── _static │ ├── basic.css │ ├── css │ │ ├── badge_only.css │ │ ├── fonts │ │ │ ├── Roboto-Slab-Bold.woff │ │ │ ├── Roboto-Slab-Bold.woff2 │ │ │ ├── Roboto-Slab-Regular.woff │ │ │ ├── Roboto-Slab-Regular.woff2 │ │ │ ├── fontawesome-webfont.eot │ │ │ ├── fontawesome-webfont.svg │ │ │ ├── fontawesome-webfont.ttf │ │ │ ├── fontawesome-webfont.woff │ │ │ ├── fontawesome-webfont.woff2 │ │ │ ├── lato-bold-italic.woff │ │ │ ├── lato-bold-italic.woff2 │ │ │ ├── lato-bold.woff │ │ │ ├── lato-bold.woff2 │ │ │ ├── lato-normal-italic.woff │ │ │ ├── lato-normal-italic.woff2 │ │ │ ├── lato-normal.woff │ │ │ └── lato-normal.woff2 │ │ └── theme.css │ ├── doctools.js │ ├── documentation_options.js │ ├── file.png │ ├── fonts │ │ ├── FontAwesome.otf │ │ ├── Lato │ │ │ ├── lato-bold.eot │ │ │ ├── lato-bold.ttf │ │ │ ├── lato-bold.woff │ │ │ ├── lato-bold.woff2 │ │ │ ├── lato-bolditalic.eot │ │ │ ├── lato-bolditalic.ttf │ │ │ ├── lato-bolditalic.woff │ │ │ ├── lato-bolditalic.woff2 │ │ │ ├── lato-italic.eot │ │ │ ├── lato-italic.ttf │ │ │ ├── lato-italic.woff │ │ │ ├── lato-italic.woff2 │ │ │ ├── lato-regular.eot │ │ │ ├── lato-regular.ttf │ │ │ ├── lato-regular.woff │ │ │ └── lato-regular.woff2 │ │ ├── Roboto-Slab-Bold.woff │ │ ├── Roboto-Slab-Bold.woff2 │ │ ├── Roboto-Slab-Light.woff │ │ ├── Roboto-Slab-Light.woff2 │ │ ├── Roboto-Slab-Regular.woff │ │ ├── Roboto-Slab-Regular.woff2 │ │ ├── Roboto-Slab-Thin.woff │ │ ├── Roboto-Slab-Thin.woff2 │ │ ├── RobotoSlab │ │ │ ├── roboto-slab-v7-bold.eot │ │ │ ├── roboto-slab-v7-bold.ttf │ │ │ ├── roboto-slab-v7-bold.woff │ │ │ ├── roboto-slab-v7-bold.woff2 │ │ │ ├── roboto-slab-v7-regular.eot │ │ │ ├── roboto-slab-v7-regular.ttf │ │ │ ├── roboto-slab-v7-regular.woff │ │ │ └── roboto-slab-v7-regular.woff2 │ │ ├── fontawesome-webfont.eot │ │ ├── fontawesome-webfont.svg │ │ ├── fontawesome-webfont.ttf │ │ ├── fontawesome-webfont.woff │ │ ├── fontawesome-webfont.woff2 │ │ ├── lato-bold-italic.woff │ │ ├── lato-bold-italic.woff2 │ │ ├── lato-bold.woff │ │ ├── lato-bold.woff2 │ │ ├── lato-normal-italic.woff │ │ ├── lato-normal-italic.woff2 │ │ ├── lato-normal.woff │ │ └── lato-normal.woff2 │ ├── jquery-3.5.1.js │ ├── jquery.js │ ├── js │ │ ├── badge_only.js │ │ ├── html5shiv-printshiv.min.js │ │ ├── html5shiv.min.js │ │ ├── modernizr.min.js │ │ └── theme.js │ ├── language_data.js │ ├── minus.png │ ├── plus.png │ ├── pygments.css │ ├── searchtools.js │ ├── underscore-1.3.1.js │ └── underscore.js ├── code.html ├── genindex.html ├── index.html ├── objects.inv ├── py-modindex.html ├── readme.html ├── search.html ├── searchindex.js └── source │ ├── http_request_randomizer.html │ ├── http_request_randomizer.requests.errors.html │ ├── http_request_randomizer.requests.html │ ├── http_request_randomizer.requests.parsers.html │ ├── 
http_request_randomizer.requests.parsers.js.html │ ├── http_request_randomizer.requests.proxy.html │ ├── http_request_randomizer.requests.runners.html │ ├── http_request_randomizer.requests.useragent.html │ └── modules.html ├── http_request_randomizer ├── __init__.py └── requests │ ├── __init__.py │ ├── data │ └── user_agents.txt │ ├── errors │ ├── ParserExceptions.py │ ├── ProxyListException.py │ └── __init__.py │ ├── parsers │ ├── FreeProxyParser.py │ ├── PremProxyParser.py │ ├── ProxyForEuParser.py │ ├── RebroWeeblyParser.py │ ├── SslProxyParser.py │ ├── UrlParser.py │ ├── __init__.py │ └── js │ │ ├── UnPacker.py │ │ └── __init__.py │ ├── proxy │ ├── ProxyObject.py │ ├── __init__.py │ └── requestProxy.py │ ├── runners │ ├── __init__.py │ └── proxyList.py │ └── useragent │ ├── __init__.py │ └── userAgent.py ├── requirements.txt ├── setup.py ├── tests ├── __init__.py ├── mocks.py ├── test_js_unpacker.py ├── test_parsers.py ├── test_providers.py ├── test_proxyList_args.py ├── test_proxyObject.py └── test_useragent.py └── tox.ini /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: CI 4 | 5 | # Controls when the action will run. Triggers the workflow on push or pull request 6 | # events but only for the master branch 7 | on: 8 | push: 9 | branches: [ master ] 10 | pull_request: 11 | branches: [ master ] 12 | workflow_dispatch: 13 | 14 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 15 | jobs: 16 | tests: 17 | name: "Python ${{ matrix.python-version }}" 18 | runs-on: "ubuntu-latest" 19 | env: 20 | USING_COVERAGE: '3.9' 21 | 22 | strategy: 23 | matrix: 24 | python-version: ["3.6","3.7","3.8","3.9","pypy3"] 25 | 26 | steps: 27 | - uses: "actions/checkout@v2" 28 | - uses: "actions/setup-python@v2" 29 | with: 30 | python-version: "${{ matrix.python-version }}" 31 | - name: "Install dependencies" 32 | run: | 33 | set -xe 34 | python -VV 35 | python -m site 36 | python -m pip install --upgrade pip setuptools wheel 37 | python -m pip install --upgrade coverage[toml] virtualenv tox tox-gh-actions 38 | pip install -r requirements.txt 39 | - name: "Run unit tests for tox ${{ matrix.python-version }}" 40 | run: | 41 | python -m tox 42 | pip install pytest 43 | py.test tests 44 | - name: "Convert coverage" 45 | run: "python -m coverage xml" 46 | if: "contains(env.USING_COVERAGE, matrix.python-version)" 47 | - name: "Upload coverage to Codecov" 48 | if: "contains(env.USING_COVERAGE, matrix.python-version)" 49 | uses: "codecov/codecov-action@v1" 50 | with: 51 | fail_ci_if_error: true 52 | # ensure Dev env works everywhere 53 | install-dev: 54 | strategy: 55 | matrix: 56 | os: ["ubuntu-latest", "windows-latest", "macos-latest"] 57 | 58 | name: "Verify dev env" 59 | runs-on: "${{ matrix.os }}" 60 | steps: 61 | - uses: "actions/checkout@v2" 62 | - uses: "actions/setup-python@v2" 63 | with: 64 | python-version: "3.9" 65 | - name: "Install in dev mode" 66 | run: "python -m pip install -e .[dev]" 67 | - name: "Import package" 68 | run: "python -c 'import http_request_randomizer; print(http_request_randomizer.__version__)'" -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | ### JetBrains ### 3 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and 
Webstorm 4 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 5 | 6 | # User-specific stuff: 7 | .idea/workspace.xml 8 | .idea/tasks.xml 9 | .idea/dictionaries 10 | .idea/vcs.xml 11 | .idea/jsLibraryMappings.xml 12 | 13 | # Sensitive or high-churn files: 14 | .idea/dataSources.ids 15 | .idea/dataSources.xml 16 | .idea/dataSources.local.xml 17 | .idea/sqlDataSources.xml 18 | .idea/dynamic.xml 19 | .idea/uiDesigner.xml 20 | 21 | ## File-based project format: 22 | *.iws 23 |
-------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | Contributors (as ordered by GitHub) 2 | ============================================ 3 | 4 | * **[ieguiguren](https://github.com/ieguiguren)** 5 | 6 | * Samair.ru proxy parser 7 | * Rebro.weebly.com proxy parser 8 | 9 | * **[asmaier](https://github.com/asmaier)** 10 | 11 | * First version of setup.py 12 | * Thread-safe proxy object access 13 | * Relative data paths 14 | 15 | * **[Glenn McGuire](https://github.com/glen-mac)** 16 | 17 | * Custom request methods and headers 18 | * Proxy sustainability between requests 19 | * Custom logging 20 | 21 | * **[JohnBae93](https://github.com/JohnBae93)** 22 | 23 | * Log user agent info along with proxy info 24 | 25 | * **[Dimitris Agelou](https://github.com/jimangel2001)** 26 | 27 | * Introduced proxy object 28 | 29 | * **[Hessu1337](https://github.com/Hessu1337)** 30 | 31 | * Samair parser fix 32 | 33 | * **[hmphu](https://github.com/hmphu)** 34 | 35 | * README translation to Vietnamese 36 | 37 | * **[gabrielgradinaru](https://github.com/gabrielgradinaru)** 38 | 39 | * Configurable requests timeout with error handling 40 | * Test cases covering mocked providers 41 | 42 | * **[christinabo](https://github.com/christinabo)** 43 | 44 | * Python 3 compatibility 45 | 46 | * **[la55u](https://github.com/la55u)** 47 | 48 | * JS unpacker 49 | * PremProxy migration 50 | * SslProxyParser 51 | 52 | 53 | **[More details](https://github.com/pgaref/HTTP_Request_Randomizer/contributors).** -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2020 Panagiotis Garefalakis http://pgaref.github.io 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE.
-------------------------------------------------------------------------------- /README-vi.md: -------------------------------------------------------------------------------- 1 | # HTTP Request Randomizer [![Build Status](https://travis-ci.org/pgaref/HTTP_Request_Randomizer.svg?branch=master)](https://travis-ci.org/pgaref/HTTP_Request_Randomizer) [![Coverage Status](https://coveralls.io/repos/github/pgaref/HTTP_Request_Randomizer/badge.svg?branch=master)](https://coveralls.io/github/pgaref/HTTP_Request_Randomizer?branch=master) [![Dependency Status](https://gemnasium.com/badges/github.com/pgaref/HTTP_Request_Randomizer.svg)](https://gemnasium.com/github.com/pgaref/HTTP_Request_Randomizer) [![PyPI version](https://badge.fury.io/py/http-request-randomizer.svg)](https://badge.fury.io/py/http-request-randomizer) 2 | 3 | [View English version of this file here](README.md) 4 | 5 | This library improves the way HTTP requests are performed with Python's **requests** library. One essential feature it adds is proxy support; HTTP is very well designed to work with proxies. 6 | 7 | Proxies are very useful for web-scraping work, or simply when you want to stay anonymous (anonymization). 8 | 9 | In this project I use publicly available proxies together with many different user-agents to send HTTP requests from random IPs. 10 | 11 | ## What is a proxy 12 | 13 | A proxy lets you use server P (a middleman) to contact server A and then relay the result back to you. Put differently, it hides your presence when you visit a website: the site sees traffic coming from many different clients instead of a single one. 14 | 15 | Websites usually block IP addresses that send too many requests, and proxies are one way around this. You could abuse proxies to attack a website, but you would do better to understand how proxies actually work ;) 16 | 17 | ## What is a User-Agent 18 | 19 | A user-agent is just a value sent along with an HTTP request; it lets a script emulate a browser when sending requests to any website. 20 | 21 | ## The source code 22 | 23 | The code in this repository collects proxies from **five** different websites: 24 | * http://proxyfor.eu/geo.php 25 | * http://free-proxy-list.net 26 | * http://rebro.weebly.com/proxy-list.html 27 | * http://www.samair.ru/proxy/time-01.htm 28 | * https://www.sslproxies.org 29 | 30 | After collecting the proxy list and removing the slow proxies, it randomly picks one to send the request to the specified URL. 31 | The timeout is set to 30 seconds, and if a proxy fails to respond it is removed from the proxy list. 32 | Note that each request is sent with a different user-agent; the user-agent list (around 900 different strings) is stored in the **/data/user_agents.txt** file. 33 | 34 | ## How to use 35 | 36 | This project is distributed as a PyPI library! 37 | Here is sample source code for using the library.
You just need to add **http-request-randomizer** to your requirements.txt file and run the code below: 38 | 39 | ````python 40 | import time 41 | from http_request_randomizer.requests.proxy.requestProxy import RequestProxy 42 | 43 | if __name__ == '__main__': 44 | 45 | start = time.time() 46 | req_proxy = RequestProxy() 47 | print("Initialization took: {0} sec".format((time.time() - start))) 48 | print("Size: {0}".format(len(req_proxy.get_proxy_list()))) 49 | print("ALL = {0} ".format(req_proxy.get_proxy_list())) 50 | 51 | test_url = 'http://ipv4.icanhazip.com' 52 | 53 | while True: 54 | start = time.time() 55 | request = req_proxy.generate_proxied_request(test_url) 56 | print("Proxied Request Took: {0} sec => Status: {1}".format((time.time() - start), request.__str__())) 57 | if request is not None: 58 | print("\t Response: ip={0}".format(u''.join(request.text).encode('utf-8'))) 59 | print("Proxy List Size: {0}".format(len(req_proxy.get_proxy_list()))) 60 | 61 | print("-> Going to sleep..") 62 | time.sleep(10) 63 | ```` 64 | 65 | ## Documentation 66 | 67 | [http-request-randomizer documentation](http://pythonhosted.org/http-request-randomizer) 68 | 69 | 70 | ## Contributing 71 | 72 | Your contributions are always appreciated. Don't hesitate to send us a pull request. 73 | 74 | ## Having a problem with the library? 75 | 76 | Raise your issue [here](https://github.com/pgaref/HTTP_Request_Randomizer/issues), with as much detail as possible; we will help you as soon as we can :) 77 | 78 | ## License 79 | 80 | This project is licensed under the terms of the MIT license. 81 |
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # HTTP Request Randomizer [![Build Status](https://github.com/pgaref/http_request_randomizer/workflows/CI/badge.svg)](https://github.com/pgaref/http_request_randomizer/actions) [![codecov](https://codecov.io/gh/pgaref/HTTP_Request_Randomizer/branch/master/graph/badge.svg?token=FjHh47wdYV)](https://codecov.io/gh/pgaref/HTTP_Request_Randomizer) [![Requirements Status](https://requires.io/github/pgaref/HTTP_Request_Randomizer/requirements.svg?branch=master)](https://requires.io/github/pgaref/HTTP_Request_Randomizer/requirements/?branch=master) [![PyPI version](https://badge.fury.io/py/http-request-randomizer.svg)](https://badge.fury.io/py/http-request-randomizer) 2 | 3 | [Vietnamese version](README-vi.md) 4 | 5 | A convenient way to implement HTTP requests is to use Python's **requests** library. 6 | One of requests' most popular features is simple proxying support. 7 | HTTP as a protocol has very well-defined semantics for dealing with proxies, and this contributed to the widespread deployment of HTTP proxies. 8 | 9 | Proxying is very useful when conducting intensive web crawling/scraping or when you just want to hide your identity (anonymization). 10 | 11 | In this project I am using public proxies to randomise HTTP requests over a number of IP addresses, and a variety of known user-agent headers to make these requests look as if they were produced by different applications and operating systems. 12 | 13 | 14 | ## Proxies 15 | 16 | Proxies provide a way to use server P (the middleman) to contact server A and then route the response back to you. In more nefarious circles, it's a prime way to make your presence unknown and pose as many clients to a website instead of just one client. 17 | Websites will often block IPs that make too many requests, and proxies are a way to get around this. Even if you are only simulating an attack, you should know how it is done. 18 | 19 | 20 | ## User Agent 21 | 22 | Surprisingly, the only thing that tells a server which application triggered the request (a specific browser, say, or a script) is a header called the "user agent", which is included in the HTTP request. 23 |
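To make the proxy and user-agent ideas above concrete, here is a minimal sketch using the plain **requests** library. The proxy address and user-agent string are illustrative placeholders, not values shipped with this project:

````python
import requests

# Placeholder values for illustration only: substitute a live proxy
# address and whatever user-agent string you want to present.
proxies = {'http': 'http://203.0.113.10:8080'}  # hypothetical proxy
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'}

# The target server sees the proxy's IP and the spoofed user agent.
response = requests.get('http://ipv4.icanhazip.com',
                        proxies=proxies, headers=headers, timeout=30)
print(response.text.strip())
````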
24 | ## The source code 25 | 26 | The project code in this repository crawls **five** different public proxy websites: 27 | * http://proxyfor.eu/geo.php 28 | * http://free-proxy-list.net 29 | * http://rebro.weebly.com/proxy-list.html 30 | * http://www.samair.ru/proxy/time-01.htm 31 | * https://www.sslproxies.org 32 | 33 | After collecting the proxy data and filtering out the slowest proxies, it randomly selects one of them to query the target URL. 34 | The request timeout is configured at 30 seconds, and if a proxy fails to return a response it is deleted from the application proxy list. 35 | Note that a different user-agent header is used for each request. The headers are stored in the **/data/user_agents.txt** file, which contains around 900 different agents. 36 |
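The random-selection and eviction behaviour described above boils down to a loop like the following sketch. This is not the library's actual internals, just an illustration; `proxy_pool` is a hypothetical list of `'host:port'` strings:

````python
import random
import requests

def fetch_via_random_proxy(url, proxy_pool, timeout=30):
    """Pick a random proxy, try the request, and evict the proxy
    from the pool if it fails to respond in time."""
    while proxy_pool:
        proxy = random.choice(proxy_pool)
        try:
            return requests.get(url, proxies={'http': 'http://' + proxy},
                                timeout=timeout)
        except requests.RequestException:
            proxy_pool.remove(proxy)  # unresponsive proxy: drop it
    raise RuntimeError('proxy pool exhausted')
````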
37 | ## Installation 38 | If you wish to use this module as a [CLI tool](#command-line-interface), install it globally via pip: 39 | ``` 40 | pip install http-request-randomizer 41 | ``` 42 | 43 | Otherwise, you can clone the repository and use setup tools: 44 | ``` 45 | python setup.py install 46 | ``` 47 | 48 | ## Dev testing 49 | Clone the repo, install requirements, develop, and run tests: 50 | ``` 51 | pip install -r requirements.txt 52 | tox -e pyDevVerbose 53 | ``` 54 | 55 | ## How to use 56 | 57 | * [Command-line interface](#command-line-interface) 58 | * [Library API](#api) 59 | 60 | ## Command-line interface 61 | 62 | Assuming that you have **http-request-randomizer** installed, you can use the commands below; a combined example follows the list: 63 | 64 | show help message: 65 | ``` 66 | proxyList -h, --help 67 | ``` 68 | specify proxy provider(s) (required): 69 | ``` 70 | -s {proxyforeu,rebro,samair,freeproxy,all} 71 | ``` 72 | specify output stream (default: sys.stdout), which could also be a file: 73 | ``` 74 | -o, --outfile 75 | ``` 76 | specify provider timeout threshold in seconds: 77 | ``` 78 | -t, --timeout 79 | ``` 80 | specify proxy bandwidth threshold in KBs: 81 | ``` 82 | -bw, --bandwidth 83 | ``` 84 | show program's version number: 85 | ``` 86 | -v, --version 87 | ``` 88 |
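For example, the following (illustrative) invocation combines the flags above to collect proxies from all providers, with a 5-second provider timeout, and write them to a file:

```
proxyList -s all -t 5 -o proxies.txt
```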
89 | ## API 90 | 91 | 92 | To use **http-request-randomizer** as a library, include it in your requirements.txt file. 93 | Then you can simply generate a proxied request using a method call: 94 | 95 | ````python 96 | import logging 97 | import time 98 | from http_request_randomizer.requests.proxy.requestProxy import RequestProxy 99 | 100 | if __name__ == '__main__': 101 | 102 | start = time.time() 103 | req_proxy = RequestProxy(log_level=logging.ERROR) 104 | print("Initialization took: {0} sec".format((time.time() - start))) 105 | print("Size: {0}".format(len(req_proxy.get_proxy_list()))) 106 | print("ALL = {0} ".format(list(map(lambda x: x.get_address(), req_proxy.get_proxy_list())))) 107 | 108 | test_url = 'http://ipv4.icanhazip.com' 109 | 110 | while True: 111 | start = time.time() 112 | request = req_proxy.generate_proxied_request(test_url) 113 | print("Proxied Request Took: {0} sec => Status: {1}".format((time.time() - start), request.__str__())) 114 | if request is not None: 115 | print("\t Response: ip={0}".format(u''.join(request.text).encode('utf-8'))) 116 | print("Proxy List Size: {0}".format(len(req_proxy.get_proxy_list()))) 117 | 118 | print("-> Going to sleep..") 119 | time.sleep(10) 120 | ```` 121 | 122 | ### Changing log levels 123 | The `RequestProxy` constructor accepts an optional `log_level` parameter that can be used to change the level of logging. By default, this is equal to 0, or NOTSET. The Python logging levels are documented [here](https://docs.python.org/3/library/logging.html#logging-levels). You can use either integers or their equivalent constants from the logging module (e.g. `logging.DEBUG`, `logging.ERROR`, etc.).
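For instance, to see only errors from the proxy machinery (a minimal sketch of the constructor call already shown above):

````python
import logging
from http_request_randomizer.requests.proxy.requestProxy import RequestProxy

# Constant and integer forms are interchangeable:
# log_level=40 would behave the same as log_level=logging.ERROR.
req_proxy = RequestProxy(log_level=logging.ERROR)
````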
124 | 125 | ## Documentation 126 | 127 | [http-request-randomizer documentation](https://pgaref.com/HTTP_Request_Randomizer) 128 | 129 | 130 | ## Contributing 131 | 132 | Many thanks to the open-source community for 133 | [contributing](https://github.com/pgaref/HTTP_Request_Randomizer/blob/master/CONTRIBUTORS.md) to this project! 134 | 135 | 136 | ## Faced an issue? 137 | 138 | Open an issue [here](https://github.com/pgaref/HTTP_Request_Randomizer/issues), and be as detailed as possible :) 139 | 140 | ## Feels like a feature is missing? 141 | 142 | Feel free to open a ticket! PRs are always welcome! 143 | 144 | ## License 145 | 146 | This project is licensed under the terms of the MIT license. 147 |
-------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | HTTP Request Randomizer |Build Status| |codecov| |Requirements Status| |PyPI version| 2 | ===================================================================================== 3 | 4 | `Vietnamese version <README-vi.md>`__ 5 | 6 | A convenient way to implement HTTP requests is to use Python’s 7 | **requests** library. One of requests’ most popular features is simple 8 | proxying support. HTTP as a protocol has very well-defined semantics for 9 | dealing with proxies, and this contributed to the widespread deployment 10 | of HTTP proxies. 11 | 12 | Proxying is very useful when conducting intensive web crawling/scraping 13 | or when you just want to hide your identity (anonymization). 14 | 15 | In this project I am using public proxies to randomise HTTP requests 16 | over a number of IP addresses, and a variety of known user-agent 17 | headers to make these requests look as if they were produced by different 18 | applications and operating systems. 19 | 20 | Proxies 21 | ------- 22 | 23 | Proxies provide a way to use server P (the middleman) to contact server 24 | A and then route the response back to you. In more nefarious circles, 25 | it’s a prime way to make your presence unknown and pose as many clients 26 | to a website instead of just one client. Websites will often block 27 | IPs that make too many requests, and proxies are a way to get around 28 | this. Even if you are only simulating an attack, you should know how it’s done. 29 | 30 | User Agent 31 | ---------- 32 | 33 | Surprisingly, the only thing that tells a server which application 34 | triggered the request (a specific browser, say, or a script) is a header 35 | called the “user agent” which is included in the HTTP request. 36 | 37 | The source code 38 | --------------- 39 | 40 | The project code in this repository crawls **five** different 41 | public proxy websites: \* http://proxyfor.eu/geo.php \* 42 | http://free-proxy-list.net \* http://rebro.weebly.com/proxy-list.html \* 43 | http://www.samair.ru/proxy/time-01.htm \* https://www.sslproxies.org 44 | 45 | After collecting the proxy data and filtering out the slowest proxies, it 46 | randomly selects one of them to query the target URL. The request 47 | timeout is configured at 30 seconds, and if a proxy fails to return a 48 | response it is deleted from the application proxy list. Note that for 49 | each request a different user-agent header is used. The 50 | different headers are stored in the **/data/user_agents.txt** file, which 51 | contains around 900 different agents. 52 | 53 | Installation 54 | ------------ 55 | 56 | If you wish to use this module as a `CLI 57 | tool <#command-line-interface>`__, install it globally via pip: 58 | 59 | :: 60 | 61 | pip install http-request-randomizer 62 | 63 | Otherwise, you can clone the repository and use setup tools: 64 | 65 | :: 66 | 67 | python setup.py install 68 | 69 | Dev testing 70 | ----------- 71 | 72 | Clone the repo, install requirements, develop, and run tests: 73 | 74 | :: 75 | 76 | pip install -r requirements.txt 77 | tox -e pyDevVerbose 78 | 79 | How to use 80 | ---------- 81 | 82 | - `Command-line interface <#command-line-interface>`__ 83 | - `Library API <#api>`__ 84 | 85 | Command-line interface 86 | ---------------------- 87 | 88 | Assuming that you have **http-request-randomizer** installed, you can 89 | use the commands below: 90 | 91 | show help message: 92 | 93 | :: 94 | 95 | proxyList -h, --help 96 | 97 | specify proxy provider(s) (required): 98 | 99 | :: 100 | 101 | -s {proxyforeu,rebro,samair,freeproxy,all} 102 | 103 | specify output stream (default: sys.stdout), which could also be a file: 104 | 105 | :: 106 | 107 | -o, --outfile 108 | 109 | specify provider timeout threshold in seconds: 110 | 111 | :: 112 | 113 | -t, --timeout 114 | 115 | specify proxy bandwidth threshold in KBs: 116 | 117 | :: 118 | 119 | -bw, --bandwidth 120 | 121 | show program’s version number: 122 | 123 | :: 124 | 125 | -v, --version 126 | 127 | API 128 | --- 129 | 130 | To use **http-request-randomizer** as a library, include it in your 131 | requirements.txt file. Then you can simply generate a proxied request 132 | using a method call: 133 | 134 | .. code:: python 135 | 136 | import logging 137 | import time 138 | from http_request_randomizer.requests.proxy.requestProxy import RequestProxy 139 | 140 | if __name__ == '__main__': 141 | 142 | start = time.time() 143 | req_proxy = RequestProxy(log_level=logging.ERROR) 144 | print("Initialization took: {0} sec".format((time.time() - start))) 145 | print("Size: {0}".format(len(req_proxy.get_proxy_list()))) 146 | print("ALL = {0} ".format(list(map(lambda x: x.get_address(), req_proxy.get_proxy_list())))) 147 | 148 | test_url = 'http://ipv4.icanhazip.com' 149 | 150 | while True: 151 | start = time.time() 152 | request = req_proxy.generate_proxied_request(test_url) 153 | print("Proxied Request Took: {0} sec => Status: {1}".format((time.time() - start), request.__str__())) 154 | if request is not None: 155 | print("\t Response: ip={0}".format(u''.join(request.text).encode('utf-8'))) 156 | print("Proxy List Size: {0}".format(len(req_proxy.get_proxy_list()))) 157 | 158 | print("-> Going to sleep..") 159 | time.sleep(10) 160 | 161 | Changing log levels 162 | ~~~~~~~~~~~~~~~~~~~ 163 | 164 | The ``RequestProxy`` constructor accepts an optional ``log_level`` 165 | parameter that can be used to change the level of logging. By 166 | default, this is equal to 0, or NOTSET. The Python logging levels are 167 | documented 168 | `here <https://docs.python.org/3/library/logging.html#logging-levels>`__. 169 | You can use either integers or their equivalent constants from the logging 170 | module (e.g. ``logging.DEBUG``, ``logging.ERROR``, etc.) 171 | 172 | Documentation 173 | ------------- 174 | 175 | `http-request-randomizer 176 | documentation <https://pgaref.com/HTTP_Request_Randomizer>`__ 177 | 178 | Contributing 179 | ------------ 180 | 181 | Many thanks to the open-source community for 182 | `contributing <https://github.com/pgaref/HTTP_Request_Randomizer/blob/master/CONTRIBUTORS.md>`__ 183 | to this project! 184 | 185 | Faced an issue? 186 | --------------- 187 | 188 | Open an issue 189 | `here <https://github.com/pgaref/HTTP_Request_Randomizer/issues>`__, and 190 | be as detailed as possible :) 191 | 192 | Feels like a feature is missing? 193 | -------------------------------- 194 | 195 | Feel free to open a ticket! PRs are always welcome! 196 | 197 | License 198 | ------- 199 | 200 | This project is licensed under the terms of the MIT license. 201 | 202 | .. |Build Status| image:: https://github.com/pgaref/http_request_randomizer/workflows/CI/badge.svg 203 | :target: https://github.com/pgaref/http_request_randomizer/actions 204 | .. |codecov| image:: https://codecov.io/gh/pgaref/HTTP_Request_Randomizer/branch/master/graph/badge.svg?token=FjHh47wdYV 205 | :target: https://codecov.io/gh/pgaref/HTTP_Request_Randomizer 206 | .. |Requirements Status| image:: https://requires.io/github/pgaref/HTTP_Request_Randomizer/requirements.svg?branch=master 207 | :target: https://requires.io/github/pgaref/HTTP_Request_Randomizer/requirements/?branch=master 208 | ..
|PyPI version| image:: https://badge.fury.io/py/http-request-randomizer.svg 209 | :target: https://badge.fury.io/py/http-request-randomizer 210 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | --- 2 | comment: false 3 | coverage: 4 | status: 5 | patch: 6 | default: 7 | target: "65" 8 | project: 9 | default: 10 | target: "69" -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/.nojekyll -------------------------------------------------------------------------------- /docs/_sources/code.rst.txt: -------------------------------------------------------------------------------- 1 | Code documentation 2 | =================================================== 3 | 4 | .. automodule:: source/modules 5 | :members: -------------------------------------------------------------------------------- /docs/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | .. HTTP Request Randomizer documentation master file, created by 2 | sphinx-quickstart on Fri Oct 16 16:49:09 2020. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | HTTP Request Randomizer's docs 7 | =================================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :Caption: Package: 12 | 13 | source/modules 14 | 15 | .. toctree:: 16 | :maxdepth: 2 17 | :Caption: Instructions: 18 | 19 | readme 20 | 21 | 22 | Indices and tables 23 | ================== 24 | 25 | * :ref:`genindex` 26 | * :ref:`modindex` 27 | * :ref:`search` 28 | -------------------------------------------------------------------------------- /docs/_sources/readme.rst.txt: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.requests.errors.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer.requests.errors package 2 | ================================================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | http\_request\_randomizer.requests.errors.ParserExceptions module 8 | ----------------------------------------------------------------- 9 | 10 | .. automodule:: http_request_randomizer.requests.errors.ParserExceptions 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | http\_request\_randomizer.requests.errors.ProxyListException module 16 | ------------------------------------------------------------------- 17 | 18 | .. automodule:: http_request_randomizer.requests.errors.ProxyListException 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. 
automodule:: http_request_randomizer.requests.errors 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.requests.parsers.js.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer.requests.parsers.js package 2 | ===================================================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | http\_request\_randomizer.requests.parsers.js.UnPacker module 8 | ------------------------------------------------------------- 9 | 10 | .. automodule:: http_request_randomizer.requests.parsers.js.UnPacker 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: http_request_randomizer.requests.parsers.js 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.requests.parsers.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer.requests.parsers package 2 | ================================================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | http_request_randomizer.requests.parsers.js 11 | 12 | Submodules 13 | ---------- 14 | 15 | http\_request\_randomizer.requests.parsers.FreeProxyParser module 16 | ----------------------------------------------------------------- 17 | 18 | .. automodule:: http_request_randomizer.requests.parsers.FreeProxyParser 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | http\_request\_randomizer.requests.parsers.PremProxyParser module 24 | ----------------------------------------------------------------- 25 | 26 | .. automodule:: http_request_randomizer.requests.parsers.PremProxyParser 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | http\_request\_randomizer.requests.parsers.ProxyForEuParser module 32 | ------------------------------------------------------------------ 33 | 34 | .. automodule:: http_request_randomizer.requests.parsers.ProxyForEuParser 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | http\_request\_randomizer.requests.parsers.RebroWeeblyParser module 40 | ------------------------------------------------------------------- 41 | 42 | .. automodule:: http_request_randomizer.requests.parsers.RebroWeeblyParser 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | http\_request\_randomizer.requests.parsers.SslProxyParser module 48 | ---------------------------------------------------------------- 49 | 50 | .. automodule:: http_request_randomizer.requests.parsers.SslProxyParser 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | http\_request\_randomizer.requests.parsers.UrlParser module 56 | ----------------------------------------------------------- 57 | 58 | .. automodule:: http_request_randomizer.requests.parsers.UrlParser 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | Module contents 64 | --------------- 65 | 66 | .. 
automodule:: http_request_randomizer.requests.parsers 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.requests.proxy.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer.requests.proxy package 2 | ================================================ 3 | 4 | Submodules 5 | ---------- 6 | 7 | http\_request\_randomizer.requests.proxy.ProxyObject module 8 | ----------------------------------------------------------- 9 | 10 | .. automodule:: http_request_randomizer.requests.proxy.ProxyObject 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | http\_request\_randomizer.requests.proxy.requestProxy module 16 | ------------------------------------------------------------ 17 | 18 | .. automodule:: http_request_randomizer.requests.proxy.requestProxy 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: http_request_randomizer.requests.proxy 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.requests.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer.requests package 2 | ========================================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | http_request_randomizer.requests.errors 11 | http_request_randomizer.requests.parsers 12 | http_request_randomizer.requests.proxy 13 | http_request_randomizer.requests.runners 14 | http_request_randomizer.requests.useragent 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: http_request_randomizer.requests 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.requests.runners.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer.requests.runners package 2 | ================================================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | http\_request\_randomizer.requests.runners.proxyList module 8 | ----------------------------------------------------------- 9 | 10 | .. automodule:: http_request_randomizer.requests.runners.proxyList 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: http_request_randomizer.requests.runners 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.requests.useragent.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer.requests.useragent package 2 | ==================================================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | http\_request\_randomizer.requests.useragent.userAgent module 8 | ------------------------------------------------------------- 9 | 10 | .. automodule:: http_request_randomizer.requests.useragent.userAgent 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. 
automodule:: http_request_randomizer.requests.useragent 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/_sources/source/http_request_randomizer.rst.txt: -------------------------------------------------------------------------------- 1 | http\_request\_randomizer package 2 | ================================= 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | http_request_randomizer.requests 11 | 12 | Module contents 13 | --------------- 14 | 15 | .. automodule:: http_request_randomizer 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | -------------------------------------------------------------------------------- /docs/_sources/source/modules.rst.txt: -------------------------------------------------------------------------------- 1 | http_request_randomizer 2 | ======================= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | http_request_randomizer 8 | -------------------------------------------------------------------------------- /docs/_static/css/badge_only.css: -------------------------------------------------------------------------------- 1 | .fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions 
.rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/Roboto-Slab-Bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/Roboto-Slab-Regular.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/fontawesome-webfont.woff 
-------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-bold-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-bold-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-normal-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-normal-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-normal.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/css/fonts/lato-normal.woff2 -------------------------------------------------------------------------------- /docs/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 
6 | * 7 | * :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | */ 33 | jQuery.urldecode = function(x) { 34 | return decodeURIComponent(x).replace(/\+/g, ' '); 35 | }; 36 | 37 | /** 38 | * small helper function to urlencode strings 39 | */ 40 | jQuery.urlencode = encodeURIComponent; 41 | 42 | /** 43 | * This function returns the parsed url parameters of the 44 | * current request. Multiple values per key are supported, 45 | * it will always return arrays of strings for the value parts. 46 | */ 47 | jQuery.getQueryParameters = function(s) { 48 | if (typeof s === 'undefined') 49 | s = document.location.search; 50 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 51 | var result = {}; 52 | for (var i = 0; i < parts.length; i++) { 53 | var tmp = parts[i].split('=', 2); 54 | var key = jQuery.urldecode(tmp[0]); 55 | var value = jQuery.urldecode(tmp[1]); 56 | if (key in result) 57 | result[key].push(value); 58 | else 59 | result[key] = [value]; 60 | } 61 | return result; 62 | }; 63 | 64 | /** 65 | * highlight a given string on a jquery object by wrapping it in 66 | * span elements with the given class name. 
67 | */ 68 | jQuery.fn.highlightText = function(text, className) { 69 | function highlight(node, addItems) { 70 | if (node.nodeType === 3) { 71 | var val = node.nodeValue; 72 | var pos = val.toLowerCase().indexOf(text); 73 | if (pos >= 0 && 74 | !jQuery(node.parentNode).hasClass(className) && 75 | !jQuery(node.parentNode).hasClass("nohighlight")) { 76 | var span; 77 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 78 | if (isInSVG) { 79 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 80 | } else { 81 | span = document.createElement("span"); 82 | span.className = className; 83 | } 84 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 85 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 86 | document.createTextNode(val.substr(pos + text.length)), 87 | node.nextSibling)); 88 | node.nodeValue = val.substr(0, pos); 89 | if (isInSVG) { 90 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 91 | var bbox = node.parentElement.getBBox(); 92 | rect.x.baseVal.value = bbox.x; 93 | rect.y.baseVal.value = bbox.y; 94 | rect.width.baseVal.value = bbox.width; 95 | rect.height.baseVal.value = bbox.height; 96 | rect.setAttribute('class', className); 97 | addItems.push({ 98 | "parent": node.parentNode, 99 | "target": rect}); 100 | } 101 | } 102 | } 103 | else if (!jQuery(node).is("button, select, textarea")) { 104 | jQuery.each(node.childNodes, function() { 105 | highlight(this, addItems); 106 | }); 107 | } 108 | } 109 | var addItems = []; 110 | var result = this.each(function() { 111 | highlight(this, addItems); 112 | }); 113 | for (var i = 0; i < addItems.length; ++i) { 114 | jQuery(addItems[i].parent).before(addItems[i].target); 115 | } 116 | return result; 117 | }; 118 | 119 | /* 120 | * backward compatibility for jQuery.browser 121 | * This will be supported until firefox bug is fixed. 122 | */ 123 | if (!jQuery.browser) { 124 | jQuery.uaMatch = function(ua) { 125 | ua = ua.toLowerCase(); 126 | 127 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 128 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 129 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 130 | /(msie) ([\w.]+)/.exec(ua) || 131 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 132 | []; 133 | 134 | return { 135 | browser: match[ 1 ] || "", 136 | version: match[ 2 ] || "0" 137 | }; 138 | }; 139 | jQuery.browser = {}; 140 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 141 | } 142 | 143 | /** 144 | * Small JavaScript module for the documentation. 145 | */ 146 | var Documentation = { 147 | 148 | init : function() { 149 | this.fixFirefoxAnchorBug(); 150 | this.highlightSearchWords(); 151 | this.initIndexTable(); 152 | if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) { 153 | this.initOnKeyListeners(); 154 | } 155 | }, 156 | 157 | /** 158 | * i18n support 159 | */ 160 | TRANSLATIONS : {}, 161 | PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, 162 | LOCALE : 'unknown', 163 | 164 | // gettext and ngettext don't access this so that the functions 165 | // can safely bound to a different name (_ = Documentation.gettext) 166 | gettext : function(string) { 167 | var translated = Documentation.TRANSLATIONS[string]; 168 | if (typeof translated === 'undefined') 169 | return string; 170 | return (typeof translated === 'string') ? 
translated : translated[0]; 171 | }, 172 | 173 | ngettext : function(singular, plural, n) { 174 | var translated = Documentation.TRANSLATIONS[singular]; 175 | if (typeof translated === 'undefined') 176 | return (n == 1) ? singular : plural; 177 | return translated[Documentation.PLURALEXPR(n)]; 178 | }, 179 | 180 | addTranslations : function(catalog) { 181 | for (var key in catalog.messages) 182 | this.TRANSLATIONS[key] = catalog.messages[key]; 183 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 184 | this.LOCALE = catalog.locale; 185 | }, 186 | 187 | /** 188 | * add context elements like header anchor links 189 | */ 190 | addContextElements : function() { 191 | $('div[id] > :header:first').each(function() { 192 | $('\u00B6'). 193 | attr('href', '#' + this.id). 194 | attr('title', _('Permalink to this headline')). 195 | appendTo(this); 196 | }); 197 | $('dt[id]').each(function() { 198 | $('\u00B6'). 199 | attr('href', '#' + this.id). 200 | attr('title', _('Permalink to this definition')). 201 | appendTo(this); 202 | }); 203 | }, 204 | 205 | /** 206 | * workaround a firefox stupidity 207 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 208 | */ 209 | fixFirefoxAnchorBug : function() { 210 | if (document.location.hash && $.browser.mozilla) 211 | window.setTimeout(function() { 212 | document.location.href += ''; 213 | }, 10); 214 | }, 215 | 216 | /** 217 | * highlight the search words provided in the url in the text 218 | */ 219 | highlightSearchWords : function() { 220 | var params = $.getQueryParameters(); 221 | var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; 222 | if (terms.length) { 223 | var body = $('div.body'); 224 | if (!body.length) { 225 | body = $('body'); 226 | } 227 | window.setTimeout(function() { 228 | $.each(terms, function() { 229 | body.highlightText(this.toLowerCase(), 'highlighted'); 230 | }); 231 | }, 10); 232 | $('') 234 | .appendTo($('#searchbox')); 235 | } 236 | }, 237 | 238 | /** 239 | * init the domain index toggle buttons 240 | */ 241 | initIndexTable : function() { 242 | var togglers = $('img.toggler').click(function() { 243 | var src = $(this).attr('src'); 244 | var idnum = $(this).attr('id').substr(7); 245 | $('tr.cg-' + idnum).toggle(); 246 | if (src.substr(-9) === 'minus.png') 247 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 248 | else 249 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 250 | }).css('display', ''); 251 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 252 | togglers.click(); 253 | } 254 | }, 255 | 256 | /** 257 | * helper function to hide the search marks again 258 | */ 259 | hideSearchWords : function() { 260 | $('#searchbox .highlight-link').fadeOut(300); 261 | $('span.highlighted').removeClass('highlighted'); 262 | }, 263 | 264 | /** 265 | * make the url absolute 266 | */ 267 | makeURL : function(relativeURL) { 268 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 269 | }, 270 | 271 | /** 272 | * get the current relative url 273 | */ 274 | getCurrentURL : function() { 275 | var path = document.location.pathname; 276 | var parts = path.split(/\//); 277 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 278 | if (this === '..') 279 | parts.pop(); 280 | }); 281 | var url = parts.join('/'); 282 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 283 | }, 284 | 285 | initOnKeyListeners: function() { 286 | $(document).keydown(function(event) { 287 | var activeElementType = document.activeElement.tagName; 
288 | // don't navigate when in search box or textarea 289 | if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT' 290 | && !event.altKey && !event.ctrlKey && !event.metaKey && !event.shiftKey) { 291 | switch (event.keyCode) { 292 | case 37: // left 293 | var prevHref = $('link[rel="prev"]').prop('href'); 294 | if (prevHref) { 295 | window.location.href = prevHref; 296 | return false; 297 | } 298 | case 39: // right 299 | var nextHref = $('link[rel="next"]').prop('href'); 300 | if (nextHref) { 301 | window.location.href = nextHref; 302 | return false; 303 | } 304 | } 305 | } 306 | }); 307 | } 308 | }; 309 | 310 | // quick alias for translations 311 | _ = Documentation.gettext; 312 | 313 | $(document).ready(function() { 314 | Documentation.init(); 315 | }); 316 | -------------------------------------------------------------------------------- /docs/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: '1.3.1', 4 | LANGUAGE: 'None', 5 | COLLAPSE_INDEX: false, 6 | BUILDER: 'html', 7 | FILE_SUFFIX: '.html', 8 | LINK_SUFFIX: '.html', 9 | HAS_SOURCE: true, 10 | SOURCELINK_SUFFIX: '.txt', 11 | NAVIGATION_WITH_KEYS: false 12 | }; -------------------------------------------------------------------------------- /docs/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/file.png -------------------------------------------------------------------------------- /docs/_static/fonts/FontAwesome.otf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/FontAwesome.otf -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-bold.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bold.eot -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bold.ttf -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bold.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bold.woff2 -------------------------------------------------------------------------------- 
/docs/_static/fonts/Lato/lato-bolditalic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bolditalic.eot -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-bolditalic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bolditalic.ttf -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-bolditalic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bolditalic.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-bolditalic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-bolditalic.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-italic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-italic.eot -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-italic.ttf -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-italic.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-regular.eot -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-regular.ttf -------------------------------------------------------------------------------- 
/docs/_static/fonts/Lato/lato-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-regular.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Lato/lato-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Lato/lato-regular.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Bold.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Bold.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Light.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Light.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Light.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Light.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Regular.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Regular.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Thin.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Thin.woff -------------------------------------------------------------------------------- /docs/_static/fonts/Roboto-Slab-Thin.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/Roboto-Slab-Thin.woff2 -------------------------------------------------------------------------------- 
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot -------------------------------------------------------------------------------- /docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf -------------------------------------------------------------------------------- /docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff -------------------------------------------------------------------------------- /docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot -------------------------------------------------------------------------------- /docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf -------------------------------------------------------------------------------- /docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff -------------------------------------------------------------------------------- /docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/_static/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/_static/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/_static/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/lato-bold-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-bold-italic.woff -------------------------------------------------------------------------------- /docs/_static/fonts/lato-bold-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-bold-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-bold.woff -------------------------------------------------------------------------------- /docs/_static/fonts/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/lato-normal-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-normal-italic.woff -------------------------------------------------------------------------------- /docs/_static/fonts/lato-normal-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-normal-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/fonts/lato-normal.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-normal.woff -------------------------------------------------------------------------------- /docs/_static/fonts/lato-normal.woff2: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/_static/fonts/lato-normal.woff2 -------------------------------------------------------------------------------- /docs/_static/js/badge_only.js: -------------------------------------------------------------------------------- 1 | !function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=4)}({4:function(e,t,r){}}); -------------------------------------------------------------------------------- /docs/_static/js/html5shiv-printshiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @preserve HTML5 Shiv 3.7.3-pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var 
b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /docs/_static/js/html5shiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function 
i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /docs/_static/js/theme.js: -------------------------------------------------------------------------------- 1 | !function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 
0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}t.length>0&&($(".wy-menu-vertical .current").removeClass("current"),t.addClass("current"),t.closest("li.toctree-l1").addClass("current"),t.closest("li.toctree-l1").parent().addClass("current"),t.closest("li.toctree-l1").addClass("current"),t.closest("li.toctree-l2").addClass("current"),t.closest("li.toctree-l3").addClass("current"),t.closest("li.toctree-l4").addClass("current"),t.closest("li.toctree-l5").addClass("current"),t[0].scrollIntoView())}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current"),e.siblings().find("li.current").removeClass("current"),e.find("> ul li.current").removeClass("current"),e.toggleClass("current")}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t 4 | 5 | 6 | 7 | 8 | 9 | 10 | Code documentation — HTTP Request Randomizer 1.3.1 documentation 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 |
--------------------------------------------------------------------------------
/docs/code.html:
--------------------------------------------------------------------------------
Code documentation

© Copyright 2020, Panagiotis Garefalakis
Built with Sphinx using a theme provided by Read the Docs.
--------------------------------------------------------------------------------
/docs/index.html:
--------------------------------------------------------------------------------
HTTP Request Randomizer’s docs — HTTP Request Randomizer 1.3.1 documentation
HTTP Request Randomizer’s docs

Package:
Instructions:
Indices and tables

© Copyright 2020, Panagiotis Garefalakis
Built with Sphinx using a theme provided by Read the Docs.
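A minimal usage sketch for the package these pages document, assuming the RequestProxy API listed under http_request_randomizer.requests.proxy.requestProxy; the log_level argument and the target URL are illustrative choices, not taken from this tree:

    import logging

    from http_request_randomizer.requests.proxy.requestProxy import RequestProxy

    # Scrape the supported proxy providers to build a pool (requires network access)
    req_proxy = RequestProxy(log_level=logging.ERROR)
    print("Collected {0} proxies".format(len(req_proxy.get_proxy_list())))

    # Route a request through a randomly chosen proxy/user-agent pair;
    # the call is assumed to return None when no proxied attempt succeeds
    response = req_proxy.generate_proxied_request("http://icanhazip.com")
    if response is not None:
        print(response.text)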
--------------------------------------------------------------------------------
/docs/objects.inv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/docs/objects.inv
--------------------------------------------------------------------------------
/docs/search.html:
--------------------------------------------------------------------------------
Search — HTTP Request Randomizer 1.3.1 documentation
Search

© Copyright 2020, Panagiotis Garefalakis
Built with Sphinx using a theme provided by Read the Docs.
200 | 201 | 202 | 207 | 208 | 209 | 210 | 211 | 212 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | -------------------------------------------------------------------------------- /docs/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({docnames:["index","readme","source/http_request_randomizer","source/http_request_randomizer.requests","source/http_request_randomizer.requests.errors","source/http_request_randomizer.requests.parsers","source/http_request_randomizer.requests.parsers.js","source/http_request_randomizer.requests.proxy","source/http_request_randomizer.requests.runners","source/http_request_randomizer.requests.useragent","source/modules"],envversion:{"sphinx.domains.c":2,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":3,"sphinx.domains.index":1,"sphinx.domains.javascript":2,"sphinx.domains.math":2,"sphinx.domains.python":2,"sphinx.domains.rst":2,"sphinx.domains.std":1,sphinx:56},filenames:["index.rst","readme.rst","source/http_request_randomizer.rst","source/http_request_randomizer.requests.rst","source/http_request_randomizer.requests.errors.rst","source/http_request_randomizer.requests.parsers.rst","source/http_request_randomizer.requests.parsers.js.rst","source/http_request_randomizer.requests.proxy.rst","source/http_request_randomizer.requests.runners.rst","source/http_request_randomizer.requests.useragent.rst","source/modules.rst"],objects:{"":{http_request_randomizer:[2,0,0,"-"]},"http_request_randomizer.requests":{errors:[4,0,0,"-"],parsers:[5,0,0,"-"],proxy:[7,0,0,"-"],runners:[8,0,0,"-"],useragent:[9,0,0,"-"]},"http_request_randomizer.requests.errors":{ParserExceptions:[4,0,0,"-"],ProxyListException:[4,0,0,"-"]},"http_request_randomizer.requests.errors.ParserExceptions":{ParserException:[4,1,1,""]},"http_request_randomizer.requests.errors.ProxyListException":{ProxyListException:[4,1,1,""]},"http_request_randomizer.requests.parsers":{FreeProxyParser:[5,0,0,"-"],PremProxyParser:[5,0,0,"-"],ProxyForEuParser:[5,0,0,"-"],RebroWeeblyParser:[5,0,0,"-"],SslProxyParser:[5,0,0,"-"],UrlParser:[5,0,0,"-"],js:[6,0,0,"-"]},"http_request_randomizer.requests.parsers.FreeProxyParser":{FreeProxyParser:[5,2,1,""]},"http_request_randomizer.requests.parsers.FreeProxyParser.FreeProxyParser":{create_proxy_object:[5,3,1,""],parse_proxyList:[5,3,1,""]},"http_request_randomizer.requests.parsers.PremProxyParser":{PremProxyParser:[5,2,1,""]},"http_request_randomizer.requests.parsers.PremProxyParser.PremProxyParser":{create_proxy_object:[5,3,1,""],get_pagination_set:[5,3,1,""],init_js_unpacker:[5,3,1,""],parse_proxyList:[5,3,1,""]},"http_request_randomizer.requests.parsers.ProxyForEuParser":{ProxyForEuParser:[5,2,1,""]},"http_request_randomizer.requests.parsers.ProxyForEuParser.ProxyForEuParser":{create_proxy_object:[5,3,1,""],parse_proxyList:[5,3,1,""]},"http_request_randomizer.requests.parsers.RebroWeeblyParser":{RebroWeeblyParser:[5,2,1,""]},"http_request_randomizer.requests.parsers.RebroWeeblyParser.RebroWeeblyParser":{create_proxy_object:[5,3,1,""],parse_proxyList:[5,3,1,""]},"http_request_randomizer.requests.parsers.SslProxyParser":{SslProxyParser:[5,2,1,""]},"http_request_randomizer.requests.parsers.SslProxyParser.SslProxyParser":{create_proxy_object:[5,3,1,""],parse_proxyList:[5,3,1,""]},"http_request_randomizer.requests.parsers.UrlParser":{UrlParser:[5,2,1,""]},"http_request_randomizer.requests.parsers.UrlParser.UrlParser":{get_id:[5,3,1,""],get_min_bandwidth:[5,3,1,""],get_url:[5,3,1,""],parse_pr
oxyList:[5,3,1,""],valid_ip:[5,3,1,""],valid_ip_port:[5,3,1,""],valid_port:[5,3,1,""]},"http_request_randomizer.requests.parsers.js":{UnPacker:[6,0,0,"-"]},"http_request_randomizer.requests.parsers.js.UnPacker":{JsUnPacker:[6,2,1,""]},"http_request_randomizer.requests.parsers.js.UnPacker.JsUnPacker":{baseN:[6,3,1,""],get_port:[6,3,1,""],get_ports:[6,3,1,""],unpack:[6,3,1,""]},"http_request_randomizer.requests.proxy":{ProxyObject:[7,0,0,"-"],requestProxy:[7,0,0,"-"]},"http_request_randomizer.requests.proxy.ProxyObject":{AnonymityLevel:[7,2,1,""],Protocol:[7,2,1,""],ProxyObject:[7,2,1,""]},"http_request_randomizer.requests.proxy.ProxyObject.AnonymityLevel":{ANONYMOUS:[7,4,1,""],ELITE:[7,4,1,""],TRANSPARENT:[7,4,1,""],UNKNOWN:[7,4,1,""],get:[7,3,1,""]},"http_request_randomizer.requests.proxy.ProxyObject.Protocol":{HTTP:[7,4,1,""],HTTPS:[7,4,1,""],SOCS4:[7,4,1,""],SOCS5:[7,4,1,""],UNKNOWN:[7,4,1,""]},"http_request_randomizer.requests.proxy.ProxyObject.ProxyObject":{get_address:[7,3,1,""],to_str:[7,3,1,""]},"http_request_randomizer.requests.proxy.requestProxy":{RequestProxy:[7,2,1,""]},"http_request_randomizer.requests.proxy.requestProxy.RequestProxy":{generate_proxied_request:[7,3,1,""],generate_random_request_headers:[7,3,1,""],get_proxy_list:[7,3,1,""],randomize_proxy:[7,3,1,""],set_logger_level:[7,3,1,""]},"http_request_randomizer.requests.runners":{proxyList:[8,0,0,"-"]},"http_request_randomizer.requests.runners.proxyList":{ProxyList:[8,2,1,""],create_parser:[8,5,1,""],main:[8,5,1,""],run:[8,5,1,""]},"http_request_randomizer.requests.runners.proxyList.ProxyList":{get_source_options:[8,3,1,""]},"http_request_randomizer.requests.useragent":{userAgent:[9,0,0,"-"]},"http_request_randomizer.requests.useragent.userAgent":{UserAgentManager:[9,2,1,""]},"http_request_randomizer.requests.useragent.userAgent.UserAgentManager":{get_first_user_agent:[9,3,1,""],get_last_user_agent:[9,3,1,""],get_len_user_agent:[9,3,1,""],get_random_user_agent:[9,3,1,""],load_user_agents:[9,3,1,""]},http_request_randomizer:{requests:[3,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","exception","Python exception"],"2":["py","class","Python class"],"3":["py","method","Python method"],"4":["py","attribute","Python attribute"],"5":["py","function","Python 
function"]},objtypes:{"0":"py:module","1":"py:exception","2":"py:class","3":"py:method","4":"py:attribute","5":"py:function"},terms:{"0123456789abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz":6,"900":1,"abstract":5,"class":[5,6,7,8,9],"default":1,"enum":7,"import":1,"public":1,"return":[1,5],"static":5,"true":[1,5],"while":1,But:1,Going:1,IPs:1,KBs:[1,5],One:1,PRs:1,The:[0,6,7],Then:[1,6],__main__:1,__name__:1,__str__:1,accept:1,add:[5,7],address:[1,5,7],after:1,agent:[0,9],all:1,also:1,alwai:1,ani:[5,7],anonym:[1,5,7],anonymity_level:7,anonymitylevel:7,api:0,applic:1,arg:8,around:1,assum:1,attack:1,attribut:5,avoid:5,back:1,bandwidth:[1,5,8],bandwidth_kb:5,bandwidthkb:5,base:[4,5,6,7,8,9],basen:6,been:1,below:1,block:1,bool:5,browser:1,call:1,can:1,capabl:7,circl:1,classmethod:7,clear:7,cli:1,client:1,clone:1,code:0,collect:1,com:[1,6],command:0,commun:1,conduct:1,configur:1,constant:1,constructor:1,contact:1,contain:[1,5,6],content:10,contribut:0,conveni:1,could:1,countri:[5,7],crawl:1,creat:6,create_pars:8,create_proxy_object:5,data:[1,7],dataset:5,deal:1,debug:1,defin:1,delet:1,deploy:1,detail:1,dev:0,develop:1,dictionari:6,differ:1,document:0,doe:[6,7],done:1,each:1,either:1,elit:7,encod:1,encrypt:6,enumer:7,equal:1,equival:1,error:[1,2,3],etc:1,even:1,everi:6,except:4,expos:7,extra:5,face:0,fail:1,fallback:9,fals:[5,7],featur:0,feel:0,file:[1,6,9],filter:1,five:1,form:6,format:1,forward:7,free:1,freeproxi:1,freeproxypars:[2,3],from:[1,5],gener:1,generate_proxied_request:[1,7],generate_random_request_head:7,geo:1,get:[1,7],get_address:[1,7],get_first_user_ag:9,get_id:5,get_last_user_ag:9,get_len_user_ag:9,get_min_bandwidth:5,get_pagination_set:5,get_port:6,get_proxy_list:[1,7],get_random_user_ag:9,get_source_opt:8,get_url:5,given:5,global:1,has:[1,6],have:[1,5],header:[1,7],help:1,here:1,hhtp:5,hide:[1,7],how:0,htm:1,html:[1,6],http:[6,7],http_request_random:[0,1],icanhazip:1,ident:1,identifi:5,implement:[1,5],includ:1,index:0,info:5,inform:5,init_js_unpack:5,initi:1,instal:0,instead:1,instruct:0,integ:1,intens:1,interfac:0,ipv4:[1,5],issu:0,javascript:6,join:1,js_file_url:6,jsunpack:6,just:[1,5,6],kei:6,know:1,known:1,lambda:1,len:1,level:7,librari:1,licens:0,like:[0,6],line:[0,9],list:1,load_user_ag:9,log_level:[1,7],look:1,made:7,main:8,make:[1,7],mani:1,map:1,matthewfl:6,mention:1,messag:1,method:[1,5,7],middleman:1,minimum:5,miss:0,mit:1,modul:[0,1,10],more:1,most:1,name:7,nefari:1,net:1,none:[1,5,6,7,9],notset:1,num:6,number:[1,6],numer:6,object:[5,6,7,8,9],often:1,one:[1,9],ones:1,onli:1,open:1,oper:1,option:1,org:1,otherwis:1,outfil:1,output:1,over:1,packag:[0,10],page:0,pair:6,param:[5,7],paramet:[1,5],parse_proxylist:5,parser:[2,3],parserexcept:[2,3],path:9,per:9,php:1,pip:1,popular:1,port:[5,6,7],pose:1,possibl:1,premproxypars:[2,3],presenc:1,prime:1,print:1,produc:1,program:1,project:1,protocol:[1,7],provid:[1,5],proxi:[0,2,3,5],proxyfor:1,proxyforeu:1,proxyforeupars:[2,3],proxylist:[1,2,3],proxylistexcept:[2,3],proxyobject:[2,3],pydevverbos:1,python:1,queri:1,randomis:1,randomize_proxi:7,randomli:1,readabl:6,readm:[],rebro:1,rebroweeblypars:[2,3],relat:7,repo:1,repositori:1,repres:5,req_proxi:1,req_timeout:7,request:[2,10],requestproxi:[1,2,3],respons:1,rout:1,row:5,run:[1,8],runner:[2,3],samair:1,scrap:1,script:1,search:0,sec:1,second:1,select:1,semant:1,server:1,set_logger_level:7,setup:1,should:1,show:1,simpl:1,simpli:1,simul:1,size:1,sleep:1,slowest:1,socs4:7,socs5:7,sourc:[0,7],specif:5,specifi:1,sslproxi:1,sslproxypars:[2,3],start:1,stdout:1,store:1,straggl:5,s
tream:1,string:[5,6,9],submodul:[2,3],subpackag:10,support:1,surprisingli:1,sustain:7,sys:1,system:1,take:6,target:1,tell:1,term:1,test:0,test_url:1,text:[1,9],thank:1,them:1,thi:[1,5],thing:1,threshold:1,ticket:1,time:1,timeout:[1,5,7,8],to_str:7,too:1,took:1,tool:1,tox:1,transpar:7,trigger:1,tunnel:7,txt:1,type:[1,5],under:1,uniqu:5,unknown:[1,7],unpack:[3,5],url:[1,5,6,7],urlpars:[2,3],use:0,use_top15k:5,used:1,useful:1,user:[0,9],user_ag:1,userag:[2,3],useragentmanag:9,useragentsfil:9,using:[1,7],utf:1,valid:5,valid_ip:5,valid_ip_port:5,valid_port:5,valu:7,varieti:1,veri:1,via:1,vietnames:1,wai:1,want:1,web:1,web_proxy_list:7,web_url:5,websit:1,weebli:1,welcom:1,well:1,when:[1,5],which:[1,6],widespread:1,wish:1,www:1,you:1,your:1},titles:["HTTP Request Randomizer\u2019s docs","HTTP Request Randomizer ","http_request_randomizer package","http_request_randomizer.requests package","http_request_randomizer.requests.errors package","http_request_randomizer.requests.parsers package","http_request_randomizer.requests.parsers.js package","http_request_randomizer.requests.proxy package","http_request_randomizer.requests.runners package","http_request_randomizer.requests.useragent package","http_request_randomizer"],titleterms:{The:1,agent:1,api:1,build:1,chang:1,code:1,codecov:1,command:1,content:[2,3,4,5,6,7,8,9],contribut:1,dev:1,doc:0,document:1,error:4,face:1,featur:1,feel:1,freeproxypars:5,how:1,http:[0,1],http_request_random:[2,3,4,5,6,7,8,9,10],indic:0,instal:1,interfac:1,issu:1,level:1,licens:1,like:1,line:1,log:1,miss:1,modul:[2,3,4,5,6,7,8,9],packag:[2,3,4,5,6,7,8,9],parser:[5,6],parserexcept:4,premproxypars:5,proxi:[1,7],proxyforeupars:5,proxylist:8,proxylistexcept:4,proxyobject:7,pypi:1,random:[0,1],readm:[],rebroweeblypars:5,request:[0,1,3,4,5,6,7,8,9],requestproxi:7,requir:1,runner:8,sourc:1,sslproxypars:5,statu:1,submodul:[4,5,6,7,8,9],subpackag:[2,3,5],tabl:0,test:1,unpack:6,urlpars:5,use:1,user:1,userag:9,version:1}}) -------------------------------------------------------------------------------- /docs/source/http_request_randomizer.requests.errors.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | http_request_randomizer.requests.errors package — HTTP Request Randomizer 1.3.1 documentation 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 |
http_request_randomizer.requests.errors package

Submodules

http_request_randomizer.requests.errors.ParserExceptions module

exception http_request_randomizer.requests.errors.ParserExceptions.ParserException
    Bases: Exception

http_request_randomizer.requests.errors.ProxyListException module

exception http_request_randomizer.requests.errors.ProxyListException.ProxyListException
    Bases: Exception

Module contents

© Copyright 2020, Panagiotis Garefalakis
Built with Sphinx using a theme provided by Read the Docs.
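Both classes above are thin Exception subclasses, so provider-parsing failures can be caught explicitly. A small hedged sketch; require_rows is an illustrative helper, not part of the package:

    from http_request_randomizer.requests.errors.ParserExceptions import ParserException

    def require_rows(rows):
        # Surface an empty provider table as the package's parser error
        if not rows:
            raise ParserException("provider returned an empty proxy table")

    try:
        require_rows([])
    except ParserException as err:
        print("parser failed: {0}".format(err))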
--------------------------------------------------------------------------------
/docs/source/http_request_randomizer.requests.runners.html:
--------------------------------------------------------------------------------
http_request_randomizer.requests.runners package — HTTP Request Randomizer 1.3.1 documentation
http_request_randomizer.requests.runners package

Submodules

http_request_randomizer.requests.runners.proxyList module

class http_request_randomizer.requests.runners.proxyList.ProxyList(timeout=1.0, bandwidth=10.0)
    Bases: object

    get_source_options()

http_request_randomizer.requests.runners.proxyList.create_parser(proxyList)

http_request_randomizer.requests.runners.proxyList.main()

http_request_randomizer.requests.runners.proxyList.run(args)

Module contents

© Copyright 2020, Panagiotis Garefalakis
Built with Sphinx using a theme provided by Read the Docs.
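A hedged sketch of driving the list runner programmatically through the names documented above; the constructor defaults come from the documented signature, while the units (seconds, KB/s) are inferred from the parser sources later in this tree, and the exact shape of the returned value is not shown in these docs:

    from http_request_randomizer.requests.runners.proxyList import ProxyList

    # timeout in seconds, minimum provider bandwidth in KB/s (inferred units)
    proxy_list = ProxyList(timeout=1.0, bandwidth=10.0)
    print(proxy_list.get_source_options())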
--------------------------------------------------------------------------------
/docs/source/modules.html:
--------------------------------------------------------------------------------
http_request_randomizer — HTTP Request Randomizer 1.3.1 documentation
http_request_randomizer

© Copyright 2020, Panagiotis Garefalakis
Built with Sphinx using a theme provided by Read the Docs.
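A quick sanity check that ties these generated pages to the sources that follow; the printed value matches the __version__ defined in http_request_randomizer/__init__.py below:

    import http_request_randomizer

    # Prints '1.3.2' for the tree captured in this dump
    print(http_request_randomizer.__version__)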
--------------------------------------------------------------------------------
/http_request_randomizer/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'pgaref'
2 | 
3 | __version__ = '1.3.2'
4 | 
5 | __title__ = 'http_request_randomizer'
6 | __description__ = 'A package using public proxies to randomise http requests'
7 | __uri__ = 'http://pgaref.com/blog/python-proxy'
8 | 
9 | __author__ = 'Panagiotis Garefalakis'
10 | __email__ = 'pangaref@gmail.com'
11 | 
12 | __license__ = 'MIT'
13 | __copyright__ = 'Copyright (c) 2020 ' + __author__
14 | 
--------------------------------------------------------------------------------
/http_request_randomizer/requests/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'pgaref'
2 | 
--------------------------------------------------------------------------------
/http_request_randomizer/requests/errors/ParserExceptions.py:
--------------------------------------------------------------------------------
1 | class ParserException(Exception):
2 |     def __init__(self, extraArguments):
3 |         Exception.__init__(self, " was raised with arguments {0}".format(extraArguments))
4 |         self.dErrorArguments = extraArguments
5 | 
--------------------------------------------------------------------------------
/http_request_randomizer/requests/errors/ProxyListException.py:
--------------------------------------------------------------------------------
1 | class ProxyListException(Exception):
2 |     def __init__(self, extraArguments):
3 |         Exception.__init__(self, " was raised - {0}".format(extraArguments))
4 |         self.dErrorArguments = extraArguments
--------------------------------------------------------------------------------
/http_request_randomizer/requests/errors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/http_request_randomizer/requests/errors/__init__.py
--------------------------------------------------------------------------------
/http_request_randomizer/requests/parsers/FreeProxyParser.py:
--------------------------------------------------------------------------------
1 | import logging
2 | 
3 | import requests
4 | from bs4 import BeautifulSoup
5 | 
6 | from http_request_randomizer.requests.parsers.UrlParser import UrlParser
7 | from http_request_randomizer.requests.proxy.ProxyObject import ProxyObject, AnonymityLevel, Protocol
8 | 
9 | logger = logging.getLogger(__name__)
10 | __author__ = 'pgaref'
11 | 
12 | 
13 | class FreeProxyParser(UrlParser):
14 |     def __init__(self, id, web_url, timeout=None):
15 |         UrlParser.__init__(self, id=id, web_url=web_url, timeout=timeout)
16 | 
17 |     def parse_proxyList(self):
18 |         curr_proxy_list = []
19 |         try:
20 |             response = requests.get(self.get_url(), timeout=self.timeout)
21 |             if not response.ok:
22 |                 logger.warning("Proxy Provider url failed: {}".format(self.get_url()))
23 |                 return []
24 | 
25 |             content = response.content
26 |             soup = BeautifulSoup(content, "html.parser")
27 |             table = soup.find("table", attrs={"id": "proxylisttable"})
28 | 
29 |             # The first tr contains the field names.
30 | headings = [th.get_text() for th in table.find("tr").find_all("th")] 31 | 32 | datasets = [] 33 | for row in table.find_all("tr")[1:-1]: 34 | dataset = zip(headings, (td.get_text() for td in row.find_all("td"))) 35 | if dataset: 36 | datasets.append(dataset) 37 | 38 | for dataset in datasets: 39 | proxy_obj = self.create_proxy_object(dataset) 40 | # Make sure it is a Valid Proxy Address 41 | if proxy_obj is not None and UrlParser.valid_ip_port(proxy_obj.get_address()): 42 | curr_proxy_list.append(proxy_obj) 43 | else: 44 | logger.debug("Proxy Invalid: {}".format(dataset)) 45 | except AttributeError as e: 46 | logger.error("Provider {0} failed with Attribute error: {1}".format(self.id, e)) 47 | except KeyError as e: 48 | logger.error("Provider {0} failed with Key error: {1}".format(self.id, e)) 49 | except Exception as e: 50 | logger.error("Provider {0} failed with Unknown error: {1}".format(self.id, e)) 51 | finally: 52 | return curr_proxy_list 53 | 54 | def create_proxy_object(self, dataset): 55 | # Check Field[0] for tags and field[1] for values! 56 | ip = "" 57 | port = None 58 | anonymity = AnonymityLevel.UNKNOWN 59 | country = None 60 | protocols = [] 61 | for field in dataset: 62 | if field[0] == 'IP Address': 63 | # Make sure it is a Valid IP 64 | ip = field[1].strip() # String strip() 65 | # Make sure it is a Valid IP 66 | if not UrlParser.valid_ip(ip): 67 | logger.debug("IP with Invalid format: {}".format(ip)) 68 | return None 69 | elif field[0] == 'Port': 70 | port = field[1].strip() # String strip() 71 | elif field[0] == 'Anonymity': 72 | anonymity = AnonymityLevel.get(field[1].strip()) # String strip() 73 | elif field[0] == 'Country': 74 | country = field[1].strip() # String strip() 75 | elif field[0] == 'Https': 76 | if field[1].strip().lower() == 'yes': protocols.extend([Protocol.HTTP, Protocol.HTTPS]) 77 | elif field[1].strip().lower() == 'no': protocols.append(Protocol.HTTP) 78 | return ProxyObject(source=self.id, ip=ip, port=port, anonymity_level=anonymity, country=country, protocols=protocols) 79 | 80 | def __str__(self): 81 | return "{0} parser of '{1}' with required bandwidth: '{2}' KBs" \ 82 | .format(self.id, self.url, self.minimum_bandwidth_in_KBs) 83 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/PremProxyParser.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import requests 4 | from bs4 import BeautifulSoup 5 | 6 | from http_request_randomizer.requests.parsers.js.UnPacker import JsUnPacker 7 | from http_request_randomizer.requests.parsers.UrlParser import UrlParser 8 | from http_request_randomizer.requests.proxy.ProxyObject import ProxyObject, AnonymityLevel, Protocol 9 | 10 | logger = logging.getLogger(__name__) 11 | __author__ = 'pgaref' 12 | 13 | 14 | # Samair Proxy now renamed to: premproxy.com 15 | class PremProxyParser(UrlParser): 16 | def __init__(self, id, web_url, timeout=None): 17 | self.base_url = web_url 18 | web_url += "/list/" 19 | # Ports decoded by the JS unpacker 20 | self.js_unpacker = None 21 | UrlParser.__init__(self, id=id, web_url=web_url, timeout=timeout) 22 | 23 | def parse_proxyList(self): 24 | curr_proxy_list = [] 25 | try: 26 | # Parse all proxy pages -> format: /list/{num}.htm 27 | # Get the pageRange from the 'pagination' table 28 | page_set = self.get_pagination_set() 29 | logger.debug("Pages: {}".format(page_set)) 30 | # One JS unpacker per provider (not per page) 31 | self.js_unpacker = 
self.init_js_unpacker() 32 | 33 | for page in page_set: 34 | response = requests.get("{0}{1}".format(self.get_url(), page), timeout=self.timeout) 35 | if not response.ok: 36 | # Could not parse ANY page - Let user know 37 | if not curr_proxy_list: 38 | logger.warning("Proxy Provider url failed: {}".format(self.get_url())) 39 | # Return proxies parsed so far 40 | return curr_proxy_list 41 | content = response.content 42 | soup = BeautifulSoup(content, "html.parser", from_encoding="iso-8859-1") 43 | 44 | table = soup.find("div", attrs={"id": "proxylist"}) 45 | # The first tr contains the field names. 46 | headings = [th.get_text() for th in table.find("tr").find_all("th")] 47 | # skip last 'Select All' row 48 | for row in table.find_all("tr")[1:-1]: 49 | td_row = row.find("td") 50 | portKey = td_row.find('span', attrs={'class': True}).get('class')[0] 51 | port = self.js_unpacker.get_port(portKey) 52 | proxy_obj = self.create_proxy_object(row, port) 53 | # Make sure it is a Valid Proxy Address 54 | if proxy_obj is not None and UrlParser.valid_ip(proxy_obj.ip) and UrlParser.valid_port(port): 55 | curr_proxy_list.append(proxy_obj) 56 | else: 57 | logger.debug("Proxy Invalid: {}".format(proxy_obj.to_str())) 58 | except AttributeError as e: 59 | logger.error("Provider {0} failed with Attribute error: {1}".format(self.id, e)) 60 | except KeyError as e: 61 | logger.error("Provider {0} failed with Key error: {1}".format(self.id, e)) 62 | except Exception as e: 63 | logger.error("Provider {0} failed with Unknown error: {1}".format(self.id, e)) 64 | finally: 65 | return curr_proxy_list 66 | 67 | def get_pagination_set(self): 68 | response = requests.get(self.get_url(), timeout=self.timeout) 69 | page_set = set() 70 | # Could not parse pagination page - Let user know 71 | if not response.ok: 72 | logger.warning("Proxy Provider url failed: {}".format(self.get_url())) 73 | return page_set 74 | content = response.content 75 | soup = BeautifulSoup(content, "html.parser") 76 | for ultag in soup.find_all('ul', {'class': 'pagination'}): 77 | for litag in ultag.find_all('li'): 78 | page_ref = litag.a.get('href') 79 | # Skip current page '/list' 80 | if page_ref.endswith(('htm', 'html')): 81 | page_set.add(page_ref) 82 | else: 83 | page_set.add("") 84 | return page_set 85 | 86 | def init_js_unpacker(self): 87 | response = requests.get(self.get_url(), timeout=self.timeout) 88 | # Could not parse provider page - Let user know 89 | if not response.ok: 90 | logger.warning("Proxy Provider url failed: {}".format(self.get_url())) 91 | return None 92 | content = response.content 93 | soup = BeautifulSoup(content, "html.parser") 94 | 95 | # js file contains the values for the ports 96 | for script in soup.findAll('script'): 97 | if '/js/' in script.get('src'): 98 | jsUrl = self.base_url + script.get('src') 99 | return JsUnPacker(jsUrl) 100 | return None 101 | 102 | def create_proxy_object(self, row, port): 103 | for td_row in row.findAll("td"): 104 | if td_row.attrs['data-label'] == 'IP:port ': 105 | text = td_row.text.strip() 106 | ip = text.split(":")[0] 107 | # Make sure it is a Valid IP 108 | if not UrlParser.valid_ip(ip): 109 | logger.debug("IP with Invalid format: {}".format(ip)) 110 | return None 111 | elif td_row.attrs['data-label'] == 'Anonymity Type: ': 112 | anonymity = AnonymityLevel.get(td_row.text.strip()) 113 | elif td_row.attrs['data-label'] == 'Country: ': 114 | country = td_row.text.strip() 115 | protocols = [Protocol.HTTP] 116 | return ProxyObject(source=self.id, ip=ip, port=port, 
anonymity_level=anonymity, country=country, protocols=protocols) 117 | 118 | def __str__(self): 119 | return "{0} parser of '{1}' with required bandwidth: '{2}' KBs" \ 120 | .format(self.id, self.url, self.minimum_bandwidth_in_KBs) 121 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/ProxyForEuParser.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import requests 4 | from bs4 import BeautifulSoup 5 | 6 | from http_request_randomizer.requests.parsers.UrlParser import UrlParser 7 | from http_request_randomizer.requests.proxy.ProxyObject import ProxyObject, AnonymityLevel 8 | 9 | logger = logging.getLogger(__name__) 10 | __author__ = 'pgaref' 11 | 12 | 13 | class ProxyForEuParser(UrlParser): 14 | def __init__(self, id, web_url, bandwidth=None, timeout=None): 15 | UrlParser.__init__(self, id=id, web_url=web_url, bandwidth_KBs=bandwidth, timeout=timeout) 16 | 17 | def parse_proxyList(self): 18 | curr_proxy_list = [] 19 | try: 20 | response = requests.get(self.get_url(), timeout=self.timeout) 21 | 22 | if not response.ok: 23 | logger.warning("Proxy Provider url failed: {}".format(self.get_url())) 24 | return [] 25 | 26 | content = response.content 27 | soup = BeautifulSoup(content, "html.parser") 28 | table = soup.find("table", attrs={"class": "proxy_list"}) 29 | 30 | # The first tr contains the field names. 31 | headings = [th.get_text() for th in table.find("tr").find_all("th")] 32 | 33 | datasets = [] 34 | for row in table.find_all("tr")[1:]: 35 | dataset = zip(headings, (td.get_text() for td in row.find_all("td"))) 36 | datasets.append(dataset) 37 | 38 | for dataset in datasets: 39 | # Avoid Straggler proxies and make sure it is a Valid Proxy Address 40 | proxy_obj = self.create_proxy_object(dataset) 41 | if proxy_obj is not None and UrlParser.valid_ip_port(proxy_obj.get_address()): 42 | curr_proxy_list.append(proxy_obj) 43 | else: 44 | logger.debug("Proxy Invalid: {}".format(dataset)) 45 | except AttributeError as e: 46 | logger.error("Provider {0} failed with Attribute error: {1}".format(self.id, e)) 47 | except KeyError as e: 48 | logger.error("Provider {0} failed with Key error: {1}".format(self.id, e)) 49 | except Exception as e: 50 | logger.error("Provider {0} failed with Unknown error: {1}".format(self.id, e)) 51 | finally: 52 | return curr_proxy_list 53 | 54 | def create_proxy_object(self, dataset): 55 | ip = "" 56 | port = None 57 | anonymity = AnonymityLevel.UNKNOWN 58 | country = None 59 | # Check Field[0] for tags and field[1] for values! 60 | for field in dataset: 61 | # Discard slow proxies! 
Speed is in KB/s 62 | if field[0] == 'Speed': 63 | if float(field[1]) < self.get_min_bandwidth(): 64 | logger.debug("Proxy with low bandwidth: {}".format(float(field[1]))) 65 | return None 66 | if field[0] == 'IP': 67 | ip = field[1].strip() # String strip() 68 | # Make sure it is a Valid IP 69 | if not UrlParser.valid_ip(ip): 70 | logger.debug("IP with Invalid format: {}".format(ip)) 71 | return None 72 | elif field[0] == 'Port': 73 | port = field[1].strip() # String strip() 74 | elif field[0] == 'Anon': 75 | anonymity = AnonymityLevel.get(field[1].strip()) # String strip() 76 | elif field[0] == 'Country': 77 | country = field[1].strip() # String strip() 78 | return ProxyObject(source=self.id, ip=ip, port=port, anonymity_level=anonymity, country=country) 79 | 80 | def __str__(self): 81 | return "ProxyForEU Parser of '{0}' with required bandwidth: '{1}' KBs" \ 82 | .format(self.url, self.minimum_bandwidth_in_KBs) 83 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/RebroWeeblyParser.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import requests 4 | from bs4 import BeautifulSoup 5 | 6 | from http_request_randomizer.requests.parsers.UrlParser import UrlParser 7 | from http_request_randomizer.requests.proxy.ProxyObject import ProxyObject, AnonymityLevel 8 | 9 | logger = logging.getLogger(__name__) 10 | __author__ = 'pgaref' 11 | 12 | 13 | class RebroWeeblyParser(UrlParser): 14 | def __init__(self, id, web_url, timeout=None): 15 | self.top_proxy_path = "proxy-list.html" 16 | self.txt_proxy_path = "txt-lists.html" 17 | UrlParser.__init__(self, id=id, web_url=web_url, timeout=timeout) 18 | 19 | def parse_proxyList(self, use_top15k=False): 20 | curr_proxy_list = [] 21 | try: 22 | response = requests.get(self.get_url() + "/" + self.top_proxy_path, timeout=self.timeout) 23 | 24 | if not response.ok: 25 | logger.warning("Proxy Provider url failed: {}".format(self.get_url())) 26 | return [] 27 | 28 | content = response.content 29 | soup = BeautifulSoup(content, "html.parser") 30 | all_divs = soup.findAll("div", attrs={"class": "paragraph", 'style': "text-align:left;"}) 31 | # address_table = soup.find("div", attrs={"class": "paragraph", 'style': "text-align:left;"}) 32 | # .find('font', attrs={'color': '#33a27f'}) 33 | # Parse Top Proxy List page 34 | address_list = [] 35 | country_list = [] 36 | anonymity_list = [] 37 | for div in all_divs: 38 | address_div = div.find('font', attrs={'color': '#33a27f'}) 39 | if address_div is not None: 40 | for row in [x for x in address_div.contents if getattr(x, 'name', None) != 'br']: 41 | address_list.append(str(row)) 42 | curr_div = div.findAll('font', attrs={'size': '2'}) 43 | if curr_div[0] is not None: 44 | row_data = [] 45 | # font -> strong -> font 46 | title = curr_div[0].contents[0].contents[0].contents[0] 47 | for row in [x for x in curr_div[-1].contents if getattr(x, 'name', None) != 'br']: 48 | row_data.append(str(row)) 49 | if 'Country' in str(title): 50 | country_list.extend(row_data) 51 | if 'Status' in str(title): 52 | anonymity_list.extend(row_data) 53 | for address, country, anonymity in zip(address_list, country_list, anonymity_list): 54 | # Make sure it is a Valid Proxy Address 55 | proxy_obj = self.create_proxy_object(address, country, anonymity) 56 | if proxy_obj is not None and UrlParser.valid_ip_port(proxy_obj.get_address()): 57 | curr_proxy_list.append(proxy_obj) 58 | else: 59 | logger.debug("Proxy 
Invalid: {}".format(row)) 60 | # Usually these proxies are stale 61 | if use_top15k: 62 | # Parse 15k Nodes Text file (named *-all-*.txt) 63 | content = requests.get(self.get_url() + "/" + self.txt_proxy_path).content 64 | soup = BeautifulSoup(content, "html.parser") 65 | table = soup.find("div", attrs={"class": "wsite-multicol-table-wrap"}) 66 | for link in table.findAll('a'): 67 | current_link = link.get('href') 68 | if current_link is not None and "all" in current_link: 69 | self.txt_proxy_path = current_link 70 | more_content = requests.get(self.get_url() + self.txt_proxy_path).text 71 | for proxy_address in more_content.split(): 72 | if UrlParser.valid_ip_port(proxy_address): 73 | proxy_obj = self.create_proxy_object(row) 74 | curr_proxy_list.append(proxy_obj) 75 | except AttributeError as e: 76 | logger.error("Provider {0} failed with Attribute error: {1}".format(self.id, e)) 77 | except KeyError as e: 78 | logger.error("Provider {0} failed with Key error: {1}".format(self.id, e)) 79 | except Exception as e: 80 | logger.error("Provider {0} failed with Unknown error: {1}".format(self.id, e)) 81 | finally: 82 | return curr_proxy_list 83 | 84 | def create_proxy_object(self, address, country, anonymity): 85 | # Make sure it is a Valid IP 86 | ip = address.strip().split(":")[0] 87 | if not UrlParser.valid_ip(ip): 88 | logger.debug("IP with Invalid format: {}".format(ip)) 89 | return None 90 | port = address.strip().split(":")[1] 91 | country = country.strip() 92 | anonymity = AnonymityLevel.get(anonymity.strip()) 93 | 94 | return ProxyObject(source=self.id, ip=ip, port=port, anonymity_level=anonymity, country=country) 95 | 96 | def __str__(self): 97 | return "RebroWeebly Parser of '{0}' with required bandwidth: '{1}' KBs" \ 98 | .format(self.url, self.minimum_bandwidth_in_KBs) 99 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/SslProxyParser.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import requests 4 | from bs4 import BeautifulSoup 5 | 6 | from http_request_randomizer.requests.parsers.UrlParser import UrlParser 7 | from http_request_randomizer.requests.proxy.ProxyObject import ProxyObject, AnonymityLevel, Protocol 8 | 9 | logger = logging.getLogger(__name__) 10 | __author__ = 'pgaref' 11 | 12 | 13 | class SslProxyParser(UrlParser): 14 | def __init__(self, id, web_url, timeout=None): 15 | UrlParser.__init__(self, id=id, web_url=web_url, timeout=timeout) 16 | 17 | def parse_proxyList(self): 18 | curr_proxy_list = [] 19 | try: 20 | response = requests.get(self.get_url(), timeout=self.timeout) 21 | if not response.ok: 22 | logger.warning("Proxy Provider url failed: {}".format(self.get_url())) 23 | return [] 24 | 25 | content = response.content 26 | soup = BeautifulSoup(content, "html.parser") 27 | table = soup.find("table", attrs={"id": "proxylisttable"}) 28 | 29 | # The first tr contains the field names. 
30 | headings = [th.get_text() for th in table.find("tr").find_all("th")] 31 | 32 | datasets = [] 33 | for row in table.find_all("tr")[1:-1]: 34 | dataset = zip(headings, (td.get_text() for td in row.find_all("td"))) 35 | if dataset: 36 | datasets.append(dataset) 37 | 38 | for dataset in datasets: 39 | proxy_obj = self.create_proxy_object(dataset) 40 | # Make sure it is a Valid Proxy Address 41 | if proxy_obj is not None and UrlParser.valid_ip_port(proxy_obj.get_address()): 42 | curr_proxy_list.append(proxy_obj) 43 | else: 44 | logger.debug("Proxy Invalid: {}".format(dataset)) 45 | except AttributeError as e: 46 | logger.error("Provider {0} failed with Attribute error: {1}".format(self.id, e)) 47 | except KeyError as e: 48 | logger.error("Provider {0} failed with Key error: {1}".format(self.id, e)) 49 | except Exception as e: 50 | logger.error("Provider {0} failed with Unknown error: {1}".format(self.id, e)) 51 | finally: 52 | return curr_proxy_list 53 | 54 | def create_proxy_object(self, dataset): 55 | # Check Field[0] for tags and field[1] for values! 56 | ip = "" 57 | port = None 58 | anonymity = AnonymityLevel.UNKNOWN 59 | country = None 60 | protocols = [] 61 | for field in dataset: 62 | if field[0] == 'IP Address': 63 | # Make sure it is a Valid IP 64 | ip = field[1].strip() # String strip() 65 | # Make sure it is a Valid IP 66 | if not UrlParser.valid_ip(ip): 67 | logger.debug("IP with Invalid format: {}".format(ip)) 68 | return None 69 | elif field[0] == 'Port': 70 | port = field[1].strip() # String strip() 71 | elif field[0] == 'Anonymity': 72 | anonymity = AnonymityLevel.get(field[1].strip()) # String strip() 73 | elif field[0] == 'Country': 74 | country = field[1].strip() # String strip() 75 | elif field[0] == 'Https': 76 | if field[1].strip().lower() == 'yes': protocols.extend([Protocol.HTTP, Protocol.HTTPS]) 77 | elif field[1].strip().lower() == 'no': protocols.append(Protocol.HTTP) 78 | return ProxyObject(source=self.id, ip=ip, port=port, anonymity_level=anonymity, country=country, protocols=protocols) 79 | 80 | def __str__(self): 81 | return "{0} parser of '{1}' with required bandwidth: '{2}' KBs" \ 82 | .format(self.id, self.url, self.minimum_bandwidth_in_KBs) 83 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/UrlParser.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from http_request_randomizer.requests.errors.ParserExceptions import ParserException 4 | 5 | __author__ = 'pgaref' 6 | 7 | 8 | class UrlParser(object): 9 | """ 10 | An abstract class representing any URL containing Proxy information. 11 | To add an extra Proxy URL, just implement this class and provide a 'url specific' parse_proxyList method 12 | 13 | Attributes: 14 | :param id: A unique provider identifier 15 | :param web_url: A provider url (http) 16 | :param bandwidth_KBs: minimum bandwidth in KBs (to avoid straggling proxies when having the extra info from proxy provider) 17 | """ 18 | 19 | def __init__(self, id, web_url, bandwidth_KBs=None, timeout=None): 20 | self.id = id 21 | self.url = web_url 22 | self.timeout = timeout 23 | if bandwidth_KBs is not None: 24 | self.minimum_bandwidth_in_KBs = bandwidth_KBs 25 | else: 26 | self.minimum_bandwidth_in_KBs = 150 27 | 28 | def get_id(self): 29 | return self.id 30 | 31 | def get_url(self): 32 | if self.url is None: 33 | raise ParserException("webURL is NONE") 34 | return self.url 35 | 36 | def get_min_bandwidth(self): 37 | if self.minimum_bandwidth_in_KBs < 0: 38 | raise ParserException("invalid minimum bandwidth limit {0} ".format(self.minimum_bandwidth_in_KBs)) 39 | return self.minimum_bandwidth_in_KBs 40 | 41 | def parse_proxyList(self): 42 | raise ParserException("abstract method should be implemented by each subclass") 43 | 44 | def __str__(self): 45 | return "URL Parser of '{0}' with required bandwidth: '{1}' KBs" \ 46 | .format(self.url, self.minimum_bandwidth_in_KBs) 47 | 48 | @staticmethod 49 | def valid_ip(address): 50 | """Return ``True`` if the given *address* is a *valid* IPv4 address 51 | 52 | :param address: ip address 53 | :type address: string 54 | :rtype: bool 55 | 56 | """ 57 | try: 58 | host_bytes = address.split('.') 59 | valid = [int(b) for b in host_bytes] 60 | valid = [b for b in valid if 0 <= b <= 255] 61 | return len(host_bytes) == 4 and len(valid) == 4 62 | except (AttributeError, ValueError): 63 | return False 64 | 65 | @staticmethod 66 | def valid_ip_port(address): 67 | """Return ``True`` if the given *address* is a *valid* ``IPv4:port`` string 68 | 69 | :param address: ip:port address 70 | :type address: string 71 | :rtype: bool 72 | 73 | """ 74 | match = re.findall(r'[0-9]+(?:\.[0-9]+){3}:[0-9]+', address) 75 | # hostIP = re.compile("\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}") 76 | if not match: 77 | return False 78 | return True 79 | 80 | @staticmethod 81 | def valid_port(port): 82 | return 1 <= int(port) <= 65535 83 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/http_request_randomizer/requests/parsers/__init__.py -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/js/UnPacker.py: -------------------------------------------------------------------------------- 1 | import re 2 | import requests 3 | import logging 4 | 5 | logger = logging.getLogger(__name__) 6 | 7 | 8 | class JsUnPacker(object): 9 | """ 10 | It takes the javascript file's url which contains the port numbers for 11 | the encrypted strings. The file has to be unpacked to a readable form just like 12 | http://matthewfl.com/unPacker.html does. Then we create a dictionary for 13 | every key:port pair.
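    Illustrative usage (hypothetical key/port values; the URL matches the one
    mocked in tests/test_js_unpacker.py):
        unpacker = JsUnPacker('https://www.premproxy.com/js/test.js')
        port = unpacker.get_port('r97e1')  # e.g. '3128'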
14 | """ 15 | # TODO: it might not be necessary to unpack the js code 16 | 17 | def __init__(self, js_file_url): 18 | logger.info("JS UnPacker init path: {}".format(js_file_url)) 19 | r = requests.get(js_file_url) 20 | encrypted = r.text.strip() 21 | encrypted = '(' + encrypted.split('}(')[1][:-1] 22 | unpacked = eval('self.unpack' +encrypted) # string of the js code in unpacked form 23 | matches = re.findall(r".*?\('\.([a-zA-Z0-9]{1,6})'\).*?\((\d+)\)", unpacked) 24 | self.ports = dict((key, port) for key, port in matches) 25 | logger.debug('portmap: '+str(self.ports)) 26 | 27 | def baseN(self, num, b, numerals="0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"): 28 | return ((num == 0) and numerals[0]) or (self.baseN(num // b, b, numerals).lstrip(numerals[0]) + numerals[num % b]) 29 | 30 | def unpack(self, p, a, c, k, e=None, d=None): 31 | while c: 32 | c -= 1 33 | if k[c]: 34 | p = re.sub("\\b" + self.baseN(c, a) + "\\b", k[c], p) 35 | return p 36 | 37 | def get_port(self, key): 38 | return self.ports[key] 39 | 40 | def get_ports(self): 41 | return self.ports 42 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/parsers/js/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/http_request_randomizer/requests/parsers/js/__init__.py -------------------------------------------------------------------------------- /http_request_randomizer/requests/proxy/ProxyObject.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class ProxyObject(object): 5 | def __init__(self, source, ip, port, anonymity_level, country=None, protocols=[], tunnel=False): 6 | """ Proxy object implementation - base for all the parsing logic 7 | 8 | :param source: The name of the proxy list from which the proxy was collected 9 | :param ip: The IP address of the proxy 10 | :param port: The port number of the proxy 11 | :param anonymity_level: The anonymity level of the proxy. Can be any of :AnonymityLevel 12 | :param country: Alpha-2 country code of the country in which the proxy is geo-located 13 | :param protocols: A list of protocols that the proxy supports. May contain one or more of {HTTP, HTTPS, SOCKS5, SOCKS6} 14 | :param tunnel: Whether or not the proxy supports tunneling to HTTPS target URLs. 
15 | """ 16 | self.source = source 17 | self.ip = ip 18 | self.port = port 19 | self.anonymity_level = anonymity_level 20 | self.country = country 21 | self.protocols = protocols 22 | self.tunnel = tunnel 23 | 24 | def get_address(self): 25 | return "{0}:{1}".format(self.ip, self.port) 26 | 27 | def __str__(self): 28 | """ Method is heavily used for Logging - make sure we have a readable output 29 | 30 | :return: The address representation of the proxy 31 | """ 32 | return "{0} | {1}".format(self.get_address(), self.source) 33 | 34 | def to_str(self): 35 | return "Address: {0} | Src: {1} | | Country: {2} | Anonymity: {3} | Protoc: {4} | Tunnel: {5}"\ 36 | .format(self.get_address(), self.source, self.country, self.anonymity_level, self.protocols, 37 | self.tunnel) 38 | 39 | 40 | # class AnonymityEnumMeta(EnumMeta): 41 | # def __call__(cls, value, *args, **kw): 42 | # if isinstance(value, str): 43 | # # map string Alias to enum values, defaults to Unknown 44 | # value = { 45 | # 'transparent': 1, 46 | # 'transparent proxy': 1, 47 | # 'LOW': 1, 48 | # 'anonymous': 2, 49 | # 'anonymous proxy': 2, 50 | # 'high-anonymous': 2, 51 | # 'elite': 3, 52 | # 'elite proxy': 3, 53 | # 'HIGH': 3 54 | # }.get(value, 0) 55 | # return super(AnonymityEnumMeta, cls).__call__(value, *args, **kw) 56 | 57 | 58 | class AnonymityLevel(Enum): 59 | # __metaclass__ = AnonymityEnumMeta 60 | """ 61 | UNKNOWN: The proxy anonymity capabilities are not exposed 62 | TRANSPARENT: The proxy does not hide the requester's IP address. 63 | ANONYMOUS: The proxy hides the requester's IP address, but adds headers to the forwarded request that make it clear 64 | that the request was made using a proxy. 65 | ELITE: The proxy hides the requester's IP address and does not add any proxy-related headers to the request. 
66 | """ 67 | UNKNOWN = 0 # default 68 | TRANSPARENT = 1, 'transparent', 'transparent proxy', 'LOW' 69 | ANONYMOUS = 2, 'anonymous', 'anonymous proxy', 'high-anonymous' 70 | ELITE = 3, 'elite', 'elite proxy', 'HIGH', 'Elite & Anonymous' 71 | 72 | def __new__(cls, int_value, *value_aliases): 73 | obj = object.__new__(cls) 74 | obj._value_ = int_value 75 | for alias in value_aliases: 76 | cls._value2member_map_[alias] = obj 77 | return obj 78 | 79 | @classmethod 80 | def get(cls, name): 81 | try: 82 | return cls(name) 83 | except ValueError: 84 | return cls.UNKNOWN 85 | 86 | class Protocol(Enum): 87 | UNKNOWN = 0 88 | HTTP = 1 89 | HTTPS = 2 90 | SOCS4 = 3 91 | SOCS5 = 4 92 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/proxy/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'pgaref' 2 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/proxy/requestProxy.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import random 4 | import sys 5 | import time 6 | 7 | import requests 8 | from requests.exceptions import ChunkedEncodingError 9 | from requests.exceptions import TooManyRedirects 10 | from requests.exceptions import ConnectionError 11 | from requests.exceptions import ReadTimeout 12 | 13 | from http_request_randomizer.requests.proxy.ProxyObject import Protocol 14 | from http_request_randomizer.requests.errors.ProxyListException import ProxyListException 15 | from http_request_randomizer.requests.parsers.FreeProxyParser import FreeProxyParser 16 | from http_request_randomizer.requests.parsers.ProxyForEuParser import ProxyForEuParser 17 | from http_request_randomizer.requests.parsers.RebroWeeblyParser import RebroWeeblyParser 18 | from http_request_randomizer.requests.parsers.PremProxyParser import PremProxyParser 19 | from http_request_randomizer.requests.parsers.SslProxyParser import SslProxyParser 20 | from http_request_randomizer.requests.useragent.userAgent import UserAgentManager 21 | 22 | __author__ = 'pgaref' 23 | sys.path.insert(0, os.path.abspath('../../../../')) 24 | 25 | # Push back requests library to at least warnings 26 | logging.getLogger("requests").setLevel(logging.WARNING) 27 | logging.getLogger("urllib3").setLevel(logging.WARNING) 28 | handler = logging.StreamHandler() 29 | formatter = logging.Formatter('%(asctime)s %(name)-6s %(levelname)-8s %(message)s') 30 | handler.setFormatter(formatter) 31 | 32 | 33 | class RequestProxy: 34 | def __init__(self, web_proxy_list=[], sustain=False, timeout=5, protocol=Protocol.HTTP, log_level=0): 35 | self.logger = logging.getLogger() 36 | self.logger.addHandler(handler) 37 | self.logger.setLevel(log_level) 38 | self.userAgent = UserAgentManager(file=os.path.join(os.path.dirname(__file__), '../data/user_agents.txt')) 39 | 40 | ##### 41 | # Each of the classes below implements a specific URL Parser 42 | ##### 43 | parsers = list([]) 44 | parsers.append(FreeProxyParser('FreeProxy', 'http://free-proxy-list.net', timeout=timeout)) 45 | #parsers.append(ProxyForEuParser('ProxyForEU', 'http://proxyfor.eu/geo.php', 1.0, timeout=timeout)) <--doesn't work anymore 46 | #parsers.append(RebroWeeblyParser('ReBro', 'http://rebro.weebly.com', timeout=timeout)) <--doesn't work anymore 47 | parsers.append(PremProxyParser('PremProxy', 'https://premproxy.com', timeout=timeout)) 48 | 
parsers.append(SslProxyParser('SslProxy', 'https://www.sslproxies.org', timeout=timeout)) 49 | 50 | self.logger.debug("=== Initialized Proxy Parsers ===") 51 | for i in range(len(parsers)): 52 | self.logger.debug("\t {0}".format(parsers[i].__str__())) 53 | self.logger.debug("=================================") 54 | 55 | self.sustain = sustain 56 | self.parsers = parsers 57 | self.proxy_list = web_proxy_list 58 | for parser in parsers: 59 | try: 60 | size = len(self.proxy_list) 61 | self.proxy_list += parser.parse_proxyList() 62 | self.logger.debug('Added {} proxies from {}'.format(len(self.proxy_list)-size, parser.id)) 63 | except ReadTimeout: 64 | self.logger.warning("Proxy Parser: '{}' TimedOut!".format(parser.url)) 65 | self.logger.debug('Total proxies = '+str(len(self.proxy_list))) 66 | # filtering the list of available proxies according to user preferences 67 | self.proxy_list = [p for p in self.proxy_list if protocol in p.protocols] 68 | self.logger.debug('Filtered proxies = '+str(len(self.proxy_list))) 69 | self.current_proxy = self.randomize_proxy() 70 | 71 | def set_logger_level(self, level): 72 | self.logger.setLevel(level) 73 | 74 | def get_proxy_list(self): 75 | return self.proxy_list 76 | 77 | def generate_random_request_headers(self): 78 | headers = { 79 | "Connection": "close", # another way to cover tracks 80 | "User-Agent": self.userAgent.get_random_user_agent() 81 | } # select a random user agent 82 | return headers 83 | 84 | def randomize_proxy(self): 85 | if len(self.proxy_list) == 0: 86 | raise ProxyListException("list is empty") 87 | rand_proxy = random.choice(self.proxy_list) 88 | while not rand_proxy: 89 | rand_proxy = random.choice(self.proxy_list) 90 | self.current_proxy = rand_proxy 91 | return rand_proxy 92 | 93 | ##### 94 | # Proxy format: 95 | # http://<username>:<password>@<ip>:<port> 96 | ##### 97 | def generate_proxied_request(self, url, method="GET", params={}, data={}, headers={}, req_timeout=30): 98 | try: 99 | random.shuffle(self.proxy_list) 100 | # req_headers = dict(params.items() + self.generate_random_request_headers().items()) 101 | 102 | req_headers = dict(params.items()) 103 | req_headers_random = dict(self.generate_random_request_headers().items()) 104 | req_headers.update(req_headers_random) 105 | 106 | if not self.sustain: 107 | self.randomize_proxy() 108 | 109 | headers.update(req_headers) 110 | 111 | self.logger.debug("Using headers: {0}".format(str(headers))) 112 | self.logger.debug("Using proxy: {0}".format(str(self.current_proxy))) 113 | request = requests.request(method, url, headers=headers, data=data, params=params, timeout=req_timeout, 114 | proxies={ 115 | "http": "http://{0}".format(self.current_proxy.get_address()), 116 | "https": "https://{0}".format(self.current_proxy.get_address()) 117 | }) 118 | # Avoid HTTP request errors 119 | if request.status_code == 409: 120 | raise ConnectionError("HTTP Response [409] - Possible Cloudflare DNS resolution error") 121 | elif request.status_code == 403: 122 | raise ConnectionError("HTTP Response [403] - Permission denied error") 123 | elif request.status_code == 503: 124 | raise ConnectionError("HTTP Response [503] - Service unavailable error") 125 | self.logger.info('RR Status {}'.format(request.status_code)) 126 | return request 127 | except ConnectionError: 128 | try: 129 | self.proxy_list.remove(self.current_proxy) 130 | except ValueError: 131 | pass 132 | self.logger.debug("Proxy unreachable - Removed Straggling proxy: {0} PL Size = {1}".format( 133 | self.current_proxy, len(self.proxy_list))) 134 | 
self.randomize_proxy() 135 | except ReadTimeout: 136 | try: 137 | self.proxy_list.remove(self.current_proxy) 138 | except ValueError: 139 | pass 140 | self.logger.debug("Read timed out - Removed Straggling proxy: {0} PL Size = {1}".format( 141 | self.current_proxy, len(self.proxy_list))) 142 | self.randomize_proxy() 143 | except ChunkedEncodingError: 144 | try: 145 | self.proxy_list.remove(self.current_proxy) 146 | except ValueError: 147 | pass 148 | self.logger.debug("Wrong server chunked encoding - Removed Straggling proxy: {0} PL Size = {1}".format( 149 | self.current_proxy, len(self.proxy_list))) 150 | self.randomize_proxy() 151 | except TooManyRedirects: 152 | try: 153 | self.proxy_list.remove(self.current_proxy) 154 | except ValueError: 155 | pass 156 | self.logger.debug("Too many redirects - Removed Straggling proxy: {0} PL Size = {1}".format( 157 | self.current_proxy, len(self.proxy_list))) 158 | self.randomize_proxy() 159 | 160 | 161 | if __name__ == '__main__': 162 | 163 | start = time.time() 164 | req_proxy = RequestProxy() 165 | print("Initialization took: {0} sec".format((time.time() - start))) 166 | print("Size: {0}".format(len(req_proxy.get_proxy_list()))) 167 | print("ALL = {0} ".format(list(map(lambda x: x.get_address(), req_proxy.get_proxy_list())))) 168 | 169 | test_url = 'http://ipv4.icanhazip.com' 170 | 171 | while True: 172 | start = time.time() 173 | request = req_proxy.generate_proxied_request(test_url) 174 | print("Proxied Request Took: {0} sec => Status: {1}".format((time.time() - start), request.__str__())) 175 | if request is not None: 176 | print("\t Response: ip={0}".format(u''.join(request.text).encode('utf-8'))) 177 | print("Proxy List Size: {0}".format(len(req_proxy.get_proxy_list()))) 178 | 179 | print("-> Going to sleep..") 180 | time.sleep(10) 181 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/runners/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/http_request_randomizer/requests/runners/__init__.py -------------------------------------------------------------------------------- /http_request_randomizer/requests/runners/proxyList.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import sys 4 | 5 | import pkg_resources 6 | 7 | from http_request_randomizer.requests.parsers.FreeProxyParser import FreeProxyParser 8 | from http_request_randomizer.requests.parsers.ProxyForEuParser import ProxyForEuParser 9 | from http_request_randomizer.requests.parsers.RebroWeeblyParser import RebroWeeblyParser 10 | from http_request_randomizer.requests.parsers.PremProxyParser import PremProxyParser 11 | 12 | __author__ = 'pgaref' 13 | 14 | handler = logging.StreamHandler() 15 | formatter = logging.Formatter('%(levelname)-8s %(name)-6s %(message)s') 16 | handler.setFormatter(formatter) 17 | 18 | logging.getLogger().addHandler(handler) 19 | 20 | 21 | class ProxyList(object): 22 | def __init__(self, timeout=1.0, bandwidth=10.0): 23 | # Each of the entries implements a specific URL Parser 24 | self.parsers = dict() 25 | self.parsers['rebro'] = RebroWeeblyParser('ReBro', 'http://rebro.weebly.com', timeout=timeout) 26 | self.parsers['prem'] = PremProxyParser('Prem', 'https://premproxy.com', timeout=timeout) 27 | self.parsers['freeproxy'] = FreeProxyParser('FreeProxy', 
'http://free-proxy-list.net', timeout=timeout) 28 | self.parsers['proxyforeu'] = ProxyForEuParser('ProxyForEU', 'http://proxyfor.eu/geo.php', 29 | bandwidth=bandwidth, timeout=timeout) 30 | 31 | def get_source_options(self): 32 | sources = list(map(lambda x: x.id.lower(), self.parsers.values())) 33 | sources.append('all') 34 | return sources 35 | 36 | 37 | def run(args): 38 | # re-initialise with current user settings 39 | proxy_list = ProxyList(bandwidth=args.bandwidth, timeout=args.timeout) 40 | # eliminate duplicates 41 | providers = set(args.source) 42 | # keep proxy list in memory 43 | proxy_out = list() 44 | for source in providers: 45 | if source == 'all': 46 | for p in proxy_list.parsers.values(): 47 | print("* id: {0:<30} url: {1:<50}".format(p.id, p.get_url())) 48 | proxy_out += p.parse_proxyList() 49 | else: 50 | p = proxy_list.parsers[source] 51 | print("* id: {0:<30} url: {1:<50}".format(p.id, p.get_url())) 52 | proxy_out += p.parse_proxyList() 53 | 54 | # dump proxies to output stream 55 | for proxy in proxy_out: 56 | args.outfile.write("{} \n".format(proxy.to_str())) 57 | 58 | 59 | def create_parser(proxyList): 60 | parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, 61 | description='ProxyList tool retrieving proxies from publicly available providers.') 62 | parser.add_argument('-s', '--source', 63 | nargs='+', 64 | choices=proxyList.get_source_options(), 65 | dest='source', 66 | help='Specify proxy provider(s)', 67 | required=True) 68 | 69 | parser.add_argument('-o', '--outfile', 70 | nargs='?', 71 | type=argparse.FileType('w'), 72 | metavar='output-file/sys.stdout', 73 | dest='outfile', 74 | help='Specify output stream', 75 | required=False) 76 | parser.set_defaults(outfile=sys.stdout) 77 | 78 | parser.add_argument('-t', '--timeout', 79 | type=float, 80 | dest='timeout', 81 | help='Specify provider timeout threshold (seconds)', 82 | required=False) 83 | parser.set_defaults(timeout=1.0) 84 | 85 | parser.add_argument('-bw', '--bandwidth', 86 | type=float, 87 | dest='bandwidth', 88 | help='Specify proxy bandwidth threshold (KBs)', 89 | required=False) 90 | parser.set_defaults(bandwidth=10.0) 91 | 92 | version = pkg_resources.require("http_request_randomizer")[0].version 93 | parser.add_argument('-v', '--version', 94 | action='version', 95 | version='%(prog)s {}'.format(version)) 96 | return parser 97 | 98 | 99 | # Wrapper method to satisfy setup.py entry_point 100 | def main(): 101 | parser = create_parser(ProxyList()) 102 | args = parser.parse_args(sys.argv[1:]) 103 | run(args) 104 | print("\n All Done \n") 105 | 106 | 107 | if __name__ == '__main__': 108 | main() 109 | -------------------------------------------------------------------------------- /http_request_randomizer/requests/useragent/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/http_request_randomizer/requests/useragent/__init__.py -------------------------------------------------------------------------------- /http_request_randomizer/requests/useragent/userAgent.py: -------------------------------------------------------------------------------- 1 | import os 2 | import random 3 | from fake_useragent import FakeUserAgent 4 | import logging 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | 9 | class UserAgentManager: 10 | def __init__(self, fallback=None, file=None): 11 | self.agent_file = file 12 | if file is not None: 13 
| logger.info('Using local file for user agents: '+self.agent_file) 14 | self.useragents = self.load_user_agents(self.agent_file) 15 | else: 16 | logger.info('Using fake-useragent package for user agents.') 17 | if fallback is None: 18 | fallback = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36' 19 | self.fakeuseragent = FakeUserAgent(fallback=fallback, cache=False) 20 | 21 | def load_user_agents(self, useragentsfile): 22 | """ 23 | useragentsfile : string 24 | path to text file of user agents, one per line 25 | """ 26 | useragents = [] 27 | with open(useragentsfile, 'rb') as uaf: 28 | for ua in uaf.readlines(): 29 | if ua: 30 | useragents.append(ua.strip()[1:-1 - 1])  # drop the first and last two characters (quoting in the bundled data file) 31 | return useragents 32 | 33 | def get_random_user_agent(self): 34 | if self.agent_file: 35 | user_agent = random.choice(self.useragents) 36 | return user_agent.decode('utf-8') 37 | else: 38 | return self.fakeuseragent.random 39 | 40 | def get_first_user_agent(self): 41 | if self.agent_file: 42 | return self.useragents[0].decode('utf-8') 43 | else: 44 | logger.warning('Fake-useragent library does not support operation get_first - change to user-agent file!') 45 | return None 46 | 47 | def get_last_user_agent(self): 48 | if self.agent_file: 49 | return self.useragents[-1].decode('utf-8') 50 | else: 51 | logger.warning('Fake-useragent library does not support operation get_last - change to user-agent file!') 52 | return None 53 | 54 | def get_len_user_agent(self): 55 | if self.agent_file: 56 | return len(self.useragents) 57 | else: 58 | logger.warning('Fake-useragent library does not support operation get_len - change to user-agent file!') 59 | return None 60 | 61 | 62 | if __name__ == '__main__': 63 | ua = UserAgentManager() 64 | if ua.agent_file: 65 | print("Number of User Agent headers: {0}".format(ua.get_len_user_agent())) 66 | print("First User Agent in file: {0}".format(ua.get_first_user_agent())) 67 | print("Last User Agent in file: {0}".format(ua.get_last_user_agent())) 68 | else: 69 | print("Using up-to-date user agents from online database.") 70 | print("If you want one random header for a request, you may use the following header:\n") 71 | print("User-Agent: " + ua.get_random_user_agent() + "\n") 72 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4 >= 4.9.3 2 | coverage >= 5.3 3 | httmock >= 1.3.0 4 | psutil >= 5.7.2 5 | pytest >= 6.1.1 6 | pytest-cov >= 2.10.1 7 | python-dateutil >= 2.8.1 8 | requests >= 2.24.0 9 | pyOpenSSL >= 19.1.0 10 | fake-useragent >= 0.1.11 11 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | '''Setup script for HTTP_Request_Randomizer.''' 2 | from setuptools import setup, find_packages 3 | from setuptools.command.test import test as TestCommand 4 | import codecs 5 | import sys 6 | import os 7 | import re 8 | 9 | NAME = 'http_request_randomizer' 10 | HERE = os.path.abspath(os.path.dirname(__file__)) 11 | 12 | PROJECT_URLS = { 13 | 'Blog': 'http://pgaref.com/blog/python-proxy', 14 | 'Documentation': 'https://pythonhosted.org/http-request-randomizer', 15 | 'Source Code': 'https://github.com/pgaref/http_request_randomizer', 16 | } 17 | 18 | def read(*parts): 19 | """Return multiple read calls to different readable objects as a single 20 | 
string.""" 21 | # intentionally *not* adding an encoding option to open 22 | return codecs.open(os.path.join(HERE, *parts), "rb", "utf-8").read() 23 | 24 | try: 25 | META_PATH = os.path.join(HERE, "http_request_randomizer", "__init__.py") 26 | finally: 27 | print(META_PATH) 28 | META_FILE = read(META_PATH) 29 | 30 | def find_meta(meta): 31 | """ 32 | Extract __*meta*__ from META_FILE. 33 | """ 34 | print(META_PATH) 35 | meta_match = re.search( 36 | fr"^__{meta}__ = ['\"]([^'\"]*)['\"]", META_FILE, re.M 37 | ) 38 | if meta_match: 39 | return meta_match.group(1) 40 | raise RuntimeError(f"Unable to find __{ meta }__ string.") 41 | 42 | LONG_DESCRIPTION = read('README.rst') 43 | ######################################################################### 44 | class Tox(TestCommand): 45 | def finalize_options(self): 46 | TestCommand.finalize_options(self) 47 | self.test_args = [] 48 | self.test_suite = True 49 | 50 | def run_tests(self): 51 | #import here, cause outside the eggs aren't loaded 52 | import tox 53 | errcode = tox.cmdline(self.test_args) 54 | sys.exit(errcode) 55 | 56 | 57 | class PyTest(TestCommand): 58 | def finalize_options(self): 59 | TestCommand.finalize_options(self) 60 | self.test_args = [ 61 | '--strict', 62 | '--verbose', 63 | '--tb=long', 64 | 'tests'] 65 | self.test_suite = True 66 | 67 | def run_tests(self): 68 | import pytest 69 | errno = pytest.main(self.test_args) 70 | sys.exit(errno) 71 | ######################################################################### 72 | setup( 73 | name=NAME, 74 | version=find_meta("version"), 75 | url=find_meta("uri"), 76 | project_urls=PROJECT_URLS, 77 | license=find_meta("license"), 78 | author=find_meta("author"), 79 | author_email=find_meta("email"), 80 | maintainer=find_meta("author"), 81 | maintainer_email=find_meta("email"), 82 | description=find_meta("description"), 83 | long_description=LONG_DESCRIPTION, 84 | packages=find_packages(exclude=['tests']), 85 | platforms='any', 86 | test_suite='tests.test_parsers', 87 | # tests_require=['tox'], 88 | # cmdclass={'test': Tox}, 89 | tests_require=['pytest', 'pytest-cov'], 90 | cmdclass={'test': PyTest}, 91 | install_requires=['beautifulsoup4 >= 4.9.3', 92 | 'httmock >= 1.3.0', 93 | 'psutil >= 5.7.2', 94 | 'python-dateutil >= 2.8.1', 95 | 'requests >= 2.24.0', 96 | 'pyOpenSSL >= 19.1.0', 97 | 'fake-useragent >= 0.1.11' 98 | ], 99 | use_scm_version=True, 100 | setup_requires=['setuptools-scm', 'pytest-runner'], 101 | zip_safe=False, 102 | # include_package_data=True, 103 | package_data={ 104 | # Include agents.txt files 105 | 'http_request_randomizer.requests': ['data/*'], 106 | }, 107 | # To provide executable scripts, use entry points in preference to the 108 | # "scripts" keyword. Entry points provide cross-platform support and allow 109 | # pip to create the appropriate form of executable for the target platform. 
110 | entry_points={ 111 | 'console_scripts': [ 112 | 'proxyList = http_request_randomizer.requests.runners.proxyList:main', 113 | ], 114 | }, 115 | classifiers=[ 116 | 'Development Status :: 4 - Beta', 117 | 'Intended Audience :: Developers', 118 | 'Operating System :: OS Independent', 119 | 'License :: OSI Approved :: MIT License', 120 | 'Natural Language :: English', 121 | 'Environment :: Web Environment', 122 | 'Topic :: Internet :: WWW/HTTP', 123 | 'Programming Language :: Python :: 3', 124 | 'Programming Language :: Python :: 3.6', 125 | 'Programming Language :: Python :: 3.7', 126 | 'Programming Language :: Python :: 3.8', 127 | 'Programming Language :: Python :: 3.9', 128 | 'Topic :: Software Development :: Libraries :: Python Modules', 129 | ], 130 | ) 131 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pgaref/HTTP_Request_Randomizer/5c41348e90190a78a45c67ded7da121239f3d23a/tests/__init__.py -------------------------------------------------------------------------------- /tests/mocks.py: -------------------------------------------------------------------------------- 1 | from httmock import urlmatch 2 | 3 | 4 | free_proxy_expected = ['138.197.136.46:3128', '177.207.75.227:8080'] 5 | proxy_for_eu_expected = ['107.151.136.222:80', '37.187.253.39:8115'] 6 | rebro_weebly_expected = ['213.149.105.12:8080', '119.188.46.42:8080'] 7 | prem_expected = ['191.252.61.28:80', '167.114.203.141:8080', '152.251.141.93:8080'] 8 | sslproxy_expected = ['24.211.89.146:8080', '187.84.222.153:80', '41.193.238.249:8080'] 9 | 10 | @urlmatch(netloc=r'(.*\.)?sslproxies\.org$') 11 | def sslproxy_mock(url, request): 12 | return """ 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 |
IP AddressPortCodeCountryAnonymityGoogleHttpsLast Checked
24.211.89.1468080USUnited Stateselite proxynoyes8 seconds ago
187.84.222.15380BRBrazilanonymousnoyes1 minute ago
41.193.238.2498080ZASouth Africaelite proxynoyes1 minute ago
69 | """ 70 | 71 | @urlmatch(netloc=r'(.*\.)?free-proxy-list\.net$') 72 | def free_proxy_mock(url, request): 73 | return """\n 75 | \n 76 | \n 77 | 78 | \n 79 | 80 | \n 81 | 82 | \n 83 | 84 | \n 85 | 86 | \n 87 | 88 | \n 89 | 90 | \n 91 | 92 | \n 93 | 94 | \n 95 | 96 | \n 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | \n 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | \n 120 | 121 | \n 122 | \n 123 | \n 124 | 125 | \n 126 | 127 | \n 128 | 129 | \n 130 | 131 | \n 132 | 133 | \n 134 | 135 | \n 136 | 137 | \n 138 | 139 | \n 140 | 141 | \n 142 | 143 | \n 144 |
IP AddressPortCodeCountryAnonymityGoogleHttpsLast Checked
138.197.136.463128CACanadaanonymousnono7 seconds ago
177.207.75.2278080BRBraziltransparentnono2 hours 21 minutes ago
""" 145 | 146 | 147 | @urlmatch(netloc=r'(.*\.)?proxyfor\.eu') 148 | def proxy_for_eu_mock(url, request): 149 | return """ 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 |
IPPortCountryAnonSpeed CheckCookie/POST
107.151.136.22280United StatesHIGH1.6432016-04-12 17:02:43Yes/Yes
37.187.253.398115FranceHIGH12.7792016-04-12 14:36:18Yes/Yes
""" 178 | 179 | 180 | @urlmatch(netloc=r'(.*\.)?rebro\.weebly\.com$') 181 | def rebro_weebly_mock(url, request): 182 | return """
IP:Port
213.149.105.12:8080
119.188.46.42:8080

185 |
186 | 187 | 188 |
Country 189 |
Montenegro
China

190 |
191 | 192 |
Status
193 | Elite & Anonymous
Elite & Anonymous

194 |
195 | 196 | """ 197 | 198 | 199 | @urlmatch(netloc=r'(.*\.)?www\.premproxy\.com') 200 | def prem_mock(url, request): 201 | return """ 202 | 203 | 204 | 205 |
\n 206 | \n 207 | IP address 208 | \n 209 | Anonymity 210 | \n 211 | Checked 212 | \n 213 | Country 214 | \n 215 | City 216 | \n 217 | ISP 218 | \n 219 | 220 | \n 221 | 224 | \n 225 | 226 | 191.252.61.28: 227 | high-anonymous 228 | Apr-18, 17:18 229 | Brazil 230 | S\xe3o Jos\xe9 Dos Campos 231 | Locaweb 232 | Servi\xe7o... 233 | 234 | \n 235 | 236 | 167.114.203.141: 237 | transparent 238 | Apr-18, 13:22 239 | Canada 240 | Montr\xe9al (QC) 241 | OVH Hosting 242 | 243 | \n 244 | 245 | 152.251.141.93: 246 | elite 247 | Jul-16, 04:39 248 | Brazil 249 |   250 | Vivo 251 | 252 | \n 253 | Select All Proxies 254 |
""" 255 | 256 | 257 | @urlmatch(netloc=r'(.*\.)?www\.premproxy\.com', path='/js/test.js', method='get', scheme='https') 258 | def prem_js_mock(url, request): 259 | return b"eval(function(p,a,c,k,e,d){e=function(c){return(c35?String.fromCharCode(c+29):c.toString(36))};" \ 260 | b"if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\\\w+'};c=1};" \ 261 | b"while(c--){if(k[c]){p=p.replace(new RegExp('\\\\b'+e(c)+'\\\\b','g'),k[c])}}return p}('$(t).u(v(){$(\\'.s\\').0(r);" \ 262 | b"$(\\'.n\\').0(o);$(\\'.p\\').0(q);$(\\'.w\\').0(x);$(\\'.D\\').0(E);$(\\'.F\\').0(C);$(\\'.B\\').0(y);$(\\'.z\\').0(A);" \ 263 | b"$(\\'.m\\').0(i);$(\\'.7\\').0(8);$(\\'.9\\').0(6);$(\\'.4\\').0(1);$(\\'.2\\').0(5);$(\\'.3\\').0(a);$(\\'.l\\').0(b);" \ 264 | b"$(\\'.j\\').0(k);$(\\'.h\\').0(g);$(\\'.c\\').0(d);$(\\'.e\\').0(f);$(\\'.G\\').0(1n);$(\\'.H\\').0(1b);$(\\'.1c\\').0(19);" \ 265 | b"$(\\'.18\\').0(14);$(\\'.15\\').0(16);$(\\'.17\\').0(1d);$(\\'.1e\\').0(1k);$(\\'.1l\\').0(1m);$(\\'.1j\\').0(1i);$(\\'.1f\\').0(1g);" \ 266 | b"$(\\'.1h\\').0(13);$(\\'.12\\').0(O);$(\\'.P\\').0(Q);$(\\'.N\\').0(M);$(\\'.I\\').0(J);$(\\'.K\\').0(L);$(\\'.R\\').0(S);$(\\'.Z\\').0(10)" \ 267 | b";$(\\'.11\\').0(Y);$(\\'.X\\').0(T);$(\\'.U\\').0(V);$(\\'.W\\').0(1a)});',62,86,'html|20183|r97e1|rff0a|r117f|65103|65205|r76d3|52335|r21e1|" \ 268 | b"62225|9000|r2e7b|81|r0d8a|9797|6666|r1f9b|28080|rdde2|31773|rf51a|rd687|r1c53|53281|raceb|3128|8080|r63c5|document|ready|function|r60e6|80|8888|" \ 269 | b"r6ec1|8181|rb058|8197|r40ed|8081|re3f0|r28a8|r55d0|ra6df|8090|r4381|8000|53282|r125a|8082|r2f55|2016|r6714|47753|55012|rb59a|9090|ra346|r4b77|54214|" \ 270 | b"rd762|1080|rc6d0|r9946|60088|9999|r3e10|8118|r7f82|r371f|54314|63909|41258|r8065|8380|rf914|r9e8e|8088|r3c82|808|r3165|8383|r6643|555|3130'.split('|'),0,{}))\n" 271 | -------------------------------------------------------------------------------- /tests/test_js_unpacker.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | import os 4 | from httmock import HTTMock 5 | 6 | from http_request_randomizer.requests.parsers.js.UnPacker import JsUnPacker 7 | from tests.mocks import prem_js_mock 8 | 9 | sys.path.insert(0, os.path.abspath('.')) 10 | 11 | __author__ = 'pgaref' 12 | 13 | 14 | class TestJS(unittest.TestCase): 15 | 16 | def test_js_unpacker(self): 17 | with HTTMock(prem_js_mock): 18 | JsUnPacker('https://www.premproxy.com/js/test.js') 19 | 20 | 21 | if __name__ == '__main__': 22 | unittest.main() -------------------------------------------------------------------------------- /tests/test_parsers.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import unittest 4 | import sys 5 | import os 6 | 7 | sys.path.insert(0, os.path.abspath('.')) 8 | 9 | from http_request_randomizer.requests.parsers.UrlParser import UrlParser 10 | 11 | __author__ = 'pgaref' 12 | 13 | 14 | class TestBaseProxyParsers(unittest.TestCase): 15 | def setUp(self): 16 | self.normal_parser = UrlParser("proxy-test", "http://proxy-test.com", bandwidth_KBs=50) 17 | self.no_bdwidthParser = UrlParser("slow-proxy", "http://slow-proxy.com") 18 | 19 | def test_normal_parser(self): 20 | self.assertEqual(self.normal_parser.get_url(), "http://proxy-test.com", "incorrect parser URL") 21 | self.assertEqual(self.normal_parser.get_min_bandwidth(), 50, "incorrect parser bandwidth") 22 | 23 | def 
test_no_bandwidth_parser(self): 24 | self.assertEqual(self.no_bdwidthParser.get_url(), "http://slow-proxy.com", "incorrect parser URL") 25 | self.assertEqual(self.no_bdwidthParser.get_min_bandwidth(), 150, "incorrect parser bandwidth") 26 | 27 | 28 | if __name__ == '__main__': 29 | unittest.main() 30 | -------------------------------------------------------------------------------- /tests/test_providers.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import unittest 4 | import sys 5 | import os 6 | from httmock import HTTMock 7 | 8 | sys.path.insert(0, os.path.abspath('.')) 9 | 10 | from tests.mocks import free_proxy_mock, proxy_for_eu_mock, rebro_weebly_mock, prem_mock, sslproxy_mock 11 | from tests.mocks import free_proxy_expected, proxy_for_eu_expected, rebro_weebly_expected, prem_expected, prem_js_mock, sslproxy_expected 12 | from http_request_randomizer.requests.parsers.FreeProxyParser import FreeProxyParser 13 | from http_request_randomizer.requests.parsers.ProxyForEuParser import ProxyForEuParser 14 | from http_request_randomizer.requests.parsers.RebroWeeblyParser import RebroWeeblyParser 15 | from http_request_randomizer.requests.parsers.PremProxyParser import PremProxyParser 16 | from http_request_randomizer.requests.parsers.SslProxyParser import SslProxyParser 17 | 18 | __author__ = 'pgaref' 19 | 20 | 21 | class TestProxyProviders(unittest.TestCase): 22 | 23 | def test_FreeProxyParser(self): 24 | with HTTMock(free_proxy_mock): 25 | proxy_provider = FreeProxyParser('FreeProxy', 'http://free-proxy-list.net') 26 | proxy_list = proxy_provider.parse_proxyList() 27 | proxy_list_addr = [] 28 | for proxy in proxy_list: 29 | proxy_list_addr.append(proxy.get_address()) 30 | self.assertEqual(proxy_list_addr, free_proxy_expected) 31 | 32 | def test_ProxyForEuParser(self): 33 | with HTTMock(proxy_for_eu_mock): 34 | proxy_provider = ProxyForEuParser('ProxyForEU', 'http://proxyfor.eu/geo.php', 1.0) 35 | proxy_list = proxy_provider.parse_proxyList() 36 | proxy_list_addr = [] 37 | for proxy in proxy_list: 38 | proxy_list_addr.append(proxy.get_address()) 39 | self.assertEqual(proxy_list_addr, proxy_for_eu_expected) 40 | 41 | def test_RebroWeeblyParser(self): 42 | with HTTMock(rebro_weebly_mock): 43 | proxy_provider = RebroWeeblyParser('ReBro', 'http://rebro.weebly.com') 44 | proxy_list = proxy_provider.parse_proxyList() 45 | proxy_list_addr = [] 46 | for proxy in proxy_list: 47 | proxy_list_addr.append(proxy.get_address()) 48 | self.assertEqual(proxy_list_addr, rebro_weebly_expected) 49 | 50 | def test_PremProxyParser(self): 51 | with HTTMock(prem_js_mock, prem_mock): 52 | proxy_provider = PremProxyParser('Prem', 'https://www.premproxy.com') 53 | proxy_list = proxy_provider.parse_proxyList() 54 | proxy_list_addr = [] 55 | for proxy in proxy_list: 56 | proxy_list_addr.append(proxy.get_address()) 57 | for item in prem_expected: 58 | self.assertTrue(item in proxy_list_addr) 59 | 60 | def test_SslProxyParser(self): 61 | with HTTMock(sslproxy_mock): 62 | proxy_provider = SslProxyParser('SslProxy', 'https://www.sslproxies.org/') 63 | proxy_list = proxy_provider.parse_proxyList() 64 | proxy_list_addr = [] 65 | for proxy in proxy_list: 66 | proxy_list_addr.append(proxy.get_address()) 67 | self.assertEqual(proxy_list_addr, sslproxy_expected) 68 | 69 | if __name__ == '__main__': 70 | unittest.main() 71 | -------------------------------------------------------------------------------- /tests/test_proxyList_args.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import os 4 | import sys 5 | import unittest 6 | 7 | from http_request_randomizer.requests.runners.proxyList import ProxyList, create_parser 8 | 9 | sys.path.insert(0, os.path.abspath('.')) 10 | 11 | __author__ = 'pgaref' 12 | 13 | 14 | class ParserTest(unittest.TestCase): 15 | def setUp(self): 16 | self.proxyList = ProxyList() 17 | self.parser = create_parser(self.proxyList) 18 | 19 | def test_parser_source(self): 20 | parsed = self.parser.parse_args(['-s', 'freeproxy']) 21 | self.assertEqual(parsed.source, ['freeproxy']) 22 | 23 | parsed = self.parser.parse_args(['-s', 'freeproxy', 'all', 'proxyforeu']) 24 | self.assertEqual(parsed.source, ['freeproxy', 'all', 'proxyforeu']) 25 | 26 | with self.assertRaises(SystemExit): 27 | self.parser.parse_args(['-s']) 28 | self.parser.parse_args(['-s', 'blah']) 29 | 30 | def test_parser_output(self): 31 | # default 32 | parsed = self.parser.parse_args(['-s', 'all']) 33 | self.assertEqual(parsed.outfile, sys.stdout) 34 | 35 | parsed = self.parser.parse_args(['-s', 'all', '-o', 'out.txt']) 36 | self.assertEqual(parsed.outfile.name, 'out.txt') 37 | 38 | def test_parser_timeout(self): 39 | # default 40 | parsed = self.parser.parse_args(['-s', 'all']) 41 | self.assertEqual(parsed.timeout, 1) 42 | 43 | parsed = self.parser.parse_args(['-s', 'all', '-t', '20']) 44 | self.assertEqual(parsed.timeout, 20) 45 | 46 | with self.assertRaises(SystemExit): 47 | self.parser.parse_args(['-s', 'all', '-t', 't']) 48 | 49 | def test_parser_bandwidth(self): 50 | # default 51 | parsed = self.parser.parse_args(['-s', 'all']) 52 | self.assertEqual(parsed.bandwidth, 10) 53 | 54 | parsed = self.parser.parse_args(['-s', 'all', '-bw', '500']) 55 | self.assertEqual(parsed.bandwidth, 500) 56 | 57 | with self.assertRaises(SystemExit): 58 | self.parser.parse_args(['-s', 'all', '-bw', 'b']) 59 | 60 | 61 | if __name__ == '__main__': 62 | unittest.main() 63 | -------------------------------------------------------------------------------- /tests/test_proxyObject.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import os 4 | import sys 5 | import unittest 6 | 7 | sys.path.insert(0, os.path.abspath('.')) 8 | 9 | from http_request_randomizer.requests.proxy.ProxyObject import AnonymityLevel, ProxyObject 10 | 11 | 12 | class TestProxyObject(unittest.TestCase): 13 | 14 | def test_ProxyObjectSimple(self): 15 | src = 'Test' 16 | ip = '127.0.0.1' 17 | port = '8080' 18 | po = ProxyObject(src, ip, port, AnonymityLevel.UNKNOWN) 19 | self.assertEqual(po.source, src) 20 | self.assertEqual(po.ip, ip) 21 | self.assertEqual(po.port, port) 22 | self.assertEqual(po.get_address(), "{0}:{1}".format(ip, port)) 23 | 24 | def test_AnonymityLevels(self): 25 | self.assertTrue(AnonymityLevel.UNKNOWN.value == 0) 26 | self.assertTrue(AnonymityLevel.TRANSPARENT.value == 1) 27 | self.assertTrue(AnonymityLevel.ANONYMOUS.value == 2) 28 | self.assertTrue(AnonymityLevel.ELITE.value == 3) 29 | self.assertTrue(len(AnonymityLevel) == 4) # Enum values 30 | 31 | def test_UnknownEnumLevel(self): 32 | self.assertEqual(AnonymityLevel.UNKNOWN, AnonymityLevel.get('unknown')) 33 | self.assertEqual(AnonymityLevel.UNKNOWN, AnonymityLevel.get('none')) 34 | self.assertEqual(AnonymityLevel.UNKNOWN, AnonymityLevel.get('bad')) 35 | self.assertEqual(AnonymityLevel.UNKNOWN, AnonymityLevel.get('')) 36 | 
37 |         self.assertEqual(AnonymityLevel.UNKNOWN, AnonymityLevel.get('??'))
38 | 
39 |     def test_TransparentEnumLevel(self):
40 |         self.assertEqual(AnonymityLevel.TRANSPARENT, AnonymityLevel.get('transparent'))
41 |         self.assertEqual(AnonymityLevel.TRANSPARENT, AnonymityLevel.get('transparent proxy'))
42 |         self.assertEqual(AnonymityLevel.TRANSPARENT, AnonymityLevel.get('LOW'))
43 | 
44 |     def test_AnonymousEnumLevel(self):
45 |         self.assertEqual(AnonymityLevel.ANONYMOUS, AnonymityLevel.get('anonymous'))
46 |         self.assertEqual(AnonymityLevel.ANONYMOUS, AnonymityLevel.get('anonymous proxy'))
47 |         self.assertEqual(AnonymityLevel.ANONYMOUS, AnonymityLevel.get('high-anonymous'))
48 | 
49 |     def test_EliteEnumLevel(self):
50 |         self.assertEqual(AnonymityLevel.ELITE, AnonymityLevel.get('elite'))
51 |         self.assertEqual(AnonymityLevel.ELITE, AnonymityLevel.get('elite proxy'))
52 |         self.assertEqual(AnonymityLevel.ELITE, AnonymityLevel.get('HIGH'))
53 |         self.assertEqual(AnonymityLevel.ELITE, AnonymityLevel.get('Elite & Anonymous'))
54 | 
55 | 
56 | if __name__ == '__main__':
57 |     unittest.main()
58 | 
--------------------------------------------------------------------------------
/tests/test_useragent.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | 
3 | import unittest
4 | import sys
5 | import os
6 | 
7 | sys.path.insert(0, os.path.abspath('.'))
8 | 
9 | from http_request_randomizer.requests.useragent.userAgent import UserAgentManager
10 | 
11 | __author__ = 'pgaref'
12 | 
13 | 
14 | class TestUserAgentManager(unittest.TestCase):
15 |     def setUp(self):
16 |         agentsfile = os.path.join(os.path.dirname(__file__), '../http_request_randomizer/requests/data/user_agents.txt')
17 |         self.uafile = UserAgentManager(file=agentsfile)
18 |         self.uafake = UserAgentManager()
19 | 
20 |     def test_agent_size(self):
21 |         self.assertTrue(self.uafile.get_len_user_agent() >= 899)
22 |         self.assertIsNone(self.uafake.get_len_user_agent())
23 | 
24 |     def test_first_user_agent(self):
25 |         expected = "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0"
26 |         self.assertEqual(self.uafile.get_first_user_agent(), expected)
27 |         self.assertIsNone(self.uafake.get_first_user_agent())
28 | 
29 |     def test_last_user_agent(self):
30 |         expected = "Opera/9.80 (Windows NT 5.1; U; ru) Presto/2.2.15 Version/10.0"
31 |         self.assertEqual(self.uafile.get_last_user_agent(), expected)
32 |         self.assertIsNone(self.uafake.get_last_user_agent())
33 | 
34 |     def test_random_user_agent(self):
35 |         # Two independent random draws colliding on every one of 100 tries
36 |         # would mean the selection is not random at all.
37 |         count = 0
38 |         for _ in range(100):
39 |             if self.uafile.get_random_user_agent() == self.uafile.get_random_user_agent():
40 |                 count += 1
41 |         self.assertNotEqual(count, 100)
42 | 
43 | 
44 | if __name__ == '__main__':
45 |     unittest.main()
46 | 
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | # Tox (http://tox.testrun.org/) is a tool for running tests
2 | # in multiple virtualenvs. This configuration file will run the
3 | # test suite on all supported python versions. To use it, "pip install tox"
4 | # and then run "tox" from this directory.
5 | 
6 | [tox]
7 | envlist = py36, py37, py38, py39, pypy3
8 | 
9 | [gh-actions]
10 | python =
11 |     3.6: py36
12 |     3.7: py37
13 |     3.8: py38, docs
14 |     3.9: py39, lint, manifest
15 |     pypy3: pypy3
16 | 
17 | [testenv]
18 | deps =
19 |     requests
20 |     pytest
21 |     coverage
22 |     pytest-cov
23 | setenv =
24 |     PYTHONWARNINGS=all
25 | commands = py.test tests --doctest-modules
26 | 
27 | [pytest]
28 | addopts = --doctest-modules --ignore=setup.py
29 | python_files = *.py
30 | python_functions = test_
31 | norecursedirs = .tox .git .eggs
32 | 
33 | [testenv:pypy3]
34 | skip_install = true
35 | commands = python setup.py install
36 |     py.test tests --doctest-modules
37 | 
38 | [testenv:py39]
39 | basepython = python3.9
40 | commands =
41 |     py.test tests --doctest-modules --cov=.
42 | 
43 | [testenv:pyDevVerbose]
44 | basepython = python3.9
45 | commands =
46 |     py.test tests --doctest-modules --cov=. --cov-report term
--------------------------------------------------------------------------------
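Taken together, the tests above pin down the package's public surface. The sketch below is inferred solely from those tests (the UserAgentManager constructor and getters, the parser constructors, and ProxyObject's accessors); treat it as illustrative rather than official documentation, since provider URLs and return shapes may differ between versions:

    from http_request_randomizer.requests.parsers.FreeProxyParser import FreeProxyParser
    from http_request_randomizer.requests.useragent.userAgent import UserAgentManager

    # Without a file argument the manager holds no agents and its getters
    # return None (see test_agent_size); point it at the bundled list instead.
    ua = UserAgentManager(file='http_request_randomizer/requests/data/user_agents.txt')
    print(ua.get_random_user_agent())

    # Each parser takes an id string and a provider URL, and parse_proxyList()
    # produces ProxyObject instances (see test_FreeProxyParser). Live results
    # will vary, unlike the mocked responses used in the tests.
    parser = FreeProxyParser('FreeProxy', 'http://free-proxy-list.net')
    proxies = list(parser.parse_proxyList())
    for proxy in proxies[:5]:
        print(proxy.source, proxy.get_address())  # get_address() yields "ip:port"

To reproduce the CI matrix locally, the tox.ini header applies as written: pip install tox, then run tox from the repository root (or a single env, e.g. tox -e py39).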