├── .github └── workflows │ ├── ci.yml │ └── publish.yml ├── .gitignore ├── .readthedocs.yml ├── .travis.yml ├── CHANGES.rst ├── LICENSE ├── LICENSE.APACHE2 ├── LICENSE.MIT ├── MANIFEST.in ├── README.rst ├── ci ├── deploy.sh ├── install.sh ├── run.sh └── upload_coverage.sh ├── codecov.yml ├── demo ├── async-demo.py └── sync-demo.py ├── dev-requirements.txt ├── docs ├── Makefile ├── _static │ └── hackrtd.css ├── _templates │ └── layout.html ├── advanced-usage.rst ├── conf.py ├── contributing.rst ├── index.rst ├── make.bat ├── reference │ ├── hip.util.rst │ └── index.rst ├── requirements.txt └── user-guide.rst ├── dummyserver ├── __init__.py ├── certs │ ├── README.rst │ ├── cacert.key │ ├── cacert.no_san.pem │ ├── cacert.pem │ ├── client_bad.pem │ ├── client_intermediate.pem │ ├── client_password.key │ ├── server.combined.pem │ ├── server.crt │ ├── server.csr │ ├── server.ip_san.crt │ ├── server.ipv6_san.crt │ ├── server.ipv6addr.crt │ ├── server.ipv6addr.key │ ├── server.key │ ├── server.key.org │ ├── server.no_san.crt │ ├── server.no_san.csr │ └── server_password.key ├── handlers.py ├── proxy.py ├── server.py └── testcase.py ├── notes.org ├── noxfile.py ├── pyproject.toml ├── setup.cfg ├── setup.py ├── src └── ahip │ ├── __init__.py │ ├── _backends │ ├── __init__.py │ ├── _common.py │ ├── _loader.py │ ├── anyio_backend.py │ ├── async_backend.py │ ├── sync_backend.py │ └── trio_backend.py │ ├── _collections.py │ ├── backends.py │ ├── base.py │ ├── connection.py │ ├── connectionpool.py │ ├── contrib │ ├── __init__.py │ ├── _securetransport │ │ ├── __init__.py │ │ ├── bindings.py │ │ └── low_level.py │ ├── securetransport.py │ └── socks.py │ ├── exceptions.py │ ├── fields.py │ ├── filepost.py │ ├── packages │ ├── __init__.py │ ├── six.py │ └── ssl_match_hostname │ │ ├── __init__.py │ │ └── _implementation.py │ ├── poolmanager.py │ ├── request.py │ ├── response.py │ └── util │ ├── __init__.py │ ├── connection.py │ ├── queue.py │ ├── request.py │ ├── retry.py │ ├── ssl_.py │ ├── timeout.py │ ├── unasync.py │ ├── url.py │ └── wait.py └── test ├── __init__.py ├── async └── test_backends.py ├── benchmark.py ├── conftest.py ├── contrib ├── __init__.py ├── test_securetransport.py └── test_socks.py ├── port_helpers.py ├── socketpair_helper.py ├── test_backends.py ├── test_collections.py ├── test_compatibility.py ├── test_connection.py ├── test_connectionpool.py ├── test_exceptions.py ├── test_fields.py ├── test_filepost.py ├── test_no_ssl.py ├── test_poolmanager.py ├── test_proxymanager.py ├── test_queue_monkeypatch.py ├── test_response.py ├── test_retry.py ├── test_ssl.py ├── test_sync_connection.py ├── test_util.py ├── test_wait.py └── with_dummyserver ├── __init__.py ├── async ├── __init__.py └── test_poolmanager.py ├── async_only ├── __init__.py └── test_poolmanager.py ├── conftest.py ├── test_chunked_transfer.py ├── test_connectionpool.py ├── test_https.py ├── test_no_ssl.py ├── test_proxy_poolmanager.py └── test_socketlevel.py /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI 3 | 4 | on: [push, pull_request] 5 | 6 | jobs: 7 | package: 8 | runs-on: ubuntu-latest 9 | 10 | steps: 11 | - name: Checkout Repository 12 | uses: actions/checkout@v2 13 | - name: Set up Python 3.7 14 | uses: actions/setup-python@v2 15 | with: 16 | python-version: 3.7 17 | - name: Install dependencies 18 | run: | 19 | python3.7 -m pip install unasync setuptools wheel twine 20 | - name: Build packages 21 | run: | 22 | python3.7 setup.py sdist 
bdist_wheel 23 | - name: Check packages 24 | run: | 25 | set -exo pipefail; 26 | if [ $(python3.7 -m twine check dist/* | grep -c 'warning') != 0 ]; then exit 1; fi 27 | 28 | lint: 29 | runs-on: ubuntu-latest 30 | 31 | steps: 32 | - name: Checkout Repository 33 | uses: actions/checkout@v2 34 | - name: Set up Python 3.7 35 | uses: actions/setup-python@v2 36 | with: 37 | python-version: 3.7 38 | - name: Install dependencies 39 | run: | 40 | python3.7 -m pip install nox 41 | - name: Lint the code 42 | run: nox -s lint 43 | 44 | docs: 45 | runs-on: ubuntu-latest 46 | 47 | steps: 48 | - name: Checkout Repository 49 | uses: actions/checkout@v2 50 | - name: Set up Python 3.7 51 | uses: actions/setup-python@v2 52 | with: 53 | python-version: 3.7 54 | - name: Install dependencies 55 | run: | 56 | python3.7 -m pip install nox 57 | - name: Build the docs 58 | run: nox -s docs 59 | 60 | ubuntu: 61 | runs-on: ubuntu-latest 62 | 63 | strategy: 64 | fail-fast: false 65 | matrix: 66 | python-version: [3.5, 3.6, 3.7, 3.8] 67 | 68 | steps: 69 | - name: Checkout Repository 70 | uses: actions/checkout@v2 71 | - name: Set Up Python 3.7 to run nox 72 | uses: actions/setup-python@v2 73 | with: 74 | python-version: 3.7 75 | - name: Set Up Python - ${{ matrix.python-version }} 76 | if: matrix.python-version != '3.7' 77 | uses: actions/setup-python@v2 78 | with: 79 | python-version: ${{ matrix.python-version }} 80 | - name: Install Dependencies 81 | run: | 82 | python3.7 -m pip install nox unasync 83 | - name: Run Tests 84 | run: | 85 | nox -s test-${{ matrix.python-version }} 86 | - name: Upload Coverage 87 | run: ./ci/upload_coverage.sh 88 | env: 89 | JOB_NAME: "ubuntu (${{ matrix.python-version }})" 90 | 91 | macOS: 92 | runs-on: macos-latest 93 | 94 | strategy: 95 | fail-fast: false 96 | matrix: 97 | python-version: [3.5, 3.6, 3.7, 3.8] 98 | 99 | steps: 100 | - name: Checkout Repository 101 | uses: actions/checkout@v2 102 | - name: Set Up Python 3.7 to run nox 103 | uses: actions/setup-python@v2 104 | with: 105 | python-version: 3.7 106 | - name: Set Up Python - ${{ matrix.python-version }} 107 | if: matrix.python-version != '3.7' 108 | uses: actions/setup-python@v2 109 | with: 110 | python-version: ${{ matrix.python-version }} 111 | - name: Install Dependencies 112 | run: | 113 | python3.7 -m pip install nox unasync 114 | - name: Run Tests 115 | run: | 116 | nox -s test-${{ matrix.python-version }} 117 | - name: Upload Coverage 118 | run: ./ci/upload_coverage.sh 119 | env: 120 | JOB_NAME: "macOS (${{ matrix.python-version }})" 121 | 122 | Windows: 123 | runs-on: windows-latest 124 | 125 | strategy: 126 | fail-fast: false 127 | matrix: 128 | python-version: [3.5, 3.6, 3.7, 3.8] 129 | 130 | steps: 131 | - name: Checkout Repository 132 | uses: actions/checkout@v2 133 | - name: Set Up Python 3.7 to run nox 134 | uses: actions/setup-python@v2 135 | with: 136 | python-version: 3.7 137 | - name: Set Up Python - ${{ matrix.python-version }} 138 | if: matrix.python-version != '3.7' 139 | uses: actions/setup-python@v2 140 | with: 141 | python-version: ${{ matrix.python-version }} 142 | - name: Install dependencies (Windows) 143 | run: | 144 | # Work around https://github.com/theacodes/nox/issues/250 145 | Remove-Item C:\ProgramData\Chocolatey\bin\python2.7.exe 146 | py -3.7 -m pip install nox unasync 147 | - name: Run Tests 148 | run: | 149 | nox -s test-${{ matrix.python-version }} 150 | - name: Upload Coverage 151 | run: ./ci/upload_coverage.sh 152 | shell: bash 153 | env: 154 | JOB_NAME: "Windows (${{ 
matrix.python-version }})" 155 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Publish To PYPI 3 | 4 | on: 5 | release: 6 | types: [created] 7 | 8 | jobs: 9 | deploy: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v1 13 | - name: Set up Python 14 | uses: actions/setup-python@v1 15 | with: 16 | python-version: "3.x" 17 | - name: Install dependencies 18 | run: | 19 | python -m pip install --upgrade pip 20 | pip install setuptools wheel twine 21 | - name: Build and publish 22 | env: 23 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 24 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 25 | run: | 26 | python setup.py sdist bdist_wheel 27 | twine upload dist/* 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Project-specific generated files 2 | docs/build/ 3 | src/hip/ 4 | test/with_dummyserver/sync/ 5 | 6 | bench/results/ 7 | bench/env/ 8 | bench/trio/ 9 | 10 | # Byte-compiled / optimized / DLL files / editor temp files 11 | __pycache__/ 12 | *.py[cod] 13 | *~ 14 | \#* 15 | .#* 16 | *.swp 17 | 18 | # C extensions 19 | *.so 20 | 21 | # Distribution / packaging 22 | .Python 23 | /build/ 24 | /develop-eggs/ 25 | /dist/ 26 | /eggs/ 27 | /lib/ 28 | /lib64/ 29 | /parts/ 30 | /sdist/ 31 | /var/ 32 | *.egg-info/ 33 | .installed.cfg 34 | *.egg 35 | /.pybuild 36 | 37 | # Installer logs 38 | pip-log.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .nox 43 | .tox/ 44 | .venv/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | .pytest_cache/ 49 | .mypy_cache/ 50 | nosetests.xml 51 | coverage.xml 52 | 53 | # Translations 54 | *.mo 55 | 56 | # Mr Developer 57 | .mr.developer.cfg 58 | .project 59 | .pydevproject 60 | 61 | # Rope 62 | .ropeproject 63 | 64 | # Django stuff: 65 | *.log 66 | *.pot 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | configuration: docs/conf.py 5 | 6 | formats: all 7 | 8 | python: 9 | version: 3.7 10 | install: 11 | - requirements: docs/requirements.txt 12 | - method: pip 13 | path: . 14 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | os: linux 3 | dist: xenial 4 | 5 | before_install: 6 | - env 7 | - openssl version 8 | - python -c "import ssl; print(ssl.OPENSSL_VERSION)" 9 | 10 | install: 11 | - ./ci/install.sh 12 | 13 | script: 14 | - ./ci/run.sh 15 | - ./ci/upload_coverage.sh 16 | 17 | cache: 18 | directories: 19 | - ${HOME}/.cache 20 | 21 | notifications: 22 | email: false 23 | 24 | env: 25 | global: 26 | - PYTHONWARNINGS=always::DeprecationWarning 27 | 28 | - PYPI_USERNAME=hip 29 | # PYPI_PASSWORD is set in Travis control panel. 
30 | 31 | jobs: 32 | include: 33 | # Unit tests 34 | - python: 3.5 35 | env: NOX_SESSION=test-3.5 36 | - python: 3.6 37 | env: NOX_SESSION=test-3.6 38 | - python: 3.7 39 | env: NOX_SESSION=test-3.7 40 | - python: 3.8-dev 41 | env: NOX_SESSION=test-3.8 42 | - python: pypy 43 | env: NOX_SESSION=test-pypy 44 | - python: pypy3 45 | env: NOX_SESSION=test-pypy3 46 | 47 | - python: 2.7 48 | env: NOX_SESSION=google_brotli-2.7 49 | - python: 3.7 50 | env: NOX_SESSION=google_brotli-3.7 51 | 52 | stages: 53 | - name: test 54 | if: tag IS blank 55 | 56 | # Run integration tests for release candidates 57 | - name: integration 58 | if: type = pull_request AND head_branch =~ ^release-[\d.]+$ AND tag IS blank 59 | 60 | # Deploy on any tags 61 | - name: deploy 62 | if: tag IS present AND tag =~ /^(\d+\.\d+(?:.\d+)?)$/ AND repo = python-trio/hip 63 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This software is made available under the terms of *either* of the 2 | licenses found in LICENSE.APACHE2 or LICENSE.MIT. Contributions to 3 | Hip are made under the terms of *both* these licenses. 4 | 5 | This software contains work from the urllib3 project, used under the following license: 6 | 7 | MIT License 8 | 9 | Copyright (c) 2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt) 10 | 11 | Permission is hereby granted, free of charge, to any person obtaining a copy 12 | of this software and associated documentation files (the "Software"), to deal 13 | in the Software without restriction, including without limitation the rights 14 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 15 | copies of the Software, and to permit persons to whom the Software is 16 | furnished to do so, subject to the following conditions: 17 | 18 | The above copyright notice and this permission notice shall be included in all 19 | copies or substantial portions of the Software. 20 | 21 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 22 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 23 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 24 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 25 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 26 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 27 | SOFTWARE. 28 | -------------------------------------------------------------------------------- /LICENSE.MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 
13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst CHANGES.rst LICENSE LICENSE.MIT LICENSE.APACHE2 CONTRIBUTORS.txt dev-requirements.txt Makefile pyproject.toml 2 | recursive-include dummyserver * 3 | recursive-include test * 4 | recursive-include docs * 5 | recursive-exclude docs/_build * 6 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Hip 2 | === 3 | 4 | .. image:: https://travis-ci.org/python-trio/hip.svg?branch=master 5 | :alt: Build status on Travis 6 | :target: https://travis-ci.org/python-trio/hip 7 | 8 | .. image:: https://github.com/python-trio/hip/workflows/CI/badge.svg 9 | :alt: Build status on GitHub Actions 10 | :target: https://github.com/python-trio/hip/actions 11 | 12 | .. image:: https://codecov.io/gh/python-trio/hip/branch/master/graph/badge.svg 13 | :alt: Coverage Status 14 | :target: https://codecov.io/gh/python-trio/hip 15 | 16 | .. image:: https://img.shields.io/pypi/v/hip.svg?maxAge=86400 17 | :alt: PyPI version 18 | :target: https://pypi.org/project/hip 19 | 20 | .. image:: https://badges.gitter.im/python-trio/hip.svg 21 | :alt: Gitter 22 | :target: https://gitter.im/python-trio/hip 23 | 24 | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg 25 | :target: https://github.com/psf/black 26 | 27 | Hip is a new Python HTTP client for everybody. It supports synchronous Python (just like requests does), but also Trio, asyncio and Curio. 28 | 29 | .. important:: Hip is still in its early days, use at your own risk! In particular, the async support is still experimental and untested. 30 | 31 | Hip is robust as it is based on urllib3 and uses its extensive test suite that was refined over the years. It also shares most urllib3 features: 32 | 33 | - Thread safety. 34 | - Connection pooling. 35 | - Client-side SSL/TLS verification. 36 | - File uploads with multipart encoding. 37 | - Helpers for retrying requests and dealing with HTTP redirects. 38 | - Support for gzip, deflate, and brotli encoding. 39 | - Proxy support for HTTP. 40 | - 100% test coverage. 41 | 42 | However, we currently do not support SOCKS proxies nor the pyOpenSSL and SecureTransport TLS backends. 43 | 44 | Sample code 45 | ----------- 46 | 47 | Hip is powerful and easy to use: 48 | 49 | .. code-block:: python 50 | 51 | >>> import hip 52 | >>> http = hip.PoolManager() 53 | >>> r = http.request('GET', 'http://httpbin.org/robots.txt') 54 | >>> r.status 55 | 200 56 | >>> r.data 57 | 'User-agent: *\nDisallow: /deny\n' 58 | 59 | It also supports async/await: 60 | 61 | .. 
code-block:: python 62 | 63 | import ahip 64 | import trio 65 | 66 | async def main(): 67 | with ahip.PoolManager() as http: 68 | r = await http.request("GET", "http://httpbin.org/uuid") 69 | print("Status:", r.status) # 200 70 | print("Data:", r.data) # a JSON document containing a random UUID 71 | 72 | trio.run(main) 73 | 74 | Installing 75 | ---------- 76 | 77 | Hip can be installed with `pip <https://pip.pypa.io>`_:: 78 | 79 | $ python -m pip install hip 80 | 81 | Alternatively, you can grab the latest source code from `GitHub <https://github.com/python-trio/hip>`_:: 82 | 83 | $ python -m pip install git+https://github.com/python-trio/hip 84 | 85 | - OR - 86 | 87 | $ git clone git://github.com/python-trio/hip.git 88 | $ cd hip && python setup.py install 89 | 90 | Documentation 91 | ------------- 92 | 93 | Hip will soon have usage and reference documentation at `hip.readthedocs.io <https://hip.readthedocs.io>`_. 94 | 95 | 96 | Contributing 97 | ------------ 98 | 99 | Hip happily accepts contributions. Please see our 100 | `contributing documentation `_ 101 | for some tips on getting started. 102 | -------------------------------------------------------------------------------- /ci/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -exo pipefail 4 | 5 | python3 -m pip install --upgrade twine wheel 6 | python3 setup.py sdist bdist_wheel 7 | python3 -m twine upload dist/* -u $PYPI_USERNAME -p $PYPI_PASSWORD --skip-existing 8 | -------------------------------------------------------------------------------- /ci/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -exo pipefail 4 | 5 | # Linux Setup 6 | # Even when testing on Python 2, we need Python 3 for Nox. This detects if 7 | # we're in one of the Travis Python 2 sessions and sets up the Python 3 install 8 | # for Nox. 9 | if ! python3 -m pip --version; then 10 | curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py 11 | sudo python3 get-pip.py 12 | # https://github.com/theacodes/nox/issues/328 13 | sudo python3 -m pip install nox==2019.11.9 unasync 14 | else 15 | # We're not in "dual Python" mode, so we can just install Nox normally. 
16 | python3 -m pip install nox unasync 17 | fi 18 | -------------------------------------------------------------------------------- /ci/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -exo pipefail 4 | 5 | nox -s "${NOX_SESSION}" 6 | -------------------------------------------------------------------------------- /ci/upload_coverage.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -exo pipefail 4 | 5 | # Cribbed from Trio's ci.sh 6 | function curl-harder() { 7 | for BACKOFF in 0 1 2 4 8 15 15 15 15; do 8 | sleep $BACKOFF 9 | if curl -fL --connect-timeout 5 "$@"; then 10 | return 0 11 | fi 12 | done 13 | return 1 14 | } 15 | 16 | if [ "$JOB_NAME" = "" ]; then 17 | JOB_NAME="${TRAVIS_OS_NAME}-${TRAVIS_PYTHON_VERSION:-unknown}" 18 | fi 19 | 20 | curl-harder -o codecov.sh https://codecov.io/bash 21 | bash codecov.sh -f coverage.xml -n "${JOB_NAME}" 22 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | patch: 4 | default: 5 | target: '100' 6 | project: 7 | default: 8 | target: '40' 9 | -------------------------------------------------------------------------------- /demo/async-demo.py: -------------------------------------------------------------------------------- 1 | # This should work on python 3.6+ 2 | 3 | import ahip 4 | 5 | URL = "http://httpbin.org/uuid" 6 | 7 | async def main(backend=None): 8 | with ahip.PoolManager(backend=backend) as http: 9 | print("URL:", URL) 10 | r = await http.request("GET", URL, preload_content=False) 11 | print("Status:", r.status) 12 | print("Data:", await r.read()) 13 | 14 | print("--- Trio ---") 15 | import trio 16 | trio.run(main) 17 | 18 | print("\n--- asyncio (via AnyIO) ---") 19 | import asyncio 20 | loop = asyncio.get_event_loop() 21 | loop.run_until_complete(main()) 22 | loop.close() 23 | 24 | print("\n--- Curio (via AnyIO) ---") 25 | import curio 26 | curio.run(main) 27 | -------------------------------------------------------------------------------- /demo/sync-demo.py: -------------------------------------------------------------------------------- 1 | # This should work on all versions of Python 2 and 3 2 | 3 | from __future__ import print_function 4 | 5 | import hip 6 | 7 | URL = "http://httpbin.org/uuid" 8 | 9 | print("--- synchronous sockets ---") 10 | with hip.PoolManager() as http: 11 | print("URL:", URL) 12 | r = http.request("GET", URL, preload_content=False) 13 | print("Status:", r.status) 14 | print("Data: {!r}".format(r.data)) 15 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | mock==3.0.5 2 | coverage~=5.1 3 | tornado==5.1.1 4 | PySocks==1.7.1 5 | # https://github.com/Anorov/PySocks/issues/131 6 | win-inet-pton==1.1.0 7 | pytest==4.6.6 8 | pytest-timeout==1.3.3 9 | pytest-cov==2.7.1 10 | h11==0.8.0 11 | cryptography==2.8 12 | flaky==3.6.1 13 | trustme==0.5.3 14 | 15 | # https://github.com/GoogleCloudPlatform/python-repo-tools/issues/23 16 | pylint<2.0;python_version<="2.7" 17 | gcp-devrel-py-tools 18 | 19 | # optional dependencies, only intended for use with Python 3.6+ 20 | curio==0.9; python_version >= "3.6" 21 | trio==0.13.0; python_version >= "3.6" 22 | anyio==1.3.0; python_version >= "3.6" 23 | 
pytest-trio==0.5.2;python_version>="3.6" 24 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = '-W' 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " singlehtml to make a single large HTML file" 22 | @echo " pickle to make pickle files" 23 | @echo " json to make JSON files" 24 | @echo " htmlhelp to make HTML files and a HTML help project" 25 | @echo " qthelp to make HTML files and a qthelp project" 26 | @echo " devhelp to make HTML files and a Devhelp project" 27 | @echo " epub to make an epub" 28 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 29 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 30 | @echo " text to make text files" 31 | @echo " man to make manual pages" 32 | @echo " changes to make an overview of all changed/added/deprecated items" 33 | @echo " linkcheck to check all external links for integrity" 34 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 35 | 36 | clean: 37 | -rm -rf $(BUILDDIR)/* 38 | 39 | html: 40 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 41 | @echo 42 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 43 | 44 | dirhtml: 45 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 48 | 49 | singlehtml: 50 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 51 | @echo 52 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 53 | 54 | pickle: 55 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 56 | @echo 57 | @echo "Build finished; now you can process the pickle files." 58 | 59 | json: 60 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 61 | @echo 62 | @echo "Build finished; now you can process the JSON files." 63 | 64 | htmlhelp: 65 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 66 | @echo 67 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 68 | ".hhp project file in $(BUILDDIR)/htmlhelp." 69 | 70 | qthelp: 71 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 72 | @echo 73 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 74 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 75 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/hip.qhcp" 76 | @echo "To view the help file:" 77 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/hip.qhc" 78 | 79 | devhelp: 80 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 81 | @echo 82 | @echo "Build finished." 
83 | @echo "To view the help file:" 84 | @echo "# mkdir -p $$HOME/.local/share/devhelp/hip" 85 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/hip" 86 | @echo "# devhelp" 87 | 88 | epub: 89 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 90 | @echo 91 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 92 | 93 | latex: 94 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 95 | @echo 96 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 97 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 98 | "(use \`make latexpdf' here to do that automatically)." 99 | 100 | latexpdf: 101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 102 | @echo "Running LaTeX files through pdflatex..." 103 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 104 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 105 | 106 | text: 107 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 108 | @echo 109 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 110 | 111 | man: 112 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 113 | @echo 114 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 115 | 116 | changes: 117 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 118 | @echo 119 | @echo "The overview file is in $(BUILDDIR)/changes." 120 | 121 | linkcheck: 122 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 123 | @echo 124 | @echo "Link check complete; look for any errors in the above output " \ 125 | "or in $(BUILDDIR)/linkcheck/output.txt." 126 | 127 | doctest: 128 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 129 | @echo "Testing of doctests in the sources finished, look at the " \ 130 | "results in $(BUILDDIR)/doctest/output.txt." 131 | -------------------------------------------------------------------------------- /docs/_static/hackrtd.css: -------------------------------------------------------------------------------- 1 | /* Temporary hack to work around bug in rtd theme 2.0 through 2.4 2 | See https://github.com/rtfd/sphinx_rtd_theme/pull/382 3 | */ 4 | pre { 5 | line-height: normal !important; 6 | } 7 | 8 | /* Make .. deprecation:: blocks visible 9 | * (by default they're entirely unstyled) 10 | */ 11 | .deprecated { 12 | background-color: #ffe13b; 13 | } 14 | 15 | /* Add a snakey triskelion ornament to
<hr>s 16 | * https://stackoverflow.com/questions/8862344/css-hr-with-ornament/18541258#18541258 17 | * but only do it to <hr>s in the content box, b/c the RTD popup control panel 18 | * thingummy also has an <hr>
in it, and putting the ornament on that looks 19 | * *really weird*. (In particular, the background color is wrong.) 20 | */ 21 | .rst-content hr:after { 22 | /* This .svg gets displayed on top of the middle of the hrule. It has a box 23 | * behind the logo that's colored to match the RTD theme body background 24 | * color (#fcfcfc), which hides the middle part of the hrule to make it 25 | * look like there's a gap in it. The size of the box determines the size 26 | * of the gap. 27 | */ 28 | background: url('ornament.svg') no-repeat top center; 29 | background-size: contain; 30 | content: ""; 31 | display: block; 32 | height: 30px; 33 | position: relative; 34 | top: -15px; 35 | } 36 | 37 | /* Hacks to make the upper-left logo area look nicer */ 38 | 39 | .wy-side-nav-search { 40 | /* Lighter background color to match logo */ 41 | background-color: #d2e7fa !important; 42 | } 43 | 44 | .wy-side-nav-search > a { 45 | color: #306998 !important; 46 | } 47 | 48 | .wy-side-nav-search > a.logo { 49 | display: block !important; 50 | padding-bottom: 0.809em !important; 51 | } 52 | 53 | .wy-side-nav-search > a img.logo { 54 | display: inline !important; 55 | padding: 0 !important; 56 | } 57 | 58 | .trio-version { 59 | display: inline; 60 | /* I *cannot* figure out how to get the version text vertically centered 61 | on the logo. Oh well... 62 | height: 32px; 63 | line-height: 32px; 64 | */ 65 | } 66 | 67 | .wy-side-nav-search > a { 68 | /* Mostly this is just to simplify things, so we don't have margin/padding 69 | * on both the and the inside it */ 70 | margin: 0 !important; 71 | padding: 0 !important; 72 | } 73 | 74 | /* Get rid of the weird super dark "Contents" label that wastes vertical space 75 | */ 76 | .wy-menu-vertical > p.caption { 77 | display: none !important; 78 | } 79 | 80 | /* I do not like RTD's use of Roboto Slab for headlines. So force it back to 81 | * Lato (or whatever fallback it's using if Lato isn't available for some 82 | * reason). I also experimented with using Montserrat to be extra obnoxiously 83 | * on brand, but honestly you couldn't really tell so there wasn't much point 84 | * in adding page weight for that, and this is going to match the body text 85 | * better. (Montserrat for body text *definitely* didn't look good, alas.) 86 | */ 87 | h1, h2, h3, h4, h5, h6, legend, .rst-content .toctree-wrapper p.caption { 88 | font-family: inherit !important; 89 | } 90 | 91 | /* Get rid of the horrible red for literal content */ 92 | .rst-content tt.literal, .rst-content tt.literal, .rst-content code.literal { 93 | color: #222 !important; 94 | } 95 | 96 | /* Style the "Need help?" 
text just underneath the search box */ 97 | .trio-help-hint { 98 | line-height: normal; 99 | margin-bottom: 0; 100 | /* font-size: 12px; */ 101 | font-size: 80%; /* matches the "Search docs" box */ 102 | padding-top: 6px; 103 | color: #306998; 104 | text-align: center; 105 | } 106 | 107 | a.trio-help-hint, .trio-help-hint a:link, .trio-help-hint a:visited { 108 | color: inherit; 109 | /* Like text-decoration: underline, but with a thinner line */ 110 | text-decoration: none; 111 | border-bottom: 1px solid; 112 | } 113 | -------------------------------------------------------------------------------- /docs/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {# 2 | https://stackoverflow.com/questions/25243482/how-to-add-sphinx-generated-index-to-the-sidebar-when-using-read-the-docs-theme 3 | #} 4 | {% extends "!layout.html" %} 5 | 6 | {% block sidebartitle %} 7 | 18 | 19 | {% include "searchbox.html" %} 20 | 21 |

Need help? Live chat, StackOverflow.

24 | {% endblock %} 25 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import os 5 | import sys 6 | import datetime 7 | 8 | sys.path.insert(0, os.path.abspath("../..")) 9 | 10 | import hip 11 | 12 | # Warn about all references to unknown targets 13 | nitpicky = False # TODO: Switch this to 'True' after interface has solidified. 14 | nitpick_ignore = [] 15 | autodoc_inherit_docstrings = False 16 | default_role = "obj" 17 | 18 | # XX hack the RTD theme until 19 | # https://github.com/rtfd/sphinx_rtd_theme/pull/382 20 | # is shipped (should be in the release after 0.2.4) 21 | # ...note that this has since grown to contain a bunch of other CSS hacks too 22 | # though. 23 | 24 | 25 | def setup(app): 26 | app.add_css_file("hackrtd.css") 27 | 28 | 29 | # -- General configuration ------------------------------------------------ 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = [ 35 | "sphinx.ext.autodoc", 36 | "sphinx.ext.intersphinx", 37 | "sphinx.ext.coverage", 38 | "sphinx.ext.napoleon", 39 | "sphinxcontrib_trio", 40 | ] 41 | 42 | intersphinx_mapping = { 43 | "python": ("https://docs.python.org/3", None), 44 | } 45 | 46 | autodoc_member_order = "bysource" 47 | 48 | # Add any paths that contain templates here, relative to this directory. 49 | templates_path = ["_templates"] 50 | 51 | # The suffix(es) of source filenames. 52 | # You can specify multiple suffix as a list of string: 53 | # 54 | # source_suffix = ['.rst', '.md'] 55 | source_suffix = ".rst" 56 | 57 | # The master toctree document. 58 | master_doc = "index" 59 | 60 | # General information about the project. 61 | project = "Hip" 62 | author = hip.__author__ 63 | copyright = "%d, %s" % (datetime.date.today().year, author) 64 | version = hip.__version__ 65 | release = version 66 | 67 | # List of patterns, relative to source directory, that match files and 68 | # directories to ignore when looking for source files. 69 | # This patterns also effect to html_static_path and html_extra_path 70 | exclude_patterns = [] 71 | 72 | # The name of the Pygments (syntax highlighting) style to use. 73 | pygments_style = "default" 74 | 75 | # If true, `todo` and `todoList` produce output, else they produce nothing. 76 | todo_include_todos = False 77 | 78 | # -- Options for HTML output ---------------------------------------------- 79 | 80 | # We have to set this ourselves, not only because it's useful for local 81 | # testing, but also because if we don't then RTD will throw away our 82 | # html_theme_options. 83 | import sphinx_rtd_theme 84 | 85 | html_theme = "sphinx_rtd_theme" 86 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 87 | 88 | # Theme options are theme-specific and customize the look and feel of a theme 89 | # further. For a list of options available for each theme, see the 90 | # documentation. 91 | html_theme_options = { 92 | # default is 2 93 | # show deeper nesting in the RTD theme's sidebar TOC 94 | # https://stackoverflow.com/questions/27669376/ 95 | # I'm not 100% sure this actually does anything with our current 96 | # versions/settings... 
97 | "navigation_depth": 4, 98 | "logo_only": True, 99 | "prev_next_buttons_location": "both", 100 | } 101 | 102 | # Add any paths that contain custom static files (such as style sheets) here, 103 | # relative to this directory. They are copied after the builtin static files, 104 | # so a file named "default.css" will overwrite the builtin "default.css". 105 | html_static_path = ["_static"] 106 | 107 | 108 | # -- Options for HTMLHelp output ------------------------------------------ 109 | 110 | # Output file base name for HTML help builder. 111 | htmlhelp_basename = "Hipdoc" 112 | 113 | 114 | # -- Options for LaTeX output --------------------------------------------- 115 | 116 | latex_elements = { 117 | # The paper size ('letterpaper' or 'a4paper'). 118 | # 119 | # 'papersize': 'letterpaper', 120 | # The font size ('10pt', '11pt' or '12pt'). 121 | # 122 | # 'pointsize': '10pt', 123 | # Additional stuff for the LaTeX preamble. 124 | # 125 | # 'preamble': '', 126 | # Latex figure (float) alignment 127 | # 128 | # 'figure_align': 'htbp', 129 | } 130 | 131 | # Grouping the document tree into LaTeX files. List of tuples 132 | # (source start file, target name, title, 133 | # author, documentclass [howto, manual, or own class]). 134 | latex_documents = [ 135 | (master_doc, "Hip.tex", "Hip Documentation", author, "manual"), 136 | ] 137 | 138 | 139 | # -- Options for manual page output --------------------------------------- 140 | 141 | # One entry per manual page. List of tuples 142 | # (source start file, name, description, authors, manual section). 143 | man_pages = [(master_doc, "hip", "Hip Documentation", [author], 1)] 144 | 145 | 146 | # -- Options for Texinfo output ------------------------------------------- 147 | 148 | # Grouping the document tree into Texinfo files. List of tuples 149 | # (source start file, target name, title, author, 150 | # dir menu entry, description, category) 151 | texinfo_documents = [ 152 | ( 153 | master_doc, 154 | "Hip", 155 | "Hip Documentation", 156 | author, 157 | "Hip", 158 | "One line description of project.", 159 | "Miscellaneous", 160 | ), 161 | ] 162 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | Hip is a community-maintained project and we happily accept contributions. 5 | 6 | If you wish to add a new feature or fix a bug: 7 | 8 | #. `Check for open issues `_ or open 9 | a fresh issue to start a discussion around a feature idea or a bug. There is 10 | a *Contributor Friendly* tag for issues that should be ideal for people who 11 | are not very familiar with the codebase yet. 12 | #. Fork the `Hip repository on Github `_ 13 | to start making your changes. 14 | #. Write a test which shows that the bug was fixed or that the feature works 15 | as expected. 16 | #. Format your changes with black using command `$ nox -s blacken` and lint your 17 | changes using command `nox -s lint`. 18 | #. Send a pull request and bug the maintainer until it gets merged and published. 
19 | 20 | 21 | Setting up your development environment 22 | --------------------------------------- 23 | 24 | In order to set up the development environment, all you need is 25 | `nox `_ installed on your machine:: 26 | 27 | $ python -m pip install --user --upgrade nox 28 | 29 | 30 | Running the tests 31 | ----------------- 32 | 33 | We use some external dependencies, multiple interpreters, and code coverage 34 | analysis while running the test suite. Our ``noxfile.py`` handles much of this for 35 | you:: 36 | 37 | $ nox --sessions test-2.7 test-3.7 38 | [ Nox will create virtualenv, install the specified dependencies, and run the commands in order.] 39 | nox > Running session test-2.7 40 | ....... 41 | ....... 42 | nox > Session test-2.7 was successful. 43 | ....... 44 | ....... 45 | nox > Running session test-3.7 46 | ....... 47 | ....... 48 | nox > Session test-3.7 was successful. 49 | 50 | There is also a nox command for running all of our tests across multiple Python 51 | versions:: 52 | 53 | $ nox --sessions test 54 | 55 | Note that code coverage less than 100% is regarded as a failing run. Some 56 | platform-specific tests are skipped unless run on that platform. To make sure 57 | the code works on all of Hip's supported platforms, you can run the full ``nox`` 58 | suite:: 59 | 60 | $ nox --sessions test 61 | [ Nox will create virtualenv, install the specified dependencies, and run the commands in order.] 62 | ....... 63 | ....... 64 | nox > Session test-2.7 was successful. 65 | nox > Session test-3.4 was successful. 66 | nox > Session test-3.5 was successful. 67 | nox > Session test-3.6 was successful. 68 | nox > Session test-3.7 was successful. 69 | nox > Session test-3.8 was successful. 70 | nox > Session test-pypy was successful. 71 | 72 | Releases 73 | -------- 74 | 75 | A release candidate can be created by any contributor by creating a branch 76 | named ``release-x.x`` where ``x.x`` is the version of the proposed release. 77 | 78 | - Update ``CHANGES.rst`` and ``hip/__init__.py`` with the proper version number 79 | and commit the changes to ``release-x.x``. 80 | - Open a pull request to merge the ``release-x.x`` branch into the ``master`` branch. 81 | - Integration tests are run against the release candidate on Travis. From here on, all 82 | the steps below will be handled by a maintainer, so unless you receive review comments 83 | you are done here. 84 | - Once the pull request is squash-merged into ``master``, the merging maintainer 85 | will tag the merge commit with the version number: 86 | 87 | - ``git tag -a 1.24.1 [commit sha]`` 88 | - ``git push origin master --tags`` 89 | 90 | - After the commit is tagged, Travis will build the tagged commit and upload the sdist and wheel 91 | to PyPI and create a draft release on GitHub for the tag. The merging maintainer will 92 | ensure that the PyPI sdist and wheel are properly uploaded. 93 | - The merging maintainer will mark the draft release on GitHub as an approved release. 94 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Hip: A new Python HTTP client for Everyone 2 | ========================================== 3 | 4 | .. toctree:: 5 | :hidden: 6 | :maxdepth: 2 7 | 8 | user-guide 9 | advanced-usage 10 | reference/index 11 | contributing 12 | 13 | Usage 14 | ----- 15 | 16 | The :doc:`user-guide` is the place to go to learn how to use the library and 17 | accomplish common tasks. 
The more in-depth :doc:`advanced-usage` guide is the place to go for lower-level tweaking. 18 | 19 | The :doc:`reference/index` documentation provides API-level documentation. 20 | 21 | License 22 | ------- 23 | 24 | Hip is made available under both the MIT License and Apache-2.0 License. 25 | For more details, see `LICENSE `_. 26 | 27 | Contributing 28 | ------------ 29 | 30 | We happily welcome contributions, please see :doc:`contributing` for details. 31 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | if NOT "%PAPER%" == "" ( 11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 12 | ) 13 | 14 | if "%1" == "" goto help 15 | 16 | if "%1" == "help" ( 17 | :help 18 | echo.Please use `make ^` where ^ is one of 19 | echo. html to make standalone HTML files 20 | echo. dirhtml to make HTML files named index.html in directories 21 | echo. singlehtml to make a single large HTML file 22 | echo. pickle to make pickle files 23 | echo. json to make JSON files 24 | echo. htmlhelp to make HTML files and a HTML help project 25 | echo. qthelp to make HTML files and a qthelp project 26 | echo. devhelp to make HTML files and a Devhelp project 27 | echo. epub to make an epub 28 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 29 | echo. text to make text files 30 | echo. man to make manual pages 31 | echo. changes to make an overview over all changed/added/deprecated items 32 | echo. linkcheck to check all external links for integrity 33 | echo. doctest to run all doctests embedded in the documentation if enabled 34 | goto end 35 | ) 36 | 37 | if "%1" == "clean" ( 38 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 39 | del /q /s %BUILDDIR%\* 40 | goto end 41 | ) 42 | 43 | if "%1" == "html" ( 44 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 45 | if errorlevel 1 exit /b 1 46 | echo. 47 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 48 | goto end 49 | ) 50 | 51 | if "%1" == "dirhtml" ( 52 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 53 | if errorlevel 1 exit /b 1 54 | echo. 55 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 56 | goto end 57 | ) 58 | 59 | if "%1" == "singlehtml" ( 60 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 61 | if errorlevel 1 exit /b 1 62 | echo. 63 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 64 | goto end 65 | ) 66 | 67 | if "%1" == "pickle" ( 68 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 69 | if errorlevel 1 exit /b 1 70 | echo. 71 | echo.Build finished; now you can process the pickle files. 72 | goto end 73 | ) 74 | 75 | if "%1" == "json" ( 76 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished; now you can process the JSON files. 80 | goto end 81 | ) 82 | 83 | if "%1" == "htmlhelp" ( 84 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished; now you can run HTML Help Workshop with the ^ 88 | .hhp project file in %BUILDDIR%/htmlhelp. 
89 | goto end 90 | ) 91 | 92 | if "%1" == "qthelp" ( 93 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 94 | if errorlevel 1 exit /b 1 95 | echo. 96 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 97 | .qhcp project file in %BUILDDIR%/qthelp, like this: 98 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\hip.qhcp 99 | echo.To view the help file: 100 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\hip.ghc 101 | goto end 102 | ) 103 | 104 | if "%1" == "devhelp" ( 105 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 106 | if errorlevel 1 exit /b 1 107 | echo. 108 | echo.Build finished. 109 | goto end 110 | ) 111 | 112 | if "%1" == "epub" ( 113 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 117 | goto end 118 | ) 119 | 120 | if "%1" == "latex" ( 121 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 122 | if errorlevel 1 exit /b 1 123 | echo. 124 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 125 | goto end 126 | ) 127 | 128 | if "%1" == "text" ( 129 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 130 | if errorlevel 1 exit /b 1 131 | echo. 132 | echo.Build finished. The text files are in %BUILDDIR%/text. 133 | goto end 134 | ) 135 | 136 | if "%1" == "man" ( 137 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 141 | goto end 142 | ) 143 | 144 | if "%1" == "changes" ( 145 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.The overview file is in %BUILDDIR%/changes. 149 | goto end 150 | ) 151 | 152 | if "%1" == "linkcheck" ( 153 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Link check complete; look for any errors in the above output ^ 157 | or in %BUILDDIR%/linkcheck/output.txt. 158 | goto end 159 | ) 160 | 161 | if "%1" == "doctest" ( 162 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 163 | if errorlevel 1 exit /b 1 164 | echo. 165 | echo.Testing of doctests in the sources finished, look at the ^ 166 | results in %BUILDDIR%/doctest/output.txt. 167 | goto end 168 | ) 169 | 170 | :end 171 | -------------------------------------------------------------------------------- /docs/reference/hip.util.rst: -------------------------------------------------------------------------------- 1 | hip.util package 2 | ==================== 3 | 4 | Useful methods for working with :mod:`httplib`, completely decoupled from 5 | code specific to **Hip**. 6 | 7 | At the very core, just like its predecessors, :mod:`hip` is built on top of 8 | :mod:`httplib` -- the lowest level HTTP library included in the Python 9 | standard library. 10 | 11 | To aid the limited functionality of the :mod:`httplib` module, :mod:`hip` 12 | provides various helper methods which are used with the higher level components 13 | but can also be used independently. 14 | 15 | hip.util.connection module 16 | -------------------------- 17 | 18 | .. automodule:: hip.util.connection 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | hip.util.request module 24 | ----------------------- 25 | 26 | .. automodule:: hip.util.request 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | hip.util.retry module 32 | --------------------- 33 | 34 | .. 
automodule:: hip.util.retry 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | hip.util.timeout module 40 | ----------------------- 41 | 42 | .. automodule:: hip.util.timeout 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | hip.util.url module 48 | ------------------- 49 | 50 | .. automodule:: hip.util.url 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | 56 | Module contents 57 | --------------- 58 | 59 | .. automodule:: hip.util 60 | :members: 61 | :undoc-members: 62 | :show-inheritance: 63 | -------------------------------------------------------------------------------- /docs/reference/index.rst: -------------------------------------------------------------------------------- 1 | Reference 2 | ========= 3 | 4 | .. contents:: 5 | :local: 6 | :backlinks: none 7 | 8 | Subpackages 9 | ----------- 10 | 11 | .. toctree:: 12 | 13 | hip.util 14 | 15 | Submodules 16 | ---------- 17 | 18 | hip.connectionpool module 19 | ------------------------- 20 | 21 | .. automodule:: hip.connectionpool 22 | :members: 23 | :undoc-members: 24 | :show-inheritance: 25 | 26 | hip.exceptions module 27 | --------------------- 28 | 29 | .. automodule:: hip.exceptions 30 | :members: 31 | :undoc-members: 32 | :show-inheritance: 33 | 34 | hip.fields module 35 | ----------------- 36 | 37 | .. automodule:: hip.fields 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | 42 | hip.filepost module 43 | ------------------- 44 | 45 | .. automodule:: hip.filepost 46 | :members: 47 | :undoc-members: 48 | :show-inheritance: 49 | 50 | hip.poolmanager module 51 | ---------------------- 52 | 53 | .. automodule:: hip.poolmanager 54 | :members: 55 | :undoc-members: 56 | :show-inheritance: 57 | 58 | hip.request module 59 | ------------------ 60 | 61 | .. automodule:: hip.request 62 | :members: 63 | :undoc-members: 64 | :show-inheritance: 65 | 66 | hip.response module 67 | ------------------- 68 | 69 | .. automodule:: hip.response 70 | :members: 71 | :undoc-members: 72 | :show-inheritance: 73 | 74 | 75 | Module contents 76 | --------------- 77 | 78 | .. automodule:: hip 79 | :members: 80 | :undoc-members: 81 | :show-inheritance: 82 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | -r ../dev-requirements.txt 2 | sphinx>=1.7.0 3 | sphinx-rtd-theme 4 | sphinxcontrib-trio 5 | requests 6 | -------------------------------------------------------------------------------- /dummyserver/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/dummyserver/__init__.py -------------------------------------------------------------------------------- /dummyserver/certs/README.rst: -------------------------------------------------------------------------------- 1 | Creating a new SAN-less CRT 2 | --------------------------- 3 | 4 | (Instructions lifted from Heroku_) 5 | 6 | 1. Generate a new CSR:: 7 | 8 | openssl req -new -key server.key -out server.new.csr -nodes -days 10957 9 | 10 | 2. Generate a new CRT:: 11 | 12 | openssl x509 -req -in server.new.csr -signkey server.key -out server.new.crt -days 10957 13 | 14 | Creating a new PEM file with your new CRT 15 | ----------------------------------------- 16 | 17 | 1. 
Concatenate the ``crt`` and ``key`` files into one:: 18 | 19 | cat server.new.crt server.key > cacert.new.pem 20 | 21 | 22 | :Last Modified: 1 Nov 2014 23 | 24 | .. _Heroku: https://devcenter.heroku.com/articles/ssl-certificate-self 25 | -------------------------------------------------------------------------------- /dummyserver/certs/cacert.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIICXgIBAAKBgQDKz8a9X2SfNms9TffyNaFO/K42fAjUI1dAM1G8TVoj0a81ay7W 3 | z4R7V1zfjXFT/WoRW04Y6xek0bff0OtsW+AriooUy7+pPYnrchpAW0p7hPjH1DIB 4 | Vab01CJMhQ24er92Q1dF4WBv4yKqEaV1IYz1cvqvCCJgAbsWn1I8Cna1lwIDAQAB 5 | AoGAPpkK+oBrCkk9qFpcYUH0W/DZxK9b+j4+O+6bF8e4Pr4FmjNO7bZ3aap5W/bI 6 | N+hLyLepzz8guRqR6l8NixCAi+JiVW/agh5o4Jrek8UJWQamwSL4nJ36U3Iw/l7w 7 | vcN1txfkpsA2SB9QFPGfDKcP3+IZMOZ7uFLzk/gzgLYiCEECQQD+M5Lj+e/sNBkb 8 | XeIBxWIrPfEeIkk4SDkqImzDjq1FcfxZkvfskqyJgUvcLe5hb+ibY8jqWvtpvFTI 9 | 5v/tzHvPAkEAzD8fNrGz8KiAVTo7+0vrb4AebAdSLZUvbp0AGs5pXUAuQx6VEgz8 10 | opNKpZjBwAFsZKlwhgDqaChiAt9aKUkzuQJBALlai9I2Dg7SkjgVRdX6wjE7slRB 11 | tdgXOa+SeHJD1+5aRiJeeu8CqFJ/d/wtdbOQsTCVGwxfmREpZT00ywrvXpsCQQCU 12 | gs1Kcrn5Ijx2PCrDFbfyUkFMoaIiXNipYGVkGHRKhtFcoo8YGfNUry7W7BTtbNuI 13 | 8h9MgLvw0nQ5zHf9jymZAkEA7o4uA6XSS1zUqEQ55bZRFHcz/99pLH35G906iwVb 14 | d5rd1Z4Cf5s/91o5gwL6ZP2Ig34CCn+NSL4avgz6K0VUaA== 15 | -----END RSA PRIVATE KEY----- 16 | -------------------------------------------------------------------------------- /dummyserver/certs/cacert.no_san.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIChzCCAfACCQCmk6is+6REjDANBgkqhkiG9w0BAQUFADCBhzELMAkGA1UEBhMC 3 | Q0ExEDAOBgNVBAgMB09udGFyaW8xEDAOBgNVBAcMB09udGFyaW8xHzAdBgNVBAoM 4 | FlNoYXpvdydzIFVzZWQgQ2FycyBJbmMxEjAQBgNVBAMMCWxvY2FsaG9zdDEfMB0G 5 | CSqGSIb3DQEJARYQc2hhem93QGdtYWlsLmNvbTAeFw0xNDEyMDMyMjE3MjVaFw00 6 | NDEyMDIyMjE3MjVaMIGHMQswCQYDVQQGEwJDQTEQMA4GA1UECAwHT250YXJpbzEQ 7 | MA4GA1UEBwwHT250YXJpbzEfMB0GA1UECgwWU2hhem93J3MgVXNlZCBDYXJzIElu 8 | YzESMBAGA1UEAwwJbG9jYWxob3N0MR8wHQYJKoZIhvcNAQkBFhBzaGF6b3dAZ21h 9 | aWwuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDXe3FqmCWvP8XPxqtT 10 | +0bfL1Tvzvebi46k0WIcUV8bP3vyYiSRXG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB 11 | 0y9ai/9doTNcaictdEBu8nfdXKoTtzrn+VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN 12 | 3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAHI/m9/O 13 | bVR3zBOJZUKlHzTRvfYbYhhfrrcQlbwhjKqNyZcQTL/bJdtQSL19g3ftC5wZPI+y 14 | 66R24MqGmRcv5kT32HcuIK1Xhx4nDqTqnTNvGkaIh5CqS4DEP+iqtwDoEbQt8DwL 15 | ejKtvZlyQRKFPTMtmv4VsTIHeVOAj+pXn595 16 | -----END CERTIFICATE----- 17 | -----BEGIN RSA PRIVATE KEY----- 18 | MIICXgIBAAKBgQDXe3FqmCWvP8XPxqtT+0bfL1Tvzvebi46k0WIcUV8bP3vyYiSR 19 | XG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB0y9ai/9doTNcaictdEBu8nfdXKoTtzrn 20 | +VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQAB 21 | AoGBANOGBM6bbhq7ImYU4qf8+RQrdVg2tc9Fzo+yTnn30sF/rx8/AiCDOV4qdGAh 22 | HKjKKaGj2H/rotqoEFcxBy05LrgJXxydBP72e9PYhNgKOcSmCQu4yALIPEXfKuIM 23 | zgAErHVJ2l79fif3D4hzNyz+u5E1A9n3FG9cgaJSiYP8IG2RAkEA82GZ8rBkSGQQ 24 | ZQ3oFuzPAAL21lbj8D0p76fsCpvS7427DtZDOjhOIKZmaeykpv+qSzRraqEqjDRi 25 | S4kjQvwh6QJBAOKniZ+NDo2lSpbOFk+XlmABK1DormVpj8KebHEZYok1lRI+WiX9 26 | Nnoe9YLgix7++6H5SBBCcTB4HvM+5A4BuwMCQQChcX/eZbXP81iQwB3Rfzp8xnqY 27 | icDf7qKvz9Ma4myU7Y5E9EpaB1mD/P14jDpYcMW050vNyqTfpiwB8TFL0NZpAkEA 28 | 02jkFH9UyMgZV6qo4tqI98l/ZrtyF8OrxSNSEPhVkZf6EQc5vN9/lc8Uv1vESEgb 29 | 3AwRrKDcxRH2BHtv6qSwkwJAGjqnkIcEkA75r1e55/EF2chcZW1+tpwKupE8CtAH 30 | VXGd5DVwt4cYWkLUj2gF2fJbV97uu2MAg5CFDb+vQ6p5eA== 31 | -----END RSA 
PRIVATE KEY----- 32 | -------------------------------------------------------------------------------- /dummyserver/certs/cacert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDzDCCAzWgAwIBAgIJALPrscov4b/jMA0GCSqGSIb3DQEBBQUAMIGBMQswCQYD 3 | VQQGEwJGSTEOMAwGA1UECAwFZHVtbXkxDjAMBgNVBAcMBWR1bW15MQ4wDAYDVQQK 4 | DAVkdW1teTEOMAwGA1UECwwFZHVtbXkxETAPBgNVBAMMCFNuYWtlT2lsMR8wHQYJ 5 | KoZIhvcNAQkBFhBkdW1teUB0ZXN0LmxvY2FsMB4XDTExMTIyMjA3NTYxNVoXDTIx 6 | MTIxOTA3NTYxNVowgYExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVkdW1teTEOMAwG 7 | A1UEBwwFZHVtbXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVkdW1teTERMA8G 8 | A1UEAwwIU25ha2VPaWwxHzAdBgkqhkiG9w0BCQEWEGR1bW15QHRlc3QubG9jYWww 9 | gZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMrPxr1fZJ82az1N9/I1oU78rjZ8 10 | CNQjV0AzUbxNWiPRrzVrLtbPhHtXXN+NcVP9ahFbThjrF6TRt9/Q62xb4CuKihTL 11 | v6k9ietyGkBbSnuE+MfUMgFVpvTUIkyFDbh6v3ZDV0XhYG/jIqoRpXUhjPVy+q8I 12 | ImABuxafUjwKdrWXAgMBAAGjggFIMIIBRDAdBgNVHQ4EFgQUGXd/I2JiQllF+3Wd 13 | x3NyBLszCi0wgbYGA1UdIwSBrjCBq4AUGXd/I2JiQllF+3Wdx3NyBLszCi2hgYek 14 | gYQwgYExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVkdW1teTEOMAwGA1UEBwwFZHVt 15 | bXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVkdW1teTERMA8GA1UEAwwIU25h 16 | a2VPaWwxHzAdBgkqhkiG9w0BCQEWEGR1bW15QHRlc3QubG9jYWyCCQCz67HKL+G/ 17 | 4zAPBgNVHRMBAf8EBTADAQH/MBEGCWCGSAGG+EIBAQQEAwIBBjAJBgNVHRIEAjAA 18 | MCsGCWCGSAGG+EIBDQQeFhxUaW55Q0EgR2VuZXJhdGVkIENlcnRpZmljYXRlMA4G 19 | A1UdDwEB/wQEAwICBDANBgkqhkiG9w0BAQUFAAOBgQBvz3AlIM1x7CMmwkmhLV6+ 20 | PJkMnPW7XbP+cDYUlddCk7XhIDY4486JxqZegMTWgbUt0AgXYfHLFsTqUJXrnLj2 21 | WqLb3KP2D1HvnvxJjdJV3M6+TP7tGiY4ICi0zff96FG5C2w9Avsozhr3xDFtjKBv 22 | gyA6UdP3oZGN93oOFiMJXg== 23 | -----END CERTIFICATE----- 24 | -------------------------------------------------------------------------------- /dummyserver/certs/client_bad.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICsDCCAhmgAwIBAgIJAL63Nc6KY94BMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQwHhcNMTExMDExMjMxMjAzWhcNMjExMDA4MjMxMjAzWjBF 5 | MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50 6 | ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB 7 | gQC8HGxvblJ4Z0i/lIlG8jrNsFrCqYRAXtj3xdnnjfUpd/kNhU/KahMsG6urAe/4 8 | Yj+Zqf1sVnt0Cye8FZE3cN9RAcwJrlTCRiicJiXEbA7cPfMphqNGqjVHtmxQ1OsU 9 | NHK7cxKa9OX3xmg4h55vxSZYgibAEPO2g3ueGk7RWIAQ8wIDAQABo4GnMIGkMB0G 10 | A1UdDgQWBBSeeo/YRpdn5DK6bUI7ZDJ57pzGdDB1BgNVHSMEbjBsgBSeeo/YRpdn 11 | 5DK6bUI7ZDJ57pzGdKFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUt 12 | U3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJAL63Nc6K 13 | Y94BMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAOntoloMGt1325UR0 14 | GGEKQJbiRhLXY4otdgFjEvCG2RPZVLxWYhLMu0LkB6HBYULEuoy12ushtRWlhS1k 15 | 6PNRkaZ+LQTSREj6Do4c4zzLxCDmxYmejOz63cIWX2x5IY6qEx2BNOfmM4xEdF8W 16 | LSGGbQfuAghiEh0giAi4AQloDlY= 17 | -----END CERTIFICATE----- 18 | -------------------------------------------------------------------------------- /dummyserver/certs/client_intermediate.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIChzCCAfCgAwIBAgIUZgix95Zxzc+WryIWanrDezW1VjcwDQYJKoZIhvcNAQEL 3 | BQAwRDEbMBkGA1UECgwSdHJ1c3RtZSB2MC40LjArZGV2MSUwIwYDVQQLDBxUZXN0 4 | aW5nIENBICNwN2dEd0tMS3EydlJOajZmMCAXDTAwMDEwMTAwMDAwMFoYDzMwMDAw 5 | MTAxMDAwMDAwWjBNMRswGQYDVQQKDBJ0cnVzdG1lIHYwLjQuMCtkZXYxLjAsBgNV 6 | 
BAsMJVRlc3Rpbmcgc2VydmVyIGNlcnQgI0NPajVGVkxXWEVtcmFHNTQwgZ8wDQYJ 7 | KoZIhvcNAQEBBQADgY0AMIGJAoGBAKeE765+Ws1ZdC86tfZ5LvLTjWluQgmsTx2o 8 | 7xhYAOFmTbZb6qNLCDS07R1VP74ve6UlFD55cV8VbxvEZd8Z3LOADF6nTN61XPbj 9 | dn2J6GfsSjaHE6+mJDXhCtVrD4EGdD4nXRem48mjsrAkrvJ8v4gQNzGzQ27D2dWT 10 | B7Ij6mWNAgMBAAGjazBpMB0GA1UdDgQWBBT66uW6I2OfZYacXgQkop4qlX+qJTAM 11 | BgNVHRMBAf8EAjAAMB8GA1UdIwQYMBaAFESoDYfzVyFP3QHyZG9cvxmlBIGsMBkG 12 | A1UdEQEB/wQPMA2CC2xvY2FsY2xpZW50MA0GCSqGSIb3DQEBCwUAA4GBAG8zoqW0 13 | w5ROSuNFE7fi5I4bdC6sbddiFRXX//TkP2vRD3cM11AKp52UjzK2nUrkoigrJ5p8 14 | xa/PGnPfOVCPiKIb1kzeI/7tyBet6n3q2L0wQo3PR/QCHeSiIpm8lAi1a+8ShXFM 15 | F2CG+z7IN0cQO4bzcwtkk8MhcCsMP14K5PK2 16 | -----END CERTIFICATE----- 17 | -----BEGIN CERTIFICATE----- 18 | MIICwjCCAiugAwIBAgIUWL7wOmK0BVMR8LM5UBewDZEEuH0wDQYJKoZIhvcNAQEL 19 | BQAwgYExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVkdW1teTEOMAwGA1UEBwwFZHVt 20 | bXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVkdW1teTERMA8GA1UEAwwIU25h 21 | a2VPaWwxHzAdBgkqhkiG9w0BCQEWEGR1bW15QHRlc3QubG9jYWwwIBcNMDAwMTAx 22 | MDAwMDAwWhgPMzAwMDAxMDEwMDAwMDBaMEQxGzAZBgNVBAoMEnRydXN0bWUgdjAu 23 | NC4wK2RldjElMCMGA1UECwwcVGVzdGluZyBDQSAjcDdnRHdLTEtxMnZSTmo2ZjCB 24 | nzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAr7134NKsqNQ44gIFElVC5KnGYIYv 25 | D96Kv+5UgXVAyNNK4NpQXHVFmCZpSuyvlz4UZzFBoykISjU+vcGqbFqwRrYciPwh 26 | 45HVQgtoe0SSpze7sv0qsMJiGNRDK06nVI/aCHP9FRoD5iPq8E7lSNVYipai466G 27 | 1lEvVLb0SGNihAUCAwEAAaNxMG8wHQYDVR0OBBYEFESoDYfzVyFP3QHyZG9cvxml 28 | BIGsMBIGA1UdEwEB/wQIMAYBAf8CAQgwDgYDVR0PAQH/BAQDAgEGMCoGA1UdJQEB 29 | /wQgMB4GCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYBBQUHAwMwDQYJKoZIhvcNAQEL 30 | BQADgYEAEs9EAeL3300UxzmT4zyj2cHB2GQxisteEuz9VcWhrvyNDxQ3ko0BxG04 31 | 4fye7dpElrrbSq8PYkygA1qiBCN2NL+v78XWb2OYd7PptpbPehzaEpCTK37O+Num 32 | sB4v1c63r2w1mH1lSjZDkJfd1hml+VwntSzuCmGERlroE6PQwf8= 33 | -----END CERTIFICATE----- 34 | -------------------------------------------------------------------------------- /dummyserver/certs/client_password.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: AES-256-CBC,70C641602D5F366DC5DB70645351993D 4 | 5 | /Ijrtw+2Rjc1mQCXWoNCtzjbRoIhBHQu9ZbQoCnC4/lHru2megV0vDQju0yYjs2H 6 | 7Y7tnMe0hlR9F21be6AkoKDF4B5Kg2X47fwG5V9SIbHBkz3KClfnPp/ojrhIWLTo 7 | grtZoXBFkivDnkuF9NO3qRlskP7u//r3kB5uXIG0ZpfUbRwgm13SqHj1oEB9RdYM 8 | bGhB3tL6dxdIXEgyc9numKBQ0lQu5yYlOH+1aiJSQQdN59ZunreIq//UM1Qc7Uj/ 9 | ILJusFmnec40ArJ+aykENWkToHSKkpeL6no6ZRCnkAYqtUJ84B6zMv9zYhN5UF3O 10 | WHP/4FAu4AylJvNx9sYxXdGaBb+YcX46B7wQk2mkmCtK6cgkrNV3/bohUbYt3tSe 11 | K9dH2xe9orxsGQjoKxylwh7+h8o+BwHpk1naFSzliQV4gvi8yBEzXxM98vNU5B4L 12 | ex8Q2ARWvfNc7OBqboPP0yBMKP/cV9n+fNMwbP0koHxBt71527fVQLoemMiPRb5M 13 | +rcufc+80AUK4baAA5Nu2sZGRqoiFemQ2vgEAxOzRbt/pHzdheO6OHqLJ5W4IWaW 14 | Erojm7/ar6gDlIIGwM8IJdbcMG69s7r8u47lD45ONQMq41Io4Svvs0SCgdRhLt/3 15 | Nb6Smxy7vWFOcrHEJVsv27UD0FViaYHy37DIc6lVvX9s6+VKbdIYuiqxalbaCpKo 16 | VP8kdQZ4SFBAxV9cgPjFbQKVBXkLBdxJKGPzzK3Jc9khD1uHp5Um8OSM21Kh55N3 17 | jvDY5h8fQ0cPyJmlZJzRdYi1+8H5TSFvEXd6cqVkYWiJ1ac0gPOoVt7+YAZ6JB2J 18 | -----END RSA PRIVATE KEY----- 19 | -------------------------------------------------------------------------------- /dummyserver/certs/server.combined.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDczCCAtygAwIBAgIBATANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UEBhMCRkkx 3 | DjAMBgNVBAgMBWR1bW15MQ4wDAYDVQQHDAVkdW1teTEOMAwGA1UECgwFZHVtbXkx 4 | DjAMBgNVBAsMBWR1bW15MREwDwYDVQQDDAhTbmFrZU9pbDEfMB0GCSqGSIb3DQEJ 5 | 
ARYQZHVtbXlAdGVzdC5sb2NhbDAeFw0xMTEyMjIwNzU4NDBaFw0yMTEyMTgwNzU4 6 | NDBaMGExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVkdW1teTEOMAwGA1UEBwwFZHVt 7 | bXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVkdW1teTESMBAGA1UEAwwJbG9j 8 | YWxob3N0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDXe3FqmCWvP8XPxqtT 9 | +0bfL1Tvzvebi46k0WIcUV8bP3vyYiSRXG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB 10 | 0y9ai/9doTNcaictdEBu8nfdXKoTtzrn+VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN 11 | 3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQABo4IBGDCCARQwCQYDVR0TBAIwADAdBgNV 12 | HQ4EFgQUG+dK5Uos08QUwAWofDb3a8YcYlIwgbYGA1UdIwSBrjCBq4AUGXd/I2Ji 13 | QllF+3Wdx3NyBLszCi2hgYekgYQwgYExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVk 14 | dW1teTEOMAwGA1UEBwwFZHVtbXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVk 15 | dW1teTERMA8GA1UEAwwIU25ha2VPaWwxHzAdBgkqhkiG9w0BCQEWEGR1bW15QHRl 16 | c3QubG9jYWyCCQCz67HKL+G/4zAJBgNVHRIEAjAAMCQGA1UdEQQdMBuBDnJvb3RA 17 | bG9jYWxob3N0gglsb2NhbGhvc3QwDQYJKoZIhvcNAQEFBQADgYEAgcW6X1ZUyufm 18 | TFEqEAdpKXdL0rxDwcsM/qqqsXbkz17otH6ujPhBEagzdKtgeNKfy0aXz6rWZugk 19 | lF0IqyC4mcI+vvfgGR5Iy4KdXMrIX98MbrvGJBfbdKhGW2b84wDV42DIDiD2ZGGe 20 | 6YZQQIo9LxjuOTf9jsvf+PIkbI4H0To= 21 | -----END CERTIFICATE----- 22 | -----BEGIN RSA PRIVATE KEY----- 23 | MIICXgIBAAKBgQDXe3FqmCWvP8XPxqtT+0bfL1Tvzvebi46k0WIcUV8bP3vyYiSR 24 | XG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB0y9ai/9doTNcaictdEBu8nfdXKoTtzrn 25 | +VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQAB 26 | AoGBANOGBM6bbhq7ImYU4qf8+RQrdVg2tc9Fzo+yTnn30sF/rx8/AiCDOV4qdGAh 27 | HKjKKaGj2H/rotqoEFcxBy05LrgJXxydBP72e9PYhNgKOcSmCQu4yALIPEXfKuIM 28 | zgAErHVJ2l79fif3D4hzNyz+u5E1A9n3FG9cgaJSiYP8IG2RAkEA82GZ8rBkSGQQ 29 | ZQ3oFuzPAAL21lbj8D0p76fsCpvS7427DtZDOjhOIKZmaeykpv+qSzRraqEqjDRi 30 | S4kjQvwh6QJBAOKniZ+NDo2lSpbOFk+XlmABK1DormVpj8KebHEZYok1lRI+WiX9 31 | Nnoe9YLgix7++6H5SBBCcTB4HvM+5A4BuwMCQQChcX/eZbXP81iQwB3Rfzp8xnqY 32 | icDf7qKvz9Ma4myU7Y5E9EpaB1mD/P14jDpYcMW050vNyqTfpiwB8TFL0NZpAkEA 33 | 02jkFH9UyMgZV6qo4tqI98l/ZrtyF8OrxSNSEPhVkZf6EQc5vN9/lc8Uv1vESEgb 34 | 3AwRrKDcxRH2BHtv6qSwkwJAGjqnkIcEkA75r1e55/EF2chcZW1+tpwKupE8CtAH 35 | VXGd5DVwt4cYWkLUj2gF2fJbV97uu2MAg5CFDb+vQ6p5eA== 36 | -----END RSA PRIVATE KEY----- 37 | -------------------------------------------------------------------------------- /dummyserver/certs/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDczCCAtygAwIBAgIBATANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UEBhMCRkkx 3 | DjAMBgNVBAgMBWR1bW15MQ4wDAYDVQQHDAVkdW1teTEOMAwGA1UECgwFZHVtbXkx 4 | DjAMBgNVBAsMBWR1bW15MREwDwYDVQQDDAhTbmFrZU9pbDEfMB0GCSqGSIb3DQEJ 5 | ARYQZHVtbXlAdGVzdC5sb2NhbDAeFw0xMTEyMjIwNzU4NDBaFw0yMTEyMTgwNzU4 6 | NDBaMGExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVkdW1teTEOMAwGA1UEBwwFZHVt 7 | bXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVkdW1teTESMBAGA1UEAwwJbG9j 8 | YWxob3N0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDXe3FqmCWvP8XPxqtT 9 | +0bfL1Tvzvebi46k0WIcUV8bP3vyYiSRXG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB 10 | 0y9ai/9doTNcaictdEBu8nfdXKoTtzrn+VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN 11 | 3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQABo4IBGDCCARQwCQYDVR0TBAIwADAdBgNV 12 | HQ4EFgQUG+dK5Uos08QUwAWofDb3a8YcYlIwgbYGA1UdIwSBrjCBq4AUGXd/I2Ji 13 | QllF+3Wdx3NyBLszCi2hgYekgYQwgYExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVk 14 | dW1teTEOMAwGA1UEBwwFZHVtbXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVk 15 | dW1teTERMA8GA1UEAwwIU25ha2VPaWwxHzAdBgkqhkiG9w0BCQEWEGR1bW15QHRl 16 | c3QubG9jYWyCCQCz67HKL+G/4zAJBgNVHRIEAjAAMCQGA1UdEQQdMBuBDnJvb3RA 17 | bG9jYWxob3N0gglsb2NhbGhvc3QwDQYJKoZIhvcNAQEFBQADgYEAgcW6X1ZUyufm 18 | TFEqEAdpKXdL0rxDwcsM/qqqsXbkz17otH6ujPhBEagzdKtgeNKfy0aXz6rWZugk 19 | 
lF0IqyC4mcI+vvfgGR5Iy4KdXMrIX98MbrvGJBfbdKhGW2b84wDV42DIDiD2ZGGe 20 | 6YZQQIo9LxjuOTf9jsvf+PIkbI4H0To= 21 | -----END CERTIFICATE----- 22 | -------------------------------------------------------------------------------- /dummyserver/certs/server.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDqDCCAxGgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UEBhMCRkkx 3 | DjAMBgNVBAgTBWR1bW15MQ4wDAYDVQQHEwVkdW1teTEOMAwGA1UEChMFZHVtbXkx 4 | DjAMBgNVBAsTBWR1bW15MREwDwYDVQQDEwhTbmFrZU9pbDEfMB0GCSqGSIb3DQEJ 5 | ARYQZHVtbXlAdGVzdC5sb2NhbDAeFw0xMTEyMjIwNzU4NDBaFw0yMTEyMTgwNzU4 6 | NDBaMGExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIEwVkdW1teTEOMAwGA1UEBxMFZHVt 7 | bXkxDjAMBgNVBAoTBWR1bW15MQ4wDAYDVQQLEwVkdW1teTESMBAGA1UEAxMJbG9j 8 | YWxob3N0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDXe3FqmCWvP8XPxqtT 9 | +0bfL1Tvzvebi46k0WIcUV8bP3vyYiSRXG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB 10 | 0y9ai/9doTNcaictdEBu8nfdXKoTtzrn+VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN 11 | 3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQABo4IBTTCCAUkwCQYDVR0TBAIwADARBglg 12 | hkgBhvhCAQEEBAMCBkAwKwYJYIZIAYb4QgENBB4WHFRpbnlDQSBHZW5lcmF0ZWQg 13 | Q2VydGlmaWNhdGUwHQYDVR0OBBYEFBvnSuVKLNPEFMAFqHw292vGHGJSMIG2BgNV 14 | HSMEga4wgauAFBl3fyNiYkJZRft1ncdzcgS7MwotoYGHpIGEMIGBMQswCQYDVQQG 15 | EwJGSTEOMAwGA1UECBMFZHVtbXkxDjAMBgNVBAcTBWR1bW15MQ4wDAYDVQQKEwVk 16 | dW1teTEOMAwGA1UECxMFZHVtbXkxETAPBgNVBAMTCFNuYWtlT2lsMR8wHQYJKoZI 17 | hvcNAQkBFhBkdW1teUB0ZXN0LmxvY2FsggkAs+uxyi/hv+MwCQYDVR0SBAIwADAZ 18 | BgNVHREEEjAQgQ5yb290QGxvY2FsaG9zdDANBgkqhkiG9w0BAQUFAAOBgQBXdedG 19 | XHLPmOVBeKWjTmaekcaQi44snhYqE1uXRoIQXQsyw+Ya5+n/uRxPKZO/C78EESL0 20 | 8rnLTdZXm4GBYyHYmMy0AdWR7y030viOzAkWWRRRbuecsaUzFCI+F9jTV5LHuRzz 21 | V8fUKwiEE9swzkWgMpfVTPFuPgzxwG9gMbrBfg== 22 | -----END CERTIFICATE----- 23 | -------------------------------------------------------------------------------- /dummyserver/certs/server.ip_san.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDeTCCAuKgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UEBhMCRkkx 3 | DjAMBgNVBAgMBWR1bW15MQ4wDAYDVQQHDAVkdW1teTEOMAwGA1UECgwFZHVtbXkx 4 | DjAMBgNVBAsMBWR1bW15MREwDwYDVQQDDAhTbmFrZU9pbDEfMB0GCSqGSIb3DQEJ 5 | ARYQZHVtbXlAdGVzdC5sb2NhbDAeFw0xMTEyMjIwNzU4NDBaFw0yMTEyMTgwNzU4 6 | NDBaMGExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVkdW1teTEOMAwGA1UEBwwFZHVt 7 | bXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVkdW1teTESMBAGA1UEAwwJbG9j 8 | YWxob3N0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDXe3FqmCWvP8XPxqtT 9 | +0bfL1Tvzvebi46k0WIcUV8bP3vyYiSRXG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB 10 | 0y9ai/9doTNcaictdEBu8nfdXKoTtzrn+VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN 11 | 3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQABo4IBHjCCARowCQYDVR0TBAIwADAdBgNV 12 | HQ4EFgQUG+dK5Uos08QUwAWofDb3a8YcYlIwgbYGA1UdIwSBrjCBq4AUGXd/I2Ji 13 | QllF+3Wdx3NyBLszCi2hgYekgYQwgYExCzAJBgNVBAYTAkZJMQ4wDAYDVQQIDAVk 14 | dW1teTEOMAwGA1UEBwwFZHVtbXkxDjAMBgNVBAoMBWR1bW15MQ4wDAYDVQQLDAVk 15 | dW1teTERMA8GA1UEAwwIU25ha2VPaWwxHzAdBgkqhkiG9w0BCQEWEGR1bW15QHRl 16 | c3QubG9jYWyCCQCz67HKL+G/4zAJBgNVHRIEAjAAMCoGA1UdEQQjMCGBDnJvb3RA 17 | bG9jYWxob3N0gglsb2NhbGhvc3SHBH8AAAEwDQYJKoZIhvcNAQEFBQADgYEAFEAy 18 | O9rxM14W0pVJWHTZkWBcDTqp8A8OB3JFVxeuCNcbtyfyYLWs2juv4YMmo1EKBOQe 19 | 7LYfGuIvtIzT7KBa2QAPmX9JR+F6yl0IVSrYYt9hS7w9Cqr8+jK9QRpNwm3k25hp 20 | BmmoT5b9Q+AYcLMtdMu3uFjLmQBI2XobI/9vCT4= 21 | -----END CERTIFICATE----- 22 | -------------------------------------------------------------------------------- /dummyserver/certs/server.ipv6_san.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN 
CERTIFICATE----- 2 | MIICfTCCAeagAwIBAgIJAPcpn3/M5+piMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQwHhcNMTgxMjE5MDUyMjUyWhcNNDgxMjE4MDUyMjUyWjBF 5 | MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 6 | ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB 7 | gQDXe3FqmCWvP8XPxqtT+0bfL1Tvzvebi46k0WIcUV8bP3vyYiSRXG9ALmyzZH4G 8 | HY9UVs4OEDkCMDOBSezB0y9ai/9doTNcaictdEBu8nfdXKoTtzrn+VX4UPrkH5hm 9 | 7NQ1fTQuj1MR7yBCmYqN3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQABo3UwczAdBgNV 10 | HQ4EFgQUG+dK5Uos08QUwAWofDb3a8YcYlIwHwYDVR0jBBgwFoAUG+dK5Uos08QU 11 | wAWofDb3a8YcYlIwDwYDVR0TAQH/BAUwAwEB/zAgBgNVHREEGTAXggM6OjGHEAAA 12 | AAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQELBQADgYEAjT767TDq6q4lOextf3tZ 13 | BjeuYDUy7bb1fDBAN5rBT1ywr7r0JE6/KOnsZx4jbevx3MllxNpx0gOM2bgwJlnG 14 | 8tgwRB6pxDyln01WBj9b5ymK60jdkw7gg3yYpqEs5/VBQidFO3BmDqf5cGO8PU7p 15 | 0VWdfJBP2UbwblNXdImI1zk= 16 | -----END CERTIFICATE----- 17 | -------------------------------------------------------------------------------- /dummyserver/certs/server.ipv6addr.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIClTCCAX2gAwIBAgIBATANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDAM6OjEw 3 | HhcNMTUxMjE1MTY0NjQxWhcNMjEwODAyMDAwMDAwWjAOMQwwCgYDVQQDDAM6OjEw 4 | ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvFke8g6Pco24cdWuOD8Wb 5 | blIUH6iieNpJqcdNTzKUgtWvlpDeiIOweTuawUWX7bz35fn2KBMty68tmz+64iWJ 6 | AKe6tJtbXQHty+Y09CPDkkC8f0cmXjqwnNbMT7kVPTaPQZkW7hnGS4XgpnzswpqP 7 | dMLpUzYwoucaScC/flawLafP3jq6hfae2F5wafwPIVvlURL7ZR7FZPuLW2L4T1Wu 8 | BHc6gOPQfohjQtiiTNtcEIhsmA3zY4DWuMUJePrEtXhPqcXtogoEiwzLKBeKOYJu 9 | LIQ3++nWLel+HPvhg52wT4Dhb45PQy55ziyelXiHSro5PQmXTiQebuPMLy/8CiSn 10 | AgMBAAEwDQYJKoZIhvcNAQELBQADggEBAILPtFVSOrXiXQ3b8Gslh4TOxxTPSdnO 11 | AkOooYyg5oLJy+CAnDL+u+mFCDFC2maDPm3qyeAn31h5aDyazCzyDvVFVw2uWSuG 12 | a67YNbCLrVBLVIgqyJfMayY3rcjz6rV4n7hpHn42zuwaI8H1z2T1bjvNU6nsizNK 13 | qo80nvJ6Kge2kbAa0aMOIHsGX4KGiUwUb4+LpRAP5ZDC8EnDNNfURt1w94nnAH70 14 | V1RgztaAlVNcPqrSMBLXryNXz+X+Hyd79Nd5runemYUG4fQ50jabC5WHeXqH0uOC 15 | sDtBgqWHxcxnAQhhJ8jF7wodcUjv5AE204ECmZgyQ475kuZpSh6+IvQ= 16 | -----END CERTIFICATE----- 17 | -------------------------------------------------------------------------------- /dummyserver/certs/server.ipv6addr.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEArxZHvIOj3KNuHHVrjg/Fm25SFB+oonjaSanHTU8ylILVr5aQ 3 | 3oiDsHk7msFFl+289+X59igTLcuvLZs/uuIliQCnurSbW10B7cvmNPQjw5JAvH9H 4 | Jl46sJzWzE+5FT02j0GZFu4ZxkuF4KZ87MKaj3TC6VM2MKLnGknAv35WsC2nz946 5 | uoX2nthecGn8DyFb5VES+2UexWT7i1ti+E9VrgR3OoDj0H6IY0LYokzbXBCIbJgN 6 | 82OA1rjFCXj6xLV4T6nF7aIKBIsMyygXijmCbiyEN/vp1i3pfhz74YOdsE+A4W+O 7 | T0Muec4snpV4h0q6OT0Jl04kHm7jzC8v/AokpwIDAQABAoIBAB5Av0x3gozRQGly 8 | cI8vcnmU6mHHxp+sOiuYRnO5R4gZw0p/vbB17itEB2SKGLhTv98lwbci3Y2AMFi1 9 | BqIICWxOzKyaIG38+CRaowBrXvKgBseLRoP+rC1wLD1JWTWuSgsezpEDuzhkPFHA 10 | 8r0GMyauii8+zZJB06TbMY7lCZ2PPKFGtojhbRTe//Nnk925KzSQz7Rk/ylouHra 11 | 4Zi9lDodGvZMhZ8zoNDL2/yvCiDIWffpsdFKn4iKNeme1L7JE8AHBeCFo4eIKeAU 12 | uPlZDFgo/WdDVQQO7LnBv7tRVUB89ARBc9Egt0JoUpSq9pDaMkiBjcJcNiHkbvNj 13 | csFN5GECgYEA44VVoxouGs08TqnJmlJvVu4hA5+te50POQbAlcOR+rLpwl0bPuti 14 | tTaarO4hYwtB87s1owdOOIIqfFSoUTZKy8Ip2OE7pU5CYNZU6b/Z3bWn/+p0mOhF 15 | aoB/FuifPcepY5Fspx2SFFOlHxrkIEkkk1FBWEX9uDPzvJoLsg6jAMUCgYEAxQDC 16 | eFj/Mgjb4oIAtBZJGkRpkNK0Ngw2+Mi2ApEVrlylN4vAtNEBa3HRkZprhPrBIqhw 17 | 
k129jJ81nBWOPDFqhrqmlfolA2K8YxD6wyE6h6qPyO55BbUfAY1uS8ObNLvWZC4o 18 | hO5+AHzMnRc8Qi7CVvPVNbSPE5x5gaCWMiHWDnsCgYEAyfdSTbavtpr5LdsKWzNS 19 | IpjANbdJCLIjETwtOMSikSjA2QhNZ00MElCmfVvlx0X3BaTpuA6EISVrEXMU9wJ6 20 | 4uU4wI0vpU4THmMkfVsQyv62YzZ8yj9lX2Uxa+STdwQGGZy+FprpUaHuse3tE7vZ 21 | ++vlVbbLwvhbJNCaETVs/QECgYApYV139gm0stOtIm2PZlb4o4IhH4EnES3C2DYT 22 | F/Kb623w2vQhhv1rWj8Q/IS1LA7BfuwWqEwWa6LRJF36Gs8rea1gN2jq6yRzEq/5 23 | qNMoiBUnuZ/GKSNYKiMO2wmQ7Bu+c0ujkIz7ATvhi23m4PeFWDZiz4h2MBn9toOW 24 | CDF0XQKBgQCurY35+4jdMOtMuAHquAEtzpf5zbO8p9Bj5qet8N+QXuGTXwaGXlkT 25 | S6i2iXjboNIWfPhqna+BMAmw+NP4TYiNgWoiRd27SDY8LcPXJee8c0+iWKsJkdl8 26 | 90guxzVXSZg478by9ob83Zod2xBhzUSXYGuQrOrItiuiSnMMhHgtrw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /dummyserver/certs/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIICXgIBAAKBgQDXe3FqmCWvP8XPxqtT+0bfL1Tvzvebi46k0WIcUV8bP3vyYiSR 3 | XG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB0y9ai/9doTNcaictdEBu8nfdXKoTtzrn 4 | +VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQAB 5 | AoGBANOGBM6bbhq7ImYU4qf8+RQrdVg2tc9Fzo+yTnn30sF/rx8/AiCDOV4qdGAh 6 | HKjKKaGj2H/rotqoEFcxBy05LrgJXxydBP72e9PYhNgKOcSmCQu4yALIPEXfKuIM 7 | zgAErHVJ2l79fif3D4hzNyz+u5E1A9n3FG9cgaJSiYP8IG2RAkEA82GZ8rBkSGQQ 8 | ZQ3oFuzPAAL21lbj8D0p76fsCpvS7427DtZDOjhOIKZmaeykpv+qSzRraqEqjDRi 9 | S4kjQvwh6QJBAOKniZ+NDo2lSpbOFk+XlmABK1DormVpj8KebHEZYok1lRI+WiX9 10 | Nnoe9YLgix7++6H5SBBCcTB4HvM+5A4BuwMCQQChcX/eZbXP81iQwB3Rfzp8xnqY 11 | icDf7qKvz9Ma4myU7Y5E9EpaB1mD/P14jDpYcMW050vNyqTfpiwB8TFL0NZpAkEA 12 | 02jkFH9UyMgZV6qo4tqI98l/ZrtyF8OrxSNSEPhVkZf6EQc5vN9/lc8Uv1vESEgb 13 | 3AwRrKDcxRH2BHtv6qSwkwJAGjqnkIcEkA75r1e55/EF2chcZW1+tpwKupE8CtAH 14 | VXGd5DVwt4cYWkLUj2gF2fJbV97uu2MAg5CFDb+vQ6p5eA== 15 | -----END RSA PRIVATE KEY----- 16 | -------------------------------------------------------------------------------- /dummyserver/certs/server.key.org: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: DES-EDE3-CBC,8B3708EAD53963D4 4 | 5 | uyLo4sFmSo7+K1uVgSENI+85JsG5o1JmovvxD/ucUl9CDhDj4KgFzs95r7gjjlhS 6 | kA/hIY8Ec9i6T3zMXpAswWI5Mv2LE+UdYR5h60dYtIinLC7KF0QIztSecNWy20Bi 7 | /NkobZhN7VZUuCEoSRWj4Ia3EuATF8Y9ZRGFPNsqMbSAhsGZ1P5xbDMEpE+5PbJP 8 | LvdF9yWDT77rHeI4CKV4aP/yxtm1heEhKw5o6hdpPBQajPpjSQbh7/V6Qd0QsKcV 9 | n27kPnSabsTbbc2IR40il4mZfHvXAlp4KoHL3RUgaons7q0hAUpUi+vJXbEukGGt 10 | 3dlyWwKwEFS7xBQ1pQvzcePI4/fRQxhZNxeFZW6n12Y3X61vg1IsG7usPhRe3iDP 11 | 3g1MXQMAhxaECnDN9b006IeoYdaktd4wrs/fn8x6Yz4= 12 | -----END RSA PRIVATE KEY----- 13 | -------------------------------------------------------------------------------- /dummyserver/certs/server.no_san.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIChzCCAfACCQCmk6is+6REjDANBgkqhkiG9w0BAQUFADCBhzELMAkGA1UEBhMC 3 | Q0ExEDAOBgNVBAgMB09udGFyaW8xEDAOBgNVBAcMB09udGFyaW8xHzAdBgNVBAoM 4 | FlNoYXpvdydzIFVzZWQgQ2FycyBJbmMxEjAQBgNVBAMMCWxvY2FsaG9zdDEfMB0G 5 | CSqGSIb3DQEJARYQc2hhem93QGdtYWlsLmNvbTAeFw0xNDEyMDMyMjE3MjVaFw00 6 | NDEyMDIyMjE3MjVaMIGHMQswCQYDVQQGEwJDQTEQMA4GA1UECAwHT250YXJpbzEQ 7 | MA4GA1UEBwwHT250YXJpbzEfMB0GA1UECgwWU2hhem93J3MgVXNlZCBDYXJzIElu 8 | YzESMBAGA1UEAwwJbG9jYWxob3N0MR8wHQYJKoZIhvcNAQkBFhBzaGF6b3dAZ21h 9 | aWwuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDXe3FqmCWvP8XPxqtT 10 | +0bfL1Tvzvebi46k0WIcUV8bP3vyYiSRXG9ALmyzZH4GHY9UVs4OEDkCMDOBSezB 11 | 
0y9ai/9doTNcaictdEBu8nfdXKoTtzrn+VX4UPrkH5hm7NQ1fTQuj1MR7yBCmYqN 12 | 3Q2Q+Efuujyx0FwBzAuy1aKYuwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAHI/m9/O 13 | bVR3zBOJZUKlHzTRvfYbYhhfrrcQlbwhjKqNyZcQTL/bJdtQSL19g3ftC5wZPI+y 14 | 66R24MqGmRcv5kT32HcuIK1Xhx4nDqTqnTNvGkaIh5CqS4DEP+iqtwDoEbQt8DwL 15 | ejKtvZlyQRKFPTMtmv4VsTIHeVOAj+pXn595 16 | -----END CERTIFICATE----- 17 | -------------------------------------------------------------------------------- /dummyserver/certs/server.no_san.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIByDCCATECAQAwgYcxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAw 3 | DgYDVQQHDAdPbnRhcmlvMR8wHQYDVQQKDBZTaGF6b3cncyBVc2VkIENhcnMgSW5j 4 | MRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEHNoYXpvd0BnbWFp 5 | bC5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANd7cWqYJa8/xc/Gq1P7 6 | Rt8vVO/O95uLjqTRYhxRXxs/e/JiJJFcb0AubLNkfgYdj1RWzg4QOQIwM4FJ7MHT 7 | L1qL/12hM1xqJy10QG7yd91cqhO3Ouf5VfhQ+uQfmGbs1DV9NC6PUxHvIEKZio3d 8 | DZD4R+66PLHQXAHMC7LVopi7AgMBAAGgADANBgkqhkiG9w0BAQUFAAOBgQDGWkxr 9 | mCa2h+/HnptucimU+T4QESBNc3fHhnnWaj4RXJaS0xwUDaG81INnxj6KNVgOtemK 10 | VlwG7Ziqj1i+gZ1UpbmMp1YkSD/0+N8vb2BStuXlc5rP0+cG1DlzV1Dc+FaDHHsy 11 | 7MfyeHTa5FYdSeKsiAFHlQ84g08Pd7hW0c+SxA== 12 | -----END CERTIFICATE REQUEST----- 13 | -------------------------------------------------------------------------------- /dummyserver/certs/server_password.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: AES-256-CBC,D1BAE8F8B899FD9C2367B4CCF40917CF 4 | 5 | emTIXrQCcOcHtknXXZwK9X4qS8WcT6ozH2TTTDOcz9+G6CnszwvnsLCnO3eiLVbI 6 | NXiFib7ulQksoHQd2MPC9pjWm1a8vadMYOWgx8jnYkgVE+l1ICGgZVACg55/E6Xj 7 | qC4hZijQnKhPPyzdebeos2IIk7B3op4kHYJQGpwisAuSmT06c2x/jsmFr+2UMSq5 8 | Xf+kWuQlHUPcDct6uLJJ4zhFljcxFvk3cgMZIJaqyWCX7+gDCOi2gWrP2l1osllc 9 | q0egNUdg3RVrbxgxFn4XdHpmTNbIc3NTTR+xYuqHun8UbJrss1Ed25rrK0QzuV0l 10 | vyKLj1MSOV9VRCujF58I9whDZSwt07Aozmm1JC9F8kyMhbL9C4gmwEKHIQ5N5I+V 11 | mZKKAbJyQ2B1Oza/yZUnJG6hUyKTVbbCW57OltTDr4KlUzYUJJzTVyMy14AVv3zU 12 | GzKX5m3AzWMjykpmHjYNcI/zMQem0OQB2U9Pqyyh2GzItnHpnkqb7RDJSIYiOToc 13 | jA65NhS4sIZWWzwsRRaE2sq1rlssQFkzM3gIHi2C+tJD3PvmYRKW+6fLLNCqikMk 14 | w4OvHc8U/hIY2YnGAzjE7bbCrkQduhCwBL7bK08HYrluQv6dgVJLA9TtC4jYLYeo 15 | 1uXDNGcY943fwU5h/YwQbvVQ5oo9oHBJuLgUzXlPjc+va4gw0JSG59GgXaTMVTjw 16 | wybmcNlaFZbK0XrveX3ykJimnuDK29yY4nWSzPFRxvaaWaRAL4IgEXvKCiQhg8NE 17 | snV2L3uQgJNv6RmE+c4HzQQ71iZuZ+iJglzt/iG4pO88zxjLLfT4qwfPAlEdxRmN 18 | -----END RSA PRIVATE KEY----- 19 | -------------------------------------------------------------------------------- /dummyserver/proxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Simple asynchronous HTTP proxy with tunnelling (CONNECT). 
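# Usage note (a sketch inferred from the __main__ block at the bottom of this
# file, not part of the original header): the proxy can be run standalone with
#     python dummyserver/proxy.py [port]
# where the port defaults to 8888 if no argument is given.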
4 | # 5 | # GET/POST proxying based on 6 | # http://groups.google.com/group/python-tornado/msg/7bea08e7a049cf26 7 | # 8 | # Copyright (C) 2012 Senko Rasic 9 | # 10 | # Permission is hereby granted, free of charge, to any person obtaining a copy 11 | # of this software and associated documentation files (the "Software"), to deal 12 | # in the Software without restriction, including without limitation the rights 13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 14 | # copies of the Software, and to permit persons to whom the Software is 15 | # furnished to do so, subject to the following conditions: 16 | # 17 | # The above copyright notice and this permission notice shall be included in 18 | # all copies or substantial portions of the Software. 19 | # 20 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 21 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 22 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 23 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 24 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 25 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 26 | # THE SOFTWARE. 27 | 28 | import sys 29 | import socket 30 | 31 | import tornado.gen 32 | import tornado.httpserver 33 | import tornado.ioloop 34 | import tornado.iostream 35 | import tornado.web 36 | import tornado.httpclient 37 | 38 | __all__ = ["ProxyHandler", "run_proxy"] 39 | 40 | 41 | class ProxyHandler(tornado.web.RequestHandler): 42 | SUPPORTED_METHODS = ["GET", "POST", "CONNECT"] 43 | 44 | @tornado.gen.coroutine 45 | def get(self): 46 | def handle_response(response): 47 | if response.error and not isinstance( 48 | response.error, tornado.httpclient.HTTPError 49 | ): 50 | self.set_status(500) 51 | self.write("Internal server error:\n" + str(response.error)) 52 | self.finish() 53 | else: 54 | self.set_status(response.code) 55 | for header in ( 56 | "Date", 57 | "Cache-Control", 58 | "Server", 59 | "Content-Type", 60 | "Location", 61 | ): 62 | v = response.headers.get(header) 63 | if v: 64 | self.set_header(header, v) 65 | if response.body: 66 | self.write(response.body) 67 | self.finish() 68 | 69 | req = tornado.httpclient.HTTPRequest( 70 | url=self.request.uri, 71 | method=self.request.method, 72 | body=self.request.body, 73 | headers=self.request.headers, 74 | follow_redirects=False, 75 | allow_nonstandard_methods=True, 76 | ) 77 | 78 | client = tornado.httpclient.AsyncHTTPClient() 79 | try: 80 | response = yield client.fetch(req) 81 | yield handle_response(response) 82 | except tornado.httpclient.HTTPError as e: 83 | if hasattr(e, "response") and e.response: 84 | yield handle_response(e.response) 85 | else: 86 | self.set_status(500) 87 | self.write("Internal server error:\n" + str(e)) 88 | self.finish() 89 | 90 | @tornado.gen.coroutine 91 | def post(self): 92 | yield self.get() 93 | 94 | @tornado.gen.coroutine 95 | def connect(self): 96 | host, port = self.request.uri.split(":") 97 | client = self.request.connection.stream 98 | 99 | @tornado.gen.coroutine 100 | def start_forward(reader, writer): 101 | while True: 102 | try: 103 | data = yield reader.read_bytes(4096, partial=True) 104 | except tornado.iostream.StreamClosedError: 105 | break 106 | if not data: 107 | break 108 | writer.write(data) 109 | writer.close() 110 | 111 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) 112 | upstream = 
tornado.iostream.IOStream(s) 113 | yield upstream.connect((host, int(port))) 114 | 115 | client.write(b"HTTP/1.0 200 Connection established\r\n\r\n") 116 | fu1 = start_forward(client, upstream) 117 | fu2 = start_forward(upstream, client) 118 | yield [fu1, fu2] 119 | 120 | 121 | def run_proxy(port, start_ioloop=True): 122 | """ 123 | Run proxy on the specified port. If start_ioloop is True (default), 124 | the tornado IOLoop will be started immediately. 125 | """ 126 | app = tornado.web.Application([(r".*", ProxyHandler)]) 127 | app.listen(port) 128 | ioloop = tornado.ioloop.IOLoop.instance() 129 | if start_ioloop: 130 | ioloop.start() 131 | 132 | 133 | if __name__ == "__main__": 134 | port = 8888 135 | if len(sys.argv) > 1: 136 | port = int(sys.argv[1]) 137 | 138 | print("Starting HTTP proxy on port %d" % port) 139 | run_proxy(port) 140 | -------------------------------------------------------------------------------- /dummyserver/server.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | Dummy server used for unit testing. 5 | """ 6 | from __future__ import print_function 7 | 8 | import logging 9 | import os 10 | import random 11 | import string 12 | import sys 13 | import threading 14 | import socket 15 | import warnings 16 | import ssl 17 | from datetime import datetime 18 | 19 | from hip.exceptions import HTTPWarning 20 | 21 | import tornado.httpserver 22 | import tornado.ioloop 23 | import tornado.netutil 24 | import tornado.web 25 | 26 | 27 | log = logging.getLogger(__name__) 28 | 29 | CERTS_PATH = os.path.join(os.path.dirname(__file__), "certs") 30 | DEFAULT_CERTS = { 31 | "certfile": os.path.join(CERTS_PATH, "server.crt"), 32 | "keyfile": os.path.join(CERTS_PATH, "server.key"), 33 | "cert_reqs": ssl.CERT_OPTIONAL, 34 | "ca_certs": os.path.join(CERTS_PATH, "cacert.pem"), 35 | } 36 | CLIENT_INTERMEDIATE_PEM = "client_intermediate.pem" 37 | CLIENT_NO_INTERMEDIATE_PEM = "client_no_intermediate.pem" 38 | CLIENT_INTERMEDIATE_KEY = "client_intermediate.key" 39 | CLIENT_CERT = os.path.join(CERTS_PATH, CLIENT_INTERMEDIATE_PEM) 40 | PASSWORD_KEYFILE = os.path.join(CERTS_PATH, "server_password.key") 41 | PASSWORD_CLIENT_KEYFILE = os.path.join(CERTS_PATH, "client_password.key") 42 | NO_SAN_CERTS = { 43 | "certfile": os.path.join(CERTS_PATH, "server.no_san.crt"), 44 | "keyfile": DEFAULT_CERTS["keyfile"], 45 | } 46 | IP_SAN_CERTS = { 47 | "certfile": os.path.join(CERTS_PATH, "server.ip_san.crt"), 48 | "keyfile": DEFAULT_CERTS["keyfile"], 49 | } 50 | IPV6_ADDR_CERTS = { 51 | "certfile": os.path.join(CERTS_PATH, "server.ipv6addr.crt"), 52 | "keyfile": os.path.join(CERTS_PATH, "server.ipv6addr.key"), 53 | } 54 | IPV6_SAN_CERTS = { 55 | "certfile": os.path.join(CERTS_PATH, "server.ipv6_san.crt"), 56 | "keyfile": DEFAULT_CERTS["keyfile"], 57 | } 58 | DEFAULT_CA = os.path.join(CERTS_PATH, "cacert.pem") 59 | DEFAULT_CA_KEY = os.path.join(CERTS_PATH, "cacert.key") 60 | DEFAULT_CA_BAD = os.path.join(CERTS_PATH, "client_bad.pem") 61 | NO_SAN_CA = os.path.join(CERTS_PATH, "cacert.no_san.pem") 62 | IPV6_ADDR_CA = os.path.join(CERTS_PATH, "server.ipv6addr.crt") 63 | IPV6_SAN_CA = os.path.join(CERTS_PATH, "server.ipv6_san.crt") 64 | COMBINED_CERT_AND_KEY = os.path.join(CERTS_PATH, "server.combined.pem") 65 | 66 | 67 | def _has_ipv6(host): 68 | """ Returns True if the system can bind an IPv6 address. 
""" 69 | sock = None 70 | has_ipv6 = False 71 | 72 | if socket.has_ipv6: 73 | # has_ipv6 returns true if cPython was compiled with IPv6 support. 74 | # It does not tell us if the system has IPv6 support enabled. To 75 | # determine that we must bind to an IPv6 address. 76 | # https://github.com/urllib3/urllib3/pull/611 77 | # https://bugs.python.org/issue658327 78 | try: 79 | sock = socket.socket(socket.AF_INET6) 80 | sock.bind((host, 0)) 81 | has_ipv6 = True 82 | except Exception: 83 | pass 84 | 85 | if sock: 86 | sock.close() 87 | return has_ipv6 88 | 89 | 90 | # Some systems may have IPv6 support but DNS may not be configured 91 | # properly. We can not count that localhost will resolve to ::1 on all 92 | # systems. See https://github.com/urllib3/urllib3/pull/611 and 93 | # https://bugs.python.org/issue18792 94 | HAS_IPV6_AND_DNS = _has_ipv6("localhost") 95 | HAS_IPV6 = _has_ipv6("::1") 96 | 97 | 98 | # Different types of servers we have: 99 | 100 | 101 | class NoIPv6Warning(HTTPWarning): 102 | "IPv6 is not available" 103 | pass 104 | 105 | 106 | class SocketServerThread(threading.Thread): 107 | """ 108 | :param socket_handler: Callable which receives a socket argument for one 109 | request. 110 | :param ready_event: Event which gets set when the socket handler is 111 | ready to receive requests. 112 | """ 113 | 114 | USE_IPV6 = HAS_IPV6_AND_DNS 115 | 116 | def __init__(self, socket_handler, host="localhost", port=8081, ready_event=None): 117 | threading.Thread.__init__(self) 118 | self.daemon = True 119 | 120 | self.socket_handler = socket_handler 121 | self.host = host 122 | self.ready_event = ready_event 123 | 124 | def _start_server(self): 125 | if self.USE_IPV6: 126 | sock = socket.socket(socket.AF_INET6) 127 | else: 128 | warnings.warn("No IPv6 support. Falling back to IPv4.", NoIPv6Warning) 129 | sock = socket.socket(socket.AF_INET) 130 | if sys.platform != "win32": 131 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 132 | sock.bind((self.host, 0)) 133 | self.port = sock.getsockname()[1] 134 | 135 | # Once listen() returns, the server socket is ready 136 | sock.listen(1) 137 | 138 | if self.ready_event: 139 | self.ready_event.set() 140 | 141 | self.socket_handler(sock) 142 | sock.close() 143 | 144 | def run(self): 145 | self.server = self._start_server() 146 | 147 | 148 | def run_tornado_app(app, io_loop, certs, scheme, host): 149 | assert io_loop == tornado.ioloop.IOLoop.current() 150 | 151 | # We can't use fromtimestamp(0) because of CPython issue 29097, so we'll 152 | # just construct the datetime object directly. 
153 | app.last_req = datetime(1970, 1, 1) 154 | 155 | if scheme == "https": 156 | http_server = tornado.httpserver.HTTPServer(app, ssl_options=certs) 157 | else: 158 | http_server = tornado.httpserver.HTTPServer(app) 159 | 160 | sockets = tornado.netutil.bind_sockets(None, address=host) 161 | port = sockets[0].getsockname()[1] 162 | http_server.add_sockets(sockets) 163 | return http_server, port 164 | 165 | 166 | def run_loop_in_thread(io_loop): 167 | t = threading.Thread(target=io_loop.start) 168 | t.start() 169 | return t 170 | 171 | 172 | def get_unreachable_address(): 173 | while True: 174 | host = "".join(random.choice(string.ascii_lowercase) for _ in range(60)) 175 | sockaddr = (host, 54321) 176 | 177 | # check if we are really "lucky" and hit an actual server 178 | try: 179 | s = socket.create_connection(sockaddr) 180 | except socket.error: 181 | return sockaddr 182 | else: 183 | s.close() 184 | 185 | 186 | if __name__ == "__main__": 187 | # For debugging dummyserver itself - python -m dummyserver.server 188 | from .testcase import TestingApp 189 | 190 | host = "127.0.0.1" 191 | 192 | io_loop = tornado.ioloop.IOLoop.current() 193 | app = tornado.web.Application([(r".*", TestingApp)]) 194 | server, port = run_tornado_app(app, io_loop, None, "http", host) 195 | server_thread = run_loop_in_thread(io_loop) 196 | 197 | print("Listening on http://{host}:{port}".format(host=host, port=port)) 198 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | from xml.etree import ElementTree as ET 2 | import os 3 | import re 4 | import shutil 5 | import sys 6 | 7 | import nox 8 | 9 | 10 | def _clean_coverage(coverage_path): 11 | input_xml = ET.ElementTree(file=coverage_path) 12 | for class_ in input_xml.findall(".//class"): 13 | filename = class_.get("filename") 14 | filename = re.sub("_sync", "_async", filename) 15 | class_.set("filename", filename) 16 | input_xml.write(coverage_path, xml_declaration=True) 17 | 18 | 19 | def tests_impl(session, extras="socks,brotli"): 20 | # Install deps and the package itself. 21 | session.install("-r", "dev-requirements.txt") 22 | session.install(".[{extras}]".format(extras=extras)) 23 | 24 | # Show the pip version. 25 | session.run("pip", "--version") 26 | # Print the Python version and bytesize. 
27 | session.run("python", "--version") 28 | session.run("python", "-c", "import struct; print(struct.calcsize('P') * 8)") 29 | 30 | # Only require unasync to run tests 31 | import unasync # please install unasync alongside nox to run tests 32 | 33 | unasync.unasync_files( 34 | [ 35 | "test/with_dummyserver/async/__init__.py", 36 | "test/with_dummyserver/async/test_poolmanager.py", 37 | ], 38 | rules=[ 39 | unasync.Rule( 40 | "test/with_dummyserver/async", 41 | "test/with_dummyserver/sync", 42 | additional_replacements={ 43 | "ahip": "hip", 44 | "test_all_backends": "test_sync_backend", 45 | }, 46 | ) 47 | ], 48 | ) 49 | 50 | session.run( 51 | "pytest", 52 | "-r", 53 | "a", 54 | "--tb=native", 55 | "--cov=hip", 56 | "--no-success-flaky-report", 57 | *(session.posargs or ("test/",)), 58 | env={"PYTHONWARNINGS": "always::DeprecationWarning"} 59 | ) 60 | session.run("coverage", "xml") 61 | _clean_coverage("coverage.xml") 62 | 63 | 64 | @nox.session(python=["3.5", "3.6", "3.7", "3.8", "3.9", "pypy", "pypy3"]) 65 | def test(session): 66 | tests_impl(session) 67 | 68 | 69 | @nox.session(python=["2.7", "3.7"]) 70 | def google_brotli(session): 71 | # https://pypi.org/project/Brotli/ is the Google version of brotli, so 72 | # install it separately and don't install our brotli extra (which installs 73 | # brotlipy). 74 | session.install("brotli") 75 | tests_impl(session, extras="socks") 76 | 77 | 78 | @nox.session() 79 | def blacken(session): 80 | """Run black code formatter.""" 81 | session.install("black") 82 | session.run("black", "src", "dummyserver", "test", "noxfile.py", "setup.py") 83 | 84 | lint(session) 85 | 86 | 87 | @nox.session 88 | def lint(session): 89 | session.install("flake8", "black") 90 | session.run("flake8", "--version") 91 | session.run("black", "--version") 92 | session.run( 93 | "black", "--check", "src", "dummyserver", "test", "noxfile.py", "setup.py" 94 | ) 95 | session.run("flake8", "setup.py", "docs", "dummyserver", "src", "test") 96 | 97 | 98 | @nox.session 99 | def docs(session): 100 | session.install("-r", "docs/requirements.txt") 101 | session.install(".[socks,brotli]") 102 | 103 | session.chdir("docs") 104 | if os.path.exists("_build"): 105 | shutil.rmtree("_build") 106 | session.run("sphinx-build", "-W", ".", "_build/html") 107 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=40.6.2", "wheel", "unasync"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = hip 3 | description = Hip: A new Python HTTP client for Everyone 4 | long_description = file: README.rst, CHANGES.rst 5 | long_description_content_type = text/x-rst 6 | keywords = http client 7 | license = MIT -or- Apache-2.0 8 | author = The Trio Collective 9 | author_email = njs@pobox.com 10 | url = https://hip.readthedocs.io 11 | project_urls = 12 | Documentation = https://hip.readthedocs.io 13 | Source = https://github.com/python-trio/hip 14 | Issue tracker = https://github.com/python-trio/hip/issues 15 | classifiers = 16 | Environment :: Web Environment 17 | Intended Audience :: Developers 18 | License :: OSI Approved :: Apache Software License 19 | License :: OSI Approved :: MIT License 20 | Operating System :: OS Independent 21 | 
Programming Language :: Python 22 | Programming Language :: Python :: 2 23 | Programming Language :: Python :: 2.7 24 | Programming Language :: Python :: 3 25 | Programming Language :: Python :: 3.5 26 | Programming Language :: Python :: 3.6 27 | Programming Language :: Python :: 3.7 28 | Programming Language :: Python :: 3.8 29 | Programming Language :: Python :: 3.9 30 | Programming Language :: Python :: Implementation :: CPython 31 | Programming Language :: Python :: Implementation :: PyPy 32 | Topic :: Internet :: WWW/HTTP 33 | Topic :: Software Development :: Libraries 34 | 35 | [options] 36 | package_dir = 37 | = src 38 | packages = find: 39 | python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*, <4 40 | install_requires = 41 | h11 >= 0.8.0 42 | sniffio; python_version>='3.6' 43 | 44 | [options.packages.find] 45 | where = src 46 | 47 | [options.extras_require] 48 | brotli = brotlipy>=0.6.0 49 | socks = PySocks >=1.5.6, <2.0, !=1.5.7 50 | 51 | [tool:pytest] 52 | xfail_strict = true 53 | python_classes = Test *TestCase 54 | 55 | [flake8] 56 | ignore = E501, E203, W503, W504 57 | exclude = ./docs/conf.py,./src/ahip/packages/* 58 | max-line-length = 99 59 | 60 | [bdist_wheel] 61 | universal = 1 62 | 63 | [coverage:run] 64 | omit = 65 | *hip/packages/* 66 | *hip/contrib/ntlmpool.py 67 | *hip/contrib/pyopenssl.py 68 | *hip/contrib/securetransport.py 69 | *hip/contrib/_securetransport/* 70 | # remove when ready to test the async backends 71 | *hip/_async/* 72 | *hip/_backends/anyio_backend.py 73 | *hip/_backends/trio_backend.py 74 | *hip/_backends/async_backend.py 75 | *hip/contrib/socks.py 76 | 77 | [coverage:report] 78 | exclude_lines = 79 | except ImportError: 80 | pass 81 | import 82 | raise 83 | .* # Platform-specific.* 84 | .*:.* # Python \d.* 85 | .* # Abstract 86 | .* # Defensive: 87 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import re 5 | 6 | import unasync # requires pip>=10.0 for PEP 518 support 7 | from setuptools import setup 8 | 9 | 10 | # Get the version (borrowed from SQLAlchemy) 11 | base_path = os.path.dirname(__file__) 12 | with open(os.path.join(base_path, "src", "ahip", "__init__.py")) as fp: 13 | version = re.match(r".*__version__ = \"(.*?)\"", fp.read(), re.S).group(1) 14 | 15 | setup( 16 | version=version, 17 | cmdclass={ 18 | "build_py": unasync.cmdclass_build_py( 19 | rules=[ 20 | unasync.Rule( 21 | "/ahip/", 22 | "/hip/", 23 | additional_replacements={ 24 | "anext": "next", 25 | "await_if_coro": "return_non_coro", 26 | }, 27 | ) 28 | ] 29 | ) 30 | }, 31 | ) 32 | -------------------------------------------------------------------------------- /src/ahip/__init__.py: -------------------------------------------------------------------------------- 1 | """Hip: A new Python HTTP client for Everyone""" 2 | 3 | from __future__ import absolute_import 4 | import warnings 5 | 6 | from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url 7 | 8 | from . import exceptions 9 | from .filepost import encode_multipart_formdata 10 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url 11 | from .response import HTTPResponse 12 | from .util.request import make_headers 13 | from .util.timeout import Timeout 14 | from .util.retry import Retry 15 | 16 | 17 | # Set default logging handler to avoid "No handler found" warnings. 
18 | import logging 19 | from logging import NullHandler 20 | 21 | __author__ = "The Trio Collective" 22 | __license__ = "MIT -or- Apache-2.0" 23 | __version__ = "0.1.dev0" 24 | 25 | __all__ = [ 26 | "HTTPConnectionPool", 27 | "HTTPSConnectionPool", 28 | "PoolManager", 29 | "ProxyManager", 30 | "HTTPResponse", 31 | "Retry", 32 | "Timeout", 33 | "add_stderr_logger", 34 | "connection_from_url", 35 | "disable_warnings", 36 | "encode_multipart_formdata", 37 | "make_headers", 38 | "proxy_from_url", 39 | ] 40 | 41 | 42 | logging.getLogger("hip").addHandler(NullHandler()) 43 | 44 | 45 | def add_stderr_logger(level=logging.DEBUG): 46 | """ 47 | Helper for quickly adding a StreamHandler to the logger. Useful for 48 | debugging. 49 | 50 | Returns the handler after adding it. 51 | """ 52 | # This method needs to be in this __init__.py to get the __name__ correct 53 | # even if Hip is vendored within another package. 54 | logger = logging.getLogger("hip") 55 | handler = logging.StreamHandler() 56 | handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) 57 | logger.addHandler(handler) 58 | logger.setLevel(level) 59 | logger.debug("Added a stderr logging handler to logger: %s", "hip") 60 | return handler 61 | 62 | 63 | # ... Clean up. 64 | del NullHandler 65 | 66 | 67 | # All warning filters *must* be appended unless you're really certain that they 68 | # shouldn't be: otherwise, it's very hard for users to use most Python 69 | # mechanisms to silence them. 70 | # SecurityWarning's always go off by default. 71 | warnings.simplefilter("always", exceptions.SecurityWarning, append=True) 72 | # SubjectAltNameWarning's should go off once per host 73 | warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True) 74 | # InsecurePlatformWarning's don't vary between requests, so we keep it default. 75 | warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) 76 | # SNIMissingWarnings should go off only once. 77 | warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True) 78 | 79 | 80 | def disable_warnings(category=exceptions.HTTPWarning): 81 | """ 82 | Helper for quickly disabling all Hip warnings. 83 | """ 84 | warnings.simplefilter("ignore", category) 85 | -------------------------------------------------------------------------------- /src/ahip/_backends/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/src/ahip/_backends/__init__.py -------------------------------------------------------------------------------- /src/ahip/_backends/_common.py: -------------------------------------------------------------------------------- 1 | from .. 
import util 2 | 3 | __all__ = ["is_readable", "LoopAbort"] 4 | 5 | 6 | def is_readable(sock): 7 | return util.wait_for_read(sock, timeout=0) 8 | 9 | 10 | class LoopAbort(Exception): 11 | """ 12 | Tell backends that enough bytes have been consumed 13 | """ 14 | 15 | pass 16 | -------------------------------------------------------------------------------- /src/ahip/_backends/_loader.py: -------------------------------------------------------------------------------- 1 | from ..backends import Backend 2 | 3 | 4 | class Loader: 5 | def __init__(self, name, loader, is_async): 6 | self.name = name 7 | self.loader = loader 8 | self.is_async = is_async 9 | 10 | def __call__(self, *args, **kwargs): 11 | return self.loader(kwargs) 12 | 13 | 14 | def load_sync_backend(kwargs): 15 | from .sync_backend import SyncBackend 16 | 17 | return SyncBackend(**kwargs) 18 | 19 | 20 | def load_anyio_backend(kwargs): 21 | from .anyio_backend import AnyIOBackend 22 | 23 | return AnyIOBackend(**kwargs) 24 | 25 | 26 | def load_trio_backend(kwargs): 27 | from .trio_backend import TrioBackend 28 | 29 | return TrioBackend(**kwargs) 30 | 31 | 32 | def backend_directory(): 33 | """ 34 | We defer any heavy duty imports until the last minute. 35 | """ 36 | loaders = [ 37 | Loader(name="sync", loader=load_sync_backend, is_async=False), 38 | Loader(name="trio", loader=load_trio_backend, is_async=True), 39 | Loader(name="anyio", loader=load_anyio_backend, is_async=True), 40 | ] 41 | return {loader.name: loader for loader in loaders} 42 | 43 | 44 | def normalize_backend(backend, async_mode): 45 | if backend is None: 46 | if not async_mode: 47 | backend = Backend(name="sync") 48 | else: 49 | import sniffio 50 | 51 | async_library = sniffio.current_async_library() 52 | if async_library in ("asyncio", "curio"): 53 | async_library = "anyio" 54 | backend = Backend(name=async_library) 55 | elif not isinstance(backend, Backend): 56 | backend = Backend(name=backend) 57 | 58 | loaders_by_name = backend_directory() 59 | if backend.name not in loaders_by_name: 60 | raise ValueError("unknown backend specifier {}".format(backend.name)) 61 | 62 | loader = loaders_by_name[backend.name] 63 | 64 | if async_mode and not loader.is_async: 65 | raise ValueError("{} backend needs to be run in sync mode".format(loader.name)) 66 | 67 | if not async_mode and loader.is_async: 68 | raise ValueError("{} backend needs to be run in async mode".format(loader.name)) 69 | 70 | return backend 71 | 72 | 73 | def load_backend(backend): 74 | loaders_by_name = backend_directory() 75 | loader = loaders_by_name[backend.name] 76 | return loader(backend.kwargs) 77 | -------------------------------------------------------------------------------- /src/ahip/_backends/anyio_backend.py: -------------------------------------------------------------------------------- 1 | from ssl import SSLContext 2 | 3 | import anyio 4 | 5 | from ._common import is_readable, LoopAbort 6 | from .async_backend import AsyncBackend, AsyncSocket 7 | 8 | BUFSIZE = 65536 9 | 10 | 11 | # XX support connect_timeout and read_timeout 12 | 13 | 14 | class AnyIOBackend(AsyncBackend): 15 | async def connect( 16 | self, host, port, connect_timeout, source_address=None, socket_options=None 17 | ): 18 | bind_host, bind_port = source_address or (None, None) 19 | stream = await anyio.connect_tcp( 20 | host, port, bind_host=bind_host, bind_port=bind_port 21 | ) 22 | 23 | if socket_options: 24 | for (level, optname, value) in socket_options: 25 | stream.setsockopt(level, optname, value) 26 | 27 | return 
AnyIOSocket(stream) 28 | 29 | 30 | # XX it turns out that we don't need SSLStream to be robustified against 31 | # cancellation, but we probably should do something to detect when the stream 32 | # has been broken by cancellation (e.g. a timeout) and make is_readable return 33 | # True so the connection won't be reused. 34 | 35 | 36 | class AnyIOSocket(AsyncSocket): 37 | def __init__(self, stream: anyio.SocketStream): 38 | self._stream = stream 39 | 40 | async def start_tls(self, server_hostname, ssl_context: SSLContext): 41 | await self._stream.start_tls( 42 | ssl_context, suppress_ragged_eofs=True, server_hostname=server_hostname 43 | ) 44 | return self 45 | 46 | def getpeercert(self, binary_form=False): 47 | return self._stream.getpeercert(binary_form=binary_form) 48 | 49 | async def receive_some(self, read_timeout): 50 | return await self._stream.receive_some(BUFSIZE) 51 | 52 | async def send_and_receive_for_a_while( 53 | self, produce_bytes, consume_bytes, read_timeout 54 | ): 55 | async def sender(): 56 | while True: 57 | outgoing = await produce_bytes() 58 | if outgoing is None: 59 | break 60 | await self._stream.send_all(outgoing) 61 | 62 | async def receiver(): 63 | while True: 64 | incoming = await self._stream.receive_some(BUFSIZE) 65 | consume_bytes(incoming) 66 | 67 | try: 68 | async with anyio.create_task_group() as tg: 69 | await tg.spawn(sender) 70 | await tg.spawn(receiver) 71 | except LoopAbort: 72 | pass 73 | 74 | # We want this to be synchronous, and don't care about graceful teardown 75 | # of the SSL/TLS layer. 76 | def forceful_close(self): 77 | self._stream._socket._raw_socket.close() 78 | 79 | def is_readable(self): 80 | return is_readable(self._stream._socket._raw_socket) 81 | 82 | def set_readable_watch_state(self, enabled): 83 | pass 84 | -------------------------------------------------------------------------------- /src/ahip/_backends/async_backend.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABC 2 | from ssl import SSLContext 3 | from typing import Optional, Tuple, Iterable, Union, Any, Dict, Callable, Awaitable 4 | 5 | 6 | class AsyncBackend(ABC): 7 | @abstractmethod 8 | async def connect( 9 | self, 10 | host: str, 11 | port: int, 12 | connect_timeout: Optional[float], 13 | source_address: Optional[Tuple[str, int]] = None, 14 | socket_options: Optional[Iterable[Tuple[int, int, int]]] = None, 15 | ) -> "AsyncSocket": 16 | raise NotImplementedError() 17 | 18 | 19 | class AsyncSocket(ABC): 20 | @abstractmethod 21 | async def start_tls( 22 | self, server_hostname: Optional[str], ssl_context: SSLContext 23 | ) -> "AsyncSocket": 24 | raise NotImplementedError() 25 | 26 | @abstractmethod 27 | def getpeercert(self, binary_form: bool = False) -> Union[bytes, Dict[str, Any]]: 28 | raise NotImplementedError() 29 | 30 | @abstractmethod 31 | async def receive_some(self, read_timeout: Optional[float]) -> bytes: 32 | raise NotImplementedError() 33 | 34 | @abstractmethod 35 | async def send_and_receive_for_a_while( 36 | self, 37 | produce_bytes: Callable[[], Awaitable[bytes]], 38 | consume_bytes: Callable[[bytes], None], 39 | read_timeout: Optional[float], 40 | ) -> None: 41 | raise NotImplementedError() 42 | 43 | @abstractmethod 44 | def forceful_close(self): 45 | raise NotImplementedError() 46 | 47 | @abstractmethod 48 | def is_readable(self) -> bool: 49 | raise NotImplementedError() 50 | 51 | @abstractmethod 52 | def set_readable_watch_state(self, enabled: bool) -> None: 53 | raise 
NotImplementedError() 54 | -------------------------------------------------------------------------------- /src/ahip/_backends/sync_backend.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import socket 3 | from ..util.connection import create_connection 4 | from ..util.ssl_ import ssl_wrap_socket 5 | from .. import util 6 | 7 | from ._common import is_readable, LoopAbort 8 | 9 | __all__ = ["SyncBackend"] 10 | 11 | BUFSIZE = 65536 12 | 13 | 14 | class SyncBackend(object): 15 | def connect( 16 | self, host, port, connect_timeout, source_address=None, socket_options=None 17 | ): 18 | conn = create_connection( 19 | (host, port), 20 | connect_timeout, 21 | source_address=source_address, 22 | socket_options=socket_options, 23 | ) 24 | return SyncSocket(conn) 25 | 26 | 27 | class SyncSocket(object): 28 | # _wait_for_socket is a hack for testing. See test_sync_connection.py for 29 | # the tests that use this. 30 | def __init__(self, sock, _wait_for_socket=util.wait_for_socket): 31 | self._sock = sock 32 | # We keep the socket in non-blocking mode, except during connect() and 33 | # during the SSL handshake: 34 | self._sock.setblocking(False) 35 | self._wait_for_socket = _wait_for_socket 36 | 37 | def start_tls(self, server_hostname, ssl_context): 38 | self._sock.setblocking(True) 39 | wrapped = ssl_wrap_socket( 40 | self._sock, server_hostname=server_hostname, ssl_context=ssl_context 41 | ) 42 | wrapped.setblocking(False) 43 | return SyncSocket(wrapped) 44 | 45 | # Only for SSL-wrapped sockets 46 | def getpeercert(self, binary_form=False): 47 | return self._sock.getpeercert(binary_form=binary_form) 48 | 49 | def _wait(self, readable, writable, timeout=None): 50 | assert readable or writable 51 | if not self._wait_for_socket( 52 | self._sock, read=readable, write=writable, timeout=timeout 53 | ): 54 | raise socket.timeout() # XX use a backend-agnostic exception 55 | 56 | def receive_some(self, read_timeout): 57 | while True: 58 | try: 59 | return self._sock.recv(BUFSIZE) 60 | except util.SSLWantReadError: 61 | self._wait(readable=True, writable=False, timeout=read_timeout) 62 | except util.SSLWantWriteError: 63 | self._wait(readable=False, writable=True, timeout=read_timeout) 64 | except (OSError, socket.error) as exc: 65 | if exc.errno in (errno.EWOULDBLOCK, errno.EAGAIN): 66 | self._wait(readable=True, writable=False, timeout=read_timeout) 67 | else: 68 | raise 69 | 70 | def send_and_receive_for_a_while(self, produce_bytes, consume_bytes, read_timeout): 71 | outgoing_finished = False 72 | outgoing = b"" 73 | try: 74 | while True: 75 | if not outgoing_finished and not outgoing: 76 | # Can exit loop here with error 77 | b = produce_bytes() 78 | if b is None: 79 | outgoing = None 80 | outgoing_finished = True 81 | else: 82 | assert b 83 | outgoing = memoryview(b) 84 | 85 | # This controls whether or not we block 86 | made_progress = False 87 | # If we do block, then these determine what can wake us up 88 | want_read = False 89 | want_write = False 90 | 91 | # Important: we do recv before send. This is because we want 92 | # to make sure that after a send completes, we immediately 93 | # call produce_bytes before calling recv and potentially 94 | # getting a LoopAbort. This avoids a race condition -- see the 95 | # "subtle invariant" in the backend API documentation. 
96 | 97 | try: 98 | incoming = self._sock.recv(BUFSIZE) 99 | except util.SSLWantReadError: 100 | want_read = True 101 | except util.SSLWantWriteError: 102 | want_write = True 103 | except (OSError, socket.error) as exc: 104 | if exc.errno in (errno.EWOULDBLOCK, errno.EAGAIN): 105 | want_read = True 106 | else: 107 | raise 108 | else: 109 | made_progress = True 110 | # Can exit loop here with LoopAbort 111 | consume_bytes(incoming) 112 | 113 | if not outgoing_finished: 114 | try: 115 | sent = self._sock.send(outgoing) 116 | outgoing = outgoing[sent:] 117 | except util.SSLWantReadError: 118 | want_read = True 119 | except util.SSLWantWriteError: 120 | want_write = True 121 | except (OSError, socket.error) as exc: 122 | if exc.errno in (errno.EWOULDBLOCK, errno.EAGAIN): 123 | want_write = True 124 | else: 125 | raise 126 | else: 127 | made_progress = True 128 | 129 | if not made_progress: 130 | self._wait(want_read, want_write, read_timeout) 131 | except LoopAbort: 132 | pass 133 | 134 | def forceful_close(self): 135 | self._sock.close() 136 | 137 | def is_readable(self): 138 | return is_readable(self._sock) 139 | 140 | def set_readable_watch_state(self, enabled): 141 | pass 142 | 143 | def _version(self): 144 | return self._sock.version() 145 | 146 | def _getsockopt_tcp_nodelay(self): 147 | return self._sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) 148 | 149 | def getsockopt(self, level, option): 150 | return self._sock.getsockopt(level, option) 151 | 152 | def close(self): 153 | return self._sock.close() 154 | -------------------------------------------------------------------------------- /src/ahip/_backends/trio_backend.py: -------------------------------------------------------------------------------- 1 | import trio 2 | 3 | from ._common import is_readable, LoopAbort 4 | from .async_backend import AsyncBackend, AsyncSocket 5 | 6 | BUFSIZE = 65536 7 | 8 | 9 | # XX support connect_timeout and read_timeout 10 | 11 | 12 | class TrioBackend(AsyncBackend): 13 | async def connect( 14 | self, host, port, connect_timeout, source_address=None, socket_options=None 15 | ): 16 | if source_address is not None: 17 | # You can't really combine source_address= and happy eyeballs 18 | # (can we get rid of source_address? or at least make it a source 19 | # ip, no port?) 20 | raise NotImplementedError( 21 | "trio backend doesn't support setting source_address" 22 | ) 23 | 24 | stream = await trio.open_tcp_stream(host, port) 25 | 26 | if socket_options: 27 | for (level, optname, value) in socket_options: 28 | stream.setsockopt(level, optname, value) 29 | 30 | return TrioSocket(stream) 31 | 32 | 33 | # XX it turns out that we don't need SSLStream to be robustified against 34 | # cancellation, but we probably should do something to detect when the stream 35 | # has been broken by cancellation (e.g. a timeout) and make is_readable return 36 | # True so the connection won't be reused. 
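# How this backend is normally reached (a sketch based on ahip.backends.Backend
# and ahip._backends._loader as shown elsewhere in this repository; nothing in
# this module depends on it):
#
#     from ahip.backends import Backend
#     from ahip._backends._loader import load_backend, normalize_backend
#
#     backend = normalize_backend(Backend(name="trio"), async_mode=True)
#     trio_backend = load_backend(backend)      # constructs TrioBackend()
#     # awaited from async code:
#     sock = await trio_backend.connect(host, port, connect_timeout=None)
#
# i.e. callers go through the loader machinery rather than instantiating the
# TrioSocket class below directly.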
37 | 38 | 39 | class TrioSocket(AsyncSocket): 40 | def __init__(self, stream): 41 | self._stream: trio.SSLStream = stream 42 | 43 | async def start_tls(self, server_hostname, ssl_context): 44 | wrapped = trio.SSLStream( 45 | self._stream, 46 | ssl_context, 47 | server_hostname=server_hostname, 48 | https_compatible=True, 49 | ) 50 | await wrapped.do_handshake() 51 | return TrioSocket(wrapped) 52 | 53 | def getpeercert(self, binary_form=False): 54 | return self._stream.getpeercert(binary_form=binary_form) 55 | 56 | async def receive_some(self, read_timeout): 57 | return await self._stream.receive_some(BUFSIZE) 58 | 59 | async def send_and_receive_for_a_while( 60 | self, produce_bytes, consume_bytes, read_timeout 61 | ): 62 | async def sender(): 63 | while True: 64 | outgoing = await produce_bytes() 65 | if outgoing is None: 66 | break 67 | await self._stream.send_all(outgoing) 68 | 69 | async def receiver(): 70 | while True: 71 | incoming = await self._stream.receive_some(BUFSIZE) 72 | consume_bytes(incoming) 73 | 74 | try: 75 | async with trio.open_nursery() as nursery: 76 | nursery.start_soon(sender) 77 | nursery.start_soon(receiver) 78 | except LoopAbort: 79 | pass 80 | 81 | # Pull out the underlying trio socket, because it turns out HTTP is not so 82 | # great at respecting abstraction boundaries. 83 | def _socket(self): 84 | stream = self._stream 85 | # Strip off any layers of SSLStream 86 | while hasattr(stream, "transport_stream"): 87 | stream = stream.transport_stream 88 | # Now we have a SocketStream 89 | return stream.socket 90 | 91 | # We want this to be synchronous, and don't care about graceful teardown 92 | # of the SSL/TLS layer. 93 | def forceful_close(self): 94 | self._socket().close() 95 | 96 | def is_readable(self): 97 | return is_readable(self._socket()) 98 | 99 | def set_readable_watch_state(self, enabled): 100 | pass 101 | -------------------------------------------------------------------------------- /src/ahip/backends.py: -------------------------------------------------------------------------------- 1 | class Backend: 2 | """ 3 | Specifies the desired backend and any arguments passed to its constructor. 4 | 5 | Projects that use Hip can subclass this interface to expose it to users. 6 | """ 7 | 8 | def __init__(self, name, **kwargs): 9 | self.name = name 10 | self.kwargs = kwargs 11 | 12 | def __eq__(self, other): 13 | return self.name == other.name and self.kwargs == other.kwargs 14 | -------------------------------------------------------------------------------- /src/ahip/base.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | This module provides the base structure of the Request/Response objects that 4 | Hip passes around to manage its HTTP semantic layer. 5 | 6 | These objects are the lowest common denominator: that is, they define the 7 | Request/Response functionality that is always supported by Hip. This means 8 | they do not include any extra function required for asynchrony: that 9 | functionality is handled elsewhere. Any part of Hip is required to be able 10 | to work with one of these objects. 11 | """ 12 | from ._collections import HTTPHeaderDict 13 | 14 | 15 | # This dictionary is used to store the default ports for specific schemes to 16 | # control whether the port is inserted into the Host header. 17 | DEFAULT_PORTS = {"http": 80, "https": 443} 18 | 19 | 20 | class Request(object): 21 | """ 22 | The base, common, Request object. 
23 | 24 | This object provides a *semantic* representation of a HTTP request. It 25 | includes all the magical parts of a HTTP request that we have come to know 26 | and love: it has a method, a target (the path & query portions of a URI), 27 | some headers, and optionally a body. 28 | 29 | All of Hip manipulates these Request objects, passing them around and 30 | changing them as necessary. The low-level layers know how to send these 31 | objects. 32 | """ 33 | 34 | def __init__(self, method, target, headers=None, body=None): 35 | #: The HTTP method in use. Must be a byte string. 36 | self.method = method 37 | 38 | #: The request target: that is, the path and query portions of the URI. 39 | self.target = target 40 | 41 | #: The request headers. These are always stored as a HTTPHeaderDict. 42 | self.headers = HTTPHeaderDict(headers) 43 | 44 | #: The request body. This is allowed to be one a few kind of objects: 45 | #: - A byte string. 46 | #: - A "readable" object. 47 | #: - An iterable of byte strings. 48 | #: - A text string (not recommended, auto-encoded to UTF-8) 49 | self.body = body 50 | 51 | def add_host(self, host, port, scheme): 52 | """ 53 | Add the Host header, as needed. 54 | 55 | This helper method exists to circumvent an ordering problem: the best 56 | layer to add the Host header is the bottom layer, but it is the layer 57 | that will add headers last. That means that they will appear at the 58 | bottom of the header block. 59 | 60 | Proxies, caches, and other intermediaries *hate* it when clients do 61 | that because the Host header is routing information, and they'd like to 62 | see it as early as possible. For this reason, this method ensures that 63 | the Host header will be the first one emitted. It also ensures that we 64 | do not duplicate the host header: if there already is one, we just use 65 | that one. 66 | """ 67 | if b"host" not in self.headers: 68 | # We test against a sentinel object here to forcibly always insert 69 | # the port for schemes we don't understand. 70 | if port == DEFAULT_PORTS.get(scheme, object()): 71 | header = host 72 | else: 73 | header = "{}:{}".format(host, port) 74 | 75 | headers = HTTPHeaderDict(host=header) 76 | headers._copy_from(self.headers) 77 | self.headers = headers 78 | 79 | 80 | class Response(object): 81 | """ 82 | The abstract low-level Response object that Hip works on. This is not 83 | the high-level helpful Response object that is exposed at the higher layers 84 | of Hip: it's just a simple object that just exposes the lowest-level 85 | HTTP semantics to allow processing by the higher levels. 86 | """ 87 | 88 | def __init__(self, status_code, headers, body, version): 89 | #: The HTTP status code of the response. 90 | self.status_code = status_code 91 | 92 | #: The headers on the response, as a HTTPHeaderDict. 93 | self.headers = HTTPHeaderDict(headers) 94 | 95 | #: The request body. This is an iterable of bytes, and *must* be 96 | #: iterated if the connection is to be preserved. 97 | self.body = body 98 | 99 | #: The HTTP version of the response. Stored as a bytestring. 100 | self.version = version 101 | 102 | @property 103 | def complete(self): 104 | """ 105 | If the response can be safely returned to the connection pool, returns 106 | True. 
107 | """ 108 | return self.body.complete 109 | -------------------------------------------------------------------------------- /src/ahip/contrib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/src/ahip/contrib/__init__.py -------------------------------------------------------------------------------- /src/ahip/contrib/_securetransport/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/src/ahip/contrib/_securetransport/__init__.py -------------------------------------------------------------------------------- /src/ahip/exceptions.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | # Base Exceptions 4 | 5 | 6 | class HTTPError(Exception): 7 | "Base exception used by this module." 8 | pass 9 | 10 | 11 | class HTTPWarning(Warning): 12 | "Base warning used by this module." 13 | pass 14 | 15 | 16 | class PoolError(HTTPError): 17 | "Base exception for errors caused within a pool." 18 | 19 | def __init__(self, pool, message): 20 | self.pool = pool 21 | HTTPError.__init__(self, "%s: %s" % (pool, message)) 22 | 23 | def __reduce__(self): 24 | # For pickling purposes. 25 | return self.__class__, (None, None) 26 | 27 | 28 | class RequestError(PoolError): 29 | "Base exception for PoolErrors that have associated URLs." 30 | 31 | def __init__(self, pool, url, message): 32 | self.url = url 33 | PoolError.__init__(self, pool, message) 34 | 35 | def __reduce__(self): 36 | # For pickling purposes. 37 | return self.__class__, (None, self.url, None) 38 | 39 | 40 | class SSLError(HTTPError): 41 | "Raised when SSL certificate fails in an HTTPS connection." 42 | pass 43 | 44 | 45 | class ProxyError(HTTPError): 46 | "Raised when the connection to a proxy fails." 47 | pass 48 | 49 | 50 | class DecodeError(HTTPError): 51 | "Raised when automatic decoding based on Content-Type fails." 52 | pass 53 | 54 | 55 | class ProtocolError(HTTPError): 56 | "Raised when something unexpected happens mid-request/response." 57 | pass 58 | 59 | 60 | #: Renamed to ProtocolError but aliased for backwards compatibility. 61 | ConnectionError = ProtocolError 62 | 63 | 64 | # Leaf Exceptions 65 | 66 | 67 | class MaxRetryError(RequestError): 68 | """Raised when the maximum number of retries is exceeded. 69 | 70 | :param pool: The connection pool 71 | :type pool: :class:`~hip.connectionpool.HTTPConnectionPool` 72 | :param string url: The requested Url 73 | :param exceptions.Exception reason: The underlying error 74 | 75 | """ 76 | 77 | def __init__(self, pool, url, reason=None): 78 | self.reason = reason 79 | 80 | message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason) 81 | 82 | RequestError.__init__(self, pool, url, message) 83 | 84 | 85 | class TimeoutStateError(HTTPError): 86 | """ Raised when passing an invalid state to a timeout """ 87 | 88 | pass 89 | 90 | 91 | class TimeoutError(HTTPError): 92 | """Raised when a socket timeout error occurs. 93 | 94 | Catching this error will catch both :exc:`ReadTimeoutErrors 95 | ` and :exc:`ConnectTimeoutErrors `. 
96 | """ 97 | 98 | pass 99 | 100 | 101 | class ReadTimeoutError(TimeoutError, RequestError): 102 | "Raised when a socket timeout occurs while receiving data from a server" 103 | pass 104 | 105 | 106 | # This timeout error does not have a URL attached and needs to inherit from the 107 | # base HTTPError 108 | class ConnectTimeoutError(TimeoutError): 109 | "Raised when a socket timeout occurs while connecting to a server" 110 | pass 111 | 112 | 113 | class NewConnectionError(ConnectTimeoutError, PoolError): 114 | "Raised when we fail to establish a new connection. Usually ECONNREFUSED." 115 | pass 116 | 117 | 118 | class EmptyPoolError(PoolError): 119 | "Raised when a pool runs out of connections and no more are allowed." 120 | pass 121 | 122 | 123 | class ClosedPoolError(PoolError): 124 | "Raised when a request enters a pool after the pool has been closed." 125 | pass 126 | 127 | 128 | class LocationValueError(ValueError, HTTPError): 129 | "Raised when there is something wrong with a given URL input." 130 | pass 131 | 132 | 133 | class LocationParseError(LocationValueError): 134 | "Raised when get_host or similar fails to parse the URL input." 135 | 136 | def __init__(self, location): 137 | message = "Failed to parse: %s" % location 138 | HTTPError.__init__(self, message) 139 | 140 | self.location = location 141 | 142 | 143 | class ResponseError(HTTPError): 144 | "Used as a container for an error reason supplied in a MaxRetryError." 145 | GENERIC_ERROR = "too many error responses" 146 | SPECIFIC_ERROR = "too many {status_code} error responses" 147 | 148 | 149 | class SecurityWarning(HTTPWarning): 150 | "Warned when performing security reducing actions" 151 | pass 152 | 153 | 154 | class SubjectAltNameWarning(SecurityWarning): 155 | "Warned when connecting to a host with a certificate missing a SAN." 156 | pass 157 | 158 | 159 | class InsecureRequestWarning(SecurityWarning): 160 | "Warned when making an unverified HTTPS request." 161 | pass 162 | 163 | 164 | class SystemTimeWarning(SecurityWarning): 165 | "Warned when system time is suspected to be wrong" 166 | pass 167 | 168 | 169 | class InsecurePlatformWarning(SecurityWarning): 170 | "Warned when certain SSL configuration is not available on a platform." 171 | pass 172 | 173 | 174 | class SNIMissingWarning(HTTPWarning): 175 | "Warned when making a HTTPS request without SNI available." 176 | pass 177 | 178 | 179 | class DependencyWarning(HTTPWarning): 180 | """ 181 | Warned when an attempt is made to import a module with missing optional 182 | dependencies. 183 | """ 184 | 185 | pass 186 | 187 | 188 | class InvalidHeader(HTTPError): 189 | "The header provided was somehow invalid." 190 | pass 191 | 192 | 193 | class BadVersionError(ProtocolError): 194 | """ 195 | The HTTP version in the response is unsupported. 196 | """ 197 | 198 | def __init__(self, version): 199 | message = "HTTP version {} is unsupported".format(version) 200 | super(BadVersionError, self).__init__(message) 201 | 202 | 203 | class ProxySchemeUnknown(AssertionError, ValueError): 204 | "ProxyManager does not support the supplied scheme" 205 | # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. 206 | 207 | def __init__(self, scheme): 208 | message = "Not supported proxy scheme %s" % scheme 209 | super(ProxySchemeUnknown, self).__init__(message) 210 | 211 | 212 | class HeaderParsingError(HTTPError): 213 | "Raised by assert_header_parsing, but we convert it to a log.warning statement." 
214 | 215 | def __init__(self, defects, unparsed_data): 216 | message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data) 217 | super(HeaderParsingError, self).__init__(message) 218 | 219 | 220 | class UnrewindableBodyError(HTTPError): 221 | "Hip encountered an error when trying to rewind a body" 222 | pass 223 | 224 | 225 | class FailedTunnelError(HTTPError): 226 | """ 227 | An attempt was made to set up a CONNECT tunnel, but that attempt failed. 228 | """ 229 | 230 | def __init__(self, message, response): 231 | super(FailedTunnelError, self).__init__(message) 232 | self.response = response 233 | 234 | 235 | class InvalidBodyError(HTTPError): 236 | """ 237 | An attempt was made to send a request with a body object that Hip does 238 | not support. 239 | """ 240 | 241 | pass 242 | -------------------------------------------------------------------------------- /src/ahip/filepost.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import binascii 3 | import codecs 4 | import os 5 | 6 | from io import BytesIO 7 | 8 | from .packages import six 9 | from .packages.six import b 10 | from .fields import RequestField 11 | 12 | writer = codecs.lookup("utf-8")[3] 13 | 14 | 15 | def choose_boundary(): 16 | """ 17 | Our embarrassingly-simple replacement for mimetools.choose_boundary. 18 | """ 19 | boundary = binascii.hexlify(os.urandom(16)) 20 | if not six.PY2: 21 | boundary = boundary.decode("ascii") 22 | return boundary 23 | 24 | 25 | def iter_field_objects(fields): 26 | """ 27 | Iterate over fields. 28 | 29 | Supports list of (k, v) tuples and dicts, and lists of 30 | :class:`~hip.fields.RequestField`. 31 | 32 | """ 33 | if isinstance(fields, dict): 34 | i = six.iteritems(fields) 35 | else: 36 | i = iter(fields) 37 | 38 | for field in i: 39 | if isinstance(field, RequestField): 40 | yield field 41 | else: 42 | yield RequestField.from_tuples(*field) 43 | 44 | 45 | def encode_multipart_formdata(fields, boundary=None): 46 | """ 47 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format. 48 | 49 | :param fields: 50 | Dictionary of fields or list of (key, :class:`~hip.fields.RequestField`). 51 | 52 | :param boundary: 53 | If not specified, then a random boundary will be generated using 54 | :func:`hip.filepost.choose_boundary`. 55 | """ 56 | body = BytesIO() 57 | if boundary is None: 58 | boundary = choose_boundary() 59 | 60 | for field in iter_field_objects(fields): 61 | body.write(b("--%s\r\n" % (boundary))) 62 | 63 | writer(body).write(field.render_headers()) 64 | data = field.data 65 | 66 | if isinstance(data, int): 67 | data = str(data) # Backwards compatibility 68 | 69 | if isinstance(data, six.text_type): 70 | writer(body).write(data) 71 | else: 72 | body.write(data) 73 | 74 | body.write(b"\r\n") 75 | 76 | body.write(b("--%s--\r\n" % (boundary))) 77 | 78 | content_type = str("multipart/form-data; boundary=%s" % boundary) 79 | 80 | return body.getvalue(), content_type 81 | -------------------------------------------------------------------------------- /src/ahip/packages/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from . 
import ssl_match_hostname 4 | 5 | __all__ = ("ssl_match_hostname",) 6 | -------------------------------------------------------------------------------- /src/ahip/packages/ssl_match_hostname/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | try: 4 | # Our match_hostname function is the same as 3.5's, so we only want to 5 | # import the match_hostname function if it's at least that good. 6 | if sys.version_info < (3, 5): 7 | raise ImportError("Fallback to vendored code") 8 | 9 | from ssl import CertificateError, match_hostname 10 | except ImportError: 11 | try: 12 | # Backport of the function from a pypi module 13 | from backports.ssl_match_hostname import CertificateError, match_hostname 14 | except ImportError: 15 | # Our vendored copy 16 | from ._implementation import CertificateError, match_hostname 17 | 18 | # Not needed, but documenting what we provide. 19 | __all__ = ("CertificateError", "match_hostname") 20 | -------------------------------------------------------------------------------- /src/ahip/packages/ssl_match_hostname/_implementation.py: -------------------------------------------------------------------------------- 1 | """The match_hostname() function from Python 3.3.3, essential when using SSL.""" 2 | 3 | # Note: This file is under the PSF license as the code comes from the python 4 | # stdlib. http://docs.python.org/3/license.html 5 | 6 | import re 7 | import sys 8 | 9 | # ipaddress has been backported to 2.6+ in pypi. If it is installed on the 10 | # system, use it to handle IPAddress ServerAltnames (this was added in 11 | # python-3.5) otherwise only do DNS matching. This allows 12 | # backports.ssl_match_hostname to continue to be used in Python 2.7. 13 | try: 14 | import ipaddress 15 | except ImportError: 16 | ipaddress = None 17 | 18 | __version__ = "3.5.0.1" 19 | 20 | 21 | class CertificateError(ValueError): 22 | pass 23 | 24 | 25 | def _dnsname_match(dn, hostname, max_wildcards=1): 26 | """Matching according to RFC 6125, section 6.4.3 27 | 28 | http://tools.ietf.org/html/rfc6125#section-6.4.3 29 | """ 30 | pats = [] 31 | if not dn: 32 | return False 33 | 34 | # Ported from python3-syntax: 35 | # leftmost, *remainder = dn.split(r'.') 36 | parts = dn.split(r".") 37 | leftmost = parts[0] 38 | remainder = parts[1:] 39 | 40 | wildcards = leftmost.count("*") 41 | if wildcards > max_wildcards: 42 | # Issue #17980: avoid denials of service by refusing more 43 | # than one wildcard per fragment. A survey of established 44 | # policy among SSL implementations showed it to be a 45 | # reasonable choice. 46 | raise CertificateError( 47 | "too many wildcards in certificate DNS name: " + repr(dn) 48 | ) 49 | 50 | # speed up common case w/o wildcards 51 | if not wildcards: 52 | return dn.lower() == hostname.lower() 53 | 54 | # RFC 6125, section 6.4.3, subitem 1. 55 | # The client SHOULD NOT attempt to match a presented identifier in which 56 | # the wildcard character comprises a label other than the left-most label. 57 | if leftmost == "*": 58 | # When '*' is a fragment by itself, it matches a non-empty dotless 59 | # fragment. 60 | pats.append("[^.]+") 61 | elif leftmost.startswith("xn--") or hostname.startswith("xn--"): 62 | # RFC 6125, section 6.4.3, subitem 3. 63 | # The client SHOULD NOT attempt to match a presented identifier 64 | # where the wildcard character is embedded within an A-label or 65 | # U-label of an internationalized domain name. 
66 | pats.append(re.escape(leftmost)) 67 | else: 68 | # Otherwise, '*' matches any dotless string, e.g. www* 69 | pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) 70 | 71 | # add the remaining fragments, ignore any wildcards 72 | for frag in remainder: 73 | pats.append(re.escape(frag)) 74 | 75 | pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) 76 | return pat.match(hostname) 77 | 78 | 79 | def _to_unicode(obj): 80 | if isinstance(obj, str) and sys.version_info < (3,): 81 | obj = unicode(obj, encoding="ascii", errors="strict") 82 | return obj 83 | 84 | 85 | def _ipaddress_match(ipname, host_ip): 86 | """Exact matching of IP addresses. 87 | 88 | RFC 6125 explicitly doesn't define an algorithm for this 89 | (section 1.7.2 - "Out of Scope"). 90 | """ 91 | # OpenSSL may add a trailing newline to a subjectAltName's IP address 92 | # Divergence from upstream: ipaddress can't handle byte str 93 | ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) 94 | return ip == host_ip 95 | 96 | 97 | def match_hostname(cert, hostname): 98 | """Verify that *cert* (in decoded format as returned by 99 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 100 | rules are followed, but IP addresses are not accepted for *hostname*. 101 | 102 | CertificateError is raised on failure. On success, the function 103 | returns nothing. 104 | """ 105 | if not cert: 106 | raise ValueError( 107 | "empty or no certificate, match_hostname needs a " 108 | "SSL socket or SSL context with either " 109 | "CERT_OPTIONAL or CERT_REQUIRED" 110 | ) 111 | try: 112 | # Divergence from upstream: ipaddress can't handle byte str 113 | host_ip = ipaddress.ip_address(_to_unicode(hostname)) 114 | except ValueError: 115 | # Not an IP address (common case) 116 | host_ip = None 117 | except UnicodeError: 118 | # Divergence from upstream: Have to deal with ipaddress not taking 119 | # byte strings. addresses should be all ascii, so we consider it not 120 | # an ipaddress in this case 121 | host_ip = None 122 | except AttributeError: 123 | # Divergence from upstream: Make ipaddress library optional 124 | if ipaddress is None: 125 | host_ip = None 126 | else: 127 | raise 128 | dnsnames = [] 129 | san = cert.get("subjectAltName", ()) 130 | for key, value in san: 131 | if key == "DNS": 132 | if host_ip is None and _dnsname_match(value, hostname): 133 | return 134 | dnsnames.append(value) 135 | elif key == "IP Address": 136 | if host_ip is not None and _ipaddress_match(value, host_ip): 137 | return 138 | dnsnames.append(value) 139 | if not dnsnames: 140 | # The subject is only checked when there is no dNSName entry 141 | # in subjectAltName 142 | for sub in cert.get("subject", ()): 143 | for key, value in sub: 144 | # XXX according to RFC 2818, the most specific Common Name 145 | # must be used. 
146 | if key == "commonName": 147 | if _dnsname_match(value, hostname): 148 | return 149 | dnsnames.append(value) 150 | if len(dnsnames) > 1: 151 | raise CertificateError( 152 | "hostname %r " 153 | "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) 154 | ) 155 | elif len(dnsnames) == 1: 156 | raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) 157 | else: 158 | raise CertificateError( 159 | "no appropriate commonName or subjectAltName fields were found" 160 | ) 161 | -------------------------------------------------------------------------------- /src/ahip/request.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from .filepost import encode_multipart_formdata 4 | from .packages import six 5 | from .packages.six.moves.urllib.parse import urlencode 6 | 7 | 8 | __all__ = ["RequestMethods"] 9 | 10 | 11 | class RequestMethods(object): 12 | """ 13 | Convenience mixin for classes who implement a :meth:`urlopen` method, such 14 | as :class:`~hip.connectionpool.HTTPConnectionPool` and 15 | :class:`~hip.poolmanager.PoolManager`. 16 | 17 | Provides behavior for making common types of HTTP request methods and 18 | decides which type of request field encoding to use. 19 | 20 | Specifically, 21 | 22 | :meth:`.request_encode_url` is for sending requests whose fields are 23 | encoded in the URL (such as GET, HEAD, DELETE). 24 | 25 | :meth:`.request_encode_body` is for sending requests whose fields are 26 | encoded in the *body* of the request using multipart or www-form-urlencoded 27 | (such as for POST, PUT, PATCH). 28 | 29 | :meth:`.request` is for making any kind of request, it will look up the 30 | appropriate encoding format and use one of the above two methods to make 31 | the request. 32 | 33 | Initializer parameters: 34 | 35 | :param headers: 36 | Headers to include with all requests, unless other headers are given 37 | explicitly. 38 | """ 39 | 40 | _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} 41 | 42 | def __init__(self, headers=None): 43 | self.headers = headers or {} 44 | 45 | async def urlopen( 46 | self, 47 | method, 48 | url, 49 | body=None, 50 | headers=None, 51 | encode_multipart=True, 52 | multipart_boundary=None, 53 | **kw 54 | ): # Abstract 55 | raise NotImplementedError( 56 | "Classes extending RequestMethods must implement " 57 | "their own ``urlopen`` method." 58 | ) 59 | 60 | async def request(self, method, url, fields=None, headers=None, **urlopen_kw): 61 | """ 62 | Make a request using :meth:`urlopen` with the appropriate encoding of 63 | ``fields`` based on the ``method`` used. 64 | 65 | This is a convenience method that requires the least amount of manual 66 | effort. It can be used in most situations, while still having the 67 | option to drop down to more specific methods when necessary, such as 68 | :meth:`request_encode_url`, :meth:`request_encode_body`, 69 | or even the lowest level :meth:`urlopen`. 
70 | """ 71 | method = method.upper() 72 | 73 | urlopen_kw["request_url"] = url 74 | 75 | if method in self._encode_url_methods: 76 | return await self.request_encode_url( 77 | method, url, fields=fields, headers=headers, **urlopen_kw 78 | ) 79 | else: 80 | return await self.request_encode_body( 81 | method, url, fields=fields, headers=headers, **urlopen_kw 82 | ) 83 | 84 | async def request_encode_url( 85 | self, method, url, fields=None, headers=None, **urlopen_kw 86 | ): 87 | """ 88 | Make a request using :meth:`urlopen` with the ``fields`` encoded in 89 | the url. This is useful for request methods like GET, HEAD, DELETE, etc. 90 | """ 91 | if headers is None: 92 | headers = self.headers 93 | 94 | extra_kw = {"headers": headers} 95 | extra_kw.update(urlopen_kw) 96 | 97 | if fields: 98 | url += "?" + urlencode(fields) 99 | 100 | return await self.urlopen(method, url, **extra_kw) 101 | 102 | async def request_encode_body( 103 | self, 104 | method, 105 | url, 106 | fields=None, 107 | headers=None, 108 | encode_multipart=True, 109 | multipart_boundary=None, 110 | **urlopen_kw 111 | ): 112 | """ 113 | Make a request using :meth:`urlopen` with the ``fields`` encoded in 114 | the body. This is useful for request methods like POST, PUT, PATCH, etc. 115 | 116 | When ``encode_multipart=True`` (default), then 117 | :meth:`hip.filepost.encode_multipart_formdata` is used to encode 118 | the payload with the appropriate content type. Otherwise 119 | :meth:`urllib.urlencode` is used with the 120 | 'application/x-www-form-urlencoded' content type. 121 | 122 | Multipart encoding must be used when posting files, and it's reasonably 123 | safe to use it in other times too. However, it may break request 124 | signing, such as with OAuth. 125 | 126 | Supports an optional ``fields`` parameter of key/value strings AND 127 | key/filetuple. A filetuple is a (filename, data, MIME type) tuple where 128 | the MIME type is optional. For example:: 129 | 130 | fields = { 131 | 'foo': 'bar', 132 | 'fakefile': ('foofile.txt', 'contents of foofile'), 133 | 'realfile': ('barfile.txt', open('realfile').read()), 134 | 'typedfile': ('bazfile.bin', open('bazfile').read(), 135 | 'image/jpeg'), 136 | 'nonamefile': 'contents of nonamefile field', 137 | } 138 | 139 | When uploading a file, providing a filename (the first parameter of the 140 | tuple) is optional but recommended to best mimic behavior of browsers. 141 | 142 | Note that if ``headers`` are supplied, the 'Content-Type' header will 143 | be overwritten because it depends on the dynamic random boundary string 144 | which is used to compose the body of the request. The random boundary 145 | string can be explicitly set with the ``multipart_boundary`` parameter. 146 | """ 147 | if headers is None: 148 | headers = self.headers 149 | 150 | extra_kw = {"headers": {}} 151 | 152 | if fields: 153 | if "body" in urlopen_kw: 154 | raise TypeError( 155 | "request got values for both 'fields' and 'body', can only specify one." 
156 | ) 157 | 158 | if encode_multipart: 159 | body, content_type = encode_multipart_formdata( 160 | fields, boundary=multipart_boundary 161 | ) 162 | else: 163 | body, content_type = ( 164 | urlencode(fields), 165 | "application/x-www-form-urlencoded", 166 | ) 167 | 168 | if isinstance(body, six.text_type): 169 | body = body.encode("utf-8") 170 | 171 | extra_kw["body"] = body 172 | extra_kw["headers"] = {"Content-Type": content_type} 173 | 174 | extra_kw["headers"].update(headers) 175 | extra_kw.update(urlopen_kw) 176 | 177 | return await self.urlopen(method, url, **extra_kw) 178 | -------------------------------------------------------------------------------- /src/ahip/util/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | # For backwards compatibility, provide imports that used to be here. 4 | from .connection import is_connection_dropped 5 | from .request import make_headers 6 | from .ssl_ import ( 7 | SSLContext, 8 | HAS_SNI, 9 | IS_PYOPENSSL, 10 | IS_SECURETRANSPORT, 11 | assert_fingerprint, 12 | resolve_cert_reqs, 13 | resolve_ssl_version, 14 | ssl_wrap_socket, 15 | SSLWantReadError, 16 | SSLWantWriteError, 17 | PROTOCOL_TLS, 18 | ) 19 | from .timeout import current_time, Timeout 20 | 21 | from .retry import Retry 22 | from .url import parse_url, Url 23 | from .wait import wait_for_read, wait_for_write, wait_for_socket 24 | 25 | __all__ = ( 26 | "HAS_SNI", 27 | "IS_PYOPENSSL", 28 | "IS_SECURETRANSPORT", 29 | "SSLContext", 30 | "PROTOCOL_TLS", 31 | "Retry", 32 | "Timeout", 33 | "Url", 34 | "assert_fingerprint", 35 | "current_time", 36 | "is_connection_dropped", 37 | "parse_url", 38 | "make_headers", 39 | "resolve_cert_reqs", 40 | "resolve_ssl_version", 41 | "ssl_wrap_socket", 42 | "wait_for_read", 43 | "wait_for_write", 44 | "wait_for_socket", 45 | "SSLWantReadError", 46 | "SSLWantWriteError", 47 | ) 48 | -------------------------------------------------------------------------------- /src/ahip/util/connection.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import socket 3 | 4 | 5 | def is_connection_dropped(conn): # Platform-specific 6 | """ 7 | Returns True if the connection is dropped and should be closed. 8 | """ 9 | sock = getattr(conn, "_sock", False) 10 | 11 | if sock is None: # Connection already closed (such as by httplib). 12 | return True 13 | 14 | return sock.is_readable() 15 | 16 | 17 | # This function is copied from socket.py in the Python 2.7 standard 18 | # library test suite. Added to its signature is only `socket_options`. 19 | # One additional modification is that we avoid binding to IPv6 servers 20 | # discovered in DNS if the system doesn't have IPv6 functionality. 21 | def create_connection( 22 | address, 23 | timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 24 | source_address=None, 25 | socket_options=None, 26 | ): 27 | """Connect to *address* and return the socket object. 28 | 29 | Convenience function. Connect to *address* (a 2-tuple ``(host, 30 | port)``) and return the socket object. Passing the optional 31 | *timeout* parameter will set the timeout on the socket instance 32 | before attempting to connect. If no *timeout* is supplied, the 33 | global default timeout setting returned by :func:`getdefaulttimeout` 34 | is used. If *source_address* is set it must be a tuple of (host, port) 35 | for the socket to bind as a source address before making the connection. 
36 | An host of '' or port 0 tells the OS to use the default. 37 | """ 38 | 39 | host, port = address 40 | if host.startswith("["): 41 | host = host.strip("[]") 42 | err = None 43 | 44 | # Using the value from allowed_gai_family() in the context of getaddrinfo lets 45 | # us select whether to work with IPv4 DNS records, IPv6 records, or both. 46 | # The original create_connection function always returns all records. 47 | family = allowed_gai_family() 48 | 49 | for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): 50 | af, socktype, proto, canonname, sa = res 51 | sock = None 52 | try: 53 | sock = socket.socket(af, socktype, proto) 54 | 55 | # If provided, set socket level options before connecting. 56 | _set_socket_options(sock, socket_options) 57 | 58 | if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: 59 | sock.settimeout(timeout) 60 | if source_address: 61 | sock.bind(source_address) 62 | sock.connect(sa) 63 | return sock 64 | 65 | except socket.error as e: 66 | err = e 67 | if sock is not None: 68 | sock.close() 69 | sock = None 70 | 71 | if err is not None: 72 | raise err 73 | 74 | raise socket.error("getaddrinfo returns an empty list") 75 | 76 | 77 | def _set_socket_options(sock, options): 78 | if options is None: 79 | return 80 | 81 | for opt in options: 82 | sock.setsockopt(*opt) 83 | 84 | 85 | def allowed_gai_family(): 86 | """This function is designed to work in the context of 87 | getaddrinfo, where family=socket.AF_UNSPEC is the default and 88 | will perform a DNS search for both IPv6 and IPv4 records.""" 89 | 90 | family = socket.AF_INET 91 | if HAS_IPV6: 92 | family = socket.AF_UNSPEC 93 | return family 94 | 95 | 96 | def _has_ipv6(host): 97 | """ Returns True if the system can bind an IPv6 address. """ 98 | sock = None 99 | has_ipv6 = False 100 | 101 | if socket.has_ipv6: 102 | # has_ipv6 returns true if cPython was compiled with IPv6 support. 103 | # It does not tell us if the system has IPv6 support enabled. To 104 | # determine that we must bind to an IPv6 address. 105 | # https://github.com/urllib3/urllib3/pull/611 106 | # https://bugs.python.org/issue658327 107 | try: 108 | sock = socket.socket(socket.AF_INET6) 109 | sock.bind((host, 0)) 110 | has_ipv6 = True 111 | except Exception: 112 | pass 113 | 114 | if sock: 115 | sock.close() 116 | return has_ipv6 117 | 118 | 119 | HAS_IPV6 = _has_ipv6("::1") 120 | -------------------------------------------------------------------------------- /src/ahip/util/queue.py: -------------------------------------------------------------------------------- 1 | import collections 2 | from ..packages import six 3 | from ..packages.six.moves import queue 4 | 5 | if six.PY2: 6 | # Queue is imported for side effects on MS Windows. See issue #229. 
7 | import Queue as _unused_module_Queue # noqa: F401 8 | 9 | 10 | class LifoQueue(queue.Queue): 11 | def _init(self, _): 12 | self.queue = collections.deque() 13 | 14 | def _qsize(self, len=len): 15 | return len(self.queue) 16 | 17 | def _put(self, item): 18 | self.queue.append(item) 19 | 20 | def _get(self): 21 | return self.queue.pop() 22 | -------------------------------------------------------------------------------- /src/ahip/util/request.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from base64 import b64encode 3 | 4 | from .unasync import await_if_coro 5 | from ..packages.six import b, integer_types 6 | from ..exceptions import UnrewindableBodyError 7 | 8 | ACCEPT_ENCODING = "gzip,deflate" 9 | try: 10 | import brotli as _unused_module_brotli # noqa: F401 11 | except ImportError: 12 | pass 13 | else: 14 | ACCEPT_ENCODING += ",br" 15 | 16 | _FAILEDTELL = object() 17 | 18 | 19 | def make_headers( 20 | keep_alive=None, 21 | accept_encoding=None, 22 | user_agent=None, 23 | basic_auth=None, 24 | proxy_basic_auth=None, 25 | disable_cache=None, 26 | ): 27 | """ 28 | Shortcuts for generating request headers. 29 | 30 | :param keep_alive: 31 | If ``True``, adds 'connection: keep-alive' header. 32 | 33 | :param accept_encoding: 34 | Can be a boolean, list, or string. 35 | ``True`` translates to 'gzip,deflate'. 36 | List will get joined by comma. 37 | String will be used as provided. 38 | 39 | :param user_agent: 40 | String representing the user-agent you want, such as 41 | "python-hip/0.6" 42 | 43 | :param basic_auth: 44 | Colon-separated username:password string for 'authorization: basic ...' 45 | auth header. 46 | 47 | :param proxy_basic_auth: 48 | Colon-separated username:password string for 'proxy-authorization: basic ...' 49 | auth header. 50 | 51 | :param disable_cache: 52 | If ``True``, adds 'cache-control: no-cache' header. 53 | 54 | Example:: 55 | 56 | >>> make_headers(keep_alive=True, user_agent="Batman/1.0") 57 | {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} 58 | >>> make_headers(accept_encoding=True) 59 | {'accept-encoding': 'gzip,deflate'} 60 | """ 61 | headers = {} 62 | if accept_encoding: 63 | if isinstance(accept_encoding, str): 64 | pass 65 | elif isinstance(accept_encoding, list): 66 | accept_encoding = ",".join(accept_encoding) 67 | else: 68 | accept_encoding = ACCEPT_ENCODING 69 | headers["accept-encoding"] = accept_encoding 70 | 71 | if user_agent: 72 | headers["user-agent"] = user_agent 73 | 74 | if keep_alive: 75 | headers["connection"] = "keep-alive" 76 | 77 | if basic_auth: 78 | headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") 79 | 80 | if proxy_basic_auth: 81 | headers["proxy-authorization"] = "Basic " + b64encode( 82 | b(proxy_basic_auth) 83 | ).decode("utf-8") 84 | 85 | if disable_cache: 86 | headers["cache-control"] = "no-cache" 87 | 88 | return headers 89 | 90 | 91 | async def set_file_position(body, pos): 92 | """ 93 | If a position is provided, move file to that point. 94 | Otherwise, we'll attempt to record a position for future use. 95 | """ 96 | if pos is not None: 97 | await rewind_body(body, pos) 98 | elif getattr(body, "tell", None) is not None: 99 | try: 100 | pos = await await_if_coro(body.tell()) 101 | except (IOError, OSError): 102 | # This differentiates from None, allowing us to catch 103 | # a failed `tell()` later when trying to rewind the body. 
104 | pos = _FAILEDTELL 105 | 106 | return pos 107 | 108 | 109 | async def rewind_body(body, body_pos): 110 | """ 111 | Attempt to rewind body to a certain position. 112 | Primarily used for request redirects and retries. 113 | 114 | :param body: 115 | File-like object that supports seek. 116 | 117 | :param int pos: 118 | Position to seek to in file. 119 | """ 120 | body_seek = getattr(body, "seek", None) 121 | if body_seek is not None and isinstance(body_pos, integer_types): 122 | try: 123 | await await_if_coro(body_seek(body_pos)) 124 | except (IOError, OSError): 125 | raise UnrewindableBodyError( 126 | "An error occurred when rewinding request body for redirect/retry." 127 | ) 128 | elif body_pos is _FAILEDTELL: 129 | raise UnrewindableBodyError( 130 | "Unable to record file position for rewinding " 131 | "request body during a redirect/retry." 132 | ) 133 | else: 134 | raise ValueError( 135 | "body_pos must be of type integer, instead it was %s." % type(body_pos) 136 | ) 137 | -------------------------------------------------------------------------------- /src/ahip/util/unasync.py: -------------------------------------------------------------------------------- 1 | """Set of utility functions for unasync that transform into sync counterparts cleanly""" 2 | 3 | import inspect 4 | 5 | _original_next = next 6 | 7 | 8 | def is_async_mode(): 9 | """Tests if we're in the async part of the code or not""" 10 | 11 | async def f(): 12 | """Unasync transforms async functions in sync functions""" 13 | return None 14 | 15 | obj = f() 16 | if obj is None: 17 | return False 18 | else: 19 | obj.close() # prevent unawaited coroutine warning 20 | return True 21 | 22 | 23 | ASYNC_MODE = is_async_mode() 24 | 25 | 26 | async def anext(x): 27 | return await x.__anext__() 28 | 29 | 30 | async def await_if_coro(x): 31 | if inspect.iscoroutine(x): 32 | return await x 33 | return x 34 | 35 | 36 | next = _original_next 37 | 38 | 39 | def return_non_coro(x): 40 | return x 41 | -------------------------------------------------------------------------------- /src/ahip/util/wait.py: -------------------------------------------------------------------------------- 1 | import errno 2 | from functools import partial 3 | import select 4 | import sys 5 | 6 | try: 7 | from time import monotonic 8 | except ImportError: 9 | from time import time as monotonic 10 | 11 | __all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] 12 | 13 | 14 | class NoWayToWaitForSocketError(Exception): 15 | pass 16 | 17 | 18 | # How should we wait on sockets? 19 | # 20 | # There are two types of APIs you can use for waiting on sockets: the fancy 21 | # modern stateful APIs like epoll/kqueue, and the older stateless APIs like 22 | # select/poll. The stateful APIs are more efficient when you have a lots of 23 | # sockets to keep track of, because you can set them up once and then use them 24 | # lots of times. But we only ever want to wait on a single socket at a time 25 | # and don't want to keep track of state, so the stateless APIs are actually 26 | # more efficient. So we want to use select() or poll(). 27 | # 28 | # Now, how do we choose between select() and poll()? On traditional Unixes, 29 | # select() has a strange calling convention that makes it slow, or fail 30 | # altogether, for high-numbered file descriptors. The point of poll() is to fix 31 | # that, so on Unixes, we prefer poll(). 
32 | # 33 | # On Windows, there is no poll() (or at least Python doesn't provide a wrapper 34 | # for it), but that's OK, because on Windows, select() doesn't have this 35 | # strange calling convention; plain select() works fine. 36 | # 37 | # So: on Windows we use select(), and everywhere else we use poll(). We also 38 | # fall back to select() in case poll() is somehow broken or missing. 39 | 40 | if sys.version_info >= (3, 5): 41 | # Modern Python, that retries syscalls by default 42 | def _retry_on_intr(fn, timeout): 43 | return fn(timeout) 44 | 45 | 46 | else: # Python 2.7 47 | # Old and broken Pythons. 48 | def _retry_on_intr(fn, timeout): 49 | if timeout is None: 50 | deadline = float("inf") 51 | else: 52 | deadline = monotonic() + timeout 53 | 54 | while True: 55 | try: 56 | return fn(timeout) 57 | # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 58 | except (OSError, select.error) as e: 59 | # 'e.args[0]' incantation works for both OSError and select.error 60 | if e.args[0] != errno.EINTR: 61 | raise 62 | else: 63 | timeout = deadline - monotonic() 64 | if timeout < 0: 65 | timeout = 0 66 | if timeout == float("inf"): 67 | timeout = None 68 | continue 69 | 70 | 71 | def select_wait_for_socket(sock, read=False, write=False, timeout=None): 72 | if not read and not write: 73 | raise RuntimeError("must specify at least one of read=True, write=True") 74 | rcheck = [] 75 | wcheck = [] 76 | if read: 77 | rcheck.append(sock) 78 | if write: 79 | wcheck.append(sock) 80 | # When doing a non-blocking connect, most systems signal success by 81 | # marking the socket writable. Windows, though, signals success by marked 82 | # it as "exceptional". We paper over the difference by checking the write 83 | # sockets for both conditions. (The stdlib selectors module does the same 84 | # thing.) 85 | fn = partial(select.select, rcheck, wcheck, wcheck) 86 | rready, wready, xready = _retry_on_intr(fn, timeout) 87 | return bool(rready or wready or xready) 88 | 89 | 90 | def poll_wait_for_socket(sock, read=False, write=False, timeout=None): 91 | if not read and not write: 92 | raise RuntimeError("must specify at least one of read=True, write=True") 93 | mask = 0 94 | if read: 95 | mask |= select.POLLIN 96 | if write: 97 | mask |= select.POLLOUT 98 | poll_obj = select.poll() 99 | poll_obj.register(sock, mask) 100 | 101 | # For some reason, poll() takes timeout in milliseconds 102 | def do_poll(t): 103 | if t is not None: 104 | t *= 1000 105 | return poll_obj.poll(t) 106 | 107 | return bool(_retry_on_intr(do_poll, timeout)) 108 | 109 | 110 | def null_wait_for_socket(*args, **kwargs): 111 | raise NoWayToWaitForSocketError("no select-equivalent available") 112 | 113 | 114 | def _have_working_poll(): 115 | # Apparently some systems have a select.poll that fails as soon as you try 116 | # to use it, either due to strange configuration or broken monkeypatching 117 | # from libraries like eventlet/greenlet. 118 | try: 119 | poll_obj = select.poll() 120 | _retry_on_intr(poll_obj.poll, 0) 121 | except (AttributeError, OSError): 122 | return False 123 | else: 124 | return True 125 | 126 | 127 | def wait_for_socket(*args, **kwargs): 128 | # We delay choosing which implementation to use until the first time we're 129 | # called. We could do it at import time, but then we might make the wrong 130 | # decision if someone goes wild with monkeypatching select.poll after 131 | # we're imported. 
132 | global wait_for_socket 133 | if _have_working_poll(): 134 | wait_for_socket = poll_wait_for_socket 135 | elif hasattr(select, "select"): 136 | wait_for_socket = select_wait_for_socket 137 | return wait_for_socket(*args, **kwargs) 138 | 139 | 140 | def wait_for_read(sock, timeout=None): 141 | """Waits for reading to be available on a given socket. 142 | Returns True if the socket is readable, or False if the timeout expired. 143 | """ 144 | return wait_for_socket(sock, read=True, timeout=timeout) 145 | 146 | 147 | def wait_for_write(sock, timeout=None): 148 | """Waits for writing to be available on a given socket. 149 | Returns True if the socket is readable, or False if the timeout expired. 150 | """ 151 | return wait_for_socket(sock, write=True, timeout=timeout) 152 | -------------------------------------------------------------------------------- /test/async/test_backends.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import curio 3 | import trio 4 | 5 | from hip._backends._loader import normalize_backend 6 | 7 | 8 | def test_sniff_backends(): 9 | async def _test_sniff_async(expected_name): 10 | backend = normalize_backend(None, async_mode=True) 11 | assert backend.name == expected_name 12 | 13 | trio.run(_test_sniff_async, "trio") 14 | curio.run(_test_sniff_async, "anyio") 15 | loop = asyncio.get_event_loop() 16 | loop.run_until_complete(_test_sniff_async("anyio")) 17 | -------------------------------------------------------------------------------- /test/benchmark.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | Really simple rudimentary benchmark to compare ConnectionPool versus standard 5 | urllib to demonstrate the usefulness of connection re-using. 6 | """ 7 | from __future__ import print_function 8 | 9 | import sys 10 | import time 11 | import urllib 12 | 13 | sys.path.append("../") 14 | import hip # noqa: E402 15 | 16 | 17 | # URLs to download. Doesn't matter as long as they're from the same host, so we 18 | # can take advantage of connection re-using. 
19 | TO_DOWNLOAD = [ 20 | "http://code.google.com/apis/apps/", 21 | "http://code.google.com/apis/base/", 22 | "http://code.google.com/apis/blogger/", 23 | "http://code.google.com/apis/calendar/", 24 | "http://code.google.com/apis/codesearch/", 25 | "http://code.google.com/apis/contact/", 26 | "http://code.google.com/apis/books/", 27 | "http://code.google.com/apis/documents/", 28 | "http://code.google.com/apis/finance/", 29 | "http://code.google.com/apis/health/", 30 | "http://code.google.com/apis/notebook/", 31 | "http://code.google.com/apis/picasaweb/", 32 | "http://code.google.com/apis/spreadsheets/", 33 | "http://code.google.com/apis/webmastertools/", 34 | "http://code.google.com/apis/youtube/", 35 | ] 36 | 37 | 38 | def urllib_get(url_list): 39 | assert url_list 40 | for url in url_list: 41 | now = time.time() 42 | urllib.urlopen(url) 43 | elapsed = time.time() - now 44 | print("Got in %0.3f: %s" % (elapsed, url)) 45 | 46 | 47 | def pool_get(url_list): 48 | assert url_list 49 | pool = hip.PoolManager() 50 | for url in url_list: 51 | now = time.time() 52 | pool.request("GET", url, assert_same_host=False) 53 | elapsed = time.time() - now 54 | print("Got in %0.3fs: %s" % (elapsed, url)) 55 | 56 | 57 | if __name__ == "__main__": 58 | print("Running pool_get ...") 59 | now = time.time() 60 | pool_get(TO_DOWNLOAD) 61 | pool_elapsed = time.time() - now 62 | 63 | print("Running urllib_get ...") 64 | now = time.time() 65 | urllib_get(TO_DOWNLOAD) 66 | urllib_elapsed = time.time() - now 67 | 68 | print("Completed pool_get in %0.3fs" % pool_elapsed) 69 | print("Completed urllib_get in %0.3fs" % urllib_elapsed) 70 | 71 | 72 | """ 73 | Example results: 74 | 75 | Completed pool_get in 1.163s 76 | Completed urllib_get in 2.318s 77 | """ 78 | -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import sys 3 | 4 | import pytest 5 | import trustme 6 | 7 | from dummyserver.server import ( 8 | DEFAULT_CA, 9 | DEFAULT_CA_KEY, 10 | CLIENT_INTERMEDIATE_PEM, 11 | CLIENT_NO_INTERMEDIATE_PEM, 12 | CLIENT_INTERMEDIATE_KEY, 13 | ) 14 | 15 | # We support Python 3.6+ for async code 16 | if sys.version_info[:2] < (3, 6): 17 | collect_ignore_glob = ["async/*.py", "with_dummyserver/async*/*.py"] 18 | 19 | 20 | # The Python 3.8+ default loop on Windows breaks Tornado 21 | @pytest.fixture(scope="session", autouse=True) 22 | def configure_windows_event_loop(): 23 | if sys.version_info >= (3, 8) and platform.system() == "Windows": 24 | import asyncio 25 | 26 | asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) 27 | 28 | 29 | @pytest.fixture(scope="session") 30 | def certs_dir(tmp_path_factory): 31 | tmpdir = tmp_path_factory.mktemp("certs") 32 | # Start from existing root CA as we don't want to change the server certificate yet 33 | with open(DEFAULT_CA, "rb") as crt, open(DEFAULT_CA_KEY, "rb") as key: 34 | root_ca = trustme.CA.from_pem(crt.read(), key.read()) 35 | 36 | # client cert chain 37 | intermediate_ca = root_ca.create_child_ca() 38 | cert = intermediate_ca.issue_cert(u"example.com") 39 | 40 | cert.private_key_pem.write_to_path(str(tmpdir / CLIENT_INTERMEDIATE_KEY)) 41 | # Write the client cert and the intermediate CA 42 | client_cert = str(tmpdir / CLIENT_INTERMEDIATE_PEM) 43 | cert.cert_chain_pems[0].write_to_path(client_cert) 44 | cert.cert_chain_pems[1].write_to_path(client_cert, append=True) 45 | # Write only the client cert 46 | 
cert.cert_chain_pems[0].write_to_path(str(tmpdir / CLIENT_NO_INTERMEDIATE_PEM)) 47 | 48 | yield tmpdir 49 | -------------------------------------------------------------------------------- /test/contrib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/test/contrib/__init__.py -------------------------------------------------------------------------------- /test/contrib/test_securetransport.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import contextlib 3 | import socket 4 | import ssl 5 | 6 | import pytest 7 | 8 | try: 9 | from hip.contrib.securetransport import WrappedSocket 10 | except ImportError: 11 | pass 12 | 13 | pytestmark = pytest.mark.skip("SecureTransport currently not supported on v2!") 14 | 15 | 16 | def setup_module(): 17 | try: 18 | from hip.contrib.securetransport import inject_into_hip 19 | 20 | inject_into_hip() 21 | except ImportError as e: 22 | pytest.skip("Could not import SecureTransport: %r" % e) 23 | 24 | 25 | def teardown_module(): 26 | try: 27 | from hip.contrib.securetransport import extract_from_hip 28 | 29 | extract_from_hip() 30 | except ImportError: 31 | pass 32 | 33 | 34 | # SecureTransport does not support TLSv1.3 35 | # https://github.com/urllib3/urllib3/issues/1674 36 | from ..with_dummyserver.test_https import ( # noqa: E402, F401 37 | TestHTTPS, 38 | TestHTTPS_TLSv1, 39 | TestHTTPS_TLSv1_1, 40 | TestHTTPS_TLSv1_2, 41 | ) 42 | from ..with_dummyserver.test_socketlevel import ( # noqa: E402, F401 43 | TestSNI, 44 | TestSocketClosing, 45 | TestClientCerts, 46 | ) 47 | 48 | 49 | def test_no_crash_with_empty_trust_bundle(): 50 | with contextlib.closing(socket.socket()) as s: 51 | ws = WrappedSocket(s) 52 | with pytest.raises(ssl.SSLError): 53 | ws._custom_validate(True, b"") 54 | -------------------------------------------------------------------------------- /test/port_helpers.py: -------------------------------------------------------------------------------- 1 | # These helpers are copied from test_support.py in the Python 2.7 standard 2 | # library test suite. 3 | 4 | import socket 5 | 6 | 7 | # Don't use "localhost", since resolving it uses the DNS under recent 8 | # Windows versions (see issue #18792). 9 | HOST = "127.0.0.1" 10 | HOSTv6 = "::1" 11 | 12 | 13 | def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): 14 | """Returns an unused port that should be suitable for binding. This is 15 | achieved by creating a temporary socket with the same family and type as 16 | the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to 17 | the specified host address (defaults to 0.0.0.0) with the port set to 0, 18 | eliciting an unused ephemeral port from the OS. The temporary socket is 19 | then closed and deleted, and the ephemeral port is returned. 20 | 21 | Either this method or bind_port() should be used for any tests where a 22 | server socket needs to be bound to a particular port for the duration of 23 | the test. Which one to use depends on whether the calling code is creating 24 | a python socket, or if an unused port needs to be provided in a constructor 25 | or passed to an external program (i.e. the -accept argument to openssl's 26 | s_server mode). Always prefer bind_port() over find_unused_port() where 27 | possible. Hard coded ports should *NEVER* be used. 
As soon as a server 28 | socket is bound to a hard coded port, the ability to run multiple instances 29 | of the test simultaneously on the same host is compromised, which makes the 30 | test a ticking time bomb in a buildbot environment. On Unix buildbots, this 31 | may simply manifest as a failed test, which can be recovered from without 32 | intervention in most cases, but on Windows, the entire python process can 33 | completely and utterly wedge, requiring someone to log in to the buildbot 34 | and manually kill the affected process. 35 | 36 | (This is easy to reproduce on Windows, unfortunately, and can be traced to 37 | the SO_REUSEADDR socket option having different semantics on Windows versus 38 | Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, 39 | listen and then accept connections on identical host/ports. An EADDRINUSE 40 | socket.error will be raised at some point (depending on the platform and 41 | the order bind and listen were called on each socket). 42 | 43 | However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE 44 | will ever be raised when attempting to bind two identical host/ports. When 45 | accept() is called on each socket, the second caller's process will steal 46 | the port from the first caller, leaving them both in an awkwardly wedged 47 | state where they'll no longer respond to any signals or graceful kills, and 48 | must be forcibly killed via OpenProcess()/TerminateProcess(). 49 | 50 | The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option 51 | instead of SO_REUSEADDR, which effectively affords the same semantics as 52 | SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open 53 | Source world compared to Windows ones, this is a common mistake. A quick 54 | look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when 55 | openssl.exe is called with the 's_server' option, for example. See 56 | http://bugs.python.org/issue2550 for more info. The following site also 57 | has a very thorough description about the implications of both REUSEADDR 58 | and EXCLUSIVEADDRUSE on Windows: 59 | http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) 60 | 61 | XXX: although this approach is a vast improvement on previous attempts to 62 | elicit unused ports, it rests heavily on the assumption that the ephemeral 63 | port returned to us by the OS won't immediately be dished back out to some 64 | other process when we close and delete our temporary socket but before our 65 | calling code has a chance to bind the returned port. We can deal with this 66 | issue if/when we come across it.""" 67 | tempsock = socket.socket(family, socktype) 68 | port = bind_port(tempsock) 69 | tempsock.close() 70 | del tempsock 71 | return port 72 | 73 | 74 | def bind_port(sock, host=HOST): 75 | """Bind the socket to a free port and return the port number. Relies on 76 | ephemeral ports in order to ensure we are using an unbound port. This is 77 | important as many tests may be running simultaneously, especially in a 78 | buildbot environment. This method raises an exception if the sock.family 79 | is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR 80 | or SO_REUSEPORT set on it. Tests should *never* set these socket options 81 | for TCP/IP sockets. The only case for setting these options is testing 82 | multicasting via multiple UDP sockets. 83 | 84 | Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. 85 | on Windows), it will be set on the socket. 
This will prevent anyone else 86 | from bind()'ing to our host/port for the duration of the test. 87 | """ 88 | if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: 89 | if hasattr(socket, "SO_REUSEADDR"): 90 | if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: 91 | raise ValueError( 92 | "tests should never set the SO_REUSEADDR " 93 | "socket option on TCP/IP sockets!" 94 | ) 95 | if hasattr(socket, "SO_REUSEPORT"): 96 | if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: 97 | raise ValueError( 98 | "tests should never set the SO_REUSEPORT " 99 | "socket option on TCP/IP sockets!" 100 | ) 101 | if hasattr(socket, "SO_EXCLUSIVEADDRUSE"): 102 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) 103 | 104 | sock.bind((host, 0)) 105 | port = sock.getsockname()[1] 106 | return port 107 | -------------------------------------------------------------------------------- /test/socketpair_helper.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | # Figuring out what errors could come out of a socket. There are three 4 | # different situations. Python 3 post-PEP3151 will define and use 5 | # BlockingIOError and InterruptedError from sockets. For Python pre-PEP3151 6 | # both OSError and socket.error can be raised except on Windows where 7 | # WindowsError can also be raised. We want to catch all of these possible 8 | # exceptions so we catch WindowsError if it's defined. 9 | try: 10 | _CONNECT_ERROR = (BlockingIOError, InterruptedError) 11 | except NameError: 12 | try: 13 | _CONNECT_ERROR = (WindowsError, OSError, socket.error) # noqa: F821 14 | except NameError: 15 | _CONNECT_ERROR = (OSError, socket.error) 16 | 17 | if hasattr(socket, "socketpair"): 18 | # Since Python 3.5, socket.socketpair() is now also available on Windows 19 | socketpair = socket.socketpair 20 | else: 21 | # Replacement for socket.socketpair() 22 | def socketpair(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0): 23 | """A socket pair usable as a self-pipe, for Windows. 24 | 25 | Origin: https://gist.github.com/4325783, by Geert Jansen. 26 | Public domain. 27 | """ 28 | if family == socket.AF_INET: 29 | host = "127.0.0.1" 30 | elif family == socket.AF_INET6: 31 | host = "::1" 32 | else: 33 | raise ValueError( 34 | "Only AF_INET and AF_INET6 socket address families are supported" 35 | ) 36 | if type != socket.SOCK_STREAM: 37 | raise ValueError("Only SOCK_STREAM socket type is supported") 38 | if proto != 0: 39 | raise ValueError("Only protocol zero is supported") 40 | 41 | # We create a connected TCP socket. Note the trick with setblocking(0) 42 | # that prevents us from having to create a thread. 
43 | lsock = socket.socket(family, type, proto) 44 | try: 45 | lsock.bind((host, 0)) 46 | lsock.listen(1) 47 | # On IPv6, ignore flow_info and scope_id 48 | addr, port = lsock.getsockname()[:2] 49 | csock = socket.socket(family, type, proto) 50 | try: 51 | csock.setblocking(False) 52 | try: 53 | csock.connect((addr, port)) 54 | except _CONNECT_ERROR: 55 | pass 56 | csock.setblocking(True) 57 | ssock, _ = lsock.accept() 58 | except Exception: 59 | csock.close() 60 | raise 61 | finally: 62 | lsock.close() 63 | return (ssock, csock) 64 | -------------------------------------------------------------------------------- /test/test_backends.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import hip 4 | from hip.backends import Backend 5 | from hip._backends._loader import normalize_backend, load_backend 6 | 7 | 8 | requires_async_pool_manager = pytest.mark.skipif( 9 | not hasattr(hip, "AsyncPoolManager"), 10 | reason="async backends require AsyncPoolManager", 11 | ) 12 | 13 | 14 | class TestNormalizeBackend(object): 15 | """ 16 | Assert that we fail correctly if we attempt to use an unknown or incompatible backend. 17 | """ 18 | 19 | def test_unknown(self): 20 | with pytest.raises(ValueError) as excinfo: 21 | normalize_backend("_unknown", async_mode=False) 22 | 23 | assert "unknown backend specifier _unknown" == str(excinfo.value) 24 | 25 | def test_sync(self): 26 | assert normalize_backend(Backend("sync"), async_mode=False) == Backend("sync") 27 | assert normalize_backend("sync", async_mode=False) == Backend("sync") 28 | assert normalize_backend(None, async_mode=False) == Backend("sync") 29 | 30 | with pytest.raises(ValueError) as excinfo: 31 | normalize_backend(Backend("anyio"), async_mode=False) 32 | assert "anyio backend needs to be run in async mode" == str(excinfo.value) 33 | 34 | with pytest.raises(ValueError) as excinfo: 35 | normalize_backend(Backend("trio"), async_mode=False) 36 | assert "trio backend needs to be run in async mode" == str(excinfo.value) 37 | 38 | @requires_async_pool_manager 39 | def test_async(self): 40 | assert normalize_backend(Backend("anyio"), async_mode=True) == Backend("anyio") 41 | assert normalize_backend(Backend("trio"), async_mode=True) == Backend("trio") 42 | 43 | with pytest.raises(ValueError) as excinfo: 44 | normalize_backend(Backend("sync"), async_mode=True) 45 | assert "sync backend needs to be run in sync mode" == str(excinfo.value) 46 | 47 | 48 | class TestLoadBackend(object): 49 | """ 50 | Assert that we can load a normalized backend 51 | """ 52 | 53 | def test_sync(self): 54 | load_backend(normalize_backend("sync", async_mode=False)) 55 | -------------------------------------------------------------------------------- /test/test_compatibility.py: -------------------------------------------------------------------------------- 1 | from hip.response import HTTPResponse 2 | from hip.packages.six.moves import http_cookiejar, urllib 3 | 4 | 5 | class TestCookiejar(object): 6 | def test_extract(self): 7 | request = urllib.request.Request("http://google.com") 8 | cookiejar = http_cookiejar.CookieJar() 9 | response = HTTPResponse() 10 | 11 | cookies = [ 12 | "sessionhash=abcabcabcabcab; path=/; HttpOnly", 13 | "lastvisit=1348253375; expires=Sat, 21-Sep-2050 18:49:35 GMT; path=/", 14 | ] 15 | for c in cookies: 16 | response.headers.add("set-cookie", c) 17 | cookiejar.extract_cookies(response, request) 18 | assert len(cookiejar) == len(cookies) 19 | 
-------------------------------------------------------------------------------- /test/test_connection.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import mock 3 | 4 | import h11 5 | import pytest 6 | 7 | from hip.base import Request 8 | from hip.connection import _request_bytes_iterable, RECENT_DATE 9 | from hip.util.ssl_ import CertificateError, match_hostname 10 | 11 | 12 | class TestConnection(object): 13 | """ 14 | Tests in this suite should not make any network requests or connections. 15 | """ 16 | 17 | def test_match_hostname_no_cert(self): 18 | cert = None 19 | asserted_hostname = "foo" 20 | with pytest.raises(ValueError): 21 | match_hostname(cert, asserted_hostname) 22 | 23 | def test_match_hostname_empty_cert(self): 24 | cert = {} 25 | asserted_hostname = "foo" 26 | with pytest.raises(ValueError): 27 | match_hostname(cert, asserted_hostname) 28 | 29 | def test_match_hostname_match(self): 30 | cert = {"subjectAltName": [("DNS", "foo")]} 31 | asserted_hostname = "foo" 32 | match_hostname(cert, asserted_hostname) 33 | 34 | def test_match_hostname_mismatch(self): 35 | cert = {"subjectAltName": [("DNS", "foo")]} 36 | asserted_hostname = "bar" 37 | try: 38 | with mock.patch("hip.util.ssl_.log.warning") as mock_log: 39 | match_hostname(cert, asserted_hostname) 40 | except CertificateError as e: 41 | assert "hostname 'bar' doesn't match 'foo'" in str(e) 42 | mock_log.assert_called_once_with( 43 | "Certificate did not match expected hostname: %s. Certificate: %s", 44 | "bar", 45 | {"subjectAltName": [("DNS", "foo")]}, 46 | ) 47 | assert e._peer_cert == cert 48 | 49 | def test_recent_date(self): 50 | # This test is to make sure that the RECENT_DATE value 51 | # doesn't get too far behind what the current date is. 52 | # When this test fails update hip.connection.RECENT_DATE 53 | # according to the rules defined in that file. 54 | two_years = datetime.timedelta(days=365 * 2) 55 | assert RECENT_DATE > (datetime.datetime.today() - two_years).date() 56 | 57 | def test_request_bytes_iterable(self): 58 | # Assert that we send the first set of body bytes with the request packet. 59 | body_bytes = [b"Hello, ", b"world!"] 60 | body_size = 13 61 | request = Request( 62 | method=b"POST", 63 | target="post", 64 | body=body_bytes, 65 | headers={"Content-Length": body_size}, 66 | ) 67 | request.add_host("httpbin.org", port=80, scheme="http") 68 | state_machine = h11.Connection(our_role=h11.CLIENT) 69 | iterable = _request_bytes_iterable(request, state_machine) 70 | first_packet, second_packet = next(iterable), next(iterable) 71 | assert request.method in first_packet 72 | assert body_bytes[0] in first_packet 73 | assert body_bytes[1] in second_packet 74 | with pytest.raises(StopIteration): 75 | next(iterable) 76 | 77 | def test_request_default_port_handling(self): 78 | # Verify that the port is only included in the Host header when it 79 | # is necessary. 
In other words, when the specified port does not 80 | # match the port for a KNOWN protocol 81 | request = Request(method=b"GET", target="/") 82 | request.add_host("httpbin.org", port=80, scheme="http") 83 | assert request.headers["host"] == "httpbin.org" 84 | 85 | request = Request(method=b"GET", target="/") 86 | request.add_host("httpbin.org", port=443, scheme="https") 87 | assert request.headers["host"] == "httpbin.org" 88 | 89 | request = Request(method=b"GET", target="/") 90 | request.add_host("httpbin.org", port=5672, scheme="amqp") 91 | assert request.headers["host"] == "httpbin.org:5672" 92 | -------------------------------------------------------------------------------- /test/test_exceptions.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | 3 | import pytest 4 | 5 | from hip.exceptions import ( 6 | HTTPError, 7 | MaxRetryError, 8 | LocationParseError, 9 | ClosedPoolError, 10 | EmptyPoolError, 11 | ReadTimeoutError, 12 | ConnectTimeoutError, 13 | HeaderParsingError, 14 | ) 15 | from hip.connectionpool import HTTPConnectionPool 16 | 17 | 18 | class TestPickle(object): 19 | @pytest.mark.parametrize( 20 | "exception", 21 | [ 22 | HTTPError(None), 23 | MaxRetryError(None, None, None), 24 | LocationParseError(None), 25 | ConnectTimeoutError(None), 26 | HTTPError("foo"), 27 | HTTPError("foo", IOError("foo")), 28 | MaxRetryError(HTTPConnectionPool("localhost"), "/", None), 29 | LocationParseError("fake location"), 30 | ClosedPoolError(HTTPConnectionPool("localhost"), None), 31 | EmptyPoolError(HTTPConnectionPool("localhost"), None), 32 | ReadTimeoutError(HTTPConnectionPool("localhost"), "/", None), 33 | ], 34 | ) 35 | def test_exceptions(self, exception): 36 | result = pickle.loads(pickle.dumps(exception)) 37 | assert isinstance(result, type(exception)) 38 | 39 | 40 | class TestFormat(object): 41 | def test_header_parsing_errors(self): 42 | hpe = HeaderParsingError("defects", "unparsed_data") 43 | 44 | assert "defects" in str(hpe) 45 | assert "unparsed_data" in str(hpe) 46 | -------------------------------------------------------------------------------- /test/test_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hip.fields import format_header_param_rfc2231, guess_content_type, RequestField 4 | from hip.packages.six import u 5 | 6 | 7 | class TestRequestField(object): 8 | @pytest.mark.parametrize( 9 | "filename, content_types", 10 | [ 11 | ("image.jpg", ["image/jpeg", "image/pjpeg"]), 12 | ("notsure", ["application/octet-stream"]), 13 | (None, ["application/octet-stream"]), 14 | ], 15 | ) 16 | def test_guess_content_type(self, filename, content_types): 17 | assert guess_content_type(filename) in content_types 18 | 19 | def test_create(self): 20 | simple_field = RequestField("somename", "data") 21 | assert simple_field.render_headers() == "\r\n" 22 | filename_field = RequestField("somename", "data", filename="somefile.txt") 23 | assert filename_field.render_headers() == "\r\n" 24 | headers_field = RequestField("somename", "data", headers={"Content-Length": 4}) 25 | assert headers_field.render_headers() == "Content-Length: 4\r\n\r\n" 26 | 27 | def test_make_multipart(self): 28 | field = RequestField("somename", "data") 29 | field.make_multipart(content_type="image/jpg", content_location="/test") 30 | assert ( 31 | field.render_headers() 32 | == 'Content-Disposition: form-data; name="somename"\r\n' 33 | "Content-Type: image/jpg\r\n" 34 | "Content-Location: 
/test\r\n" 35 | "\r\n" 36 | ) 37 | 38 | def test_make_multipart_empty_filename(self): 39 | field = RequestField("somename", "data", "") 40 | field.make_multipart(content_type="application/octet-stream") 41 | assert ( 42 | field.render_headers() 43 | == 'Content-Disposition: form-data; name="somename"; filename=""\r\n' 44 | "Content-Type: application/octet-stream\r\n" 45 | "\r\n" 46 | ) 47 | 48 | def test_render_parts(self): 49 | field = RequestField("somename", "data") 50 | parts = field._render_parts({"name": "value", "filename": "value"}) 51 | assert 'name="value"' in parts 52 | assert 'filename="value"' in parts 53 | parts = field._render_parts([("name", "value"), ("filename", "value")]) 54 | assert parts == 'name="value"; filename="value"' 55 | 56 | def test_render_part_rfc2231_unicode(self): 57 | field = RequestField( 58 | "somename", "data", header_formatter=format_header_param_rfc2231 59 | ) 60 | param = field._render_part("filename", u("n\u00e4me")) 61 | assert param == "filename*=utf-8''n%C3%A4me" 62 | 63 | def test_render_part_rfc2231_ascii(self): 64 | field = RequestField( 65 | "somename", "data", header_formatter=format_header_param_rfc2231 66 | ) 67 | param = field._render_part("filename", b"name") 68 | assert param == 'filename="name"' 69 | 70 | def test_render_part_html5_unicode(self): 71 | field = RequestField("somename", "data") 72 | param = field._render_part("filename", u("n\u00e4me")) 73 | assert param == u('filename="n\u00e4me"') 74 | 75 | def test_render_part_html5_ascii(self): 76 | field = RequestField("somename", "data") 77 | param = field._render_part("filename", b"name") 78 | assert param == 'filename="name"' 79 | 80 | def test_render_part_html5_unicode_escape(self): 81 | field = RequestField("somename", "data") 82 | param = field._render_part("filename", u("hello\\world\u0022")) 83 | assert param == u('filename="hello\\\\world%22"') 84 | 85 | def test_render_part_html5_unicode_with_control_character(self): 86 | field = RequestField("somename", "data") 87 | param = field._render_part("filename", u("hello\x1A\x1B\x1C")) 88 | assert param == u('filename="hello%1A\x1B%1C"') 89 | 90 | def test_from_tuples_rfc2231(self): 91 | field = RequestField.from_tuples( 92 | u("fieldname"), 93 | (u("filen\u00e4me"), "data"), 94 | header_formatter=format_header_param_rfc2231, 95 | ) 96 | cd = field.headers["Content-Disposition"] 97 | assert cd == u("form-data; name=\"fieldname\"; filename*=utf-8''filen%C3%A4me") 98 | 99 | def test_from_tuples_html5(self): 100 | field = RequestField.from_tuples(u("fieldname"), (u("filen\u00e4me"), "data")) 101 | cd = field.headers["Content-Disposition"] 102 | assert cd == u('form-data; name="fieldname"; filename="filen\u00e4me"') 103 | -------------------------------------------------------------------------------- /test/test_filepost.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hip.filepost import encode_multipart_formdata 4 | from hip.fields import RequestField 5 | from hip.packages.six import b, u 6 | 7 | 8 | BOUNDARY = "!! test boundary !!" 
9 | 10 | 11 | class TestMultipartEncoding(object): 12 | @pytest.mark.parametrize( 13 | "fields", [dict(k="v", k2="v2"), [("k", "v"), ("k2", "v2")]] 14 | ) 15 | def test_input_datastructures(self, fields): 16 | encoded, _ = encode_multipart_formdata(fields, boundary=BOUNDARY) 17 | assert encoded.count(b(BOUNDARY)) == 3 18 | 19 | @pytest.mark.parametrize( 20 | "fields", 21 | [ 22 | [("k", "v"), ("k2", "v2")], 23 | [("k", b"v"), (u("k2"), b"v2")], 24 | [("k", b"v"), (u("k2"), "v2")], 25 | ], 26 | ) 27 | def test_field_encoding(self, fields): 28 | encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) 29 | expected = ( 30 | b"--" + b(BOUNDARY) + b"\r\n" 31 | b'Content-Disposition: form-data; name="k"\r\n' 32 | b"\r\n" 33 | b"v\r\n" 34 | b"--" + b(BOUNDARY) + b"\r\n" 35 | b'Content-Disposition: form-data; name="k2"\r\n' 36 | b"\r\n" 37 | b"v2\r\n" 38 | b"--" + b(BOUNDARY) + b"--\r\n" 39 | ) 40 | 41 | assert encoded == expected 42 | 43 | assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) 44 | 45 | def test_filename(self): 46 | fields = [("k", ("somename", b"v"))] 47 | 48 | encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) 49 | expected = ( 50 | b"--" + b(BOUNDARY) + b"\r\n" 51 | b'Content-Disposition: form-data; name="k"; filename="somename"\r\n' 52 | b"Content-Type: application/octet-stream\r\n" 53 | b"\r\n" 54 | b"v\r\n" 55 | b"--" + b(BOUNDARY) + b"--\r\n" 56 | ) 57 | 58 | assert encoded == expected 59 | 60 | assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) 61 | 62 | def test_textplain(self): 63 | fields = [("k", ("somefile.txt", b"v"))] 64 | 65 | encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) 66 | expected = ( 67 | b"--" + b(BOUNDARY) + b"\r\n" 68 | b'Content-Disposition: form-data; name="k"; filename="somefile.txt"\r\n' 69 | b"Content-Type: text/plain\r\n" 70 | b"\r\n" 71 | b"v\r\n" 72 | b"--" + b(BOUNDARY) + b"--\r\n" 73 | ) 74 | 75 | assert encoded == expected 76 | 77 | assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) 78 | 79 | def test_explicit(self): 80 | fields = [("k", ("somefile.txt", b"v", "image/jpeg"))] 81 | 82 | encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) 83 | expected = ( 84 | b"--" + b(BOUNDARY) + b"\r\n" 85 | b'Content-Disposition: form-data; name="k"; filename="somefile.txt"\r\n' 86 | b"Content-Type: image/jpeg\r\n" 87 | b"\r\n" 88 | b"v\r\n" 89 | b"--" + b(BOUNDARY) + b"--\r\n" 90 | ) 91 | 92 | assert encoded == expected 93 | 94 | assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) 95 | 96 | def test_request_fields(self): 97 | fields = [ 98 | RequestField( 99 | "k", 100 | b"v", 101 | filename="somefile.txt", 102 | headers={"Content-Type": "image/jpeg"}, 103 | ) 104 | ] 105 | 106 | encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) 107 | expected = ( 108 | b"--" + b(BOUNDARY) + b"\r\n" 109 | b"Content-Type: image/jpeg\r\n" 110 | b"\r\n" 111 | b"v\r\n" 112 | b"--" + b(BOUNDARY) + b"--\r\n" 113 | ) 114 | 115 | assert encoded == expected 116 | -------------------------------------------------------------------------------- /test/test_no_ssl.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test what happens if Python was built without SSL 3 | 4 | * Everything that does not involve HTTPS should still work 5 | * HTTPS requests must fail with an error that points at the ssl module 6 | """ 7 | 8 | import sys 9 | 
import pytest 10 | 11 | 12 | class ImportBlocker(object): 13 | """ 14 | Block Imports 15 | 16 | To be placed on ``sys.meta_path``. This ensures that the modules 17 | specified cannot be imported, even if they are a builtin. 18 | """ 19 | 20 | def __init__(self, *namestoblock): 21 | self.namestoblock = namestoblock 22 | 23 | def find_module(self, fullname, path=None): 24 | if fullname in self.namestoblock: 25 | return self 26 | return None 27 | 28 | def load_module(self, fullname): 29 | raise ImportError("import of {0} is blocked".format(fullname)) 30 | 31 | 32 | class ModuleStash(object): 33 | """ 34 | Stashes away previously imported modules 35 | 36 | If we reimport a module the data from coverage is lost, so we reuse the old 37 | modules 38 | """ 39 | 40 | def __init__(self, namespace, modules=sys.modules): 41 | self.namespace = namespace 42 | self.modules = modules 43 | self._data = {} 44 | 45 | def stash(self): 46 | self._data[self.namespace] = self.modules.pop(self.namespace, None) 47 | 48 | for module in list(self.modules.keys()): 49 | if module.startswith(self.namespace + "."): 50 | self._data[module] = self.modules.pop(module) 51 | 52 | def pop(self): 53 | self.modules.pop(self.namespace, None) 54 | 55 | for module in list(self.modules.keys()): 56 | if module.startswith(self.namespace + "."): 57 | self.modules.pop(module) 58 | 59 | self.modules.update(self._data) 60 | 61 | 62 | ssl_blocker = ImportBlocker("ssl", "_ssl") 63 | module_stash = ModuleStash("hip") 64 | 65 | 66 | class TestWithoutSSL(object): 67 | @classmethod 68 | def setup_class(cls): 69 | sys.modules.pop("ssl", None) 70 | sys.modules.pop("_ssl", None) 71 | 72 | module_stash.stash() 73 | sys.meta_path.insert(0, ssl_blocker) 74 | 75 | def teardown_class(cls): 76 | sys.meta_path.remove(ssl_blocker) 77 | module_stash.pop() 78 | 79 | 80 | class TestImportWithoutSSL(TestWithoutSSL): 81 | def test_cannot_import_ssl(self): 82 | with pytest.raises(ImportError): 83 | import ssl # noqa: F401 84 | 85 | def test_import_hip(self): 86 | import hip # noqa: F401 87 | -------------------------------------------------------------------------------- /test/test_proxymanager.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hip.poolmanager import ProxyManager 4 | 5 | 6 | class TestProxyManager(object): 7 | def test_proxy_headers(self): 8 | url = "http://pypi.org/project/hip/" 9 | with ProxyManager("http://something:1234") as p: 10 | # Verify default headers 11 | default_headers = {"Accept": "*/*", "Host": "pypi.org"} 12 | headers = p._set_proxy_headers(url) 13 | 14 | assert headers == default_headers 15 | 16 | # Verify default headers don't overwrite provided headers 17 | provided_headers = { 18 | "Accept": "application/json", 19 | "custom": "header", 20 | "Host": "test.python.org", 21 | } 22 | headers = p._set_proxy_headers(url, provided_headers) 23 | 24 | assert headers == provided_headers 25 | 26 | # Verify proxy with nonstandard port 27 | provided_headers = {"Accept": "application/json"} 28 | expected_headers = provided_headers.copy() 29 | expected_headers.update({"Host": "pypi.org:8080"}) 30 | url_with_port = "http://pypi.org:8080/project/hip/" 31 | headers = p._set_proxy_headers(url_with_port, provided_headers) 32 | 33 | assert headers == expected_headers 34 | 35 | def test_default_port(self): 36 | with ProxyManager("http://something") as p: 37 | assert p.proxy.port == 80 38 | with ProxyManager("https://something") as p: 39 | assert p.proxy.port == 443 40 | 41 | def 
test_invalid_scheme(self): 42 | with pytest.raises(AssertionError): 43 | ProxyManager("invalid://host/p") 44 | with pytest.raises(ValueError): 45 | ProxyManager("invalid://host/p") 46 | -------------------------------------------------------------------------------- /test/test_queue_monkeypatch.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import mock 4 | 5 | import pytest 6 | 7 | from hip import HTTPConnectionPool 8 | from hip.exceptions import EmptyPoolError 9 | from hip.packages.six.moves import queue 10 | 11 | 12 | class BadError(Exception): 13 | """ 14 | This should not be raised. 15 | """ 16 | 17 | pass 18 | 19 | 20 | class TestMonkeypatchResistance(object): 21 | """ 22 | Test that connection pool works even with a monkey patched Queue module, 23 | see obspy/obspy#1599, psf/requests#3742, urllib3/urllib3#1061. 24 | """ 25 | 26 | def test_queue_monkeypatching(self): 27 | with mock.patch.object(queue, "Empty", BadError): 28 | with HTTPConnectionPool(host="localhost", block=True) as http: 29 | http._get_conn() 30 | with pytest.raises(EmptyPoolError): 31 | http._get_conn(timeout=0) 32 | -------------------------------------------------------------------------------- /test/test_ssl.py: -------------------------------------------------------------------------------- 1 | import mock 2 | 3 | import pytest 4 | from hip.util import ssl_ 5 | from hip.exceptions import SNIMissingWarning 6 | 7 | from test import notPyPy2 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "addr", 12 | [ 13 | # IPv6 14 | "::1", 15 | "::", 16 | "FE80::8939:7684:D84b:a5A4%251", 17 | # IPv4 18 | "127.0.0.1", 19 | "8.8.8.8", 20 | b"127.0.0.1", 21 | # IPv6 w/ Zone IDs 22 | "FE80::8939:7684:D84b:a5A4%251", 23 | b"FE80::8939:7684:D84b:a5A4%251", 24 | "FE80::8939:7684:D84b:a5A4%19", 25 | b"FE80::8939:7684:D84b:a5A4%19", 26 | ], 27 | ) 28 | def test_is_ipaddress_true(addr): 29 | assert ssl_.is_ipaddress(addr) 30 | 31 | 32 | @pytest.mark.parametrize( 33 | "addr", 34 | [ 35 | "www.python.org", 36 | b"www.python.org", 37 | "v2.sg.media-imdb.com", 38 | b"v2.sg.media-imdb.com", 39 | ], 40 | ) 41 | def test_is_ipaddress_false(addr): 42 | assert not ssl_.is_ipaddress(addr) 43 | 44 | 45 | @pytest.mark.parametrize( 46 | ["has_sni", "server_hostname", "uses_sni"], 47 | [ 48 | (True, "127.0.0.1", False), 49 | (False, "www.python.org", False), 50 | (False, "0.0.0.0", False), 51 | (True, "www.google.com", True), 52 | (True, None, False), 53 | (False, None, False), 54 | ], 55 | ) 56 | def test_context_sni_with_ip_address(monkeypatch, has_sni, server_hostname, uses_sni): 57 | monkeypatch.setattr(ssl_, "HAS_SNI", has_sni) 58 | 59 | sock = mock.Mock() 60 | context = mock.create_autospec(ssl_.SSLContext) 61 | 62 | ssl_.ssl_wrap_socket(sock, server_hostname=server_hostname, ssl_context=context) 63 | 64 | if uses_sni: 65 | context.wrap_socket.assert_called_with(sock, server_hostname=server_hostname) 66 | else: 67 | context.wrap_socket.assert_called_with(sock) 68 | 69 | 70 | @pytest.mark.parametrize( 71 | ["has_sni", "server_hostname", "should_warn"], 72 | [ 73 | (True, "www.google.com", False), 74 | (True, "127.0.0.1", False), 75 | (False, "127.0.0.1", False), 76 | (False, "www.google.com", True), 77 | (True, None, False), 78 | (False, None, False), 79 | ], 80 | ) 81 | def test_sni_missing_warning_with_ip_addresses( 82 | monkeypatch, has_sni, server_hostname, should_warn 83 | ): 84 | monkeypatch.setattr(ssl_, "HAS_SNI", has_sni) 85 | 86 | sock = mock.Mock() 87 | 
context = mock.create_autospec(ssl_.SSLContext) 88 | 89 | with mock.patch("warnings.warn") as warn: 90 | ssl_.ssl_wrap_socket(sock, server_hostname=server_hostname, ssl_context=context) 91 | 92 | if should_warn: 93 | assert warn.call_count >= 1 94 | warnings = [call[0][1] for call in warn.call_args_list] 95 | assert SNIMissingWarning in warnings 96 | else: 97 | assert warn.call_count == 0 98 | 99 | 100 | @pytest.mark.parametrize( 101 | ["ciphers", "expected_ciphers"], 102 | [ 103 | (None, ssl_.DEFAULT_CIPHERS), 104 | ("ECDH+AESGCM:ECDH+CHACHA20", "ECDH+AESGCM:ECDH+CHACHA20"), 105 | ], 106 | ) 107 | def test_create_ssl_context_set_ciphers(monkeypatch, ciphers, expected_ciphers): 108 | 109 | context = mock.create_autospec(ssl_.SSLContext) 110 | context.set_ciphers = mock.Mock() 111 | context.options = 0 112 | monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) 113 | 114 | assert ssl_.create_ssl_context(ciphers=ciphers) is context 115 | 116 | assert context.set_ciphers.call_count == 1 117 | assert context.set_ciphers.call_args == mock.call(expected_ciphers) 118 | 119 | 120 | def test_wrap_socket_given_context_no_load_default_certs(): 121 | context = mock.create_autospec(ssl_.SSLContext) 122 | context.load_default_certs = mock.Mock() 123 | 124 | sock = mock.Mock() 125 | ssl_.ssl_wrap_socket(sock, ssl_context=context) 126 | 127 | context.load_default_certs.assert_not_called() 128 | 129 | 130 | @notPyPy2 131 | def test_wrap_socket_given_ca_certs_no_load_default_certs(monkeypatch): 132 | context = mock.create_autospec(ssl_.SSLContext) 133 | context.load_default_certs = mock.Mock() 134 | context.options = 0 135 | 136 | monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) 137 | 138 | sock = mock.Mock() 139 | ssl_.ssl_wrap_socket(sock, ca_certs="/tmp/fake-file") 140 | 141 | context.load_default_certs.assert_not_called() 142 | context.load_verify_locations.assert_called_with("/tmp/fake-file", None) 143 | 144 | 145 | def test_wrap_socket_default_loads_default_certs(monkeypatch): 146 | context = mock.create_autospec(ssl_.SSLContext) 147 | context.load_default_certs = mock.Mock() 148 | context.options = 0 149 | 150 | monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) 151 | 152 | sock = mock.Mock() 153 | ssl_.ssl_wrap_socket(sock) 154 | 155 | context.load_default_certs.assert_called_with() 156 | 157 | 158 | @pytest.mark.parametrize( 159 | ["pha", "expected_pha"], [(None, None), (False, True), (True, True)] 160 | ) 161 | def test_create_ssl_context_pha(monkeypatch, pha, expected_pha): 162 | context = mock.create_autospec(ssl_.SSLContext) 163 | context.set_ciphers = mock.Mock() 164 | context.options = 0 165 | context.post_handshake_auth = pha 166 | monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) 167 | 168 | assert ssl_.create_ssl_context() is context 169 | 170 | assert context.post_handshake_auth == expected_pha 171 | -------------------------------------------------------------------------------- /test/test_wait.py: -------------------------------------------------------------------------------- 1 | import signal 2 | import socket 3 | import threading 4 | 5 | try: 6 | from time import monotonic 7 | except ImportError: 8 | from time import time as monotonic 9 | import time 10 | 11 | import pytest 12 | 13 | from .socketpair_helper import socketpair 14 | from hip.util.wait import ( 15 | wait_for_read, 16 | wait_for_write, 17 | wait_for_socket, 18 | select_wait_for_socket, 19 | poll_wait_for_socket, 20 | _have_working_poll, 21 | ) 22 | 23 | 24 | 
@pytest.fixture 25 | def spair(): 26 | a, b = socketpair() 27 | yield a, b 28 | a.close() 29 | b.close() 30 | 31 | 32 | variants = [wait_for_socket, select_wait_for_socket] 33 | if _have_working_poll(): 34 | variants.append(poll_wait_for_socket) 35 | 36 | 37 | @pytest.mark.parametrize("wfs", variants) 38 | def test_wait_for_socket(wfs, spair): 39 | a, b = spair 40 | 41 | with pytest.raises(RuntimeError): 42 | wfs(a, read=False, write=False) 43 | 44 | assert not wfs(a, read=True, timeout=0) 45 | assert wfs(a, write=True, timeout=0) 46 | 47 | b.send(b"x") 48 | assert wfs(a, read=True, timeout=0) 49 | assert wfs(a, read=True, timeout=10) 50 | assert wfs(a, read=True, timeout=None) 51 | 52 | # Fill up the socket with data 53 | a.setblocking(False) 54 | try: 55 | while True: 56 | a.send(b"x" * 999999) 57 | except (OSError, socket.error): 58 | pass 59 | 60 | # Now it's not writable anymore 61 | assert not wfs(a, write=True, timeout=0) 62 | 63 | # But if we ask for read-or-write, that succeeds 64 | assert wfs(a, read=True, write=True, timeout=0) 65 | 66 | # Unless we read from it 67 | assert a.recv(1) == b"x" 68 | assert not wfs(a, read=True, write=True, timeout=0) 69 | 70 | # But if the remote peer closes the socket, then it becomes readable 71 | b.close() 72 | assert wfs(a, read=True, timeout=0) 73 | 74 | # Waiting for a socket that's actually been closed is just a bug, and 75 | # raises some kind of helpful exception (exact details depend on the 76 | # platform). 77 | with pytest.raises(Exception): 78 | wfs(b, read=True) 79 | 80 | 81 | def test_wait_for_read_write(spair): 82 | a, b = spair 83 | 84 | assert not wait_for_read(a, 0) 85 | assert wait_for_write(a, 0) 86 | 87 | b.send(b"x") 88 | 89 | assert wait_for_read(a, 0) 90 | assert wait_for_write(a, 0) 91 | 92 | # Fill up the socket with data 93 | a.setblocking(False) 94 | try: 95 | while True: 96 | a.send(b"x" * 999999) 97 | except (OSError, socket.error): 98 | pass 99 | 100 | # Now it's not writable anymore 101 | assert not wait_for_write(a, 0) 102 | 103 | 104 | @pytest.mark.skipif(not hasattr(signal, "setitimer"), reason="need setitimer() support") 105 | @pytest.mark.parametrize("wfs", variants) 106 | def test_eintr(wfs, spair): 107 | a, b = spair 108 | interrupt_count = [0] 109 | 110 | def handler(sig, frame): 111 | assert sig == signal.SIGALRM 112 | interrupt_count[0] += 1 113 | 114 | old_handler = signal.signal(signal.SIGALRM, handler) 115 | try: 116 | assert not wfs(a, read=True, timeout=0) 117 | start = monotonic() 118 | try: 119 | # Start delivering SIGALRM 10 times per second 120 | signal.setitimer(signal.ITIMER_REAL, 0.1, 0.1) 121 | # Sleep for 1 second (we hope!) 
122 | wfs(a, read=True, timeout=1) 123 | finally: 124 | # Stop delivering SIGALRM 125 | signal.setitimer(signal.ITIMER_REAL, 0) 126 | end = monotonic() 127 | dur = end - start 128 | assert 0.9 < dur < 3 129 | finally: 130 | signal.signal(signal.SIGALRM, old_handler) 131 | 132 | assert interrupt_count[0] > 0 133 | 134 | 135 | @pytest.mark.skipif(not hasattr(signal, "setitimer"), reason="need setitimer() support") 136 | @pytest.mark.parametrize("wfs", variants) 137 | def test_eintr_zero_timeout(wfs, spair): 138 | a, b = spair 139 | interrupt_count = [0] 140 | 141 | def handler(sig, frame): 142 | assert sig == signal.SIGALRM 143 | interrupt_count[0] += 1 144 | 145 | old_handler = signal.signal(signal.SIGALRM, handler) 146 | try: 147 | assert not wfs(a, read=True, timeout=0) 148 | try: 149 | # Start delivering SIGALRM 1000 times per second, 150 | # to trigger race conditions such as 151 | # https://github.com/urllib3/urllib3/issues/1396. 152 | signal.setitimer(signal.ITIMER_REAL, 0.001, 0.001) 153 | # Hammer the system call for a while to trigger the 154 | # race. 155 | for i in range(100000): 156 | wfs(a, read=True, timeout=0) 157 | finally: 158 | # Stop delivering SIGALRM 159 | signal.setitimer(signal.ITIMER_REAL, 0) 160 | finally: 161 | signal.signal(signal.SIGALRM, old_handler) 162 | 163 | assert interrupt_count[0] > 0 164 | 165 | 166 | @pytest.mark.skipif(not hasattr(signal, "setitimer"), reason="need setitimer() support") 167 | @pytest.mark.parametrize("wfs", variants) 168 | def test_eintr_infinite_timeout(wfs, spair): 169 | a, b = spair 170 | interrupt_count = [0] 171 | 172 | def handler(sig, frame): 173 | assert sig == signal.SIGALRM 174 | interrupt_count[0] += 1 175 | 176 | def make_a_readable_after_one_second(): 177 | time.sleep(1) 178 | b.send(b"x") 179 | 180 | old_handler = signal.signal(signal.SIGALRM, handler) 181 | try: 182 | assert not wfs(a, read=True, timeout=0) 183 | start = monotonic() 184 | try: 185 | # Start delivering SIGALRM 10 times per second 186 | signal.setitimer(signal.ITIMER_REAL, 0.1, 0.1) 187 | # Sleep for 1 second (we hope!) 
188 | thread = threading.Thread(target=make_a_readable_after_one_second) 189 | thread.start() 190 | wfs(a, read=True) 191 | finally: 192 | # Stop delivering SIGALRM 193 | signal.setitimer(signal.ITIMER_REAL, 0) 194 | thread.join() 195 | end = monotonic() 196 | dur = end - start 197 | assert 0.9 < dur < 3 198 | finally: 199 | signal.signal(signal.SIGALRM, old_handler) 200 | 201 | assert interrupt_count[0] > 0 202 | -------------------------------------------------------------------------------- /test/with_dummyserver/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/test/with_dummyserver/__init__.py -------------------------------------------------------------------------------- /test/with_dummyserver/async/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/test/with_dummyserver/async/__init__.py -------------------------------------------------------------------------------- /test/with_dummyserver/async_only/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python-trio/hip/e198149c677edbeec023aeb934758c9195a4d2e3/test/with_dummyserver/async_only/__init__.py -------------------------------------------------------------------------------- /test/with_dummyserver/async_only/test_poolmanager.py: -------------------------------------------------------------------------------- 1 | import io 2 | import pytest 3 | import trio 4 | import anyio 5 | from ahip import PoolManager 6 | from ahip.exceptions import UnrewindableBodyError 7 | 8 | from test.with_dummyserver import conftest 9 | from dummyserver.testcase import HTTPDummyServerTestCase 10 | 11 | 12 | class TestFileUploads(HTTPDummyServerTestCase): 13 | @classmethod 14 | def setup_class(cls): 15 | super(TestFileUploads, cls).setup_class() 16 | cls.base_url = "http://%s:%d" % (cls.host, cls.port) 17 | cls.base_url_alt = "http://%s:%d" % (cls.host_alt, cls.port) 18 | 19 | @conftest.test_all_backends 20 | async def test_upload_anyio_async_files(self, backend, anyio_backend): 21 | """Uploading a file opened via 'anyio.aopen()' should be possible""" 22 | with open(__file__, mode="rb") as f: 23 | data = f.read() 24 | content_length = len(data) 25 | 26 | headers = { 27 | "Content-Length": str(content_length), 28 | } 29 | url = "%s/echo" % self.base_url 30 | 31 | with PoolManager(backend=backend) as http: 32 | async with await anyio.aopen(__file__, mode="rb") as f: 33 | resp = await http.urlopen("PUT", url, headers=headers, body=f) 34 | assert resp.status == 200 35 | assert resp.data == data 36 | 37 | @pytest.mark.trio 38 | async def test_upload_trio_wrapped_files(self): 39 | """Uploading a file wrapped via 'trio.wrap_file()' should be possible""" 40 | with open(__file__, mode="rb") as f: 41 | data = f.read() 42 | content_length = len(data) 43 | 44 | headers = { 45 | "Content-Length": str(content_length), 46 | } 47 | url = "%s/echo" % self.base_url 48 | 49 | with PoolManager(backend="trio") as http: 50 | with open(__file__, mode="rb") as f: 51 | f = trio.wrap_file(f) 52 | resp = await http.urlopen("PUT", url, headers=headers, body=f) 53 | assert resp.status == 200 54 | assert resp.data == data 55 | 56 | @conftest.test_all_backends 57 | async def test_redirect_with_failed_async_tell(self, backend, anyio_backend): 58 
| """Abort request if failed to get a position from async tell()""" 59 | 60 | class BadTellObject(io.BytesIO): 61 | async def tell(self): 62 | raise IOError 63 | 64 | body = BadTellObject(b"the data") 65 | url = "%s/redirect?target=/successful_retry" % self.base_url 66 | # httplib uses fileno if Content-Length isn't supplied, 67 | # which is unsupported by BytesIO. 68 | headers = {"Content-Length": "8"} 69 | 70 | with PoolManager() as http: 71 | with pytest.raises(UnrewindableBodyError) as e: 72 | await http.urlopen("PUT", url, headers=headers, body=body) 73 | assert "Unable to record file position for" in str(e.value) 74 | 75 | @conftest.test_all_backends 76 | async def test_redirect_with_failed_async_seek(self, backend, anyio_backend): 77 | """Abort request if failed to restore position with async seek()""" 78 | 79 | class BadSeekObject(io.BytesIO): 80 | async def seek(self, *_): 81 | raise IOError 82 | 83 | body = BadSeekObject(b"the data") 84 | url = "%s/redirect?target=/successful_retry" % self.base_url 85 | # httplib uses fileno if Content-Length isn't supplied, 86 | # which is unsupported by BytesIO. 87 | headers = {"Content-Length": "8"} 88 | 89 | with PoolManager() as http: 90 | with pytest.raises(UnrewindableBodyError) as e: 91 | await http.urlopen("PUT", url, headers=headers, body=body) 92 | assert ( 93 | "An error occurred when rewinding request body for redirect/retry." 94 | == str(e.value) 95 | ) 96 | -------------------------------------------------------------------------------- /test/with_dummyserver/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | test_all_backends = pytest.mark.parametrize( 4 | "backend, anyio_backend", 5 | [ 6 | pytest.param("trio", "trio", id="trio-native"), 7 | pytest.param("anyio", "trio", id="anyio-trio"), 8 | pytest.param("anyio", "curio", id="anyio-curio"), 9 | pytest.param("anyio", "asyncio", id="anyio-asyncio"), 10 | ], 11 | ) 12 | 13 | 14 | test_sync_backend = pytest.mark.parametrize("backend, anyio_backend", [("sync", None)]) 15 | -------------------------------------------------------------------------------- /test/with_dummyserver/test_chunked_transfer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import pytest 4 | 5 | from hip import HTTPConnectionPool 6 | from hip.exceptions import InvalidBodyError 7 | from hip.util.retry import Retry 8 | from dummyserver.testcase import SocketDummyServerTestCase, consume_socket 9 | 10 | # Retry failed tests 11 | pytestmark = pytest.mark.flaky 12 | 13 | 14 | class TestChunkedTransfer(SocketDummyServerTestCase): 15 | def start_chunked_handler(self): 16 | self.buffer = b"" 17 | 18 | def socket_handler(listener): 19 | sock = listener.accept()[0] 20 | 21 | while not self.buffer.endswith(b"\r\n0\r\n\r\n"): 22 | self.buffer += sock.recv(65536) 23 | 24 | sock.send( 25 | b"HTTP/1.1 200 OK\r\n" 26 | b"Content-type: text/plain\r\n" 27 | b"Content-Length: 0\r\n" 28 | b"\r\n" 29 | ) 30 | sock.close() 31 | 32 | self._start_server(socket_handler) 33 | 34 | def test_chunks(self): 35 | self.start_chunked_handler() 36 | chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] 37 | with HTTPConnectionPool(self.host, self.port, retries=False) as pool: 38 | pool.urlopen("GET", "/", chunks, headers=dict(DNT="1")) 39 | 40 | assert b"transfer-encoding" in self.buffer 41 | body = self.buffer.split(b"\r\n\r\n", 1)[1] 42 | lines = body.split(b"\r\n") 43 | # Empty chunks should have 
been skipped, as this could not be distinguished 44 | # from terminating the transmission 45 | for i, chunk in enumerate([c for c in chunks if c]): 46 | assert lines[i * 2] == hex(len(chunk))[2:].encode("utf-8") 47 | assert lines[i * 2 + 1] == chunk 48 | 49 | def _test_body(self, data): 50 | self.start_chunked_handler() 51 | with HTTPConnectionPool(self.host, self.port, retries=False) as pool: 52 | pool.urlopen("GET", "/", data) 53 | header, body = self.buffer.split(b"\r\n\r\n", 1) 54 | 55 | assert b"transfer-encoding: chunked" in header.split(b"\r\n") 56 | if data: 57 | bdata = data if isinstance(data, bytes) else data.encode("utf-8") 58 | assert b"\r\n" + bdata + b"\r\n" in body 59 | assert body.endswith(b"\r\n0\r\n\r\n") 60 | 61 | len_str = body.split(b"\r\n", 1)[0] 62 | stated_len = int(len_str, 16) 63 | assert stated_len == len(bdata) 64 | else: 65 | assert body == b"0\r\n\r\n" 66 | 67 | def test_bytestring_body(self): 68 | self._test_body(b"thisshouldbeonechunk\r\nasdf") 69 | 70 | def test_unicode_body(self): 71 | # Unicode bodies are not supported. 72 | chunk = u"thisshouldbeonechunk\r\näöüß" 73 | with pytest.raises(InvalidBodyError): 74 | self._test_body(chunk) 75 | 76 | def test_empty_string_body(self): 77 | self._test_body(b"") 78 | 79 | def test_empty_iterable_body(self): 80 | self._test_body([]) 81 | 82 | def test_removes_duplicate_host_header(self): 83 | self.start_chunked_handler() 84 | chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] 85 | with HTTPConnectionPool(self.host, self.port, retries=False) as pool: 86 | pool.urlopen("GET", "/", chunks, headers={"Host": "test.org"}) 87 | 88 | header_block = self.buffer.split(b"\r\n\r\n", 1)[0].lower() 89 | header_lines = header_block.split(b"\r\n")[1:] 90 | 91 | host_headers = [x for x in header_lines if x.startswith(b"host")] 92 | assert len(host_headers) == 1 93 | 94 | def test_provides_default_host_header(self): 95 | self.start_chunked_handler() 96 | chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] 97 | with HTTPConnectionPool(self.host, self.port, retries=False) as pool: 98 | pool.urlopen("GET", "/", chunks) 99 | 100 | header_block = self.buffer.split(b"\r\n\r\n", 1)[0].lower() 101 | header_lines = header_block.split(b"\r\n")[1:] 102 | 103 | host_headers = [x for x in header_lines if x.startswith(b"host")] 104 | assert len(host_headers) == 1 105 | 106 | def test_preserve_chunked_on_retry(self): 107 | self.chunked_requests = 0 108 | 109 | def socket_handler(listener): 110 | for _ in range(2): 111 | sock = listener.accept()[0] 112 | request = consume_socket(sock) 113 | if b"transfer-encoding: chunked" in request.split(b"\r\n"): 114 | self.chunked_requests += 1 115 | 116 | sock.send( 117 | b"HTTP/1.1 429 Too Many Requests\r\n" 118 | b"Content-Type: text/plain\r\n" 119 | b"Retry-After: 1\r\n" 120 | b"\r\n" 121 | ) 122 | sock.close() 123 | 124 | self._start_server(socket_handler) 125 | with HTTPConnectionPool(self.host, self.port) as pool: 126 | retries = Retry(total=1) 127 | pool.urlopen( 128 | "GET", 129 | "/", 130 | body=iter([b"chunk1", b"chunk2"]), 131 | preload_content=False, 132 | retries=retries, 133 | ) 134 | assert self.chunked_requests == 2 135 | 136 | def test_preserve_chunked_on_broken_connection(self): 137 | self.chunked_requests = 0 138 | 139 | def socket_handler(listener): 140 | for i in range(2): 141 | sock = listener.accept()[0] 142 | request = consume_socket(sock) 143 | if b"transfer-encoding: chunked" in request.split(b"\r\n"): 144 | self.chunked_requests += 1 145 | 146 | if i == 0: 147 | # 
Bad HTTP version will trigger a connection close 148 | sock.send(b"HTTP/0.5 200 OK\r\n\r\n") 149 | else: 150 | sock.send(b"HTTP/1.1 200 OK\r\n\r\n") 151 | sock.close() 152 | 153 | self._start_server(socket_handler) 154 | with HTTPConnectionPool(self.host, self.port) as pool: 155 | retries = Retry(read=1) 156 | pool.urlopen( 157 | "GET", 158 | "/", 159 | body=iter([b"chunk1", b"chunk2"]), 160 | preload_content=False, 161 | retries=retries, 162 | ) 163 | assert self.chunked_requests == 2 164 | -------------------------------------------------------------------------------- /test/with_dummyserver/test_no_ssl.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test connections without the builtin ssl module 3 | 4 | Note: Import hip inside the test functions to get the import blocker to work 5 | """ 6 | import pytest 7 | from ..test_no_ssl import TestWithoutSSL 8 | 9 | from dummyserver.testcase import HTTPDummyServerTestCase, HTTPSDummyServerTestCase 10 | 11 | import hip 12 | 13 | # Retry failed tests 14 | pytestmark = pytest.mark.flaky 15 | 16 | 17 | class TestHTTPWithoutSSL(HTTPDummyServerTestCase, TestWithoutSSL): 18 | @pytest.mark.skip( 19 | reason=( 20 | "TestWithoutSSL mutates sys.modules. " 21 | "This breaks the backend loading code which imports modules at runtime. " 22 | "See discussion at https://github.com/python-trio/hip/pull/42" 23 | ) 24 | ) 25 | def test_simple(self): 26 | with hip.HTTPConnectionPool(self.host, self.port) as pool: 27 | r = pool.request("GET", "/") 28 | assert r.status == 200, r.data 29 | 30 | 31 | class TestHTTPSWithoutSSL(HTTPSDummyServerTestCase, TestWithoutSSL): 32 | def test_simple(self): 33 | # Guard against 'pool' being unbound when construction itself raises SSLError. 34 | pool = None 35 | try: 36 | pool = hip.HTTPSConnectionPool(self.host, self.port, cert_reqs="NONE") 37 | except hip.exceptions.SSLError as e: 38 | assert "SSL module is not available" in str(e) 39 | finally: 40 | if pool is not None: 41 | pool.close() 42 | --------------------------------------------------------------------------------
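
The note at the top of test/with_dummyserver/test_no_ssl.py ("Import hip inside the test functions to get the import blocker to work") leans on the ImportBlocker that test/test_no_ssl.py installs on sys.meta_path. Below is a minimal standalone sketch of that trick, assuming nothing beyond the standard library; it is not part of this repository, the SSLBlocker name is made up, and it uses the modern find_spec hook rather than the legacy find_module/load_module pair the test suite itself defines.

import sys


class SSLBlocker:
    # Refuse to import the named modules while installed on sys.meta_path.
    def __init__(self, *names):
        self.names = names

    def find_spec(self, fullname, path=None, target=None):
        if fullname in self.names:
            raise ImportError("import of {0} is blocked".format(fullname))
        return None  # defer to the other finders for everything else


blocker = SSLBlocker("ssl", "_ssl")
sys.modules.pop("ssl", None)   # drop any cached copies, as TestWithoutSSL does
sys.modules.pop("_ssl", None)
sys.meta_path.insert(0, blocker)
try:
    import ssl  # noqa: F401 -- raises ImportError while the blocker is active
except ImportError:
    print("ssl import blocked as expected")
finally:
    sys.meta_path.remove(blocker)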